hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
d9afca45a6adc9c41c0b981032c729d59e9db234
| 2,801
|
py
|
Python
|
examples/p02_budgets/budget_data_ingest/migrations/0001_initial.py
|
18F/data-federation-ingest
|
a896ef2da1faf3966f018366b26a338bb66cc717
|
[
"CC0-1.0"
] | 18
|
2019-07-26T13:43:01.000Z
|
2022-01-15T14:57:52.000Z
|
examples/p02_budgets/budget_data_ingest/migrations/0001_initial.py
|
18F/data-federation-ingest
|
a896ef2da1faf3966f018366b26a338bb66cc717
|
[
"CC0-1.0"
] | 96
|
2019-06-14T18:30:54.000Z
|
2021-08-03T09:25:02.000Z
|
examples/p02_budgets/budget_data_ingest/migrations/0001_initial.py
|
18F/data-federation-ingest
|
a896ef2da1faf3966f018366b26a338bb66cc717
|
[
"CC0-1.0"
] | 3
|
2020-01-23T04:48:18.000Z
|
2021-01-12T09:31:20.000Z
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.13 on 2018-06-08 22:54
from __future__ import unicode_literals
from django.conf import settings
import django.contrib.postgres.fields.jsonb
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='BudgetItem',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('year', models.IntegerField()),
('agency', models.TextField()),
('data_source', models.TextField()),
('category', models.TextField()),
('dollars_budgeted', models.DecimalField(decimal_places=2, max_digits=14)),
('dollars_spent', models.DecimalField(decimal_places=2, max_digits=14)),
('row_number', models.IntegerField()),
],
),
migrations.CreateModel(
name='Upload',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('file_metadata', django.contrib.postgres.fields.jsonb.JSONField(null=True)),
('file', models.FileField(upload_to='')),
('raw', models.BinaryField(null=True)),
('validation_results', django.contrib.postgres.fields.jsonb.JSONField(null=True)),
('status', models.CharField(choices=[('LOADING', 'Loading'), ('PENDING', 'Pending'), ('STAGED', 'Staged'), ('INSERTED', 'Inserted'), ('DELETED', 'Deleted')], default='LOADING', max_length=10)),
('status_changed_at', models.DateTimeField(null=True)),
('replaces', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='replaced_by', to='budget_data_ingest.Upload')),
('status_changed_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='+', to=settings.AUTH_USER_MODEL)),
('submitter', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
),
migrations.AddField(
model_name='budgetitem',
name='upload',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='budget_data_ingest.Upload'),
),
]
| 47.474576
| 209
| 0.611567
| 285
| 2,801
| 5.831579
| 0.396491
| 0.028881
| 0.042118
| 0.066185
| 0.465704
| 0.358604
| 0.358604
| 0.358604
| 0.247894
| 0.247894
| 0
| 0.012258
| 0.24277
| 2,801
| 58
| 210
| 48.293103
| 0.771334
| 0.024634
| 0
| 0.22
| 1
| 0
| 0.135581
| 0.018322
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.1
| 0
| 0.18
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
d9bd741cd9ad9e20eeb1069fce4709781f43edd4
| 6,476
|
py
|
Python
|
Qt_interface/add_subject.py
|
kithsirij/NLP-based-Syllabus-Coverage-Exam-paper-checker-Tool
|
b7b38a7b7c6d0a2ad5264df32acd75cdef552bd0
|
[
"MIT"
] | 1
|
2019-07-17T09:08:41.000Z
|
2019-07-17T09:08:41.000Z
|
Qt_interface/add_subject.py
|
kithsirij/NLP-based-Syllabus-Coverage-Exam-paper-checker-Tool
|
b7b38a7b7c6d0a2ad5264df32acd75cdef552bd0
|
[
"MIT"
] | null | null | null |
Qt_interface/add_subject.py
|
kithsirij/NLP-based-Syllabus-Coverage-Exam-paper-checker-Tool
|
b7b38a7b7c6d0a2ad5264df32acd75cdef552bd0
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'add_subject.ui'
#
# Created by: PyQt4 UI code generator 4.11.4
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class Ui_Dialog_add_subject(object):
def setupUi(self, Dialog_add_subject):
Dialog_add_subject.setObjectName(_fromUtf8("Dialog_add_subject"))
Dialog_add_subject.resize(568, 374)
font = QtGui.QFont()
font.setFamily(_fromUtf8("Times New Roman"))
font.setPointSize(10)
Dialog_add_subject.setFont(font)
Dialog_add_subject.setContextMenuPolicy(QtCore.Qt.CustomContextMenu)
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap(_fromUtf8("Qt_interface/SE_syllabus/4zIr6y.jpg")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
Dialog_add_subject.setWindowIcon(icon)
self.lbl_subject_name = QtGui.QLabel(Dialog_add_subject)
self.lbl_subject_name.setGeometry(QtCore.QRect(50, 235, 131, 21))
font = QtGui.QFont()
font.setFamily(_fromUtf8("Times New Roman"))
font.setPointSize(12)
self.lbl_subject_name.setFont(font)
self.lbl_subject_name.setObjectName(_fromUtf8("lbl_subject_name"))
self.label_add_subject = QtGui.QLabel(Dialog_add_subject)
self.label_add_subject.setGeometry(QtCore.QRect(220, 30, 151, 31))
font = QtGui.QFont()
font.setFamily(_fromUtf8("Times New Roman"))
font.setPointSize(14)
font.setBold(True)
font.setWeight(75)
self.label_add_subject.setFont(font)
self.label_add_subject.setObjectName(_fromUtf8("label_add_subject"))
self.lineEdit_subject_name = QtGui.QLineEdit(Dialog_add_subject)
self.lineEdit_subject_name.setGeometry(QtCore.QRect(190, 230, 321, 31))
font = QtGui.QFont()
font.setFamily(_fromUtf8("Times New Roman"))
font.setPointSize(12)
self.lineEdit_subject_name.setFont(font)
self.lineEdit_subject_name.setObjectName(_fromUtf8("lineEdit_subject_name"))
self.label_year = QtGui.QLabel(Dialog_add_subject)
self.label_year.setGeometry(QtCore.QRect(50, 95, 81, 21))
font = QtGui.QFont()
font.setFamily(_fromUtf8("Times New Roman"))
font.setPointSize(12)
self.label_year.setFont(font)
self.label_year.setObjectName(_fromUtf8("label_year"))
self.label_semester = QtGui.QLabel(Dialog_add_subject)
self.label_semester.setGeometry(QtCore.QRect(50, 165, 91, 21))
font = QtGui.QFont()
font.setFamily(_fromUtf8("Times New Roman"))
font.setPointSize(12)
self.label_semester.setFont(font)
self.label_semester.setObjectName(_fromUtf8("label_semester"))
self.pushButton_save = QtGui.QPushButton(Dialog_add_subject)
self.pushButton_save.setGeometry(QtCore.QRect(190, 290, 111, 31))
font = QtGui.QFont()
font.setFamily(_fromUtf8("Times New Roman"))
font.setPointSize(10)
self.pushButton_save.setFont(font)
icon1 = QtGui.QIcon()
icon1.addPixmap(QtGui.QPixmap(_fromUtf8("Qt_interface/SE_syllabus/Save-as.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.pushButton_save.setIcon(icon1)
self.pushButton_save.setIconSize(QtCore.QSize(20, 20))
self.pushButton_save.setObjectName(_fromUtf8("pushButton_save"))
self.pushButton_cancel = QtGui.QPushButton(Dialog_add_subject)
self.pushButton_cancel.setGeometry(QtCore.QRect(340, 290, 111, 31))
font = QtGui.QFont()
font.setFamily(_fromUtf8("Times New Roman"))
self.pushButton_cancel.setFont(font)
icon2 = QtGui.QIcon()
icon2.addPixmap(QtGui.QPixmap(_fromUtf8("Qt_interface/SE_syllabus/if_draw-08_725558.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.pushButton_cancel.setIcon(icon2)
self.pushButton_cancel.setIconSize(QtCore.QSize(20, 20))
self.pushButton_cancel.setObjectName(_fromUtf8("pushButton_cancel"))
self.comboBox_year = QtGui.QComboBox(Dialog_add_subject)
self.comboBox_year.setGeometry(QtCore.QRect(190, 91, 111, 31))
font = QtGui.QFont()
font.setFamily(_fromUtf8("Times New Roman"))
font.setPointSize(12)
self.comboBox_year.setFont(font)
self.comboBox_year.setObjectName(_fromUtf8("comboBox_year"))
self.comboBox_semester = QtGui.QComboBox(Dialog_add_subject)
self.comboBox_semester.setGeometry(QtCore.QRect(190, 160, 111, 31))
font = QtGui.QFont()
font.setFamily(_fromUtf8("Times New Roman"))
font.setPointSize(12)
self.comboBox_semester.setFont(font)
self.comboBox_semester.setObjectName(_fromUtf8("comboBox_semester"))
self.retranslateUi(Dialog_add_subject)
QtCore.QObject.connect(self.pushButton_cancel, QtCore.SIGNAL(_fromUtf8("clicked()")), self.lineEdit_subject_name.clear)
QtCore.QMetaObject.connectSlotsByName(Dialog_add_subject)
def retranslateUi(self, Dialog_add_subject):
Dialog_add_subject.setWindowTitle(_translate("Dialog_add_subject", "Dialog", None))
self.lbl_subject_name.setText(_translate("Dialog_add_subject", "SUBJECT NAME", None))
self.label_add_subject.setText(_translate("Dialog_add_subject", "ADD SUBJECT", None))
self.label_year.setText(_translate("Dialog_add_subject", "YEAR", None))
self.label_semester.setText(_translate("Dialog_add_subject", "SEMESTER", None))
self.pushButton_save.setText(_translate("Dialog_add_subject", "SAVE", None))
self.pushButton_cancel.setText(_translate("Dialog_add_subject", "CANCEL", None))
if __name__ == "__main__":
import sys
app = QtGui.QApplication(sys.argv)
Dialog_add_subject = QtGui.QDialog()
ui = Ui_Dialog_add_subject()
ui.setupUi(Dialog_add_subject)
Dialog_add_subject.show()
sys.exit(app.exec_())
| 48.691729
| 137
| 0.694719
| 761
| 6,476
| 5.654402
| 0.198423
| 0.092958
| 0.118987
| 0.041831
| 0.478039
| 0.408552
| 0.364397
| 0.263072
| 0.228213
| 0.206832
| 0
| 0.033788
| 0.195645
| 6,476
| 132
| 138
| 49.060606
| 0.792283
| 0.028567
| 0
| 0.293103
| 1
| 0
| 0.100634
| 0.022435
| 0
| 0
| 0
| 0
| 0
| 1
| 0.043103
| false
| 0
| 0.017241
| 0.025862
| 0.094828
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
8a3ff7ca606f5ce67c32533b5892e230c75d4eb8
| 413
|
py
|
Python
|
tables/migrations/0004_auto_20200901_2004.py
|
jarnoln/exposures
|
bbae3f79078048d25b77e178db6c0801ffe9f97e
|
[
"MIT"
] | null | null | null |
tables/migrations/0004_auto_20200901_2004.py
|
jarnoln/exposures
|
bbae3f79078048d25b77e178db6c0801ffe9f97e
|
[
"MIT"
] | null | null | null |
tables/migrations/0004_auto_20200901_2004.py
|
jarnoln/exposures
|
bbae3f79078048d25b77e178db6c0801ffe9f97e
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.1.1 on 2020-09-01 17:04
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('tables', '0003_exposure_category'),
]
operations = [
migrations.AlterField(
model_name='exposure',
name='location',
field=models.CharField(blank=True, default='', max_length=200),
),
]
| 21.736842
| 75
| 0.605327
| 44
| 413
| 5.590909
| 0.818182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.073333
| 0.273608
| 413
| 18
| 76
| 22.944444
| 0.746667
| 0.108959
| 0
| 0
| 1
| 0
| 0.120219
| 0.060109
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.083333
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
8a458f7c27c0535d07e4b642f5a00528aee12141
| 3,387
|
py
|
Python
|
main.py
|
DanielM24/Romanian-sub-dialect-identificator
|
78b3e00f8ee768eb0b1e8cf832a2dc0b8504b04d
|
[
"MIT"
] | null | null | null |
main.py
|
DanielM24/Romanian-sub-dialect-identificator
|
78b3e00f8ee768eb0b1e8cf832a2dc0b8504b04d
|
[
"MIT"
] | null | null | null |
main.py
|
DanielM24/Romanian-sub-dialect-identificator
|
78b3e00f8ee768eb0b1e8cf832a2dc0b8504b04d
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""Proiect.ipynb
Automatically generated by Colaboratory.
Original file is located at
https://colab.research.google.com/drive/1TR1Frf0EX4PtFZkLlVdGtMTINqhoQwRw
"""
# Importarea librariilor
import numpy as np
import pandas as pd # pandas pentru citirea fisierelor
from sklearn import preprocessing
from sklearn import svm # importarea modelului
from sklearn.feature_extraction.text import TfidfVectorizer # modelarea datelor pentru a obtine valori numerice din text
from sklearn.metrics import classification_report, confusion_matrix
# Incarcarea datelor
train_labels = pd.read_csv('train_labels.txt', sep='\t', header=None, engine='python')
train_labels = train_labels.to_numpy() # convertim data frame-ul intr-un vector
train_labels = train_labels[:,1] # pastram doar etichetele
train_samples = pd.read_csv('train_samples.txt', sep='\t', header=None, engine='python')
train_samples = train_samples.to_numpy()
train_samples = train_samples[:,1] # pastram doar cuvintele
validation_samples = pd.read_csv('validation_samples.txt', sep='\t', header=None, engine='python')
validation_samples = validation_samples.to_numpy()
validation_samples = validation_samples[:,1] # salvam cuvintele
validation_labels = pd.read_csv('validation_labels.txt', sep='\t', header=None, engine='python')
validation_labels = validation_labels.to_numpy()
validation_labels = validation_labels[:,1] # pastram doar etichetele
test_samples = pd.read_csv('test_samples.txt', sep='\t', header=None, engine='python')
test_samples = test_samples.to_numpy()
label = test_samples[:,0] # salvam etichetele
test_samples = test_samples[:,1] # salvam cuvintele
def normalize_data(train_data, test_data, type='l2'): # functia care intoarce datele normalizate
#tipul de normalizare este setat implicit la l2
scaler = None
if type == 'standard':
scaler = preprocessing.StandardScaler()
elif type == 'min_max':
scaler = preprocessing.MinMaxScaler()
elif type == 'l1' or type == 'l2':
scaler = preprocessing.Normalizer(norm = type)
if scaler is not None:
scaler.fit(train_data)
scaled_train_data = scaler.transform(train_data)
scaled_test_data = scaler.transform(test_data)
return scaled_train_data, scaled_test_data
else:
return train_data, test_data
# Modelarea datelor
vectorizer = TfidfVectorizer()
training_features = vectorizer.fit_transform(train_samples)
validation_features = vectorizer.transform(validation_samples)
testing_features = vectorizer.transform(test_samples)
# Normalizarea datelor
norm_train, norm_test = normalize_data(training_features, testing_features)
norm_validation, _ = normalize_data(validation_features, validation_features)
# Aplicam modelul SVM
model_svm = svm.SVC(kernel='linear', C=23, gamma=110) # definim modelul
model_svm.fit(norm_train, train_labels) # procesul de invatare
test_predictions = model_svm.predict(norm_test) # predictie pe datele de test
print("Classification report: ")
print(classification_report(validation_labels, model_svm.predict(norm_validation)))
print("Confusion matrix: ")
print(confusion_matrix(validation_labels, model_svm.predict(norm_validation)))
# Exportarea datelor in format CSV
test_export = {'id':label,'label':test_predictions}
data_f = pd.DataFrame(test_export)
data_f.to_csv('test_submission.csv',index=False)
| 38.05618
| 120
| 0.775613
| 439
| 3,387
| 5.756264
| 0.343964
| 0.050653
| 0.017808
| 0.025722
| 0.15829
| 0.117926
| 0.117926
| 0.082311
| 0
| 0
| 0
| 0.006741
| 0.124004
| 3,387
| 89
| 121
| 38.05618
| 0.844961
| 0.220254
| 0
| 0
| 1
| 0
| 0.086458
| 0.01645
| 0
| 0
| 0
| 0
| 0
| 1
| 0.018868
| false
| 0
| 0.113208
| 0
| 0.169811
| 0.075472
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
8a49e6407bf66d6fbb676497c6a102a344eeed6b
| 2,533
|
py
|
Python
|
apps/core/migrations/0001_initial.py
|
Visualway/Vitary
|
c7db9a25837fa7390b2177b9db48e73c6f1ab3c8
|
[
"BSD-3-Clause"
] | 4
|
2021-12-24T16:07:44.000Z
|
2022-03-04T02:30:20.000Z
|
apps/core/migrations/0001_initial.py
|
Visualway/Vitary
|
c7db9a25837fa7390b2177b9db48e73c6f1ab3c8
|
[
"BSD-3-Clause"
] | 4
|
2021-12-30T13:32:56.000Z
|
2022-03-15T03:58:48.000Z
|
apps/core/migrations/0001_initial.py
|
Visualway/Vitary
|
c7db9a25837fa7390b2177b9db48e73c6f1ab3c8
|
[
"BSD-3-Clause"
] | null | null | null |
# Generated by Django 4.0.2 on 2022-03-02 03:29
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('vit', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Badge',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=50)),
('description', models.TextField()),
('color', models.CharField(choices=[('success', 'Green'), ('info', 'Blue'), ('link', 'Purple'), ('primary', 'Turquoise'), ('warning', 'Yellow'), ('danger', 'Red'), ('dark', 'Black'), ('white', 'White')], max_length=50)),
('special', models.BooleanField(default=False)),
],
options={
'ordering': ['name'],
},
),
migrations.CreateModel(
name='Requirments',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=50)),
('description', models.TextField()),
('badge', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.badge')),
],
options={
'ordering': ['name'],
},
),
migrations.CreateModel(
name='Abuse',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('abuse_type', models.CharField(choices=[('ABUSE', 'Abuse'), ('INAPPROPRIATE', 'Inappropriate'), ('SPAM', 'Spam'), ('BULLYING', 'Bullying'), ('SEXUAL_CONTENT', 'Sexual Content'), ('OTHER', 'Other')], max_length=50)),
('description', models.TextField()),
('date', models.DateTimeField(auto_now_add=True)),
('to_vit', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='vit.vit')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
options={
'verbose_name_plural': 'Abuses',
'ordering': ['-date'],
},
),
]
| 42.932203
| 236
| 0.55073
| 236
| 2,533
| 5.788136
| 0.389831
| 0.029283
| 0.040996
| 0.064422
| 0.474378
| 0.474378
| 0.38287
| 0.38287
| 0.38287
| 0.38287
| 0
| 0.014803
| 0.279905
| 2,533
| 58
| 237
| 43.672414
| 0.734101
| 0.017766
| 0
| 0.490196
| 1
| 0
| 0.158488
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.058824
| 0
| 0.137255
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
8a4ccded7f4f9f9be895e48e8a31955a7046241e
| 4,371
|
py
|
Python
|
dddppp/settings.py
|
tysonclugg/dddppp
|
22f52d671ca71c2df8d6ac566a1626e5f05b3159
|
[
"MIT"
] | null | null | null |
dddppp/settings.py
|
tysonclugg/dddppp
|
22f52d671ca71c2df8d6ac566a1626e5f05b3159
|
[
"MIT"
] | null | null | null |
dddppp/settings.py
|
tysonclugg/dddppp
|
22f52d671ca71c2df8d6ac566a1626e5f05b3159
|
[
"MIT"
] | null | null | null |
"""
Django settings for dddppp project.
Generated by 'django-admin startproject' using Django 1.8.2.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
import pkg_resources
import pwd
PROJECT_NAME = 'dddppp'
# Enforce a valid POSIX environment
# Get missing environment variables via call to pwd.getpwuid(...)
_PW_CACHE = None
_PW_MAP = {
'LOGNAME': 'pw_name',
'USER': 'pw_name',
'USERNAME': 'pw_name',
'UID': 'pw_uid',
'GID': 'pw_gid',
'HOME': 'pw_dir',
'SHELL': 'pw_shell',
}
for _missing_env in set(_PW_MAP).difference(os.environ):
if _PW_CACHE is None:
_PW_CACHE = pwd.getpwuid(os.getuid())
os.environ[_missing_env] = str(getattr(_PW_CACHE, _PW_MAP[_missing_env]))
del _PW_CACHE, _PW_MAP, pwd
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'nfd_lvt=&k#h#$a^_l09j#5%s=mg+0aw=@t84ry$&rps43c33+'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = [
'localhost',
]
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'dddp',
'dddp.server',
'dddp.accounts',
'dddppp.slides',
]
for (requirement, pth) in [
('django-extensions', 'django_extensions'),
]:
try:
pkg_resources.get_distribution(requirement)
except (
pkg_resources.DistributionNotFound,
pkg_resources.VersionConflict,
):
continue
INSTALLED_APPS.append(pth)
MIDDLEWARE_CLASSES = [
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
#'django.middleware.security.SecurityMiddleware',
]
ROOT_URLCONF = 'dddppp.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'dddppp.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': os.environ.get('PGDATABASE', PROJECT_NAME),
'USER': os.environ.get('PGUSER', os.environ['LOGNAME']),
'PASSWORD': os.environ.get('DJANGO_DATABASE_PASSWORD', ''),
'HOST': os.environ.get('PGHOST', ''),
'PORT': os.environ.get('PGPORT', ''),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'en-au'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
STATICFILES_STORAGE = 'whitenoise.django.GzipManifestStaticFilesStorage'
# django-secure
# see: https://github.com/carljm/django-secure/ for more options
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
#SECURE_SSL_REDIRECT = True
SECURE_CONTENT_TYPE_NOSNIFF = True
SECURE_FRAME_DENY = True
SESSION_COOKIE_SECURE = True
SESSION_COOKIE_HTTPONLY = True
DDDPPP_CONTENT_TYPES = []
PROJ_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
| 26.981481
| 77
| 0.695722
| 524
| 4,371
| 5.620229
| 0.429389
| 0.052971
| 0.044822
| 0.050934
| 0.099491
| 0.07708
| 0.07708
| 0.07708
| 0.027165
| 0
| 0
| 0.008782
| 0.166324
| 4,371
| 161
| 78
| 27.149068
| 0.799396
| 0.260581
| 0
| 0
| 1
| 0.010101
| 0.389894
| 0.275733
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.010101
| 0.030303
| 0
| 0.030303
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
8aa50b5f8d204a63672c266b3319435ba3678601
| 2,686
|
py
|
Python
|
insight/migrations/0001_initial.py
|
leonhead/chess-insight
|
b893295719df21b4fee10d4e7b01639ded8b42b4
|
[
"MIT"
] | null | null | null |
insight/migrations/0001_initial.py
|
leonhead/chess-insight
|
b893295719df21b4fee10d4e7b01639ded8b42b4
|
[
"MIT"
] | null | null | null |
insight/migrations/0001_initial.py
|
leonhead/chess-insight
|
b893295719df21b4fee10d4e7b01639ded8b42b4
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.1 on 2020-09-08 07:43
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='OpeningSystem',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=40)),
],
),
migrations.CreateModel(
name='User',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=40)),
],
),
migrations.CreateModel(
name='Opening',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=40)),
('eco', models.CharField(max_length=3)),
('moves', models.TextField()),
('opening_system', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='insight.openingsystem')),
],
),
migrations.CreateModel(
name='Game',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('elo_mean', models.IntegerField(default=0)),
('elo_diff', models.IntegerField(default=0)),
('result', models.CharField(max_length=40)),
('timecontrol', models.CharField(max_length=40)),
('timestamp', models.DateTimeField()),
('raw', models.TextField()),
('opening', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='insight.opening')),
],
),
migrations.CreateModel(
name='Analyse',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('turnover_move', models.IntegerField(default=0)),
('turnover_evaluation', models.IntegerField(default=0)),
('unbalance_material', models.IntegerField(default=0)),
('unbalance_officers', models.IntegerField(default=0)),
('unbalance_exchange', models.IntegerField(default=0)),
('game', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='insight.game')),
],
),
]
| 41.323077
| 127
| 0.562919
| 252
| 2,686
| 5.873016
| 0.281746
| 0.085135
| 0.118243
| 0.122973
| 0.593243
| 0.487162
| 0.487162
| 0.487162
| 0.487162
| 0.487162
| 0
| 0.016869
| 0.293745
| 2,686
| 64
| 128
| 41.96875
| 0.76331
| 0.016009
| 0
| 0.491228
| 1
| 0
| 0.105642
| 0.007952
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.035088
| 0
| 0.105263
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
76dfdcc4b341cedf794e7489e27908f2ae58e24b
| 10,024
|
py
|
Python
|
mllib/nlp/seq2seq.py
|
pmaxit/dlnotebooks
|
5e5a161bbd9d0753850029be29e1488b8858ecd5
|
[
"Apache-2.0"
] | null | null | null |
mllib/nlp/seq2seq.py
|
pmaxit/dlnotebooks
|
5e5a161bbd9d0753850029be29e1488b8858ecd5
|
[
"Apache-2.0"
] | null | null | null |
mllib/nlp/seq2seq.py
|
pmaxit/dlnotebooks
|
5e5a161bbd9d0753850029be29e1488b8858ecd5
|
[
"Apache-2.0"
] | null | null | null |
# AUTOGENERATED! DO NOT EDIT! File to edit: nbs/01_seq2seq.ipynb (unless otherwise specified).
__all__ = ['Encoder', 'NewDecoder', 'Seq2Seq']
# Cell
from torch import nn
from torch import optim
import torch
import torch.nn.functional as F
from torch.nn.utils.rnn import pack_padded_sequence, pad_packed_sequence
# Cell
class Encoder(nn.Module):
    """Sequence encoder: embeds token ids and runs them through a stacked LSTM.

    Only the final (hidden, cell) state pair is returned; it serves as the
    context vector handed to the decoder.
    """
    def __init__(self, input_size, embedding_size, hidden_size, num_layers=2, p=0.1):
        super().__init__()
        self.hidden_size = hidden_size
        self.num_layers = num_layers
        # Layer creation order kept stable so seeded initialization is reproducible.
        self.dropout = nn.Dropout(p)
        self.embedding = nn.Embedding(input_size, embedding_size)
        self.rnn = nn.LSTM(embedding_size, hidden_size, num_layers, dropout=p, batch_first=False)

    def forward(self, x, x_len):
        """Encode a batch of padded sequences.

        x: (seq_length, N) tensor of token ids.
        x_len: per-sequence lengths, used for packing.
        Returns the LSTM's final (hidden, cell) states.
        """
        embedded = self.dropout(self.embedding(x))  # (seq_length, N, embedding_size)
        # Pack so the LSTM skips padded positions; lengths need not be sorted.
        packed = pack_padded_sequence(embedded, x_len.cpu(), batch_first=False, enforce_sorted=False)
        _, (hidden, cell) = self.rnn(packed)
        # Per-step outputs are irrelevant here; only the context states matter.
        return hidden, cell
# Cell
class NewDecoder(nn.Module):
    """Single-step LSTM decoder.

    Decodes one time step at a time: embeds the previous output token,
    advances the LSTM state, and projects to vocabulary logits. The
    ``encoder_outputs`` argument is accepted for interface compatibility
    (e.g. with attention decoders) but is not used.
    """
    def __init__(self, hidden_size, embedding_size, output_size, n_layers=1, dropout_p=0.1):
        super().__init__()
        # Bookkeeping
        self.hidden_size = hidden_size
        self.output_size = output_size
        self.n_layers = n_layers
        self.dropout_p = dropout_p
        # Layers (creation order kept stable so seeded initialization is reproducible)
        self.embedding = nn.Embedding(output_size, embedding_size)
        self.dropout = nn.Dropout(dropout_p)
        self.rnn = nn.LSTM(embedding_size, hidden_size, n_layers, dropout=dropout_p, batch_first=False)
        self.out = nn.Linear(hidden_size, output_size)

    def forward(self, word_input, last_hidden, encoder_outputs):
        """Run one decoding step.

        word_input: (B,) token ids produced at the previous step.
        last_hidden: (hidden, cell) LSTM state from the previous step.
        encoder_outputs: unused by this decoder.
        Returns (logits of shape (1, B, output_size), new hidden state).
        """
        step_input = word_input.unsqueeze(0)                   # (1, B)
        embedded = self.dropout(self.embedding(step_input))    # (1, B, emb)
        rnn_out, new_hidden = self.rnn(embedded, last_hidden)  # (1, B, hidden)
        logits = self.out(rnn_out)                             # (1, B, output_size)
        return logits, new_hidden
# Cell
import random
import pytorch_lightning as pl
import pytorch_lightning.metrics.functional as plfunc
from pytorch_lightning.loggers import TensorBoardLogger
# Cell
class Seq2Seq(pl.LightningModule):
    """Encoder-decoder (seq2seq) Lightning module trained with teacher forcing.

    Learns a mapping from a source token sequence to a target token sequence
    using the LSTM-based ``Encoder`` and ``NewDecoder`` defined in this module.
    """
    @staticmethod
    def add_model_specific_args(parent_parser):
        """Return a parser extending ``parent_parser`` with this model's flags."""
        # Local import fix: the original referenced ArgumentParser without any
        # import, raising NameError whenever this helper was called.
        from argparse import ArgumentParser
        parser = ArgumentParser(parents=[parent_parser], add_help=False)
        parser.add_argument("--emb_dim", type=int, default=32)
        parser.add_argument('--hidden_dim', type=int, default=64)
        parser.add_argument('--dropout', type=float, default=0.1)
        return parser

    def __init__(self,
                 input_vocab_size,
                 output_vocab_size,
                 padding_index = 0,
                 emb_dim = 8,
                 hidden_dim=32,
                 dropout=0.1,
                 max_length=20,
                 **kwargs):
        """Build the encoder/decoder pair and training state.

        :param input_vocab_size: source-side vocabulary size (from the datamodule tokenizer).
        :param output_vocab_size: target-side vocabulary size.
        :param padding_index: token id ignored by the cross-entropy loss.
        :param emb_dim: embedding size shared by encoder and decoder.
        :param hidden_dim: LSTM hidden size shared by encoder and decoder.
        :param dropout: dropout probability for both LSTMs.
        :param max_length: decoding length used when no target sequence is given.
        """
        super().__init__()
        # dynamic, based on tokenizer vocab size defined in datamodule
        self.input_dim = input_vocab_size
        self.output_dim = output_vocab_size
        self.enc_emb_dim = emb_dim
        self.dec_emb_dim = emb_dim
        self.enc_hid_dim = hidden_dim
        self.dec_hid_dim = hidden_dim
        self.enc_dropout = dropout
        self.dec_dropout = dropout
        self.pad_idx = padding_index
        self.num_layers = 2
        # Fix: honor the constructor argument; this was hard-coded to 10,
        # silently ignoring ``max_length``.
        self.max_length = max_length
        self.save_hyperparameters()
        self.max_epochs = kwargs.get('max_epochs', 5)
        self.learning_rate = 0.0005
        self._loss = nn.CrossEntropyLoss(ignore_index=self.pad_idx)
        self.encoder = Encoder(
            self.input_dim,
            self.enc_emb_dim,
            self.enc_hid_dim,
            self.num_layers,
            self.enc_dropout
        )
        self.decoder = NewDecoder(
            self.enc_hid_dim,
            self.dec_emb_dim,
            self.output_dim,
            self.num_layers,
            self.dec_dropout
        )
        self._init_weights()

    def _init_weights(self):
        """Initialize all weights N(0, 0.01) and all other parameters to zero."""
        for name, param in self.named_parameters():
            if "weight" in name:
                nn.init.normal_(param.data, mean=0, std=0.01)
            else:
                nn.init.constant_(param.data, 0)

    def create_mask(self, src):
        """Boolean mask of non-pad positions, shaped (batch, src_len)."""
        mask = (src != self.pad_idx).permute(1, 0)
        return mask

    def forward(self, src_seq, source_len, trg_seq, teacher_force_ratio=0.5):
        """Greedy decoding with optional teacher forcing.

        :param src_seq: (batch, src_len) source token ids.
        :param source_len: per-sequence source lengths.
        :param trg_seq: (batch, trg_len) target ids, or None for pure inference.
        :param teacher_force_ratio: probability of feeding the ground-truth
            token as the next decoder input instead of the model's prediction.
        :return: logits of shape (target_len, batch, output_vocab_size).
        """
        source = src_seq.transpose(0, 1)
        # Fix: ``target`` was undefined when trg_seq is None, causing a
        # NameError inside the teacher-forcing branch during inference.
        target = None
        target_len = self.max_length
        if trg_seq is not None:
            target = trg_seq.transpose(0, 1)
            target_len = target.shape[0]
        batch_size = source.shape[1]
        target_vocab_size = self.output_dim
        outputs = torch.zeros(target_len, batch_size, target_vocab_size).to(self.device)
        encoder_hidden = self.encoder(source, source_len)
        # Decoding starts from token id 1 for every sequence
        # (presumably the <sos> id -- TODO confirm against the tokenizer).
        decoder_input = torch.ones(batch_size).long().to(self.device)
        decoder_hidden = encoder_hidden
        encoder_outputs = None  # this decoder does not attend over encoder outputs
        for t in range(target_len):
            decoder_output, decoder_hidden = self.decoder(decoder_input, decoder_hidden, encoder_outputs)
            outputs[t] = decoder_output
            # Greedy choice of the next input token.
            topv, topi = decoder_output.topk(1)
            decoder_input = topi.squeeze().detach()
            # Teacher forcing; ``target is not None`` is checked first so
            # inference (trg_seq=None) can never touch an undefined name.
            if target is not None and random.random() < teacher_force_ratio:
                decoder_input = target[t]
        return outputs

    def loss(self, logits, target):
        """Cross-entropy over flattened logits, ignoring pad positions."""
        return self._loss(logits, target)

    def configure_optimizers(self):
        """AdamW with a per-step linear OneCycle schedule."""
        optimizer = optim.AdamW(self.parameters(), lr=self.learning_rate)
        lr_scheduler = {
            'scheduler': optim.lr_scheduler.OneCycleLR(
                optimizer,
                max_lr = self.learning_rate,
                # NOTE(review): hard-coded for one specific dataset/batch size;
                # should be derived from the datamodule -- TODO confirm.
                steps_per_epoch = 3379,
                epochs=self.max_epochs,
                anneal_strategy='linear',
                final_div_factor=1000,
                pct_start = 0.01
            ),
            "name": "learning_rate",
            "interval": "step",
            "frequency": 1
        }
        return [optimizer], [lr_scheduler]

    def training_step(self, batch, batch_idx):
        """One optimization step: forward with teacher forcing, then CE loss."""
        src_seq, trg_seq, src_lengths = batch['src'], batch['trg'], batch['src_len']
        output = self.forward(src_seq, src_lengths, trg_seq)
        # Loss is computed over every position, including the sos token.
        output = output.view(-1, self.output_dim)
        trg_seq = trg_seq.transpose(0, 1)
        trg = trg_seq.reshape(-1)
        loss = self.loss(output, trg)
        self.log('train_loss', loss.item(),
                 on_step=True,
                 on_epoch=True,
                 prog_bar=True,
                 logger=True)
        return loss

    def validation_step(self, batch, batch_idx):
        """Validation without teacher forcing; logs loss, token accuracy and BLEU."""
        src_seq, trg_seq, src_lengths = batch['src'], batch['trg'], batch['src_len']
        # teacher_force_ratio=0: decode purely from the model's own predictions.
        outputs = self.forward(src_seq, src_lengths, trg_seq, 0)
        # Fix: make trg_seq time-major first, as training_step does. The
        # original sliced ``trg_seq[1:]`` along the *batch* dimension, so the
        # flattened targets did not line up with the time-major logits.
        trg_seq = trg_seq.transpose(0, 1)
        # Position 0 (sos) is dropped before scoring.
        logits = outputs[1:].view(-1, self.output_dim)
        trg = trg_seq[1:].reshape(-1)
        loss = self.loss(logits, trg)
        pred_seq = outputs[1:].argmax(2)  # (seq_len, batch, vocab) -> (seq_len, batch)
        # change layout: seq_len * batch_size -> batch_size * seq_len
        pred_seq = pred_seq.T
        trg_batch = trg_seq[1:].T
        # Token-level accuracy over the flattened batch.
        acc = plfunc.accuracy(pred_seq.reshape(-1), trg_batch.reshape(-1))
        # Fix: both assignments below used ``-`` instead of ``=`` in the
        # original, raising NameError and discarding the computed values.
        predicted_ids = pred_seq.tolist()
        # Each target gets an extra dim: bleu expects a list of references
        # per sequence, i.e. [seq1=[[reference1], ...], seq2=[reference1]].
        target_ids = torch.unsqueeze(trg_batch, 1).tolist()
        bleu_score = plfunc.nlp.bleu_score(predicted_ids, target_ids, n_gram=3).to(self.device)
        self.log(
            'val_loss',
            loss,
            on_step=False,
            on_epoch=True,
            prog_bar=True,
            logger=True,
            sync_dist=True)
        self.log(
            "val_acc",
            acc,
            on_step=False,
            on_epoch=True,
            prog_bar=True,
            logger=True,
            sync_dist=True
        )
        self.log(
            "val_bleu_idx",
            bleu_score,
            on_step=False,
            on_epoch=True,
            prog_bar=True,
            logger=True,
            sync_dist=True
        )
        return loss, acc, bleu_score
| 32.025559
| 120
| 0.621409
| 1,303
| 10,024
| 4.537222
| 0.23561
| 0.012179
| 0.01184
| 0.010149
| 0.207037
| 0.139716
| 0.101996
| 0.092862
| 0.064107
| 0.064107
| 0
| 0.013053
| 0.289206
| 10,024
| 312
| 121
| 32.128205
| 0.816702
| 0.189346
| 0
| 0.142105
| 1
| 0
| 0.023146
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.068421
| false
| 0
| 0.047368
| 0.005263
| 0.178947
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
0a017ba6441979fea8dcb4bd6912e6e472b2970d
| 456
|
py
|
Python
|
brokenChains/migrations/0003_auto_20181106_1819.py
|
bunya017/brokenChains
|
3e20c834efd7f0ade8e3abe7acf547c093f76758
|
[
"MIT"
] | 1
|
2018-12-07T09:15:57.000Z
|
2018-12-07T09:15:57.000Z
|
brokenChains/migrations/0003_auto_20181106_1819.py
|
bunya017/brokenChains
|
3e20c834efd7f0ade8e3abe7acf547c093f76758
|
[
"MIT"
] | null | null | null |
brokenChains/migrations/0003_auto_20181106_1819.py
|
bunya017/brokenChains
|
3e20c834efd7f0ade8e3abe7acf547c093f76758
|
[
"MIT"
] | null | null | null |
# Generated by Django 2.1.1 on 2018-11-06 17:19
from django.conf import settings
from django.db import migrations
class Migration(migrations.Migration):
    """Enforce that a user cannot own two habits with the same name.

    Adds an (owner, name) unique-together constraint on the ``habit`` model.
    """
    dependencies = [
        # habit references the (swappable) user model, so depend on it explicitly.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('brokenChains', '0002_auto_20181106_1723'),
    ]
    operations = [
        migrations.AlterUniqueTogether(
            name='habit',
            unique_together={('owner', 'name')},
        ),
    ]
| 22.8
| 66
| 0.64693
| 48
| 456
| 6
| 0.770833
| 0.069444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.089855
| 0.243421
| 456
| 19
| 67
| 24
| 0.744928
| 0.098684
| 0
| 0
| 1
| 0
| 0.119804
| 0.056235
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.153846
| 0
| 0.384615
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
0a03afbc022ab3ed1e3b4074455a3f3fdefc3a2e
| 1,189
|
py
|
Python
|
app/modules/ai_lab/migrations/0003_ailabcasestudy.py
|
nickmoreton/nhsx-website
|
2397d1308376c02b75323d30e6bc916af0daac9d
|
[
"MIT"
] | 50
|
2019-04-04T17:50:00.000Z
|
2021-08-05T15:08:37.000Z
|
app/modules/ai_lab/migrations/0003_ailabcasestudy.py
|
nickmoreton/nhsx-website
|
2397d1308376c02b75323d30e6bc916af0daac9d
|
[
"MIT"
] | 434
|
2019-04-04T18:25:32.000Z
|
2022-03-31T18:23:37.000Z
|
app/modules/ai_lab/migrations/0003_ailabcasestudy.py
|
nhsx-mirror/nhsx-website
|
2133b4e275ca35ff77f7d6874e809f139ec4bf86
|
[
"MIT"
] | 23
|
2019-04-04T09:52:07.000Z
|
2021-04-11T07:41:47.000Z
|
# Generated by Django 3.0.4 on 2020-07-14 11:00
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Create the AiLabCaseStudy page type.

    The model subclasses the core ArticlePage (multi-table inheritance via
    ``articlepage_ptr``) and links to an AiLabUseCase; PROTECT prevents a
    use case from being deleted while case studies still reference it.
    """
    dependencies = [
        ("core", "0026_auto_20200713_1535"),
        ("ai_lab", "0002_ailabusecase"),
    ]
    operations = [
        migrations.CreateModel(
            name="AiLabCaseStudy",
            fields=[
                (
                    "articlepage_ptr",
                    models.OneToOneField(
                        auto_created=True,
                        on_delete=django.db.models.deletion.CASCADE,
                        parent_link=True,
                        primary_key=True,
                        serialize=False,
                        to="core.ArticlePage",
                    ),
                ),
                (
                    "use_case",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.PROTECT,
                        to="ai_lab.AiLabUseCase",
                    ),
                ),
            ],
            options={"abstract": False,},
            bases=("core.articlepage", models.Model),
        ),
    ]
| 29
| 68
| 0.444071
| 91
| 1,189
| 5.659341
| 0.626374
| 0.062136
| 0.081553
| 0.128155
| 0.116505
| 0.116505
| 0
| 0
| 0
| 0
| 0
| 0.054348
| 0.458368
| 1,189
| 40
| 69
| 29.725
| 0.745342
| 0.037847
| 0
| 0.147059
| 1
| 0
| 0.127846
| 0.02014
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.058824
| 0
| 0.147059
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
0a114ea68c2fa1e2738f0d3ff99019e72e2ea941
| 1,074
|
py
|
Python
|
sitewebapp/migrations/0011_auto_20210130_0150.py
|
deucaleon18/debsoc-nitdgp-website
|
41bd6ade7f4af143ef34aff01848f830cc533add
|
[
"MIT"
] | 2
|
2020-12-05T05:34:56.000Z
|
2020-12-09T10:27:43.000Z
|
sitewebapp/migrations/0011_auto_20210130_0150.py
|
deucaleon18/debsoc-nitdgp-website
|
41bd6ade7f4af143ef34aff01848f830cc533add
|
[
"MIT"
] | 3
|
2021-06-28T16:47:23.000Z
|
2021-06-28T16:48:51.000Z
|
sitewebapp/migrations/0011_auto_20210130_0150.py
|
deucaleon18/debsoc-nitdgp-website
|
41bd6ade7f4af143ef34aff01848f830cc533add
|
[
"MIT"
] | 9
|
2021-01-29T17:06:30.000Z
|
2021-08-21T18:23:26.000Z
|
# Generated by Django 2.2.15 on 2021-01-29 20:20
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Replace the misspelled ``audtionRounds`` model with ``auditionRounds``.

    Creates the correctly named model, repoints ``auditionquestions.round``
    at it, then drops the old misspelled model.
    """
    dependencies = [
        ('sitewebapp', '0010_auditionanswers_auditionquestions_audtionrounds_candidates'),
    ]
    operations = [
        migrations.CreateModel(
            name='auditionRounds',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('roundno', models.IntegerField(default=1)),
                ('candidate', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='inductees', to='sitewebapp.Candidates')),
            ],
        ),
        migrations.AlterField(
            model_name='auditionquestions',
            name='round',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='round', to='sitewebapp.auditionRounds'),
        ),
        migrations.DeleteModel(
            name='audtionRounds',
        ),
    ]
| 34.645161
| 148
| 0.634078
| 102
| 1,074
| 6.558824
| 0.54902
| 0.047833
| 0.06278
| 0.098655
| 0.19133
| 0.19133
| 0.19133
| 0.19133
| 0.19133
| 0.19133
| 0
| 0.025862
| 0.243948
| 1,074
| 30
| 149
| 35.8
| 0.79803
| 0.042831
| 0
| 0.125
| 1
| 0
| 0.196881
| 0.106238
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.083333
| 0
| 0.208333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
0a1a359a4636f368d0f28057e4bf1af274c7fb79
| 3,332
|
py
|
Python
|
influxdb_service_sdk/model/container/resource_requirements_pb2.py
|
easyopsapis/easyops-api-python
|
adf6e3bad33fa6266b5fa0a449dd4ac42f8447d0
|
[
"Apache-2.0"
] | 5
|
2019-07-31T04:11:05.000Z
|
2021-01-07T03:23:20.000Z
|
influxdb_service_sdk/model/container/resource_requirements_pb2.py
|
easyopsapis/easyops-api-python
|
adf6e3bad33fa6266b5fa0a449dd4ac42f8447d0
|
[
"Apache-2.0"
] | null | null | null |
influxdb_service_sdk/model/container/resource_requirements_pb2.py
|
easyopsapis/easyops-api-python
|
adf6e3bad33fa6266b5fa0a449dd4ac42f8447d0
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: resource_requirements.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from influxdb_service_sdk.model.container import resource_list_pb2 as influxdb__service__sdk_dot_model_dot_container_dot_resource__list__pb2
# File descriptor for resource_requirements.proto (package 'container',
# proto3). The serialized_pb blob is the compiled FileDescriptorProto emitted
# by protoc and must not be edited by hand.
DESCRIPTOR = _descriptor.FileDescriptor(
  name='resource_requirements.proto',
  package='container',
  syntax='proto3',
  serialized_options=_b('ZCgo.easyops.local/contracts/protorepo-models/easyops/model/container'),
  serialized_pb=_b('\n\x1bresource_requirements.proto\x12\tcontainer\x1a\x38influxdb_service_sdk/model/container/resource_list.proto\"j\n\x14ResourceRequirements\x12\'\n\x06limits\x18\x01 \x01(\x0b\x32\x17.container.ResourceList\x12)\n\x08requests\x18\x02 \x01(\x0b\x32\x17.container.ResourceListBEZCgo.easyops.local/contracts/protorepo-models/easyops/model/containerb\x06proto3')
  ,
  dependencies=[influxdb__service__sdk_dot_model_dot_container_dot_resource__list__pb2.DESCRIPTOR,])
# Message descriptor for container.ResourceRequirements: two singular
# message-typed fields, 'limits' (field #1) and 'requests' (field #2), both of
# type container.ResourceList (type=11/TYPE_MESSAGE, label=1/LABEL_OPTIONAL).
_RESOURCEREQUIREMENTS = _descriptor.Descriptor(
  name='ResourceRequirements',
  full_name='container.ResourceRequirements',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='limits', full_name='container.ResourceRequirements.limits', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='requests', full_name='container.ResourceRequirements.requests', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=100,
  serialized_end=206,
)
# Resolve the message-typed fields to the imported ResourceList descriptor and
# register the file with the default symbol database.
_RESOURCEREQUIREMENTS.fields_by_name['limits'].message_type = influxdb__service__sdk_dot_model_dot_container_dot_resource__list__pb2._RESOURCELIST
_RESOURCEREQUIREMENTS.fields_by_name['requests'].message_type = influxdb__service__sdk_dot_model_dot_container_dot_resource__list__pb2._RESOURCELIST
DESCRIPTOR.message_types_by_name['ResourceRequirements'] = _RESOURCEREQUIREMENTS
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
# Concrete message class is generated from the descriptor at import time.
ResourceRequirements = _reflection.GeneratedProtocolMessageType('ResourceRequirements', (_message.Message,), {
  'DESCRIPTOR' : _RESOURCEREQUIREMENTS,
  '__module__' : 'resource_requirements_pb2'
  # @@protoc_insertion_point(class_scope:container.ResourceRequirements)
  })
_sym_db.RegisterMessage(ResourceRequirements)
DESCRIPTOR._options = None
# @@protoc_insertion_point(module_scope)
| 40.144578
| 380
| 0.801921
| 396
| 3,332
| 6.358586
| 0.325758
| 0.02224
| 0.035743
| 0.038126
| 0.305798
| 0.289118
| 0.289118
| 0.250993
| 0.250993
| 0.250993
| 0
| 0.024228
| 0.095738
| 3,332
| 82
| 381
| 40.634146
| 0.811484
| 0.076831
| 0
| 0.258065
| 1
| 0.016129
| 0.165526
| 0.121538
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.096774
| 0
| 0.096774
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
0a3bec6c960ec5a80b8e4e32d4669b80255b605f
| 1,114
|
py
|
Python
|
app/rss_feeder_api/migrations/0003_auto_20200813_1623.py
|
RSaab/rss-scraper
|
9bf608878e7d08fea6508ae90b27f1c226b313f1
|
[
"MIT"
] | null | null | null |
app/rss_feeder_api/migrations/0003_auto_20200813_1623.py
|
RSaab/rss-scraper
|
9bf608878e7d08fea6508ae90b27f1c226b313f1
|
[
"MIT"
] | null | null | null |
app/rss_feeder_api/migrations/0003_auto_20200813_1623.py
|
RSaab/rss-scraper
|
9bf608878e7d08fea6508ae90b27f1c226b313f1
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.1 on 2020-08-13 16:23
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
('rss_feeder_api', '0002_feed_subtitle'),
]
operations = [
migrations.AlterModelOptions(
name='entry',
options={'ordering': ('-updated_at',), 'verbose_name_plural': 'entries'},
),
migrations.AlterModelOptions(
name='feed',
options={'ordering': ('-updated_at',), 'verbose_name': 'Feed', 'verbose_name_plural': 'Feeds'},
),
migrations.AddField(
model_name='entry',
name='created_at',
field=models.DateTimeField(auto_now_add=True, default=django.utils.timezone.now),
preserve_default=False,
),
migrations.AddField(
model_name='entry',
name='updated_at',
field=models.DateTimeField(auto_now=True),
),
migrations.AlterUniqueTogether(
name='entry',
unique_together={('guid',)},
),
]
| 29.315789
| 107
| 0.577199
| 104
| 1,114
| 5.990385
| 0.528846
| 0.057785
| 0.060995
| 0.077047
| 0.333868
| 0.333868
| 0
| 0
| 0
| 0
| 0
| 0.022785
| 0.290844
| 1,114
| 37
| 108
| 30.108108
| 0.765823
| 0.0386
| 0
| 0.419355
| 1
| 0
| 0.172123
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.064516
| 0
| 0.16129
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
0a4049bea9cce33edfb9f0362df0cd2e91b7aa1a
| 335
|
py
|
Python
|
reo/migrations/0121_merge_20211001_1841.py
|
NREL/REopt_API
|
fbc70f3b0cdeec9ee220266d6b3b0c5d64f257a6
|
[
"BSD-3-Clause"
] | 7
|
2022-01-29T12:10:10.000Z
|
2022-03-28T13:45:20.000Z
|
reo/migrations/0121_merge_20211001_1841.py
|
NREL/reopt_api
|
fbc70f3b0cdeec9ee220266d6b3b0c5d64f257a6
|
[
"BSD-3-Clause"
] | 12
|
2022-02-01T18:23:18.000Z
|
2022-03-31T17:22:17.000Z
|
reo/migrations/0121_merge_20211001_1841.py
|
NREL/REopt_API
|
fbc70f3b0cdeec9ee220266d6b3b0c5d64f257a6
|
[
"BSD-3-Clause"
] | 3
|
2022-02-08T19:44:40.000Z
|
2022-03-12T11:05:36.000Z
|
# Generated by Django 3.1.13 on 2021-10-01 18:41
from django.db import migrations
class Migration(migrations.Migration):
    """Merge migration: unifies three divergent migration branches of 'reo'.

    Contains no schema operations; it only records the merged history.
    """
    dependencies = [
        ('reo', '0117_financialmodel_generator_fuel_escalation_pct'),
        ('reo', '0120_auto_20210927_2046'),
        ('reo', '0121_auto_20211012_0305')
    ]
    operations = [
    ]
| 20.9375
| 69
| 0.662687
| 40
| 335
| 5.275
| 0.85
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.198473
| 0.21791
| 335
| 15
| 70
| 22.333333
| 0.60687
| 0.137313
| 0
| 0
| 1
| 0
| 0.362369
| 0.33101
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.111111
| 0
| 0.444444
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
0a4b453e9f68bd48c8b434b43c7c61e7c47c248d
| 3,400
|
py
|
Python
|
modelflow/graph_viz_from_outputs.py
|
ModelFlow/modelflow
|
c2b720b2da8bb17462baff5c00bbe942644474b0
|
[
"MIT"
] | 6
|
2020-07-28T19:58:28.000Z
|
2021-05-01T18:51:37.000Z
|
modelflow/graph_viz_from_outputs.py
|
ModelFlow/modelflow
|
c2b720b2da8bb17462baff5c00bbe942644474b0
|
[
"MIT"
] | 81
|
2020-07-30T07:08:10.000Z
|
2021-07-28T02:17:43.000Z
|
modelflow/graph_viz_from_outputs.py
|
ModelFlow/modelflow
|
c2b720b2da8bb17462baff5c00bbe942644474b0
|
[
"MIT"
] | null | null | null |
import pandas as pd
import argparse
import json
try:
from graphviz import Digraph
except:
print("Note: Optional graphviz not installed")
def generate_graph(df, graph_format='pdf'):
    """Render the ModelFlow dependency graph with graphviz.

    Columns named 'state_*' become ellipse nodes; all other columns become
    box nodes keyed by their model prefix. Edge direction is inferred from
    the sign of each flow column. For graph_format='json' the rendered JSON
    file is loaded and returned; otherwise the graph viewer is opened.
    """
    graph = Digraph('ModelFlow', filename='modelflow.gv', engine='neato', format=graph_format)
    graph.attr(overlap='false')
    graph.attr(splines='true')
    cols = df.columns
    # Ellipse nodes for every state column.
    states = []
    graph.attr('node', shape='ellipse')
    for col in cols:
        if col[:6] == 'state_':
            states.append((col[6:], col))
            graph.node(col[6:])
    # Box nodes for every model (non-state) column.
    models = []
    graph.attr('node', shape='box')
    for col in cols:
        if col[:6] != 'state_':
            models.append((col.split('_')[0], col))
            graph.node(col.split('_')[0])
    # Edge direction follows the sign of the column's values.
    for col in cols:
        if col[:6] == 'state_':
            continue
        parts = col.split('_')
        state = '_'.join(parts[1:])[6:-7]
        lo = df[col].min()
        hi = df[col].max()
        print(parts[0], state, lo, hi)
        if lo < 0 and hi <= 0:
            graph.edge(state, parts[0])
        elif lo >= 0 and hi > 0:
            graph.edge(parts[0], state)
        else:
            # Mixed signs: flow goes both ways.
            graph.edge(parts[0], state)
            graph.edge(state, parts[0])
    if graph_format == 'json':
        # TODO: THIS DOES NOT WORK FOR MULTIPLE MODELFLOWS
        with open('modelflow.gv.json', 'r') as f:
            return json.load(f)
    else:
        graph.view()
def generate_react_flow_chart(outputs):
    """Build react-flow chart data from a ModelFlow outputs dict.

    Each entry of ``outputs['output_states']`` contributes one DataFrame
    column taken from its 'data' list.
    """
    frame = pd.DataFrame()
    for name, series in outputs['output_states'].items():
        frame[name] = series['data']
    return generate_react_flow_chart_from_df(frame)
def generate_react_flow_chart_from_df(df):
    """Derive react-flow nodes and edges from a ModelFlow outputs DataFrame.

    Returns ``dict(nodes=..., edges=...)`` where state columns ('state_*')
    yield ellipse nodes, other columns yield box nodes keyed by model prefix,
    and each non-state column yields one edge whose direction follows the
    sign of its values.
    """
    cols = df.columns
    nodes = {}
    # State columns ('state_<name>') become ellipse nodes keyed by <name>.
    for col in cols:
        if col[:6] == 'state_':
            label = col[6:]
            nodes[label] = dict(name=label, kind='elipse')  # sic: original spelling kept
    # Remaining columns become box nodes keyed by the model prefix.
    for col in cols:
        if col[:6] != 'state_':
            prefix = col.split('_')[0]
            nodes[prefix] = dict(name=prefix, kind='box')
    edges = []
    for col in cols:
        if col[:6] == 'state_':
            continue
        pieces = col.split('_')
        model = pieces[0]
        # Strip the 'state_' prefix and the 7-char suffix from the remainder.
        state = '_'.join(pieces[1:])[6:-7]
        lo = df[col].min()
        hi = df[col].max()
        if lo < 0 and hi <= 0:
            edges.append([state, model, 'one_way'])
        elif lo >= 0 and hi > 0:
            edges.append([model, state, 'one_way'])
        else:
            edges.append([model, state, 'both'])
    return dict(nodes=list(nodes.values()), edges=edges)
def main(args):
    """Entry point: read the outputs CSV and compute the react-flow chart data."""
    frame = pd.read_csv(args.output_file)
    # generate_graph(frame)  # alternative: render via graphviz instead
    generate_react_flow_chart_from_df(frame)
if __name__ == '__main__':
    # CLI entry point: requires -f/--output_file pointing at a ModelFlow
    # outputs CSV, then hands off to main().
    parser = argparse.ArgumentParser(description='Generate Graph Viz')
    parser.add_argument('-f', '--output_file', type=str,
                        help='The output file to generate a graph of', required=True)
    args = parser.parse_args()
    main(args)
| 32.380952
| 95
| 0.577941
| 446
| 3,400
| 4.188341
| 0.262332
| 0.192719
| 0.077088
| 0.04818
| 0.457173
| 0.413276
| 0.38651
| 0.314775
| 0.314775
| 0.314775
| 0
| 0.015783
| 0.273235
| 3,400
| 104
| 96
| 32.692308
| 0.740186
| 0.045
| 0
| 0.371795
| 1
| 0
| 0.087037
| 0
| 0
| 0
| 0
| 0.009615
| 0
| 1
| 0.051282
| false
| 0
| 0.051282
| 0
| 0.141026
| 0.025641
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
6a60c251c96da7b05351011b63ba88125eca7fb7
| 9,790
|
py
|
Python
|
sdk/python/pulumi_azure_native/storage/storage_account_static_website.py
|
sebtelko/pulumi-azure-native
|
711ec021b5c73da05611c56c8a35adb0ce3244e4
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_native/storage/storage_account_static_website.py
|
sebtelko/pulumi-azure-native
|
711ec021b5c73da05611c56c8a35adb0ce3244e4
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_native/storage/storage_account_static_website.py
|
sebtelko/pulumi-azure-native
|
711ec021b5c73da05611c56c8a35adb0ce3244e4
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['StorageAccountStaticWebsiteArgs', 'StorageAccountStaticWebsite']
# NOTE: generated by the Pulumi SDK Generator; edits here are overwritten on regen.
@pulumi.input_type
class StorageAccountStaticWebsiteArgs:
    """Input arguments for a StorageAccountStaticWebsite resource."""
    def __init__(__self__, *,
                 account_name: pulumi.Input[str],
                 resource_group_name: pulumi.Input[str],
                 error404_document: Optional[pulumi.Input[str]] = None,
                 index_document: Optional[pulumi.Input[str]] = None):
        """
        The set of arguments for constructing a StorageAccountStaticWebsite resource.
        :param pulumi.Input[str] account_name: The name of the storage account within the specified resource group.
        :param pulumi.Input[str] resource_group_name: The name of the resource group within the user's subscription. The name is case insensitive.
        :param pulumi.Input[str] error404_document: The absolute path to a custom webpage that should be used when a request is made which does not correspond to an existing file.
        :param pulumi.Input[str] index_document: The webpage that Azure Storage serves for requests to the root of a website or any sub-folder. For example, 'index.html'. The value is case-sensitive.
        """
        pulumi.set(__self__, "account_name", account_name)
        pulumi.set(__self__, "resource_group_name", resource_group_name)
        # Optional inputs are only recorded when the caller supplied them.
        if error404_document is not None:
            pulumi.set(__self__, "error404_document", error404_document)
        if index_document is not None:
            pulumi.set(__self__, "index_document", index_document)
    # Each input is exposed as a getter/setter property pair backed by
    # pulumi.get/pulumi.set; the camelCase getter name is the provider-side name.
    @property
    @pulumi.getter(name="accountName")
    def account_name(self) -> pulumi.Input[str]:
        """
        The name of the storage account within the specified resource group.
        """
        return pulumi.get(self, "account_name")
    @account_name.setter
    def account_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "account_name", value)
    @property
    @pulumi.getter(name="resourceGroupName")
    def resource_group_name(self) -> pulumi.Input[str]:
        """
        The name of the resource group within the user's subscription. The name is case insensitive.
        """
        return pulumi.get(self, "resource_group_name")
    @resource_group_name.setter
    def resource_group_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "resource_group_name", value)
    @property
    @pulumi.getter(name="error404Document")
    def error404_document(self) -> Optional[pulumi.Input[str]]:
        """
        The absolute path to a custom webpage that should be used when a request is made which does not correspond to an existing file.
        """
        return pulumi.get(self, "error404_document")
    @error404_document.setter
    def error404_document(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "error404_document", value)
    @property
    @pulumi.getter(name="indexDocument")
    def index_document(self) -> Optional[pulumi.Input[str]]:
        """
        The webpage that Azure Storage serves for requests to the root of a website or any sub-folder. For example, 'index.html'. The value is case-sensitive.
        """
        return pulumi.get(self, "index_document")
    @index_document.setter
    def index_document(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "index_document", value)
class StorageAccountStaticWebsite(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
account_name: Optional[pulumi.Input[str]] = None,
error404_document: Optional[pulumi.Input[str]] = None,
index_document: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
Enables the static website feature of a storage account.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] account_name: The name of the storage account within the specified resource group.
:param pulumi.Input[str] error404_document: The absolute path to a custom webpage that should be used when a request is made which does not correspond to an existing file.
:param pulumi.Input[str] index_document: The webpage that Azure Storage serves for requests to the root of a website or any sub-folder. For example, 'index.html'. The value is case-sensitive.
:param pulumi.Input[str] resource_group_name: The name of the resource group within the user's subscription. The name is case insensitive.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: StorageAccountStaticWebsiteArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Enables the static website feature of a storage account.
:param str resource_name: The name of the resource.
:param StorageAccountStaticWebsiteArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(StorageAccountStaticWebsiteArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
account_name: Optional[pulumi.Input[str]] = None,
error404_document: Optional[pulumi.Input[str]] = None,
index_document: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = StorageAccountStaticWebsiteArgs.__new__(StorageAccountStaticWebsiteArgs)
if account_name is None and not opts.urn:
raise TypeError("Missing required property 'account_name'")
__props__.__dict__["account_name"] = account_name
__props__.__dict__["error404_document"] = error404_document
__props__.__dict__["index_document"] = index_document
if resource_group_name is None and not opts.urn:
raise TypeError("Missing required property 'resource_group_name'")
__props__.__dict__["resource_group_name"] = resource_group_name
__props__.__dict__["container_name"] = None
super(StorageAccountStaticWebsite, __self__).__init__(
'azure-native:storage:StorageAccountStaticWebsite',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
        id: pulumi.Input[str],
        opts: Optional[pulumi.ResourceOptions] = None) -> 'StorageAccountStaticWebsite':
    """Look up an existing StorageAccountStaticWebsite by provider ID.

    :param str resource_name: The unique name of the resulting resource.
    :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
    :param pulumi.ResourceOptions opts: Options for the resource.
    """
    merged_opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
    state = StorageAccountStaticWebsiteArgs.__new__(StorageAccountStaticWebsiteArgs)
    # Seed every readable property; actual values come from the provider.
    for prop in ("container_name", "error404_document", "index_document"):
        state.__dict__[prop] = None
    return StorageAccountStaticWebsite(resource_name, opts=merged_opts, __props__=state)
@property
@pulumi.getter(name="containerName")
def container_name(self) -> pulumi.Output[str]:
    """Name of the blob container that website content is uploaded to."""
    value: pulumi.Output[str] = pulumi.get(self, "container_name")
    return value
@property
@pulumi.getter(name="error404Document")
def error404_document(self) -> pulumi.Output[Optional[str]]:
    """Absolute path of the custom webpage served when a request does not
    correspond to an existing file."""
    value: pulumi.Output[Optional[str]] = pulumi.get(self, "error404_document")
    return value
@property
@pulumi.getter(name="indexDocument")
def index_document(self) -> pulumi.Output[Optional[str]]:
    """Webpage Azure Storage serves for requests to the website root or any
    sub-folder (e.g. 'index.html'); the value is case-sensitive."""
    value: pulumi.Output[Optional[str]] = pulumi.get(self, "index_document")
    return value
| 48.226601
| 199
| 0.674157
| 1,146
| 9,790
| 5.49651
| 0.140489
| 0.054136
| 0.066677
| 0.048897
| 0.653755
| 0.575806
| 0.548976
| 0.502461
| 0.480235
| 0.422448
| 0
| 0.008201
| 0.240245
| 9,790
| 202
| 200
| 48.465347
| 0.838666
| 0.309806
| 0
| 0.330645
| 1
| 0
| 0.130107
| 0.024199
| 0
| 0
| 0
| 0
| 0
| 1
| 0.137097
| false
| 0.008065
| 0.040323
| 0
| 0.258065
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
6a75c6bcf2a235fe76f46e51c4cc31283811626a
| 2,534
|
py
|
Python
|
simulation/dataset_G_1q_X_Z_N1.py
|
eperrier/QDataSet
|
383b38b9b4166848f72fac0153800525e66b477b
|
[
"MIT"
] | 42
|
2021-08-17T02:27:59.000Z
|
2022-03-26T16:00:57.000Z
|
simulation/dataset_G_1q_X_Z_N1.py
|
eperrier/QDataSet
|
383b38b9b4166848f72fac0153800525e66b477b
|
[
"MIT"
] | 1
|
2021-09-25T11:15:20.000Z
|
2021-09-27T04:18:25.000Z
|
simulation/dataset_G_1q_X_Z_N1.py
|
eperrier/QDataSet
|
383b38b9b4166848f72fac0153800525e66b477b
|
[
"MIT"
] | 6
|
2021-08-17T02:28:04.000Z
|
2022-03-22T07:11:48.000Z
|
##############################################
"""
Generate the G_1q_X_Z_N1 dataset: a single qubit with X-axis control,
Z-axis noise, and Gaussian control pulses, simulated via the shared
`utilites` helpers.
"""
##############################################
# preamble
import numpy as np
from utilites import Pauli_operators, simulate, CheckNoise
################################################
# meta parameters
name = "G_1q_X_Z_N1"
################################################
# quantum parameters
dim = 2                                                 # dimension of the system
Omega = 12                                              # qubit energy gap
static_operators = [0.5 * Pauli_operators[3] * Omega]   # drift Hamiltonian
dynamic_operators = [0.5 * Pauli_operators[1]]          # control Hamiltonian
noise_operators = [0.5 * Pauli_operators[3]]            # noise Hamiltonian
# Initial states: the six axial states of the Bloch sphere as density matrices
# (|+>, |->, |+i>, |-i>, |0>, |1>).
initial_states = [
    np.array([[0.5, 0.5], [0.5, 0.5]]), np.array([[0.5, -0.5], [-0.5, 0.5]]),
    np.array([[0.5, -0.5j], [0.5j, 0.5]]), np.array([[0.5, 0.5j], [-0.5j, 0.5]]),
    np.array([[1, 0], [0, 0]]), np.array([[0, 0], [0, 1]])
]
measurement_operators = Pauli_operators[1:]             # measurement operators
##################################################
# simulation parameters
T = 1                                                   # Evolution time
M = 1024                                                # Number of time steps
num_ex = 10000                                          # Number of examples
batch_size = 50                                         # batch size for TF
##################################################
# noise parameters
K = 2000                                                # Number of realizations
noise_profile = [1]                                     # Noise type
###################################################
# control parameters
pulse_shape = "Gaussian"                                # Control pulse shape
num_pulses = 5                                          # Number of pulses per sequence
####################################################
# Generate the dataset.
# An explicit dict literal replaces the previous eval()-based collection of
# module globals: identical keys, order, and values, but greppable, safe,
# and refactor-friendly.
sim_parameters = {
    "name": name,
    "dim": dim,
    "Omega": Omega,
    "static_operators": static_operators,
    "dynamic_operators": dynamic_operators,
    "noise_operators": noise_operators,
    "measurement_operators": measurement_operators,
    "initial_states": initial_states,
    "T": T,
    "M": M,
    "num_ex": num_ex,
    "batch_size": batch_size,
    "K": K,
    "noise_profile": noise_profile,
    "pulse_shape": pulse_shape,
    "num_pulses": num_pulses,
}
CheckNoise(sim_parameters)
simulate(sim_parameters)
####################################################
| 56.311111
| 261
| 0.404893
| 228
| 2,534
| 4.359649
| 0.372807
| 0.030181
| 0.024145
| 0.024145
| 0.148893
| 0.123742
| 0.071429
| 0.071429
| 0.071429
| 0.071429
| 0
| 0.042815
| 0.327151
| 2,534
| 45
| 262
| 56.311111
| 0.540176
| 0.190608
| 0
| 0
| 1
| 0
| 0.108795
| 0.013681
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.08
| 0
| 0.08
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
6a7ebe45370c220d4cb3303c8715bdc2a5f264ae
| 7,074
|
py
|
Python
|
python/sdk/client/api/log_api.py
|
ashwinath/merlin
|
087a7fa6fb21e4c771d64418bd58873175226ca1
|
[
"Apache-2.0"
] | null | null | null |
python/sdk/client/api/log_api.py
|
ashwinath/merlin
|
087a7fa6fb21e4c771d64418bd58873175226ca1
|
[
"Apache-2.0"
] | null | null | null |
python/sdk/client/api/log_api.py
|
ashwinath/merlin
|
087a7fa6fb21e4c771d64418bd58873175226ca1
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
Merlin
API Guide for accessing Merlin's model management, deployment, and serving functionalities # noqa: E501
OpenAPI spec version: 0.7.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from client.api_client import ApiClient
class LogApi(object):
    """NOTE: This class is auto generated by the swagger code generator program.

    Do not edit the class manually.
    Ref: https://github.com/swagger-api/swagger-codegen
    """

    def __init__(self, api_client=None):
        # The HTTP client is injectable (e.g. for testing); otherwise a
        # default-configured ApiClient is created.
        if api_client is None:
            api_client = ApiClient()
        self.api_client = api_client

    def logs_get(self, name, pod_name, namespace, cluster, **kwargs):  # noqa: E501
        """Retrieve log from a container  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.logs_get(name, pod_name, namespace, cluster, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str name: (required)
        :param str pod_name: (required)
        :param str namespace: (required)
        :param str cluster: (required)
        :param str follow:
        :param str limit_bytes:
        :param str pretty:
        :param str previous:
        :param str since_seconds:
        :param str since_time:
        :param str tail_lines:
        :param str timestamps:
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Convenience wrapper: always return only the response body, never
        # the full (data, status_code, headers) tuple.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.logs_get_with_http_info(name, pod_name, namespace, cluster, **kwargs)  # noqa: E501
        else:
            (data) = self.logs_get_with_http_info(name, pod_name, namespace, cluster, **kwargs)  # noqa: E501
            return data

    def logs_get_with_http_info(self, name, pod_name, namespace, cluster, **kwargs):  # noqa: E501
        """Retrieve log from a container  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.logs_get_with_http_info(name, pod_name, namespace, cluster, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str name: (required)
        :param str pod_name: (required)
        :param str namespace: (required)
        :param str cluster: (required)
        :param str follow:
        :param str limit_bytes:
        :param str pretty:
        :param str previous:
        :param str since_seconds:
        :param str since_time:
        :param str tail_lines:
        :param str timestamps:
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['name', 'pod_name', 'namespace', 'cluster', 'follow', 'limit_bytes', 'pretty', 'previous', 'since_seconds', 'since_time', 'tail_lines', 'timestamps']  # noqa: E501
        # Framework-level options accepted by every generated endpoint.
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # locals() snapshots the named arguments above plus 'kwargs'; the
        # local variable names are therefore load-bearing in this method.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method logs_get" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'name' is set
        if ('name' not in params or
                params['name'] is None):
            raise ValueError("Missing the required parameter `name` when calling `logs_get`")  # noqa: E501
        # verify the required parameter 'pod_name' is set
        if ('pod_name' not in params or
                params['pod_name'] is None):
            raise ValueError("Missing the required parameter `pod_name` when calling `logs_get`")  # noqa: E501
        # verify the required parameter 'namespace' is set
        if ('namespace' not in params or
                params['namespace'] is None):
            raise ValueError("Missing the required parameter `namespace` when calling `logs_get`")  # noqa: E501
        # verify the required parameter 'cluster' is set
        if ('cluster' not in params or
                params['cluster'] is None):
            raise ValueError("Missing the required parameter `cluster` when calling `logs_get`")  # noqa: E501

        collection_formats = {}

        path_params = {}

        # All endpoint parameters are sent in the query string; none are
        # path parameters for this route.
        query_params = []
        if 'name' in params:
            query_params.append(('name', params['name']))  # noqa: E501
        if 'pod_name' in params:
            query_params.append(('pod_name', params['pod_name']))  # noqa: E501
        if 'namespace' in params:
            query_params.append(('namespace', params['namespace']))  # noqa: E501
        if 'cluster' in params:
            query_params.append(('cluster', params['cluster']))  # noqa: E501
        if 'follow' in params:
            query_params.append(('follow', params['follow']))  # noqa: E501
        if 'limit_bytes' in params:
            query_params.append(('limit_bytes', params['limit_bytes']))  # noqa: E501
        if 'pretty' in params:
            query_params.append(('pretty', params['pretty']))  # noqa: E501
        if 'previous' in params:
            query_params.append(('previous', params['previous']))  # noqa: E501
        if 'since_seconds' in params:
            query_params.append(('since_seconds', params['since_seconds']))  # noqa: E501
        if 'since_time' in params:
            query_params.append(('since_time', params['since_time']))  # noqa: E501
        if 'tail_lines' in params:
            query_params.append(('tail_lines', params['tail_lines']))  # noqa: E501
        if 'timestamps' in params:
            query_params.append(('timestamps', params['timestamps']))  # noqa: E501

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # Authentication setting
        auth_settings = ['Bearer']  # noqa: E501

        return self.api_client.call_api(
            '/logs', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None,  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
| 39.3
| 185
| 0.607577
| 837
| 7,074
| 4.946237
| 0.193548
| 0.050242
| 0.057488
| 0.055072
| 0.525604
| 0.454589
| 0.408696
| 0.390338
| 0.36715
| 0.342029
| 0
| 0.017327
| 0.290218
| 7,074
| 179
| 186
| 39.519553
| 0.80721
| 0.326124
| 0
| 0
| 1
| 0
| 0.221276
| 0.015197
| 0
| 0
| 0
| 0
| 0
| 1
| 0.033333
| false
| 0
| 0.044444
| 0
| 0.122222
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
6aa897704d8b8b96376b6c78aa9de27ecec18071
| 378
|
py
|
Python
|
app/django_first/news/migrations/0002_movies_year.py
|
vvuri/flask_pipeline
|
d3f283b8a6a6239e56d85e67dbe3edce55bcb980
|
[
"MIT"
] | null | null | null |
app/django_first/news/migrations/0002_movies_year.py
|
vvuri/flask_pipeline
|
d3f283b8a6a6239e56d85e67dbe3edce55bcb980
|
[
"MIT"
] | null | null | null |
app/django_first/news/migrations/0002_movies_year.py
|
vvuri/flask_pipeline
|
d3f283b8a6a6239e56d85e67dbe3edce55bcb980
|
[
"MIT"
] | null | null | null |
# Generated by Django 4.0.1 on 2022-01-19 23:58
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add the nullable ``year`` CharField to the ``movies`` model."""

    dependencies = [
        ('news', '0001_initial'),
    ]

    operations = [
        migrations.AddField(
            model_name='movies',
            name='year',
            # max_length=4 fits a four-digit year; null=True means existing
            # rows need no backfill when the column is added.
            field=models.CharField(max_length=4, null=True),
        ),
    ]
| 19.894737
| 60
| 0.582011
| 42
| 378
| 5.166667
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.075188
| 0.296296
| 378
| 18
| 61
| 21
| 0.740602
| 0.119048
| 0
| 0
| 1
| 0
| 0.07855
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.083333
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
6ac9be98a456dcdce40e3c4f391cc313ab62f054
| 13,522
|
py
|
Python
|
sdk/python/pulumi_google_native/healthcare/v1beta1/user_data_mapping.py
|
AaronFriel/pulumi-google-native
|
75d1cda425e33d4610348972cd70bddf35f1770d
|
[
"Apache-2.0"
] | 44
|
2021-04-18T23:00:48.000Z
|
2022-02-14T17:43:15.000Z
|
sdk/python/pulumi_google_native/healthcare/v1beta1/user_data_mapping.py
|
AaronFriel/pulumi-google-native
|
75d1cda425e33d4610348972cd70bddf35f1770d
|
[
"Apache-2.0"
] | 354
|
2021-04-16T16:48:39.000Z
|
2022-03-31T17:16:39.000Z
|
sdk/python/pulumi_google_native/healthcare/v1beta1/user_data_mapping.py
|
AaronFriel/pulumi-google-native
|
75d1cda425e33d4610348972cd70bddf35f1770d
|
[
"Apache-2.0"
] | 8
|
2021-04-24T17:46:51.000Z
|
2022-01-05T10:40:21.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
from ._inputs import *
__all__ = ['UserDataMappingArgs', 'UserDataMapping']
@pulumi.input_type
class UserDataMappingArgs:
    def __init__(__self__, *,
                 consent_store_id: pulumi.Input[str],
                 data_id: pulumi.Input[str],
                 dataset_id: pulumi.Input[str],
                 user_id: pulumi.Input[str],
                 location: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 project: Optional[pulumi.Input[str]] = None,
                 resource_attributes: Optional[pulumi.Input[Sequence[pulumi.Input['AttributeArgs']]]] = None):
        """
        The set of arguments for constructing a UserDataMapping resource.
        :param pulumi.Input[str] consent_store_id: ID of the parent consent store (the `consentStores/{consent_store_id}` segment of the resource name).
        :param pulumi.Input[str] data_id: A unique identifier for the mapped resource.
        :param pulumi.Input[str] dataset_id: ID of the parent dataset (the `datasets/{dataset_id}` segment of the resource name).
        :param pulumi.Input[str] user_id: User's UUID provided by the client.
        :param pulumi.Input[str] location: Location of the parent resource — presumably the `locations/{location_id}` segment; confirm against the API.
        :param pulumi.Input[str] project: Project to create the mapping under — presumably defaults to the provider's configured project when unset; verify.
        :param pulumi.Input[str] name: Resource name of the User data mapping, of the form `projects/{project_id}/locations/{location_id}/datasets/{dataset_id}/consentStores/{consent_store_id}/userDataMappings/{user_data_mapping_id}`.
        :param pulumi.Input[Sequence[pulumi.Input['AttributeArgs']]] resource_attributes: Attributes of the resource. Only explicitly set attributes are displayed here. Attribute definitions with defaults set implicitly apply to these User data mappings. Attributes listed here must be single valued, that is, exactly one value is specified for the field "values" in each Attribute.
        """
        # Required inputs are always recorded; optional ones only when given.
        pulumi.set(__self__, "consent_store_id", consent_store_id)
        pulumi.set(__self__, "data_id", data_id)
        pulumi.set(__self__, "dataset_id", dataset_id)
        pulumi.set(__self__, "user_id", user_id)
        if location is not None:
            pulumi.set(__self__, "location", location)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if project is not None:
            pulumi.set(__self__, "project", project)
        if resource_attributes is not None:
            pulumi.set(__self__, "resource_attributes", resource_attributes)

    @property
    @pulumi.getter(name="consentStoreId")
    def consent_store_id(self) -> pulumi.Input[str]:
        """ID of the parent consent store."""
        return pulumi.get(self, "consent_store_id")

    @consent_store_id.setter
    def consent_store_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "consent_store_id", value)

    @property
    @pulumi.getter(name="dataId")
    def data_id(self) -> pulumi.Input[str]:
        """
        A unique identifier for the mapped resource.
        """
        return pulumi.get(self, "data_id")

    @data_id.setter
    def data_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "data_id", value)

    @property
    @pulumi.getter(name="datasetId")
    def dataset_id(self) -> pulumi.Input[str]:
        """ID of the parent dataset."""
        return pulumi.get(self, "dataset_id")

    @dataset_id.setter
    def dataset_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "dataset_id", value)

    @property
    @pulumi.getter(name="userId")
    def user_id(self) -> pulumi.Input[str]:
        """
        User's UUID provided by the client.
        """
        return pulumi.get(self, "user_id")

    @user_id.setter
    def user_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "user_id", value)

    @property
    @pulumi.getter
    def location(self) -> Optional[pulumi.Input[str]]:
        """Location of the parent resource (assumed; see __init__ docs)."""
        return pulumi.get(self, "location")

    @location.setter
    def location(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "location", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        Resource name of the User data mapping, of the form `projects/{project_id}/locations/{location_id}/datasets/{dataset_id}/consentStores/{consent_store_id}/userDataMappings/{user_data_mapping_id}`.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def project(self) -> Optional[pulumi.Input[str]]:
        """Project to create the mapping under (assumed; see __init__ docs)."""
        return pulumi.get(self, "project")

    @project.setter
    def project(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "project", value)

    @property
    @pulumi.getter(name="resourceAttributes")
    def resource_attributes(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['AttributeArgs']]]]:
        """
        Attributes of the resource. Only explicitly set attributes are displayed here. Attribute definitions with defaults set implicitly apply to these User data mappings. Attributes listed here must be single valued, that is, exactly one value is specified for the field "values" in each Attribute.
        """
        return pulumi.get(self, "resource_attributes")

    @resource_attributes.setter
    def resource_attributes(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['AttributeArgs']]]]):
        pulumi.set(self, "resource_attributes", value)
class UserDataMapping(pulumi.CustomResource):
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 consent_store_id: Optional[pulumi.Input[str]] = None,
                 data_id: Optional[pulumi.Input[str]] = None,
                 dataset_id: Optional[pulumi.Input[str]] = None,
                 location: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 project: Optional[pulumi.Input[str]] = None,
                 resource_attributes: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['AttributeArgs']]]]] = None,
                 user_id: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        Creates a new User data mapping in the parent consent store.

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] data_id: A unique identifier for the mapped resource.
        :param pulumi.Input[str] name: Resource name of the User data mapping, of the form `projects/{project_id}/locations/{location_id}/datasets/{dataset_id}/consentStores/{consent_store_id}/userDataMappings/{user_data_mapping_id}`.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['AttributeArgs']]]] resource_attributes: Attributes of the resource. Only explicitly set attributes are displayed here. Attribute definitions with defaults set implicitly apply to these User data mappings. Attributes listed here must be single valued, that is, exactly one value is specified for the field "values" in each Attribute.
        :param pulumi.Input[str] user_id: User's UUID provided by the client.
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: UserDataMappingArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Creates a new User data mapping in the parent consent store.

        :param str resource_name: The name of the resource.
        :param UserDataMappingArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Dispatch to _internal_init from whichever overload was used.
        resource_args, opts = _utilities.get_resource_args_opts(UserDataMappingArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)

    def _internal_init(__self__,
                       resource_name: str,
                       opts: Optional[pulumi.ResourceOptions] = None,
                       consent_store_id: Optional[pulumi.Input[str]] = None,
                       data_id: Optional[pulumi.Input[str]] = None,
                       dataset_id: Optional[pulumi.Input[str]] = None,
                       location: Optional[pulumi.Input[str]] = None,
                       name: Optional[pulumi.Input[str]] = None,
                       project: Optional[pulumi.Input[str]] = None,
                       resource_attributes: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['AttributeArgs']]]]] = None,
                       user_id: Optional[pulumi.Input[str]] = None,
                       __props__=None):
        # Shared initializer behind both __init__ overloads: validates
        # options, assembles the property bag, and registers the resource.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            # Pin the provider plugin version when the caller did not.
            opts.version = _utilities.get_version()
        if opts.id is None:
            # No id => creating a new resource: build the property bag
            # from the constructor arguments.
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = UserDataMappingArgs.__new__(UserDataMappingArgs)

            # consent_store_id/data_id/dataset_id/user_id are required
            # unless the resource is identified by URN instead.
            if consent_store_id is None and not opts.urn:
                raise TypeError("Missing required property 'consent_store_id'")
            __props__.__dict__["consent_store_id"] = consent_store_id
            if data_id is None and not opts.urn:
                raise TypeError("Missing required property 'data_id'")
            __props__.__dict__["data_id"] = data_id
            if dataset_id is None and not opts.urn:
                raise TypeError("Missing required property 'dataset_id'")
            __props__.__dict__["dataset_id"] = dataset_id
            __props__.__dict__["location"] = location
            __props__.__dict__["name"] = name
            __props__.__dict__["project"] = project
            __props__.__dict__["resource_attributes"] = resource_attributes
            if user_id is None and not opts.urn:
                raise TypeError("Missing required property 'user_id'")
            __props__.__dict__["user_id"] = user_id
            # Output-only properties, populated by the provider.
            __props__.__dict__["archive_time"] = None
            __props__.__dict__["archived"] = None
        super(UserDataMapping, __self__).__init__(
            'google-native:healthcare/v1beta1:UserDataMapping',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None) -> 'UserDataMapping':
        """
        Get an existing UserDataMapping resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        # Seed the readable properties; actual values come from the provider.
        __props__ = UserDataMappingArgs.__new__(UserDataMappingArgs)

        __props__.__dict__["archive_time"] = None
        __props__.__dict__["archived"] = None
        __props__.__dict__["data_id"] = None
        __props__.__dict__["name"] = None
        __props__.__dict__["resource_attributes"] = None
        __props__.__dict__["user_id"] = None
        return UserDataMapping(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter(name="archiveTime")
    def archive_time(self) -> pulumi.Output[str]:
        """
        Indicates the time when this mapping was archived.
        """
        return pulumi.get(self, "archive_time")

    @property
    @pulumi.getter
    def archived(self) -> pulumi.Output[bool]:
        """
        Indicates whether this mapping is archived.
        """
        return pulumi.get(self, "archived")

    @property
    @pulumi.getter(name="dataId")
    def data_id(self) -> pulumi.Output[str]:
        """
        A unique identifier for the mapped resource.
        """
        return pulumi.get(self, "data_id")

    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        Resource name of the User data mapping, of the form `projects/{project_id}/locations/{location_id}/datasets/{dataset_id}/consentStores/{consent_store_id}/userDataMappings/{user_data_mapping_id}`.
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter(name="resourceAttributes")
    def resource_attributes(self) -> pulumi.Output[Sequence['outputs.AttributeResponse']]:
        """
        Attributes of the resource. Only explicitly set attributes are displayed here. Attribute definitions with defaults set implicitly apply to these User data mappings. Attributes listed here must be single valued, that is, exactly one value is specified for the field "values" in each Attribute.
        """
        return pulumi.get(self, "resource_attributes")

    @property
    @pulumi.getter(name="userId")
    def user_id(self) -> pulumi.Output[str]:
        """
        User's UUID provided by the client.
        """
        return pulumi.get(self, "user_id")
| 45.837288
| 400
| 0.654859
| 1,582
| 13,522
| 5.326802
| 0.112516
| 0.075709
| 0.071437
| 0.060045
| 0.702029
| 0.633915
| 0.59523
| 0.565088
| 0.546695
| 0.466477
| 0
| 0.000293
| 0.24205
| 13,522
| 294
| 401
| 45.993197
| 0.821934
| 0.278953
| 0
| 0.362245
| 1
| 0
| 0.111542
| 0.00786
| 0
| 0
| 0
| 0
| 0
| 1
| 0.142857
| false
| 0.005102
| 0.035714
| 0.020408
| 0.265306
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
6acb7ed968b97603aa5b744b910e0997b0f3f62d
| 561
|
py
|
Python
|
server/api/migrations/0002_auto_20201011_1053.py
|
ShahriarDhruvo/WebTech_Assignment2
|
845d198a91b1dcc8ed149362499754167fca419d
|
[
"MIT"
] | null | null | null |
server/api/migrations/0002_auto_20201011_1053.py
|
ShahriarDhruvo/WebTech_Assignment2
|
845d198a91b1dcc8ed149362499754167fca419d
|
[
"MIT"
] | null | null | null |
server/api/migrations/0002_auto_20201011_1053.py
|
ShahriarDhruvo/WebTech_Assignment2
|
845d198a91b1dcc8ed149362499754167fca419d
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.1.2 on 2020-10-11 10:53
from django.db import migrations, models
class Migration(migrations.Migration):
    """Adjust defaults on the ``task`` model's ``author`` and ``deadline`` fields."""

    dependencies = [
        ('api', '0001_initial'),
    ]

    operations = [
        migrations.AlterField(
            model_name='task',
            name='author',
            field=models.CharField(default='Anonymous', max_length=100),
        ),
        migrations.AlterField(
            model_name='task',
            name='deadline',
            # NOTE(review): the default is a timestamp frozen at migration
            # creation time, not a callable — presumably intentional here,
            # but every new row will get this same fixed deadline.
            field=models.DateTimeField(default='2020-10-11 10:53'),
        ),
    ]
| 23.375
| 72
| 0.57041
| 58
| 561
| 5.448276
| 0.637931
| 0.037975
| 0.050633
| 0.063291
| 0.310127
| 0.234177
| 0
| 0
| 0
| 0
| 0
| 0.086957
| 0.30303
| 561
| 23
| 73
| 24.391304
| 0.721228
| 0.080214
| 0
| 0.352941
| 1
| 0
| 0.120623
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.058824
| 0
| 0.235294
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
0a7c17bb65b9c51d7ea399323ecb512289bae204
| 8,155
|
py
|
Python
|
sdk/python/pulumi_kubernetes/coordination/v1/_inputs.py
|
polivbr/pulumi-kubernetes
|
36a5fb34240a38a60b52a5f4e55e66e248d9305f
|
[
"Apache-2.0"
] | 277
|
2018-06-18T14:57:09.000Z
|
2022-03-29T04:05:06.000Z
|
sdk/python/pulumi_kubernetes/coordination/v1/_inputs.py
|
polivbr/pulumi-kubernetes
|
36a5fb34240a38a60b52a5f4e55e66e248d9305f
|
[
"Apache-2.0"
] | 1,447
|
2018-06-20T00:58:34.000Z
|
2022-03-31T21:28:43.000Z
|
sdk/python/pulumi_kubernetes/coordination/v1/_inputs.py
|
polivbr/pulumi-kubernetes
|
36a5fb34240a38a60b52a5f4e55e66e248d9305f
|
[
"Apache-2.0"
] | 95
|
2018-06-30T03:30:05.000Z
|
2022-03-29T04:05:09.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by pulumigen. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from ... import meta as _meta
__all__ = [
'LeaseSpecArgs',
'LeaseArgs',
]
@pulumi.input_type
class LeaseSpecArgs:
    def __init__(__self__, *,
                 acquire_time: Optional[pulumi.Input[str]] = None,
                 holder_identity: Optional[pulumi.Input[str]] = None,
                 lease_duration_seconds: Optional[pulumi.Input[int]] = None,
                 lease_transitions: Optional[pulumi.Input[int]] = None,
                 renew_time: Optional[pulumi.Input[str]] = None):
        """
        LeaseSpec is a specification of a Lease.
        :param pulumi.Input[str] acquire_time: acquireTime is a time when the current lease was acquired.
        :param pulumi.Input[str] holder_identity: holderIdentity contains the identity of the holder of a current lease.
        :param pulumi.Input[int] lease_duration_seconds: leaseDurationSeconds is a duration that candidates for a lease need to wait to force acquire it. This is measure against time of last observed RenewTime.
        :param pulumi.Input[int] lease_transitions: leaseTransitions is the number of transitions of a lease between holders.
        :param pulumi.Input[str] renew_time: renewTime is a time when the current holder of a lease has last updated the lease.
        """
        # Record only the fields the caller actually supplied.
        supplied = (
            ("acquire_time", acquire_time),
            ("holder_identity", holder_identity),
            ("lease_duration_seconds", lease_duration_seconds),
            ("lease_transitions", lease_transitions),
            ("renew_time", renew_time),
        )
        for field_name, field_value in supplied:
            if field_value is not None:
                pulumi.set(__self__, field_name, field_value)

    @property
    @pulumi.getter(name="acquireTime")
    def acquire_time(self) -> Optional[pulumi.Input[str]]:
        """acquireTime is a time when the current lease was acquired."""
        return pulumi.get(self, "acquire_time")

    @acquire_time.setter
    def acquire_time(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "acquire_time", value)

    @property
    @pulumi.getter(name="holderIdentity")
    def holder_identity(self) -> Optional[pulumi.Input[str]]:
        """holderIdentity contains the identity of the holder of a current lease."""
        return pulumi.get(self, "holder_identity")

    @holder_identity.setter
    def holder_identity(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "holder_identity", value)

    @property
    @pulumi.getter(name="leaseDurationSeconds")
    def lease_duration_seconds(self) -> Optional[pulumi.Input[int]]:
        """leaseDurationSeconds is a duration that candidates for a lease need
        to wait to force acquire it. This is measure against time of last
        observed RenewTime."""
        return pulumi.get(self, "lease_duration_seconds")

    @lease_duration_seconds.setter
    def lease_duration_seconds(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "lease_duration_seconds", value)

    @property
    @pulumi.getter(name="leaseTransitions")
    def lease_transitions(self) -> Optional[pulumi.Input[int]]:
        """leaseTransitions is the number of transitions of a lease between holders."""
        return pulumi.get(self, "lease_transitions")

    @lease_transitions.setter
    def lease_transitions(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "lease_transitions", value)

    @property
    @pulumi.getter(name="renewTime")
    def renew_time(self) -> Optional[pulumi.Input[str]]:
        """renewTime is a time when the current holder of a lease has last updated the lease."""
        return pulumi.get(self, "renew_time")

    @renew_time.setter
    def renew_time(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "renew_time", value)
@pulumi.input_type
class LeaseArgs:
    def __init__(__self__, *,
                 api_version: Optional[pulumi.Input[str]] = None,
                 kind: Optional[pulumi.Input[str]] = None,
                 metadata: Optional[pulumi.Input['_meta.v1.ObjectMetaArgs']] = None,
                 spec: Optional[pulumi.Input['LeaseSpecArgs']] = None):
        """
        Lease defines a lease concept.

        :param pulumi.Input[str] api_version: APIVersion defining the versioned schema of this object. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources
        :param pulumi.Input[str] kind: Kind, the REST resource this object represents (CamelCase, not updatable). More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds
        :param pulumi.Input['_meta.v1.ObjectMetaArgs'] metadata: Standard object metadata. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#metadata
        :param pulumi.Input['LeaseSpecArgs'] spec: Specification of the Lease. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#spec-and-status
        """
        # NOTE: per generated-SDK convention, apiVersion/kind are pinned to
        # the constants this resource type requires whenever a value is given;
        # the caller-supplied string itself is not stored.
        if api_version is not None:
            pulumi.set(__self__, "api_version", 'coordination.k8s.io/v1')
        if kind is not None:
            pulumi.set(__self__, "kind", 'Lease')
        if metadata is not None:
            pulumi.set(__self__, "metadata", metadata)
        if spec is not None:
            pulumi.set(__self__, "spec", spec)

    @property
    @pulumi.getter(name="apiVersion")
    def api_version(self) -> Optional[pulumi.Input[str]]:
        """APIVersion defining the versioned schema of this object. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources"""
        return pulumi.get(self, "api_version")

    @api_version.setter
    def api_version(self, val: Optional[pulumi.Input[str]]):
        pulumi.set(self, "api_version", val)

    @property
    @pulumi.getter
    def kind(self) -> Optional[pulumi.Input[str]]:
        """Kind, the REST resource this object represents (CamelCase, not updatable). More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds"""
        return pulumi.get(self, "kind")

    @kind.setter
    def kind(self, val: Optional[pulumi.Input[str]]):
        pulumi.set(self, "kind", val)

    @property
    @pulumi.getter
    def metadata(self) -> Optional[pulumi.Input['_meta.v1.ObjectMetaArgs']]:
        """Standard object metadata. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#metadata"""
        return pulumi.get(self, "metadata")

    @metadata.setter
    def metadata(self, val: Optional[pulumi.Input['_meta.v1.ObjectMetaArgs']]):
        pulumi.set(self, "metadata", val)

    @property
    @pulumi.getter
    def spec(self) -> Optional[pulumi.Input['LeaseSpecArgs']]:
        """Specification of the Lease. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#spec-and-status"""
        return pulumi.get(self, "spec")

    @spec.setter
    def spec(self, val: Optional[pulumi.Input['LeaseSpecArgs']]):
        pulumi.set(self, "spec", val)
| 46.073446
| 335
| 0.682649
| 1,021
| 8,155
| 5.322233
| 0.15573
| 0.076923
| 0.094406
| 0.060729
| 0.7212
| 0.544902
| 0.478285
| 0.44424
| 0.44424
| 0.419948
| 0
| 0.00233
| 0.210423
| 8,155
| 176
| 336
| 46.335227
| 0.84159
| 0.383691
| 0
| 0.150943
| 1
| 0
| 0.116269
| 0.033433
| 0
| 0
| 0
| 0
| 0
| 1
| 0.188679
| false
| 0
| 0.056604
| 0
| 0.349057
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
0a854fbf5fe92dd3c9a7f42e69f796c6cc578917
| 333
|
py
|
Python
|
bluebottle/tasks/migrations/0012_merge.py
|
terrameijar/bluebottle
|
b4f5ba9c4f03e678fdd36091b29240307ea69ffd
|
[
"BSD-3-Clause"
] | 10
|
2015-05-28T18:26:40.000Z
|
2021-09-06T10:07:03.000Z
|
bluebottle/tasks/migrations/0012_merge.py
|
terrameijar/bluebottle
|
b4f5ba9c4f03e678fdd36091b29240307ea69ffd
|
[
"BSD-3-Clause"
] | 762
|
2015-01-15T10:00:59.000Z
|
2022-03-31T15:35:14.000Z
|
bluebottle/tasks/migrations/0012_merge.py
|
terrameijar/bluebottle
|
b4f5ba9c4f03e678fdd36091b29240307ea69ffd
|
[
"BSD-3-Clause"
] | 9
|
2015-02-20T13:19:30.000Z
|
2022-03-08T14:09:17.000Z
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2016-09-27 15:35
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    """Merge migration: reconciles two parallel 0011 branches of `tasks`.

    A merge migration carries no operations of its own; it only declares
    both divergent leaves as dependencies so the migration graph has a
    single leaf again.
    """

    dependencies = [
        ('tasks', '0011_auto_20160919_1508'),
        ('tasks', '0011_auto_20160920_1019'),
    ]

    operations = []
| 19.588235
| 47
| 0.657658
| 41
| 333
| 5.073171
| 0.804878
| 0.086538
| 0.125
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.183908
| 0.216216
| 333
| 16
| 48
| 20.8125
| 0.613027
| 0.201201
| 0
| 0
| 1
| 0
| 0.212928
| 0.174905
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.222222
| 0
| 0.555556
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
0a9a47e3f3a1f529a8e26eeea21042cb90395afd
| 585
|
py
|
Python
|
mlb/game/migrations/0009_game_game_type.py
|
atadams/mlb
|
633b2eb53e5647c64a48c31ca68a50714483fb1d
|
[
"MIT"
] | null | null | null |
mlb/game/migrations/0009_game_game_type.py
|
atadams/mlb
|
633b2eb53e5647c64a48c31ca68a50714483fb1d
|
[
"MIT"
] | null | null | null |
mlb/game/migrations/0009_game_game_type.py
|
atadams/mlb
|
633b2eb53e5647c64a48c31ca68a50714483fb1d
|
[
"MIT"
] | null | null | null |
# Generated by Django 2.2.8 on 2019-12-14 19:07
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add `Game.game_type`, a one-letter code classifying the game."""

    dependencies = [
        ('game', '0008_auto_20191214_1019'),
    ]

    operations = [
        migrations.AddField(
            model_name='game',
            name='game_type',
            field=models.CharField(
                # Single-letter codes; 'R' (Regular Season) is the default
                # applied to existing rows.
                choices=[
                    ('E', 'Exhibition'),
                    ('S', 'Spring Training'),
                    ('R', 'Regular Season'),
                    ('F', 'Wild Card'),
                    ('D', 'Divisional Series'),
                    ('L', 'League Championship Series'),
                    ('W', 'World Series'),
                ],
                default='R',
                max_length=30,
            ),
        ),
    ]
| 30.789474
| 253
| 0.589744
| 67
| 585
| 5.059701
| 0.820896
| 0.047198
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.073009
| 0.22735
| 585
| 18
| 254
| 32.5
| 0.676991
| 0.076923
| 0
| 0
| 1
| 0
| 0.280669
| 0.042751
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.083333
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
0a9a9f93de2f3ba2e7d9c2affc936358894ee511
| 36,217
|
py
|
Python
|
backend/main/chapters/c06_lists.py
|
Vman45/futurecoder
|
0f4abc0ab00ec473e6cf6f51d534ef2deb26a086
|
[
"MIT"
] | null | null | null |
backend/main/chapters/c06_lists.py
|
Vman45/futurecoder
|
0f4abc0ab00ec473e6cf6f51d534ef2deb26a086
|
[
"MIT"
] | 1
|
2022-02-28T01:35:27.000Z
|
2022-02-28T01:35:27.000Z
|
backend/main/chapters/c06_lists.py
|
suchoudh/futurecoder
|
0f4abc0ab00ec473e6cf6f51d534ef2deb26a086
|
[
"MIT"
] | null | null | null |
# flake8: NOQA E501
import ast
import random
from textwrap import dedent
from typing import List
from main.exercises import generate_list, generate_string
from main.text import ExerciseStep, MessageStep, Page, Step, VerbatimStep, search_ast
from main.utils import returns_stdout
# Course page introducing lists: creation, iteration, summing, and append.
# NOTE(review): the docstrings, `hints`, `program`/`solution` bodies and
# `tests` of the nested step classes are user-facing course content / checked
# data extracted from this source — their text is preserved verbatim.
# Indentation was reconstructed from a whitespace-mangled copy; confirm
# exact in-string spacing against the upstream file.
class IntroducingLists(Page):
    # Run-this-program step: first look at a list literal and a for loop.
    class first_list(VerbatimStep):
        """
        It's time to learn about a powerful new type of value called lists. Here's an example:
        __program_indented__
        """
        def program(self):
            words = ['This', 'is', 'a', 'list']
            for word in words:
                print(word)

    # Shows list syntax rules; the example deliberately mixes element types.
    class can_contain_anything(VerbatimStep):
        """
        A list is a *sequence* (an ordered collection/container) of any number of values.
        The values are often referred to as *elements*.
        They can be anything: numbers, strings, booleans, even lists! They can also be a mixture of types.
        To create a list directly, like above:
        1. Write some square brackets: `[]`
        2. If you don't want an empty list, write some expressions inside to be the elements.
        3. Put commas (`,`) between elements to separate them.
        Here's another example of making a list:
        __program_indented__
        """
        def program(self):
            x = 1
            things = ['Hello', x, x + 3]
            print(things)

    # Demonstrates the accumulate-in-a-loop pattern on numbers.
    class numbers_sum(VerbatimStep):
        """
        As you saw above, lists are *iterable*, meaning you can iterate over them with a `for loop`.
        Here's a program that adds up all the numbers in a list:
        __program_indented__
        """
        def program(self):
            numbers = [3, 1, 4, 1, 5, 9]
            total = 0
            for number in numbers:
                total += number
            print(total)

    # Exercise: same accumulation pattern, but concatenating strings.
    class strings_sum(ExerciseStep):
        """
        Now modify the program so that it can add up a list of strings instead of numbers.
        For example, given:
            words = ['This', 'is', 'a', 'list']
        it should print:
            Thisisalist
        """
        hints = """
        This is very similar to the exercises you've done building up strings character by character.
        The solution is very similar to the program that adds numbers.
        In fact, what happens if you try running that program with a list of strings?
        The problem is that 0. You can't add 0 to a string because numbers and strings are incompatible.
        Is there a similar concept among strings to 0? A blank initial value?
        """
        # The List[str] annotation drives random input generation for checking.
        @returns_stdout
        def solution(self, words: List[str]):
            total = ''
            for word in words:
                total += word
            print(total)
        tests = [
            (['This', 'is', 'a', 'list'], 'Thisisalist'),
            (['The', 'quick', 'brown', 'fox', 'jumps'], 'Thequickbrownfoxjumps'),
        ]

    # Exercise: build a new list with `+=`; `.append` is introduced next step.
    class double_numbers(ExerciseStep):
        """
        Optional bonus challenge: extend the program to insert a separator string *between* each word.
        For example, given
            words = ['This', 'is', 'a', 'list']
            separator = ' - '
        it would output:
            This - is - a - list
        Lists and strings have a lot in common.
        For example, you can add two lists to combine them together into a new list.
        You can also create an empty list that has no elements.
        Check for yourself:
            numbers = [1, 2] + [3, 4]
            print(numbers)
            new_numbers = []
            new_numbers += numbers
            new_numbers += [5]
            print(new_numbers)
        With that knowledge, write a program which takes a list of numbers
        and prints a list where each number has been doubled. For example, given:
            numbers = [3, 1, 4, 1, 5, 9, 2, 6, 5]
        it would print:
            [6, 2, 8, 2, 10, 18, 4, 12, 10]
        """
        hints = """
        Remember that you can multiply numbers using `*`.
        This program is structurally very similar to the programs you've written to build up strings character by character.
        Make a new list, and then build it up element by element in a for loop.
        Start with an empty list.
        You can make a list with one element `x` by just writing `[x]`.
        You can add an element to a list by adding a list containing one element.
        """
        @returns_stdout
        def solution(self, numbers: List[int]):
            double = []
            for number in numbers:
                double += [number * 2]
            print(double)
        tests = [
            ([3, 1, 4, 1, 5, 9, 2, 6, 5], [6, 2, 8, 2, 10, 18, 4, 12, 10]),
            ([0, 1, 2, 3], [0, 2, 4, 6]),
        ]

    # Exercise: filter with `if` + `.append`.
    class filter_numbers(ExerciseStep):
        """
        Great!
        When you want to add a single element to the end of a list, instead of:
            some_list += [element]
        it's actually more common to write:
            some_list.append(element)
        There isn't really a big difference between these, but `.append`
        will be more familiar and readable to most people.
        Now use `.append` to write a program which prints a list containing only the numbers bigger than 5.
        For example, given:
            numbers = [3, 1, 4, 1, 5, 9, 2, 6, 5]
        it would print:
            [9, 6]
        """
        hints = """
        This is very similar to the previous exercise.
        The difference is that sometimes you should skip appending to the new list.
        Use an `if` statement.
        Use a comparison operator to test if a number is big enough to add.
        """
        # TODO enforce not using +=
        @returns_stdout
        def solution(self, numbers: List[int]):
            big_numbers = []
            for number in numbers:
                if number > 5:
                    big_numbers.append(number)
            print(big_numbers)
        tests = [
            ([3, 1, 4, 1, 5, 9, 2, 6, 5], [9, 6]),
            ([0, 2, 4, 6, 8, 10], [6, 8, 10]),
        ]

    final_text = """
    Fantastic! We're making great progress.
    """
# Course page teaching `break`. Step content strings are user-facing and
# preserved verbatim; indentation reconstructed from a whitespace-mangled copy.
class UsingBreak(Page):
    title = "Using `break` to end a loop early"

    # Membership-test exercise; the final_text then shows the `break` upgrade.
    class list_contains_exercise(ExerciseStep):
        """
        Exercise: write a program which takes a list and a value and checks
        if the list contains the value. For example, given:
            things = ['This', 'is', 'a', 'list']
            thing_to_find = 'is'
        it should print `True`, but for
            thing_to_find = 'other'
        it should print `False`.
        """
        hints = """
        You will need a loop.
        You will need an `if` statement.
        You will need a comparison operator.
        Specifically `==`.
        You need a boolean variable that you print at the end.
        If you find the element in the list you should set that variable to `True`.
        Once you've found the element, you can't unfind it.
        That means that once you set the variable to `True`, it should never be set to anything else after that.
        Don't use an `else`.
        There is no reason to ever set the variable to `False` inside the loop.
        """
        @returns_stdout
        def solution(self, things, thing_to_find):
            found = False
            for thing in things:
                if thing == thing_to_find:
                    found = True
            print(found)
        tests = [
            ((['This', 'is', 'a', 'list'], 'is'), True),
            ((['This', 'is', 'a', 'list'], 'other'), False),
            (([1, 2, 3, 4], 1), True),
            (([1, 2, 3, 4], 0), False),
        ]

        @classmethod
        def generate_inputs(cls):
            # Half the generated cases contain the needle; the other half use
            # min-1 / max+1, which can never occur in the generated list.
            contained = random.choice([True, False])
            things = generate_list(int)
            if contained:
                thing_to_find = random.choice(things)
            else:
                thing_to_find = random.choice([
                    min(things) - 1,
                    max(things) + 1,
                ])
            return dict(
                things=things,
                thing_to_find=thing_to_find,
            )

    final_text = """
    Nice!
    A typical solution looks something like this:
        found = False
        for thing in things:
            if thing == thing_to_find:
                found = True
        print(found)
    Your solution is probably similar. It's fine, but it's a bit inefficient.
    That's because it'll loop over the entire list even if it finds the element at the beginning.
    You can stop any loop using a `break` statement, like so:
        for thing in things:
            if thing == thing_to_find:
                found = True
                break
    This is just as correct but skips unnecessary iterations and checks once it finds the element.
    You can use snoop to see the difference.
    """
# Course page: indexing/subscripting, len(), range(), and index-based loops.
# NOTE(review): step docstrings, hints, `program`/`solution` bodies and
# `tests` are user-facing content / checked data — text preserved verbatim.
# Indentation (including inside the dedent()ed expected-output strings) was
# reconstructed from a whitespace-mangled copy — TODO confirm exact spacing,
# especially the aligned columns in the zip exercises, against upstream.
class GettingElementsAtPosition(Page):
    title = "Getting Elements at a Position"

    class introducing_subscripting(VerbatimStep):
        """
        Looping is great, but often you just want to retrieve a single element from the list at a known position.
        Here's how:
        __program_indented__
        """
        def program(self):
            words = ['This', 'is', 'a', 'list']
            print(words[0])
            print(words[1])
            print(words[2])
            print(words[3])

    # Free-form step: the user must trigger an IndexError in the shell.
    class index_error(Step):
        """
        In general, you can get the element at the position `i` with `words[i]`. The operation is called *subscripting* or *indexing*, and the position is called the *index*.
        You've probably noticed that the first index is 0, not 1. In programming, counting starts at 0. It seems weird, but that's how most programming languages do it, and it's generally agreed to be better.
        This also means that the last index in this list of 4 elements is 3. What happens if you try getting an index greater than that?
        """
        program = "words[4]"
        def check(self):
            # Passes once the user's shell output contains an IndexError.
            return "IndexError" in self.result

    class introducing_len_and_range(VerbatimStep):
        """
        There you go. `words[4]` and beyond don't exist, so trying that will give you an error.
        By the way, you can get the number of elements in a list (commonly called the *length*) using `len(words)`.
        That means that the last valid index of the list is `len(words) - 1`, so the last element is `words[len(words) - 1]`. Try these for yourself.
        So in general, the valid indices are:
            [0, 1, 2, ..., len(words) - 2, len(words) - 1]
        There's a handy built in function to give you these values, called `range`:
        __program_indented__
        """
        def program(self):
            for i in range(10):
                print(i)

    class range_len(VerbatimStep):
        """
        `range(n)` is similar to the list `[0, 1, 2, ..., n - 2, n - 1]`.
        This gives us an alternative way to loop over a list:
        __program_indented__
        """
        def program(self):
            words = ['This', 'is', 'a', 'list']
            for index in range(len(words)):
                print(index)
                print(words[index])

    # Exercise: first index of a value; two MessageSteps catch near-misses.
    class index_exercise(ExerciseStep):
        """
        Let's get some exercise! Given a list `things` and a value `to_find`,
        print the first index of `to_find` in the list, i.e. the lowest number `i` such that
        `things[i]` is `to_find`. For example, for
            things = ['on', 'the', 'way', 'to', 'the', 'store']
            to_find = 'the'
        your program should print `1`.
        You can assume that `to_find` appears at least once.
        """
        hints = """
        You will need to look at all the possible indices of `things` and check which one is the answer.
        To look at all possible indices, you will need a loop over `range(len(things))`.
        To check if an index is the answer, you will need to use:
        - `if`
        - the index in a subscript
        - `==`
        Since you're looking for the first index, you need to stop the loop once you find one.
        You learned how to stop a loop in the middle recently.
        You need to use `break`.
        """

        # Shown when the user's code prints every matching index (no break).
        class all_indices(MessageStep, ExerciseStep):
            """
            You're almost there! However, this prints all the indices,
            not just the first one.
            """
            @returns_stdout
            def solution(self, things, to_find):
                for i in range(len(things)):
                    if to_find == things[i]:
                        print(i)
            tests = [
                ((['on', 'the', 'way', 'to', 'the', 'store'], 'the'), "1\n4"),
                (([0, 1, 2, 3, 4, 5, 6, 6], 6), "6\n7"),
            ]

        # Shown when the user's code keeps overwriting and prints the last index.
        class last_index(MessageStep, ExerciseStep):
            """
            You're almost there! However, this prints the *last* index,
            not the first one.
            """
            @returns_stdout
            def solution(self, things, to_find):
                answer = None
                for i in range(len(things)):
                    if to_find == things[i]:
                        answer = i
                print(answer)
            tests = [
                ((['on', 'the', 'way', 'to', 'the', 'store'], 'the'), 4),
                (([0, 1, 2, 3, 4, 5, 6, 6], 6), 7),
            ]

        @returns_stdout
        def solution(self, things, to_find):
            for i in range(len(things)):
                if to_find == things[i]:
                    print(i)
                    break
        tests = [
            ((['on', 'the', 'way', 'to', 'the', 'store'], 'the'), 1),
            (([0, 1, 2, 3, 4, 5, 6, 6], 6), 6),
        ]

        @classmethod
        def generate_inputs(cls):
            # Guarantees `to_find` occurs at least once, per the exercise contract.
            things = generate_list(str)
            to_find = generate_string()
            things += [to_find] * random.randint(1, 3)
            random.shuffle(things)
            return dict(
                things=things,
                to_find=to_find,
            )

    # Exercise: print two equal-length strings vertically side by side.
    class zip_exercise(ExerciseStep):
        """
        Nice!
        By the way, indexing and `len()` also work on strings. Try them out in the shell.
        Here's another exercise. Given two strings of equal length, e.g:
            string1 = "Hello"
            string2 = "World"
        print them vertically side by side, with a space between each character:
            H W
            e o
            l r
            l l
            o d
        """
        hints = """
        Did you experiment with indexing and `len()` with strings in the shell?
        Forget loops for a moment. How would you print just the first line, which has the first character of each of the two strings?
        In the second line you want to print the second character of each string, and so on.
        You will need a `for` loop.
        You will need indexing (subscripting).
        You will need `range`.
        You will need `len`.
        You will need `+`.
        You will need to index both strings.
        You will need to pass the same index to both strings each time to retrieve matching characters.
        """
        @returns_stdout
        def solution(self, string1, string2):
            for i in range(len(string1)):
                char1 = string1[i]
                char2 = string2[i]
                print(char1 + ' ' + char2)
        tests = {
            ("Hello", "World"): dedent("""\
                H W
                e o
                l r
                l l
                o d
                """),
            ("Having", "ablast"): dedent("""\
                H a
                a b
                v l
                i a
                n s
                g t
                """),
        }

        @classmethod
        def generate_inputs(cls):
            # Equal lengths on purpose: the solution indexes both strings with i.
            length = random.randrange(5, 11)
            return dict(
                string1=generate_string(length),
                string2=generate_string(length),
            )

    # Follow-up: unequal lengths, padding the shorter string with spaces.
    class zip_longest_exercise(ExerciseStep):
        """
        Incredible!
        Your solution probably looks something like this:
            for i in range(len(string1)):
                char1 = string1[i]
                char2 = string2[i]
                print(char1 + ' ' + char2)
        This doesn't work so well if the strings have different lengths.
        In fact, it goes wrong in different ways depending on whether `string1` or `string2` is longer.
        Your next challenge is to fix this problem by filling in 'missing' characters with spaces.
        For example, for:
            string1 = "Goodbye"
            string2 = "World"
        output:
            G W
            o o
            o r
            d l
            b d
            y
            e
        and for:
            string1 = "Hello"
            string2 = "Elizabeth"
        output:
            H E
            e l
            l i
            l z
            o a
              b
              e
              t
              h
        """
        hints = [
            "The solution has the same overall structure and "
            "essential elements of the previous solution, "
            "but it's significantly longer and will require "
            "a few additional ideas and pieces.",
            dedent("""
            In particular, it should still contain something like:
                for i in range(...):
                    ...
                    print(char1 + ' ' + char2)
            """),
            "What should go inside `range()`? Neither `len(string1)` nor `len(string2)` is good enough.",
            "You want a loop iteration for every character in the longer string.",
            "That means you need `range(<length of the longest string>)`",
            "In other words you need to find the biggest of the two values "
            "`len(string1)` and `len(string2)`. You've already done an exercise like that.",
            "Once you've sorted out `for i in range(...)`, `i` will sometimes be too big "
            "to be a valid index for both strings. You will need to check if it's too big before indexing.",
            "Remember, the biggest valid index for `string1` is `len(string1) - 1`. "
            "`len(string)` is too big.",
            "You will need two `if` statements, one for each string.",
            "You will need to set e.g. `char1 = ' '` when `string1[i]` is not valid.",
        ]
        # TODO catch user writing string1 < string2
        @returns_stdout
        def solution(self, string1, string2):
            length1 = len(string1)
            length2 = len(string2)
            if length1 > length2:
                length = length1
            else:
                length = length2
            for i in range(length):
                if i < len(string1):
                    char1 = string1[i]
                else:
                    char1 = ' '
                if i < len(string2):
                    char2 = string2[i]
                else:
                    char2 = ' '
                print(char1 + ' ' + char2)
        # NOTE(review): padded lines here should carry the leading/trailing
        # spaces the solution prints — verify against upstream copy.
        tests = {
            ("Goodbye", "World"): dedent("""\
                G W
                o o
                o r
                d l
                b d
                y
                e
                """),
            ("Hello", "Elizabeth"): dedent("""\
                H E
                e l
                l i
                l z
                o a
                  b
                  e
                  t
                  h
                """),
        }

        @classmethod
        def generate_inputs(cls):
            # Two distinct length ranges guarantee unequal lengths; the swap
            # randomizes which of the two strings is the longer one.
            length1 = random.randrange(5, 11)
            length2 = random.randrange(12, 20)
            if random.choice([True, False]):
                length1, length2 = length2, length1
            return dict(
                string1=generate_string(length1),
                string2=generate_string(length2),
            )

    final_text = """
    Magnificent! Take a break, you've earned it!
    """
# Course page on terminology: functions, calls, arguments, return values,
# None, and methods. All step docstrings/program bodies are user-facing
# content preserved verbatim; indentation reconstructed from a mangled copy.
class CallingFunctionsTerminology(Page):
    title = "Terminology: Calling functions and methods"

    class print_functions(VerbatimStep):
        """
        It's time to expand your vocabulary some more.
        `print` and `len` are ***functions***. See for yourself:
        __program_indented__
        """
        def program(self):
            print(len)
            print(print)

    class introducing_callable(VerbatimStep):
        """
        An expression like `len(things)` or `print(things)` is a function ***call*** - when you write that, you are ***calling*** the function `len` or `print`. The fact that this is possible means that functions are ***callable***:
        __program_indented__
        """
        def program(self):
            print(callable(len))

    # This step's program deliberately ends in a TypeError (calling a string).
    class not_callable(VerbatimStep):
        """
        Most things are not callable, so trying to call them will give you an error:
        __program_indented__
        """
        # noinspection PyCallingNonCallable
        def program(self):
            f = 'a string'
            print(callable(f))
            f()

    class print_returns_none(VerbatimStep):
        """
        In the call `len(things)`, `things` is an ***argument***. Sometimes you will also see the word ***parameter***, which means basically the same thing as argument. It's a bit like you're giving the argument to the function - specifically we say that the argument `things` is *passed* to `len`, and `len` *accepts* or *receives* the argument.
        `len(things)` will evaluate to a number such as 3, in which case we say that `len` ***returned*** 3.
        All calls have to return something...even if it's nothing. For example, `print`'s job is to display something on screen, not to return a useful value. So it returns something useless instead:
        __program_indented__
        """
        # noinspection PyNoneFunctionAssignment
        def program(self):
            things = [1, 2, 3]
            length = len(things)
            printed = print(length)
            print(printed)

    # Deliberately errors: len(None) raises TypeError.
    class len_of_none(VerbatimStep):
        """
        `None` is a special 'null' value which can't do anything interesting. It's a common placeholder that represents the lack of a real useful value. Functions that don't want to return anything return `None` by default. If you see an error message about `None` or `NoneType`, it often means you assigned the wrong thing to a variable:
        __program_indented__
        """
        # noinspection PyNoneFunctionAssignment,PyUnusedLocal,PyTypeChecker
        def program(self):
            things = print([1, 2, 3])
            length = len(things)

    class methods_of_str(VerbatimStep):
        """
        A ***method*** is a function which belongs to a type, and can be called on all values of that type using `.`. For example, `upper` and `lower` are methods of strings, which are called with e.g. `word.upper()`:
        __program_indented__
        """
        def program(self):
            word = 'Hello'
            print(word.upper)
            print(word.upper())

    # Deliberately errors: strings have no append method (AttributeError).
    class no_append_for_str(VerbatimStep):
        """
        Another example is that `append` is a method of lists. But you can't use `.upper` on a list or `.append` on a string:
        __program_indented__
        """
        # noinspection PyUnresolvedReferences
        def program(self):
            word = 'Hello'
            word.append('!')

    final_text = """
    The word 'attribute' in the error message refers to the use of `.` - the error actually comes just from `word.append`, without even a call.
    """
# Reference page for list functions/methods, plus two "search for it" steps
# checked by AST inspection rather than by output. Content strings preserved
# verbatim; indentation reconstructed from a whitespace-mangled copy.
class FunctionsAndMethodsForLists(Page):
    # TODO this is quite the information dump and I'd like it to be a little more interactive,
    # but users don't need to know these functions off by heart.
    class sum_list(Step):
        """
        Let's review how to work with lists. Suppose we have a list `nums = [1, 2, 3]`. You can use:
        - **`append`**: Add an element to the end of the list. `nums.append(4)` changes the list to `[1, 2, 3, 4]`.
        - **`len`**: Returns the number of elements. `len(nums)` is `3`.
        - **`range`**: `range(n)` is an object similar to the list of numbers from 0 to `n - 1`. In particular, `range(len(nums))` is like `[0, 1, 2]`.
        - **`subscripting`**: Get a value at an index. `nums[0]` is 1, `nums[1]` is 2, `nums[2]` is 3.
        - **`+`**: Concatenates lists. `nums + [4, 5]` is `[1, 2, 3, 4, 5]`.
        Here's some new things. Try them out in the shell.
        - **`subscript assignment`**: Set a value at an index. `nums[0] = 9` changes the list to `[9, 2, 3]`.
        - **`join`**: Add a list of strings with a separator in between. This is a method of strings (the separator) which takes an iterable of strings as an argument. `'--'.join(['apples', 'oranges', 'bananas'])` returns `'apples--oranges--bananas'`. You can also use an empty string if you don't want a separator, e.g. `''.join(['apples', 'oranges', 'bananas'])` returns `'applesorangesbananas'`.
        - **`sum`**: Add a list of numbers. `sum(nums)` is 6.
        - **`in`**: A comparison operator that checks if a value is in a list. `2 in nums` is `True`, but `4 in nums` is `False`.
        - **`index`**: Returns the first index of a value in a list. `[7, 8, 9, 8].index(8)` is 1. Raises an error if the value isn't there.
        You may recognise some of these from your exercises. I assure you that those exercises were not pointless, as you've now learned valuable fundamental skills. For example, you can use `in` to check if a list contains 5, but there's no similarly easy way to check for a number bigger than 5.
        It's useful to know these functions, but it's not easy to learn them all, and there's many more. A more important skill is being able to look things up. For example, here are some typical ways you might Google the above functions if you forgot their names:
        - `append`
            - python add element to list
            - python add item at end of list
        - `len`
            - python size of list
            - python number of elements in list
            - python how many characters in string
        - `join`
            - python combine list of strings with separator
            - python add together list of strings with string in between
        - `sum`
            - python add list of numbers
            - python total of numbers
        - `in`
            - python check if list contains value
            - python test if list has element
        - `index`
            - python get position of element
            - python get index of value
        Let's practice this skill now. Find a function/method that returns the value in a list which is bigger than any other value. For example, given the list `[21, 55, 4, 91, 62, 49]`, it will return `91`. You should write the answer in the shell as a single small expression. For example, if you were looking for the function `sum`, you could write `sum([21, 55, 4, 91, 62, 49])`. Don't solve this manually with a loop.
        """
        hints = """
        Use the words 'python' and 'list' in your search query.
        In one word, what's special about `91` in the list `[21, 55, 4, 91, 62, 49]`?
        'biggest' or 'largest'
        'python biggest value in list'
        """
        program = "max([21, 55, 4, 91, 62, 49])"
        def check(self):
            # Accepts any statement whose AST contains a call to `max`.
            return search_ast(
                self.stmt,
                ast.Call(func=ast.Name(id='max')),
            )

    class list_insert(Step):
        """
        Good find! Let's do one more. If you have a list:
            nums = [1, 2, 3, 4, 5]
        You could write `nums.append(9)` and `nums` would change to:
            [1, 2, 3, 4, 5, 9]
        But suppose you don't want the 9 to be at the end, you want it to go between the second and third elements:
            [1, 2, 9, 3, 4, 5]
        Call the right function/method in the shell to do that.
        """
        hints = """
        Use the words 'python' and 'list' in your search query.
        Instead of putting the value at the beginning or end, we want to put it ____________?
        'in the middle' or 'at an index' or 'at a particular position'
        'python add value at index'
        """
        program = "nums.insert(2, 9)"
        def check(self):
            # Accepts any `.insert(2, 9)` method call, whatever the receiver.
            return search_ast(
                self.stmt,
                ast.Call(func=ast.Attribute(attr='insert'),
                         args=[ast.Constant(value=2),
                               ast.Constant(value=9)]),
            )

    class dir_list(VerbatimStep):
        """
        Perfect!
        It can also be useful to Google things like "python list tutorial", e.g. if:
        - Googling a specific method has failed so you want to find it manually.
        - You're still confused about lists after this course.
        - It's been a while since you learned about lists and you need a reminder.
        - You're struggling to solve a problem with lists and you need to go back to basics and strengthen your foundations.
        There are also ways to find information without any googling. Try `__program__` in the shell.
        """
        program = "dir([])"

    final_text = """
    `dir()` returns a list of the argument's attributes, which are mostly methods. Many will start with `__` which you can ignore for now - scroll to the end of the list and you'll see some familiar methods.
    Here are a few more useful functions/methods. Suppose `nums = [28, 99, 10, 81, 59, 64]`
    - **`sorted`**: Takes an iterable and returns a list of the elements in order. `sorted(nums)` returns `[10, 28, 59, 64, 81, 99]`.
    - **`pop`**: Removes and returns an element at a given index. `nums.pop(3)` removes `nums[3]` (`81`) from the list and returns it. Without an argument, i.e. just `nums.pop()`, it will remove and return the last element.
    - **`remove`**: Removes the first occurrence of the given element. `nums.remove(10)` will leave `nums` as `[28, 99, 81, 59, 64]`. Raises an error if the value doesn't exist. Equivalent to `nums.pop(nums.index(10))`.
    - **`count`**: Returns the number of times the argument appears in the list. `[1, 2, 3, 2, 7, 2, 5].count(2)` is 3.
    You've already seen that `len` and subscripting work with strings, a bit as if strings are lists of characters. Strings also support some of the new methods we've learned, not just for characters but for any substring. For example:
    - `'the' in 'feed the dog and the cat'` is `True`
    - `'feed the dog and the cat'.count('the')` is 2
    - `'feed the dog and the cat'.index('the')` is 5
    Note that in most cases, methods which modify a list in place (`append`, `insert`, `remove`) merely return `None`, while the remaining functions/methods return a new useful value without changing the original argument. The only exception is the `pop` method.
    Modifying a value directly is called *mutation* - types of values which can be mutated are *mutable*, while those that can't are *immutable*. Strings are immutable - they don't have any methods like `append` or even subscript assignment. You simply can't change a string - you can only create new strings and use those instead. That means that this is a useless statement on its own:
        word.upper()
    The string referred to by `word` isn't modified, instead `word.upper()` returned a new string which was immediately discarded. If you want to change the value that `word` refers to, you have to assign a new value to the variable:
        word = word.upper()
    Or you can use `word.upper()` immediately in a larger expression, e.g.
        if word.lower() == 'yes':
    """
class UnderstandingProgramsWithPythonTutor(Page):
    # Page with no interactive steps: it only shows `final_text`, which
    # introduces the "Python Tutor" button in the course editor UI.
    # NOTE(review): indentation of the embedded code sample was reconstructed —
    # confirm against the rendered markdown.
    final_text = """
It's time to learn about another tool to explore programs. Put some code in the editor and then click the new "Python Tutor" button. Here's some example code if you want:

    all_numbers = [2, 4, 8, 1, 9, 7]

    small_numbers = []
    big_numbers = []

    for number in all_numbers:
        if number <= 5:
            small_numbers.append(number)
        else:
            big_numbers.append(number)

    print(small_numbers)
    print(big_numbers)

The button will open a new tab with a visualisation from [pythontutor.com](http://pythontutor.com).
There you can navigate through the program step by step with the "Prev" or "Next" buttons, or drag
the slider left or right. You can also see the values of variables on the right.
"""
class EqualsVsIs(Page):
    # Teaches the difference between equality (`==`) and identity (`is`)
    # using two list variables that are first equal-but-distinct, then aliased.
    title = "`==` vs `is`"

    class two_separate_lists(VerbatimStep):
        """
        It's time to learn some technical details that are often misunderstood and lead to errors.

        Run this program:

        __program_indented__
        """

        # Two equal but *distinct* list objects: `==` prints True, `is` prints
        # False, and appending to list1 leaves list2 unchanged.
        def program(self):
            list1 = [1, 2, 3]
            list2 = [1, 2, 3]

            print(list1)
            print(list2)
            print(list1 == list2)
            print(list1 is list2)

            list1.append(4)

            print(list1)
            print(list2)

    class same_list(VerbatimStep):
        """
        This program is quite straightforward and mostly consists of things you're familiar with.

        We create two variables which refer to lists.
        The lists have the same elements, so they are equal: `list1 == list2` is `True`.

        But then there's a new comparison operator: `is`. Here `list1 is list2` is `False`.
        That means that regardless of the two lists being equal,
        they are still two separate, distinct, individual lists.
        As a result, when you append 4 to `list1`, only `list1` changes.

        Now change `list2 = [1, 2, 3]` to `list2 = list1` and see what difference it makes.
        """

        # The student edits the previous program, so the text above doesn't
        # embed this one verbatim.
        program_in_text = False

        # Both names now refer to *one* list object: `is` prints True and the
        # append is visible through both variables.
        def program(self):
            list1 = [1, 2, 3]
            list2 = list1

            print(list1)
            print(list2)
            print(list1 == list2)
            print(list1 is list2)

            list1.append(4)

            print(list1)
            print(list2)

    final_text = """
Now `list1 is list2` is `True`, because *there is only one list*, and the two variables
`list1` and `list2` both refer to that same list. `list1.append(4)` appends to the one list
and the result can be seen in both `print(list1)` and `print(list2)` because both lines
are now just different ways of printing the same list.

I recommend running both versions with Python Tutor to see how it visualises the difference.
In the second case, the two variables both have arrows pointing to a single list object.

`list2 = list1` doesn't create an eternal link between the variables. If you assign a new value
to *either* of the variables, e.g. `list1 = [7, 8, 9]`, the other variable will be unaffected
and will still point to the original list.

Basically, an assignment like:

    list2 = <expression>

means 'make the variable `list2` refer to whatever `<expression>` evaluates to'.
It doesn't make a copy of that value, which is how both variables can end up pointing to the same list.
But as we've learned before, `list2` doesn't remember `<expression>`, only the value.
It doesn't know about other variables.

You can copy a list with the `copy` method:

    list2 = list1.copy()

This will make the program behave like the first version again.

If you come across this kind of problem and you're still having trouble understanding this stuff, read the essay [Facts and myths about Python names and values](https://nedbatchelder.com/text/names.html).
"""
class ModifyingWhileIterating(Page):
    # Single-text page warning against mutating a list while looping over it.
    # Fixes vs. previous version: "a numbers" -> "a list of numbers",
    # "smaller than 10" -> "smaller than or equal to 10" (the code below uses
    # `number <= 10`), and the "itererated" typo.
    final_text = """
Consider this program. It loops through a list of numbers and removes the ones smaller than or equal to 10. Or at least, it tries to. I recommend running it with Python Tutor.

    numbers = [10, 7, 8, 3, 12, 15]

    for i in range(len(numbers)):
        number = numbers[i]
        if number <= 10:
            numbers.pop(i)

    print(numbers)

(remember that `numbers.pop(i)` removes the element from `numbers` at index `i`)

As it runs, it clearly skips even looking at 7 or 3 and doesn't remove them, and at the end it fails when it tries to access an index that's too high. Can you see why this happens?

The index variable `i` runs through the usual values 0, 1, 2, ... as it's supposed to, but as the list changes those are no longer the positions we want. For example in the first iteration `i` is 0 and `number` is 10, which gets removed. This shifts the rest of the numbers left one position, so now 7 is in position 0. But then in the next iteration `i` is 1, and `numbers[i]` is 8. 7 got skipped.

We could try writing the program to use `remove` instead of `pop` so we don't have to use indices. It even looks nicer this way.

    numbers = [10, 7, 8, 3, 12, 15]

    for number in numbers:
        if number <= 10:
            numbers.remove(number)

    print(numbers)

But it turns out this does the same thing, for the same reason. Iterating over a list still goes through the indices under the hood.

The lesson here is to ***never modify something while you iterate over it***. Keep mutation and looping separate.

The good news is that there are many ways to solve this. You can instead just loop over a copy, as in:

    for number in numbers.copy():

Now the list being modified and the list being iterated over are separate objects, even if they start out with equal contents.

Similarly, you could loop over the original and modify a copy:

    numbers = [10, 7, 8, 3, 12, 15]

    big_numbers = numbers.copy()

    for number in numbers:
        if number <= 10:
            big_numbers.remove(number)

    print(big_numbers)

Or you could build up a new list from scratch. In this case, we've already done a similar thing in an exercise:

    numbers = [10, 7, 8, 3, 12, 15]

    big_numbers = []

    for number in numbers:
        if number > 10:
            big_numbers.append(number)

    print(big_numbers)
"""
| 34.038534
| 415
| 0.608333
| 5,225
| 36,217
| 4.174354
| 0.147177
| 0.009399
| 0.002613
| 0.011462
| 0.17074
| 0.130301
| 0.10834
| 0.088121
| 0.072716
| 0.057631
| 0
| 0.022418
| 0.294254
| 36,217
| 1,063
| 416
| 34.070555
| 0.830908
| 0.342408
| 0
| 0.356061
| 1
| 0.083333
| 0.554466
| 0.00836
| 0
| 0
| 0
| 0.002822
| 0
| 1
| 0.058712
| false
| 0.001894
| 0.013258
| 0.005682
| 0.176136
| 0.104167
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
0aa3e139fa08c65698af3c065bdbf7e9c6759f7b
| 1,946
|
py
|
Python
|
NewsPaperD7(final)/NewsPaper/News/migrations/0001_initial.py
|
GregTMJ/django-files
|
dfd2c8da596522b77fb3dfc8089f0d287a94d53b
|
[
"MIT"
] | 1
|
2021-05-29T21:17:56.000Z
|
2021-05-29T21:17:56.000Z
|
NewsPaperD6/NewsPaper/News/migrations/0001_initial.py
|
GregTMJ/django-files
|
dfd2c8da596522b77fb3dfc8089f0d287a94d53b
|
[
"MIT"
] | null | null | null |
NewsPaperD6/NewsPaper/News/migrations/0001_initial.py
|
GregTMJ/django-files
|
dfd2c8da596522b77fb3dfc8089f0d287a94d53b
|
[
"MIT"
] | 1
|
2021-06-30T12:43:39.000Z
|
2021-06-30T12:43:39.000Z
|
# Generated by Django 3.2 on 2021-04-15 18:05
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated initial migration (Django 3.2): creates the Author,
    # Category and Post models for the News app. Do not hand-edit once applied.

    initial = True

    dependencies = [
        # Depends on the swappable user model setting rather than a concrete app.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Author',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # One author record per user account.
                ('author', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='Category',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('category', models.CharField(default='select category', max_length=255, unique=True)),
                # Users subscribed to this category.
                ('subscriber', models.ManyToManyField(related_name='subscriber', to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='Post',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('choosing', models.BooleanField(default=False)),
                ('time_in', models.DateTimeField(auto_now_add=True)),
                ('title', models.CharField(max_length=255, unique=True)),
                ('text', models.TextField(max_length=255)),
                ('rating', models.FloatField(default=0.0)),
                ('author', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='News.author', verbose_name='User')),
                ('category', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='News.category')),
            ],
        ),
    ]
| 42.304348
| 153
| 0.611511
| 202
| 1,946
| 5.757426
| 0.376238
| 0.034394
| 0.048151
| 0.075666
| 0.437661
| 0.399828
| 0.399828
| 0.399828
| 0.285469
| 0.211522
| 0
| 0.017194
| 0.252826
| 1,946
| 45
| 154
| 43.244444
| 0.782669
| 0.022097
| 0
| 0.394737
| 1
| 0
| 0.079432
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.078947
| 0
| 0.184211
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
0aa43893204c6ba098361aa19c39257195d9d726
| 425
|
py
|
Python
|
blitz_api/migrations/0020_auto_20190529_1200.py
|
MelanieFJNR/Blitz-API
|
9a6daecd158fe07a6aeb80cbf586781eb688f0f9
|
[
"MIT"
] | 3
|
2019-10-22T00:16:49.000Z
|
2021-07-15T07:44:43.000Z
|
blitz_api/migrations/0020_auto_20190529_1200.py
|
MelanieFJNR/Blitz-API
|
9a6daecd158fe07a6aeb80cbf586781eb688f0f9
|
[
"MIT"
] | 1,183
|
2018-04-19T18:40:30.000Z
|
2022-03-31T21:05:05.000Z
|
blitz_api/migrations/0020_auto_20190529_1200.py
|
MelanieFJNR/Blitz-API
|
9a6daecd158fe07a6aeb80cbf586781eb688f0f9
|
[
"MIT"
] | 12
|
2018-04-17T19:16:42.000Z
|
2022-01-27T00:19:59.000Z
|
# Generated by Django 2.0.8 on 2019-05-29 16:00
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration (Django 2.0.8): changes ExportMedia.file to
    # upload under a year/month-structured path. Do not hand-edit once applied.

    dependencies = [
        ('blitz_api', '0019_merge_20190524_1719'),
    ]

    operations = [
        migrations.AlterField(
            model_name='exportmedia',
            name='file',
            field=models.FileField(upload_to='export/%Y/%m/', verbose_name='file'),
        ),
    ]
| 22.368421
| 83
| 0.607059
| 48
| 425
| 5.229167
| 0.833333
| 0.063745
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.099042
| 0.263529
| 425
| 18
| 84
| 23.611111
| 0.702875
| 0.105882
| 0
| 0
| 1
| 0
| 0.171958
| 0.063492
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.083333
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
0ab496b4beb92ca3fe52c60cfcbb81b2b17b5de3
| 22,976
|
py
|
Python
|
serde/src/gen/thrift/gen-py/megastruct/ttypes.py
|
amCharlie/hive
|
e1870c190188a3b706849059969c8bec2220b6d2
|
[
"Apache-2.0"
] | 2
|
2021-04-24T08:07:45.000Z
|
2021-04-24T08:07:46.000Z
|
serde/src/gen/thrift/gen-py/megastruct/ttypes.py
|
amCharlie/hive
|
e1870c190188a3b706849059969c8bec2220b6d2
|
[
"Apache-2.0"
] | 14
|
2020-12-26T22:01:38.000Z
|
2022-02-09T22:41:46.000Z
|
serde/src/gen/thrift/gen-py/megastruct/ttypes.py
|
amCharlie/hive
|
e1870c190188a3b706849059969c8bec2220b6d2
|
[
"Apache-2.0"
] | 7
|
2021-08-16T07:49:24.000Z
|
2022-03-17T09:04:34.000Z
|
#
# Autogenerated by Thrift Compiler (0.13.0)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py
#
from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException
from thrift.protocol.TProtocol import TProtocolException
from thrift.TRecursive import fix_spec
import sys
from thrift.transport import TTransport
# Collects every generated struct class in this module; passed to fix_spec()
# at the bottom of the file to resolve thrift_spec cross-references.
all_structs = []
class MyEnum(object):
    # Thrift-generated enum emulation: plain int constants plus two lookup
    # tables for converting between names and wire values.
    LLAMA = 1
    ALPACA = 2

    _VALUES_TO_NAMES = {
        1: "LLAMA",
        2: "ALPACA",
    }

    _NAMES_TO_VALUES = {
        "LLAMA": 1,
        "ALPACA": 2,
    }
class MiniStruct(object):
    """
    Attributes:
    - my_string
    - my_enum
    """

    def __init__(self, my_string=None, my_enum=None,):
        self.my_string = my_string
        self.my_enum = my_enum

    def read(self, iprot):
        # Deserialize this struct from the given input protocol.
        # Fast path: use the C-accelerated decoder when available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    # Bytes are decoded to text on Python 2 only.
                    self.my_string = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.I32:
                    self.my_enum = iprot.readI32()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip it for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Serialize this struct to the given output protocol.
        # Fast path: use the C-accelerated encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('MiniStruct')
        # Unset (None) fields are omitted from the output entirely.
        if self.my_string is not None:
            oprot.writeFieldBegin('my_string', TType.STRING, 1)
            oprot.writeString(self.my_string.encode('utf-8') if sys.version_info[0] == 2 else self.my_string)
            oprot.writeFieldEnd()
        if self.my_enum is not None:
            oprot.writeFieldBegin('my_enum', TType.I32, 2)
            oprot.writeI32(self.my_enum)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields, so nothing to validate.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        # Value equality: same class and identical attribute dicts.
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class MegaStruct(object):
    """
    Attributes:
    - my_bool
    - my_byte
    - my_16bit_int
    - my_32bit_int
    - my_64bit_int
    - my_double
    - my_string
    - my_binary
    - my_string_string_map
    - my_string_enum_map
    - my_enum_string_map
    - my_enum_struct_map
    - my_enum_stringlist_map
    - my_enum_structlist_map
    - my_stringlist
    - my_structlist
    - my_enumlist
    - my_stringset
    - my_enumset
    - my_structset
    """

    def __init__(self, my_bool=None, my_byte=None, my_16bit_int=None, my_32bit_int=None, my_64bit_int=None, my_double=None, my_string=None, my_binary=None, my_string_string_map=None, my_string_enum_map=None, my_enum_string_map=None, my_enum_struct_map=None, my_enum_stringlist_map=None, my_enum_structlist_map=None, my_stringlist=None, my_structlist=None, my_enumlist=None, my_stringset=None, my_enumset=None, my_structset=None,):
        self.my_bool = my_bool
        self.my_byte = my_byte
        self.my_16bit_int = my_16bit_int
        self.my_32bit_int = my_32bit_int
        self.my_64bit_int = my_64bit_int
        self.my_double = my_double
        self.my_string = my_string
        self.my_binary = my_binary
        self.my_string_string_map = my_string_string_map
        self.my_string_enum_map = my_string_enum_map
        self.my_enum_string_map = my_enum_string_map
        self.my_enum_struct_map = my_enum_struct_map
        self.my_enum_stringlist_map = my_enum_stringlist_map
        self.my_enum_structlist_map = my_enum_structlist_map
        self.my_stringlist = my_stringlist
        self.my_structlist = my_structlist
        self.my_enumlist = my_enumlist
        self.my_stringset = my_stringset
        self.my_enumset = my_enumset
        self.my_structset = my_structset

    def read(self, iprot):
        # Deserialize this struct from the given input protocol.
        # Fast path: use the C-accelerated decoder when available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            # Each branch checks the wire type before reading; a mismatched
            # type is skipped so corrupt/evolved fields don't break decoding.
            if fid == 1:
                if ftype == TType.BOOL:
                    self.my_bool = iprot.readBool()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.BYTE:
                    self.my_byte = iprot.readByte()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.I16:
                    self.my_16bit_int = iprot.readI16()
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.I32:
                    self.my_32bit_int = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 5:
                if ftype == TType.I64:
                    self.my_64bit_int = iprot.readI64()
                else:
                    iprot.skip(ftype)
            elif fid == 6:
                if ftype == TType.DOUBLE:
                    self.my_double = iprot.readDouble()
                else:
                    iprot.skip(ftype)
            elif fid == 7:
                if ftype == TType.STRING:
                    # Bytes are decoded to text on Python 2 only.
                    self.my_string = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 8:
                if ftype == TType.STRING:
                    self.my_binary = iprot.readBinary()
                else:
                    iprot.skip(ftype)
            elif fid == 9:
                if ftype == TType.MAP:
                    self.my_string_string_map = {}
                    (_ktype1, _vtype2, _size0) = iprot.readMapBegin()
                    for _i4 in range(_size0):
                        _key5 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        _val6 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        self.my_string_string_map[_key5] = _val6
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 10:
                if ftype == TType.MAP:
                    self.my_string_enum_map = {}
                    (_ktype8, _vtype9, _size7) = iprot.readMapBegin()
                    for _i11 in range(_size7):
                        _key12 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        _val13 = iprot.readI32()
                        self.my_string_enum_map[_key12] = _val13
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 11:
                if ftype == TType.MAP:
                    self.my_enum_string_map = {}
                    (_ktype15, _vtype16, _size14) = iprot.readMapBegin()
                    for _i18 in range(_size14):
                        _key19 = iprot.readI32()
                        _val20 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        self.my_enum_string_map[_key19] = _val20
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 12:
                if ftype == TType.MAP:
                    self.my_enum_struct_map = {}
                    (_ktype22, _vtype23, _size21) = iprot.readMapBegin()
                    for _i25 in range(_size21):
                        _key26 = iprot.readI32()
                        _val27 = MiniStruct()
                        _val27.read(iprot)
                        self.my_enum_struct_map[_key26] = _val27
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 13:
                if ftype == TType.MAP:
                    self.my_enum_stringlist_map = {}
                    (_ktype29, _vtype30, _size28) = iprot.readMapBegin()
                    for _i32 in range(_size28):
                        _key33 = iprot.readI32()
                        _val34 = []
                        (_etype38, _size35) = iprot.readListBegin()
                        for _i39 in range(_size35):
                            _elem40 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                            _val34.append(_elem40)
                        iprot.readListEnd()
                        self.my_enum_stringlist_map[_key33] = _val34
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 14:
                if ftype == TType.MAP:
                    self.my_enum_structlist_map = {}
                    (_ktype42, _vtype43, _size41) = iprot.readMapBegin()
                    for _i45 in range(_size41):
                        _key46 = iprot.readI32()
                        _val47 = []
                        (_etype51, _size48) = iprot.readListBegin()
                        for _i52 in range(_size48):
                            _elem53 = MiniStruct()
                            _elem53.read(iprot)
                            _val47.append(_elem53)
                        iprot.readListEnd()
                        self.my_enum_structlist_map[_key46] = _val47
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 15:
                if ftype == TType.LIST:
                    self.my_stringlist = []
                    (_etype57, _size54) = iprot.readListBegin()
                    for _i58 in range(_size54):
                        _elem59 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        self.my_stringlist.append(_elem59)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 16:
                if ftype == TType.LIST:
                    self.my_structlist = []
                    (_etype63, _size60) = iprot.readListBegin()
                    for _i64 in range(_size60):
                        _elem65 = MiniStruct()
                        _elem65.read(iprot)
                        self.my_structlist.append(_elem65)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 17:
                if ftype == TType.LIST:
                    self.my_enumlist = []
                    (_etype69, _size66) = iprot.readListBegin()
                    for _i70 in range(_size66):
                        _elem71 = iprot.readI32()
                        self.my_enumlist.append(_elem71)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 18:
                if ftype == TType.SET:
                    self.my_stringset = set()
                    (_etype75, _size72) = iprot.readSetBegin()
                    for _i76 in range(_size72):
                        _elem77 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        self.my_stringset.add(_elem77)
                    iprot.readSetEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 19:
                if ftype == TType.SET:
                    self.my_enumset = set()
                    (_etype81, _size78) = iprot.readSetBegin()
                    for _i82 in range(_size78):
                        _elem83 = iprot.readI32()
                        self.my_enumset.add(_elem83)
                    iprot.readSetEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 20:
                if ftype == TType.SET:
                    self.my_structset = set()
                    (_etype87, _size84) = iprot.readSetBegin()
                    for _i88 in range(_size84):
                        _elem89 = MiniStruct()
                        _elem89.read(iprot)
                        self.my_structset.add(_elem89)
                    iprot.readSetEnd()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip it for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Serialize this struct to the given output protocol.
        # Fast path: use the C-accelerated encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('MegaStruct')
        # Unset (None) fields are omitted from the output entirely.
        if self.my_bool is not None:
            oprot.writeFieldBegin('my_bool', TType.BOOL, 1)
            oprot.writeBool(self.my_bool)
            oprot.writeFieldEnd()
        if self.my_byte is not None:
            oprot.writeFieldBegin('my_byte', TType.BYTE, 2)
            oprot.writeByte(self.my_byte)
            oprot.writeFieldEnd()
        if self.my_16bit_int is not None:
            oprot.writeFieldBegin('my_16bit_int', TType.I16, 3)
            oprot.writeI16(self.my_16bit_int)
            oprot.writeFieldEnd()
        if self.my_32bit_int is not None:
            oprot.writeFieldBegin('my_32bit_int', TType.I32, 4)
            oprot.writeI32(self.my_32bit_int)
            oprot.writeFieldEnd()
        if self.my_64bit_int is not None:
            oprot.writeFieldBegin('my_64bit_int', TType.I64, 5)
            oprot.writeI64(self.my_64bit_int)
            oprot.writeFieldEnd()
        if self.my_double is not None:
            oprot.writeFieldBegin('my_double', TType.DOUBLE, 6)
            oprot.writeDouble(self.my_double)
            oprot.writeFieldEnd()
        if self.my_string is not None:
            oprot.writeFieldBegin('my_string', TType.STRING, 7)
            oprot.writeString(self.my_string.encode('utf-8') if sys.version_info[0] == 2 else self.my_string)
            oprot.writeFieldEnd()
        if self.my_binary is not None:
            oprot.writeFieldBegin('my_binary', TType.STRING, 8)
            oprot.writeBinary(self.my_binary)
            oprot.writeFieldEnd()
        if self.my_string_string_map is not None:
            oprot.writeFieldBegin('my_string_string_map', TType.MAP, 9)
            oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.my_string_string_map))
            for kiter90, viter91 in self.my_string_string_map.items():
                oprot.writeString(kiter90.encode('utf-8') if sys.version_info[0] == 2 else kiter90)
                oprot.writeString(viter91.encode('utf-8') if sys.version_info[0] == 2 else viter91)
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        if self.my_string_enum_map is not None:
            oprot.writeFieldBegin('my_string_enum_map', TType.MAP, 10)
            oprot.writeMapBegin(TType.STRING, TType.I32, len(self.my_string_enum_map))
            for kiter92, viter93 in self.my_string_enum_map.items():
                oprot.writeString(kiter92.encode('utf-8') if sys.version_info[0] == 2 else kiter92)
                oprot.writeI32(viter93)
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        if self.my_enum_string_map is not None:
            oprot.writeFieldBegin('my_enum_string_map', TType.MAP, 11)
            oprot.writeMapBegin(TType.I32, TType.STRING, len(self.my_enum_string_map))
            for kiter94, viter95 in self.my_enum_string_map.items():
                oprot.writeI32(kiter94)
                oprot.writeString(viter95.encode('utf-8') if sys.version_info[0] == 2 else viter95)
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        if self.my_enum_struct_map is not None:
            oprot.writeFieldBegin('my_enum_struct_map', TType.MAP, 12)
            oprot.writeMapBegin(TType.I32, TType.STRUCT, len(self.my_enum_struct_map))
            for kiter96, viter97 in self.my_enum_struct_map.items():
                oprot.writeI32(kiter96)
                viter97.write(oprot)
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        if self.my_enum_stringlist_map is not None:
            oprot.writeFieldBegin('my_enum_stringlist_map', TType.MAP, 13)
            oprot.writeMapBegin(TType.I32, TType.LIST, len(self.my_enum_stringlist_map))
            for kiter98, viter99 in self.my_enum_stringlist_map.items():
                oprot.writeI32(kiter98)
                oprot.writeListBegin(TType.STRING, len(viter99))
                for iter100 in viter99:
                    oprot.writeString(iter100.encode('utf-8') if sys.version_info[0] == 2 else iter100)
                oprot.writeListEnd()
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        if self.my_enum_structlist_map is not None:
            oprot.writeFieldBegin('my_enum_structlist_map', TType.MAP, 14)
            oprot.writeMapBegin(TType.I32, TType.LIST, len(self.my_enum_structlist_map))
            for kiter101, viter102 in self.my_enum_structlist_map.items():
                oprot.writeI32(kiter101)
                oprot.writeListBegin(TType.STRUCT, len(viter102))
                for iter103 in viter102:
                    iter103.write(oprot)
                oprot.writeListEnd()
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        if self.my_stringlist is not None:
            oprot.writeFieldBegin('my_stringlist', TType.LIST, 15)
            oprot.writeListBegin(TType.STRING, len(self.my_stringlist))
            for iter104 in self.my_stringlist:
                oprot.writeString(iter104.encode('utf-8') if sys.version_info[0] == 2 else iter104)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        if self.my_structlist is not None:
            oprot.writeFieldBegin('my_structlist', TType.LIST, 16)
            oprot.writeListBegin(TType.STRUCT, len(self.my_structlist))
            for iter105 in self.my_structlist:
                iter105.write(oprot)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        if self.my_enumlist is not None:
            oprot.writeFieldBegin('my_enumlist', TType.LIST, 17)
            oprot.writeListBegin(TType.I32, len(self.my_enumlist))
            for iter106 in self.my_enumlist:
                oprot.writeI32(iter106)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        if self.my_stringset is not None:
            oprot.writeFieldBegin('my_stringset', TType.SET, 18)
            oprot.writeSetBegin(TType.STRING, len(self.my_stringset))
            for iter107 in self.my_stringset:
                oprot.writeString(iter107.encode('utf-8') if sys.version_info[0] == 2 else iter107)
            oprot.writeSetEnd()
            oprot.writeFieldEnd()
        if self.my_enumset is not None:
            oprot.writeFieldBegin('my_enumset', TType.SET, 19)
            oprot.writeSetBegin(TType.I32, len(self.my_enumset))
            for iter108 in self.my_enumset:
                oprot.writeI32(iter108)
            oprot.writeSetEnd()
            oprot.writeFieldEnd()
        if self.my_structset is not None:
            oprot.writeFieldBegin('my_structset', TType.SET, 20)
            oprot.writeSetBegin(TType.STRUCT, len(self.my_structset))
            for iter109 in self.my_structset:
                iter109.write(oprot)
            oprot.writeSetEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields, so nothing to validate.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        # Value equality: same class and identical attribute dicts.
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
# Module tail: attach the field specifications used by the (optional)
# C-accelerated encoder/decoder, then let fix_spec resolve struct references.
all_structs.append(MiniStruct)
MiniStruct.thrift_spec = (
    None,  # 0
    (1, TType.STRING, 'my_string', 'UTF8', None, ),  # 1
    (2, TType.I32, 'my_enum', None, None, ),  # 2
)
all_structs.append(MegaStruct)
MegaStruct.thrift_spec = (
    None,  # 0
    (1, TType.BOOL, 'my_bool', None, None, ),  # 1
    (2, TType.BYTE, 'my_byte', None, None, ),  # 2
    (3, TType.I16, 'my_16bit_int', None, None, ),  # 3
    (4, TType.I32, 'my_32bit_int', None, None, ),  # 4
    (5, TType.I64, 'my_64bit_int', None, None, ),  # 5
    (6, TType.DOUBLE, 'my_double', None, None, ),  # 6
    (7, TType.STRING, 'my_string', 'UTF8', None, ),  # 7
    (8, TType.STRING, 'my_binary', 'BINARY', None, ),  # 8
    (9, TType.MAP, 'my_string_string_map', (TType.STRING, 'UTF8', TType.STRING, 'UTF8', False), None, ),  # 9
    (10, TType.MAP, 'my_string_enum_map', (TType.STRING, 'UTF8', TType.I32, None, False), None, ),  # 10
    (11, TType.MAP, 'my_enum_string_map', (TType.I32, None, TType.STRING, 'UTF8', False), None, ),  # 11
    (12, TType.MAP, 'my_enum_struct_map', (TType.I32, None, TType.STRUCT, [MiniStruct, None], False), None, ),  # 12
    (13, TType.MAP, 'my_enum_stringlist_map', (TType.I32, None, TType.LIST, (TType.STRING, 'UTF8', False), False), None, ),  # 13
    (14, TType.MAP, 'my_enum_structlist_map', (TType.I32, None, TType.LIST, (TType.STRUCT, [MiniStruct, None], False), False), None, ),  # 14
    (15, TType.LIST, 'my_stringlist', (TType.STRING, 'UTF8', False), None, ),  # 15
    (16, TType.LIST, 'my_structlist', (TType.STRUCT, [MiniStruct, None], False), None, ),  # 16
    (17, TType.LIST, 'my_enumlist', (TType.I32, None, False), None, ),  # 17
    (18, TType.SET, 'my_stringset', (TType.STRING, 'UTF8', False), None, ),  # 18
    (19, TType.SET, 'my_enumset', (TType.I32, None, False), None, ),  # 19
    (20, TType.SET, 'my_structset', (TType.STRUCT, [MiniStruct, None], False), None, ),  # 20
)
fix_spec(all_structs)
del all_structs
| 43.680608
| 430
| 0.550531
| 2,549
| 22,976
| 4.70969
| 0.105139
| 0.057976
| 0.022491
| 0.035985
| 0.596668
| 0.480966
| 0.371512
| 0.316868
| 0.247813
| 0.237984
| 0
| 0.045433
| 0.346666
| 22,976
| 525
| 431
| 43.76381
| 0.754314
| 0.025244
| 0
| 0.396963
| 1
| 0
| 0.034345
| 0.003956
| 0
| 0
| 0
| 0
| 0
| 1
| 0.030369
| false
| 0
| 0.010846
| 0.013015
| 0.08243
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
0ac18453ebf1417fb6591ada4674116fa981b20f
| 402
|
py
|
Python
|
biserici_inlemnite/app/migrations/0096_bisericapage_datare_an.py
|
ck-tm/biserici-inlemnite
|
c9d12127b92f25d3ab2fcc7b4c386419fe308a4e
|
[
"MIT"
] | null | null | null |
biserici_inlemnite/app/migrations/0096_bisericapage_datare_an.py
|
ck-tm/biserici-inlemnite
|
c9d12127b92f25d3ab2fcc7b4c386419fe308a4e
|
[
"MIT"
] | null | null | null |
biserici_inlemnite/app/migrations/0096_bisericapage_datare_an.py
|
ck-tm/biserici-inlemnite
|
c9d12127b92f25d3ab2fcc7b4c386419fe308a4e
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.1.13 on 2021-10-29 11:07
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration (Django 3.1.13): adds the optional integer
    # field `datare_an` (dating year) to BisericaPage. Do not hand-edit once applied.

    dependencies = [
        ('app', '0095_bisericapage_utitle'),
    ]

    operations = [
        migrations.AddField(
            model_name='bisericapage',
            name='datare_an',
            field=models.IntegerField(blank=True, null=True),
        ),
    ]
| 21.157895
| 61
| 0.606965
| 43
| 402
| 5.581395
| 0.813953
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.068966
| 0.278607
| 402
| 18
| 62
| 22.333333
| 0.758621
| 0.114428
| 0
| 0
| 1
| 0
| 0.135593
| 0.067797
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.083333
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
0ac42e49c824529d0aa71dbe888c2a691322545e
| 2,527
|
py
|
Python
|
ui_splash_screen.py
|
hirokiyaginuma/scriptspinner-software
|
87185f237f76feeee33a2b74a4d05be088bde011
|
[
"Unlicense"
] | null | null | null |
ui_splash_screen.py
|
hirokiyaginuma/scriptspinner-software
|
87185f237f76feeee33a2b74a4d05be088bde011
|
[
"Unlicense"
] | null | null | null |
ui_splash_screen.py
|
hirokiyaginuma/scriptspinner-software
|
87185f237f76feeee33a2b74a4d05be088bde011
|
[
"Unlicense"
] | null | null | null |
# -*- coding: utf-8 -*-
################################################################################
## Form generated from reading UI file 'splash_screen.ui'
##
## Created by: Qt User Interface Compiler version 5.15.1
##
## WARNING! All changes made in this file will be lost when recompiling UI file!
################################################################################
from PySide2.QtCore import *
from PySide2.QtGui import *
from PySide2.QtWidgets import *
class Ui_Splash_Screen(object):
    # Generated by Qt's uic from splash_screen.ui — regenerate from the .ui
    # file instead of editing by hand (see the warning in the file header).

    def setupUi(self, Splash_Screen):
        # Builds the widget tree: a central frame holding the logo label and a
        # rounded, gradient-styled progress bar.
        if not Splash_Screen.objectName():
            Splash_Screen.setObjectName(u"Splash_Screen")
        Splash_Screen.resize(720, 425)
        self.centralwidget = QWidget(Splash_Screen)
        self.centralwidget.setObjectName(u"centralwidget")
        self.verticalLayout = QVBoxLayout(self.centralwidget)
        self.verticalLayout.setSpacing(0)
        self.verticalLayout.setObjectName(u"verticalLayout")
        self.verticalLayout.setContentsMargins(0, 0, 0, 0)
        self.frame = QFrame(self.centralwidget)
        self.frame.setObjectName(u"frame")
        self.frame.setFrameShape(QFrame.StyledPanel)
        self.frame.setFrameShadow(QFrame.Raised)
        self.frame.setLineWidth(0)
        # Full-frame logo image.
        self.label = QLabel(self.frame)
        self.label.setObjectName(u"label")
        self.label.setGeometry(QRect(0, 0, 720, 425))
        self.label.setLineWidth(0)
        self.label.setPixmap(QPixmap(u"img/SS_logo.jpg"))
        self.label.setIndent(0)
        self.progressBar = QProgressBar(self.frame)
        self.progressBar.setObjectName(u"progressBar")
        self.progressBar.setGeometry(QRect(70, 330, 591, 41))
        self.progressBar.setStyleSheet(u"QProgressBar {\n"
            "	background-color:rgb(149, 165, 166);\n"
            "	border-style: none;\n"
            "	border-radius: 10px;\n"
            "	text-align: center;\n"
            "}\n"
            "QProgressBar::chunk {\n"
            "	border-radius: 10px;\n"
            "	background-color: qlineargradient(spread:pad, x1:0, y1:0, x2:1, y2:0, stop:0 rgba(210, 157, 255, 255), stop:1 rgba(156, 69, 255, 255));\n"
            "}")
        # Initial progress value shown while the application loads.
        self.progressBar.setValue(24)
        self.verticalLayout.addWidget(self.frame)
        Splash_Screen.setCentralWidget(self.centralwidget)
        self.retranslateUi(Splash_Screen)
        QMetaObject.connectSlotsByName(Splash_Screen)
    # setupUi

    def retranslateUi(self, Splash_Screen):
        # Applies translatable text; the logo label intentionally has no text.
        Splash_Screen.setWindowTitle(QCoreApplication.translate("Splash_Screen", u"MainWindow", None))
        self.label.setText("")
    # retranslateUi
| 37.716418
| 140
| 0.646617
| 285
| 2,527
| 5.677193
| 0.435088
| 0.103832
| 0.038937
| 0.029666
| 0.02225
| 0
| 0
| 0
| 0
| 0
| 0
| 0.042085
| 0.172537
| 2,527
| 66
| 141
| 38.287879
| 0.731707
| 0.091017
| 0
| 0.043478
| 1
| 0.021739
| 0.192743
| 0.024505
| 0
| 0
| 0
| 0
| 0
| 1
| 0.043478
| false
| 0
| 0.065217
| 0
| 0.130435
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
0afd7a5b152406bcaea034f10b6d1b88302e3d68
| 434
|
py
|
Python
|
web/snowflake.py
|
jphacks/C_2118
|
a63279e92362e09d1856e3d44edb4793d370fd7a
|
[
"MIT"
] | null | null | null |
web/snowflake.py
|
jphacks/C_2118
|
a63279e92362e09d1856e3d44edb4793d370fd7a
|
[
"MIT"
] | 5
|
2021-10-30T00:55:45.000Z
|
2021-10-30T04:23:36.000Z
|
web/snowflake.py
|
jphacks/C_2118
|
a63279e92362e09d1856e3d44edb4793d370fd7a
|
[
"MIT"
] | null | null | null |
import time
class Snowflake:
    """Generate 64-bit Snowflake-style unique IDs.

    Bit layout (most to least significant):
      41 bits  milliseconds elapsed since ``epoch``
      10 bits  machine id
      12 bits  per-instance serial number
    """

    def __init__(self, init_serial_no=0, machine_id=0, epoch=0):
        # machine_id and epoch were previously hard-coded to 0; they are now
        # parameters with the same defaults, so existing callers see
        # identical behavior while multi-node deployments can disambiguate.
        self.machine_id = machine_id
        self.epoch = epoch
        self.serial_no = init_serial_no

    def generate(self):
        """Return the next unique id and advance the serial counter.

        Note: the serial is masked to 12 bits, so more than 4096 ids
        generated within the same millisecond can collide.
        """
        unique_id = (
            ((int(time.time() * 1000) - self.epoch) & 0x1FFFFFFFFFF) << 22
            | (self.machine_id & 0x3FF) << 12
            | (self.serial_no & 0xFFF)
        )
        self.serial_no += 1
        return unique_id
| 24.111111
| 74
| 0.546083
| 53
| 434
| 4.188679
| 0.45283
| 0.18018
| 0.162162
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.059649
| 0.343318
| 434
| 17
| 75
| 25.529412
| 0.719298
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0.052995
| 0
| 0
| 1
| 0.142857
| false
| 0
| 0.071429
| 0
| 0.357143
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
7c154bd7941e6664ea91468d29e01f725ad32c14
| 2,914
|
py
|
Python
|
app/auth/views.py
|
ifaraag/app
|
d952f0dc58fd703074c19ed3235c1520119baf5f
|
[
"MIT"
] | null | null | null |
app/auth/views.py
|
ifaraag/app
|
d952f0dc58fd703074c19ed3235c1520119baf5f
|
[
"MIT"
] | null | null | null |
app/auth/views.py
|
ifaraag/app
|
d952f0dc58fd703074c19ed3235c1520119baf5f
|
[
"MIT"
] | null | null | null |
from flask import Blueprint, render_template, redirect, url_for, request, flash
from flask.ext.login import login_required, login_user, logout_user
from werkzeug import check_password_hash, generate_password_hash
from app import db, login_manager, pubnub, app, _callback
from .models import User
from .forms import LoginForm, SignupForm
# Blueprint collecting the authentication views (login/signup/logout).
mod_auth = Blueprint('auth', __name__)
@mod_auth.route('/login', methods=['GET', 'POST'])
def login():
    """Log a user in.

    GET renders the login form; POST checks the submitted credentials
    against the ``users`` collection and, on success, starts the session
    and redirects to the device list.
    """
    form = LoginForm(request.form)
    error = None
    # (Removed a leftover debug print of request.method.)
    if request.method == 'POST':
        user = db.users.find_one({'username': request.form['username']})
        # NOTE(review): distinct messages for "unknown user" vs "bad
        # password" allow username enumeration; kept as-is since the
        # strings are user-visible behavior — consider unifying them.
        if not user:
            error = 'User does not exist'
        elif not check_password_hash(user['password'], request.form['password']):
            error = 'Invalid credentials. Please try again.'
        else:
            user_obj = User(user['username'])
            login_user(user_obj)
            return redirect(url_for('devices.list_devices'))
    return render_template('auth/login.html',
                           title='Log In to Hydrosmart',
                           form=form,
                           error=error)
@mod_auth.route('/signup', methods=['GET', 'POST'])
def signup():
    """Register a new user.

    GET renders the signup form; POST creates the user document (storing
    only a password hash), registers the user's PubNub channel with full
    access, then redirects to the dashboard.
    """
    form = SignupForm(request.form)
    error = None
    if request.method == 'POST':
        existing_user = db.users.find_one({'username' :
                                           request.form['username']})
        if existing_user:
            error = 'Username already exists'
        else:
            # Only the hash is persisted, never the plain-text password.
            new_user = {'username' : request.form['username'],
                        'email' : request.form['email'],
                        'zip' : request.form['zip'],
                        'password' : generate_password_hash(request.form['password'])}
            db.users.insert_one(new_user)
            user = db.users.find_one({'username': request.form['username']})
            # Register the user's own channel and grant read/write/manage.
            # NOTE(review): ttl=0 — confirm this means "no expiry" for the
            # PubNub client in use.
            pubnub.channel_group_add_channel(channel_group=app.config['PUBNUB_CHANNEL_GRP'], channel=user['username'])
            pubnub.grant(channel=user['username'], auth_key=app.config['PUBNUB_AUTH_KEY'], read=True, write=True, manage=True, ttl=0)
            # NOTE(review): no login_user() before this redirect — the
            # dashboard may bounce the fresh user back to /login; confirm.
            return redirect(url_for('dashboard.dashboard'))
    return render_template('auth/signup.html', form=form,
                           title='Sign Up for Hydrosmart', error=error)
# @mod_auth.route('/googlelogin', methods=['GET', 'POST'])
@mod_auth.route("/logout")
@login_required
def logout():
    """End the current session and send the user back to the login page."""
    logout_user()
    flash("Logged out.")
    return redirect('/login')
@login_manager.unauthorized_handler
def unauthorized_callback():
    """Redirect anonymous users hitting @login_required views to /login."""
    return redirect('/login')
@login_manager.user_loader
def load_user(username):
    """Flask-Login user loader: rebuild a User from the session username.

    Returns None when the username is no longer in the database, which
    invalidates the session.
    """
    record = db.users.find_one({'username': username})
    return User(record['username']) if record else None
def callback(message, channel):
    # PubNub message handler: persist every received message as-is.
    # NOTE(review): `channel` is unused; the signature is presumably
    # dictated by the PubNub client — confirm.
    db.data.insert_one(message)
def error(message):
    # PubNub error handler. NOTE(review): errors land in the same `data`
    # collection as regular messages — confirm that is intentional.
    db.data.insert_one(message)
| 37.358974
| 133
| 0.630062
| 344
| 2,914
| 5.162791
| 0.296512
| 0.061937
| 0.027027
| 0.031532
| 0.175113
| 0.078266
| 0.078266
| 0.078266
| 0.078266
| 0.052928
| 0
| 0.000451
| 0.239876
| 2,914
| 77
| 134
| 37.844156
| 0.801354
| 0.019218
| 0
| 0.181818
| 1
| 0
| 0.156162
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.106061
| false
| 0.045455
| 0.090909
| 0.015152
| 0.318182
| 0.045455
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
7c48ecfa52411dc6356f3fa1289a95505f086e55
| 2,599
|
py
|
Python
|
issues/migrations/0001_initial.py
|
QizaiMing/ergo-project-manager
|
2b02b2ab6d9e48bfccbbca8c05180b07177dcb77
|
[
"MIT"
] | null | null | null |
issues/migrations/0001_initial.py
|
QizaiMing/ergo-project-manager
|
2b02b2ab6d9e48bfccbbca8c05180b07177dcb77
|
[
"MIT"
] | 3
|
2020-11-01T22:08:38.000Z
|
2022-03-12T00:49:00.000Z
|
issues/migrations/0001_initial.py
|
QizaiMing/ergo-project-manager
|
2b02b2ab6d9e48bfccbbca8c05180b07177dcb77
|
[
"MIT"
] | 2
|
2021-01-03T07:17:16.000Z
|
2021-05-29T17:27:11.000Z
|
# Generated by Django 2.2.12 on 2020-05-01 03:34
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial schema for the issues app (auto-generated by Django).

    Creates Issue, Comment and Attachment. Applied migrations are
    historical records and must not be hand-edited.
    """

    initial = True

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Issue',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=100)),
                ('description', models.TextField(max_length=2000)),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('modified', models.DateTimeField(auto_now=True)),
                ('status', models.CharField(choices=[('To Do', 'To Do'), ('In Progress', 'In Progress'), ('Done', 'Done')], default='To Do', max_length=20)),
                ('priority', models.CharField(choices=[('Low', 'Low'), ('Medium', 'Medium'), ('High', 'High')], default='Low', max_length=20)),
                ('assignee', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='assigned', to=settings.AUTH_USER_MODEL)),
                ('creator', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='issues', to=settings.AUTH_USER_MODEL)),
                # The '+' suffix suppresses the reverse accessor for this
                # symmetric self-relation.
                ('linked_to', models.ManyToManyField(related_name='_issue_linked_to_+', to='issues.Issue')),
            ],
        ),
        migrations.CreateModel(
            name='Comment',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('content', models.TextField(max_length=1000)),
                ('creator', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='comments', to=settings.AUTH_USER_MODEL)),
                ('issue', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='comments', to='issues.Issue')),
            ],
        ),
        migrations.CreateModel(
            name='Attachment',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('file', models.FileField(upload_to='media/files')),
                ('issue', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='attachments', to='issues.Issue')),
            ],
        ),
    ]
| 50.960784
| 157
| 0.614852
| 278
| 2,599
| 5.586331
| 0.320144
| 0.036059
| 0.054089
| 0.084997
| 0.529298
| 0.486156
| 0.438506
| 0.386993
| 0.386993
| 0.386993
| 0
| 0.015516
| 0.231243
| 2,599
| 50
| 158
| 51.98
| 0.761762
| 0.017699
| 0
| 0.395349
| 1
| 0
| 0.124657
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.069767
| 0
| 0.162791
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
7c76d6a2f8e354238a96f859815250852db8cda1
| 738
|
py
|
Python
|
kafka-rockset-integration/generate_customers_data.py
|
farkaskid/recipes
|
8eef799cda899ea266f2849d485917f9b0d83190
|
[
"Apache-2.0"
] | 21
|
2019-02-27T22:30:28.000Z
|
2021-07-18T17:26:56.000Z
|
kafka-rockset-integration/generate_customers_data.py
|
farkaskid/recipes
|
8eef799cda899ea266f2849d485917f9b0d83190
|
[
"Apache-2.0"
] | 16
|
2019-07-03T22:04:21.000Z
|
2022-02-26T18:34:05.000Z
|
kafka-rockset-integration/generate_customers_data.py
|
farkaskid/recipes
|
8eef799cda899ea266f2849d485917f9b0d83190
|
[
"Apache-2.0"
] | 11
|
2019-03-13T08:55:31.000Z
|
2022-02-07T08:35:16.000Z
|
"""Generate Customer Data"""
import csv
import random
from config import MIN_CUSTOMER_ID, MAX_CUSTOMER_ID
# Marketing channels a customer can be acquired through.
ACQUISITION_SOURCES = [
    'OrganicSearch',
    'PaidSearch',
    'Email',
    'SocialMedia',
    'Display',
    'Affiliate',  # BUG FIX: a missing comma here previously fused this and
    'Referral'    # the next entry into a single 'AffiliateReferral' item.
]
def main():
    """Write customers.csv: one row per customer id in the configured
    [MIN_CUSTOMER_ID, MAX_CUSTOMER_ID] range, with a random acquisition
    source."""
    # newline='' is required by the csv module to avoid blank rows on
    # Windows (csv.writer performs its own newline translation).
    with open('customers.csv', 'w', newline='') as fout:
        writer = csv.DictWriter(fout, fieldnames=['CustomerID', 'AcquisitionSource'])
        writer.writeheader()
        for customer_id in range(MIN_CUSTOMER_ID, MAX_CUSTOMER_ID + 1):
            record = {
                'CustomerID': int(customer_id),
                # random.choice picks one element directly; the previous
                # random.choices(...).pop() built a one-element list first.
                'AcquisitionSource': random.choice(ACQUISITION_SOURCES)
            }
            writer.writerow(record)


if __name__ == '__main__':
    main()
| 22.363636
| 85
| 0.617886
| 73
| 738
| 5.972603
| 0.616438
| 0.137615
| 0.059633
| 0.073395
| 0.119266
| 0.119266
| 0
| 0
| 0
| 0
| 0
| 0.001832
| 0.260163
| 738
| 32
| 86
| 23.0625
| 0.796703
| 0.02981
| 0
| 0
| 1
| 0
| 0.195775
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.041667
| false
| 0
| 0.125
| 0
| 0.166667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
7c79e12b0a22b9ba1c999ecbf405c389b15998f7
| 6,612
|
py
|
Python
|
life_line_chart/_autogenerate_data.py
|
mustaqimM/life_line_chart
|
a9bbbbdeb5568aa0cc3b3b585337a3d655f4b2d6
|
[
"MIT"
] | null | null | null |
life_line_chart/_autogenerate_data.py
|
mustaqimM/life_line_chart
|
a9bbbbdeb5568aa0cc3b3b585337a3d655f4b2d6
|
[
"MIT"
] | null | null | null |
life_line_chart/_autogenerate_data.py
|
mustaqimM/life_line_chart
|
a9bbbbdeb5568aa0cc3b3b585337a3d655f4b2d6
|
[
"MIT"
] | null | null | null |
import names
import os
import datetime
from random import random
def generate_gedcom_file():
    """Generate a pseudo-random GEDCOM family tree for testing.

    Individuals ('@I<n>@') and families ('@F<n>@') are accumulated in the
    ``db`` dict, then serialized to tests/autogenerated.ged.
    """
    db = {}
    db['n_individuals'] = 0
    db['max_individuals'] = 8000
    db['n_families'] = 0
    # NOTE(review): key is spelled 'yougest' (sic) consistently; kept as-is.
    db['yougest'] = None
    gedcom_content = """
0 HEAD
1 SOUR Gramps
2 VERS 3.3.0
2 NAME Gramps
1 DATE {}
2 TIME 15:35:24
1 SUBM @SUBM@
1 COPR Copyright (c) 2020 Christian Schulze,,,.
1 GEDC
2 VERS 5.5
1 CHAR UTF-8
1 LANG German
""".format(datetime.date.today())

    def generate_individual(db, birth_year, sex=None, last_name=None):
        # Create one INDI record and return its '@I<n>@' id.
        if not sex:
            sex = 'F' if random() < 0.5 else 'M'
        first_name = names.get_first_name(
            gender='male' if sex == 'M' else 'female')
        if random() < 0.3:
            # ~30% of individuals get a second given name.
            first_name += ' ' + \
                names.get_first_name(gender='male' if sex == 'M' else 'female')
        if not last_name:
            last_name = names.get_last_name()
        birth_place = 'Paris' if random() < 0.5 else 'Rome'
        death_place = 'Zorge' if random() < 0.5 else 'Bruegge'
        db['n_individuals'] += 1
        individual_id = '@I{}@'.format(db["n_individuals"])
        death_year = birth_year + 40 + int(random()*20)
        db[individual_id] = {
            'birth': birth_year,
            'death': death_year,
            'sex': sex,
            'last_name': last_name
        }
        birth_date = '1 JUN {}'.format(birth_year)
        # BUG FIX: the death date was formatted from birth_year, so every
        # individual appeared to die on the day they were born.
        death_date = '1 JUN {}'.format(death_year)
        if not db['yougest']:
            db['yougest'] = individual_id
        elif db[db['yougest']]['birth'] < birth_year:
            db['yougest'] = individual_id
        db[individual_id]['string'] = """0 {individual_id} INDI
1 NAME {first_name} /{last_name}/
1 SEX {sex}
1 BIRT
2 DATE {birth_date}
2 PLAC {birth_place}
1 DEAT
2 DATE {death_date}
2 PLAC {death_place}
""".format(**locals())
        return individual_id

    def generate_family(db, husband_id, wife_id, children_ids, marriage_year, marriage_place=None):
        # Create one FAM record linking the spouses and their children.
        if not marriage_place:
            marriage_place = 'London' if random() < 0.5 else 'Tokio'
        db['n_families'] += 1
        marriage_date = '1 MAY {}'.format(marriage_year)
        family_id = "@F{}@".format(db['n_families'])
        db[family_id] = {'string': """0 {family_id} FAM
1 HUSB {husband_id}
1 WIFE {wife_id}
1 MARR
2 DATE {marriage_date}
2 PLAC {marriage_place}
""".format(
            **locals()
        )}
        for child_id in children_ids:
            db[family_id]['string'] += "1 CHIL {}\n".format(child_id)
        return family_id

    def find_by_birth_date(db, from_year, to_year, sex, exclude=[]):
        # Pick a random existing individual of the given sex born strictly
        # inside (from_year, to_year), skipping ids in `exclude`.
        # The mutable default is never mutated here, so it is harmless.
        ids = []
        for individual_id, data in db.items():
            if not individual_id.startswith('@I'):
                continue
            if 'famc' in data:
                if data['birth'] > from_year and data['birth'] < to_year:
                    if sex == data['sex']:
                        if individual_id not in exclude:
                            ids.append(individual_id)
        if ids:
            return ids[int(random()*len(ids))]
        return None

    def generate_recursive_family(db, start_year=1000, generations=2, husband_id=None, wife_id=None, siblings=[], max_children=5):
        # Create (or occasionally reuse) a couple, marry them, generate
        # children, and recurse one generation down for each child.
        if not husband_id:
            if random() < 0.2:
                exclude = siblings.copy()
                if wife_id:
                    exclude += [wife_id]
                husband_id = find_by_birth_date(
                    db, start_year, start_year + 10, sex='M', exclude=exclude)
            if not husband_id:
                husband_id = generate_individual(
                    db, start_year + int(random()*5), sex='M')
            else:
                print('reused {}'.format(husband_id))
        if not wife_id:
            # NOTE(review): `random() < 10.9` is always true (probabilities
            # lie in [0, 1]); the husband branch uses 0.2, so this literal
            # looks like a typo — confirm the intended reuse probability.
            if random() < 10.9:
                exclude = siblings.copy() + [husband_id]
                wife_id = find_by_birth_date(
                    db, start_year, start_year + 10, sex='F', exclude=exclude)
            if not wife_id:
                wife_id = generate_individual(
                    db, start_year + int(random()*5), sex='F')
            else:
                print('reused {}'.format(wife_id))
        # Shrink family size as the population approaches max_individuals.
        n_children = int((1+random()*(max_children-1)) *
                         (1 - db['n_individuals'] / db['max_individuals']))
        marriage_year = start_year + 20 + int(random()*5)
        children_ids = []
        for i in range(n_children):
            children_ids.append(generate_individual(
                db, birth_year=marriage_year + 1 + int(random()*10), last_name=db[husband_id]['last_name']))
        family_id = generate_family(
            db, husband_id, wife_id, children_ids, marriage_year)
        for i in range(n_children):
            db[children_ids[i]]['string'] += "1 FAMC "+family_id + '\n'
            db[children_ids[i]]['famc'] = family_id
            if generations > 0:
                generate_recursive_family(
                    db,
                    db[children_ids[i]]['birth'],
                    generations - 1,
                    children_ids[i] if db[children_ids[i]
                                          ]['sex'] == 'M' else None,
                    children_ids[i] if db[children_ids[i]
                                          ]['sex'] == 'F' else None,
                    children_ids)
        db[husband_id]['string'] += "1 FAMS "+family_id + '\n'
        db[wife_id]['string'] += "1 FAMS "+family_id + '\n'

    generate_recursive_family(db, generations=8, max_children=4)
    # Serialize all individuals first, then all families, then the trailer.
    for k, v in db.items():
        if k.startswith('@I'):
            gedcom_content += v['string']
    for k, v in db.items():
        if k.startswith('@F'):
            gedcom_content += v['string']
    gedcom_content += '0 TRLR\n'
    open(os.path.join(os.path.dirname(__file__), '..', 'tests',
                      'autogenerated.ged'), 'w').write(gedcom_content)
# generate_gedcom_file()
def generate_individual_images():
    """Render placeholder 'Age N' portrait PNGs for test individual @I6@.

    Requires Pillow and a resolvable 'arial.ttf'; writes 20 images under
    tests/images/.
    """
    from PIL import Image, ImageDraw, ImageFont

    def generate_one_image(filename, text, font_size=22, pos=(15, 40), size=(100, 100), color=(160, 160, 160)):
        # Draw `text` in black on a solid grey background and save it.
        img = Image.new('RGB', size, color=color)
        d = ImageDraw.Draw(img)
        font = ImageFont.truetype(r'arial.ttf', font_size)
        d.text(pos, text, fill=(0, 0, 0), font=font)
        img.save(filename)

    # Ages 1, 5, 9, ... 77 (20 images, step 4).
    for i in range(20):
        generate_one_image(
            'tests/images/individual_I6_image_age_{}.png'.format(
                1+i*4
            ), 'Age {}'.format(
                1+i*4,
            ))


# Module import has the side effect of generating the images.
generate_individual_images()
| 35.548387
| 130
| 0.545977
| 850
| 6,612
| 4.035294
| 0.196471
| 0.041691
| 0.02449
| 0.020408
| 0.223032
| 0.18484
| 0.159767
| 0.146939
| 0.146939
| 0.11312
| 0
| 0.029804
| 0.320024
| 6,612
| 185
| 131
| 35.740541
| 0.733096
| 0.007411
| 0
| 0.096386
| 1
| 0
| 0.148849
| 0.006558
| 0
| 0
| 0
| 0
| 0
| 1
| 0.042169
| false
| 0
| 0.03012
| 0
| 0.096386
| 0.012048
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
7c9e60fb8b9a1847e8db908d6cfa14b5a53e1aaf
| 623
|
py
|
Python
|
API/migrations/0005_alter_news_date_time_alter_news_headline.py
|
kgarchie/ReSTful-Django-API
|
851c76eb75747042ceac0a6c164266409ca935d4
|
[
"MIT"
] | null | null | null |
API/migrations/0005_alter_news_date_time_alter_news_headline.py
|
kgarchie/ReSTful-Django-API
|
851c76eb75747042ceac0a6c164266409ca935d4
|
[
"MIT"
] | null | null | null |
API/migrations/0005_alter_news_date_time_alter_news_headline.py
|
kgarchie/ReSTful-Django-API
|
851c76eb75747042ceac0a6c164266409ca935d4
|
[
"MIT"
] | null | null | null |
# Generated by Django 4.0.3 on 2022-03-23 14:31
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration altering News.date_time and News.headline.

    NOTE(review): the DateTimeField default is a *fixed* timestamp captured
    when makemigrations ran (2022-03-23); rows created later all default to
    that moment. The model probably wants timezone.now — but applied
    migrations are historical records and should not be edited.
    """

    dependencies = [
        ('API', '0004_alter_news_date_time_alter_news_headline'),
    ]

    operations = [
        migrations.AlterField(
            model_name='news',
            name='date_time',
            field=models.DateTimeField(default=datetime.datetime(2022, 3, 23, 17, 31, 17, 27766)),
        ),
        migrations.AlterField(
            model_name='news',
            name='headline',
            field=models.CharField(max_length=100),
        ),
    ]
| 24.92
| 98
| 0.603531
| 69
| 623
| 5.289855
| 0.594203
| 0.049315
| 0.136986
| 0.158904
| 0.20274
| 0.20274
| 0
| 0
| 0
| 0
| 0
| 0.089485
| 0.282504
| 623
| 24
| 99
| 25.958333
| 0.727069
| 0.072231
| 0
| 0.333333
| 1
| 0
| 0.126736
| 0.078125
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.111111
| 0
| 0.277778
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
7cf46d5f8307606187101597e795384399b48446
| 804
|
py
|
Python
|
vote/migrations/0005_auto_20210204_1900.py
|
jnegrete2005/JuradoFMS
|
25848037e51de1781c419155615d0fb41edc07ec
|
[
"MIT"
] | 2
|
2021-02-24T21:57:50.000Z
|
2021-03-15T08:44:09.000Z
|
vote/migrations/0005_auto_20210204_1900.py
|
jnegrete2005/JuradoFMS
|
25848037e51de1781c419155615d0fb41edc07ec
|
[
"MIT"
] | null | null | null |
vote/migrations/0005_auto_20210204_1900.py
|
jnegrete2005/JuradoFMS
|
25848037e51de1781c419155615d0fb41edc07ec
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.1.5 on 2021-02-05 00:00
import django.contrib.postgres.fields
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: make Competitor.min1/min2 nullable.

    Each field is a PostgreSQL array of up to 9 small positive integers
    (per-minute scores); blank/null allows rounds not yet scored.
    """

    dependencies = [
        ('vote', '0004_auto_20210131_1621'),
    ]

    operations = [
        migrations.AlterField(
            model_name='competitor',
            name='min1',
            field=django.contrib.postgres.fields.ArrayField(base_field=models.PositiveSmallIntegerField(), blank=True, null=True, size=9, verbose_name='minuto 1'),
        ),
        migrations.AlterField(
            model_name='competitor',
            name='min2',
            field=django.contrib.postgres.fields.ArrayField(base_field=models.PositiveSmallIntegerField(), blank=True, null=True, size=9, verbose_name='minuto 2'),
        ),
    ]
| 32.16
| 163
| 0.655473
| 88
| 804
| 5.886364
| 0.522727
| 0.07529
| 0.121622
| 0.156371
| 0.633205
| 0.633205
| 0.467181
| 0.467181
| 0.467181
| 0.467181
| 0
| 0.059295
| 0.223881
| 804
| 24
| 164
| 33.5
| 0.770833
| 0.05597
| 0
| 0.333333
| 1
| 0
| 0.093791
| 0.030383
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.111111
| 0
| 0.277778
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
7cfa745e3890fcda9ffd072f599dc7be286f99a5
| 11,039
|
py
|
Python
|
fileHandler.py
|
Omer-Sella/ldpc
|
955c0bc32236e171365cbbb88f00574302771610
|
[
"MIT"
] | null | null | null |
fileHandler.py
|
Omer-Sella/ldpc
|
955c0bc32236e171365cbbb88f00574302771610
|
[
"MIT"
] | null | null | null |
fileHandler.py
|
Omer-Sella/ldpc
|
955c0bc32236e171365cbbb88f00574302771610
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Thu Nov 28 12:10:11 2019
@author: Omer
"""
## File handler
## This file was initially intended purely to generate the matrices for the near earth code found in: https://public.ccsds.org/Pubs/131x1o2e2s.pdf
## The values from the above pdf were copied manually to a txt file, and it is the purpose of this file to parse it.
## The emphasis here is on correctness, I currently do not see a reason to generalise this file, since matrices will be saved in either json or some matrix friendly format.
import numpy as np
from scipy.linalg import circulant
#import matplotlib.pyplot as plt
import scipy.io
import common
import hashlib
import os
# Resolve the project root from the LDPC environment variable, falling back
# to this file's parent directory when it is unset (the fallback yields a
# pathlib.Path rather than a str).
projectDir = os.environ.get('LDPC')
if projectDir == None:
    import pathlib
    projectDir = pathlib.Path(__file__).parent.absolute()
## Omer Sella: added on 01/12/2020, need to make sure this doesn't break anything.
import sys
sys.path.insert(1, projectDir)
# Integer dtypes used throughout the matrix handling below.
FILE_HANDLER_INT_DATA_TYPE = np.int32
GENERAL_CODE_MATRIX_DATA_TYPE = np.int32
# Weights to convert a 4-element binary array (MSB first) to its value.
NIBBLE_CONVERTER = np.array([8, 4, 2, 1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE)
def nibbleToHex(inputArray):
    """Return the hex digit (as a string) for a 4-bit binary array.

    `inputArray` holds four 0/1 values, most significant bit first; the
    result is '0'-'9' or 'A'-'F'.
    """
    bits = [int(b) for b in inputArray]
    # Same weighting the old NIBBLE_CONVERTER dot product used.
    n = bits[0] * 8 + bits[1] * 4 + bits[2] * 2 + bits[3]
    # format(n, 'X') yields an uppercase hex digit, replacing the previous
    # sixteen-way if/elif chain with identical results for 0..15.
    return format(n, 'X')
def binaryArraytoHex(inputArray):
    """Convert a binary array whose length is a multiple of 4 into hex.

    Returns a pair: an array of single-character digit strings and the
    digits joined into one string.
    """
    length = len(inputArray)
    assert (length % 4 == 0)
    digitCount = length // 4
    outputArray = np.zeros(digitCount, dtype = str)
    digits = []
    for idx in range(digitCount):
        digit = nibbleToHex(inputArray[4 * idx : 4 * idx + 4])
        outputArray[idx] = digit
        digits.append(digit)
    return outputArray, ''.join(digits)
def hexStringToBinaryArray(hexString):
    """Expand an uppercase hex string into a flat binary (0/1) array.

    Each hex digit becomes four bits, most significant first. Characters
    outside '0'-'9'/'A'-'F' (including lowercase) contribute nothing,
    matching the original sixteen-way if/elif chain, which appended an
    empty nibble for unrecognized characters.
    """
    outputBinary = np.array([], dtype = GENERAL_CODE_MATRIX_DATA_TYPE)
    for ch in hexString:
        if ch in '0123456789ABCDEF':
            value = int(ch, 16)
            nibble = np.array([(value >> 3) & 1, (value >> 2) & 1,
                               (value >> 1) & 1, value & 1],
                              dtype = GENERAL_CODE_MATRIX_DATA_TYPE)
        else:
            # Invalid character: skip it (original behavior).
            nibble = np.array([], dtype = GENERAL_CODE_MATRIX_DATA_TYPE)
        outputBinary = np.hstack((outputBinary, nibble))
    return outputBinary
def hexToCirculant(hexStr, circulantSize):
    """Build a circulant matrix whose generating row is decoded from hex."""
    binaryArray = hexStringToBinaryArray(hexStr)
    if len(binaryArray) < circulantSize:
        # NOTE(review): this branch *replaces* the decoded bits with an
        # all-zero vector (np.hstack over a zeros array of the deficit
        # length) instead of padding binaryArray — confirm zero-padding of
        # the decoded bits was not the intent.
        binaryArray = np.hstack(np.zeros(circulantSize-len(binaryArray), dtype = GENERAL_CODE_MATRIX_DATA_TYPE))
    else:
        # Drop the leading pad bit so exactly circulantSize bits remain.
        binaryArray = binaryArray[1:]
    circulantMatrix = circulant(binaryArray)
    # scipy's circulant shifts the first column down; transpose to obtain
    # row-wise shifts of the generating vector instead.
    circulantMatrix = circulantMatrix.T
    return circulantMatrix
def hotLocationsToCirculant(locationList, circulantSize):
    """Create a circulant matrix from the indices of the 'hot' (1) entries
    of its generating vector."""
    seed = np.zeros(circulantSize, dtype = GENERAL_CODE_MATRIX_DATA_TYPE)
    seed[locationList] = 1
    # Transpose so each row is a shifted copy of the generating vector.
    return circulant(seed).T
def readMatrixFromFile(fileName, dim0, dim1, circulantSize, isRow = True, isHex = True, isGenerator = True ):
    """Assemble a block-circulant code matrix from a text description.

    For a generator (isGenerator True) each pair of lines holds the hex
    form of a [left | right] circulant pair, and the result is
    [I | accumulated blocks] (systematic form). Otherwise each line holds
    comma-separated hot locations and two block rows are stacked.

    NOTE(review): parameters isRow and isHex are accepted but never used.
    """
    # This function assumes that each line in the file contains the non zero locations of the first row of a circulant.
    # Each line in the file then defines a circulant, and the order in which they are defined is top to bottom left to right, i.e.:
    # line 0 defines circulant 0,0
    with open(fileName) as fid:
        lines = fid.readlines()
    if isGenerator:
        for i in range((dim0 // circulantSize) ):
            # Two consecutive lines form one block row of the generator.
            bLeft = hexToCirculant(lines[2 * i], circulantSize)
            bRight = hexToCirculant(lines[2 * i + 1], circulantSize)
            newBlock = np.hstack((bLeft, bRight))
            if i == 0:
                accumulatedBlock = newBlock
            else:
                accumulatedBlock = np.vstack((accumulatedBlock, newBlock))
        newMatrix = np.hstack((np.eye(dim0, dtype = GENERAL_CODE_MATRIX_DATA_TYPE), accumulatedBlock))
    else:
        # Parity: first block row of circulants, built left to right.
        for i in range((dim1 // circulantSize)):
            locationList1 = list(lines[ i].rstrip('\n').split(','))
            locationList1 = list(map(int, locationList1))
            upBlock = hotLocationsToCirculant(locationList1, circulantSize)
            if i == 0:
                accumulatedUpBlock1 = upBlock
            else:
                accumulatedUpBlock1 = np.hstack((accumulatedUpBlock1, upBlock))
        # Second block row, taken from the following lines of the file.
        for i in range((dim1 // circulantSize)):
            locationList = list(lines[(dim1 // circulantSize) + i].rstrip('\n').split(','))
            locationList = list(map(int, locationList))
            newBlock = hotLocationsToCirculant(locationList, circulantSize)
            if i == 0:
                accumulatedBlock2 = newBlock
            else:
                accumulatedBlock2 = np.hstack((accumulatedBlock2, newBlock))
        newMatrix = np.vstack((accumulatedUpBlock1, accumulatedBlock2))
    return newMatrix
def binaryMatrixToHexString(binaryMatrix, circulantSize):
    """Encode a block-circulant binary matrix as one hex string.

    Takes the first row of each circulant block (row-major over blocks)
    and concatenates their hex encodings.

    NOTE(review): leftPadding is the scalar value 4 - (circulantSize % 4)
    prepended as an *element* of the row — not zero padding — and when
    circulantSize % 4 == 0 it prepends 4, a non-binary value. Confirm this
    alignment trick is intentional.
    """
    leftPadding = np.array(4 - (circulantSize % 4))
    m,n = binaryMatrix.shape
    #print(m)
    #print(n)
    assert( m % circulantSize == 0)
    assert (n % circulantSize == 0)
    M = m // circulantSize
    N = n // circulantSize
    hexName = ''
    for r in range(M):
        for k in range(N):
            # First row of circulant block (r, k), padded to a multiple of 4.
            nextLine = np.hstack((leftPadding, binaryMatrix[ r * circulantSize , k * circulantSize : (k + 1) * circulantSize]))
            hexArray, hexString = binaryArraytoHex(nextLine)
            hexName = hexName + hexString
    return hexName
def saveCodeInstance(parityMatrix, circulantSize, codewordSize, evaluationData = None, path = None, evaluationTime = 0, numberOfNonZero = 0, fileName = None):
    """Save a parity matrix (plus optional evaluation stats) as a .mat file.

    When fileName is None a content-addressed name is derived:
    '<circulantSize>_<M>_<N>_<sha224 of the matrix's hex form>'.
    Returns the fileName used (no path, no extension).

    NOTE(review): `path` is string-concatenated with the name, so it must
    already end with a separator — confirm callers guarantee this (and
    that path is never None on this code path).
    """
    print("*** in saveCodeInstance ...")
    m, n = parityMatrix.shape
    M = m // circulantSize
    N = n // circulantSize
    if fileName == None:
        # Content-addressed name: identical codes map to the same file.
        fileName = binaryMatrixToHexString(parityMatrix, circulantSize)
        fileNameSHA224 = str(circulantSize) + '_' + str(M) + '_' + str(N) + '_' + str(hashlib.sha224(str(fileName).encode('utf-8')).hexdigest())
        fileNameWithPath = path + fileNameSHA224
    else:
        fileNameWithPath = path + fileName
    print("*** " + fileName)
    workspaceDict = {}
    workspaceDict['parityMatrix'] = parityMatrix
    workspaceDict['fileName'] = fileName
    if evaluationData != None:
        # Unpack scatter and averaged SNR/BER/iteration statistics.
        scatterSNR, scatterBER, scatterITR, snrAxis, averageSnrAxis, berData, averageNumberOfIterations = evaluationData.getStatsV2()
        workspaceDict['snrData'] = scatterSNR
        workspaceDict['berData'] = scatterBER
        workspaceDict['itrData'] = scatterITR
        workspaceDict['averageSnrAxis'] = averageSnrAxis
        workspaceDict['averageNumberOfIterations'] = averageNumberOfIterations
        workspaceDict['evaluationTime'] = evaluationTime
        workspaceDict['nonZero'] = numberOfNonZero
    scipy.io.savemat((fileNameWithPath + '.mat'), workspaceDict)
    #evaluationData.plotStats(codewordSize, fileNameWithPath)
    print("*** Finishing saveCodeInstance !")
    return fileName
def testFileHandler():
    """Smoke test: load the near-earth generator and parity matrices.

    NOTE(review): projectDir can be a pathlib.Path (env-var fallback), and
    Path + str raises TypeError — confirm LDPC is set when running this.
    """
    nearEarthGenerator = readMatrixFromFile(projectDir + '/codeMatrices/nearEarthGenerator.txt', 7154, 8176, 511, True, True, True)
    nearEarthParity = readMatrixFromFile(projectDir + '/codeMatrices/nearEarthParity.txt', 1022, 8176, 511, True, False, False)
    return 'OK'
def plotResults(path, makeMat = False):
    """Walk `path`, load every .mat result file and plot SNR vs BER curves.

    Returns (evaluation times, counts of early-stopped evaluations indexed
    by how many SNR points they reached, average iteration counts at the
    highest SNR point).

    NOTE(review): `makeMat` is accepted but never used.
    """
    i = 10  # Figure numbering starts at 10; incremented once per file.
    evaluationFaildAt = np.zeros(4, dtype = FILE_HANDLER_INT_DATA_TYPE)
    evalTimes = []
    numberOfIterationsAtHigh = []
    for root, dirs, files in os.walk(path):
        for file in files:
            if str(file).endswith('.mat'):
                i = i + 1
                mat = scipy.io.loadmat(str(os.path.join(root, file)))
                snrAxis = mat['snrAxis']
                snrActual = mat['averageSnrAxis']
                # Fewer than 3 SNR points means the evaluation stopped
                # early; record at which point it failed.
                if len(snrAxis) < 3:
                    evaluationFaildAt[len(snrAxis)] = evaluationFaildAt[len(snrAxis)] + 1
                berAxis = mat['berData']
                # Older result files lack these keys; skip them silently.
                if ('evaluationTime' in mat.keys()):
                    evalTimes.append(mat['evaluationTime'])
                    averageNumberOfIterations = mat['averageNumberOfIterations']
                    numberOfIterationsAtHigh.append(averageNumberOfIterations[-1])
                common.plotSNRvsBER(snrActual, berAxis, fileName = None, inputLabel = '', figureNumber = i, figureName = str(file))
            else:
                pass
    return evalTimes, evaluationFaildAt, numberOfIterationsAtHigh
#plt.imshow(nearEarthParity)
#nearEarthParity = readMatrixFromFile('/home/oss22/swift/swift/codeMatrices/nearEarthParity.txt', 1022, 8176, 511, True, False, False)
#import networkx as nx
#from networkx.algorithms import bipartite
#B = nx.Graph()
#B.add_nodes_from(range(1022), bipartite=0)
#B.add_nodes_from(range(1022, 7156 + 1022), bipartite=1)
# Add edges only between nodes of opposite node sets
#for i in range(8176):
# for j in range(1022):
# if nearEarthParity[j,i] != 0:
# B.add_edges_from([(j, 7156 + i)])
#X, Y = bipartite.sets(B)
#pos = dict()
#pos.update( (n, (1, i)) for i, n in enumerate(X) )
#pos.update( (n, (2, i)) for i, n in enumerate(Y) )
#nx.draw(B, pos=pos)
#plt.show()
| 38.197232
| 172
| 0.621343
| 1,254
| 11,039
| 5.37799
| 0.250399
| 0.029656
| 0.057977
| 0.071619
| 0.213078
| 0.197805
| 0.126928
| 0.116696
| 0.105724
| 0.084371
| 0
| 0.031883
| 0.266963
| 11,039
| 288
| 173
| 38.329861
| 0.801532
| 0.151191
| 0
| 0.10101
| 1
| 0
| 0.039043
| 0.012764
| 0
| 0
| 0
| 0
| 0.015152
| 1
| 0.050505
| false
| 0.010101
| 0.040404
| 0
| 0.141414
| 0.015152
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
7cffa5673d098c5404a18e4042db11fef2170e1f
| 6,540
|
py
|
Python
|
common/OpTestASM.py
|
kyle-ibm/op-test
|
df8dbf8cbff1390668c22632052adb46ebf277c1
|
[
"Apache-2.0"
] | null | null | null |
common/OpTestASM.py
|
kyle-ibm/op-test
|
df8dbf8cbff1390668c22632052adb46ebf277c1
|
[
"Apache-2.0"
] | null | null | null |
common/OpTestASM.py
|
kyle-ibm/op-test
|
df8dbf8cbff1390668c22632052adb46ebf277c1
|
[
"Apache-2.0"
] | 1
|
2021-05-25T11:33:18.000Z
|
2021-05-25T11:33:18.000Z
|
#!/usr/bin/env python3
# encoding=utf8
# IBM_PROLOG_BEGIN_TAG
# This is an automatically generated prolog.
#
# $Source: op-test-framework/common/OpTestASM.py $
#
# OpenPOWER Automated Test Project
#
# Contributors Listed Below - COPYRIGHT 2017
# [+] International Business Machines Corp.
#
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied. See the License for the specific language governing
# permissions and limitations under the License.
#
# IBM_PROLOG_END_TAG
'''
OpTestASM: Advanced System Management (FSP Web UI)
--------------------------------------------------
This class can contains common functions which are useful for
FSP ASM Web page. Some functionality is only accessible through
the FSP Web UI (such as progress codes), so we scrape it.
'''
import time
import subprocess
import os
import pexpect
import sys
import subprocess
from .OpTestConstants import OpTestConstants as BMC_CONST
from .OpTestError import OpTestError
import http.cookiejar
import urllib.request
import urllib.parse
import urllib.error
import re
import ssl
class OpTestASM:
    '''
    Drives the FSP Advanced System Management (ASM) web UI by posting
    CGI forms to https://<fsp>/cgi-bin/cgi.

    TLS certificate verification is disabled because the target is an
    FSP service processor (typically a self-signed certificate).
    '''
    def __init__(self, i_fspIP, i_fspUser, i_fspPasswd):
        '''
        Set up an opener with a cookie jar (session state) and an
        unverified TLS context, then detect the firmware level.

        :param i_fspIP: FSP hostname or IP address
        :param i_fspUser: ASM login user name
        :param i_fspPasswd: ASM login password
        '''
        self.host_name = i_fspIP
        self.user_name = i_fspUser
        self.password = i_fspPasswd
        self.url = "https://%s/cgi-bin/cgi?" % self.host_name
        # cookie jar doubles as the "are we logged in" indicator (see login)
        self.cj = http.cookiejar.CookieJar()
        context = ssl.create_default_context()
        context.check_hostname = False
        context.verify_mode = ssl.CERT_NONE
        opener = urllib.request.build_opener(urllib.request.HTTPSHandler(context=context))
        opener.addheaders = [('User-agent', 'LTCTest')]
        opener.add_handler(urllib.request.HTTPCookieProcessor(self.cj))
        # installed globally: every urllib.request call in this process
        # goes through this opener
        urllib.request.install_opener(opener)
        self.setforms()
    def setforms(self):
        '''
        Pick the CGI form numbers for this firmware level: FW860
        (labelled 'p8' here) uses different form ids than older
        firmware ('p7').
        '''
        if "FW860" in self.ver():
            self.hrdwr = 'p8'
            self.frms = {'pwr': '59',
                         'dbg': '78',
                         'immpwroff': '32'}
        else:
            self.hrdwr = 'p7'
            self.frms = {'pwr': '60',
                         'dbg': '79',
                         'immpwroff': '33'}
    def getcsrf(self, form):
        '''
        Fetch the page for *form* and return its CSRF_TOKEN value,
        or '0' when the page carries no token.

        NOTE(review): retries forever (2 s sleep) on URLError — there is
        no upper bound on the retry loop.
        '''
        while True:
            try:
                myurl = urllib.request.urlopen(self.url+form, timeout=10)
            except urllib.error.URLError:
                time.sleep(2)
                continue
            break
        out = myurl.read().decode("utf-8")
        if 'CSRF_TOKEN' in out:
            return re.findall('CSRF_TOKEN.*value=\'(.*)\'', out)[0]
        else:
            return '0'
    def getpage(self, form):
        '''Return the decoded body of the page behind *form*.'''
        myurl = urllib.request.urlopen(self.url+form, timeout=60)
        return myurl.read().decode("utf-8")
    def submit(self, form, param):
        '''
        POST *param* to *form*, first filling in a fresh CSRF token.
        Returns the HTTP response object.
        '''
        param['CSRF_TOKEN'] = self.getcsrf(form)
        data = urllib.parse.urlencode(param).encode("utf-8")
        req = urllib.request.Request(self.url+form, data)
        return urllib.request.urlopen(req)
    def login(self):
        '''
        Log in to the ASM UI (form=2). A non-empty cookie jar is taken
        as proof of an existing session. Retries the login up to two
        times; raises OpTestError if the FSP reports 'Too many users'.
        Returns True on success, False after exhausting retries.
        '''
        if not len(self.cj) == 0:
            return True
        param = {'user': self.user_name,
                 'password': self.password,
                 'login': 'Log in',
                 'lang': '0',
                 'CSRF_TOKEN': ''}
        form = "form=2"
        resp = self.submit(form, param)
        count = 0
        while count < 2:
            if not len(self.cj) == 0:
                break
            # the login can quietly fail because the FSP has 'too many users' logged in,
            # even though it actually doesn't. let's check to see if this is the case
            # by trying a request.
            if "Too many users" in self.getpage("form=2"):
                raise OpTestError("FSP reports 'Too many users', FSP needs power cycle")
            time.sleep(10)
            self.submit(form, param)
            msg = "Login failed with user:{0} and password:{1}".format(
                self.user_name, self.password)
            print(msg)
            count += 1
        if count == 2:
            print(msg)
            return False
        return True
    def logout(self):
        '''End the ASM session (form=1, 'Log out').'''
        param = {'submit': 'Log out',
                 'CSRF_TOKEN': ''}
        form = "form=1"
        self.submit(form, param)
    def ver(self):
        '''Return the raw form=1 page; used to sniff the firmware level.'''
        form = "form=1"
        return self.getpage(form)
    def execommand(self, cmd):
        '''
        Run a shell command *cmd* on the FSP via the ASM command form
        (form=16). Raises OpTestError if login fails.
        '''
        if not self.login():
            raise OpTestError("Failed to login ASM page")
        param = {'form': '16',
                 'exe': 'Execute',
                 'CSRF_TOKEN': '',
                 'cmd': cmd}
        form = "form=16&frm=0"
        self.submit(form, param)
    def disablefirewall(self):
        '''Flush the FSP's iptables rules, then log out.'''
        if not self.login():
            raise OpTestError("Failed to login ASM page")
        self.execommand('iptables -F')
        self.logout()
    def clearlogs(self):
        '''Clear all FSP error/event log entries (form=30), then log out.'''
        if not self.login():
            raise OpTestError("Failed to login ASM page")
        param = {'form': '30',
                 'clear': "Clear all error/event log entries",
                 'CSRF_TOKEN': ''}
        form = "form=30"
        self.submit(form, param)
        self.logout()
    def powerstat(self):
        '''Return the power-status page (form id depends on firmware).'''
        form = "form=%s" % self.frms['pwr']
        return self.getpage(form)
    def start_debugvtty_session(self, partitionId='0', sessionId='0',
                                sessionTimeout='600'):
        '''
        Start a debug vtty session (form=81) for the given partition,
        session id and timeout (all passed as strings), then log out.
        '''
        if not self.login():
            raise OpTestError("Failed to login ASM page")
        param = {'form': '81',
                 'p': partitionId,
                 's': sessionId,
                 't': sessionTimeout,
                 'Save settings': 'Save settings',
                 'CSRF_TOKEN': ''}
        form = "form=81"
        self.submit(form, param)
        self.logout()
    def enable_err_injct_policy(self):
        '''Enable the error-injection policy (form=56, p=1), then log out.'''
        if not self.login():
            raise OpTestError("Failed to login ASM page")
        param = {'form': '56',
                 'p': '1',
                 'submit': 'Save settings',
                 'CSRF_TOKEN': ''}
        form = "form=56"
        self.submit(form, param)
        self.logout()
| 31.902439
| 90
| 0.555657
| 762
| 6,540
| 4.711286
| 0.374016
| 0.032591
| 0.027298
| 0.037047
| 0.193593
| 0.157382
| 0.124791
| 0.106964
| 0.083008
| 0.083008
| 0
| 0.016988
| 0.324924
| 6,540
| 204
| 91
| 32.058824
| 0.796149
| 0.193119
| 0
| 0.284722
| 1
| 0
| 0.130509
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.097222
| false
| 0.034722
| 0.097222
| 0
| 0.263889
| 0.013889
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
86226920fab3327506a58d2f239e976d2e4d87d4
| 634
|
py
|
Python
|
games/migrations/0002_auto_20201026_1221.py
|
IceArrow256/game-list
|
5f06e0ff80023acdc0290a9a8f814f7c93b45e0e
|
[
"Unlicense"
] | 3
|
2020-10-19T12:33:37.000Z
|
2020-10-21T05:28:35.000Z
|
games/migrations/0002_auto_20201026_1221.py
|
IceArrow256/gamelist
|
5f06e0ff80023acdc0290a9a8f814f7c93b45e0e
|
[
"Unlicense"
] | null | null | null |
games/migrations/0002_auto_20201026_1221.py
|
IceArrow256/gamelist
|
5f06e0ff80023acdc0290a9a8f814f7c93b45e0e
|
[
"Unlicense"
] | null | null | null |
# Generated by Django 3.1.2 on 2020-10-26 12:21
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Allow NULL for Game.score and Game.series (generated by Django)."""

    dependencies = [
        ('games', '0001_initial'),
    ]

    operations = [
        # score becomes optional (null=True)
        migrations.AlterField(
            model_name='game',
            name='score',
            field=models.FloatField(null=True, verbose_name='Score'),
        ),
        # a game no longer has to belong to a series; deleting a series
        # still cascades to its games
        migrations.AlterField(
            model_name='game',
            name='series',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='games.series'),
        ),
    ]
| 25.36
| 111
| 0.600946
| 69
| 634
| 5.449275
| 0.57971
| 0.06383
| 0.074468
| 0.117021
| 0.196809
| 0.196809
| 0
| 0
| 0
| 0
| 0
| 0.041215
| 0.272871
| 634
| 24
| 112
| 26.416667
| 0.774403
| 0.070978
| 0
| 0.333333
| 1
| 0
| 0.09029
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.111111
| 0
| 0.277778
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
863e8a2ed0006f7150de09f27d406b39ae986ad3
| 827
|
py
|
Python
|
saleor/order/migrations/0081_auto_20200406_0456.py
|
fairhopeweb/saleor
|
9ac6c22652d46ba65a5b894da5f1ba5bec48c019
|
[
"CC-BY-4.0"
] | 15,337
|
2015-01-12T02:11:52.000Z
|
2021-10-05T19:19:29.000Z
|
saleor/order/migrations/0081_auto_20200406_0456.py
|
fairhopeweb/saleor
|
9ac6c22652d46ba65a5b894da5f1ba5bec48c019
|
[
"CC-BY-4.0"
] | 7,486
|
2015-02-11T10:52:13.000Z
|
2021-10-06T09:37:15.000Z
|
saleor/order/migrations/0081_auto_20200406_0456.py
|
aminziadna/saleor
|
2e78fb5bcf8b83a6278af02551a104cfa555a1fb
|
[
"CC-BY-4.0"
] | 5,864
|
2015-01-16T14:52:54.000Z
|
2021-10-05T23:01:15.000Z
|
# Generated by Django 3.0.4 on 2020-04-06 09:56
from django.db import migrations
from saleor.order import OrderStatus
def match_orders_with_users(apps, *_args, **_kwargs):
    """Attach existing user accounts to anonymous non-draft orders.

    For every order that has a customer email but no linked user, look
    up a user account with that exact email and, when one exists, store
    it on the order. Orders whose email matches no account are left
    untouched.
    """
    Order = apps.get_model("order", "Order")
    User = apps.get_model("account", "User")
    unassigned = Order.objects.filter(
        user_email__isnull=False, user=None
    ).exclude(status=OrderStatus.DRAFT)
    for candidate in unassigned:
        try:
            owner = User.objects.get(email=candidate.user_email)
        except User.DoesNotExist:
            # no account registered under this email; skip the order
            continue
        candidate.user = owner
        candidate.save(update_fields=["user"])
class Migration(migrations.Migration):
    """Data migration: backfill Order.user from matching account emails."""

    dependencies = [
        ("order", "0080_invoice"),
    ]

    operations = [
        # forward-only: no reverse function is supplied to RunPython
        migrations.RunPython(match_orders_with_users),
    ]
| 25.060606
| 63
| 0.665054
| 102
| 827
| 5.176471
| 0.568627
| 0.051136
| 0.056818
| 0.075758
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.029688
| 0.226119
| 827
| 32
| 64
| 25.84375
| 0.795313
| 0.054414
| 0
| 0
| 1
| 0
| 0.053846
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.045455
| false
| 0
| 0.090909
| 0
| 0.272727
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
865fae0cf0882393868b033ff9b36122ab7504f2
| 76,601
|
py
|
Python
|
src/saml2/saml.py
|
masterapps-au/pysaml2
|
97ad6c066c93cb31a3c3b9d504877c02e93ca9a9
|
[
"Apache-2.0"
] | null | null | null |
src/saml2/saml.py
|
masterapps-au/pysaml2
|
97ad6c066c93cb31a3c3b9d504877c02e93ca9a9
|
[
"Apache-2.0"
] | null | null | null |
src/saml2/saml.py
|
masterapps-au/pysaml2
|
97ad6c066c93cb31a3c3b9d504877c02e93ca9a9
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
#
# Generated Mon May 2 14:23:33 2011 by parse_xsd.py version 0.4.
#
# A summary of available specifications can be found at:
# https://wiki.oasis-open.org/security/FrontPage
#
# saml core specifications to be found at:
# if any question arise please query the following pdf.
# http://docs.oasis-open.org/security/saml/v2.0/saml-core-2.0-os.pdf
# The specification was later updated with errata, and the new version is here:
# https://www.oasis-open.org/committees/download.php/56776/sstc-saml-core-errata-2.0-wd-07.pdf
#
try:
from base64 import encodebytes as b64encode
except ImportError:
from base64 import b64encode
from saml2.validate import valid_ipv4, MustValueError
from saml2.validate import valid_ipv6
from saml2.validate import ShouldValueError
from saml2.validate import valid_domain_name
import saml2
from saml2 import SamlBase
import six
from saml2 import xmldsig as ds
from saml2 import xmlenc as xenc
# authentication information fields
NAMESPACE = 'urn:oasis:names:tc:SAML:2.0:assertion'
# xmlschema definition
XSD = "xs"
# xmlschema templates and extensions
XS_NAMESPACE = 'http://www.w3.org/2001/XMLSchema'
# xmlschema-instance, which contains several builtin attributes
XSI_NAMESPACE = 'http://www.w3.org/2001/XMLSchema-instance'
# xml soap namespace
NS_SOAP_ENC = "http://schemas.xmlsoap.org/soap/encoding/"
# type definitions for xmlschemas
XSI_TYPE = '{%s}type' % XSI_NAMESPACE
# nil type definition for xmlschemas
XSI_NIL = '{%s}nil' % XSI_NAMESPACE
# idp and sp communicate usually about a subject (NameID)
# the format determines the category the subject is in
# custom subject
NAMEID_FORMAT_UNSPECIFIED = (
    "urn:oasis:names:tc:SAML:1.1:nameid-format:unspecified")
# subject as email address
NAMEID_FORMAT_EMAILADDRESS = (
    "urn:oasis:names:tc:SAML:1.1:nameid-format:emailAddress")
# subject as x509 key
NAMEID_FORMAT_X509SUBJECTNAME = (
    "urn:oasis:names:tc:SAML:1.1:nameid-format:X509SubjectName")
# subject as windows domain name
NAMEID_FORMAT_WINDOWSDOMAINQUALIFIEDNAME = (
    "urn:oasis:names:tc:SAML:1.1:nameid-format:WindowsDomainQualifiedName")
# subject from a kerberos instance
NAMEID_FORMAT_KERBEROS = (
    "urn:oasis:names:tc:SAML:2.0:nameid-format:kerberos")
# subject as name
NAMEID_FORMAT_ENTITY = (
    "urn:oasis:names:tc:SAML:2.0:nameid-format:entity")
# linked subject
NAMEID_FORMAT_PERSISTENT = (
    "urn:oasis:names:tc:SAML:2.0:nameid-format:persistent")
# anonymous subject
NAMEID_FORMAT_TRANSIENT = (
    "urn:oasis:names:tc:SAML:2.0:nameid-format:transient")
# subject available in encrypted format
NAMEID_FORMAT_ENCRYPTED = (
    "urn:oasis:names:tc:SAML:2.0:nameid-format:encrypted")
# mapping of the available name-id formats
NAMEID_FORMATS_SAML2 = (
    ('NAMEID_FORMAT_EMAILADDRESS', NAMEID_FORMAT_EMAILADDRESS),
    ('NAMEID_FORMAT_ENCRYPTED', NAMEID_FORMAT_ENCRYPTED),
    ('NAMEID_FORMAT_ENTITY', NAMEID_FORMAT_ENTITY),
    ('NAMEID_FORMAT_PERSISTENT', NAMEID_FORMAT_PERSISTENT),
    ('NAMEID_FORMAT_TRANSIENT', NAMEID_FORMAT_TRANSIENT),
    ('NAMEID_FORMAT_UNSPECIFIED', NAMEID_FORMAT_UNSPECIFIED),
)
# a profile outlines a set of rules describing how to embed SAML assertions.
# https://docs.oasis-open.org/security/saml/v2.0/saml-profiles-2.0-os.pdf
# The specification was later updated with errata, and the new version is here:
# https://www.oasis-open.org/committees/download.php/56782/sstc-saml-profiles-errata-2.0-wd-07.pdf
# XML based values for SAML attributes
PROFILE_ATTRIBUTE_BASIC = (
    "urn:oasis:names:tc:SAML:2.0:profiles:attribute:basic")
# an AuthnRequest is made to initiate authentication
# authenticate the request with login credentials
AUTHN_PASSWORD = "urn:oasis:names:tc:SAML:2.0:ac:classes:Password"
# authenticate the request with login credentials, over tls/https
AUTHN_PASSWORD_PROTECTED = \
    "urn:oasis:names:tc:SAML:2.0:ac:classes:PasswordProtectedTransport"
# attribute statements is key:value metadata shared with your app
# custom format
NAME_FORMAT_UNSPECIFIED = (
    "urn:oasis:names:tc:SAML:2.0:attrname-format:unspecified")
# uri format
NAME_FORMAT_URI = "urn:oasis:names:tc:SAML:2.0:attrname-format:uri"
# XML-based format
NAME_FORMAT_BASIC = "urn:oasis:names:tc:SAML:2.0:attrname-format:basic"
# mapping of the available attribute-name formats
NAME_FORMATS_SAML2 = (
    ('NAME_FORMAT_BASIC', NAME_FORMAT_BASIC),
    ('NAME_FORMAT_URI', NAME_FORMAT_URI),
    ('NAME_FORMAT_UNSPECIFIED', NAME_FORMAT_UNSPECIFIED),
)
# the SAML authority's decision can be predetermined by arbitrary context
# the specified action is permitted
DECISION_TYPE_PERMIT = "Permit"
# the specified action is denied
DECISION_TYPE_DENY = "Deny"
# the SAML authority cannot determine if the action is permitted or denied
DECISION_TYPE_INDETERMINATE = "Indeterminate"
# consent attributes determine whether consent has been given and under
# what conditions
# no claim to consent is made
CONSENT_UNSPECIFIED = "urn:oasis:names:tc:SAML:2.0:consent:unspecified"
# consent has been obtained
CONSENT_OBTAINED = "urn:oasis:names:tc:SAML:2.0:consent:obtained"
# consent has been obtained before the message has been initiated
CONSENT_PRIOR = "urn:oasis:names:tc:SAML:2.0:consent:prior"
# consent has been obtained implicitly
CONSENT_IMPLICIT = "urn:oasis:names:tc:SAML:2.0:consent:current-implicit"
# consent has been obtained explicitly
CONSENT_EXPLICIT = "urn:oasis:names:tc:SAML:2.0:consent:current-explicit"
# no consent has been obtained
CONSENT_UNAVAILABLE = "urn:oasis:names:tc:SAML:2.0:consent:unavailable"
# no consent is needed.
CONSENT_INAPPLICABLE = "urn:oasis:names:tc:SAML:2.0:consent:inapplicable"
# Subject confirmation methods(scm), can be issued, besides the subject itself
# by third parties.
# http://docs.oasis-open.org/wss/oasis-wss-saml-token-profile-1.0.pdf
# the 3rd party is identified on behalf of the subject given private/public key
SCM_HOLDER_OF_KEY = "urn:oasis:names:tc:SAML:2.0:cm:holder-of-key"
# the 3rd party is identified by subject confirmation and must include a security header
# signing its content.
SCM_SENDER_VOUCHES = "urn:oasis:names:tc:SAML:2.0:cm:sender-vouches"
# a bearer token is issued instead.
SCM_BEARER = "urn:oasis:names:tc:SAML:2.0:cm:bearer"
class AttributeValueBase(SamlBase):
    """Base class for SAML AttributeValue elements.

    Maps Python values to and from the element's text content, keeping
    the xsi:type annotation in sync and marking empty elements with
    xsi:nil="true".
    """
    def __init__(self,
                 text=None,
                 extension_elements=None,
                 extension_attributes=None):
        # _extatt buffers extension attributes assigned before
        # self.extension_attributes exists (see set_type's fallback)
        self._extatt = {}
        # text is deliberately passed as None here; set_text() below
        # performs the type-aware assignment instead
        SamlBase.__init__(self,
                          text=None,
                          extension_elements=extension_elements,
                          extension_attributes=extension_attributes)
        if self._extatt:
            self.extension_attributes = self._extatt
        if text:
            self.set_text(text)
        elif not extension_elements:
            # no text and no children: mark the element explicitly nil
            self.extension_attributes = {XSI_NIL: 'true'}
        elif XSI_TYPE in self.extension_attributes:
            # children but no text: a type annotation makes no sense
            del self.extension_attributes[XSI_TYPE]
    def __setattr__(self, key, value):
        # route `obj.text = value` through the type-aware setter
        if key == "text":
            self.set_text(value)
        else:
            SamlBase.__setattr__(self, key, value)
    def verify(self):
        """Check that an empty element is properly marked xsi:nil."""
        if not self.text and not self.extension_elements:
            if not self.extension_attributes:
                raise Exception(
                    "Attribute value base should not have extension attributes"
                )
            # NOTE(review): raises KeyError if extension attributes exist
            # but XSI_NIL is absent — confirm that is the intended failure
            if self.extension_attributes[XSI_NIL] != "true":
                raise Exception(
                    "Attribute value base should not have extension attributes"
                )
            return True
        else:
            SamlBase.verify(self)
    def set_type(self, typ):
        """Set the xsi:type annotation to *typ*, clearing any nil marker.

        Also declares the xs/xsd namespace prefix when *typ* uses one.
        Falls back to the _extatt buffer when extension_attributes does
        not exist yet (during __init__).
        """
        try:
            del self.extension_attributes[XSI_NIL]
        except (AttributeError, KeyError):
            pass
        try:
            self.extension_attributes[XSI_TYPE] = typ
        except AttributeError:
            self._extatt[XSI_TYPE] = typ
        if typ.startswith('xs:'):
            try:
                self.extension_attributes['xmlns:xs'] = XS_NAMESPACE
            except AttributeError:
                self._extatt['xmlns:xs'] = XS_NAMESPACE
        if typ.startswith('xsd:'):
            try:
                self.extension_attributes['xmlns:xsd'] = XS_NAMESPACE
            except AttributeError:
                self._extatt['xmlns:xsd'] = XS_NAMESPACE
    def get_type(self):
        """Return the xsi:type annotation, or '' when none is set."""
        try:
            return self.extension_attributes[XSI_TYPE]
        except (KeyError, AttributeError):
            try:
                return self._extatt[XSI_TYPE]
            except KeyError:
                return ""
    def clear_type(self):
        """Remove any xsi:type annotation (both live and buffered)."""
        try:
            del self.extension_attributes[XSI_TYPE]
        except KeyError:
            pass
        try:
            del self._extatt[XSI_TYPE]
        except KeyError:
            pass
    def set_text(self, value, base64encode=False):
        """Store *value* as the element text, validating it against the
        element's xsd type (or inferring one from the Python type).

        Raises ValueError when the value cannot be represented as the
        declared xsd type. Returns self.
        """
        def _wrong_type_value(xsd, value):
            msg = 'Type and value do not match: {xsd}:{type}:{value}'
            msg = msg.format(xsd=xsd, type=type(value), value=value)
            raise ValueError(msg)
        # only work with six.string_types
        _str = unicode if six.PY2 else str
        if isinstance(value, six.binary_type):
            value = value.decode('utf-8')
        # python type -> default xsd type name, used when no xsi:type is set
        type_to_xsd = {
            _str: 'string',
            int: 'integer',
            float: 'float',
            bool: 'boolean',
            type(None): '',
        }
        # entries of xsd-types each declaring:
        # - a corresponding python type
        # - a function to turn a string into that type
        # - a function to turn that type into a text-value
        xsd_types_props = {
            'string': {
                'type': _str,
                'to_type': _str,
                'to_text': _str,
            },
            'integer': {
                'type': int,
                'to_type': int,
                'to_text': _str,
            },
            'short': {
                'type': int,
                'to_type': int,
                'to_text': _str,
            },
            'int': {
                'type': int,
                'to_type': int,
                'to_text': _str,
            },
            'long': {
                'type': int,
                'to_type': int,
                'to_text': _str,
            },
            'float': {
                'type': float,
                'to_type': float,
                'to_text': _str,
            },
            'double': {
                'type': float,
                'to_type': float,
                'to_text': _str,
            },
            'boolean': {
                'type': bool,
                # only the literal strings 'true'/'false' (any case) parse;
                # anything else raises KeyError, caught as a failed cast below
                'to_type': lambda x: {
                    'true': True,
                    'false': False,
                }[_str(x).lower()],
                'to_text': lambda x: _str(x).lower(),
            },
            'base64Binary': {
                'type': _str,
                'to_type': _str,
                'to_text': (
                    lambda x: b64encode(x.encode()) if base64encode else x
                ),
            },
            'anyType': {
                'type': type(value),
                'to_type': lambda x: x,
                'to_text': lambda x: x,
            },
            '': {
                'type': type(None),
                'to_type': lambda x: None,
                'to_text': lambda x: '',
            },
        }
        # resolve the xsd type name: forced base64Binary, else the
        # element's declared xsi:type, else inferred from the value
        xsd_string = (
            'base64Binary' if base64encode
            else self.get_type()
            or type_to_xsd.get(type(value)))
        # split an optional namespace prefix off the xsd type name
        xsd_ns, xsd_type = (
            ['', type(None)] if xsd_string is None
            else ['', ''] if xsd_string == ''
            else [
                XSD if xsd_string in xsd_types_props else '',
                xsd_string
            ] if ':' not in xsd_string
            else xsd_string.split(':', 1))
        xsd_type_props = xsd_types_props.get(xsd_type, {})
        valid_type = xsd_type_props.get('type', type(None))
        to_type = xsd_type_props.get('to_type', str)
        to_text = xsd_type_props.get('to_text', str)
        # cast to correct type before type-checking
        if type(value) is _str and valid_type is not _str:
            try:
                value = to_type(value)
            except (TypeError, ValueError, KeyError):
                # the cast failed
                _wrong_type_value(xsd=xsd_type, value=value)
        if type(value) is not valid_type:
            _wrong_type_value(xsd=xsd_type, value=value)
        text = to_text(value)
        # record the (possibly namespaced) type we ended up using
        self.set_type(
            '{ns}:{type}'.format(ns=xsd_ns, type=xsd_type) if xsd_ns
            else xsd_type if xsd_type
            else '')
        # bypass our __setattr__ hook to avoid recursing into set_text
        SamlBase.__setattr__(self, 'text', text)
        return self
    def harvest_element_tree(self, tree):
        """Populate this instance from an ElementTree node *tree*."""
        # Fill in the instance members from the contents of the XML tree.
        for child in tree:
            self._convert_element_tree_to_member(child)
        for attribute, value in iter(tree.attrib.items()):
            self._convert_element_attribute_to_member(attribute, value)
        # if we have added children to this node
        # we consider whitespace insignificant
        # and remove/trim/strip whitespace
        # and expect to not have actual text content
        text = (
            tree.text.strip()
            if tree.text and self.extension_elements
            else tree.text
        )
        if text:
            #print("set_text:", tree.text)
            # clear type
            #self.clear_type()
            self.set_text(text)
        # if we have added a text node
        # or other children to this node
        # remove the nil marker
        if text or self.extension_elements:
            if XSI_NIL in self.extension_attributes:
                del self.extension_attributes[XSI_NIL]
class BaseIDAbstractType_(SamlBase):
    """The urn:oasis:names:tc:SAML:2.0:assertion:BaseIDAbstractType element """
    c_tag = 'BaseIDAbstractType'
    c_namespace = NAMESPACE
    # copy the parent's registries so additions here do not leak upward
    c_children = SamlBase.c_children.copy()
    c_attributes = SamlBase.c_attributes.copy()
    c_child_order = SamlBase.c_child_order[:]
    c_cardinality = SamlBase.c_cardinality.copy()
    # optional qualifier attributes (attribute name -> member, xsd type, required)
    c_attributes['NameQualifier'] = ('name_qualifier', 'string', False)
    c_attributes['SPNameQualifier'] = ('sp_name_qualifier', 'string', False)
    def __init__(self,
                 name_qualifier=None,
                 sp_name_qualifier=None,
                 text=None,
                 extension_elements=None,
                 extension_attributes=None):
        SamlBase.__init__(self,
                          text=text,
                          extension_elements=extension_elements,
                          extension_attributes=extension_attributes)
        self.name_qualifier = name_qualifier
        self.sp_name_qualifier = sp_name_qualifier
class NameIDType_(SamlBase):
    """The urn:oasis:names:tc:SAML:2.0:assertion:NameIDType element """
    c_tag = 'NameIDType'
    c_namespace = NAMESPACE
    # element content is a plain string
    c_value_type = {'base': 'string'}
    c_children = SamlBase.c_children.copy()
    c_attributes = SamlBase.c_attributes.copy()
    c_child_order = SamlBase.c_child_order[:]
    c_cardinality = SamlBase.c_cardinality.copy()
    # all four XML attributes are optional
    c_attributes['NameQualifier'] = ('name_qualifier', 'string', False)
    c_attributes['SPNameQualifier'] = ('sp_name_qualifier', 'string', False)
    c_attributes['Format'] = ('format', 'anyURI', False)
    c_attributes['SPProvidedID'] = ('sp_provided_id', 'string', False)
    def __init__(self,
                 name_qualifier=None,
                 sp_name_qualifier=None,
                 format=None,
                 sp_provided_id=None,
                 text=None,
                 extension_elements=None,
                 extension_attributes=None):
        SamlBase.__init__(self,
                          text=text,
                          extension_elements=extension_elements,
                          extension_attributes=extension_attributes)
        self.name_qualifier = name_qualifier
        self.sp_name_qualifier = sp_name_qualifier
        self.format = format
        self.sp_provided_id = sp_provided_id
def name_id_type__from_string(xml_string):
    """Instantiate NameIDType_ from its XML string serialization."""
    return saml2.create_class_from_xml_string(NameIDType_, xml_string)
class EncryptedElementType_(SamlBase):
    """The urn:oasis:names:tc:SAML:2.0:assertion:EncryptedElementType element
    """
    c_tag = 'EncryptedElementType'
    c_namespace = NAMESPACE
    c_children = SamlBase.c_children.copy()
    c_attributes = SamlBase.c_attributes.copy()
    c_child_order = SamlBase.c_child_order[:]
    c_cardinality = SamlBase.c_cardinality.copy()
    # single xenc:EncryptedData child
    c_children['{http://www.w3.org/2001/04/xmlenc#}EncryptedData'] = (
        'encrypted_data',
        xenc.EncryptedData)
    # zero or more xenc:EncryptedKey children (list-typed member)
    c_children['{http://www.w3.org/2001/04/xmlenc#}EncryptedKey'] = (
        'encrypted_key',
        [xenc.EncryptedKey])
    c_cardinality['encrypted_key'] = {"min": 0}
    c_child_order.extend(['encrypted_data', 'encrypted_key'])
    def __init__(self,
                 encrypted_data=None,
                 encrypted_key=None,
                 text=None,
                 extension_elements=None,
                 extension_attributes=None):
        SamlBase.__init__(self,
                          text=text,
                          extension_elements=extension_elements,
                          extension_attributes=extension_attributes)
        self.encrypted_data = encrypted_data
        self.encrypted_key = encrypted_key or []
def encrypted_element_type__from_string(xml_string):
    """Instantiate EncryptedElementType_ from its XML serialization."""
    return saml2.create_class_from_xml_string(EncryptedElementType_, xml_string)
class EncryptedID(EncryptedElementType_):
    """The urn:oasis:names:tc:SAML:2.0:assertion:EncryptedID element """
    c_tag = 'EncryptedID'
    c_namespace = NAMESPACE
    c_children = EncryptedElementType_.c_children.copy()
    c_attributes = EncryptedElementType_.c_attributes.copy()
    c_child_order = EncryptedElementType_.c_child_order[:]
    c_cardinality = EncryptedElementType_.c_cardinality.copy()
def encrypted_id_from_string(xml_string):
    """Instantiate EncryptedID from its XML serialization."""
    return saml2.create_class_from_xml_string(EncryptedID, xml_string)
class Issuer(NameIDType_):
    """The urn:oasis:names:tc:SAML:2.0:assertion:Issuer element """
    c_tag = 'Issuer'
    c_namespace = NAMESPACE
    c_children = NameIDType_.c_children.copy()
    c_attributes = NameIDType_.c_attributes.copy()
    c_child_order = NameIDType_.c_child_order[:]
    c_cardinality = NameIDType_.c_cardinality.copy()
def issuer_from_string(xml_string):
    """Instantiate Issuer from its XML serialization."""
    return saml2.create_class_from_xml_string(Issuer, xml_string)
class AssertionIDRef(SamlBase):
    """The urn:oasis:names:tc:SAML:2.0:assertion:AssertionIDRef element """
    c_tag = 'AssertionIDRef'
    c_namespace = NAMESPACE
    c_value_type = {'base': 'NCName'}
    c_children = SamlBase.c_children.copy()
    c_attributes = SamlBase.c_attributes.copy()
    c_child_order = SamlBase.c_child_order[:]
    c_cardinality = SamlBase.c_cardinality.copy()
def assertion_id_ref_from_string(xml_string):
    """Instantiate AssertionIDRef from its XML serialization."""
    return saml2.create_class_from_xml_string(AssertionIDRef, xml_string)
class AssertionURIRef(SamlBase):
    """The urn:oasis:names:tc:SAML:2.0:assertion:AssertionURIRef element """
    c_tag = 'AssertionURIRef'
    c_namespace = NAMESPACE
    c_value_type = {'base': 'anyURI'}
    c_children = SamlBase.c_children.copy()
    c_attributes = SamlBase.c_attributes.copy()
    c_child_order = SamlBase.c_child_order[:]
    c_cardinality = SamlBase.c_cardinality.copy()
def assertion_uri_ref_from_string(xml_string):
    """Instantiate AssertionURIRef from its XML serialization."""
    return saml2.create_class_from_xml_string(AssertionURIRef, xml_string)
class SubjectConfirmationDataType_(SamlBase):
    """The urn:oasis:names:tc:SAML:2.0:assertion:SubjectConfirmationDataType
    element """
    c_tag = 'SubjectConfirmationDataType'
    c_namespace = NAMESPACE
    c_children = SamlBase.c_children.copy()
    c_attributes = SamlBase.c_attributes.copy()
    c_child_order = SamlBase.c_child_order[:]
    c_cardinality = SamlBase.c_cardinality.copy()
    # all attributes optional (attribute name -> member, xsd type, required)
    c_attributes['NotBefore'] = ('not_before', 'dateTime', False)
    c_attributes['NotOnOrAfter'] = ('not_on_or_after', 'dateTime', False)
    c_attributes['Recipient'] = ('recipient', 'anyURI', False)
    c_attributes['InResponseTo'] = ('in_response_to', 'NCName', False)
    c_attributes['Address'] = ('address', 'string', False)
    # schema allows arbitrary child elements and attributes (xs:any)
    c_any = {"namespace": "##any", "processContents": "lax", "minOccurs": "0",
             "maxOccurs": "unbounded"}
    c_any_attribute = {"namespace": "##other", "processContents": "lax"}
    def __init__(self,
                 not_before=None,
                 not_on_or_after=None,
                 recipient=None,
                 in_response_to=None,
                 address=None,
                 text=None,
                 extension_elements=None,
                 extension_attributes=None):
        SamlBase.__init__(self,
                          text=text,
                          extension_elements=extension_elements,
                          extension_attributes=extension_attributes)
        self.not_before = not_before
        self.not_on_or_after = not_on_or_after
        self.recipient = recipient
        self.in_response_to = in_response_to
        self.address = address
def subject_confirmation_data_type__from_string(xml_string):
    """Instantiate SubjectConfirmationDataType_ from its XML serialization."""
    return saml2.create_class_from_xml_string(SubjectConfirmationDataType_,
                                              xml_string)
class KeyInfoConfirmationDataType_(SamlBase):
    """The urn:oasis:names:tc:SAML:2.0:assertion:KeyInfoConfirmationDataType
    element """
    c_tag = 'KeyInfoConfirmationDataType'
    c_namespace = NAMESPACE
    c_children = SamlBase.c_children.copy()
    c_attributes = SamlBase.c_attributes.copy()
    c_child_order = SamlBase.c_child_order[:]
    c_cardinality = SamlBase.c_cardinality.copy()
    # one or more ds:KeyInfo children
    c_children['{http://www.w3.org/2000/09/xmldsig#}KeyInfo'] = ('key_info',
                                                                 [ds.KeyInfo])
    c_cardinality['key_info'] = {"min": 1}
    c_child_order.extend(['key_info'])
    def __init__(self,
                 key_info=None,
                 text=None,
                 extension_elements=None,
                 extension_attributes=None):
        SamlBase.__init__(self,
                          text=text,
                          extension_elements=extension_elements,
                          extension_attributes=extension_attributes)
        self.key_info = key_info or []
def key_info_confirmation_data_type__from_string(xml_string):
    """Instantiate KeyInfoConfirmationDataType_ from its XML serialization."""
    return saml2.create_class_from_xml_string(KeyInfoConfirmationDataType_,
                                              xml_string)
class ConditionAbstractType_(SamlBase):
    """The urn:oasis:names:tc:SAML:2.0:assertion:ConditionAbstractType
    element """
    c_tag = 'ConditionAbstractType'
    c_namespace = NAMESPACE
    c_children = SamlBase.c_children.copy()
    c_attributes = SamlBase.c_attributes.copy()
    c_child_order = SamlBase.c_child_order[:]
    c_cardinality = SamlBase.c_cardinality.copy()
class Audience(SamlBase):
    """The urn:oasis:names:tc:SAML:2.0:assertion:Audience element """
    c_tag = 'Audience'
    c_namespace = NAMESPACE
    c_value_type = {'base': 'anyURI'}
    c_children = SamlBase.c_children.copy()
    c_attributes = SamlBase.c_attributes.copy()
    c_child_order = SamlBase.c_child_order[:]
    c_cardinality = SamlBase.c_cardinality.copy()
def audience_from_string(xml_string):
    """Instantiate Audience from its XML serialization."""
    return saml2.create_class_from_xml_string(Audience, xml_string)
class OneTimeUseType_(ConditionAbstractType_):
    """The urn:oasis:names:tc:SAML:2.0:assertion:OneTimeUseType element """
    c_tag = 'OneTimeUseType'
    c_namespace = NAMESPACE
    c_children = ConditionAbstractType_.c_children.copy()
    c_attributes = ConditionAbstractType_.c_attributes.copy()
    c_child_order = ConditionAbstractType_.c_child_order[:]
    c_cardinality = ConditionAbstractType_.c_cardinality.copy()
def one_time_use_type__from_string(xml_string):
    """Instantiate OneTimeUseType_ from its XML serialization."""
    return saml2.create_class_from_xml_string(OneTimeUseType_, xml_string)
class ProxyRestrictionType_(ConditionAbstractType_):
    """The urn:oasis:names:tc:SAML:2.0:assertion:ProxyRestrictionType element
    """
    c_tag = 'ProxyRestrictionType'
    c_namespace = NAMESPACE
    c_children = ConditionAbstractType_.c_children.copy()
    c_attributes = ConditionAbstractType_.c_attributes.copy()
    c_child_order = ConditionAbstractType_.c_child_order[:]
    c_cardinality = ConditionAbstractType_.c_cardinality.copy()
    # zero or more Audience children
    c_children['{urn:oasis:names:tc:SAML:2.0:assertion}Audience'] = ('audience',
                                                                     [Audience])
    c_cardinality['audience'] = {"min": 0}
    c_attributes['Count'] = ('count', 'nonNegativeInteger', False)
    c_child_order.extend(['audience'])
    def __init__(self,
                 audience=None,
                 count=None,
                 text=None,
                 extension_elements=None,
                 extension_attributes=None):
        ConditionAbstractType_.__init__(
            self, text=text, extension_elements=extension_elements,
            extension_attributes=extension_attributes)
        self.audience = audience or []
        self.count = count
def proxy_restriction_type__from_string(xml_string):
    """Instantiate ProxyRestrictionType_ from its XML serialization."""
    return saml2.create_class_from_xml_string(ProxyRestrictionType_, xml_string)
class EncryptedAssertion(EncryptedElementType_):
    """The urn:oasis:names:tc:SAML:2.0:assertion:EncryptedAssertion element """
    c_tag = 'EncryptedAssertion'
    c_namespace = NAMESPACE
    c_children = EncryptedElementType_.c_children.copy()
    c_attributes = EncryptedElementType_.c_attributes.copy()
    c_child_order = EncryptedElementType_.c_child_order[:]
    c_cardinality = EncryptedElementType_.c_cardinality.copy()
def encrypted_assertion_from_string(xml_string):
    """Instantiate EncryptedAssertion from its XML serialization."""
    return saml2.create_class_from_xml_string(EncryptedAssertion, xml_string)
class StatementAbstractType_(SamlBase):
"""The urn:oasis:names:tc:SAML:2.0:assertion:StatementAbstractType element
"""
c_tag = 'StatementAbstractType'
c_namespace = NAMESPACE
c_children = SamlBase.c_children.copy()
c_attributes = SamlBase.c_attributes.copy()
c_child_order = SamlBase.c_child_order[:]
c_cardinality = SamlBase.c_cardinality.copy()
class SubjectLocalityType_(SamlBase):
"""The urn:oasis:names:tc:SAML:2.0:assertion:SubjectLocalityType element """
c_tag = 'SubjectLocalityType'
c_namespace = NAMESPACE
c_children = SamlBase.c_children.copy()
c_attributes = SamlBase.c_attributes.copy()
c_child_order = SamlBase.c_child_order[:]
c_cardinality = SamlBase.c_cardinality.copy()
c_attributes['Address'] = ('address', 'string', False)
c_attributes['DNSName'] = ('dns_name', 'string', False)
def __init__(self,
address=None,
dns_name=None,
text=None,
extension_elements=None,
extension_attributes=None):
SamlBase.__init__(self,
text=text,
extension_elements=extension_elements,
extension_attributes=extension_attributes)
self.address = address
self.dns_name = dns_name
def subject_locality_type__from_string(xml_string):
return saml2.create_class_from_xml_string(SubjectLocalityType_, xml_string)
class AuthnContextClassRef(SamlBase):
"""The urn:oasis:names:tc:SAML:2.0:assertion:AuthnContextClassRef element
"""
c_tag = 'AuthnContextClassRef'
c_namespace = NAMESPACE
c_value_type = {'base': 'anyURI'}
c_children = SamlBase.c_children.copy()
c_attributes = SamlBase.c_attributes.copy()
c_child_order = SamlBase.c_child_order[:]
c_cardinality = SamlBase.c_cardinality.copy()
def authn_context_class_ref_from_string(xml_string):
return saml2.create_class_from_xml_string(AuthnContextClassRef, xml_string)
class AuthnContextDeclRef(SamlBase):
"""The urn:oasis:names:tc:SAML:2.0:assertion:AuthnContextDeclRef element """
c_tag = 'AuthnContextDeclRef'
c_namespace = NAMESPACE
c_value_type = {'base': 'anyURI'}
c_children = SamlBase.c_children.copy()
c_attributes = SamlBase.c_attributes.copy()
c_child_order = SamlBase.c_child_order[:]
c_cardinality = SamlBase.c_cardinality.copy()
def authn_context_decl_ref_from_string(xml_string):
return saml2.create_class_from_xml_string(AuthnContextDeclRef, xml_string)
class AuthnContextDecl(SamlBase):
"""The urn:oasis:names:tc:SAML:2.0:assertion:AuthnContextDecl element """
c_tag = 'AuthnContextDecl'
c_namespace = NAMESPACE
c_value_type = {'base': 'anyType'}
c_children = SamlBase.c_children.copy()
c_attributes = SamlBase.c_attributes.copy()
c_child_order = SamlBase.c_child_order[:]
c_cardinality = SamlBase.c_cardinality.copy()
def authn_context_decl_from_string(xml_string):
return saml2.create_class_from_xml_string(AuthnContextDecl, xml_string)
class AuthenticatingAuthority(SamlBase):
    """The urn:oasis:names:tc:SAML:2.0:assertion:AuthenticatingAuthority
    element """
    c_tag = 'AuthenticatingAuthority'
    c_namespace = NAMESPACE
    # Element text is the URI (entity ID) of an authority in the authn chain.
    c_value_type = {'base': 'anyURI'}
    # Copy the parent registries so subclass additions never mutate SamlBase.
    c_children = SamlBase.c_children.copy()
    c_attributes = SamlBase.c_attributes.copy()
    c_child_order = SamlBase.c_child_order[:]
    c_cardinality = SamlBase.c_cardinality.copy()
def authenticating_authority_from_string(xml_string):
    """Build an AuthenticatingAuthority instance from an XML string."""
    return saml2.create_class_from_xml_string(
        AuthenticatingAuthority, xml_string)
class DecisionType_(SamlBase):
    """The urn:oasis:names:tc:SAML:2.0:assertion:DecisionType element """
    c_tag = 'DecisionType'
    c_namespace = NAMESPACE
    # Closed enumeration of authorization decision outcomes.
    c_value_type = {'base': 'string', 'enumeration': ['Permit', 'Deny',
                                                      'Indeterminate']}
    # Copy the parent registries so subclass additions never mutate SamlBase.
    c_children = SamlBase.c_children.copy()
    c_attributes = SamlBase.c_attributes.copy()
    c_child_order = SamlBase.c_child_order[:]
    c_cardinality = SamlBase.c_cardinality.copy()
def decision_type__from_string(xml_string):
    """Parse an XML string into a DecisionType_ instance."""
    return saml2.create_class_from_xml_string(
        DecisionType_, xml_string)
class ActionType_(SamlBase):
    """The urn:oasis:names:tc:SAML:2.0:assertion:ActionType element """
    c_tag = 'ActionType'
    c_namespace = NAMESPACE
    c_value_type = {'base': 'string'}
    # Copy the parent registries so subclass additions never mutate SamlBase.
    c_children = SamlBase.c_children.copy()
    c_attributes = SamlBase.c_attributes.copy()
    c_child_order = SamlBase.c_child_order[:]
    c_cardinality = SamlBase.c_cardinality.copy()
    # Required 'Namespace' XML attribute, exposed as .namespace.
    c_attributes['Namespace'] = ('namespace', 'anyURI', True)

    def __init__(self,
                 namespace=None,
                 text=None,
                 extension_elements=None,
                 extension_attributes=None):
        """Initialize the action; `namespace` maps to the 'Namespace' attribute."""
        SamlBase.__init__(self,
                          text=text,
                          extension_elements=extension_elements,
                          extension_attributes=extension_attributes)
        self.namespace = namespace
def action_type__from_string(xml_string):
    """Deserialize an ActionType_ instance from an XML string."""
    return saml2.create_class_from_xml_string(
        ActionType_, xml_string)
class AttributeValue(AttributeValueBase):
    """The urn:oasis:names:tc:SAML:2.0:assertion:AttributeValue element """
    c_tag = 'AttributeValue'
    c_namespace = NAMESPACE
    # xs:anyType -- attribute values may carry arbitrary content.
    c_value_type = {'base': 'anyType'}
    # Registries are copied from SamlBase (not AttributeValueBase) by design
    # of the generator; additions never mutate the shared base dicts.
    c_children = SamlBase.c_children.copy()
    c_attributes = SamlBase.c_attributes.copy()
    c_child_order = SamlBase.c_child_order[:]
    c_cardinality = SamlBase.c_cardinality.copy()
def attribute_value_from_string(xml_string):
    """Build an AttributeValue element instance from an XML string."""
    return saml2.create_class_from_xml_string(
        AttributeValue, xml_string)
class EncryptedAttribute(EncryptedElementType_):
    """The urn:oasis:names:tc:SAML:2.0:assertion:EncryptedAttribute element """
    # Element wrapper: same structure as EncryptedElementType_, new tag only.
    c_tag = 'EncryptedAttribute'
    c_namespace = NAMESPACE
    c_children = EncryptedElementType_.c_children.copy()
    c_attributes = EncryptedElementType_.c_attributes.copy()
    c_child_order = EncryptedElementType_.c_child_order[:]
    c_cardinality = EncryptedElementType_.c_cardinality.copy()
def encrypted_attribute_from_string(xml_string):
    """Parse an XML string into an EncryptedAttribute instance."""
    return saml2.create_class_from_xml_string(
        EncryptedAttribute, xml_string)
class BaseID(BaseIDAbstractType_):
    """The urn:oasis:names:tc:SAML:2.0:assertion:BaseID element """
    # Element wrapper: same structure as BaseIDAbstractType_, new tag only.
    c_tag = 'BaseID'
    c_namespace = NAMESPACE
    c_children = BaseIDAbstractType_.c_children.copy()
    c_attributes = BaseIDAbstractType_.c_attributes.copy()
    c_child_order = BaseIDAbstractType_.c_child_order[:]
    c_cardinality = BaseIDAbstractType_.c_cardinality.copy()
def base_id_from_string(xml_string):
    """Deserialize a BaseID element from an XML string."""
    return saml2.create_class_from_xml_string(
        BaseID, xml_string)
class NameID(NameIDType_):
    """The urn:oasis:names:tc:SAML:2.0:assertion:NameID element
    From the Oasis SAML2 Technical Overview:
    "The <NameID> element within a <Subject> offers the ability to provide name
    identifiers in a number of different formats. SAML's predefined formats
    include: Email address, X.509 subject name, Windows domain qualified name,
    Kerberos principal name, Entity identifier, Persistent identifier,
    Transient identifier."
    """
    # Element wrapper: same structure as NameIDType_, new tag only.
    c_tag = 'NameID'
    c_namespace = NAMESPACE
    c_children = NameIDType_.c_children.copy()
    c_attributes = NameIDType_.c_attributes.copy()
    c_child_order = NameIDType_.c_child_order[:]
    c_cardinality = NameIDType_.c_cardinality.copy()
def name_id_from_string(xml_string):
    """Construct a NameID instance from its XML string representation."""
    return saml2.create_class_from_xml_string(
        NameID, xml_string)
class SubjectConfirmationData(SubjectConfirmationDataType_):
    """The urn:oasis:names:tc:SAML:2.0:assertion:SubjectConfirmationData
    element """
    # Element wrapper: structure inherited from SubjectConfirmationDataType_.
    c_tag = 'SubjectConfirmationData'
    c_namespace = NAMESPACE
    c_children = SubjectConfirmationDataType_.c_children.copy()
    c_attributes = SubjectConfirmationDataType_.c_attributes.copy()
    c_child_order = SubjectConfirmationDataType_.c_child_order[:]
    c_cardinality = SubjectConfirmationDataType_.c_cardinality.copy()
def subject_confirmation_data_from_string(xml_string):
    """Parse an XML string into a SubjectConfirmationData instance."""
    return saml2.create_class_from_xml_string(
        SubjectConfirmationData, xml_string)
class Condition(ConditionAbstractType_):
    """The urn:oasis:names:tc:SAML:2.0:assertion:Condition element """
    # Element wrapper: same structure as ConditionAbstractType_, new tag only.
    c_tag = 'Condition'
    c_namespace = NAMESPACE
    c_children = ConditionAbstractType_.c_children.copy()
    c_attributes = ConditionAbstractType_.c_attributes.copy()
    c_child_order = ConditionAbstractType_.c_child_order[:]
    c_cardinality = ConditionAbstractType_.c_cardinality.copy()
def condition_from_string(xml_string):
    """Deserialize a Condition element from an XML string."""
    return saml2.create_class_from_xml_string(
        Condition, xml_string)
class AudienceRestrictionType_(ConditionAbstractType_):
    """The urn:oasis:names:tc:SAML:2.0:assertion:AudienceRestrictionType
    element """
    c_tag = 'AudienceRestrictionType'
    c_namespace = NAMESPACE
    # Copy the parent registries so subclass additions never mutate the base.
    c_children = ConditionAbstractType_.c_children.copy()
    c_attributes = ConditionAbstractType_.c_attributes.copy()
    c_child_order = ConditionAbstractType_.c_child_order[:]
    c_cardinality = ConditionAbstractType_.c_cardinality.copy()
    # One or more <Audience> children, exposed as the list .audience.
    c_children['{urn:oasis:names:tc:SAML:2.0:assertion}Audience'] = ('audience',
                                                                     [Audience])
    c_cardinality['audience'] = {"min": 1}
    c_child_order.extend(['audience'])

    def __init__(self,
                 audience=None,
                 text=None,
                 extension_elements=None,
                 extension_attributes=None):
        """Initialize the restriction; `audience` is a list of Audience elements."""
        ConditionAbstractType_.__init__(
            self, text=text, extension_elements=extension_elements,
            extension_attributes=extension_attributes)
        # Default to a fresh list; never share a mutable default between instances.
        self.audience = audience or []
def audience_restriction_type__from_string(xml_string):
    """Build an AudienceRestrictionType_ instance from an XML string."""
    return saml2.create_class_from_xml_string(
        AudienceRestrictionType_, xml_string)
class OneTimeUse(OneTimeUseType_):
    """The urn:oasis:names:tc:SAML:2.0:assertion:OneTimeUse element """
    # Element wrapper: same structure as OneTimeUseType_, new tag only.
    c_tag = 'OneTimeUse'
    c_namespace = NAMESPACE
    c_children = OneTimeUseType_.c_children.copy()
    c_attributes = OneTimeUseType_.c_attributes.copy()
    c_child_order = OneTimeUseType_.c_child_order[:]
    c_cardinality = OneTimeUseType_.c_cardinality.copy()
def one_time_use_from_string(xml_string):
    """Parse an XML string into a OneTimeUse instance."""
    return saml2.create_class_from_xml_string(
        OneTimeUse, xml_string)
class ProxyRestriction(ProxyRestrictionType_):
    """The urn:oasis:names:tc:SAML:2.0:assertion:ProxyRestriction element """
    # Element wrapper: same structure as ProxyRestrictionType_, new tag only.
    c_tag = 'ProxyRestriction'
    c_namespace = NAMESPACE
    c_children = ProxyRestrictionType_.c_children.copy()
    c_attributes = ProxyRestrictionType_.c_attributes.copy()
    c_child_order = ProxyRestrictionType_.c_child_order[:]
    c_cardinality = ProxyRestrictionType_.c_cardinality.copy()
def proxy_restriction_from_string(xml_string):
    """Deserialize a ProxyRestriction element from an XML string."""
    return saml2.create_class_from_xml_string(
        ProxyRestriction, xml_string)
class Statement(StatementAbstractType_):
    """The urn:oasis:names:tc:SAML:2.0:assertion:Statement element """
    # Element wrapper: same structure as StatementAbstractType_, new tag only.
    c_tag = 'Statement'
    c_namespace = NAMESPACE
    c_children = StatementAbstractType_.c_children.copy()
    c_attributes = StatementAbstractType_.c_attributes.copy()
    c_child_order = StatementAbstractType_.c_child_order[:]
    c_cardinality = StatementAbstractType_.c_cardinality.copy()
def statement_from_string(xml_string):
    """Build a Statement instance from its XML string representation."""
    return saml2.create_class_from_xml_string(
        Statement, xml_string)
class SubjectLocality(SubjectLocalityType_):
    """The urn:oasis:names:tc:SAML:2.0:assertion:SubjectLocality element """
    c_tag = 'SubjectLocality'
    c_namespace = NAMESPACE
    c_children = SubjectLocalityType_.c_children.copy()
    c_attributes = SubjectLocalityType_.c_attributes.copy()
    c_child_order = SubjectLocalityType_.c_child_order[:]
    c_cardinality = SubjectLocalityType_.c_cardinality.copy()

    def verify(self):
        """Validate the Address / DNSName attribute values, then defer to the
        base-class verification.

        Address must be a dotted-decimal IPv4 or RFC3513 IPv6 literal;
        otherwise a ShouldValueError is raised. When only DNSName is set it
        is checked with valid_domain_name.
        """
        if self.address:
            # Reject anything that is neither IPv4 nor IPv6.
            if not (valid_ipv4(self.address) or valid_ipv6(self.address)):
                raise ShouldValueError("Not an IPv4 or IPv6 address")
        elif self.dns_name:
            valid_domain_name(self.dns_name)
        return SubjectLocalityType_.verify(self)
def subject_locality_from_string(xml_string):
    """Parse an XML string into a SubjectLocality instance."""
    return saml2.create_class_from_xml_string(
        SubjectLocality, xml_string)
class AuthnContextType_(SamlBase):
    """The urn:oasis:names:tc:SAML:2.0:assertion:AuthnContextType element """
    c_tag = 'AuthnContextType'
    c_namespace = NAMESPACE
    # Copy the parent registries so subclass additions never mutate SamlBase.
    c_children = SamlBase.c_children.copy()
    c_attributes = SamlBase.c_attributes.copy()
    c_child_order = SamlBase.c_child_order[:]
    c_cardinality = SamlBase.c_cardinality.copy()
    # Child elements; a bare class means "at most one", a [class] means a list.
    c_children[
        '{urn:oasis:names:tc:SAML:2.0:assertion}AuthnContextClassRef'] = (
        'authn_context_class_ref', AuthnContextClassRef)
    c_children['{urn:oasis:names:tc:SAML:2.0:assertion}AuthnContextDecl'] = (
        'authn_context_decl',
        AuthnContextDecl)
    c_cardinality['authn_context_decl'] = {"min": 0, "max": 1}
    c_children['{urn:oasis:names:tc:SAML:2.0:assertion}AuthnContextDeclRef'] = (
        'authn_context_decl_ref',
        AuthnContextDeclRef)
    c_cardinality['authn_context_decl_ref'] = {"min": 0, "max": 1}
    c_children[
        '{urn:oasis:names:tc:SAML:2.0:assertion}AuthenticatingAuthority'] = (
        'authenticating_authority', [AuthenticatingAuthority])
    c_cardinality['authenticating_authority'] = {"min": 0}
    c_child_order.extend(['authn_context_class_ref', 'authn_context_decl',
                          'authn_context_decl_ref', 'authenticating_authority'])

    def __init__(self,
                 authn_context_class_ref=None,
                 authn_context_decl=None,
                 authn_context_decl_ref=None,
                 authenticating_authority=None,
                 text=None,
                 extension_elements=None,
                 extension_attributes=None):
        """Initialize the context; child arguments mirror the c_children map."""
        SamlBase.__init__(self,
                          text=text,
                          extension_elements=extension_elements,
                          extension_attributes=extension_attributes)
        self.authn_context_class_ref = authn_context_class_ref
        self.authn_context_decl = authn_context_decl
        self.authn_context_decl_ref = authn_context_decl_ref
        # List-valued child; avoid a shared mutable default.
        self.authenticating_authority = authenticating_authority or []

    def verify(self):
        """Reject contexts carrying both an inline declaration and a
        declaration reference (mutually exclusive per the schema)."""
        if self.authn_context_decl and self.authn_context_decl_ref:
            raise Exception(
                "Invalid Response: "
                "Cannot have both <AuthnContextDecl> and <AuthnContextDeclRef>"
            )
        return SamlBase.verify(self)
def authn_context_type__from_string(xml_string):
    """Deserialize an AuthnContextType_ instance from an XML string."""
    return saml2.create_class_from_xml_string(
        AuthnContextType_, xml_string)
class Action(ActionType_):
    """The urn:oasis:names:tc:SAML:2.0:assertion:Action element """
    # Element wrapper: same structure as ActionType_, new tag only.
    c_tag = 'Action'
    c_namespace = NAMESPACE
    c_children = ActionType_.c_children.copy()
    c_attributes = ActionType_.c_attributes.copy()
    c_child_order = ActionType_.c_child_order[:]
    c_cardinality = ActionType_.c_cardinality.copy()
def action_from_string(xml_string):
    """Build an Action instance from its XML string representation."""
    return saml2.create_class_from_xml_string(
        Action, xml_string)
class AttributeType_(SamlBase):
    """The urn:oasis:names:tc:SAML:2.0:assertion:AttributeType element """
    c_tag = 'AttributeType'
    c_namespace = NAMESPACE
    # Copy the parent registries so subclass additions never mutate SamlBase.
    c_children = SamlBase.c_children.copy()
    c_attributes = SamlBase.c_attributes.copy()
    c_child_order = SamlBase.c_child_order[:]
    c_cardinality = SamlBase.c_cardinality.copy()
    # Zero or more <AttributeValue> children, exposed as the list .attribute_value.
    c_children['{urn:oasis:names:tc:SAML:2.0:assertion}AttributeValue'] = (
        'attribute_value',
        [AttributeValue])
    c_cardinality['attribute_value'] = {"min": 0}
    # XML attributes: (python name, xsd type, required?)
    c_attributes['Name'] = ('name', 'string', True)
    c_attributes['NameFormat'] = ('name_format', 'anyURI', False)
    c_attributes['FriendlyName'] = ('friendly_name', 'string', False)
    c_child_order.extend(['attribute_value'])
    # Any namespace-foreign attribute is accepted leniently.
    c_any_attribute = {"namespace": "##other", "processContents": "lax"}

    def __init__(self,
                 attribute_value=None,
                 name=None,
                 name_format=NAME_FORMAT_URI,
                 friendly_name=None,
                 text=None,
                 extension_elements=None,
                 extension_attributes=None):
        """Initialize the attribute; NameFormat defaults to NAME_FORMAT_URI
        when constructing locally."""
        SamlBase.__init__(self,
                          text=text,
                          extension_elements=extension_elements,
                          extension_attributes=extension_attributes)
        self.attribute_value = attribute_value or []
        self.name = name
        self.name_format = name_format
        self.friendly_name = friendly_name

    # when consuming such elements, default to NAME_FORMAT_UNSPECIFIED as NameFormat
    def harvest_element_tree(self, tree):
        """Populate this instance from an ElementTree node, injecting the
        spec-mandated default NameFormat when the attribute is absent."""
        tree.attrib.setdefault('NameFormat', NAME_FORMAT_UNSPECIFIED)
        SamlBase.harvest_element_tree(self, tree)
def attribute_type__from_string(xml_string):
    """Parse an XML string into an AttributeType_ instance."""
    return saml2.create_class_from_xml_string(
        AttributeType_, xml_string)
class SubjectConfirmationType_(SamlBase):
    """The urn:oasis:names:tc:SAML:2.0:assertion:SubjectConfirmationType
    element """
    c_tag = 'SubjectConfirmationType'
    c_namespace = NAMESPACE
    # Copy the parent registries so subclass additions never mutate SamlBase.
    c_children = SamlBase.c_children.copy()
    c_attributes = SamlBase.c_attributes.copy()
    c_child_order = SamlBase.c_child_order[:]
    c_cardinality = SamlBase.c_cardinality.copy()
    # Optional identifier children (at most one of each may appear).
    c_children['{urn:oasis:names:tc:SAML:2.0:assertion}BaseID'] = ('base_id',
                                                                   BaseID)
    c_cardinality['base_id'] = {"min": 0, "max": 1}
    c_children['{urn:oasis:names:tc:SAML:2.0:assertion}NameID'] = ('name_id',
                                                                   NameID)
    c_cardinality['name_id'] = {"min": 0, "max": 1}
    c_children['{urn:oasis:names:tc:SAML:2.0:assertion}EncryptedID'] = (
        'encrypted_id',
        EncryptedID)
    c_cardinality['encrypted_id'] = {"min": 0, "max": 1}
    c_children[
        '{urn:oasis:names:tc:SAML:2.0:assertion}SubjectConfirmationData'] = (
        'subject_confirmation_data', SubjectConfirmationData)
    c_cardinality['subject_confirmation_data'] = {"min": 0, "max": 1}
    # Required 'Method' XML attribute (the confirmation method URI).
    c_attributes['Method'] = ('method', 'anyURI', True)
    c_child_order.extend(['base_id', 'name_id', 'encrypted_id',
                          'subject_confirmation_data'])

    def __init__(self,
                 base_id=None,
                 name_id=None,
                 encrypted_id=None,
                 subject_confirmation_data=None,
                 method=None,
                 text=None,
                 extension_elements=None,
                 extension_attributes=None):
        """Initialize the confirmation; arguments mirror the c_children /
        c_attributes maps."""
        SamlBase.__init__(self,
                          text=text,
                          extension_elements=extension_elements,
                          extension_attributes=extension_attributes)
        self.base_id = base_id
        self.name_id = name_id
        self.encrypted_id = encrypted_id
        self.subject_confirmation_data = subject_confirmation_data
        self.method = method
def subject_confirmation_type__from_string(xml_string):
    """Build a SubjectConfirmationType_ instance from an XML string."""
    return saml2.create_class_from_xml_string(
        SubjectConfirmationType_, xml_string)
class AudienceRestriction(AudienceRestrictionType_):
    """The urn:oasis:names:tc:SAML:2.0:assertion:AudienceRestriction element """
    # Element wrapper: same structure as AudienceRestrictionType_, new tag only.
    c_tag = 'AudienceRestriction'
    c_namespace = NAMESPACE
    c_children = AudienceRestrictionType_.c_children.copy()
    c_attributes = AudienceRestrictionType_.c_attributes.copy()
    c_child_order = AudienceRestrictionType_.c_child_order[:]
    c_cardinality = AudienceRestrictionType_.c_cardinality.copy()
def audience_restriction_from_string(xml_string):
    """Deserialize an AudienceRestriction element from an XML string."""
    return saml2.create_class_from_xml_string(
        AudienceRestriction, xml_string)
class AuthnContext(AuthnContextType_):
    """The urn:oasis:names:tc:SAML:2.0:assertion:AuthnContext element """
    # Element wrapper: same structure as AuthnContextType_, new tag only.
    c_tag = 'AuthnContext'
    c_namespace = NAMESPACE
    c_children = AuthnContextType_.c_children.copy()
    c_attributes = AuthnContextType_.c_attributes.copy()
    c_child_order = AuthnContextType_.c_child_order[:]
    c_cardinality = AuthnContextType_.c_cardinality.copy()
def authn_context_from_string(xml_string):
    """Parse an XML string into an AuthnContext instance."""
    return saml2.create_class_from_xml_string(
        AuthnContext, xml_string)
class Attribute(AttributeType_):
    """The urn:oasis:names:tc:SAML:2.0:assertion:Attribute element """
    # Element wrapper: same structure as AttributeType_, new tag only.
    c_tag = 'Attribute'
    c_namespace = NAMESPACE
    c_children = AttributeType_.c_children.copy()
    c_attributes = AttributeType_.c_attributes.copy()
    c_child_order = AttributeType_.c_child_order[:]
    c_cardinality = AttributeType_.c_cardinality.copy()
def attribute_from_string(xml_string):
    """Construct an Attribute instance from its XML string representation."""
    return saml2.create_class_from_xml_string(
        Attribute, xml_string)
class SubjectConfirmation(SubjectConfirmationType_):
    """The urn:oasis:names:tc:SAML:2.0:assertion:SubjectConfirmation element """
    # Element wrapper: same structure as SubjectConfirmationType_, new tag only.
    c_tag = 'SubjectConfirmation'
    c_namespace = NAMESPACE
    c_children = SubjectConfirmationType_.c_children.copy()
    c_attributes = SubjectConfirmationType_.c_attributes.copy()
    c_child_order = SubjectConfirmationType_.c_child_order[:]
    c_cardinality = SubjectConfirmationType_.c_cardinality.copy()
def subject_confirmation_from_string(xml_string):
    """Deserialize a SubjectConfirmation element from an XML string."""
    return saml2.create_class_from_xml_string(
        SubjectConfirmation, xml_string)
class ConditionsType_(SamlBase):
    """The urn:oasis:names:tc:SAML:2.0:assertion:ConditionsType element """
    c_tag = 'ConditionsType'
    c_namespace = NAMESPACE
    # Copy the parent registries so subclass additions never mutate SamlBase.
    c_children = SamlBase.c_children.copy()
    c_attributes = SamlBase.c_attributes.copy()
    c_child_order = SamlBase.c_child_order[:]
    c_cardinality = SamlBase.c_cardinality.copy()
    # Repeatable condition children; each maps to a list-valued attribute.
    c_children['{urn:oasis:names:tc:SAML:2.0:assertion}Condition'] = (
        'condition',
        [Condition])
    c_cardinality['condition'] = {"min": 0}
    c_children['{urn:oasis:names:tc:SAML:2.0:assertion}AudienceRestriction'] = (
        'audience_restriction',
        [AudienceRestriction])
    c_cardinality['audience_restriction'] = {"min": 0}
    c_children['{urn:oasis:names:tc:SAML:2.0:assertion}OneTimeUse'] = (
        'one_time_use',
        [OneTimeUse])
    c_cardinality['one_time_use'] = {"min": 0}
    c_children['{urn:oasis:names:tc:SAML:2.0:assertion}ProxyRestriction'] = (
        'proxy_restriction',
        [ProxyRestriction])
    c_cardinality['proxy_restriction'] = {"min": 0}
    # Validity window attributes.
    c_attributes['NotBefore'] = ('not_before', 'dateTime', False)
    c_attributes['NotOnOrAfter'] = ('not_on_or_after', 'dateTime', False)
    c_child_order.extend(['condition', 'audience_restriction', 'one_time_use',
                          'proxy_restriction'])

    def __init__(self,
                 condition=None,
                 audience_restriction=None,
                 one_time_use=None,
                 proxy_restriction=None,
                 not_before=None,
                 not_on_or_after=None,
                 text=None,
                 extension_elements=None,
                 extension_attributes=None):
        """Initialize the conditions; list children default to fresh lists."""
        SamlBase.__init__(self,
                          text=text,
                          extension_elements=extension_elements,
                          extension_attributes=extension_attributes)
        self.condition = condition or []
        self.audience_restriction = audience_restriction or []
        self.one_time_use = one_time_use or []
        self.proxy_restriction = proxy_restriction or []
        self.not_before = not_before
        self.not_on_or_after = not_on_or_after

    def verify(self):
        """Enforce that OneTimeUse and ProxyRestriction each appear at most
        once (SAML core restriction), then run base-class verification."""
        if self.one_time_use:
            if len(self.one_time_use) != 1:
                raise Exception("Cannot be used more than once")
        if self.proxy_restriction:
            if len(self.proxy_restriction) != 1:
                raise Exception("Cannot be used more than once")
        return SamlBase.verify(self)
def conditions_type__from_string(xml_string):
    """Parse an XML string into a ConditionsType_ instance."""
    return saml2.create_class_from_xml_string(
        ConditionsType_, xml_string)
class AuthnStatementType_(StatementAbstractType_):
    """The urn:oasis:names:tc:SAML:2.0:assertion:AuthnStatementType element """
    c_tag = 'AuthnStatementType'
    c_namespace = NAMESPACE
    # Copy the parent registries so subclass additions never mutate the base.
    c_children = StatementAbstractType_.c_children.copy()
    c_attributes = StatementAbstractType_.c_attributes.copy()
    c_child_order = StatementAbstractType_.c_child_order[:]
    c_cardinality = StatementAbstractType_.c_cardinality.copy()
    # Optional <SubjectLocality> and mandatory <AuthnContext> children.
    c_children['{urn:oasis:names:tc:SAML:2.0:assertion}SubjectLocality'] = (
        'subject_locality', SubjectLocality)
    c_cardinality['subject_locality'] = {"min": 0, "max": 1}
    c_children['{urn:oasis:names:tc:SAML:2.0:assertion}AuthnContext'] = (
        'authn_context', AuthnContext)
    # XML attributes: (python name, xsd type, required?)
    c_attributes['AuthnInstant'] = ('authn_instant', 'dateTime', True)
    c_attributes['SessionIndex'] = ('session_index', 'string', False)
    c_attributes['SessionNotOnOrAfter'] = ('session_not_on_or_after',
                                           'dateTime', False)
    c_child_order.extend(['subject_locality', 'authn_context'])

    def __init__(self,
                 subject_locality=None,
                 authn_context=None,
                 authn_instant=None,
                 session_index=None,
                 session_not_on_or_after=None,
                 text=None,
                 extension_elements=None,
                 extension_attributes=None):
        """Initialize the statement; arguments mirror the c_children /
        c_attributes maps."""
        StatementAbstractType_.__init__(
            self, text=text, extension_elements=extension_elements,
            extension_attributes=extension_attributes)
        self.subject_locality = subject_locality
        self.authn_context = authn_context
        self.authn_instant = authn_instant
        self.session_index = session_index
        self.session_not_on_or_after = session_not_on_or_after
def authn_statement_type__from_string(xml_string):
    """Deserialize an AuthnStatementType_ instance from an XML string."""
    return saml2.create_class_from_xml_string(
        AuthnStatementType_, xml_string)
class AttributeStatementType_(StatementAbstractType_):
    """The urn:oasis:names:tc:SAML:2.0:assertion:AttributeStatementType
    element """
    c_tag = 'AttributeStatementType'
    c_namespace = NAMESPACE
    # Copy the parent registries so subclass additions never mutate the base.
    c_children = StatementAbstractType_.c_children.copy()
    c_attributes = StatementAbstractType_.c_attributes.copy()
    c_child_order = StatementAbstractType_.c_child_order[:]
    c_cardinality = StatementAbstractType_.c_cardinality.copy()
    # Repeatable plain and encrypted attribute children.
    c_children['{urn:oasis:names:tc:SAML:2.0:assertion}Attribute'] = (
        'attribute',
        [Attribute])
    c_cardinality['attribute'] = {"min": 0}
    c_children['{urn:oasis:names:tc:SAML:2.0:assertion}EncryptedAttribute'] = (
        'encrypted_attribute',
        [EncryptedAttribute])
    c_cardinality['encrypted_attribute'] = {"min": 0}
    c_child_order.extend(['attribute', 'encrypted_attribute'])

    def __init__(self,
                 attribute=None,
                 encrypted_attribute=None,
                 text=None,
                 extension_elements=None,
                 extension_attributes=None):
        """Initialize the statement; list children default to fresh lists."""
        StatementAbstractType_.__init__(
            self, text=text, extension_elements=extension_elements,
            extension_attributes=extension_attributes)
        self.attribute = attribute or []
        self.encrypted_attribute = encrypted_attribute or []
def attribute_statement_type__from_string(xml_string):
    """Build an AttributeStatementType_ instance from an XML string."""
    return saml2.create_class_from_xml_string(
        AttributeStatementType_, xml_string)
class SubjectType_(SamlBase):
    """The urn:oasis:names:tc:SAML:2.0:assertion:SubjectType element """
    c_tag = 'SubjectType'
    c_namespace = NAMESPACE
    # Copy the parent registries so subclass additions never mutate SamlBase.
    c_children = SamlBase.c_children.copy()
    c_attributes = SamlBase.c_attributes.copy()
    c_child_order = SamlBase.c_child_order[:]
    c_cardinality = SamlBase.c_cardinality.copy()
    # Optional identifier children (at most one of each may appear).
    c_children['{urn:oasis:names:tc:SAML:2.0:assertion}BaseID'] = ('base_id',
                                                                   BaseID)
    c_cardinality['base_id'] = {"min": 0, "max": 1}
    c_children['{urn:oasis:names:tc:SAML:2.0:assertion}NameID'] = ('name_id',
                                                                   NameID)
    c_cardinality['name_id'] = {"min": 0, "max": 1}
    c_children['{urn:oasis:names:tc:SAML:2.0:assertion}EncryptedID'] = (
        'encrypted_id', EncryptedID)
    c_cardinality['encrypted_id'] = {"min": 0, "max": 1}
    # Zero or more <SubjectConfirmation> children.
    c_children['{urn:oasis:names:tc:SAML:2.0:assertion}SubjectConfirmation'] = (
        'subject_confirmation', [SubjectConfirmation])
    c_cardinality['subject_confirmation'] = {"min": 0}
    c_child_order.extend(['base_id', 'name_id', 'encrypted_id',
                          'subject_confirmation'])

    def __init__(self,
                 base_id=None,
                 name_id=None,
                 encrypted_id=None,
                 subject_confirmation=None,
                 text=None,
                 extension_elements=None,
                 extension_attributes=None):
        """Initialize the subject; arguments mirror the c_children map."""
        SamlBase.__init__(self,
                          text=text,
                          extension_elements=extension_elements,
                          extension_attributes=extension_attributes)
        self.base_id = base_id
        self.name_id = name_id
        self.encrypted_id = encrypted_id
        self.subject_confirmation = subject_confirmation or []
def subject_type__from_string(xml_string):
    """Parse an XML string into a SubjectType_ instance."""
    return saml2.create_class_from_xml_string(
        SubjectType_, xml_string)
class Conditions(ConditionsType_):
    """The urn:oasis:names:tc:SAML:2.0:assertion:Conditions element """
    # Element wrapper: same structure as ConditionsType_, new tag only.
    c_tag = 'Conditions'
    c_namespace = NAMESPACE
    c_children = ConditionsType_.c_children.copy()
    c_attributes = ConditionsType_.c_attributes.copy()
    c_child_order = ConditionsType_.c_child_order[:]
    c_cardinality = ConditionsType_.c_cardinality.copy()
def conditions_from_string(xml_string):
    """Deserialize a Conditions element from an XML string."""
    return saml2.create_class_from_xml_string(
        Conditions, xml_string)
class AuthnStatement(AuthnStatementType_):
    """The urn:oasis:names:tc:SAML:2.0:assertion:AuthnStatement element """
    # Element wrapper: same structure as AuthnStatementType_, new tag only.
    c_tag = 'AuthnStatement'
    c_namespace = NAMESPACE
    c_children = AuthnStatementType_.c_children.copy()
    c_attributes = AuthnStatementType_.c_attributes.copy()
    c_child_order = AuthnStatementType_.c_child_order[:]
    c_cardinality = AuthnStatementType_.c_cardinality.copy()
def authn_statement_from_string(xml_string):
    """Build an AuthnStatement instance from an XML string."""
    return saml2.create_class_from_xml_string(
        AuthnStatement, xml_string)
class AttributeStatement(AttributeStatementType_):
    """The urn:oasis:names:tc:SAML:2.0:assertion:AttributeStatement element """
    # Element wrapper: same structure as AttributeStatementType_, new tag only.
    c_tag = 'AttributeStatement'
    c_namespace = NAMESPACE
    c_children = AttributeStatementType_.c_children.copy()
    c_attributes = AttributeStatementType_.c_attributes.copy()
    c_child_order = AttributeStatementType_.c_child_order[:]
    c_cardinality = AttributeStatementType_.c_cardinality.copy()
def attribute_statement_from_string(xml_string):
    """Parse an XML string into an AttributeStatement instance."""
    return saml2.create_class_from_xml_string(
        AttributeStatement, xml_string)
class Subject(SubjectType_):
    """The urn:oasis:names:tc:SAML:2.0:assertion:Subject element """
    # Element wrapper: same structure as SubjectType_, new tag only.
    c_tag = 'Subject'
    c_namespace = NAMESPACE
    c_children = SubjectType_.c_children.copy()
    c_attributes = SubjectType_.c_attributes.copy()
    c_child_order = SubjectType_.c_child_order[:]
    c_cardinality = SubjectType_.c_cardinality.copy()
def subject_from_string(xml_string):
    """Deserialize a Subject element from an XML string."""
    return saml2.create_class_from_xml_string(
        Subject, xml_string)
#..................
# ['AuthzDecisionStatement', 'EvidenceType', 'AdviceType', 'Evidence',
# 'Assertion', 'AssertionType', 'AuthzDecisionStatementType', 'Advice']
class EvidenceType_(SamlBase):
    """The urn:oasis:names:tc:SAML:2.0:assertion:EvidenceType element """
    c_tag = 'EvidenceType'
    c_namespace = NAMESPACE
    # Copy the parent registries so subclass additions never mutate SamlBase.
    c_children = SamlBase.c_children.copy()
    c_attributes = SamlBase.c_attributes.copy()
    c_child_order = SamlBase.c_child_order[:]
    c_cardinality = SamlBase.c_cardinality.copy()
    c_children['{urn:oasis:names:tc:SAML:2.0:assertion}AssertionIDRef'] = (
        'assertion_id_ref', [AssertionIDRef])
    c_cardinality['assertion_id_ref'] = {"min": 0}
    c_children['{urn:oasis:names:tc:SAML:2.0:assertion}AssertionURIRef'] = (
        'assertion_uri_ref', [AssertionURIRef])
    c_cardinality['assertion_uri_ref'] = {"min": 0}
    # NOTE: only the cardinality for 'assertion' is declared here; its
    # c_children entry is not visible in this section -- presumably wired up
    # after Assertion is defined. TODO confirm against the rest of the file.
    c_cardinality['assertion'] = {"min": 0}
    c_children['{urn:oasis:names:tc:SAML:2.0:assertion}EncryptedAssertion'] = (
        'encrypted_assertion', [EncryptedAssertion])
    c_cardinality['encrypted_assertion'] = {"min": 0}
    c_child_order.extend(['assertion_id_ref', 'assertion_uri_ref', 'assertion',
                          'encrypted_assertion'])

    def __init__(self,
                 assertion_id_ref=None,
                 assertion_uri_ref=None,
                 assertion=None,
                 encrypted_assertion=None,
                 text=None,
                 extension_elements=None,
                 extension_attributes=None):
        """Initialize the evidence; all children are list-valued and default
        to fresh lists."""
        SamlBase.__init__(self,
                          text=text,
                          extension_elements=extension_elements,
                          extension_attributes=extension_attributes)
        self.assertion_id_ref = assertion_id_ref or []
        self.assertion_uri_ref = assertion_uri_ref or []
        self.assertion = assertion or []
        self.encrypted_assertion = encrypted_assertion or []
def evidence_type__from_string(xml_string):
    """Build an EvidenceType_ instance from an XML string."""
    return saml2.create_class_from_xml_string(
        EvidenceType_, xml_string)
class Evidence(EvidenceType_):
    """The urn:oasis:names:tc:SAML:2.0:assertion:Evidence element """
    # Element wrapper: same structure as EvidenceType_, new tag only.
    c_tag = 'Evidence'
    c_namespace = NAMESPACE
    c_children = EvidenceType_.c_children.copy()
    c_attributes = EvidenceType_.c_attributes.copy()
    c_child_order = EvidenceType_.c_child_order[:]
    c_cardinality = EvidenceType_.c_cardinality.copy()
def evidence_from_string(xml_string):
    """Parse an XML string into an Evidence instance."""
    return saml2.create_class_from_xml_string(
        Evidence, xml_string)
class AuthzDecisionStatementType_(StatementAbstractType_):
    """The urn:oasis:names:tc:SAML:2.0:assertion:AuthzDecisionStatementType
    element """
    c_tag = 'AuthzDecisionStatementType'
    c_namespace = NAMESPACE
    # Copy the parent registries so subclass additions never mutate the base.
    c_children = StatementAbstractType_.c_children.copy()
    c_attributes = StatementAbstractType_.c_attributes.copy()
    c_child_order = StatementAbstractType_.c_child_order[:]
    c_cardinality = StatementAbstractType_.c_cardinality.copy()
    # One or more <Action> children; at most one <Evidence>.
    c_children['{urn:oasis:names:tc:SAML:2.0:assertion}Action'] = (
        'action', [Action])
    c_cardinality['action'] = {"min": 1}
    c_children['{urn:oasis:names:tc:SAML:2.0:assertion}Evidence'] = (
        'evidence', Evidence)
    c_cardinality['evidence'] = {"min": 0, "max": 1}
    # 'Decision' is typed by the DecisionType_ enumeration class.
    c_attributes['Resource'] = ('resource', 'anyURI', True)
    c_attributes['Decision'] = ('decision', DecisionType_, True)
    c_child_order.extend(['action', 'evidence'])

    def __init__(self,
                 action=None,
                 evidence=None,
                 resource=None,
                 decision=None,
                 text=None,
                 extension_elements=None,
                 extension_attributes=None):
        """Initialize the statement; `action` is a list, the rest are scalars."""
        StatementAbstractType_.__init__(
            self, text=text, extension_elements=extension_elements,
            extension_attributes=extension_attributes)
        self.action = action or []
        self.evidence = evidence
        self.resource = resource
        self.decision = decision
def authz_decision_statement_type__from_string(xml_string):
    """Deserialize an AuthzDecisionStatementType_ instance from an XML string."""
    return saml2.create_class_from_xml_string(
        AuthzDecisionStatementType_, xml_string)
class AuthzDecisionStatement(AuthzDecisionStatementType_):
    """The urn:oasis:names:tc:SAML:2.0:assertion:AuthzDecisionStatement
    element """
    # Element wrapper: structure inherited from AuthzDecisionStatementType_.
    c_tag = 'AuthzDecisionStatement'
    c_namespace = NAMESPACE
    c_children = AuthzDecisionStatementType_.c_children.copy()
    c_attributes = AuthzDecisionStatementType_.c_attributes.copy()
    c_child_order = AuthzDecisionStatementType_.c_child_order[:]
    c_cardinality = AuthzDecisionStatementType_.c_cardinality.copy()
def authz_decision_statement_from_string(xml_string):
    """Build an AuthzDecisionStatement instance from an XML string."""
    return saml2.create_class_from_xml_string(
        AuthzDecisionStatement, xml_string)
#..................
# ['Assertion', 'AssertionType', 'AdviceType', 'Advice']
class AssertionType_(SamlBase):
    """The urn:oasis:names:tc:SAML:2.0:assertion:AssertionType element """
    c_tag = 'AssertionType'
    c_namespace = NAMESPACE
    # Copy the parent registries so subclass additions never mutate SamlBase.
    c_children = SamlBase.c_children.copy()
    c_attributes = SamlBase.c_attributes.copy()
    c_child_order = SamlBase.c_child_order[:]
    c_cardinality = SamlBase.c_cardinality.copy()
    c_children['{urn:oasis:names:tc:SAML:2.0:assertion}Issuer'] = ('issuer',
                                                                   Issuer)
    # XML-DSig signature child from the ds (xmldsig) binding module.
    c_children['{http://www.w3.org/2000/09/xmldsig#}Signature'] = ('signature',
                                                                   ds.Signature)
    c_cardinality['signature'] = {"min": 0, "max": 1}
    c_children['{urn:oasis:names:tc:SAML:2.0:assertion}Subject'] = ('subject',
                                                                    Subject)
    c_cardinality['subject'] = {"min": 0, "max": 1}
    c_children['{urn:oasis:names:tc:SAML:2.0:assertion}Conditions'] = (
        'conditions', Conditions)
    c_cardinality['conditions'] = {"min": 0, "max": 1}
    # NOTE: only the cardinality for 'advice' is declared here; its c_children
    # entry is not visible in this section -- presumably wired up after Advice
    # is defined. TODO confirm against the rest of the file.
    c_cardinality['advice'] = {"min": 0, "max": 1}
    # Repeatable statement children of each statement flavour.
    c_children['{urn:oasis:names:tc:SAML:2.0:assertion}Statement'] = (
        'statement', [Statement])
    c_cardinality['statement'] = {"min": 0}
    c_children['{urn:oasis:names:tc:SAML:2.0:assertion}AuthnStatement'] = (
        'authn_statement', [AuthnStatement])
    c_cardinality['authn_statement'] = {"min": 0}
    c_children[
        '{urn:oasis:names:tc:SAML:2.0:assertion}AuthzDecisionStatement'] = (
        'authz_decision_statement', [AuthzDecisionStatement])
    c_cardinality['authz_decision_statement'] = {"min": 0}
    c_children['{urn:oasis:names:tc:SAML:2.0:assertion}AttributeStatement'] = (
        'attribute_statement', [AttributeStatement])
    c_cardinality['attribute_statement'] = {"min": 0}
    # Mandatory assertion attributes.
    c_attributes['Version'] = ('version', 'string', True)
    c_attributes['ID'] = ('id', 'ID', True)
    c_attributes['IssueInstant'] = ('issue_instant', 'dateTime', True)
    c_child_order.extend(['issuer', 'signature', 'subject', 'conditions',
                          'advice', 'statement', 'authn_statement',
                          'authz_decision_statement', 'attribute_statement'])

    def __init__(self,
                 issuer=None,
                 signature=None,
                 subject=None,
                 conditions=None,
                 advice=None,
                 statement=None,
                 authn_statement=None,
                 authz_decision_statement=None,
                 attribute_statement=None,
                 version=None,
                 id=None,
                 issue_instant=None,
                 text=None,
                 extension_elements=None,
                 extension_attributes=None):
        """Initialize the assertion; statement children are list-valued and
        default to fresh lists."""
        SamlBase.__init__(self,
                          text=text,
                          extension_elements=extension_elements,
                          extension_attributes=extension_attributes)
        self.issuer = issuer
        self.signature = signature
        self.subject = subject
        self.conditions = conditions
        self.advice = advice
        self.statement = statement or []
        self.authn_statement = authn_statement or []
        self.authz_decision_statement = authz_decision_statement or []
        self.attribute_statement = attribute_statement or []
        self.version = version
        self.id = id
        self.issue_instant = issue_instant

    def verify(self):
        """Enforce two SAML core constraints: an assertion with no statements
        must carry a <Subject>, and an assertion with an <AuthnStatement>
        must carry a <Subject>."""
        # If no statement MUST contain a subject element
        if self.attribute_statement or self.statement or \
                self.authn_statement or self.authz_decision_statement:
            pass
        elif not self.subject:
            raise MustValueError(
                "If no statement MUST contain a subject element")
        if self.authn_statement and not self.subject:
            raise MustValueError(
                "An assertion with an AuthnStatement must contain a Subject")
        return SamlBase.verify(self)
def assertion_type__from_string(xml_string):
    """Parse an XML string into an AssertionType_ instance."""
    return saml2.create_class_from_xml_string(
        AssertionType_, xml_string)
class Assertion(AssertionType_):
    """The urn:oasis:names:tc:SAML:2.0:assertion:Assertion element """
    # Element wrapper: same structure as AssertionType_, new tag only.
    c_tag = 'Assertion'
    c_namespace = NAMESPACE
    c_children = AssertionType_.c_children.copy()
    c_attributes = AssertionType_.c_attributes.copy()
    c_child_order = AssertionType_.c_child_order[:]
    c_cardinality = AssertionType_.c_cardinality.copy()
def assertion_from_string(xml_string):
return saml2.create_class_from_xml_string(Assertion, xml_string)
class AdviceType_(SamlBase):
"""The urn:oasis:names:tc:SAML:2.0:assertion:AdviceType element """
c_tag = 'AdviceType'
c_namespace = NAMESPACE
c_children = SamlBase.c_children.copy()
c_attributes = SamlBase.c_attributes.copy()
c_child_order = SamlBase.c_child_order[:]
c_cardinality = SamlBase.c_cardinality.copy()
c_children['{urn:oasis:names:tc:SAML:2.0:assertion}AssertionIDRef'] = (
'assertion_id_ref', [AssertionIDRef])
c_cardinality['assertion_id_ref'] = {"min": 0}
c_children['{urn:oasis:names:tc:SAML:2.0:assertion}AssertionURIRef'] = (
'assertion_uri_ref', [AssertionURIRef])
c_cardinality['assertion_uri_ref'] = {"min": 0}
c_children['{urn:oasis:names:tc:SAML:2.0:assertion}Assertion'] = (
'assertion', [Assertion])
c_cardinality['assertion'] = {"min": 0}
c_children['{urn:oasis:names:tc:SAML:2.0:assertion}EncryptedAssertion'] = (
'encrypted_assertion', [EncryptedAssertion])
c_cardinality['encrypted_assertion'] = {"min": 0}
c_child_order.extend(['assertion_id_ref', 'assertion_uri_ref', 'assertion',
'encrypted_assertion'])
c_any = {"namespace": "##other", "processContents": "lax"}
def __init__(self,
assertion_id_ref=None,
assertion_uri_ref=None,
assertion=None,
encrypted_assertion=None,
text=None,
extension_elements=None,
extension_attributes=None):
SamlBase.__init__(self,
text=text,
extension_elements=extension_elements,
extension_attributes=extension_attributes)
self.assertion_id_ref = assertion_id_ref or []
self.assertion_uri_ref = assertion_uri_ref or []
self.assertion = assertion or []
self.encrypted_assertion = encrypted_assertion or []
def advice_type__from_string(xml_string):
return saml2.create_class_from_xml_string(AdviceType_, xml_string)
class Advice(AdviceType_):
"""The urn:oasis:names:tc:SAML:2.0:assertion:Advice element """
c_tag = 'Advice'
c_namespace = NAMESPACE
c_children = AdviceType_.c_children.copy()
c_attributes = AdviceType_.c_attributes.copy()
c_child_order = AdviceType_.c_child_order[:]
c_cardinality = AdviceType_.c_cardinality.copy()
def advice_from_string(xml_string):
return saml2.create_class_from_xml_string(Advice, xml_string)
# ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
EvidenceType_.c_children['{urn:oasis:names:tc:SAML:2.0:assertion}Assertion'] = (
'assertion', [Assertion])
Evidence.c_children['{urn:oasis:names:tc:SAML:2.0:assertion}Assertion'] = (
'assertion', [Assertion])
AssertionType_.c_children['{urn:oasis:names:tc:SAML:2.0:assertion}Advice'] = (
'advice', Advice)
Assertion.c_children['{urn:oasis:names:tc:SAML:2.0:assertion}Advice'] = (
'advice', Advice)
# ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
AG_IDNameQualifiers = [
('NameQualifier', 'string', False),
('SPNameQualifier', 'string', False),
]
ELEMENT_FROM_STRING = {
BaseID.c_tag: base_id_from_string,
NameID.c_tag: name_id_from_string,
NameIDType_.c_tag: name_id_type__from_string,
EncryptedElementType_.c_tag: encrypted_element_type__from_string,
EncryptedID.c_tag: encrypted_id_from_string,
Issuer.c_tag: issuer_from_string,
AssertionIDRef.c_tag: assertion_id_ref_from_string,
AssertionURIRef.c_tag: assertion_uri_ref_from_string,
Assertion.c_tag: assertion_from_string,
AssertionType_.c_tag: assertion_type__from_string,
Subject.c_tag: subject_from_string,
SubjectType_.c_tag: subject_type__from_string,
SubjectConfirmation.c_tag: subject_confirmation_from_string,
SubjectConfirmationType_.c_tag: subject_confirmation_type__from_string,
SubjectConfirmationData.c_tag: subject_confirmation_data_from_string,
SubjectConfirmationDataType_.c_tag:
subject_confirmation_data_type__from_string,
KeyInfoConfirmationDataType_.c_tag:
key_info_confirmation_data_type__from_string,
Conditions.c_tag: conditions_from_string,
ConditionsType_.c_tag: conditions_type__from_string,
Condition.c_tag: condition_from_string,
AudienceRestriction.c_tag: audience_restriction_from_string,
AudienceRestrictionType_.c_tag: audience_restriction_type__from_string,
Audience.c_tag: audience_from_string,
OneTimeUse.c_tag: one_time_use_from_string,
OneTimeUseType_.c_tag: one_time_use_type__from_string,
ProxyRestriction.c_tag: proxy_restriction_from_string,
ProxyRestrictionType_.c_tag: proxy_restriction_type__from_string,
Advice.c_tag: advice_from_string,
AdviceType_.c_tag: advice_type__from_string,
EncryptedAssertion.c_tag: encrypted_assertion_from_string,
Statement.c_tag: statement_from_string,
AuthnStatement.c_tag: authn_statement_from_string,
AuthnStatementType_.c_tag: authn_statement_type__from_string,
SubjectLocality.c_tag: subject_locality_from_string,
SubjectLocalityType_.c_tag: subject_locality_type__from_string,
AuthnContext.c_tag: authn_context_from_string,
AuthnContextType_.c_tag: authn_context_type__from_string,
AuthnContextClassRef.c_tag: authn_context_class_ref_from_string,
AuthnContextDeclRef.c_tag: authn_context_decl_ref_from_string,
AuthnContextDecl.c_tag: authn_context_decl_from_string,
AuthenticatingAuthority.c_tag: authenticating_authority_from_string,
AuthzDecisionStatement.c_tag: authz_decision_statement_from_string,
AuthzDecisionStatementType_.c_tag:
authz_decision_statement_type__from_string,
DecisionType_.c_tag: decision_type__from_string,
Action.c_tag: action_from_string,
ActionType_.c_tag: action_type__from_string,
Evidence.c_tag: evidence_from_string,
EvidenceType_.c_tag: evidence_type__from_string,
AttributeStatement.c_tag: attribute_statement_from_string,
AttributeStatementType_.c_tag: attribute_statement_type__from_string,
Attribute.c_tag: attribute_from_string,
AttributeType_.c_tag: attribute_type__from_string,
AttributeValue.c_tag: attribute_value_from_string,
EncryptedAttribute.c_tag: encrypted_attribute_from_string,
}
ELEMENT_BY_TAG = {
'BaseID': BaseID,
'NameID': NameID,
'NameIDType': NameIDType_,
'EncryptedElementType': EncryptedElementType_,
'EncryptedID': EncryptedID,
'Issuer': Issuer,
'AssertionIDRef': AssertionIDRef,
'AssertionURIRef': AssertionURIRef,
'Assertion': Assertion,
'AssertionType': AssertionType_,
'Subject': Subject,
'SubjectType': SubjectType_,
'SubjectConfirmation': SubjectConfirmation,
'SubjectConfirmationType': SubjectConfirmationType_,
'SubjectConfirmationData': SubjectConfirmationData,
'SubjectConfirmationDataType': SubjectConfirmationDataType_,
'KeyInfoConfirmationDataType': KeyInfoConfirmationDataType_,
'Conditions': Conditions,
'ConditionsType': ConditionsType_,
'Condition': Condition,
'AudienceRestriction': AudienceRestriction,
'AudienceRestrictionType': AudienceRestrictionType_,
'Audience': Audience,
'OneTimeUse': OneTimeUse,
'OneTimeUseType': OneTimeUseType_,
'ProxyRestriction': ProxyRestriction,
'ProxyRestrictionType': ProxyRestrictionType_,
'Advice': Advice,
'AdviceType': AdviceType_,
'EncryptedAssertion': EncryptedAssertion,
'Statement': Statement,
'AuthnStatement': AuthnStatement,
'AuthnStatementType': AuthnStatementType_,
'SubjectLocality': SubjectLocality,
'SubjectLocalityType': SubjectLocalityType_,
'AuthnContext': AuthnContext,
'AuthnContextType': AuthnContextType_,
'AuthnContextClassRef': AuthnContextClassRef,
'AuthnContextDeclRef': AuthnContextDeclRef,
'AuthnContextDecl': AuthnContextDecl,
'AuthenticatingAuthority': AuthenticatingAuthority,
'AuthzDecisionStatement': AuthzDecisionStatement,
'AuthzDecisionStatementType': AuthzDecisionStatementType_,
'DecisionType': DecisionType_,
'Action': Action,
'ActionType': ActionType_,
'Evidence': Evidence,
'EvidenceType': EvidenceType_,
'AttributeStatement': AttributeStatement,
'AttributeStatementType': AttributeStatementType_,
'Attribute': Attribute,
'AttributeType': AttributeType_,
'AttributeValue': AttributeValue,
'EncryptedAttribute': EncryptedAttribute,
'BaseIDAbstractType': BaseIDAbstractType_,
'ConditionAbstractType': ConditionAbstractType_,
'StatementAbstractType': StatementAbstractType_,
}
def factory(tag, **kwargs):
return ELEMENT_BY_TAG[tag](**kwargs)
| 38.185942
| 98
| 0.6752
| 8,490
| 76,601
| 5.754888
| 0.061602
| 0.029841
| 0.029043
| 0.038683
| 0.584478
| 0.513764
| 0.485479
| 0.472994
| 0.458196
| 0.411449
| 0
| 0.008473
| 0.223483
| 76,601
| 2,005
| 99
| 38.204988
| 0.812935
| 0.104711
| 0
| 0.402315
| 1
| 0.000681
| 0.141912
| 0.066663
| 0
| 0
| 0
| 0
| 0.078965
| 1
| 0.060585
| false
| 0.005446
| 0.008169
| 0.03744
| 0.394826
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
869f105dabe3ba66d48901d5ab1ef89fe7052f2e
| 624
|
py
|
Python
|
investment_report/migrations/0020_auto_20180911_1005.py
|
uktrade/pir-api
|
79747ceab042c42c287e2b7471f6dade70f68693
|
[
"MIT"
] | 1
|
2021-02-02T19:08:55.000Z
|
2021-02-02T19:08:55.000Z
|
investment_report/migrations/0020_auto_20180911_1005.py
|
uktrade/invest-pir-api
|
be56efddf9dfdf81c8557441a9a54d9a4dd4bab1
|
[
"MIT"
] | 21
|
2018-07-10T10:20:47.000Z
|
2022-03-24T09:36:29.000Z
|
investment_report/migrations/0020_auto_20180911_1005.py
|
uktrade/pir-api
|
79747ceab042c42c287e2b7471f6dade70f68693
|
[
"MIT"
] | 1
|
2021-02-04T11:28:37.000Z
|
2021-02-04T11:28:37.000Z
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.13 on 2018-09-11 10:05
from __future__ import unicode_literals
import config.s3
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('investment_report', '0019_auto_20180820_1304'),
]
operations = [
migrations.AddField(
model_name='contact',
name='website_href',
field=models.URLField(default='https://invest.great.gov.uk/contact/', help_text='Custom link for website (used for tracking)', max_length=255),
preserve_default=False,
)
]
| 27.130435
| 155
| 0.653846
| 74
| 624
| 5.324324
| 0.810811
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.079002
| 0.229167
| 624
| 22
| 156
| 28.363636
| 0.740125
| 0.110577
| 0
| 0
| 1
| 0
| 0.25
| 0.041667
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.2
| 0
| 0.4
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
86a7e9fe107833a210f5b3b41b68cc42c51f48ee
| 402
|
py
|
Python
|
physio2go/exercises/migrations/0003_auto_20161128_1753.py
|
hamole/physio2go
|
ebd14c9406e2b6818dc649e4863a734bf812e9b0
|
[
"MIT"
] | null | null | null |
physio2go/exercises/migrations/0003_auto_20161128_1753.py
|
hamole/physio2go
|
ebd14c9406e2b6818dc649e4863a734bf812e9b0
|
[
"MIT"
] | null | null | null |
physio2go/exercises/migrations/0003_auto_20161128_1753.py
|
hamole/physio2go
|
ebd14c9406e2b6818dc649e4863a734bf812e9b0
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.3 on 2016-11-28 06:53
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('exercises', '0002_auto_20161128_1718'),
]
operations = [
migrations.RenameModel(
old_name='Exercises',
new_name='Exercise',
),
]
| 20.1
| 49
| 0.621891
| 44
| 402
| 5.454545
| 0.818182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111486
| 0.263682
| 402
| 19
| 50
| 21.157895
| 0.699324
| 0.169154
| 0
| 0
| 1
| 0
| 0.148036
| 0.069486
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.166667
| 0
| 0.416667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
86dc7d357b174d6a4843f8edef2436d8cf30c367
| 742
|
py
|
Python
|
generator.py
|
Axonny/HexagonalHitori
|
582cb50b751796c30ed273f66c8ac9fa6f3dd089
|
[
"MIT"
] | null | null | null |
generator.py
|
Axonny/HexagonalHitori
|
582cb50b751796c30ed273f66c8ac9fa6f3dd089
|
[
"MIT"
] | null | null | null |
generator.py
|
Axonny/HexagonalHitori
|
582cb50b751796c30ed273f66c8ac9fa6f3dd089
|
[
"MIT"
] | null | null | null |
from hitori_generator import Generator
from argparse import ArgumentParser
def generate(n: int, output_file: str) -> None:
if n < 3 or n > 8:
print("It isn't valid size")
exit(4)
generator = Generator(n)
data = generator.generate()
lines = map(lambda x: ' '.join(map(str, x)), data)
with open(output_file, 'w', encoding='utf-8') as f:
f.write('\n'.join(lines))
def main():
p = ArgumentParser()
p.add_argument('filename', type=str, help='Path to output file')
p.add_argument('-s', "--size", type=int, default=3, help='Generate SxS field. size must be in [3, 8]. Default is 3')
args = p.parse_args()
generate(args.size, args.filename)
if __name__ == '__main__':
main()
| 27.481481
| 120
| 0.628032
| 111
| 742
| 4.072072
| 0.540541
| 0.066372
| 0.053097
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.013769
| 0.216981
| 742
| 26
| 121
| 28.538462
| 0.7642
| 0
| 0
| 0
| 1
| 0
| 0.171159
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.105263
| false
| 0
| 0.105263
| 0
| 0.210526
| 0.052632
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
86e6c529a13c62833d2d9d91e683f2c9cc85c2b8
| 16,246
|
py
|
Python
|
sdk/python/pulumi_azure_native/servicebus/v20210601preview/get_subscription.py
|
polivbr/pulumi-azure-native
|
09571f3bf6bdc4f3621aabefd1ba6c0d4ecfb0e7
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_native/servicebus/v20210601preview/get_subscription.py
|
polivbr/pulumi-azure-native
|
09571f3bf6bdc4f3621aabefd1ba6c0d4ecfb0e7
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_native/servicebus/v20210601preview/get_subscription.py
|
polivbr/pulumi-azure-native
|
09571f3bf6bdc4f3621aabefd1ba6c0d4ecfb0e7
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
__all__ = [
'GetSubscriptionResult',
'AwaitableGetSubscriptionResult',
'get_subscription',
]
@pulumi.output_type
class GetSubscriptionResult:
"""
Description of subscription resource.
"""
def __init__(__self__, accessed_at=None, auto_delete_on_idle=None, client_affine_properties=None, count_details=None, created_at=None, dead_lettering_on_filter_evaluation_exceptions=None, dead_lettering_on_message_expiration=None, default_message_time_to_live=None, duplicate_detection_history_time_window=None, enable_batched_operations=None, forward_dead_lettered_messages_to=None, forward_to=None, id=None, is_client_affine=None, lock_duration=None, max_delivery_count=None, message_count=None, name=None, requires_session=None, status=None, system_data=None, type=None, updated_at=None):
if accessed_at and not isinstance(accessed_at, str):
raise TypeError("Expected argument 'accessed_at' to be a str")
pulumi.set(__self__, "accessed_at", accessed_at)
if auto_delete_on_idle and not isinstance(auto_delete_on_idle, str):
raise TypeError("Expected argument 'auto_delete_on_idle' to be a str")
pulumi.set(__self__, "auto_delete_on_idle", auto_delete_on_idle)
if client_affine_properties and not isinstance(client_affine_properties, dict):
raise TypeError("Expected argument 'client_affine_properties' to be a dict")
pulumi.set(__self__, "client_affine_properties", client_affine_properties)
if count_details and not isinstance(count_details, dict):
raise TypeError("Expected argument 'count_details' to be a dict")
pulumi.set(__self__, "count_details", count_details)
if created_at and not isinstance(created_at, str):
raise TypeError("Expected argument 'created_at' to be a str")
pulumi.set(__self__, "created_at", created_at)
if dead_lettering_on_filter_evaluation_exceptions and not isinstance(dead_lettering_on_filter_evaluation_exceptions, bool):
raise TypeError("Expected argument 'dead_lettering_on_filter_evaluation_exceptions' to be a bool")
pulumi.set(__self__, "dead_lettering_on_filter_evaluation_exceptions", dead_lettering_on_filter_evaluation_exceptions)
if dead_lettering_on_message_expiration and not isinstance(dead_lettering_on_message_expiration, bool):
raise TypeError("Expected argument 'dead_lettering_on_message_expiration' to be a bool")
pulumi.set(__self__, "dead_lettering_on_message_expiration", dead_lettering_on_message_expiration)
if default_message_time_to_live and not isinstance(default_message_time_to_live, str):
raise TypeError("Expected argument 'default_message_time_to_live' to be a str")
pulumi.set(__self__, "default_message_time_to_live", default_message_time_to_live)
if duplicate_detection_history_time_window and not isinstance(duplicate_detection_history_time_window, str):
raise TypeError("Expected argument 'duplicate_detection_history_time_window' to be a str")
pulumi.set(__self__, "duplicate_detection_history_time_window", duplicate_detection_history_time_window)
if enable_batched_operations and not isinstance(enable_batched_operations, bool):
raise TypeError("Expected argument 'enable_batched_operations' to be a bool")
pulumi.set(__self__, "enable_batched_operations", enable_batched_operations)
if forward_dead_lettered_messages_to and not isinstance(forward_dead_lettered_messages_to, str):
raise TypeError("Expected argument 'forward_dead_lettered_messages_to' to be a str")
pulumi.set(__self__, "forward_dead_lettered_messages_to", forward_dead_lettered_messages_to)
if forward_to and not isinstance(forward_to, str):
raise TypeError("Expected argument 'forward_to' to be a str")
pulumi.set(__self__, "forward_to", forward_to)
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if is_client_affine and not isinstance(is_client_affine, bool):
raise TypeError("Expected argument 'is_client_affine' to be a bool")
pulumi.set(__self__, "is_client_affine", is_client_affine)
if lock_duration and not isinstance(lock_duration, str):
raise TypeError("Expected argument 'lock_duration' to be a str")
pulumi.set(__self__, "lock_duration", lock_duration)
if max_delivery_count and not isinstance(max_delivery_count, int):
raise TypeError("Expected argument 'max_delivery_count' to be a int")
pulumi.set(__self__, "max_delivery_count", max_delivery_count)
if message_count and not isinstance(message_count, float):
raise TypeError("Expected argument 'message_count' to be a float")
pulumi.set(__self__, "message_count", message_count)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if requires_session and not isinstance(requires_session, bool):
raise TypeError("Expected argument 'requires_session' to be a bool")
pulumi.set(__self__, "requires_session", requires_session)
if status and not isinstance(status, str):
raise TypeError("Expected argument 'status' to be a str")
pulumi.set(__self__, "status", status)
if system_data and not isinstance(system_data, dict):
raise TypeError("Expected argument 'system_data' to be a dict")
pulumi.set(__self__, "system_data", system_data)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
if updated_at and not isinstance(updated_at, str):
raise TypeError("Expected argument 'updated_at' to be a str")
pulumi.set(__self__, "updated_at", updated_at)
@property
@pulumi.getter(name="accessedAt")
def accessed_at(self) -> str:
"""
Last time there was a receive request to this subscription.
"""
return pulumi.get(self, "accessed_at")
@property
@pulumi.getter(name="autoDeleteOnIdle")
def auto_delete_on_idle(self) -> Optional[str]:
"""
ISO 8061 timeSpan idle interval after which the topic is automatically deleted. The minimum duration is 5 minutes.
"""
return pulumi.get(self, "auto_delete_on_idle")
@property
@pulumi.getter(name="clientAffineProperties")
def client_affine_properties(self) -> Optional['outputs.SBClientAffinePropertiesResponse']:
"""
Properties specific to client affine subscriptions.
"""
return pulumi.get(self, "client_affine_properties")
@property
@pulumi.getter(name="countDetails")
def count_details(self) -> 'outputs.MessageCountDetailsResponse':
"""
Message count details
"""
return pulumi.get(self, "count_details")
@property
@pulumi.getter(name="createdAt")
def created_at(self) -> str:
"""
Exact time the message was created.
"""
return pulumi.get(self, "created_at")
@property
@pulumi.getter(name="deadLetteringOnFilterEvaluationExceptions")
def dead_lettering_on_filter_evaluation_exceptions(self) -> Optional[bool]:
"""
Value that indicates whether a subscription has dead letter support on filter evaluation exceptions.
"""
return pulumi.get(self, "dead_lettering_on_filter_evaluation_exceptions")
@property
@pulumi.getter(name="deadLetteringOnMessageExpiration")
def dead_lettering_on_message_expiration(self) -> Optional[bool]:
"""
Value that indicates whether a subscription has dead letter support when a message expires.
"""
return pulumi.get(self, "dead_lettering_on_message_expiration")
@property
@pulumi.getter(name="defaultMessageTimeToLive")
def default_message_time_to_live(self) -> Optional[str]:
"""
ISO 8061 Default message timespan to live value. This is the duration after which the message expires, starting from when the message is sent to Service Bus. This is the default value used when TimeToLive is not set on a message itself.
"""
return pulumi.get(self, "default_message_time_to_live")
@property
@pulumi.getter(name="duplicateDetectionHistoryTimeWindow")
def duplicate_detection_history_time_window(self) -> Optional[str]:
"""
ISO 8601 timeSpan structure that defines the duration of the duplicate detection history. The default value is 10 minutes.
"""
return pulumi.get(self, "duplicate_detection_history_time_window")
@property
@pulumi.getter(name="enableBatchedOperations")
def enable_batched_operations(self) -> Optional[bool]:
"""
Value that indicates whether server-side batched operations are enabled.
"""
return pulumi.get(self, "enable_batched_operations")
@property
@pulumi.getter(name="forwardDeadLetteredMessagesTo")
def forward_dead_lettered_messages_to(self) -> Optional[str]:
"""
Queue/Topic name to forward the Dead Letter message
"""
return pulumi.get(self, "forward_dead_lettered_messages_to")
@property
@pulumi.getter(name="forwardTo")
def forward_to(self) -> Optional[str]:
"""
Queue/Topic name to forward the messages
"""
return pulumi.get(self, "forward_to")
@property
@pulumi.getter
def id(self) -> str:
"""
Resource Id
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="isClientAffine")
def is_client_affine(self) -> Optional[bool]:
"""
Value that indicates whether the subscription has an affinity to the client id.
"""
return pulumi.get(self, "is_client_affine")
@property
@pulumi.getter(name="lockDuration")
def lock_duration(self) -> Optional[str]:
"""
ISO 8061 lock duration timespan for the subscription. The default value is 1 minute.
"""
return pulumi.get(self, "lock_duration")
@property
@pulumi.getter(name="maxDeliveryCount")
def max_delivery_count(self) -> Optional[int]:
"""
Number of maximum deliveries.
"""
return pulumi.get(self, "max_delivery_count")
@property
@pulumi.getter(name="messageCount")
def message_count(self) -> float:
"""
Number of messages.
"""
return pulumi.get(self, "message_count")
@property
@pulumi.getter
def name(self) -> str:
"""
Resource name
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="requiresSession")
def requires_session(self) -> Optional[bool]:
"""
Value indicating if a subscription supports the concept of sessions.
"""
return pulumi.get(self, "requires_session")
@property
@pulumi.getter
def status(self) -> Optional[str]:
"""
Enumerates the possible values for the status of a messaging entity.
"""
return pulumi.get(self, "status")
@property
@pulumi.getter(name="systemData")
def system_data(self) -> 'outputs.SystemDataResponse':
"""
The system meta data relating to this resource.
"""
return pulumi.get(self, "system_data")
@property
@pulumi.getter
def type(self) -> str:
"""
Resource type
"""
return pulumi.get(self, "type")
@property
@pulumi.getter(name="updatedAt")
def updated_at(self) -> str:
"""
The exact time the message was updated.
"""
return pulumi.get(self, "updated_at")
class AwaitableGetSubscriptionResult(GetSubscriptionResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetSubscriptionResult(
accessed_at=self.accessed_at,
auto_delete_on_idle=self.auto_delete_on_idle,
client_affine_properties=self.client_affine_properties,
count_details=self.count_details,
created_at=self.created_at,
dead_lettering_on_filter_evaluation_exceptions=self.dead_lettering_on_filter_evaluation_exceptions,
dead_lettering_on_message_expiration=self.dead_lettering_on_message_expiration,
default_message_time_to_live=self.default_message_time_to_live,
duplicate_detection_history_time_window=self.duplicate_detection_history_time_window,
enable_batched_operations=self.enable_batched_operations,
forward_dead_lettered_messages_to=self.forward_dead_lettered_messages_to,
forward_to=self.forward_to,
id=self.id,
is_client_affine=self.is_client_affine,
lock_duration=self.lock_duration,
max_delivery_count=self.max_delivery_count,
message_count=self.message_count,
name=self.name,
requires_session=self.requires_session,
status=self.status,
system_data=self.system_data,
type=self.type,
updated_at=self.updated_at)
def get_subscription(namespace_name: Optional[str] = None,
resource_group_name: Optional[str] = None,
subscription_name: Optional[str] = None,
topic_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetSubscriptionResult:
"""
Description of subscription resource.
:param str namespace_name: The namespace name
:param str resource_group_name: Name of the Resource group within the Azure subscription.
:param str subscription_name: The subscription name.
:param str topic_name: The topic name.
"""
__args__ = dict()
__args__['namespaceName'] = namespace_name
__args__['resourceGroupName'] = resource_group_name
__args__['subscriptionName'] = subscription_name
__args__['topicName'] = topic_name
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('azure-native:servicebus/v20210601preview:getSubscription', __args__, opts=opts, typ=GetSubscriptionResult).value
return AwaitableGetSubscriptionResult(
accessed_at=__ret__.accessed_at,
auto_delete_on_idle=__ret__.auto_delete_on_idle,
client_affine_properties=__ret__.client_affine_properties,
count_details=__ret__.count_details,
created_at=__ret__.created_at,
dead_lettering_on_filter_evaluation_exceptions=__ret__.dead_lettering_on_filter_evaluation_exceptions,
dead_lettering_on_message_expiration=__ret__.dead_lettering_on_message_expiration,
default_message_time_to_live=__ret__.default_message_time_to_live,
duplicate_detection_history_time_window=__ret__.duplicate_detection_history_time_window,
enable_batched_operations=__ret__.enable_batched_operations,
forward_dead_lettered_messages_to=__ret__.forward_dead_lettered_messages_to,
forward_to=__ret__.forward_to,
id=__ret__.id,
is_client_affine=__ret__.is_client_affine,
lock_duration=__ret__.lock_duration,
max_delivery_count=__ret__.max_delivery_count,
message_count=__ret__.message_count,
name=__ret__.name,
requires_session=__ret__.requires_session,
status=__ret__.status,
system_data=__ret__.system_data,
type=__ret__.type,
updated_at=__ret__.updated_at)
| 45.253482
| 595
| 0.70048
| 1,930
| 16,246
| 5.512435
| 0.118653
| 0.028198
| 0.033838
| 0.064856
| 0.41987
| 0.260551
| 0.193815
| 0.119936
| 0.074161
| 0.068521
| 0
| 0.002276
| 0.215622
| 16,246
| 358
| 596
| 45.379888
| 0.832614
| 0.12243
| 0
| 0.11157
| 1
| 0
| 0.189794
| 0.083492
| 0
| 0
| 0
| 0
| 0
| 1
| 0.107438
| false
| 0
| 0.024793
| 0
| 0.243802
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
86f16e97aa13775a35aa0ced03caeac309db0c51
| 4,627
|
py
|
Python
|
{{cookiecutter.repo_name}}/src/mix_with_scaper.py
|
nussl/cookiecutter
|
5df8512592778ea7155b05e3e4b54676227968b0
|
[
"MIT"
] | null | null | null |
{{cookiecutter.repo_name}}/src/mix_with_scaper.py
|
nussl/cookiecutter
|
5df8512592778ea7155b05e3e4b54676227968b0
|
[
"MIT"
] | null | null | null |
{{cookiecutter.repo_name}}/src/mix_with_scaper.py
|
nussl/cookiecutter
|
5df8512592778ea7155b05e3e4b54676227968b0
|
[
"MIT"
] | null | null | null |
import gin
from scaper import Scaper, generate_from_jams
import copy
import logging
import p_tqdm
import nussl
import os
import numpy as np
def _reset_event_spec(sc):
sc.reset_fg_event_spec()
sc.reset_bg_event_spec()
def check_mixture(path_to_mix):
mix_signal = nussl.AudioSignal(path_to_mix)
if mix_signal.rms() < .01:
return False
return True
def make_one_mixture(sc, path_to_file, num_sources,
event_parameters, allow_repeated_label):
"""
Creates a single mixture, incoherent. Instantiates according to
the event parameters for each source.
"""
check = False
while not check:
for j in range(num_sources):
sc.add_event(**event_parameters)
sc.generate(
path_to_file,
path_to_file.replace('.wav', '.jams'),
no_audio=False,
allow_repeated_label=allow_repeated_label,
save_isolated_events=True,
)
_reset_event_spec(sc)
check = check_mixture(path_to_file)
def instantiate_and_get_event_spec(sc, master_label, event_parameters):
_reset_event_spec(sc)
_event_parameters = copy.deepcopy(event_parameters)
_event_parameters['label'] = ('const', master_label)
sc.add_event(**_event_parameters)
event = sc._instantiate_event(sc.fg_spec[-1])
_reset_event_spec(sc)
return sc, event
def make_one_mixture_coherent(sc, path_to_file, labels, event_parameters,
allow_repeated_label):
check = False
while not check:
sc, event = instantiate_and_get_event_spec(
sc, labels[0], event_parameters)
for label in labels:
try:
sc.add_event(
label=('const', label),
source_file=('const', event.source_file.replace(labels[0], label)),
source_time=('const', event.source_time),
event_time=('const', 0),
event_duration=('const', sc.duration),
snr=event_parameters['snr'],
pitch_shift=('const', event.pitch_shift),
time_stretch=('const', event.time_stretch)
)
except:
logging.exception(
f"Got an error for {label} @ {_source_file}. Moving on...")
sc.generate(
path_to_file,
path_to_file.replace('.wav', '.jams'),
no_audio=False,
allow_repeated_label=allow_repeated_label,
save_isolated_events=True,
)
sc.fg_spec = []
check = check_mixture(path_to_file)
@gin.configurable
def make_scaper_datasets(scopes=['train', 'val']):
for scope in scopes:
with gin.config_scope(scope):
mix_with_scaper()
@gin.configurable
def mix_with_scaper(num_mixtures, foreground_path, background_path,
                    scene_duration, sample_rate, target_folder,
                    event_parameters, num_sources=None, labels=None,
                    coherent=False, allow_repeated_label=False,
                    ref_db=-40, bitdepth=16, seed=0, num_workers=1):
    """Create ``num_mixtures`` Scaper mixtures in ``target_folder``.

    Builds one Scaper generator per mixture (each with its own seed), renders
    the first mixture serially as a smoke test, then renders the rest in
    parallel with ``p_tqdm.p_map``.

    Args:
        num_mixtures: number of mixtures to generate.
        foreground_path: path to foreground source material.
        background_path: path to background material; falls back to
            ``foreground_path`` when None.
        scene_duration: duration of each mixture in seconds.
        sample_rate: output sample rate.
        target_folder: output directory (created if missing).
        event_parameters: Scaper event distribution spec, passed through.
        num_sources: number of sources per mixture (incoherent mode).
        labels: explicit labels (required for coherent mode).
        coherent: if True, use make_one_mixture_coherent; else make_one_mixture.
        allow_repeated_label: forwarded to the mixture function.
        ref_db: reference loudness for Scaper.
        bitdepth: output bit depth.
        seed: global seed for nussl's RNG utilities.
        num_workers: CPU count for the parallel map.

    Raises:
        ValueError: when neither ``labels`` nor ``num_sources`` is given, or
            when ``coherent`` is requested without ``labels``.
    """
    nussl.utils.seed(seed)
    os.makedirs(target_folder, exist_ok=True)
    # NOTE(review): randint(100) yields only 100 distinct base seeds, and it
    # depends on the global numpy RNG state set above — confirm intended.
    scaper_seed = np.random.randint(100)
    logging.info('Starting mixing.')
    if num_sources is None and labels is None:
        raise ValueError("One of labels or num_sources must be set!")
    if coherent and labels is None:
        raise ValueError("Coherent mixing requires explicit labels!")
    generators = []
    if background_path is None:
        background_path = foreground_path
    # One generator per mixture, each with a consecutive random seed.
    for i in range(num_mixtures):
        sc = Scaper(
            scene_duration,
            fg_path=foreground_path,
            bg_path=background_path,
            random_state=scaper_seed,
        )
        sc.ref_db = ref_db
        sc.sr = sample_rate
        sc.bitdepth = bitdepth
        generators.append(sc)
        scaper_seed += 1
    mix_func = make_one_mixture_coherent if coherent else make_one_mixture
    def arg_tuple(i):
        # Positional arguments for mix_func; the third argument is labels in
        # coherent mode and num_sources otherwise (their signatures differ).
        _args = (
            generators[i],
            os.path.join(target_folder, f'{i:08d}.wav'),
            labels if coherent else num_sources,
            event_parameters,
            allow_repeated_label
        )
        return _args
    args = [arg_tuple(i) for i in range(num_mixtures)]
    # do one by itself for testing
    mix_func(*args[0])
    # Transpose the remaining arg tuples into per-parameter lists, the shape
    # p_tqdm.p_map expects (one iterable per positional parameter).
    args = list(zip(*args[1:]))
    args = [list(a) for a in args]
    # now do the rest in parallel
    p_tqdm.p_map(mix_func, *args, num_cpus=num_workers)
| 31.691781
| 87
| 0.612708
| 573
| 4,627
| 4.643979
| 0.277487
| 0.073281
| 0.030064
| 0.024051
| 0.2469
| 0.198422
| 0.118001
| 0.085682
| 0.085682
| 0.085682
| 0
| 0.006163
| 0.298682
| 4,627
| 145
| 88
| 31.910345
| 0.813867
| 0.034364
| 0
| 0.194915
| 1
| 0
| 0.053519
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.067797
| false
| 0
| 0.067797
| 0
| 0.169492
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
81145bece0e3560e4fd661b7085c6a1e4f6811f2
| 910
|
py
|
Python
|
djangocms_redirect/migrations/0003_auto_20190810_1009.py
|
vsalat/djangocms-redirect
|
a2577f08430b6b65ae4a51293f861b697bf4ab9d
|
[
"BSD-3-Clause"
] | null | null | null |
djangocms_redirect/migrations/0003_auto_20190810_1009.py
|
vsalat/djangocms-redirect
|
a2577f08430b6b65ae4a51293f861b697bf4ab9d
|
[
"BSD-3-Clause"
] | null | null | null |
djangocms_redirect/migrations/0003_auto_20190810_1009.py
|
vsalat/djangocms-redirect
|
a2577f08430b6b65ae4a51293f861b697bf4ab9d
|
[
"BSD-3-Clause"
] | null | null | null |
# Generated by Django 2.2.4 on 2019-08-10 08:09
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated schema migration: adds the `catchall_redirect` and
    # `subpath_match` boolean flags to the `redirect` model. Do not edit the
    # operations by hand — Django tracks applied migrations by content/name.
    dependencies = [
        ('djangocms_redirect', '0002_auto_20170321_1807'),
    ]
    operations = [
        migrations.AddField(
            model_name='redirect',
            name='catchall_redirect',
            field=models.BooleanField(default=False, help_text='If selected all the pages starting with the given string will be redirected to the given redirect path', verbose_name='Catchall redirect'),
        ),
        migrations.AddField(
            model_name='redirect',
            name='subpath_match',
            field=models.BooleanField(default=False, help_text='If selected all the pages starting with the given string will be redirected by replacing the matching subpath with the provided redirect path.', verbose_name='Subpath match'),
        ),
    ]
| 37.916667
| 239
| 0.679121
| 109
| 910
| 5.559633
| 0.504587
| 0.034653
| 0.075908
| 0.089109
| 0.478548
| 0.478548
| 0.349835
| 0.349835
| 0.349835
| 0.349835
| 0
| 0.044476
| 0.234066
| 910
| 23
| 240
| 39.565217
| 0.824964
| 0.049451
| 0
| 0.352941
| 1
| 0.058824
| 0.418308
| 0.026651
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.058824
| 0
| 0.235294
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
8123dd148da3e7a93c319e5be784b12da6c27afd
| 22,630
|
py
|
Python
|
pymatgen/analysis/wulff.py
|
hpatel1567/pymatgen
|
8304b25464206c74305214e45935df90bab95500
|
[
"MIT"
] | 1
|
2020-02-08T08:20:45.000Z
|
2020-02-08T08:20:45.000Z
|
pymatgen/analysis/wulff.py
|
hpatel1567/pymatgen
|
8304b25464206c74305214e45935df90bab95500
|
[
"MIT"
] | null | null | null |
pymatgen/analysis/wulff.py
|
hpatel1567/pymatgen
|
8304b25464206c74305214e45935df90bab95500
|
[
"MIT"
] | null | null | null |
# coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
"""
This module define a WulffShape class to generate the Wulff shape from
a lattice, a list of indices and their corresponding surface energies,
and the total area and volume of the wulff shape,the weighted surface energy,
the anisotropy and shape_factor can also be calculated.
In support of plotting from a given view in terms of miller index.
The lattice is from the conventional unit cell, and (hkil) for hexagonal
lattices.
If you use this code extensively, consider citing the following:
Tran, R.; Xu, Z.; Radhakrishnan, B.; Winston, D.; Persson, K. A.; Ong, S. P.
(2016). Surface energies of elemental crystals. Scientific Data.
"""
from pymatgen.core.structure import Structure
from pymatgen.util.coord import get_angle
import numpy as np
import scipy as sp
from scipy.spatial import ConvexHull
import logging
import warnings
__author__ = 'Zihan Xu, Richard Tran, Shyue Ping Ong'
__copyright__ = 'Copyright 2013, The Materials Virtual Lab'
__version__ = '0.1'
__maintainer__ = 'Zihan Xu'
__email__ = 'zix009@eng.ucsd.edu'
__date__ = 'May 5 2016'
logger = logging.getLogger(__name__)
def hkl_tuple_to_str(hkl):
    """
    Prepare a Miller index for display on plots: "($hkl$)" with negative
    components rendered as LaTeX overlines.

    Args:
        hkl: in the form of [h, k, l] or (h, k, l)
    """
    pieces = ['($']
    for component in hkl:
        if component < 0:
            pieces.append('\\overline{' + str(-component) + '}')
        else:
            pieces.append(str(component))
    pieces.append('$)')
    return ''.join(pieces)
def get_tri_area(pts):
    """
    Given a list of coords for 3 points,
    Compute the area of this triangle.

    Args:
        pts: [a, b, c] three points

    Returns:
        float: area = |v1 x v2| / 2.
    """
    a, b, c = pts[0], pts[1], pts[2]
    v1 = np.array(b) - np.array(a)
    v2 = np.array(c) - np.array(a)
    # Fix: `sp.cross` and `sp.linalg.norm` relied on SciPy's deprecated
    # top-level NumPy aliases (removed in modern SciPy); use NumPy directly.
    area_tri = abs(np.linalg.norm(np.cross(v1, v2)) / 2)
    return area_tri
class WulffFacet:
    """
    Helper container for each Wulff plane.

    Stores the facet's unit normal, surface energy, the normal and dual
    points, bookkeeping indices, and the Miller index, plus mutable lists
    (`points`, `outer_lines`) filled in later during Wulff construction.
    """
    def __init__(self, normal, e_surf, normal_pt, dual_pt, index, m_ind_orig,
                 miller):
        """
        :param normal: facet unit normal.
        :param e_surf: surface energy (plane distance from the origin).
        :param normal_pt: point normal * e_surf.
        :param dual_pt: point normal / e_surf.
        :param index: color/plane index.
        :param m_ind_orig: index of the originating input Miller index.
        :param miller: Miller index tuple.
        """
        (self.normal, self.e_surf, self.normal_pt, self.dual_pt,
         self.index, self.m_ind_orig, self.miller) = (
            normal, e_surf, normal_pt, dual_pt, index, m_ind_orig, miller)
        self.points = []
        self.outer_lines = []
class WulffShape:
    """
    Generate Wulff Shape from list of miller index and surface energies,
    with given conventional unit cell.
    surface energy (Jm^2) is the length of normal.
    Wulff shape is the convex hull.
    Based on:
    http://scipy.github.io/devdocs/generated/scipy.spatial.ConvexHull.html
    Process:
        1. get wulff simplices
        2. label with color
        3. get wulff_area and other properties
    .. attribute:: debug (bool)
    .. attribute:: alpha
        transparency
    .. attribute:: color_set
    .. attribute:: grid_off (bool)
    .. attribute:: axis_off (bool)
    .. attribute:: show_area
    .. attribute:: off_color
        color of facets off wulff
    .. attribute:: structure
        Structure object, input conventional unit cell (with H ) from lattice
    .. attribute:: miller_list
        list of input miller index, for hcp in the form of hkil
    .. attribute:: hkl_list
        modify hkill to hkl, in the same order with input_miller
    .. attribute:: e_surf_list
        list of input surface energies, in the same order with input_miller
    .. attribute:: lattice
        Lattice object, the input lattice for the conventional unit cell
    .. attribute:: facets
        [WulffFacet] for all facets considering symm
    .. attribute:: dual_cv_simp
        simplices from the dual convex hull (dual_pt)
    .. attribute:: wulff_pt_list
    .. attribute:: wulff_cv_simp
        simplices from the convex hull of wulff_pt_list
    .. attribute:: on_wulff
        list for all input_miller, True is on wulff.
    .. attribute:: color_area
        list for all input_miller, total area on wulff, off_wulff = 0.
    .. attribute:: miller_area
        ($hkl$): area for all input_miller
    """
    def __init__(self, lattice, miller_list, e_surf_list, symprec=1e-5):
        """
        Args:
            lattice: Lattice object of the conventional unit cell
            miller_list ([(hkl), ...]: list of hkl or hkil for hcp
            e_surf_list ([float]): list of corresponding surface energies
            symprec (float): for recp_operation, default is 1e-5.
        """
        if any([se < 0 for se in e_surf_list]):
            warnings.warn("Unphysical (negative) surface energy detected.")
        self.color_ind = list(range(len(miller_list)))
        self.input_miller_fig = [hkl_tuple_to_str(x) for x in miller_list]
        # store input data
        # Dummy single-H structure carries the lattice for symmetry analysis.
        self.structure = Structure(lattice, ["H"], [[0, 0, 0]])
        self.miller_list = tuple([tuple(x) for x in miller_list])
        # Collapse (h, k, i, l) hexagonal indices to (h, k, l).
        self.hkl_list = tuple([(x[0], x[1], x[-1]) for x in miller_list])
        self.e_surf_list = tuple(e_surf_list)
        self.lattice = lattice
        self.symprec = symprec
        # 2. get all the data for wulff construction
        # get all the surface normal from get_all_miller_e()
        self.facets = self._get_all_miller_e()
        logger.debug(len(self.facets))
        # 3. consider the dual condition
        dual_pts = [x.dual_pt for x in self.facets]
        dual_convex = ConvexHull(dual_pts)
        dual_cv_simp = dual_convex.simplices
        # simplices (ndarray of ints, shape (nfacet, ndim))
        # list of [i, j, k] , ndim = 3
        # i, j, k: ind for normal_e_m
        # recalculate the dual of dual, get the wulff shape.
        # conner <-> surface
        # get cross point from the simplices of the dual convex hull
        wulff_pt_list = [self._get_cross_pt_dual_simp(dual_simp)
                         for dual_simp in dual_cv_simp]
        wulff_convex = ConvexHull(wulff_pt_list)
        wulff_cv_simp = wulff_convex.simplices
        logger.debug(", ".join([str(len(x)) for x in wulff_cv_simp]))
        # store simplices and convex
        self.dual_cv_simp = dual_cv_simp
        self.wulff_pt_list = wulff_pt_list
        self.wulff_cv_simp = wulff_cv_simp
        self.wulff_convex = wulff_convex
        self.on_wulff, self.color_area = self._get_simpx_plane()
        miller_area = []
        for m, in_mill_fig in enumerate(self.input_miller_fig):
            miller_area.append(
                in_mill_fig + ' : ' + str(round(self.color_area[m], 4)))
        self.miller_area = miller_area
    def _get_all_miller_e(self):
        """
        from self:
            get miller_list(unique_miller), e_surf_list and symmetry
            operations(symmops) according to lattice
        apply symmops to get all the miller index, then get normal,
        get all the facets functions for wulff shape calculation:
            |normal| = 1, e_surf is plane's distance to (0, 0, 0),
            normal[0]x + normal[1]y + normal[2]z = e_surf
        return:
            [WulffFacet]
        """
        all_hkl = []
        color_ind = self.color_ind
        planes = []
        recp = self.structure.lattice.reciprocal_lattice_crystallographic
        recp_symmops = self.lattice.get_recp_symmetry_operation(self.symprec)
        for i, (hkl, energy) in enumerate(zip(self.hkl_list,
                                              self.e_surf_list)):
            for op in recp_symmops:
                miller = tuple([int(x) for x in op.operate(hkl)])
                if miller not in all_hkl:
                    all_hkl.append(miller)
                    normal = recp.get_cartesian_coords(miller)
                    # NOTE(review): sp.linalg.norm leans on SciPy's deprecated
                    # NumPy aliasing — np.linalg.norm is the modern equivalent.
                    normal /= sp.linalg.norm(normal)
                    normal_pt = [x * energy for x in normal]
                    dual_pt = [x / energy for x in normal]
                    # divmod(i, n)[1] == i % n: cycle through the color indices.
                    color_plane = color_ind[divmod(i, len(color_ind))[1]]
                    planes.append(WulffFacet(normal, energy, normal_pt,
                                             dual_pt, color_plane, i, hkl))
        # sort by e_surf
        planes.sort(key=lambda x: x.e_surf)
        return planes
    def _get_cross_pt_dual_simp(self, dual_simp):
        """
        |normal| = 1, e_surf is plane's distance to (0, 0, 0),
        plane function:
            normal[0]x + normal[1]y + normal[2]z = e_surf
        from self:
            normal_e_m to get the plane functions
            dual_simp: (i, j, k) simplices from the dual convex hull
                i, j, k: plane index(same order in normal_e_m)
        """
        matrix_surfs = [self.facets[dual_simp[i]].normal for i in range(3)]
        matrix_e = [self.facets[dual_simp[i]].e_surf for i in range(3)]
        # Solve the 3-plane intersection: normal_matrix @ pt = e_surf vector.
        # NOTE(review): sp.dot is the deprecated SciPy alias of np.dot.
        cross_pt = sp.dot(sp.linalg.inv(matrix_surfs), matrix_e)
        return cross_pt
    def _get_simpx_plane(self):
        """
        Locate the plane for simpx of on wulff_cv, by comparing the center of
        the simpx triangle with the plane functions.
        """
        on_wulff = [False] * len(self.miller_list)
        surface_area = [0.0] * len(self.miller_list)
        for simpx in self.wulff_cv_simp:
            pts = [self.wulff_pt_list[simpx[i]] for i in range(3)]
            center = np.sum(pts, 0) / 3.0
            # check whether the center of the simplices is on one plane
            for plane in self.facets:
                abs_diff = abs(np.dot(plane.normal, center) - plane.e_surf)
                # Hard-coded tolerance for "center lies on this plane".
                if abs_diff < 1e-5:
                    on_wulff[plane.index] = True
                    surface_area[plane.index] += get_tri_area(pts)
                    plane.points.append(pts)
                    plane.outer_lines.append([simpx[0], simpx[1]])
                    plane.outer_lines.append([simpx[1], simpx[2]])
                    plane.outer_lines.append([simpx[0], simpx[2]])
                    # already find the plane, move to the next simplices
                    break
        for plane in self.facets:
            plane.outer_lines.sort()
            # Edges shared by two triangles of the same facet are interior;
            # keep only boundary edges (appearing once, not twice).
            plane.outer_lines = [line for line in plane.outer_lines
                                 if plane.outer_lines.count(line) != 2]
        return on_wulff, surface_area
    def _get_colors(self, color_set, alpha, off_color, custom_colors={}):
        """
        assign colors according to the surface energies of on_wulff facets.

        NOTE(review): `custom_colors={}` is a mutable default argument; it is
        only read here, so behavior is safe, but `None` + fallback is the
        conventional idiom.

        return:
            (color_list, color_proxy, color_proxy_on_wulff, miller_on_wulff,
            e_surf_on_wulff_list)
        """
        import matplotlib as mpl
        import matplotlib.pyplot as plt
        color_list = [off_color] * len(self.hkl_list)
        color_proxy_on_wulff = []
        miller_on_wulff = []
        e_surf_on_wulff = [(i, e_surf)
                           for i, e_surf in enumerate(self.e_surf_list)
                           if self.on_wulff[i]]
        c_map = plt.get_cmap(color_set)
        e_surf_on_wulff.sort(key=lambda x: x[1], reverse=False)
        e_surf_on_wulff_list = [x[1] for x in e_surf_on_wulff]
        if len(e_surf_on_wulff) > 1:
            cnorm = mpl.colors.Normalize(vmin=min(e_surf_on_wulff_list),
                                         vmax=max(e_surf_on_wulff_list))
        else:
            # if there is only one hkl on wulff, choose the color of the median
            cnorm = mpl.colors.Normalize(vmin=min(e_surf_on_wulff_list) - 0.1,
                                         vmax=max(e_surf_on_wulff_list) + 0.1)
        scalar_map = mpl.cm.ScalarMappable(norm=cnorm, cmap=c_map)
        for i, e_surf in e_surf_on_wulff:
            color_list[i] = scalar_map.to_rgba(e_surf, alpha=alpha)
            # Custom colors override the colormap for user-specified facets.
            if tuple(self.miller_list[i]) in custom_colors.keys():
                color_list[i] = custom_colors[tuple(self.miller_list[i])]
            color_proxy_on_wulff.append(
                plt.Rectangle((2, 2), 1, 1, fc=color_list[i], alpha=alpha))
            miller_on_wulff.append(self.input_miller_fig[i])
        scalar_map.set_array([x[1] for x in e_surf_on_wulff])
        color_proxy = [plt.Rectangle((2, 2), 1, 1, fc=x, alpha=alpha)
                       for x in color_list]
        return color_list, color_proxy, color_proxy_on_wulff, miller_on_wulff, e_surf_on_wulff_list
    def show(self, *args, **kwargs):
        r"""
        Show the Wulff plot.

        Args:
            *args: Passed to get_plot.
            **kwargs: Passed to get_plot.
        """
        self.get_plot(*args, **kwargs).show()
    def get_line_in_facet(self, facet):
        """
        Returns the sorted pts in a facet used to draw a line
        """
        lines = list(facet.outer_lines)
        pt = []
        prev = None
        while len(lines) > 0:
            if prev is None:
                l = lines.pop(0)
            else:
                # Find the next edge that continues from the previous endpoint.
                for i, l in enumerate(lines):
                    if prev in l:
                        l = lines.pop(i)
                        if l[1] == prev:
                            l.reverse()
                        break
            # make sure the lines are connected one by one.
            # find the way covering all pts and facets
            pt.append(self.wulff_pt_list[l[0]].tolist())
            pt.append(self.wulff_pt_list[l[1]].tolist())
            prev = l[1]
        return pt
    def get_plot(self, color_set='PuBu', grid_off=True, axis_off=True,
                 show_area=False, alpha=1, off_color='red', direction=None,
                 bar_pos=(0.75, 0.15, 0.05, 0.65), bar_on=False, units_in_JPERM2=True,
                 legend_on=True, aspect_ratio=(8, 8), custom_colors={}):
        """
        Get the Wulff shape plot.

        Args:
            color_set: default is 'PuBu'
            grid_off (bool): default is True
            axis_off (bool): default is True
            show_area (bool): default is False
            alpha (float): chosen from 0 to 1 (float), default is 1
            off_color: Default color for facets not present on the Wulff shape.
            direction: default is (1, 1, 1)
            bar_pos: default is [0.75, 0.15, 0.05, 0.65]
            bar_on (bool): default is False
            legend_on (bool): default is True
            aspect_ratio: default is (8, 8)
            custom_colors ({(h,k,l}: [r,g,b,alpha}): Customize color of each
                facet with a dictionary. The key is the corresponding Miller
                index and value is the color. Undefined facets will use default
                color site. Note: If you decide to set your own colors, it
                probably won't make any sense to have the color bar on.
        Return:
            (matplotlib.pyplot)
        """
        import matplotlib as mpl
        import matplotlib.pyplot as plt
        import mpl_toolkits.mplot3d as mpl3
        color_list, color_proxy, color_proxy_on_wulff, miller_on_wulff, e_surf_on_wulff = self._get_colors(
            color_set, alpha, off_color, custom_colors=custom_colors)
        if not direction:
            # If direction is not specified, use the miller indices of
            # maximum area.
            direction = max(self.area_fraction_dict.items(),
                            key=lambda x: x[1])[0]
        fig = plt.figure()
        fig.set_size_inches(aspect_ratio[0], aspect_ratio[1])
        azim, elev = self._get_azimuth_elev([direction[0], direction[1],
                                             direction[-1]])
        wulff_pt_list = self.wulff_pt_list
        ax = mpl3.Axes3D(fig, azim=azim, elev=elev)
        for plane in self.facets:
            # check whether [pts] is empty
            if len(plane.points) < 1:
                # empty, plane is not on_wulff.
                continue
            # assign the color for on_wulff facets according to its
            # index and the color_list for on_wulff
            plane_color = color_list[plane.index]
            pt = self.get_line_in_facet(plane)
            # plot from the sorted pts from [simpx]
            tri = mpl3.art3d.Poly3DCollection([pt])
            tri.set_color(plane_color)
            tri.set_edgecolor("#808080")
            ax.add_collection3d(tri)
        # set ranges of x, y, z
        # find the largest distance between on_wulff pts and the origin,
        # to ensure complete and consistent display for all directions
        r_range = max([np.linalg.norm(x) for x in wulff_pt_list])
        ax.set_xlim([-r_range * 1.1, r_range * 1.1])
        ax.set_ylim([-r_range * 1.1, r_range * 1.1])
        ax.set_zlim([-r_range * 1.1, r_range * 1.1])
        # add legend
        if legend_on:
            color_proxy = color_proxy
            if show_area:
                ax.legend(color_proxy, self.miller_area, loc='upper left',
                          bbox_to_anchor=(0, 1), fancybox=True, shadow=False)
            else:
                ax.legend(color_proxy_on_wulff, miller_on_wulff,
                          loc='upper center',
                          bbox_to_anchor=(0.5, 1), ncol=3, fancybox=True,
                          shadow=False)
        ax.set_xlabel('x')
        ax.set_ylabel('y')
        ax.set_zlabel('z')
        # Add colorbar
        if bar_on:
            cmap = plt.get_cmap(color_set)
            cmap.set_over('0.25')
            cmap.set_under('0.75')
            bounds = [round(e, 2) for e in e_surf_on_wulff]
            bounds.append(1.2 * bounds[-1])
            norm = mpl.colors.BoundaryNorm(bounds, cmap.N)
            # display surface energies
            ax1 = fig.add_axes(bar_pos)
            cbar = mpl.colorbar.ColorbarBase(
                ax1, cmap=cmap, norm=norm, boundaries=[0] + bounds + [10],
                extend='both', ticks=bounds[:-1], spacing='proportional',
                orientation='vertical')
            units = "$J/m^2$" if units_in_JPERM2 else r"$eV/\AA^2$"
            cbar.set_label('Surface Energies (%s)' % (units), fontsize=100)
        if grid_off:
            ax.grid('off')
        if axis_off:
            ax.axis('off')
        return plt
    def _get_azimuth_elev(self, miller_index):
        """
        Args:
            miller_index: viewing direction

        Returns:
            azim, elev for plotting
        """
        if miller_index == (0, 0, 1) or miller_index == (0, 0, 0, 1):
            # Looking straight down the c-axis.
            return 0, 90
        else:
            cart = self.lattice.get_cartesian_coords(miller_index)
            azim = get_angle([cart[0], cart[1], 0], (1, 0, 0))
            v = [cart[0], cart[1], 0]
            elev = get_angle(cart, v)
            return azim, elev
    @property
    def volume(self):
        """
        Volume of the Wulff shape
        """
        return self.wulff_convex.volume
    @property
    def miller_area_dict(self):
        """
        Returns {hkl: area_hkl on wulff}
        """
        return dict(zip(self.miller_list, self.color_area))
    @property
    def miller_energy_dict(self):
        """
        Returns {hkl: surface energy_hkl}
        """
        return dict(zip(self.miller_list, self.e_surf_list))
    @property
    def surface_area(self):
        """
        Total surface area of Wulff shape.
        """
        return sum(self.miller_area_dict.values())
    @property
    def weighted_surface_energy(self):
        """
        Returns:
            sum(surface_energy_hkl * area_hkl)/ sum(area_hkl)
        """
        return self.total_surface_energy / self.surface_area
    @property
    def area_fraction_dict(self):
        """
        Returns:
            (dict): {hkl: area_hkl/total area on wulff}
        """
        return {hkl: self.miller_area_dict[hkl] / self.surface_area
                for hkl in self.miller_area_dict.keys()}
    @property
    def anisotropy(self):
        """
        Returns:
            (float) Coefficient of Variation from weighted surface energy
            The ideal sphere is 0.
        """
        square_diff_energy = 0
        weighted_energy = self.weighted_surface_energy
        area_frac_dict = self.area_fraction_dict
        miller_energy_dict = self.miller_energy_dict
        for hkl in miller_energy_dict.keys():
            square_diff_energy += (miller_energy_dict[hkl] - weighted_energy) \
                                  ** 2 * area_frac_dict[hkl]
        return np.sqrt(square_diff_energy) / weighted_energy
    @property
    def shape_factor(self):
        """
        This is useful for determining the critical nucleus size.
        A large shape factor indicates great anisotropy.
        See Ballufi, R. W., Allen, S. M. & Carter, W. C. Kinetics
            of Materials. (John Wiley & Sons, 2005), p.461

        Returns:
            (float) Shape factor.
        """
        return self.surface_area / (self.volume ** (2 / 3))
    @property
    def effective_radius(self):
        """
        Radius of the Wulffshape when the
        Wulffshape is approximated as a sphere.

        Returns:
            (float) radius.
        """
        return ((3 / 4) * (self.volume / np.pi)) ** (1 / 3)
    @property
    def total_surface_energy(self):
        """
        Total surface energy of the Wulff shape.

        Returns:
            (float) sum(surface_energy_hkl * area_hkl)
        """
        tot_surface_energy = 0
        for hkl in self.miller_energy_dict.keys():
            tot_surface_energy += self.miller_energy_dict[hkl] * \
                                  self.miller_area_dict[hkl]
        return tot_surface_energy
    @property
    def tot_corner_sites(self):
        """
        Returns the number of vertices in the convex hull.
        Useful for identifying catalytically active sites.
        """
        return len(self.wulff_convex.vertices)
    @property
    def tot_edges(self):
        """
        Returns the number of edges in the convex hull.
        Useful for identifying catalytically active sites.
        """
        all_edges = []
        for facet in self.facets:
            edges = []
            pt = self.get_line_in_facet(facet)
            lines = []
            for i, p in enumerate(pt):
                # NOTE(review): `i == len(pt) / 2` compares int to float; it
                # only ever matches when len(pt) is even — confirm intended.
                if i == len(pt) / 2:
                    break
                lines.append(tuple(sorted(tuple([tuple(pt[i * 2]), tuple(pt[i * 2 + 1])]))))
            for i, p in enumerate(lines):
                if p not in all_edges:
                    edges.append(p)
            all_edges.extend(edges)
        return len(all_edges)
| 35.194401
| 107
| 0.576712
| 3,024
| 22,630
| 4.115741
| 0.164683
| 0.024747
| 0.008436
| 0.014462
| 0.186405
| 0.127029
| 0.102925
| 0.070946
| 0.063555
| 0.052627
| 0
| 0.015858
| 0.328458
| 22,630
| 642
| 108
| 35.249221
| 0.803119
| 0.315068
| 0
| 0.088816
| 1
| 0
| 0.021338
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.078947
| false
| 0
| 0.039474
| 0
| 0.197368
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
81399676f0bd08a3b07c20a3a444ab0c8669d9d3
| 1,064
|
py
|
Python
|
plugins/barracuda_waf/komand_barracuda_waf/actions/create_security_policy/schema.py
|
lukaszlaszuk/insightconnect-plugins
|
8c6ce323bfbb12c55f8b5a9c08975d25eb9f8892
|
[
"MIT"
] | 46
|
2019-06-05T20:47:58.000Z
|
2022-03-29T10:18:01.000Z
|
plugins/barracuda_waf/komand_barracuda_waf/actions/create_security_policy/schema.py
|
lukaszlaszuk/insightconnect-plugins
|
8c6ce323bfbb12c55f8b5a9c08975d25eb9f8892
|
[
"MIT"
] | 386
|
2019-06-07T20:20:39.000Z
|
2022-03-30T17:35:01.000Z
|
plugins/barracuda_waf/komand_barracuda_waf/actions/create_security_policy/schema.py
|
lukaszlaszuk/insightconnect-plugins
|
8c6ce323bfbb12c55f8b5a9c08975d25eb9f8892
|
[
"MIT"
] | 43
|
2019-07-09T14:13:58.000Z
|
2022-03-28T12:04:46.000Z
|
# GENERATED BY KOMAND SDK - DO NOT EDIT
import komand
import json
class Component:
    # Human-readable action description consumed by the Komand plugin framework.
    DESCRIPTION = "Creates a security policy with the default values"
class Input:
    # Input schema key constant (matches the JSON schema property below).
    NAME = "name"
class Output:
    # Output schema key constant (matches the JSON schema property below).
    ID = "id"
class CreateSecurityPolicyInput(komand.Input):
    # SDK-generated input validator: the JSON schema string below is parsed
    # once at class-definition time and passed to komand.Input for validation.
    schema = json.loads("""
   {
  "type": "object",
  "title": "Variables",
  "properties": {
    "name": {
      "type": "string",
      "title": "Name",
      "description": "The name of the security policy that needs to be created",
      "order": 1
    }
  },
  "required": [
    "name"
  ]
}
    """)
    def __init__(self):
        # Hand the parsed schema to the framework base class.
        super(self.__class__, self).__init__(self.schema)
class CreateSecurityPolicyOutput(komand.Output):
    # SDK-generated output validator: JSON schema for the action's result.
    schema = json.loads("""
   {
  "type": "object",
  "title": "Variables",
  "properties": {
    "id": {
      "type": "string",
      "title": "ID",
      "description": "ID of the new policy",
      "order": 1
    }
  },
  "required": [
    "id"
  ]
}
    """)
    def __init__(self):
        # Hand the parsed schema to the framework base class.
        super(self.__class__, self).__init__(self.schema)
| 17.16129
| 80
| 0.566729
| 110
| 1,064
| 5.263636
| 0.436364
| 0.055268
| 0.051813
| 0.06563
| 0.317789
| 0.317789
| 0.317789
| 0.317789
| 0.148532
| 0.148532
| 0
| 0.002587
| 0.273496
| 1,064
| 61
| 81
| 17.442623
| 0.746442
| 0.034774
| 0
| 0.416667
| 1
| 0
| 0.549268
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.041667
| false
| 0
| 0.041667
| 0
| 0.291667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
d496c9cfdd316aad01a20acdae3c9c7e998fb11f
| 887
|
py
|
Python
|
Matrix/Python/rotatematrix.py
|
pratika1505/DSA-Path-And-Important-Questions
|
a86a0774f0abf5151c852afd2bbf67a5368125c8
|
[
"MIT"
] | 26
|
2021-08-04T17:03:26.000Z
|
2022-03-08T08:43:44.000Z
|
Matrix/Python/rotatematrix.py
|
pratika1505/DSA-Path-And-Important-Questions
|
a86a0774f0abf5151c852afd2bbf67a5368125c8
|
[
"MIT"
] | 25
|
2021-08-04T16:58:33.000Z
|
2021-11-01T05:26:19.000Z
|
Matrix/Python/rotatematrix.py
|
pratika1505/DSA-Path-And-Important-Questions
|
a86a0774f0abf5151c852afd2bbf67a5368125c8
|
[
"MIT"
] | 16
|
2021-08-14T20:15:24.000Z
|
2022-02-23T11:04:06.000Z
|
# -*- coding: utf-8 -*-
"""RotateMatrix.ipynb
Automatically generated by Colaboratory.
Original file is located at
https://colab.research.google.com/drive/1LX-dZFuQCyBXDNVosTp0MHaZZxoc5T4I
"""
#Function to rotate matrix by 90 degree
def rotate(mat):
    """Rotate a square matrix 90 degrees clockwise, in place.

    Args:
        mat: N x N matrix as a list of lists; mutated in place.

    Returns:
        None. The rotation is transpose followed by reversing each row.
    """
    # `N x N` matrix
    N = len(mat)
    # Transpose the matrix (tuple swap replaces the temp-variable dance).
    for i in range(N):
        for j in range(i):
            mat[i][j], mat[j][i] = mat[j][i], mat[i][j]
    # Reverse each row by swapping mirrored columns.
    for i in range(N):
        for j in range(N // 2):
            mat[i][j], mat[i][N - j - 1] = mat[i][N - j - 1], mat[i][j]
if __name__ == '__main__':
    # Demo: rotate a 4x4 sample matrix and print each row of the result.
    sample = [
        [1, 2, 3, 4],
        [5, 6, 7, 8],
        [9, 10, 11, 12],
        [13, 14, 15, 16],
    ]
    rotate(sample)
    for row in sample:
        print(row)
| 19.282609
| 77
| 0.500564
| 129
| 887
| 3.387597
| 0.496124
| 0.05492
| 0.045767
| 0.073227
| 0.208238
| 0.183066
| 0.183066
| 0.183066
| 0.105263
| 0
| 0
| 0.057192
| 0.349493
| 887
| 45
| 78
| 19.711111
| 0.69844
| 0.347238
| 0
| 0.181818
| 1
| 0
| 0.014184
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.045455
| false
| 0
| 0
| 0
| 0.045455
| 0.045455
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
d4ade5ab9af89265fbd2d849b58156e138f3d82c
| 452
|
py
|
Python
|
grocery/migrations/0003_alter_item_comments.py
|
akshay-kapase/shopping
|
7bf3bac4a78d07bca9a9f9d44d85e11bb826a366
|
[
"MIT"
] | null | null | null |
grocery/migrations/0003_alter_item_comments.py
|
akshay-kapase/shopping
|
7bf3bac4a78d07bca9a9f9d44d85e11bb826a366
|
[
"MIT"
] | null | null | null |
grocery/migrations/0003_alter_item_comments.py
|
akshay-kapase/shopping
|
7bf3bac4a78d07bca9a9f9d44d85e11bb826a366
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.2.6 on 2021-09-03 15:48
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration altering `item.comments` to a CharField.
    # NOTE(review): default='null' is the literal string "null", not SQL NULL —
    # confirm that is intended (preserve_default=False drops it after migrating).
    dependencies = [
        ('grocery', '0002_alter_item_comments'),
    ]
    operations = [
        migrations.AlterField(
            model_name='item',
            name='comments',
            field=models.CharField(blank=True, default='null', max_length=200),
            preserve_default=False,
        ),
    ]
| 22.6
| 79
| 0.606195
| 49
| 452
| 5.469388
| 0.816327
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.067485
| 0.278761
| 452
| 19
| 80
| 23.789474
| 0.754601
| 0.099558
| 0
| 0
| 1
| 0
| 0.116049
| 0.059259
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.076923
| 0
| 0.307692
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
d4b832afc1a419832477a3ad699f701ea5d77522
| 3,357
|
py
|
Python
|
ciphers/SKINNY-TK2/SKINNY-TK2/skinnytk2.py
|
j-danner/autoguess
|
712a8dcfb259a277b2b2a499bd7c5fc4aab97b67
|
[
"MIT"
] | 7
|
2021-11-29T07:25:43.000Z
|
2022-03-02T10:15:30.000Z
|
ciphers/SKINNY-TK2/SKINNY-TK2/skinnytk2.py
|
j-danner/autoguess
|
712a8dcfb259a277b2b2a499bd7c5fc4aab97b67
|
[
"MIT"
] | 1
|
2022-03-30T16:29:50.000Z
|
2022-03-30T16:29:50.000Z
|
ciphers/SKINNY-TK2/SKINNY-TK2/skinnytk2.py
|
j-danner/autoguess
|
712a8dcfb259a277b2b2a499bd7c5fc4aab97b67
|
[
"MIT"
] | 1
|
2022-03-30T13:40:12.000Z
|
2022-03-30T13:40:12.000Z
|
# Created on Sep 7, 2020
# author: Hosein Hadipour
# contact: hsn.hadipour@gmail.com
import os
output_dir = os.path.curdir
def skinnytk2(R=1):
    """
    This function generates the relations of Skinny-n-n for R rounds.

    tk ================================================> TWEAKEY_P(tk) ===> ---
                 SB AC                  |  P MC            SB AC        |
    x_0 ===> x_0 ===> x_0 ===> + ===> y_0 ===> P(y_0) ===> x_1 ===> x_1 ===> x_1 ===> + ===> y_1 ===> ---

    Writes a relation file (variables and their connection relations) for the
    autoguess tool to ``output_dir``; returns None. The output format is exact
    text consumed downstream — do not reformat the generated strings.
    """
    cipher_name = 'skinnytk2'
    # ShiftRows permutation and the TK tweakey-schedule permutation.
    P = [0, 1, 2, 3, 7, 4, 5, 6, 10, 11, 8, 9, 13, 14, 15, 12]
    TKP = [9, 15, 8, 13, 10, 14, 12, 11, 0, 1, 2, 3, 4, 5, 6, 7]
    tk1 = ['tk1_%d' % i for i in range(16)]
    tk2 = ['tk2_%d' % i for i in range(16)]
    # Recommended solver parameters (mg = guessed vars, ms = max steps)
    # for each round count, kept for reference:
    # 1 round
    # recommended_mg = 8
    # recommended_ms = 4
    # 2 rounds
    # recommended_mg = 16
    # recommended_ms = 8
    # 3 rounds
    # recommended_mg = 19
    # recommended_ms = 24
    # 4 rounds
    # recommended_mg = 21
    # recommended_ms = 27
    # 5 rounds
    # recommended_mg = 22
    # recommended_ms = 35
    # 6 rounds
    # recommended_mg = 25
    # recommended_ms = 40
    # 7 rounds
    # recommended_mg = 26
    # recommended_ms = 70
    # 8 rounds
    # recommended_mg = 28
    # recommended_ms = 80
    # 9 rounds
    # recommended_mg = 28
    # recommended_ms = 100
    # 10 rounds
    recommended_mg = 30
    recommended_ms = 100
    # 11 rounds
    # recommended_mg = 31
    # recommended_ms = 100
    eqs = '#%s %d Rounds\n' % (cipher_name, R)
    eqs += 'connection relations\n'
    for r in range(R):
        xin = ['x_%d_%d' % (r, i) for i in range(16)]
        xout = ['x_%d_%d' % (r + 1, i) for i in range(16)]
        y = ['y_%d_%d' % (r, i) for i in range(16)]
        tk = ['tk_%d_%d' % (r, i) for i in range(8)]
        # Generate AddTweakey relations
        for i in range(4):
            for j in range(4):
                # Only the first two rows receive the round tweakey.
                if i < 2:
                    eqs += '%s, %s, %s\n' % (tk1[j + 4*i], tk2[j + 4*i], tk[j + 4*i])
                    eqs += '%s, %s, %s\n' % (xin[j + 4*i], tk[j + 4*i], y[j + 4*i])
                else:
                    eqs += '%s, %s\n' % (xin[j + 4*i], y[j + 4*i])
        # Apply ShiftRows
        py = [y[P[i]] for i in range(16)]
        # Generate MixColumn relations
        for j in range(4):
            eqs += '%s, %s, %s, %s\n' % (py[j + 0*4], py[j + 2*4], py[j + 3*4], xout[j + 0*4])
            eqs += '%s, %s\n' % (py[j], xout[j + 1*4])
            eqs += '%s, %s, %s\n' % (py[j + 1*4], py[j + 2*4], xout[j + 2*4])
            eqs += '%s, %s, %s\n' % (py[j + 0*4], py[j + 2*4], xout[j + 3*4])
        # Update Tweakey: permute both tweakey arrays for the next round.
        temp1 = tk1.copy()
        temp2 = tk2.copy()
        tk1 = [temp1[TKP[i]] for i in range(16)]
        tk2 = [temp2[TKP[i]] for i in range(16)]
    # Plaintext and ciphertext cells are the known variables.
    plaintext = ['x_0_%d' % i for i in range(16)]
    ciphertext = ['x_%d_%d' % (R, i) for i in range(16)]
    eqs += 'known\n' + '\n'.join(plaintext + ciphertext)
    eqs += '\nend'
    relation_file_path = os.path.join(output_dir, 'relationfile_%s_%dr_mg%d_ms%d.txt' % (cipher_name, R, recommended_mg, recommended_ms))
    with open(relation_file_path, 'w') as relation_file:
        relation_file.write(eqs)
def main():
    """Entry point: generate the relation file for 10 rounds of SKINNY-TK2."""
    rounds = 10
    skinnytk2(R=rounds)
# Run the generator only when invoked as a script.
if __name__ == '__main__':
    main()
| 33.909091
| 137
| 0.472148
| 519
| 3,357
| 2.917148
| 0.22736
| 0.069353
| 0.047556
| 0.087186
| 0.258917
| 0.229194
| 0.165786
| 0.073316
| 0.053501
| 0.042272
| 0
| 0.086682
| 0.340185
| 3,357
| 98
| 138
| 34.255102
| 0.59684
| 0.308311
| 0
| 0.044444
| 1
| 0
| 0.104564
| 0.014621
| 0.022222
| 0
| 0
| 0
| 0
| 1
| 0.044444
| false
| 0
| 0.022222
| 0
| 0.066667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
d4c391278bd0cf509c7b23a6660f7d6beb4dfdb7
| 3,960
|
py
|
Python
|
python/SHA3_hashlib_based_concept.py
|
feketebv/SCA_proof_SHA3-512
|
5a7689ea307463d5b797e49142c349b02cdcda03
|
[
"MIT"
] | 1
|
2021-05-19T00:08:15.000Z
|
2021-05-19T00:08:15.000Z
|
python/SHA3_hashlib_based_concept.py
|
feketebv/SCA_proof_SHA3-512
|
5a7689ea307463d5b797e49142c349b02cdcda03
|
[
"MIT"
] | null | null | null |
python/SHA3_hashlib_based_concept.py
|
feketebv/SCA_proof_SHA3-512
|
5a7689ea307463d5b797e49142c349b02cdcda03
|
[
"MIT"
] | null | null | null |
'''
Written by: Balazs Valer Fekete fbv81bp@outlook.hu fbv81bp@gmail.com
Last updated: 29.01.2021
'''
# the concept is to generate a side channel resistant initialisation of the hashing function based on
# one secret key and several openly known initialisation vectors (IV) in a manner that the same input
# is not hashed too more than two times, which is hopefully not sufficient for side channel
# measurements based computations: the number of consecutive measurements for a successful attack on
# the CHI function in a practically noiseless computer simulation (see "chi_cpa.py") takes around a
# 100 measurements
# this concept is achieved by taking a counter of a certain bitlength, and twice as many IVs as bits in
# the counter: "IV0s" and "IV1s" and compute a series of hashes starting with the secret key then with a
# correspong IV of the sets 0 and 1 based on whether the counter's corresponding bit - starting at MSB -
# is 0 or 1; this way every hash output is exactly used 2 times if the intermediate values are STORED
# and the entire series of initial hashes are NOT fully recomputed only such whose corresponding
# counter bits has changed and all the next levels too down to the LSB of the counter
# the working solution is going to be based on the algorithms presented here, although
# in this file the algorithm here does the full padding so the results won't equal to
# a scheme where the rate is fully filled with IVs and the data comes only afterwards...
import hashlib
# KEY DATA STRUCTURES' INTERPRETATION
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
IV0s = [658678, 6785697, 254376, 67856, 1432543, 786, 124345, 5443654]
IV1s = [2565, 256658, 985, 218996, 255, 685652, 28552, 3256565]
# LSB ... MSB
hash_copies = [None for i in range(len(IV0s))]
# LSB ... MSB
# counter
# MSB ... LSB
# COMPUTING HASHES FOR EVERY COUNTER VALUE INDIVIDUALLY
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
for counter in range(11):
hash = hashlib.sha3_512()
# looping from MSB to LSB in counter too
for i in range(len(IV0s)-1, -1, -1):
if (counter>>i) & 1 == 1:
IV = bytes(IV1s[i])
else:
IV = bytes(IV0s[i])
hash.update(IV)
print(hash.hexdigest())
print()
# COMPUTING HASHES BASED ON THE NATURE OF BINARY INCREMENTATION:
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# only fewer values need to be recomputed, those whose corresponding
# bits have changed, down until LSB
# initialize
hash = hashlib.sha3_512()
# looping from MSB to LSB
for i in range(len(IV0s)-1, -1, -1):
# addressing "MSB" of IVs at first, "LSB" at last!
IV = bytes(IV0s[i])
hash.update(IV)
# index 0 of hash_copies changes the most frequently ie. according to counter's LSB
hash_copies[i] = hash.copy()
# compute
last_counter = 0
for counter in range(11):
IV_mask = last_counter ^ counter
last_counter = counter
# determine the highest non-zero bit of IV_mask, LSB is 1, 0 means there was no change
nz = 0
while IV_mask > 0:
IV_mask >>= 1
nz += 1
# initialize hash to the last value whose corresponding counter bit didn't switch
# have to copy object otherwise the originally pointed version gets updated!
hash = hash_copies[nz].copy() # LSB is index 0
# compute only the remaining hashes
while nz != 0: # nz=0 is the initial condition, nothing needs to be done
nz -= 1
if (counter>>nz) & 1 == 1:
IV = bytes(IV1s[nz])
else:
IV = bytes(IV0s[nz])
hash.update(IV)
# needs to be copied again because of object orientation
hash_copies[nz] = hash.copy()
# showing the hash copies' entire table after each computation
#for hashes in hash_copies:
# print(hashes.hexdigest())
print(hash_copies[0].hexdigest())
| 40
| 105
| 0.65303
| 579
| 3,960
| 4.43696
| 0.419689
| 0.031141
| 0.007007
| 0.012845
| 0.095757
| 0.070845
| 0.063838
| 0.045154
| 0.045154
| 0
| 0
| 0.051095
| 0.238889
| 3,960
| 98
| 106
| 40.408163
| 0.801261
| 0.661364
| 0
| 0.351351
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.027027
| 0
| 0.027027
| 0.081081
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
d4d8f82be29e6cb13695308004bac74a741d2095
| 8,111
|
py
|
Python
|
bogglesolver.py
|
gammazero/pybogglesolver
|
71d2c6d6ae8c9b5f580f6b27479aea3450a2895a
|
[
"MIT"
] | null | null | null |
bogglesolver.py
|
gammazero/pybogglesolver
|
71d2c6d6ae8c9b5f580f6b27479aea3450a2895a
|
[
"MIT"
] | null | null | null |
bogglesolver.py
|
gammazero/pybogglesolver
|
71d2c6d6ae8c9b5f580f6b27479aea3450a2895a
|
[
"MIT"
] | null | null | null |
"""
Module to generate solutions for Boggle grids.
Andrew Gillis 22 Dec. 2009
"""
from __future__ import print_function
import os
import sys
import collections
import trie
if sys.version < '3':
range = xrange
class BoggleSolver(object):
    """
    This class uses an external words file as a dictionary of acceptable boggle
    words.  When an instance of this class is created, it sets up an internal
    dictionary to look up valid boggle answers.  The class' solve method can be
    used repeatedly to generate solutions for different boggle grids.
    """

    def __init__(self, words_file, xlen=4, ylen=4, pre_compute_adj=False):
        """Create and initialize BoggleSolver instance.

        This creates the internal trie for fast word lookup letter-by-letter.
        Words that begin with capital letters and words that are not within the
        specified length limits are filtered out.

        Arguments:
        words_file -- Path of file containing dictionary words (may be
                      gzip- or bz2-compressed).
        xlen -- X dimension (width) of board.
        ylen -- Y dimension (height) of board.
        pre_compute_adj -- Pre-compute adjacency matrix.
        """
        assert(xlen > 1)
        assert(ylen > 1)
        self.xlen = xlen
        self.ylen = ylen
        self.board_size = xlen * ylen
        if pre_compute_adj:
            self.adjacency = BoggleSolver._create_adjacency_matrix(xlen, ylen)
        else:
            self.adjacency = None
        self.trie = BoggleSolver._load_dictionary(
            words_file, self.board_size, 3)

    def solve(self, grid):
        """Generate all solutions for the given boggle grid.

        Arguments:
        grid -- A string of X*Y characters representing the letters in a boggle
        grid, from top left to bottom right.

        Returns:
        A set of words found in the boggle grid.

        Raises:
        RuntimeError -- if the dictionary is not loaded or the grid does not
        match the configured board size.
        """
        if self.trie is None:
            raise RuntimeError('words file not loaded')
        if len(grid) != self.board_size:
            raise RuntimeError('invalid board')
        board = list(grid)
        trie = self.trie
        words = set()
        # BFS over (square, prefix, trie node, squares already used) states.
        q = collections.deque()
        adjs = self.adjacency
        for init_sq in range(self.board_size):
            c = board[init_sq]
            q.append((init_sq, c, trie.get_child(c), [init_sq]))
        while q:
            parent_sq, prefix, pnode, seen = q.popleft()
            # Hoist the method lookup out of the inner loop.
            pnode_get_child = pnode.get_child
            if adjs is not None:
                adj = adjs[parent_sq]
            else:
                adj = self._calc_adjacency(self.xlen, self.ylen, parent_sq)
            for cur_sq in adj:
                if cur_sq in seen:
                    continue
                c = board[cur_sq]
                cur_node = pnode_get_child(c)
                if cur_node is None:
                    continue
                s = prefix + c
                q.append((cur_sq, s, cur_node, seen + [cur_sq]))
                if cur_node._is_word:
                    if s[0] == 'q':
                        # Rehydrate q-words with 'u'.
                        words.add('qu' + s[1:])
                    else:
                        words.add(s)
        return words

    def show_grid(self, grid):
        """Utility method to print a 4x4 boggle grid.

        Arguments:
        grid -- A string of X*Y characters representing the letters in a boggle
        grid, from top left to bottom right.
        """
        for y in range(self.ylen):
            print('+' + '---+' * self.xlen)
            yi = y * self.xlen
            line = ['| ']
            for x in range(self.xlen):
                cell = grid[yi+x].upper()
                if cell == 'Q':
                    # 'Q' occupies the 'u' slot too, so no trailing pad space.
                    line.append('Qu')
                    line.append('| ')
                else:
                    line.append(cell)
                    line.append(' | ')
            print(''.join(line))
        print('+' + '---+' * self.xlen)

    def find_substrings(self, string):
        """Find all valid substrings in the given string.

        This method is not necessary for the boggle solver, but is a utility
        for testing that all substrings of a word are correctly found.

        Arguments:
        string -- The string in which to search for valid substrings.

        Returns:
        Set of substrings that are valid words.
        """
        found = set()
        for start in range(len(string)):
            cur = self.trie
            letters = [None] * self.board_size
            count = 0
            for l in string[start:]:
                letters[count] = l
                count += 1
                cur = cur.get_child(l)
                if cur is None:
                    break
                if cur._is_word:
                    found.add(''.join(letters[:count]))
                if not cur.has_children():
                    break
        return found

    @staticmethod
    def _load_dictionary(words_file, max_len, min_len):
        """Private method to create the trie for finding words.

        Arguments:
        words_file -- Path of file containing words for reference.
        max_len -- Maximum word length kept (the board size).
        min_len -- Minimum word length kept.

        Return:
        The populated trie root (not a count).

        Raises:
        RuntimeError -- if words_file does not exist.
        """
        if not os.path.isfile(words_file):
            raise RuntimeError('words file not found: ' + words_file)
        print('creating dictionary...')
        root = trie.Trie()
        word_count = 0
        if words_file.endswith('gz'):
            import gzip
            f = gzip.open(words_file)
        elif words_file.endswith('bz2'):
            import bz2
            f = bz2.BZ2File(words_file)
        else:
            f = open(words_file)
        try:
            for word in f:
                if sys.version < '3':
                    word = word.strip()
                else:
                    # NOTE(review): compressed files yield bytes, but a plain
                    # text-mode file yields str on Python 3, where .decode
                    # would fail — confirm only compressed inputs are used.
                    word = word.strip().decode("utf-8")
                # Skip words that are too long or too short.
                word_len = len(word)
                if word_len > max_len or word_len < min_len:
                    continue
                # Skip words that start with capital letter.
                if word[0].isupper():
                    continue
                if word[0] == 'q':
                    # Skip words starting with q not followed by u.
                    if word[1] != 'u':
                        continue
                    # Remove "u" from q-words so that only the q is matched.
                    word = 'q' + word[2:]
                root.insert(word)
                word_count += 1
        finally:
            f.close()
        print('Loaded', word_count, 'words from file.')
        return root

    @staticmethod
    def _create_adjacency_matrix(xlim, ylim):
        """Build the adjacency list for every square on an xlim-by-ylim board."""
        # Use a comprehension instead of `[[]] * n`, which creates n aliases
        # of one shared list.
        return [BoggleSolver._calc_adjacency(xlim, ylim, i)
                for i in range(ylim * xlim)]

    @staticmethod
    def _calc_adjacency(xlim, ylim, sq):
        """Return indices of the up-to-8 squares adjacent to square sq."""
        adj = []
        # Integer divmod instead of int(sq / xlim): exact for all board sizes,
        # no float rounding.
        y, x = divmod(sq, xlim)
        # Look at row above current cell.
        if y-1 >= 0:
            above = sq - xlim
            # Look to upper left.
            if x-1 >= 0:
                adj.append(above - 1)
            # Look above.
            adj.append(above)
            # Look upper right.
            if x+1 < xlim:
                adj.append(above + 1)
        # Look at same row that current cell is on.
        # Look to left of current cell.
        if x-1 >= 0:
            adj.append(sq - 1)
        # Look to right of current cell.
        if x+1 < xlim:
            adj.append(sq + 1)
        # Look at row below current cell.
        if y+1 < ylim:
            below = sq + xlim
            # Look to lower left.
            if x-1 >= 0:
                adj.append(below - 1)
            # Look below.
            adj.append(below)
            # Look to lower right.
            if x+1 < xlim:
                adj.append(below + 1)
        return adj
| 31.076628
| 79
| 0.501911
| 959
| 8,111
| 4.151199
| 0.240876
| 0.03165
| 0.006029
| 0.003768
| 0.113288
| 0.082643
| 0.059784
| 0.034665
| 0.034665
| 0.034665
| 0
| 0.010509
| 0.413389
| 8,111
| 260
| 80
| 31.196154
| 0.826187
| 0.267784
| 0
| 0.181818
| 1
| 0
| 0.02463
| 0
| 0
| 0
| 0
| 0
| 0.012987
| 1
| 0.045455
| false
| 0
| 0.045455
| 0
| 0.12987
| 0.038961
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
be01c82117aa2911b241e39136b462d24502c315
| 793
|
py
|
Python
|
dash/graphs.py
|
fuzzylabs/wearable-my-foot
|
5e7d818fc51a3d3babbe1c0ec49450b1a1f030c6
|
[
"Apache-2.0"
] | 5
|
2020-09-04T13:49:41.000Z
|
2021-07-30T02:33:49.000Z
|
dash/graphs.py
|
archena/wearable-my-foot
|
5e7d818fc51a3d3babbe1c0ec49450b1a1f030c6
|
[
"Apache-2.0"
] | 2
|
2020-09-24T07:55:43.000Z
|
2020-09-24T09:30:19.000Z
|
dash/graphs.py
|
archena/wearable-my-foot
|
5e7d818fc51a3d3babbe1c0ec49450b1a1f030c6
|
[
"Apache-2.0"
] | 1
|
2021-03-04T03:18:37.000Z
|
2021-03-04T03:18:37.000Z
|
import plotly.graph_objs as go
class GraphsHelper:
    """Builds Plotly figures for the dashboard."""

    # Plotly layout template applied to every figure this helper produces.
    template = "plotly_dark"

    def generate_timeseries_plot(self, dataframe):
        """Generate a plot for a timeseries.

        (This text was previously a stray triple-quoted string sitting
        between class members; it is now the method's real docstring so
        help() and doc tools pick it up.)

        Arguments:
        dataframe -- frame with one column per pressure sensor ("p1".."p3")
                     indexed by time (presumably a pandas DataFrame — confirm
                     against the caller).

        Returns:
        A plotly Figure with one semi-transparent scatter trace per sensor.
        """
        pressure_plots = []
        for sensor in ["p1", "p2", "p3"]:
            series = dataframe[sensor]
            scatter = go.Scatter(x=dataframe.index,
                                 y=series,
                                 name=f"Sensor {sensor}",
                                 opacity=0.4)
            pressure_plots.append(scatter)
        pressure_figure = go.Figure(
            data=pressure_plots,
            layout=go.Layout(
                title="Pressure timeseries",
                template=self.template
            )
        )
        return pressure_figure
| 29.37037
| 59
| 0.493064
| 72
| 793
| 5.305556
| 0.569444
| 0.102094
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.010893
| 0.421185
| 793
| 26
| 60
| 30.5
| 0.821351
| 0
| 0
| 0
| 1
| 0
| 0.068456
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.05
| false
| 0
| 0.05
| 0
| 0.25
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
be0243ad78899348119ce102fbea0418e12871e2
| 5,379
|
py
|
Python
|
telethon/tl/functions/stickers.py
|
polisitni1/DogeClickBot
|
ac57eaeefca2c6ab9e48458f9f928a6a421a162e
|
[
"MIT"
] | null | null | null |
telethon/tl/functions/stickers.py
|
polisitni1/DogeClickBot
|
ac57eaeefca2c6ab9e48458f9f928a6a421a162e
|
[
"MIT"
] | null | null | null |
telethon/tl/functions/stickers.py
|
polisitni1/DogeClickBot
|
ac57eaeefca2c6ab9e48458f9f928a6a421a162e
|
[
"MIT"
] | null | null | null |
"""File generated by TLObjects' generator. All changes will be ERASED"""
from ...tl.tlobject import TLRequest
from typing import Optional, List, Union, TYPE_CHECKING
import os
import struct
if TYPE_CHECKING:
from ...tl.types import TypeInputStickerSet, TypeInputUser, TypeInputStickerSetItem, TypeInputDocument
class AddStickerToSetRequest(TLRequest):
    """Generated TLRequest (see file header: generator output, edits are erased)."""
    CONSTRUCTOR_ID = 0x8653febe  # TL-schema constructor number
    SUBCLASS_OF_ID = 0x9b704a5a

    def __init__(self, stickerset, sticker):
        """
        :param TypeInputStickerSet stickerset:
        :param TypeInputStickerSetItem sticker:
        :returns messages.StickerSet: Instance of StickerSet.
        """
        self.stickerset = stickerset  # type: TypeInputStickerSet
        self.sticker = sticker  # type: TypeInputStickerSetItem

    def to_dict(self):
        # Plain-dict view of the request, recursing into nested TLObjects.
        return {
            '_': 'AddStickerToSetRequest',
            'stickerset': None if self.stickerset is None else self.stickerset.to_dict(),
            'sticker': None if self.sticker is None else self.sticker.to_dict()
        }

    def __bytes__(self):
        # Binary TL serialization: constructor ID 0x8653febe little-endian,
        # then the serialized arguments in schema order.
        return b''.join((
            b'\xbe\xfeS\x86',
            bytes(self.stickerset),
            bytes(self.sticker),
        ))

    @classmethod
    def from_reader(cls, reader):
        # Inverse of __bytes__ (constructor ID already consumed by the caller).
        _stickerset = reader.tgread_object()
        _sticker = reader.tgread_object()
        return cls(stickerset=_stickerset, sticker=_sticker)
class ChangeStickerPositionRequest(TLRequest):
    """Generated TLRequest (see file header: generator output, edits are erased)."""
    CONSTRUCTOR_ID = 0xffb6d4ca  # TL-schema constructor number
    SUBCLASS_OF_ID = 0x9b704a5a

    def __init__(self, sticker, position):
        """
        :param TypeInputDocument sticker:
        :param int position:
        :returns messages.StickerSet: Instance of StickerSet.
        """
        self.sticker = sticker  # type: TypeInputDocument
        self.position = position  # type: int

    def to_dict(self):
        # Plain-dict view of the request, recursing into nested TLObjects.
        return {
            '_': 'ChangeStickerPositionRequest',
            'sticker': None if self.sticker is None else self.sticker.to_dict(),
            'position': self.position
        }

    def __bytes__(self):
        # Constructor ID 0xffb6d4ca little-endian, then the sticker object
        # and the position as a little-endian signed 32-bit int.
        return b''.join((
            b'\xca\xd4\xb6\xff',
            bytes(self.sticker),
            struct.pack('<i', self.position),
        ))

    @classmethod
    def from_reader(cls, reader):
        # Inverse of __bytes__ (constructor ID already consumed by the caller).
        _sticker = reader.tgread_object()
        _position = reader.read_int()
        return cls(sticker=_sticker, position=_position)
class CreateStickerSetRequest(TLRequest):
    """Generated TLRequest (see file header: generator output, edits are erased)."""
    CONSTRUCTOR_ID = 0x9bd86e6a  # TL-schema constructor number
    SUBCLASS_OF_ID = 0x9b704a5a

    def __init__(self, user_id, title, short_name, stickers, masks=None):
        """
        :param TypeInputUser user_id:
        :param str title:
        :param str short_name:
        :param List[TypeInputStickerSetItem] stickers:
        :param Optional[bool] masks:
        :returns messages.StickerSet: Instance of StickerSet.
        """
        self.user_id = user_id  # type: TypeInputUser
        self.title = title  # type: str
        self.short_name = short_name  # type: str
        self.stickers = stickers  # type: List[TypeInputStickerSetItem]
        self.masks = masks  # type: Optional[bool]

    async def resolve(self, client, utils):
        # Resolve the user reference into a proper input entity before sending.
        self.user_id = utils.get_input_user(await client.get_input_entity(self.user_id))

    def to_dict(self):
        # Plain-dict view of the request, recursing into nested TLObjects.
        return {
            '_': 'CreateStickerSetRequest',
            'user_id': None if self.user_id is None else self.user_id.to_dict(),
            'title': self.title,
            'short_name': self.short_name,
            'stickers': [] if self.stickers is None else [None if x is None else x.to_dict() for x in self.stickers],
            'masks': self.masks
        }

    def __bytes__(self):
        # Constructor ID 0x9bd86e6a little-endian, then a flags word whose
        # bit 0 marks the optional `masks` argument, then the fields; the
        # stickers list is a TL vector (0x1cb5c415 + int32 length + items).
        return b''.join((
            b'jn\xd8\x9b',
            struct.pack('<I', (0 if self.masks is None or self.masks is False else 1)),
            bytes(self.user_id),
            self.serialize_bytes(self.title),
            self.serialize_bytes(self.short_name),
            b'\x15\xc4\xb5\x1c',struct.pack('<i', len(self.stickers)),b''.join(bytes(x) for x in self.stickers),
        ))

    @classmethod
    def from_reader(cls, reader):
        # Inverse of __bytes__ (constructor ID already consumed by the caller).
        flags = reader.read_int()
        _masks = bool(flags & 1)
        _user_id = reader.tgread_object()
        _title = reader.tgread_string()
        _short_name = reader.tgread_string()
        reader.read_int()  # skip the vector constructor ID
        _stickers = []
        for _ in range(reader.read_int()):
            _x = reader.tgread_object()
            _stickers.append(_x)
        return cls(user_id=_user_id, title=_title, short_name=_short_name, stickers=_stickers, masks=_masks)
class RemoveStickerFromSetRequest(TLRequest):
    """Generated TLRequest (see file header: generator output, edits are erased)."""
    CONSTRUCTOR_ID = 0xf7760f51  # TL-schema constructor number
    SUBCLASS_OF_ID = 0x9b704a5a

    def __init__(self, sticker):
        """
        :param TypeInputDocument sticker:
        :returns messages.StickerSet: Instance of StickerSet.
        """
        self.sticker = sticker  # type: TypeInputDocument

    def to_dict(self):
        # Plain-dict view of the request, recursing into nested TLObjects.
        return {
            '_': 'RemoveStickerFromSetRequest',
            'sticker': None if self.sticker is None else self.sticker.to_dict()
        }

    def __bytes__(self):
        # Constructor ID 0xf7760f51 little-endian, then the sticker object.
        return b''.join((
            b'Q\x0fv\xf7',
            bytes(self.sticker),
        ))

    @classmethod
    def from_reader(cls, reader):
        # Inverse of __bytes__ (constructor ID already consumed by the caller).
        _sticker = reader.tgread_object()
        return cls(sticker=_sticker)
| 31.641176
| 117
| 0.622421
| 584
| 5,379
| 5.511986
| 0.200342
| 0.047841
| 0.021746
| 0.021746
| 0.324324
| 0.289531
| 0.265921
| 0.215284
| 0.170861
| 0.150668
| 0
| 0.015393
| 0.27533
| 5,379
| 169
| 118
| 31.828402
| 0.810416
| 0.151143
| 0
| 0.380531
| 1
| 0
| 0.056875
| 0.022841
| 0
| 0
| 0.018273
| 0
| 0
| 1
| 0.141593
| false
| 0
| 0.044248
| 0.070796
| 0.39823
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
076e350bd997dc6e64e333caef566c1b62991f65
| 970
|
py
|
Python
|
evaluate.py
|
adelmassimo/EM-Algorithm-for-MMPP
|
23ae031076a464bfba5286cf6b5a1fa5e1cc66b1
|
[
"MIT"
] | null | null | null |
evaluate.py
|
adelmassimo/EM-Algorithm-for-MMPP
|
23ae031076a464bfba5286cf6b5a1fa5e1cc66b1
|
[
"MIT"
] | null | null | null |
evaluate.py
|
adelmassimo/EM-Algorithm-for-MMPP
|
23ae031076a464bfba5286cf6b5a1fa5e1cc66b1
|
[
"MIT"
] | null | null | null |
import model
import numpy as np
import datasetReader as df
import main

# Number of traces loaded T
T = 1

# Generate traces
traces_factory = df.DatasetFactory()
traces_factory.createDataset(T)
traces = traces_factory.traces

# MMPP model matrices.
# NOTE(review): the first row of P0 ("[ .02 0;") has two entries while the
# other rows have three — np.matrix rejects ragged rows, so this literal
# looks broken; confirm the intended 3x3 values.
P0 = np.matrix("[ .02 0;"
               "0 0 0.5;"
               "0 0 0]")

P1 = np.matrix("[0.1 0 0;"
               "0 0.5 0;"
               "0 0 0.9]")

# Emission matrix (also defined as model.M — see backward_likelihood below).
M = np.matrix("[0.25 0 0;"
              "0 0.23 0;"
              "0 0 0.85]")
def backward_likelihood(i, trace):
    """Recursively compute the normalized backward-likelihood column vector
    for observation index i of the given trace.

    Arguments:
    i -- index into trace (recursion proceeds i -> i+1; base case i >= len(trace)
         returns a vector of ones).
    trace -- sequence of (inter-arrival time, observation index) pairs
             (presumably — inferred from trace[i][0]/trace[i][1] usage; confirm).

    Returns an (N, 1) numpy array, normalized to sum to 1 unless the sum is 0.
    NOTE: recursion depth grows with len(trace); long traces may hit Python's
    recursion limit.
    """
    N = model.N
    # NOTE: local M (trace length) shadows the module-level emission matrix M;
    # the emission matrix is accessed below as model.M instead.
    M = len( trace )
    likelihoods = np.ones((N, 1))
    if i < M:
        # Transition kernel for this inter-arrival time via randomization.
        P = main.randomization(P0, model.uniformization_rate, trace[i][0])
        # P = stored_p_values[i, :, :]
        likelihoods = np.multiply(
            P.dot( model.P1 ).dot( backward_likelihood(i+1, trace) ),
            model.M[:, trace[i][1]] )
        # Renormalize to avoid numerical underflow over long traces.
        if likelihoods.sum() != 0:
            likelihoods = likelihoods / likelihoods.sum()
    return likelihoods
| 23.095238
| 74
| 0.541237
| 137
| 970
| 3.773723
| 0.357664
| 0.065764
| 0.06383
| 0.038685
| 0.030948
| 0.030948
| 0.030948
| 0.030948
| 0
| 0
| 0
| 0.071212
| 0.319588
| 970
| 42
| 75
| 23.095238
| 0.712121
| 0.072165
| 0
| 0
| 1
| 0
| 0.083612
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.034483
| false
| 0
| 0.137931
| 0
| 0.206897
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
078eac42052a5c2213460643ce82f3d54d3402ee
| 963
|
py
|
Python
|
apps/delivery/migrations/0001_initial.py
|
jimforit/lagou
|
165593a15597012092b5e0ba34158fbc1d1c213d
|
[
"MIT"
] | 2
|
2019-03-11T03:58:19.000Z
|
2020-03-06T06:45:28.000Z
|
apps/delivery/migrations/0001_initial.py
|
jimforit/lagou
|
165593a15597012092b5e0ba34158fbc1d1c213d
|
[
"MIT"
] | 5
|
2020-06-05T20:04:20.000Z
|
2021-09-08T00:53:52.000Z
|
apps/delivery/migrations/0001_initial.py
|
jimforit/lagou
|
165593a15597012092b5e0ba34158fbc1d1c213d
|
[
"MIT"
] | null | null | null |
# Generated by Django 2.0.2 on 2019-03-08 13:03
from django.db import migrations, models
class Migration(migrations.Migration):
    # Initial, auto-generated migration for the delivery app (Django 2.0.2);
    # creates the Delivery model with audit timestamps, soft-delete flag and
    # a two-letter delivery-status enum.

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Delivery',
            fields=[
                ('create_time', models.DateTimeField(auto_now_add=True, verbose_name='创建时间')),
                ('update_time', models.DateTimeField(auto_now=True, verbose_name='更新时间')),
                ('is_delete', models.BooleanField(default=False, verbose_name='删除标记')),
                ('id', models.AutoField(primary_key=True, serialize=False, verbose_name='投递ID')),
                ('delivery_status', models.CharField(choices=[('DD', '待定'), ('YQ', '邀请面试'), ('WJ', '婉拒')], default='DD', max_length=2, verbose_name='投递状态')),
            ],
            options={
                'verbose_name': '面试',
                'verbose_name_plural': '面试',
            },
        ),
    ]
| 33.206897
| 157
| 0.559709
| 99
| 963
| 5.272727
| 0.626263
| 0.14751
| 0.088123
| 0.103448
| 0.114943
| 0
| 0
| 0
| 0
| 0
| 0
| 0.023222
| 0.284528
| 963
| 28
| 158
| 34.392857
| 0.734398
| 0.046729
| 0
| 0
| 1
| 0
| 0.138646
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.047619
| 0
| 0.238095
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
079486c9e4c55ef02a54afabc6964be8635f9540
| 860
|
py
|
Python
|
shop/migrations/0009_auto_20200310_1430.py
|
manson800819/test
|
6df7d92eababe76a54585cb8102a00a6d79ca467
|
[
"MIT"
] | null | null | null |
shop/migrations/0009_auto_20200310_1430.py
|
manson800819/test
|
6df7d92eababe76a54585cb8102a00a6d79ca467
|
[
"MIT"
] | null | null | null |
shop/migrations/0009_auto_20200310_1430.py
|
manson800819/test
|
6df7d92eababe76a54585cb8102a00a6d79ca467
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.29 on 2020-03-10 14:30
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated migration (Django 1.11.29): makes Category.name the
    # primary key (dropping the surrogate id) and repoints Product.type1
    # at shop.Type1 with CASCADE deletes.

    dependencies = [
        ('shop', '0008_auto_20200310_1134'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='category',
            name='id',
        ),
        migrations.AlterField(
            model_name='category',
            name='name',
            field=models.CharField(db_index=True, max_length=200, primary_key=True, serialize=False),
        ),
        migrations.AlterField(
            model_name='product',
            name='type1',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='products', to='shop.Type1'),
        ),
    ]
| 27.741935
| 123
| 0.612791
| 93
| 860
| 5.494624
| 0.623656
| 0.046967
| 0.054795
| 0.086106
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.061709
| 0.265116
| 860
| 30
| 124
| 28.666667
| 0.746835
| 0.080233
| 0
| 0.304348
| 1
| 0
| 0.100254
| 0.029188
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.130435
| 0
| 0.26087
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
07baaefdacf7ace2738a920e7e9c1d5671078a05
| 13,520
|
py
|
Python
|
microbitAnim.py
|
SaitoYutaka/microbitAnim
|
6630d5cdb3ae867d3467a035a1c14358944c0367
|
[
"MIT"
] | null | null | null |
microbitAnim.py
|
SaitoYutaka/microbitAnim
|
6630d5cdb3ae867d3467a035a1c14358944c0367
|
[
"MIT"
] | null | null | null |
microbitAnim.py
|
SaitoYutaka/microbitAnim
|
6630d5cdb3ae867d3467a035a1c14358944c0367
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
###########################################################################
## Python code generated with wxFormBuilder (version Aug 8 2018)
## http://www.wxformbuilder.org/
##
## PLEASE DO *NOT* EDIT THIS FILE!
###########################################################################
import wx
import wx.xrc
###########################################################################
## Class MyFrame1
###########################################################################
class MyFrame1 ( wx.Frame ):
    """5x5 LED-grid editor frame (layout originally emitted by wxFormBuilder).

    Behaviour is identical to the generated original: 25 red 50x50 buttons
    named m_button00..m_button44 laid out in a wx.GridBagSizer, a File menu
    (Open/Save/quit) and an export menu (python), with one overridable click
    handler per button.  The 25 copy-pasted create/bind blocks are collapsed
    into loops; attribute and handler names are unchanged so derived classes
    keep working.  NOTE: this file was generator output — regenerating from
    the .fbp project will overwrite this consolidation.
    """

    def __init__( self, parent ):
        wx.Frame.__init__ ( self, parent, id = wx.ID_ANY, title = wx.EmptyString, pos = wx.Point( 0,0 ), size = wx.Size( 767,507 ), style = wx.DEFAULT_FRAME_STYLE|wx.TAB_TRAVERSAL )

        self.SetSizeHints( wx.DefaultSize, wx.DefaultSize )

        gbSizer1 = wx.GridBagSizer( 0, 0 )
        gbSizer1.SetFlexibleDirection( wx.BOTH )
        gbSizer1.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_SPECIFIED )

        # Create the 5x5 grid of red buttons.  setattr keeps the generated
        # attribute names (m_button00..m_button44); each button is bound to
        # its matching onButton<row><col>Click handler.
        for row in range( 5 ):
            for col in range( 5 ):
                button = wx.Button( self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 50,50 ), 0 )
                button.SetBackgroundColour( wx.Colour( 255, 0, 0 ) )
                gbSizer1.Add( button, wx.GBPosition( row, col ), wx.GBSpan( 1, 1 ), wx.ALL, 5 )
                setattr( self, 'm_button%d%d' % ( row, col ), button )
                button.Bind( wx.EVT_BUTTON, getattr( self, 'onButton%d%dClick' % ( row, col ) ) )

        self.SetSizer( gbSizer1 )
        self.Layout()

        # Menu bar: File (Open / Save / quit) and export (python).
        self.m_menubar1 = wx.MenuBar( 0 )
        self.m_menu1 = wx.Menu()
        self.m_menuItem3 = wx.MenuItem( self.m_menu1, wx.ID_ANY, u"Open", wx.EmptyString, wx.ITEM_NORMAL )
        self.m_menu1.Append( self.m_menuItem3 )

        self.m_menuItem1 = wx.MenuItem( self.m_menu1, wx.ID_ANY, u"Save", wx.EmptyString, wx.ITEM_NORMAL )
        self.m_menu1.Append( self.m_menuItem1 )

        self.m_menuItem2 = wx.MenuItem( self.m_menu1, wx.ID_ANY, u"quit", wx.EmptyString, wx.ITEM_NORMAL )
        self.m_menu1.Append( self.m_menuItem2 )

        self.m_menubar1.Append( self.m_menu1, u"File" )

        self.m_menu2 = wx.Menu()
        self.m_menuItem4 = wx.MenuItem( self.m_menu2, wx.ID_ANY, u"python", wx.EmptyString, wx.ITEM_NORMAL )
        self.m_menu2.Append( self.m_menuItem4 )

        self.m_menubar1.Append( self.m_menu2, u"export" )

        self.SetMenuBar( self.m_menubar1 )

        self.Centre( wx.BOTH )

        # Connect menu events (button events were bound in the loop above).
        self.Bind( wx.EVT_MENU, self.OnMenuOpenSelect, id = self.m_menuItem3.GetId() )
        self.Bind( wx.EVT_MENU, self.OnMenuSaveSelect, id = self.m_menuItem1.GetId() )
        self.Bind( wx.EVT_MENU, self.OnMenuQuitSelect, id = self.m_menuItem2.GetId() )
        self.Bind( wx.EVT_MENU, self.OnExportPythonSelect, id = self.m_menuItem4.GetId() )

    def __del__( self ):
        pass

    # Virtual event handlers, override them in your derived class.
    def onButton00Click( self, event ):
        event.Skip()

    def onButton01Click( self, event ):
        event.Skip()

    def onButton02Click( self, event ):
        event.Skip()

    def onButton03Click( self, event ):
        event.Skip()

    def onButton04Click( self, event ):
        event.Skip()

    def onButton10Click( self, event ):
        event.Skip()

    def onButton11Click( self, event ):
        event.Skip()

    def onButton12Click( self, event ):
        event.Skip()

    def onButton13Click( self, event ):
        event.Skip()

    def onButton14Click( self, event ):
        event.Skip()

    def onButton20Click( self, event ):
        event.Skip()

    def onButton21Click( self, event ):
        event.Skip()

    def onButton22Click( self, event ):
        event.Skip()

    def onButton23Click( self, event ):
        event.Skip()

    def onButton24Click( self, event ):
        event.Skip()

    def onButton30Click( self, event ):
        event.Skip()

    def onButton31Click( self, event ):
        event.Skip()

    def onButton32Click( self, event ):
        event.Skip()

    def onButton33Click( self, event ):
        event.Skip()

    def onButton34Click( self, event ):
        event.Skip()

    def onButton40Click( self, event ):
        event.Skip()

    def onButton41Click( self, event ):
        event.Skip()

    def onButton42Click( self, event ):
        event.Skip()

    def onButton43Click( self, event ):
        event.Skip()

    def onButton44Click( self, event ):
        event.Skip()

    def OnMenuOpenSelect( self, event ):
        event.Skip()

    def OnMenuSaveSelect( self, event ):
        event.Skip()

    def OnMenuQuitSelect( self, event ):
        event.Skip()

    def OnExportPythonSelect( self, event ):
        event.Skip()
| 44.473684
| 181
| 0.598669
| 1,773
| 13,520
| 4.447829
| 0.091371
| 0.081156
| 0.026629
| 0.066193
| 0.613239
| 0.478443
| 0.469693
| 0.455998
| 0.455998
| 0.442683
| 0
| 0.074268
| 0.252071
| 13,520
| 303
| 182
| 44.620462
| 0.705597
| 0.017456
| 0
| 0.152632
| 1
| 0
| 0.002159
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.163158
| false
| 0.005263
| 0.010526
| 0
| 0.178947
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
07cc54388c8061e52f8dc1aa33c14d904afe5143
| 3,964
|
py
|
Python
|
lectures/extensions/hyperbolic_discounting/replication_code/src/analysis/get_bivariate_distr_data.py
|
loikein/ekw-lectures
|
a2f5436f10515ab26eab323fca8c37c91bdc5dcd
|
[
"MIT"
] | 4
|
2019-11-15T15:21:27.000Z
|
2020-07-08T15:04:30.000Z
|
lectures/extensions/hyperbolic_discounting/replication_code/src/analysis/get_bivariate_distr_data.py
|
loikein/ekw-lectures
|
a2f5436f10515ab26eab323fca8c37c91bdc5dcd
|
[
"MIT"
] | 9
|
2019-11-18T15:54:36.000Z
|
2020-07-14T13:56:53.000Z
|
lectures/extensions/hyperbolic_discounting/replication_code/src/analysis/get_bivariate_distr_data.py
|
loikein/ekw-lectures
|
a2f5436f10515ab26eab323fca8c37c91bdc5dcd
|
[
"MIT"
] | 3
|
2021-01-25T15:41:30.000Z
|
2021-09-21T08:51:36.000Z
|
"""Generate values of Method of Simulated Moments criterion function.
Given observed moments and weighting matrix in `OUT_ANALYSIS`, "msm_estimation",
generate values of Method of Simulated Moments criterion function for combinations
of discount factor and present bias values.
The goal is to study the bivariate distribution of the time preference parameters
around the combination of true parameter values.
"""
import itertools
import numpy as np
import pandas as pd
import respy as rp
import yaml
from bld.project_paths import project_paths_join as ppj
from src.library.compute_moments import _replace_nans
from src.library.compute_moments import calc_restricted_choice_probabilities
from src.library.compute_moments import calc_restricted_wage_distribution
from src.library.compute_moments import calc_unrestricted_choice_probabilities
from src.library.compute_moments import calc_unrestricted_wage_distribution
from src.library.compute_moments import calc_very_restricted_choice_probabilities
from src.library.compute_moments import calc_very_restricted_wage_distribution
from src.library.housekeeping import _load_pickle
from src.library.housekeeping import _temporary_working_directory
from tqdm import tqdm
def get_bivariate_distribution(params, crit_func, grid_delta, grid_beta):
    """Evaluate the MSM criterion on a (beta, delta) grid.

    Args:
        params (pd.DataFrame): Model parameters with a (category, name)
            MultiIndex and a ``value`` column.
        crit_func (callable): Criterion function mapping a params DataFrame
            to a scalar criterion value (e.g. the function returned by
            ``rp.get_moment_errors_func``); the original docstring wrongly
            described this as an options dict.
        grid_delta (np.ndarray): Grid of discount-factor values.
        grid_beta (np.ndarray): Grid of present-bias parameter values.

    Returns:
        pd.DataFrame: One row per (beta, delta) combination with columns
        ``beta``, ``delta`` and ``val``.
    """
    records = []
    # tqdm only adds a progress bar; iteration is beta-major over the grid.
    for beta, delta in tqdm(itertools.product(grid_beta, grid_delta)):
        candidate = params.copy()  # never mutate the caller's params
        candidate.loc[("beta", "beta"), "value"] = beta
        candidate.loc[("delta", "delta"), "value"] = delta
        records.append({"beta": beta, "delta": delta, "val": crit_func(candidate)})
    # pd.DataFrame on a list of records is the idiomatic equivalent of
    # DataFrame.from_dict here and produces an identical frame.
    return pd.DataFrame(records)
if __name__ == "__main__":
    # load params: CSV with a (category, name) MultiIndex, ';'-separated.
    params = pd.read_csv(
        ppj("IN_MODEL_SPECS", "params_hyp.csv"),
        sep=";",
        index_col=["category", "name"],
    )
    # CSV values arrive as strings; the criterion needs floats.
    params["value"] = params["value"].astype(float)
    # load options (the with-target name is rebound to the parsed dict)
    with open(ppj("IN_MODEL_SPECS", "options_hyp.yaml")) as options:
        options = yaml.safe_load(options)
    # get empirical moments
    empirical_moments = _load_pickle(ppj("OUT_ANALYSIS", "msm_estimation", "moments_hyp.pickle"))
    # get weighting matrix
    weighting_matrix = _load_pickle(
        ppj("OUT_ANALYSIS", "msm_estimation", "weighting_matrix_hyp.pickle")
    )
    # Moment functions passed to respy's MSM machinery; keys label the
    # moment groups, values compute them from simulated data.
    calc_moments = {
        "Choice Probabilities Very Restricted": calc_very_restricted_choice_probabilities,
        "Choice Probabilities Restricted": calc_restricted_choice_probabilities,
        "Choice Probabilities Unrestricted": calc_unrestricted_choice_probabilities,
        "Wage Distribution Very Restricted": calc_very_restricted_wage_distribution,
        "Wage Distribution Restricted": calc_restricted_wage_distribution,
        "Wage Distribution Unrestricted": calc_unrestricted_wage_distribution,
    }
    # NOTE(review): presumably respy writes simulation artifacts to the cwd,
    # hence the throwaway directory — confirm in _temporary_working_directory.
    with _temporary_working_directory(snippet="heatmap"):
        # get criterion function
        weighted_sum_squared_errors = rp.get_moment_errors_func(
            params=params,
            options=options,
            calc_moments=calc_moments,
            replace_nans=_replace_nans,
            empirical_moments=empirical_moments,
            weighting_matrix=weighting_matrix,
        )
        # get bivariate distribution results over the (delta, beta) grid
        results = get_bivariate_distribution(
            crit_func=weighted_sum_squared_errors,
            params=params,
            grid_delta=np.arange(0.945, 0.9625, 0.0025),
            grid_beta=np.arange(0.75, 1.05, 0.01),
        )
        # ppj presumably returns a project-absolute path, so this lands in
        # OUT_ANALYSIS despite the temporary cwd — TODO(review): confirm.
        results.to_csv(ppj("OUT_ANALYSIS", "heatmap.csv"))
| 36.703704
| 97
| 0.726791
| 477
| 3,964
| 5.75891
| 0.27673
| 0.022934
| 0.045868
| 0.053513
| 0.333091
| 0.233345
| 0.210047
| 0.183109
| 0.156898
| 0.04878
| 0
| 0.007192
| 0.193239
| 3,964
| 107
| 98
| 37.046729
| 0.851782
| 0.212916
| 0
| 0.031746
| 1
| 0
| 0.145698
| 0.008801
| 0
| 0
| 0
| 0
| 0
| 1
| 0.015873
| false
| 0
| 0.253968
| 0
| 0.285714
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
ed0142db547eada6fd1f50b0e7939a47e99944a3
| 1,746
|
py
|
Python
|
tests/test_hedges.py
|
aplested/DC_Pyps
|
da33fc7d0e7365044e368488d1c7cbbae7473cc7
|
[
"MIT"
] | 1
|
2021-03-25T18:09:25.000Z
|
2021-03-25T18:09:25.000Z
|
tests/test_hedges.py
|
aplested/DC_Pyps
|
da33fc7d0e7365044e368488d1c7cbbae7473cc7
|
[
"MIT"
] | null | null | null |
tests/test_hedges.py
|
aplested/DC_Pyps
|
da33fc7d0e7365044e368488d1c7cbbae7473cc7
|
[
"MIT"
] | null | null | null |
from dcstats.hedges import Hedges_d
from dcstats.statistics_EJ import simple_stats as mean_SD
import random
import math
def generate_sample (length, mean, sigma):
    """Return `length` samples drawn from a Gaussian N(mean, sigma).

    Args:
        length: Number of samples to draw.
        mean: Mean of the normal distribution.
        sigma: Standard deviation of the normal distribution.

    Returns:
        list[float]: The drawn samples.
    """
    # Comprehension replaces the manual append loop; random.gauss is the
    # same generator the original used (seed upstream for reproducibility).
    return [random.gauss(mean, sigma) for _ in range(length)]
def close_enough (a, b, count_error):
    """Return True when `a` and `b` differ by less than a counting-error bound.

    The tolerance is |(a + b) / (2 * count_error)|, i.e. the mean of the two
    values scaled by 1 / count_error.

    Args:
        a, b: Values to compare.
        count_error: Scale of the acceptable error (e.g. sqrt(sample size)).

    Returns:
        bool: True when the values agree within tolerance.
    """
    # The comparison already yields a bool; no if/else ladder needed.
    return math.fabs(a - b) < math.fabs((a + b) / (count_error * 2))
def gaussian_case (sig):
    """Exercise Hedges' d on two simulated Gaussian samples of width `sig`.

    Draws two samples with means 1 and 2, computes the unbiased Hedges' d
    plus analytic and bootstrap confidence intervals, and asserts they agree
    within a sqrt(N) counting-error tolerance.
    """
    n_obs = 200
    tolerance = math.sqrt(n_obs)
    mean_a, mean_b = 1, 2
    group_a = generate_sample(n_obs, mean_a, sig)
    group_b = generate_sample(n_obs, mean_b, sig)

    hd = Hedges_d(group_a, group_b)
    hd.hedges_d_unbiased()  # result is stored on hd.d

    ci_lo_approx, ci_hi_approx = hd.approx_CI()
    ci_lo_boot, ci_hi_boot = hd.bootstrap_CI(5000)

    print(mean_SD(group_a), mean_SD(group_b))
    print("h_testing.d, analytic, correction = ", hd.d, (mean_b - mean_a) / sig, hd.correction)
    print("lower: approx, bootstrap", ci_lo_approx, ci_lo_boot)
    print("upper: approx, bootstrap", ci_hi_approx, ci_hi_boot)

    # Bootstrap matches the analytic CI at high d but widens at low d.
    assert close_enough(ci_lo_approx, ci_lo_boot, tolerance)
    assert close_enough(ci_hi_approx, ci_hi_boot, tolerance)
    assert close_enough(hd.d, (mean_b - mean_a) / sig, tolerance)
###tests
# Each case fixes sigma so the true effect size is d = (2 - 1) / sigma.
def test_gaussian_case_low():
    gaussian_case(0.2) #expect d = 5
def test_gaussian_case_med():
    gaussian_case(0.5) #expect d = 2
def test_gaussian_case_high():
    # At d = 1 the bootstrap CI widens; the author flags this as a
    # known/expected failure.
    gaussian_case(1.0) #expect d = 1, fail
| 29.59322
| 102
| 0.689003
| 265
| 1,746
| 4.264151
| 0.328302
| 0.056637
| 0.039823
| 0.039823
| 0.183186
| 0.120354
| 0
| 0
| 0
| 0
| 0
| 0.041575
| 0.214777
| 1,746
| 58
| 103
| 30.103448
| 0.78264
| 0.100802
| 0
| 0
| 1
| 0
| 0.053915
| 0
| 0
| 0
| 0
| 0
| 0.078947
| 1
| 0.157895
| false
| 0
| 0.105263
| 0
| 0.342105
| 0.105263
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
ed4edc151ca26cac5de8e4d708a84551964ac057
| 14,366
|
py
|
Python
|
sdk/python/pulumi_oci/database/get_external_non_container_database.py
|
EladGabay/pulumi-oci
|
6841e27d4a1a7e15c672306b769912efbfd3ba99
|
[
"ECL-2.0",
"Apache-2.0"
] | 5
|
2021-08-17T11:14:46.000Z
|
2021-12-31T02:07:03.000Z
|
sdk/python/pulumi_oci/database/get_external_non_container_database.py
|
pulumi-oci/pulumi-oci
|
6841e27d4a1a7e15c672306b769912efbfd3ba99
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2021-09-06T11:21:29.000Z
|
2021-09-06T11:21:29.000Z
|
sdk/python/pulumi_oci/database/get_external_non_container_database.py
|
pulumi-oci/pulumi-oci
|
6841e27d4a1a7e15c672306b769912efbfd3ba99
|
[
"ECL-2.0",
"Apache-2.0"
] | 2
|
2021-08-24T23:31:30.000Z
|
2022-01-02T19:26:54.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
__all__ = [
'GetExternalNonContainerDatabaseResult',
'AwaitableGetExternalNonContainerDatabaseResult',
'get_external_non_container_database',
]
@pulumi.output_type
class GetExternalNonContainerDatabaseResult:
    """
    A collection of values returned by getExternalNonContainerDatabase.
    """
    # Generated by the Pulumi Terraform Bridge (see file header) — do not
    # hand-edit the field list; regenerate instead.
    def __init__(__self__, character_set=None, compartment_id=None, database_configuration=None, database_edition=None, database_management_config=None, database_version=None, db_id=None, db_packs=None, db_unique_name=None, defined_tags=None, display_name=None, external_non_container_database_id=None, freeform_tags=None, id=None, lifecycle_details=None, ncharacter_set=None, operations_insights_config=None, state=None, time_created=None, time_zone=None):
        # Each argument is type-checked (only when truthy) and stored via
        # pulumi.set so the @pulumi.output_type machinery exposes it through
        # the matching @property getter below.
        if character_set and not isinstance(character_set, str):
            raise TypeError("Expected argument 'character_set' to be a str")
        pulumi.set(__self__, "character_set", character_set)
        if compartment_id and not isinstance(compartment_id, str):
            raise TypeError("Expected argument 'compartment_id' to be a str")
        pulumi.set(__self__, "compartment_id", compartment_id)
        if database_configuration and not isinstance(database_configuration, str):
            raise TypeError("Expected argument 'database_configuration' to be a str")
        pulumi.set(__self__, "database_configuration", database_configuration)
        if database_edition and not isinstance(database_edition, str):
            raise TypeError("Expected argument 'database_edition' to be a str")
        pulumi.set(__self__, "database_edition", database_edition)
        if database_management_config and not isinstance(database_management_config, dict):
            raise TypeError("Expected argument 'database_management_config' to be a dict")
        pulumi.set(__self__, "database_management_config", database_management_config)
        if database_version and not isinstance(database_version, str):
            raise TypeError("Expected argument 'database_version' to be a str")
        pulumi.set(__self__, "database_version", database_version)
        if db_id and not isinstance(db_id, str):
            raise TypeError("Expected argument 'db_id' to be a str")
        pulumi.set(__self__, "db_id", db_id)
        if db_packs and not isinstance(db_packs, str):
            raise TypeError("Expected argument 'db_packs' to be a str")
        pulumi.set(__self__, "db_packs", db_packs)
        if db_unique_name and not isinstance(db_unique_name, str):
            raise TypeError("Expected argument 'db_unique_name' to be a str")
        pulumi.set(__self__, "db_unique_name", db_unique_name)
        if defined_tags and not isinstance(defined_tags, dict):
            raise TypeError("Expected argument 'defined_tags' to be a dict")
        pulumi.set(__self__, "defined_tags", defined_tags)
        if display_name and not isinstance(display_name, str):
            raise TypeError("Expected argument 'display_name' to be a str")
        pulumi.set(__self__, "display_name", display_name)
        if external_non_container_database_id and not isinstance(external_non_container_database_id, str):
            raise TypeError("Expected argument 'external_non_container_database_id' to be a str")
        pulumi.set(__self__, "external_non_container_database_id", external_non_container_database_id)
        if freeform_tags and not isinstance(freeform_tags, dict):
            raise TypeError("Expected argument 'freeform_tags' to be a dict")
        pulumi.set(__self__, "freeform_tags", freeform_tags)
        if id and not isinstance(id, str):
            raise TypeError("Expected argument 'id' to be a str")
        pulumi.set(__self__, "id", id)
        if lifecycle_details and not isinstance(lifecycle_details, str):
            raise TypeError("Expected argument 'lifecycle_details' to be a str")
        pulumi.set(__self__, "lifecycle_details", lifecycle_details)
        if ncharacter_set and not isinstance(ncharacter_set, str):
            raise TypeError("Expected argument 'ncharacter_set' to be a str")
        pulumi.set(__self__, "ncharacter_set", ncharacter_set)
        if operations_insights_config and not isinstance(operations_insights_config, dict):
            raise TypeError("Expected argument 'operations_insights_config' to be a dict")
        pulumi.set(__self__, "operations_insights_config", operations_insights_config)
        if state and not isinstance(state, str):
            raise TypeError("Expected argument 'state' to be a str")
        pulumi.set(__self__, "state", state)
        if time_created and not isinstance(time_created, str):
            raise TypeError("Expected argument 'time_created' to be a str")
        pulumi.set(__self__, "time_created", time_created)
        if time_zone and not isinstance(time_zone, str):
            raise TypeError("Expected argument 'time_zone' to be a str")
        pulumi.set(__self__, "time_zone", time_zone)

    # Read-only accessors; @pulumi.getter maps the snake_case attribute to
    # the provider's camelCase output name.
    @property
    @pulumi.getter(name="characterSet")
    def character_set(self) -> str:
        """
        The character set of the external database.
        """
        return pulumi.get(self, "character_set")

    @property
    @pulumi.getter(name="compartmentId")
    def compartment_id(self) -> str:
        """
        The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment.
        """
        return pulumi.get(self, "compartment_id")

    @property
    @pulumi.getter(name="databaseConfiguration")
    def database_configuration(self) -> str:
        """
        The Oracle Database configuration
        """
        return pulumi.get(self, "database_configuration")

    @property
    @pulumi.getter(name="databaseEdition")
    def database_edition(self) -> str:
        """
        The Oracle Database edition.
        """
        return pulumi.get(self, "database_edition")

    @property
    @pulumi.getter(name="databaseManagementConfig")
    def database_management_config(self) -> 'outputs.GetExternalNonContainerDatabaseDatabaseManagementConfigResult':
        """
        The configuration of the Database Management service.
        """
        return pulumi.get(self, "database_management_config")

    @property
    @pulumi.getter(name="databaseVersion")
    def database_version(self) -> str:
        """
        The Oracle Database version.
        """
        return pulumi.get(self, "database_version")

    @property
    @pulumi.getter(name="dbId")
    def db_id(self) -> str:
        """
        The Oracle Database ID, which identifies an Oracle Database located outside of Oracle Cloud.
        """
        return pulumi.get(self, "db_id")

    @property
    @pulumi.getter(name="dbPacks")
    def db_packs(self) -> str:
        """
        The database packs licensed for the external Oracle Database.
        """
        return pulumi.get(self, "db_packs")

    @property
    @pulumi.getter(name="dbUniqueName")
    def db_unique_name(self) -> str:
        """
        The `DB_UNIQUE_NAME` of the external database.
        """
        return pulumi.get(self, "db_unique_name")

    @property
    @pulumi.getter(name="definedTags")
    def defined_tags(self) -> Mapping[str, Any]:
        """
        Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm).
        """
        return pulumi.get(self, "defined_tags")

    @property
    @pulumi.getter(name="displayName")
    def display_name(self) -> str:
        """
        The user-friendly name for the external database. The name does not have to be unique.
        """
        return pulumi.get(self, "display_name")

    @property
    @pulumi.getter(name="externalNonContainerDatabaseId")
    def external_non_container_database_id(self) -> str:
        # Echo of the lookup argument; intentionally undocumented upstream.
        return pulumi.get(self, "external_non_container_database_id")

    @property
    @pulumi.getter(name="freeformTags")
    def freeform_tags(self) -> Mapping[str, Any]:
        """
        Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Department": "Finance"}`
        """
        return pulumi.get(self, "freeform_tags")

    @property
    @pulumi.getter
    def id(self) -> str:
        """
        The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the Oracle Cloud Infrastructure external database resource.
        """
        return pulumi.get(self, "id")

    @property
    @pulumi.getter(name="lifecycleDetails")
    def lifecycle_details(self) -> str:
        """
        Additional information about the current lifecycle state.
        """
        return pulumi.get(self, "lifecycle_details")

    @property
    @pulumi.getter(name="ncharacterSet")
    def ncharacter_set(self) -> str:
        """
        The national character of the external database.
        """
        return pulumi.get(self, "ncharacter_set")

    @property
    @pulumi.getter(name="operationsInsightsConfig")
    def operations_insights_config(self) -> 'outputs.GetExternalNonContainerDatabaseOperationsInsightsConfigResult':
        """
        The configuration of Operations Insights for the external database
        """
        return pulumi.get(self, "operations_insights_config")

    @property
    @pulumi.getter
    def state(self) -> str:
        """
        The current state of the Oracle Cloud Infrastructure external database resource.
        """
        return pulumi.get(self, "state")

    @property
    @pulumi.getter(name="timeCreated")
    def time_created(self) -> str:
        """
        The date and time the database was created.
        """
        return pulumi.get(self, "time_created")

    @property
    @pulumi.getter(name="timeZone")
    def time_zone(self) -> str:
        """
        The time zone of the external database. It is a time zone offset (a character type in the format '[+|-]TZH:TZM') or a time zone region name, depending on how the time zone value was specified when the database was created / last altered.
        """
        return pulumi.get(self, "time_zone")
class AwaitableGetExternalNonContainerDatabaseResult(GetExternalNonContainerDatabaseResult):
    # Awaitable wrapper for pulumi's async invoke path. The `if False: yield`
    # makes __await__ a generator that never yields, so awaiting resolves
    # immediately to a plain result object.
    # pylint: disable=using-constant-test
    def __await__(self):
        if False:
            yield self
        return GetExternalNonContainerDatabaseResult(
            character_set=self.character_set,
            compartment_id=self.compartment_id,
            database_configuration=self.database_configuration,
            database_edition=self.database_edition,
            database_management_config=self.database_management_config,
            database_version=self.database_version,
            db_id=self.db_id,
            db_packs=self.db_packs,
            db_unique_name=self.db_unique_name,
            defined_tags=self.defined_tags,
            display_name=self.display_name,
            external_non_container_database_id=self.external_non_container_database_id,
            freeform_tags=self.freeform_tags,
            id=self.id,
            lifecycle_details=self.lifecycle_details,
            ncharacter_set=self.ncharacter_set,
            operations_insights_config=self.operations_insights_config,
            state=self.state,
            time_created=self.time_created,
            time_zone=self.time_zone)
def get_external_non_container_database(external_non_container_database_id: Optional[str] = None,
                                        opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetExternalNonContainerDatabaseResult:
    """
    This data source provides details about a specific External Non Container Database resource in Oracle Cloud Infrastructure Database service.

    Gets information about a specific external non-container database.

    ## Example Usage

    ```python
    import pulumi
    import pulumi_oci as oci

    test_external_non_container_database = oci.database.get_external_non_container_database(external_non_container_database_id=oci_database_external_non_container_database["test_external_non_container_database"]["id"])
    ```


    :param str external_non_container_database_id: The external non-container database [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm).
    """
    # Build the provider-side argument map (camelCase keys per the
    # Terraform-bridge wire format).
    __args__ = dict()
    __args__['externalNonContainerDatabaseId'] = external_non_container_database_id
    if opts is None:
        opts = pulumi.InvokeOptions()
    if opts.version is None:
        opts.version = _utilities.get_version()
    # Synchronous invoke; .value unwraps the typed result.
    __ret__ = pulumi.runtime.invoke('oci:database/getExternalNonContainerDatabase:getExternalNonContainerDatabase', __args__, opts=opts, typ=GetExternalNonContainerDatabaseResult).value

    return AwaitableGetExternalNonContainerDatabaseResult(
        character_set=__ret__.character_set,
        compartment_id=__ret__.compartment_id,
        database_configuration=__ret__.database_configuration,
        database_edition=__ret__.database_edition,
        database_management_config=__ret__.database_management_config,
        database_version=__ret__.database_version,
        db_id=__ret__.db_id,
        db_packs=__ret__.db_packs,
        db_unique_name=__ret__.db_unique_name,
        defined_tags=__ret__.defined_tags,
        display_name=__ret__.display_name,
        external_non_container_database_id=__ret__.external_non_container_database_id,
        freeform_tags=__ret__.freeform_tags,
        id=__ret__.id,
        lifecycle_details=__ret__.lifecycle_details,
        ncharacter_set=__ret__.ncharacter_set,
        operations_insights_config=__ret__.operations_insights_config,
        state=__ret__.state,
        time_created=__ret__.time_created,
        time_zone=__ret__.time_zone)
| 45.034483
| 457
| 0.69748
| 1,662
| 14,366
| 5.711191
| 0.127557
| 0.028972
| 0.052676
| 0.073746
| 0.397282
| 0.263064
| 0.170881
| 0.136115
| 0.0748
| 0.0748
| 0
| 0.000088
| 0.212864
| 14,366
| 318
| 458
| 45.176101
| 0.839317
| 0.188013
| 0
| 0.105263
| 1
| 0
| 0.191274
| 0.071045
| 0
| 0
| 0
| 0
| 0
| 1
| 0.110048
| false
| 0
| 0.028708
| 0.004785
| 0.253589
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
ed5b25db8eee2bdd6eb22e7c4a9c331775d6cf05
| 1,651
|
py
|
Python
|
services/server/server/apps/checkout/migrations/0001_initial.py
|
AyanSamanta23/moni-moni
|
8e8aa4edf4cd2e2b005f6dbe8c885ecc791e6a2b
|
[
"MIT"
] | null | null | null |
services/server/server/apps/checkout/migrations/0001_initial.py
|
AyanSamanta23/moni-moni
|
8e8aa4edf4cd2e2b005f6dbe8c885ecc791e6a2b
|
[
"MIT"
] | null | null | null |
services/server/server/apps/checkout/migrations/0001_initial.py
|
AyanSamanta23/moni-moni
|
8e8aa4edf4cd2e2b005f6dbe8c885ecc791e6a2b
|
[
"MIT"
] | null | null | null |
# Generated by Django 4.0.2 on 2022-02-26 15:52
from django.db import migrations, models
class Migration(migrations.Migration):
    # Initial schema for the checkout app: creates the FundingOptions and
    # PaymentSelections tables. Auto-generated by Django (see file header);
    # applied migrations must not be edited semantically.

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='FundingOptions',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('funding_name', models.CharField(help_text='Required', max_length=255, verbose_name='funding_name')),
                ('funding_price', models.DecimalField(decimal_places=2, help_text='Required', max_digits=1000, verbose_name='funding price')),
                ('funding_timeframe', models.CharField(help_text='Required', max_length=255, verbose_name='funding timeframe')),
                ('funding_window', models.CharField(help_text='Required', max_length=255, verbose_name='funding window')),
            ],
            options={
                'verbose_name': 'Funding Option',
                'verbose_name_plural': 'Funding Options',
            },
        ),
        migrations.CreateModel(
            name='PaymentSelections',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(help_text='Required', max_length=255, verbose_name='name')),
                ('is_active', models.BooleanField(default=True)),
            ],
            options={
                'verbose_name': 'Payment Selection',
                'verbose_name_plural': 'Payment Selections',
            },
        ),
    ]
| 40.268293
| 142
| 0.59358
| 161
| 1,651
| 5.875776
| 0.391304
| 0.127907
| 0.084567
| 0.100423
| 0.432347
| 0.432347
| 0.432347
| 0.432347
| 0.432347
| 0.432347
| 0
| 0.026823
| 0.277408
| 1,651
| 40
| 143
| 41.275
| 0.766136
| 0.027256
| 0
| 0.363636
| 1
| 0
| 0.208229
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.030303
| 0
| 0.151515
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
71f5039371a3b37776d4da5587717221d15a60a1
| 5,276
|
py
|
Python
|
VAE/reduced_model/nesm_generator.py
|
youngmg1995/NES-Music-Maker
|
aeda10a541cfd439cfa46c45e63411e0d98e41c1
|
[
"MIT"
] | 3
|
2020-06-26T22:02:35.000Z
|
2021-11-20T19:24:33.000Z
|
VAE/reduced_model/nesm_generator.py
|
youngmg1995/NES-Music-Maker
|
aeda10a541cfd439cfa46c45e63411e0d98e41c1
|
[
"MIT"
] | null | null | null |
VAE/reduced_model/nesm_generator.py
|
youngmg1995/NES-Music-Maker
|
aeda10a541cfd439cfa46c45e63411e0d98e41c1
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Wed Apr 1 17:14:19 2020
@author: Mitchell
nesm_generator.py
~~~~~~~~~~~~~~~~~
This file serves as a script for using our pre-trained VAE model to generate
brand new NES music soundtracks. NOTE - using the reduced model we only
generate the first melodic voice for each track rather than each of the four
voices present in an NESM track. To do so we first reconstruct our model using
the file VAE class defined in `VAE.py` and the same parameters used in
`model_training`. Then we use functions from the file `generation_utils` to
have our trained model create entirely new and original NES music.
"""
# Imports
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# NOTE - nesmdb folder manually added to environment libraries
from dataset_utils import load_training
from VAE import VAE
from generation_utils import generate_seprsco, latent_SVD, get_latent_vecs,\
plot_track, filter_tracks
import nesmdb
from nesmdb.vgm.vgm_to_wav import save_vgmwav
import tensorflow as tf
import numpy as np
import os, json
### Load Mappings
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Parameters for shape of dataset (note these are also used for model def.)
# Each training example is `measures` measures of `measure_len` time steps.
measures = 8
measure_len = 96

# load data
# NOTE(review): the two mapping dicts presumably translate between raw
# seprsco note values and integer labels — confirm in dataset_utils.
training_foldername = '../../nesmdb24_seprsco/train/'
train_save_filename = 'transformed_dataset.json'
dataset , labels2int_map , int2labels_map = \
    load_training(training_foldername, train_save_filename,
                  measures = measures, measure_len = measure_len)
### Reinitiate Model
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
### Model Parameters
# These must match the values used in `model_training`, otherwise the saved
# checkpoint weights will not load.
latent_dim = 124
input_dim = len(int2labels_map) - 1  # presumably excludes one reserved label — verify
dropout = .1
maxnorm = None
vae_b1 , vae_b2 = .02 , .1

print('Reinitiating VAE Model')

# Build Model
model = VAE(latent_dim, input_dim, measures, measure_len, dropout,
            maxnorm, vae_b1 , vae_b2)

# Reload Saved Weights
checkpoint_dir = './training_checkpoints'
checkpoint_prefix = os.path.join(checkpoint_dir, "model_ckpt")
model.load_weights(checkpoint_prefix)
# Build with an unspecified batch dimension so any batch size works.
model.build(tf.TensorShape([None, measures, measure_len, ]))

# Print Summary of Model
model.summary()
### Sample Latent Variable Distributions
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Here we use SVD to more effectively sample from the orthogonal components
# of our latent space

# Parameters for sampling
num_songs = 10
print('Generating Latent Samples to Generate {} New Tracks'.format(num_songs))

# Grab distributions of dataset over latent space
# Have to run in batches due to size of the dataset
batch_size = 300
latent_vecs = get_latent_vecs(model, dataset, batch_size)

# Sample from normal distribution (unseeded, so output differs per run)
rand_vecs = np.random.normal(0.0, 1.0, (num_songs, latent_dim))

# perform SVD: project the standard-normal draws onto the dataset's
# principal latent directions (see generation_utils.latent_SVD)
plot_eigenvalues = True
sample_vecs = latent_SVD(latent_vecs, rand_vecs, plot_eigenvalues)
### Generate New Tracks
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Create new seprsco tracks using our model and the random samples
# Seprsco files can later be converted to valid NES music format

# Parameters for track generation (specifically filtering)
p_min = .5

print('Generating New Tracks from Latent Samples')

# Decode samples using VAE
decoded_tracks = model.decoder(sample_vecs)

# Plot first decoded track
print("Example Model Generated Track")
plot_track(decoded_tracks[0])

# Filter Track (p_min is the filter threshold; semantics defined in
# generation_utils.filter_tracks)
decoded_tracks = filter_tracks(decoded_tracks, p_min)

# Plot first filtered track
print("Example Filtered Track")
plot_track(decoded_tracks[0])

# Convert tracks to seprsco format
print('Converting Model Output to Seprsco')
seprsco_tracks = generate_seprsco(decoded_tracks, int2labels_map)
### Convert to WAV
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Convert seprsco tracks to WAV files so we can listen!!!
print('Converting Seprsco to WAV Audio')
# nesmdb does the conversion; a comprehension replaces the manual
# append loop (same order, same results).
wav_tracks = [nesmdb.convert.seprsco_to_wav(track) for track in seprsco_tracks]
### Save WAV Files
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Save our wav tracks to appropriate files (be sure not to overwrite existing)
# Also save latent variables so we can reproduce songs we like

# Output folder shared by the WAV and the latent-variable dumps. Defined
# unconditionally: previously it lived inside `if save_wav:`, so enabling
# only `save_latent_var` raised NameError on `wav_folder`.
wav_folder = 'model_gen_files/'

# Save WAV tracks
save_wav = False
if save_wav:
    print('Saving Generated WAV Audio Tracks')
    for i in range(len(wav_tracks)):
        wav_file = wav_folder+'VAE_NESM_{}.wav'.format(i)
        save_vgmwav(wav_file, wav_tracks[i])

# Save Latent Variables
save_latent_var = False
if save_latent_var:
    print('Saving Latent Variables for Generated Tracks')
    latent_filename = os.path.join(wav_folder, "latent_variables.json")
    with open(latent_filename, 'w') as f:
        json.dump({
            'VAE_NESM_{}.wav'.format(i): sample_vecs[i].tolist()
            for i in range(sample_vecs.shape[0])
        }, f)

#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#----------------------------------END FILE------------------------------------
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
| 32.567901
| 79
| 0.638931
| 666
| 5,276
| 4.899399
| 0.336336
| 0.023904
| 0.016549
| 0.006129
| 0.027582
| 0.017162
| 0
| 0
| 0
| 0
| 0
| 0.010179
| 0.14348
| 5,276
| 162
| 80
| 32.567901
| 0.711883
| 0.485027
| 0
| 0.029851
| 1
| 0
| 0.173454
| 0.036199
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.119403
| 0
| 0.119403
| 0.134328
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
71f98ce850b9a0d28247d4a6575715ee5f7a82c8
| 2,955
|
py
|
Python
|
src/rpocore/migrations/0007_auto_20160927_1517.py
|
2martens/rpo-website
|
14990920722c537810aecd2b97f5af6bbdd1b5ec
|
[
"MIT"
] | null | null | null |
src/rpocore/migrations/0007_auto_20160927_1517.py
|
2martens/rpo-website
|
14990920722c537810aecd2b97f5af6bbdd1b5ec
|
[
"MIT"
] | null | null | null |
src/rpocore/migrations/0007_auto_20160927_1517.py
|
2martens/rpo-website
|
14990920722c537810aecd2b97f5af6bbdd1b5ec
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2016-09-27 13:17
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import mezzanine.core.fields
class Migration(migrations.Migration):
    """Auto-generated migration (Django 1.10, see module header).

    Adds the SupportingOrganization model and re-declares several relation
    fields with explicit on_delete behaviour, related_names and verbose
    names.  Do not edit field literals by hand; regenerate instead.
    """
    dependencies = [
        ('rpocore', '0006_auto_20160921_1924'),
    ]
    operations = [
        migrations.CreateModel(
            name='SupportingOrganization',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # Mezzanine ordering field; drives the 'ordering' option below.
                ('_order', mezzanine.core.fields.OrderField(null=True, verbose_name='Order')),
                ('name', models.CharField(max_length=100, verbose_name='Name')),
                ('logo', models.ImageField(upload_to='', verbose_name='Logo of organization')),
                ('url', models.CharField(max_length=200, verbose_name='URL')),
            ],
            options={
                'verbose_name_plural': 'Supporting organizations',
                'ordering': ('_order',),
                'verbose_name': 'Supporting organization',
            },
        ),
        migrations.AlterField(
            model_name='carouselitem',
            name='homepage',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='carousel_items', to='rpocore.HomepagePage', verbose_name='Homepage'),
        ),
        migrations.AlterField(
            model_name='homepagepage',
            name='process',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='rpocore.Process', verbose_name='Process'),
        ),
        migrations.AlterField(
            model_name='notablesupporter',
            name='supporter_page',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='notable_supporters', to='rpocore.SupporterPage', verbose_name='Supporter page'),
        ),
        migrations.AlterField(
            model_name='phase',
            name='process',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='rpocore.Process', verbose_name='Process'),
        ),
        migrations.AlterField(
            model_name='statementpage',
            name='formal_statements',
            field=models.ManyToManyField(blank=True, to='rpocore.FormalStatement', verbose_name='Formal statements'),
        ),
        migrations.AlterField(
            model_name='statementpage',
            name='informal_statements',
            field=models.ManyToManyField(blank=True, to='rpocore.InformalStatement', verbose_name='Informal statements'),
        ),
        migrations.AlterField(
            model_name='supporter',
            name='support_group',
            # PROTECT: deleting a SupportGroup with supporters must fail loudly.
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, to='rpocore.SupportGroup', verbose_name='Support group'),
        ),
    ]
| 43.455882
| 186
| 0.626396
| 288
| 2,955
| 6.25
| 0.347222
| 0.085556
| 0.097222
| 0.112778
| 0.383333
| 0.343889
| 0.308889
| 0.308889
| 0.248889
| 0.232778
| 0
| 0.017056
| 0.246024
| 2,955
| 67
| 187
| 44.104478
| 0.790844
| 0.022335
| 0
| 0.316667
| 1
| 0
| 0.214137
| 0.039501
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.066667
| 0
| 0.116667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
9c245a520078fb55db53d97b8e520bef999698c6
| 9,538
|
py
|
Python
|
api/base/settings/defaults.py
|
mattclark/osf.io
|
7a362ceb6af3393d3d0423aafef336ee13277303
|
[
"Apache-2.0"
] | null | null | null |
api/base/settings/defaults.py
|
mattclark/osf.io
|
7a362ceb6af3393d3d0423aafef336ee13277303
|
[
"Apache-2.0"
] | null | null | null |
api/base/settings/defaults.py
|
mattclark/osf.io
|
7a362ceb6af3393d3d0423aafef336ee13277303
|
[
"Apache-2.0"
] | null | null | null |
"""
Django settings for api project.
Generated by 'django-admin startproject' using Django 1.8.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
import os
from urlparse import urlparse
from website import settings as osf_settings
BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
DATABASES = {
'default': {
'CONN_MAX_AGE': 0,
'ENGINE': 'osf.db.backends.postgresql', # django.db.backends.postgresql
'NAME': os.environ.get('OSF_DB_NAME', 'osf'),
'USER': os.environ.get('OSF_DB_USER', 'postgres'),
'PASSWORD': os.environ.get('OSF_DB_PASSWORD', ''),
'HOST': os.environ.get('OSF_DB_HOST', '127.0.0.1'),
'PORT': os.environ.get('OSF_DB_PORT', '5432'),
'ATOMIC_REQUESTS': True,
'TEST': {
'SERIALIZE': False,
},
},
}
DATABASE_ROUTERS = ['osf.db.router.PostgreSQLFailoverRouter', ]
PASSWORD_HASHERS = [
'django.contrib.auth.hashers.BCryptSHA256PasswordHasher',
'django.contrib.auth.hashers.BCryptPasswordHasher',
]
AUTH_USER_MODEL = 'osf.OSFUser'
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = osf_settings.SECRET_KEY
AUTHENTICATION_BACKENDS = (
'api.base.authentication.backends.ODMBackend',
'guardian.backends.ObjectPermissionBackend',
)
# SECURITY WARNING: don't run with debug turned on in production!
DEV_MODE = osf_settings.DEV_MODE
DEBUG = osf_settings.DEBUG_MODE
DEBUG_PROPAGATE_EXCEPTIONS = True
# session:
SESSION_COOKIE_NAME = 'api'
SESSION_COOKIE_SECURE = osf_settings.SECURE_MODE
SESSION_COOKIE_HTTPONLY = osf_settings.SESSION_COOKIE_HTTPONLY
# csrf:
CSRF_COOKIE_NAME = 'api-csrf'
CSRF_COOKIE_SECURE = osf_settings.SECURE_MODE
CSRF_COOKIE_HTTPONLY = osf_settings.SECURE_MODE
ALLOWED_HOSTS = [
'.osf.io',
]
# Application definition
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.messages',
'django.contrib.sessions',
'django.contrib.staticfiles',
'django.contrib.admin',
# 3rd party
'django_celery_beat',
'django_celery_results',
'rest_framework',
'corsheaders',
'raven.contrib.django.raven_compat',
'django_extensions',
'guardian',
'storages',
'waffle',
'elasticsearch_metrics',
# OSF
'osf',
# Addons
'addons.osfstorage',
'addons.bitbucket',
'addons.box',
'addons.dataverse',
'addons.dropbox',
'addons.figshare',
'addons.forward',
'addons.github',
'addons.gitlab',
'addons.googledrive',
'addons.mendeley',
'addons.onedrive',
'addons.owncloud',
'addons.s3',
'addons.twofactor',
'addons.wiki',
'addons.zotero',
)
# local development using https
if osf_settings.SECURE_MODE and DEBUG:
INSTALLED_APPS += ('sslserver',)
# TODO: Are there more granular ways to configure reporting specifically related to the API?
RAVEN_CONFIG = {
'tags': {'App': 'api'},
'dsn': osf_settings.SENTRY_DSN,
'release': osf_settings.VERSION,
}
BULK_SETTINGS = {
'DEFAULT_BULK_LIMIT': 100,
}
MAX_PAGE_SIZE = 100
REST_FRAMEWORK = {
'PAGE_SIZE': 10,
'DEFAULT_RENDERER_CLASSES': (
'api.base.renderers.JSONAPIRenderer',
'api.base.renderers.JSONRendererWithESISupport',
'api.base.renderers.BrowsableAPIRendererNoForms',
),
'DEFAULT_PARSER_CLASSES': (
'api.base.parsers.JSONAPIParser',
'api.base.parsers.JSONAPIParserForRegularJSON',
'rest_framework.parsers.FormParser',
'rest_framework.parsers.MultiPartParser',
),
'EXCEPTION_HANDLER': 'api.base.exceptions.json_api_exception_handler',
'DEFAULT_CONTENT_NEGOTIATION_CLASS': 'api.base.content_negotiation.JSONAPIContentNegotiation',
'DEFAULT_VERSIONING_CLASS': 'api.base.versioning.BaseVersioning',
'DEFAULT_VERSION': '2.0',
'ALLOWED_VERSIONS': (
'2.0',
'2.1',
'2.2',
'2.3',
'2.4',
'2.5',
'2.6',
'2.7',
'2.8',
'2.9',
'2.10',
'2.11',
'2.12',
'2.13',
'2.14',
'2.15',
'2.16',
'2.17',
),
'DEFAULT_FILTER_BACKENDS': ('api.base.filters.OSFOrderingFilter',),
'DEFAULT_PAGINATION_CLASS': 'api.base.pagination.JSONAPIPagination',
'ORDERING_PARAM': 'sort',
'DEFAULT_AUTHENTICATION_CLASSES': (
# Custom auth classes
'api.base.authentication.drf.OSFBasicAuthentication',
'api.base.authentication.drf.OSFSessionAuthentication',
'api.base.authentication.drf.OSFCASAuthentication',
),
'DEFAULT_THROTTLE_CLASSES': (
'rest_framework.throttling.UserRateThrottle',
'api.base.throttling.NonCookieAuthThrottle',
),
'DEFAULT_THROTTLE_RATES': {
'user': '10000/day',
'non-cookie-auth': '100/hour',
'add-contributor': '10/second',
'create-guid': '1000/hour',
'root-anon-throttle': '1000/hour',
'test-user': '2/hour',
'test-anon': '1/hour',
'send-email': '2/minute',
},
}
# Settings related to CORS Headers addon: allow API to receive authenticated requests from OSF
# CORS plugin only matches based on "netloc" part of URL, so as workaround we add that to the list
CORS_ORIGIN_ALLOW_ALL = False
CORS_ORIGIN_WHITELIST = (
urlparse(osf_settings.DOMAIN).netloc,
osf_settings.DOMAIN,
)
# This needs to remain True to allow cross origin requests that are in CORS_ORIGIN_WHITELIST to
# use cookies.
CORS_ALLOW_CREDENTIALS = True
# Set dynamically on app init
ORIGINS_WHITELIST = ()
MIDDLEWARE = (
'api.base.middleware.DjangoGlobalMiddleware',
'api.base.middleware.CeleryTaskMiddleware',
'api.base.middleware.PostcommitTaskMiddleware',
# A profiling middleware. ONLY FOR DEV USE
# Uncomment and add "prof" to url params to recieve a profile for that url
# 'api.base.middleware.ProfileMiddleware',
# 'django.contrib.sessions.middleware.SessionMiddleware',
'api.base.middleware.CorsMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
# 'django.contrib.auth.middleware.AuthenticationMiddleware',
# 'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
# 'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
'waffle.middleware.WaffleMiddleware',
)
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
},
]
ROOT_URLCONF = 'api.base.urls'
WSGI_APPLICATION = 'api.base.wsgi.application'
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# https://django-storages.readthedocs.io/en/latest/backends/gcloud.html
if os.environ.get('GOOGLE_APPLICATION_CREDENTIALS', False):
# Required to interact with Google Cloud Storage
DEFAULT_FILE_STORAGE = 'api.base.storage.RequestlessURLGoogleCloudStorage'
GS_BUCKET_NAME = os.environ.get('GS_BUCKET_NAME', 'cos-osf-stage-cdn-us')
GS_FILE_OVERWRITE = os.environ.get('GS_FILE_OVERWRITE', False)
elif osf_settings.DEV_MODE or osf_settings.DEBUG_MODE:
DEFAULT_FILE_STORAGE = 'api.base.storage.DevFileSystemStorage'
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_ROOT = os.path.join(BASE_DIR, 'static/vendor')
API_BASE = 'v2/'
API_PRIVATE_BASE = '_/'
STATIC_URL = '/static/'
NODE_CATEGORY_MAP = osf_settings.NODE_CATEGORY_MAP
DEBUG_TRANSACTIONS = DEBUG
JWT_SECRET = 'osf_api_cas_login_jwt_secret_32b'
JWE_SECRET = 'osf_api_cas_login_jwe_secret_32b'
ENABLE_VARNISH = osf_settings.ENABLE_VARNISH
ENABLE_ESI = osf_settings.ENABLE_ESI
VARNISH_SERVERS = osf_settings.VARNISH_SERVERS
ESI_MEDIA_TYPES = osf_settings.ESI_MEDIA_TYPES
ADDONS_FOLDER_CONFIGURABLE = ['box', 'dropbox', 's3', 'googledrive', 'figshare', 'owncloud', 'onedrive']
ADDONS_OAUTH = ADDONS_FOLDER_CONFIGURABLE + ['dataverse', 'github', 'bitbucket', 'gitlab', 'mendeley', 'zotero', 'forward']
BYPASS_THROTTLE_TOKEN = 'test-token'
OSF_SHELL_USER_IMPORTS = None
# Settings for use in the admin
OSF_URL = 'https://osf.io'
SELECT_FOR_UPDATE_ENABLED = True
# Disable anonymous user permissions in django-guardian
ANONYMOUS_USER_NAME = None
# If set to True, automated tests with extra queries will fail.
NPLUSONE_RAISE = False
# salt used for generating hashids
HASHIDS_SALT = 'pinkhimalayan'
# django-elasticsearch-metrics
ELASTICSEARCH_DSL = {
'default': {
'hosts': os.environ.get('ELASTIC6_URI', '127.0.0.1:9201'),
'retry_on_timeout': True,
},
}
# Store yearly indices for time-series metrics
ELASTICSEARCH_METRICS_DATE_FORMAT = '%Y'
WAFFLE_CACHE_NAME = 'waffle_cache'
STORAGE_USAGE_CACHE_NAME = 'storage_usage'
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
},
STORAGE_USAGE_CACHE_NAME: {
'BACKEND': 'django.core.cache.backends.db.DatabaseCache',
'LOCATION': 'osf_cache_table',
},
WAFFLE_CACHE_NAME: {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
},
}
| 28.990881
| 123
| 0.698784
| 1,121
| 9,538
| 5.742194
| 0.369313
| 0.027187
| 0.016778
| 0.011651
| 0.09492
| 0.070219
| 0.050023
| 0.027963
| 0
| 0
| 0
| 0.015463
| 0.172783
| 9,538
| 328
| 124
| 29.079268
| 0.80038
| 0.202139
| 0
| 0.043668
| 1
| 0
| 0.450053
| 0.279863
| 0
| 0
| 0
| 0.003049
| 0
| 1
| 0
| false
| 0.021834
| 0.017467
| 0
| 0.017467
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
9c2fcf2ba9545bbf4f412026ea905a2899fef624
| 2,511
|
py
|
Python
|
regenesis/modelgen.py
|
crijke/regenesis
|
e53a0c6302aa458ff9ae95f573d5594351e5434c
|
[
"MIT"
] | 16
|
2015-04-09T14:40:53.000Z
|
2021-07-13T15:03:35.000Z
|
regenesis/modelgen.py
|
crijke/regenesis
|
e53a0c6302aa458ff9ae95f573d5594351e5434c
|
[
"MIT"
] | 1
|
2018-06-25T07:51:18.000Z
|
2018-06-25T07:51:18.000Z
|
regenesis/modelgen.py
|
crijke/regenesis
|
e53a0c6302aa458ff9ae95f573d5594351e5434c
|
[
"MIT"
] | 3
|
2015-12-20T18:24:21.000Z
|
2018-06-24T16:57:25.000Z
|
import json
from regenesis.queries import get_cubes, get_all_dimensions, get_dimensions
from pprint import pprint
def generate_dimensions():
    """Build the list of dimension descriptions for the cubes model.

    Entries whose measure_type starts with 'W-' are skipped (they are
    handled as measures elsewhere); entries whose measure_type contains
    'ZI' get text/from/until attributes instead of name/label.
    """
    result = []
    for raw in get_all_dimensions():
        pprint(raw)
        measure_type = raw.get('measure_type')
        if measure_type.startswith('W-'):
            continue
        if 'ZI' in measure_type:
            attributes = ['text', 'from', 'until']
        else:
            attributes = ['name', 'label']
        result.append({
            'name': raw.get('name'),
            'label': raw.get('title_de'),
            'description': raw.get('definition_de'),
            'attributes': attributes,
        })
    return result
def generate_cubes():
    """Assemble one cube description per statistical cube.

    Each description carries the cube's dimensions, measures, table joins
    and column mappings against its 'fact_<name>' fact table.
    """
    result = []
    for cube in get_cubes():
        name = cube.get('cube_name')
        dims = []
        measures = []
        joins = []
        mappings = {}
        for dim in get_dimensions(name):
            dim_name = dim.get('dim_name')
            measure_type = dim.get('dim_measure_type')
            if measure_type.startswith('W-'):
                # 'W-' entries are measures, not dimensions.
                measures.append(dim_name)
                continue
            dims.append(dim_name)
            if measure_type.startswith('ZI-'):
                # 'ZI-' dimensions map straight onto fact-table columns.
                mappings[dim_name + '.text'] = 'fact_%s.%s' % (name, dim_name)
                mappings[dim_name + '.from'] = 'fact_%s.%s_from' % (name, dim_name)
                mappings[dim_name + '.until'] = 'fact_%s.%s_until' % (name, dim_name)
            else:
                # Regular dimensions are joined in via their lookup table.
                alias = 'tbl_' + dim_name
                joins.append({
                    'master': dim_name,
                    'detail': 'value.value_id',
                    'alias': alias,
                })
                mappings[dim_name + '.name'] = alias + '.name'
                mappings[dim_name + '.label'] = alias + '.title_de'
        result.append({
            'dimensions': dims,
            'measures': measures,
            'mappings': mappings,
            'joins': joins,
            'fact': 'fact_%s' % name,
            'name': cube.get('cube_name'),
            'label': cube.get('statistic_title_de'),
            'description': cube.get('statistic_description_de'),
        })
    return result
def generate_model():
    """Compose the complete cubes model (dimensions + cubes, German locale).

    The model is pretty-printed for inspection before being returned.
    """
    model = dict(
        dimensions=generate_dimensions(),
        cubes=generate_cubes(),
        locale='de',
    )
    pprint(model)
    return model
if __name__ == '__main__':
    # Write the generated model as pretty-printed JSON.
    # Open in text mode: json.dump produces str, and a 'wb' handle would
    # raise TypeError under Python 3 (it only worked on Python 2).
    with open('model.json', 'w') as fh:
        model = generate_model()
        json.dump(model, fh, indent=2)
| 30.253012
| 78
| 0.502589
| 250
| 2,511
| 4.84
| 0.248
| 0.052893
| 0.033058
| 0.038017
| 0.117355
| 0.052893
| 0.052893
| 0
| 0
| 0
| 0
| 0.000619
| 0.35683
| 2,511
| 82
| 79
| 30.621951
| 0.748607
| 0
| 0
| 0.085714
| 1
| 0
| 0.172112
| 0.009562
| 0
| 0
| 0
| 0
| 0
| 1
| 0.042857
| false
| 0
| 0.042857
| 0
| 0.128571
| 0.042857
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
92bbcd24bf10bc66f379878a7b6917a00a8a96a4
| 2,698
|
py
|
Python
|
layerserver/migrations/0001_initial.py
|
aroiginfraplan/giscube-admin
|
b7f3131b0186f847f3902df97f982cb288b16a49
|
[
"BSD-3-Clause"
] | 5
|
2018-06-07T12:54:35.000Z
|
2022-01-14T10:38:38.000Z
|
layerserver/migrations/0001_initial.py
|
aroiginfraplan/giscube-admin
|
b7f3131b0186f847f3902df97f982cb288b16a49
|
[
"BSD-3-Clause"
] | 140
|
2018-06-18T10:27:28.000Z
|
2022-03-23T09:53:15.000Z
|
layerserver/migrations/0001_initial.py
|
aroiginfraplan/giscube-admin
|
b7f3131b0186f847f3902df97f982cb288b16a49
|
[
"BSD-3-Clause"
] | 1
|
2021-04-13T11:20:54.000Z
|
2021-04-13T11:20:54.000Z
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.10 on 2018-04-26 09:14
import colorfield.fields
from django.db import migrations, models
import django.db.models.deletion
import giscube.utils
class Migration(migrations.Migration):
    """Initial auto-generated migration (Django 1.11.10, see module header).

    Creates the GeoJsonLayer model with styling, visibility and data-source
    fields.  Do not edit field literals by hand; regenerate instead.
    """
    initial = True
    dependencies = [
        ('giscube', '0002_update'),
    ]
    operations = [
        migrations.CreateModel(
            name='GeoJsonLayer',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=50, unique=True)),
                ('title', models.CharField(blank=True, max_length=100, null=True)),
                ('description', models.TextField(blank=True, null=True)),
                ('keywords', models.CharField(blank=True, max_length=200, null=True)),
                ('active', models.BooleanField(default=True)),
                ('visibility', models.CharField(choices=[('private', 'Private'), ('public', 'Public')], default='private', max_length=10)),
                ('visible_on_geoportal', models.BooleanField(default=False)),
                ('shapetype', models.CharField(blank=True, choices=[('marker', 'Marker'), ('line', 'Line'), ('polygon', 'Polygon'), ('Circle', 'Circle')], max_length=20, null=True)),
                ('shape_radius', models.IntegerField(blank=True, null=True)),
                # NOTE: default colors are bytes literals (b'...') as generated.
                ('stroke_color', colorfield.fields.ColorField(blank=True, default=b'#FF3333', max_length=18, null=True)),
                ('stroke_width', models.IntegerField(blank=True, default=1, null=True)),
                ('stroke_dash_array', models.CharField(blank=True, default='', max_length=25, null=True)),
                ('fill_color', colorfield.fields.ColorField(blank=True, default=b'#FFC300', max_length=18, null=True)),
                ('fill_opacity', models.DecimalField(blank=True, decimal_places=1, default=1, max_digits=2, null=True)),
                ('url', models.CharField(blank=True, max_length=100, null=True)),
                ('data_file', models.FileField(blank=True, null=True, upload_to=giscube.utils.unique_service_directory)),
                ('service_path', models.CharField(max_length=255)),
                ('cache_time', models.IntegerField(blank=True, null=True)),
                ('last_fetch_on', models.DateField(blank=True, null=True)),
                ('category', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='giscube.Category')),
            ],
            options={
                'verbose_name': 'GeoJSONLayer',
                'verbose_name_plural': 'GeoJSONLayers',
            },
        ),
    ]
| 52.901961
| 182
| 0.610823
| 293
| 2,698
| 5.494881
| 0.392491
| 0.083851
| 0.048447
| 0.063354
| 0.201863
| 0.178261
| 0.114286
| 0.114286
| 0.054658
| 0
| 0
| 0.02743
| 0.2298
| 2,698
| 50
| 183
| 53.96
| 0.747353
| 0.025575
| 0
| 0
| 1
| 0
| 0.153085
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.097561
| 0
| 0.195122
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
92cab9ec692aa8897ecccca29c25b34c478b66a7
| 8,798
|
py
|
Python
|
qiskit_metal/_gui/elements_ui.py
|
sarafs1926/qiskit-metal
|
cf2ce8125ebe8f21b6d1b85362466fd57db2cada
|
[
"Apache-2.0"
] | 1
|
2022-01-27T07:11:49.000Z
|
2022-01-27T07:11:49.000Z
|
qiskit_metal/_gui/elements_ui.py
|
sarafs1926/qiskit-metal
|
cf2ce8125ebe8f21b6d1b85362466fd57db2cada
|
[
"Apache-2.0"
] | null | null | null |
qiskit_metal/_gui/elements_ui.py
|
sarafs1926/qiskit-metal
|
cf2ce8125ebe8f21b6d1b85362466fd57db2cada
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file './elements_ui.ui',
# licensing of './elements_ui.ui' applies.
#
# Created: Wed Jun 16 14:29:03 2021
# by: pyside2-uic running on PySide2 5.13.2
#
# WARNING! All changes made in this file will be lost!
from PySide2 import QtCore, QtGui, QtWidgets
class Ui_ElementsWindow(object):
    """Auto-generated UI builder for ElementsWindow (pyside2-uic output).

    Produced from './elements_ui.ui'; regenerate rather than editing by
    hand (see module header warning).
    """

    def setupUi(self, ElementsWindow):
        """Create and lay out all widgets on *ElementsWindow*."""
        ElementsWindow.setObjectName("ElementsWindow")
        ElementsWindow.resize(841, 623)
        self.centralwidget = QtWidgets.QWidget(ElementsWindow)
        self.centralwidget.setObjectName("centralwidget")
        self.verticalLayout_2 = QtWidgets.QVBoxLayout(self.centralwidget)
        self.verticalLayout_2.setSpacing(0)
        self.verticalLayout_2.setContentsMargins(0, 0, 0, 0)
        self.verticalLayout_2.setObjectName("verticalLayout_2")
        self.verticalLayout = QtWidgets.QVBoxLayout()
        self.verticalLayout.setSizeConstraint(
            QtWidgets.QLayout.SetDefaultConstraint)
        self.verticalLayout.setObjectName("verticalLayout")
        # Top toolbar row: refresh button, element-type combo, filter fields.
        self.horizontalLayout = QtWidgets.QHBoxLayout()
        self.horizontalLayout.setObjectName("horizontalLayout")
        self.btn_refresh = QtWidgets.QPushButton(self.centralwidget)
        self.btn_refresh.setCursor(QtCore.Qt.ClosedHandCursor)
        self.btn_refresh.setText("")
        icon = QtGui.QIcon()
        icon.addPixmap(QtGui.QPixmap(":/refresh"), QtGui.QIcon.Normal,
                       QtGui.QIcon.Off)
        self.btn_refresh.setIcon(icon)
        self.btn_refresh.setIconSize(QtCore.QSize(20, 20))
        self.btn_refresh.setAutoDefault(False)
        self.btn_refresh.setDefault(False)
        self.btn_refresh.setFlat(True)
        self.btn_refresh.setObjectName("btn_refresh")
        self.horizontalLayout.addWidget(self.btn_refresh)
        self.label = QtWidgets.QLabel(self.centralwidget)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum,
                                           QtWidgets.QSizePolicy.Minimum)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(
            self.label.sizePolicy().hasHeightForWidth())
        self.label.setSizePolicy(sizePolicy)
        font = QtGui.QFont()
        font.setWeight(75)
        font.setBold(True)
        self.label.setFont(font)
        self.label.setAlignment(QtCore.Qt.AlignRight | QtCore.Qt.AlignTrailing |
                                QtCore.Qt.AlignVCenter)
        self.label.setObjectName("label")
        self.horizontalLayout.addWidget(self.label)
        # Combo box choosing which element table is displayed.
        self.combo_element_type = QtWidgets.QComboBox(self.centralwidget)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred,
                                           QtWidgets.QSizePolicy.Minimum)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(
            self.combo_element_type.sizePolicy().hasHeightForWidth())
        self.combo_element_type.setSizePolicy(sizePolicy)
        self.combo_element_type.setCurrentText("")
        self.combo_element_type.setSizeAdjustPolicy(
            QtWidgets.QComboBox.AdjustToContents)
        self.combo_element_type.setObjectName("combo_element_type")
        self.horizontalLayout.addWidget(self.combo_element_type)
        self.line = QtWidgets.QFrame(self.centralwidget)
        self.line.setFrameShape(QtWidgets.QFrame.VLine)
        self.line.setFrameShadow(QtWidgets.QFrame.Sunken)
        self.line.setObjectName("line")
        self.horizontalLayout.addWidget(self.line)
        self.label_3 = QtWidgets.QLabel(self.centralwidget)
        font = QtGui.QFont()
        font.setWeight(75)
        font.setBold(True)
        self.label_3.setFont(font)
        self.label_3.setObjectName("label_3")
        self.horizontalLayout.addWidget(self.label_3)
        self.label_2 = QtWidgets.QLabel(self.centralwidget)
        self.label_2.setObjectName("label_2")
        self.horizontalLayout.addWidget(self.label_2)
        self.lineEdit = QtWidgets.QLineEdit(self.centralwidget)
        self.lineEdit.setObjectName("lineEdit")
        self.horizontalLayout.addWidget(self.lineEdit)
        self.label_4 = QtWidgets.QLabel(self.centralwidget)
        self.label_4.setObjectName("label_4")
        self.horizontalLayout.addWidget(self.label_4)
        self.lineEdit_2 = QtWidgets.QLineEdit(self.centralwidget)
        self.lineEdit_2.setObjectName("lineEdit_2")
        self.horizontalLayout.addWidget(self.lineEdit_2)
        self.line_2 = QtWidgets.QFrame(self.centralwidget)
        self.line_2.setFrameShape(QtWidgets.QFrame.VLine)
        self.line_2.setFrameShadow(QtWidgets.QFrame.Sunken)
        self.line_2.setObjectName("line_2")
        self.horizontalLayout.addWidget(self.line_2)
        self.verticalLayout.addLayout(self.horizontalLayout)
        # Main elements table below the toolbar row.
        self.tableElements = QtWidgets.QTableView(self.centralwidget)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding,
                                           QtWidgets.QSizePolicy.Expanding)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(
            self.tableElements.sizePolicy().hasHeightForWidth())
        self.tableElements.setSizePolicy(sizePolicy)
        self.tableElements.setProperty("showDropIndicator", False)
        self.tableElements.setDragDropOverwriteMode(False)
        self.tableElements.setAlternatingRowColors(True)
        self.tableElements.setSortingEnabled(False)
        self.tableElements.setObjectName("tableElements")
        self.verticalLayout.addWidget(self.tableElements)
        self.verticalLayout_2.addLayout(self.verticalLayout)
        ElementsWindow.setCentralWidget(self.centralwidget)
        self.menubar = QtWidgets.QMenuBar()
        self.menubar.setGeometry(QtCore.QRect(0, 0, 841, 22))
        self.menubar.setObjectName("menubar")
        ElementsWindow.setMenuBar(self.menubar)
        self.statusbar = QtWidgets.QStatusBar(ElementsWindow)
        self.statusbar.setEnabled(True)
        self.statusbar.setObjectName("statusbar")
        ElementsWindow.setStatusBar(self.statusbar)
        self.retranslateUi(ElementsWindow)
        # Old-style signal/slot connections to methods on ElementsWindow.
        QtCore.QObject.connect(self.combo_element_type,
                               QtCore.SIGNAL("currentIndexChanged(QString)"),
                               ElementsWindow.combo_element_type)
        QtCore.QObject.connect(self.btn_refresh, QtCore.SIGNAL("clicked()"),
                               ElementsWindow.force_refresh)
        QtCore.QMetaObject.connectSlotsByName(ElementsWindow)

    def retranslateUi(self, ElementsWindow):
        """Assign all user-visible (translatable) strings to the widgets."""
        ElementsWindow.setWindowTitle(
            QtWidgets.QApplication.translate("ElementsWindow", "MainWindow",
                                             None, -1))
        self.btn_refresh.setToolTip(
            QtWidgets.QApplication.translate("ElementsWindow",
                                             "Force refresh the table ", None,
                                             -1))
        self.btn_refresh.setStatusTip(
            QtWidgets.QApplication.translate("ElementsWindow",
                                             "Force refresh the table ", None,
                                             -1))
        self.btn_refresh.setWhatsThis(
            QtWidgets.QApplication.translate("ElementsWindow",
                                             "Force refresh the table ", None,
                                             -1))
        self.btn_refresh.setAccessibleDescription(
            QtWidgets.QApplication.translate("ElementsWindow",
                                             "Force refresh the table ", None,
                                             -1))
        self.label.setText(
            QtWidgets.QApplication.translate("ElementsWindow", "Element type: ",
                                             None, -1))
        self.combo_element_type.setToolTip(
            QtWidgets.QApplication.translate(
                "ElementsWindow",
                "<html><head/><body><p>Select the element table you wish to view</p></body></html>",
                None, -1))
        self.label_3.setText(
            QtWidgets.QApplication.translate("ElementsWindow", " Filter: ",
                                             None, -1))
        self.label_2.setText(
            QtWidgets.QApplication.translate("ElementsWindow", "Component: ",
                                             None, -1))
        self.label_4.setText(
            QtWidgets.QApplication.translate("ElementsWindow", " Layer: ",
                                             None, -1))
from . import main_window_rc_rc
| 49.988636
| 100
| 0.639236
| 780
| 8,798
| 7.111538
| 0.234615
| 0.034072
| 0.037858
| 0.059492
| 0.356589
| 0.248783
| 0.172345
| 0.13611
| 0.13611
| 0.119704
| 0
| 0.01458
| 0.26722
| 8,798
| 175
| 101
| 50.274286
| 0.84582
| 0.030461
| 0
| 0.221519
| 1
| 0.006329
| 0.072879
| 0.009154
| 0
| 0
| 0
| 0
| 0
| 1
| 0.012658
| false
| 0
| 0.012658
| 0
| 0.031646
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
92ea3eda1c775e0583e47210352c08da3ae6793c
| 3,995
|
py
|
Python
|
amy/workshops/migrations/0191_auto_20190809_0936.py
|
code-review-doctor/amy
|
268c1a199510457891459f3ddd73fcce7fe2b974
|
[
"MIT"
] | 53
|
2015-01-10T17:39:19.000Z
|
2019-06-12T17:36:34.000Z
|
amy/workshops/migrations/0191_auto_20190809_0936.py
|
code-review-doctor/amy
|
268c1a199510457891459f3ddd73fcce7fe2b974
|
[
"MIT"
] | 1,176
|
2015-01-02T06:32:47.000Z
|
2019-06-18T11:57:47.000Z
|
amy/workshops/migrations/0191_auto_20190809_0936.py
|
code-review-doctor/amy
|
268c1a199510457891459f3ddd73fcce7fe2b974
|
[
"MIT"
] | 44
|
2015-01-03T15:08:56.000Z
|
2019-06-09T05:33:08.000Z
|
# Generated by Django 2.1.7 on 2019-08-09 09:36
from django.db import migrations, models
def migrate_public_event(apps, schema_editor):
    """Migrate options previously with no contents (displayed as "Other:")
    to a new contents ("other").
    The field containing these options is in CommonRequest abstract model,
    implemented in WorkshopRequest, WorkshopInquiryRequest, and
    SelfOrganizedSubmission models."""
    # All three models share the CommonRequest field, so the update is the
    # same for each of them.
    affected_models = [
        ('workshops', 'WorkshopRequest'),
        ('extrequests', 'WorkshopInquiryRequest'),
        ('extrequests', 'SelfOrganizedSubmission'),
    ]
    for app_label, model_name in affected_models:
        model = apps.get_model(app_label, model_name)
        model.objects.filter(public_event="").update(public_event="other")
class Migration(migrations.Migration):
    """Auto-generated migration (Django 2.1.7, see module header).

    Updates help/verbose text on several WorkshopRequest fields and runs
    migrate_public_event to backfill empty public_event values.  Do not
    edit the long field literals by hand; regenerate instead.
    """

    dependencies = [
        ('workshops', '0190_auto_20190728_1118'),
        ('extrequests', '0008_auto_20190809_1004'),
    ]
    operations = [
        migrations.AlterField(
            model_name='workshoprequest',
            name='host_responsibilities',
            field=models.BooleanField(default=False, verbose_name='I understand <a href="https://docs.carpentries.org/topic_folders/hosts_instructors/hosts_instructors_checklist.html#host-checklist">the responsibilities of the workshop host</a>, including recruiting local helpers to support the workshop (1 helper for every 8-10 learners).'),
        ),
        migrations.AlterField(
            model_name='workshoprequest',
            name='requested_workshop_types',
            field=models.ManyToManyField(help_text='If your learners are new to programming and primarily interested in working with data, Data Carpentry is likely the best choice. If your learners are interested in learning more about programming, including version control and automation, Software Carpentry is likely the best match. If your learners are people working in library and information related roles interested in learning data and software skills, Library Carpentry is the best choice. Please visit the <a href="https://software-carpentry.org/lessons/">Software Carpentry lessons page</a>, <a href="http://www.datacarpentry.org/lessons/">Data Carpentry lessons page</a>, or the <a href="https://librarycarpentry.org/lessons/">Library Carpentry lessons page</a> for more information about any of our lessons.', limit_choices_to={'active': True}, to='workshops.Curriculum', verbose_name='Which Carpentries workshop are you requesting?'),
        ),
        migrations.AlterField(
            model_name='workshoprequest',
            name='scholarship_circumstances',
            field=models.TextField(blank=True, help_text='Required only if you request a scholarship.', verbose_name='Please explain the circumstances for your scholarship request and let us know what budget you have towards The Carpentries workshop fees.'),
        ),
        migrations.AlterField(
            model_name='workshoprequest',
            name='public_event',
            field=models.CharField(blank=True, choices=[('invite', 'This event is open to learners by invitation only.'), ('closed', 'This event is open to learners inside of my institution.'), ('public', 'This event is open to learners outside of my institution.'), ('other', 'Other:')], default='', help_text='Many of our workshops restrict registration to learners from the hosting institution. If your workshop will be open to registrants outside of your institution please let us know below.', max_length=20, verbose_name='Is this workshop open to the public?'),
        ),
        # Data migration: replace empty public_event values with 'other'.
        migrations.RunPython(migrate_public_event),
    ]
| 71.339286
| 949
| 0.702378
| 460
| 3,995
| 6.006522
| 0.406522
| 0.035831
| 0.036193
| 0.041983
| 0.163952
| 0.14658
| 0.049946
| 0.049946
| 0
| 0
| 0
| 0.016688
| 0.205006
| 3,995
| 55
| 950
| 72.636364
| 0.853275
| 0.076596
| 0
| 0.365854
| 1
| 0.097561
| 0.550859
| 0.043905
| 0
| 0
| 0
| 0
| 0
| 1
| 0.02439
| false
| 0
| 0.02439
| 0
| 0.121951
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
1317d06f323723e4a4f4ac9a34fb6dfc7aa40fb0
| 1,146
|
py
|
Python
|
website/migrations/0084_auto_20210215_1401.py
|
czhu1217/cmimc-online
|
5ef49ceec0bb86d8ae120a6ecfd723532e277821
|
[
"MIT"
] | null | null | null |
website/migrations/0084_auto_20210215_1401.py
|
czhu1217/cmimc-online
|
5ef49ceec0bb86d8ae120a6ecfd723532e277821
|
[
"MIT"
] | 1
|
2022-01-23T21:08:12.000Z
|
2022-01-23T21:08:12.000Z
|
website/migrations/0084_auto_20210215_1401.py
|
czhu1217/cmimc-online
|
5ef49ceec0bb86d8ae120a6ecfd723532e277821
|
[
"MIT"
] | 1
|
2021-10-17T17:11:42.000Z
|
2021-10-17T17:11:42.000Z
|
# Generated by Django 3.1.6 on 2021-02-15 19:01
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Add a division number to Exam and introduce the ExamPair grouping model."""

    dependencies = [
        ('website', '0083_remove_aisubmission_code'),
    ]

    operations = [
        # Existing Exam rows get division=1; preserve_default=False means the
        # default is used only for this migration, not kept on the model.
        migrations.AddField(
            model_name='exam',
            name='division',
            field=models.IntegerField(default=1),
            preserve_default=False,
        ),
        migrations.CreateModel(
            name='ExamPair',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=100, unique=True)),
                ('contest', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='exampairs', to='website.contest')),
            ],
        ),
        migrations.AddField(
            model_name='exam',
            name='exampair',
            # SET_NULL keeps exams alive when their ExamPair is deleted.
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='exams', to='website.exampair'),
        ),
    ]
| 33.705882
| 150
| 0.604712
| 120
| 1,146
| 5.65
| 0.541667
| 0.047198
| 0.061947
| 0.097345
| 0.19174
| 0.19174
| 0
| 0
| 0
| 0
| 0
| 0.027316
| 0.265271
| 1,146
| 33
| 151
| 34.727273
| 0.77791
| 0.039267
| 0
| 0.259259
| 1
| 0
| 0.11647
| 0.026388
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.074074
| 0
| 0.185185
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
13275a03cfbb8b9a14ddc59598aea372c23ea6fb
| 6,461
|
py
|
Python
|
relocation/depth/setup_relocation_dir.py
|
ziyixi/SeisScripts
|
a484bc1747eae52b2441f0bfd47ac7e093150f1d
|
[
"MIT"
] | null | null | null |
relocation/depth/setup_relocation_dir.py
|
ziyixi/SeisScripts
|
a484bc1747eae52b2441f0bfd47ac7e093150f1d
|
[
"MIT"
] | null | null | null |
relocation/depth/setup_relocation_dir.py
|
ziyixi/SeisScripts
|
a484bc1747eae52b2441f0bfd47ac7e093150f1d
|
[
"MIT"
] | null | null | null |
"""
setup earthquake depth relocation directory
"""
import obspy
import sh
import numpy as np
import click
from os.path import join
from glob import glob
import copy
def generate_new_cmtsolution_files(cmts_dir, generated_cmts_dir, depth_perturbation_list):
    """Write depth-perturbed copies of every CMTSOLUTION file.

    For each event file in ``cmts_dir`` and each perturbation ``d`` (km) in
    ``depth_perturbation_list``, writes a copy with the origin depth shifted
    by ``d`` km to ``{generated_cmts_dir}/d{d}/{event_name}``. Assumes the
    ``d{d}`` subdirectories already exist (created by setup_basic_structure).
    """
    cmt_names = glob(join(cmts_dir, "*"))
    for cmt_file in cmt_names:
        event = obspy.read_events(cmt_file)[0]
        # gcmt_id = event.resource_id.id.split("/")[-2]
        # there are some problems in changing names
        gcmt_id = cmt_file.split("/")[-1]
        # assume dirs like f"{generated_cmts_dir}/d-3" have already been created
        for depth_per in depth_perturbation_list:
            generated_name = join(generated_cmts_dir, f"d{depth_per}", gcmt_id)
            # there are always problem in copy event, so here I'd like to read in the event again
            event_this_depth = obspy.read_events(cmt_file)[0]
            # event_this_depth = event.copy()
            # obspy stores depth in metres, the perturbation list is in km
            event_this_depth.origins[0].depth += 1000.0*depth_per
            # print(generated_name, generated_cmts_dir, f"d{depth_per}", gcmt_id)
            event_this_depth.write(generated_name, format="CMTSOLUTION")
def setup_basic_structure(main_dir, ref_dir, cmts_dir, depth_perturbation_list):
    """Create the relocation working tree under ``main_dir``.

    Copies the reference specfem directory and the raw CMT files in, strips
    bulky subdirectories from the reference copy, and prepares empty
    ``cmts/cmts_generated/d{d}`` and ``work`` directories.
    """
    # main
    sh.mkdir("-p", main_dir)
    # ref
    sh.cp("-r", ref_dir, join(main_dir, "ref"))
    # refine the structure in ref: remove directories not needed for runs
    sh.rm("-rf", join(main_dir, "ref", "DATABASES_MPI"))
    sh.rm("-rf", join(main_dir, "ref", "EXAMPLES"))
    sh.rm("-rf", join(main_dir, "ref", "OUTPUT_FILES"))
    sh.rm("-rf", join(main_dir, "ref", "doc"))
    sh.rm("-rf", join(main_dir, "ref", "tests"))
    # mv DATA and utils to upper level so the later per-event copies of
    # "ref" do not duplicate them (moved back afterwards)
    sh.mv(join(main_dir, "ref", "DATA"), main_dir)
    sh.mv(join(main_dir, "ref", "utils"), main_dir)
    # cmts
    sh.mkdir("-p", join(main_dir, "cmts"))
    sh.cp("-r", cmts_dir, join(main_dir, "cmts", "cmts_raw"))
    sh.mkdir("-p", join(main_dir, "cmts", "cmts_generated"))
    for depth_per in depth_perturbation_list:
        sh.mkdir("-p", join(main_dir, "cmts",
                            "cmts_generated", f"d{depth_per}"))
    # working directory
    sh.mkdir("-p", join(main_dir, "work"))
def setup_structure_after_generat_cmts(main_dir, output_dir, depth_perturbation_list):
    """Populate per-event/per-depth run directories after CMT generation.

    For every raw CMT event and each depth perturbation, copies the reference
    specfem tree to ``work/{event}/d{d}``, installs the perturbed CMTSOLUTION
    plus Par_file/STATIONS, symlinks the large model directories, and links
    DATABASES_MPI / OUTPUT_FILES into ``output_dir`` (scratch).
    """
    # get cmts names
    cmt_dirs = glob(join(main_dir, "cmts", "cmts_raw", "*"))
    cmt_names = [item.split("/")[-1] for item in cmt_dirs]
    # mkdirs
    for cmt_name in cmt_names:
        sh.mkdir(join(main_dir, "work", cmt_name))
        for depth_per in depth_perturbation_list:
            # sh.mkdir(join(main_dir, "work", cmt_name, f"d{depth_per}"))
            # cp ref to working dirs
            sh.cp("-r", join(main_dir, "ref"),
                  join(main_dir, "work", cmt_name, f"d{depth_per}"))
    # mv DATA and utils back to ref (they were parked at main_dir by
    # setup_basic_structure to keep the per-event copies small)
    sh.mv(join(main_dir, "DATA"), join(main_dir, "ref", "DATA"))
    sh.mv(join(main_dir, "utils"), join(main_dir, "ref", "utils"))
    # mkdir DATA in work directory
    for cmt_name in cmt_names:
        for depth_per in depth_perturbation_list:
            sh.mkdir(join(main_dir, "work", cmt_name, f"d{depth_per}", "DATA"))
    # cp and ln files in DATA: the model directories below are large, so
    # they are symlinked back into ref instead of being copied per run
    toln = ["cemRequest", "crust1.0", "crust2.0",
            "crustmap", "epcrust", "eucrust-07", "GLL", "heterogen", "Lebedev_sea99", "Montagner_model", "old", "PPM", "QRFSI12", "s20rts", "s362ani", "s40rts", "Simons_model", "topo_bathy", "Zhao_JP_model"]
    for cmt_name in cmt_names:
        for depth_per in depth_perturbation_list:
            sh.cp(join(main_dir, "cmts", "cmts_generated",
                       f"d{depth_per}", cmt_name), join(main_dir, "work", cmt_name, f"d{depth_per}", "DATA", "CMTSOLUTION"))
            sh.cp(join(main_dir, "ref", "DATA", "Par_file"), join(
                main_dir, "work", cmt_name, f"d{depth_per}", "DATA", "Par_file"))
            sh.cp(join(main_dir, "ref", "DATA", "STATIONS"), join(
                main_dir, "work", cmt_name, f"d{depth_per}", "DATA", "STATIONS"))
            for lnfile in toln:
                sh.ln("-s", join(main_dir, "ref", "DATA", lnfile), join(
                    main_dir, "work", cmt_name, f"d{depth_per}", "DATA", lnfile))
            # ln in work files
            toln_work = ["utils"]
            for lnfile in toln_work:
                sh.ln("-s", join(main_dir, "ref", lnfile), join(
                    main_dir, "work", cmt_name, f"d{depth_per}", lnfile))
    # mkdir and ln DATABASE_MPI and OUTPUT_FILES: the heavy run output
    # lives in output_dir (scratch) and is symlinked into each work dir
    sh.mkdir("-p", output_dir)
    sh.mkdir("-p", join(output_dir, "DATABASES_MPI"))
    sh.mkdir("-p", join(output_dir, "OUTPUT_FILES"))
    for cmt_name in cmt_names:
        for depth_per in depth_perturbation_list:
            sh.mkdir("-p", join(output_dir, "DATABASES_MPI",
                                cmt_name, f"d{depth_per}"))
            sh.mkdir("-p", join(output_dir, "OUTPUT_FILES",
                                cmt_name, f"d{depth_per}"))
            sh.ln("-s", join(output_dir, "DATABASES_MPI",
                             cmt_name, f"d{depth_per}"), join(main_dir, "work", cmt_name, f"d{depth_per}", "DATABASES_MPI"))
            sh.ln("-s", join(output_dir, "OUTPUT_FILES",
                             cmt_name, f"d{depth_per}"), join(main_dir, "work", cmt_name, f"d{depth_per}", "OUTPUT_FILES"))
@click.command()
@click.option('--main_dir', required=True, help="the main working directory", type=str)
@click.option('--output_dir', required=True, help="the output directory in scratch", type=str)
@click.option('--ref_dir', required=True, help="the reference specfem directory", type=str)
@click.option('--cmts_dir', required=True, help="the cmt solution directory", type=str)
@click.option('--depth_perturbation', required=True, help="the depth perturbation, use somthing like -3,-1,5 (in km)", type=str)
def main(main_dir, output_dir, ref_dir, cmts_dir, depth_perturbation):
    """CLI entry point: build the complete depth-relocation directory tree."""
    # comma-separated km offsets, e.g. "-3,-1,5" -> [-3.0, -1.0, 5.0]
    depth_perturbation_list = [float(item)
                               for item in depth_perturbation.split(",")]
    setup_basic_structure(main_dir, ref_dir, cmts_dir, depth_perturbation_list)
    generated_cmts_dir = join(main_dir, "cmts", "cmts_generated")
    working_cmts_dir = join(main_dir, "cmts", "cmts_raw")
    generate_new_cmtsolution_files(
        working_cmts_dir, generated_cmts_dir, depth_perturbation_list)
    setup_structure_after_generat_cmts(
        main_dir, output_dir, depth_perturbation_list)


if __name__ == "__main__":
    main()
| 45.181818
| 207
| 0.625445
| 933
| 6,461
| 4.068596
| 0.165059
| 0.084826
| 0.107218
| 0.047418
| 0.571391
| 0.505269
| 0.463119
| 0.384089
| 0.318757
| 0.292413
| 0
| 0.006389
| 0.224733
| 6,461
| 142
| 208
| 45.5
| 0.751447
| 0.112676
| 0
| 0.126316
| 1
| 0
| 0.193514
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.042105
| false
| 0
| 0.073684
| 0
| 0.115789
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
1359a8c4afe9581f59876f936fb68313f28865c1
| 1,028
|
py
|
Python
|
corehq/apps/accounting/migrations/0026_auto_20180508_1956.py
|
kkrampa/commcare-hq
|
d64d7cad98b240325ad669ccc7effb07721b4d44
|
[
"BSD-3-Clause"
] | 1
|
2020-05-05T13:10:01.000Z
|
2020-05-05T13:10:01.000Z
|
corehq/apps/accounting/migrations/0026_auto_20180508_1956.py
|
kkrampa/commcare-hq
|
d64d7cad98b240325ad669ccc7effb07721b4d44
|
[
"BSD-3-Clause"
] | 1
|
2019-12-09T14:00:14.000Z
|
2019-12-09T14:00:14.000Z
|
corehq/apps/accounting/migrations/0026_auto_20180508_1956.py
|
MaciejChoromanski/commcare-hq
|
fd7f65362d56d73b75a2c20d2afeabbc70876867
|
[
"BSD-3-Clause"
] | 5
|
2015-11-30T13:12:45.000Z
|
2019-07-01T19:27:07.000Z
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.13 on 2018-05-08 19:56
from __future__ import unicode_literals
from __future__ import absolute_import
from django.db import migrations
def noop(*args, **kwargs):
    """Do nothing; used as the reverse operation for RunPython."""
    return None
def _convert_emailed_to_array_field(apps, schema_editor):
    """Copy comma-separated ``emailed_to`` strings into ``emailed_to_list``.

    Forward data migration for BillingRecord and WireBillingRecord: any
    non-empty ``emailed_to`` value is split on ',' and saved into the new
    list field. Uses historical models via ``apps.get_model``.
    """
    BillingRecord = apps.get_model('accounting', 'BillingRecord')
    for record in BillingRecord.objects.all():
        if record.emailed_to != '':
            record.emailed_to_list = record.emailed_to.split(',')
            record.save()
    WireBillingRecord = apps.get_model('accounting', 'WireBillingRecord')
    for wirerecord in WireBillingRecord.objects.all():
        if wirerecord.emailed_to != '':
            wirerecord.emailed_to_list = wirerecord.emailed_to.split(',')
            wirerecord.save()
class Migration(migrations.Migration):
    """Backfill ``emailed_to_list`` from the legacy ``emailed_to`` field."""

    dependencies = [
        ('accounting', '0025_auto_20180508_1952'),
    ]

    operations = [
        # reverse_code=noop makes the migration reversible without
        # attempting to undo the copied data.
        migrations.RunPython(_convert_emailed_to_array_field, reverse_code=noop)
    ]
| 27.783784
| 80
| 0.689689
| 117
| 1,028
| 5.760684
| 0.529915
| 0.106825
| 0.066766
| 0.062315
| 0.077151
| 0
| 0
| 0
| 0
| 0
| 0
| 0.041312
| 0.199416
| 1,028
| 36
| 81
| 28.555556
| 0.777643
| 0.067121
| 0
| 0
| 1
| 0
| 0.088912
| 0.024059
| 0
| 0
| 0
| 0
| 0
| 1
| 0.086957
| false
| 0.043478
| 0.130435
| 0
| 0.347826
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
1369857b721c52701d49ebb99393f03d4c246712
| 569
|
py
|
Python
|
appliance_catalog/migrations/0015_appliance_icon_py3.py
|
ChameleonCloud/portal
|
92a06fb926dc36e997b94fb8dcd22b7e0d24d3ee
|
[
"Apache-2.0"
] | 3
|
2015-08-04T20:53:41.000Z
|
2020-02-14T22:58:20.000Z
|
appliance_catalog/migrations/0015_appliance_icon_py3.py
|
ChameleonCloud/portal
|
92a06fb926dc36e997b94fb8dcd22b7e0d24d3ee
|
[
"Apache-2.0"
] | 103
|
2015-01-15T14:21:00.000Z
|
2022-03-31T19:14:20.000Z
|
appliance_catalog/migrations/0015_appliance_icon_py3.py
|
ChameleonCloud/portal
|
92a06fb926dc36e997b94fb8dcd22b7e0d24d3ee
|
[
"Apache-2.0"
] | 4
|
2016-02-22T16:48:20.000Z
|
2021-01-08T17:13:21.000Z
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.29 on 2021-02-25 20:32
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Updates ImageField syntax for later version.

    Re-declares ``appliance_icon`` so the migration state matches the current
    ImageField definition; presumably a state-only change with no actual SQL
    schema change — confirm with sqlmigrate if it matters.
    """

    dependencies = [
        ('appliance_catalog', '0014_auto_20180625_1104'),
    ]

    operations = [
        migrations.AlterField(
            model_name='appliance',
            name='appliance_icon',
            field=models.ImageField(blank=True, upload_to='appliance_catalog/icons/'),
        ),
    ]
| 24.73913
| 86
| 0.644991
| 62
| 569
| 5.709677
| 0.790323
| 0.090395
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.078522
| 0.239016
| 569
| 22
| 87
| 25.863636
| 0.73903
| 0.210896
| 0
| 0
| 1
| 0
| 0.196833
| 0.106335
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.153846
| 0
| 0.384615
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
13b9386ce9cd9ff6be8dca6211a1ab2dc6917f81
| 7,340
|
py
|
Python
|
BaseTools/Source/Python/GenFds/CapsuleData.py
|
James992927108/uEFI_Edk2_Practice
|
2cac7618dfee10bfa5104a2e167c85425fde0100
|
[
"BSD-2-Clause"
] | 6
|
2020-01-10T05:16:15.000Z
|
2022-01-06T17:41:58.000Z
|
BaseTools/Source/Python/GenFds/CapsuleData.py
|
James992927108/uEFI_Edk2_Practice
|
2cac7618dfee10bfa5104a2e167c85425fde0100
|
[
"BSD-2-Clause"
] | null | null | null |
BaseTools/Source/Python/GenFds/CapsuleData.py
|
James992927108/uEFI_Edk2_Practice
|
2cac7618dfee10bfa5104a2e167c85425fde0100
|
[
"BSD-2-Clause"
] | 3
|
2018-04-21T07:59:33.000Z
|
2018-04-23T02:06:01.000Z
|
## @file
# generate capsule
#
# Copyright (c) 2007-2017, Intel Corporation. All rights reserved.<BR>
#
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
#
##
# Import Modules
#
import Ffs
from GenFdsGlobalVariable import GenFdsGlobalVariable
import StringIO
from struct import pack
import os
from Common.Misc import SaveFileOnChange
import uuid
## base class for capsule data
#
#
class CapsuleData:
    """Base class for every capsule data item."""

    def __init__(self):
        """Initialize an empty capsule data item."""
        pass

    def GenCapsuleSubItem(self):
        """Generate this capsule data item; overridden by subclasses."""
        pass
## FFS class for capsule data
#
#
class CapsuleFfs(CapsuleData):
    """Capsule data item backed by an FFS file object."""

    def __init__(self):
        """Start with no FFS object or FV name attached."""
        self.Ffs = None
        self.FvName = None

    def GenCapsuleSubItem(self):
        """Generate the FFS file and return the generated file name."""
        return self.Ffs.GenFfs()
## FV class for capsule data
#
#
class CapsuleFv (CapsuleData):
    ## The constructor
    #
    #   @param  self        The object pointer
    #
    def __init__(self) :
        self.Ffs = None
        self.FvName = None
        self.CapsuleName = None

    ## generate FV capsule data
    #
    #   @param  self        The object pointer
    #   @retval string      Generated file name
    #
    def GenCapsuleSubItem(self):
        # If FvName is not already a path to an .fv file, treat it as the
        # name of an FV declared in the FDF and generate it into a buffer.
        if self.FvName.find('.fv') == -1:
            if self.FvName.upper() in GenFdsGlobalVariable.FdfParser.Profile.FvDict.keys():
                FvObj = GenFdsGlobalVariable.FdfParser.Profile.FvDict.get(self.FvName.upper())
                FdBuffer = StringIO.StringIO('')
                FvObj.CapsuleName = self.CapsuleName
                FvFile = FvObj.AddToBuffer(FdBuffer)
                # Reset so later generations of the same FV object are not
                # tagged with this capsule's name.
                FvObj.CapsuleName = None
                FdBuffer.close()
                return FvFile
            # NOTE(review): unknown FV name falls through and returns None.
        else:
            # Already a file name: expand workspace macros and use as-is.
            FvFile = GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.FvName)
            return FvFile
## FD class for capsule data
#
#
class CapsuleFd (CapsuleData):
    ## The constructor
    #
    #   @param  self        The object pointer
    #
    def __init__(self) :
        self.Ffs = None
        self.FdName = None
        self.CapsuleName = None

    ## generate FD capsule data
    #
    #   @param  self        The object pointer
    #   @retval string      Generated file name
    #
    def GenCapsuleSubItem(self):
        # If FdName is not already a path to an .fd file, treat it as the
        # name of an FD declared in the FDF and generate it.
        if self.FdName.find('.fd') == -1:
            if self.FdName.upper() in GenFdsGlobalVariable.FdfParser.Profile.FdDict.keys():
                FdObj = GenFdsGlobalVariable.FdfParser.Profile.FdDict.get(self.FdName.upper())
                FdFile = FdObj.GenFd()
                return FdFile
            # NOTE(review): unknown FD name falls through and returns None.
        else:
            # Already a file name: expand workspace macros and use as-is.
            FdFile = GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.FdName)
            return FdFile
## AnyFile class for capsule data
#
#
class CapsuleAnyFile(CapsuleData):
    """Capsule data item wrapping an arbitrary pre-existing file."""

    def __init__(self):
        """Start with no FFS object and no file name."""
        self.Ffs = None
        self.FileName = None

    def GenCapsuleSubItem(self):
        """Return the wrapped file's name; nothing needs generating."""
        return self.FileName
## Afile class for capsule data
#
#
class CapsuleAfile(CapsuleData):
    """Capsule data item wrapping an Afile by name."""

    def __init__(self):
        """Start with no FFS object and no file name."""
        self.Ffs = None
        self.FileName = None

    def GenCapsuleSubItem(self):
        """Return the wrapped file's name; nothing needs generating."""
        return self.FileName
class CapsulePayload(CapsuleData):
    '''Generate payload file, the header is defined below:
    #pragma pack(1)
    typedef struct {
        UINT32 Version;
        EFI_GUID UpdateImageTypeId;
        UINT8 UpdateImageIndex;
        UINT8 reserved_bytes[3];
        UINT32 UpdateImageSize;
        UINT32 UpdateVendorCodeSize;
        UINT64 UpdateHardwareInstance; //Introduced in v2
    } EFI_FIRMWARE_MANAGEMENT_CAPSULE_IMAGE_HEADER;
    '''

    def __init__(self):
        # Fields are presumably filled in by the FDF parser before
        # GenCapsuleSubItem() is called — confirm against caller.
        self.UiName = None
        self.Version = None              # header Version, hex string, e.g. '0x00000002'
        self.ImageTypeId = None          # GUID string 'xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx'
        self.ImageIndex = None           # hex string for UpdateImageIndex
        self.HardwareInstance = None     # hex string for UpdateHardwareInstance
        self.ImageFile = []              # NOTE(review): used later as a single path string
        self.VendorCodeFile = []         # NOTE(review): used later as a single path string
        self.Certificate_Guid = None
        self.MonotonicCount = None
        self.Existed = False
        self.Buffer = None

    def GenCapsuleSubItem(self, AuthData=[]):
        """Assemble the capsule payload: header + optional auth block + file contents.

        AuthData, when non-empty, holds [MonotonicCount, dwLength, wRevision,
        wCertificateType, CertType-GUID-string]. Returns the assembled bytes.
        NOTE(review): mutable default argument; safe only because AuthData is
        never mutated here.
        """
        # Apply defaults for unset header fields.
        if not self.Version:
            self.Version = '0x00000002'
        if not self.ImageIndex:
            self.ImageIndex = '0x1'
        if not self.HardwareInstance:
            self.HardwareInstance = '0x0'
        ImageFileSize = os.path.getsize(self.ImageFile)
        if AuthData:
            # the ImageFileSize need include the full authenticated info size. From first bytes of MonotonicCount to last bytes of certificate.
            # the 32 bit is the MonotonicCount, dwLength, wRevision, wCertificateType and CertType
            ImageFileSize += 32
        VendorFileSize = 0
        if self.VendorCodeFile:
            VendorFileSize = os.path.getsize(self.VendorCodeFile)
        #
        # Fill structure: the GUID is packed little-endian for data1/2/3,
        # then the 8 trailing bytes one by one.
        #
        Guid = self.ImageTypeId.split('-')
        Buffer = pack('=ILHHBBBBBBBBBBBBIIQ',
                      int(self.Version, 16),
                      int(Guid[0], 16),
                      int(Guid[1], 16),
                      int(Guid[2], 16),
                      int(Guid[3][-4:-2], 16),
                      int(Guid[3][-2:], 16),
                      int(Guid[4][-12:-10], 16),
                      int(Guid[4][-10:-8], 16),
                      int(Guid[4][-8:-6], 16),
                      int(Guid[4][-6:-4], 16),
                      int(Guid[4][-4:-2], 16),
                      int(Guid[4][-2:], 16),
                      int(self.ImageIndex, 16),
                      0,  # reserved_bytes[3]
                      0,
                      0,
                      ImageFileSize,
                      VendorFileSize,
                      int(self.HardwareInstance, 16)
                      )
        if AuthData:
            # MonotonicCount (QWORD) then dwLength/wRevision/wCertificateType,
            # then the CertType GUID in little-endian byte order.
            Buffer += pack('QIHH', AuthData[0], AuthData[1], AuthData[2], AuthData[3])
            Buffer += uuid.UUID(AuthData[4]).get_bytes_le()  # Python 2 API (bytes_le in py3)
        #
        # Append file content to the structure
        #
        ImageFile = open(self.ImageFile, 'rb')
        Buffer += ImageFile.read()
        ImageFile.close()
        if self.VendorCodeFile:
            VendorFile = open(self.VendorCodeFile, 'rb')
            Buffer += VendorFile.read()
            VendorFile.close()
        self.Existed = True
        return Buffer
| 29.837398
| 143
| 0.568665
| 735
| 7,340
| 5.627211
| 0.282993
| 0.027079
| 0.034816
| 0.052224
| 0.330513
| 0.249758
| 0.249033
| 0.240329
| 0.240329
| 0.240329
| 0
| 0.021798
| 0.343733
| 7,340
| 245
| 144
| 29.959184
| 0.836828
| 0.308311
| 0
| 0.333333
| 1
| 0
| 0.010379
| 0
| 0
| 0
| 0.003256
| 0
| 0
| 1
| 0.113821
| false
| 0.01626
| 0.056911
| 0.01626
| 0.292683
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
13c4fe2bf0cd10d5be8344221103967c7cea77fd
| 12,883
|
py
|
Python
|
windows/winobject/network.py
|
marpie/PythonForWindows
|
b253bc5873e7d97087ed22f2753b51fc6880ec18
|
[
"BSD-3-Clause"
] | 1
|
2018-11-15T11:15:56.000Z
|
2018-11-15T11:15:56.000Z
|
windows/winobject/network.py
|
killvxk/PythonForWindows
|
b253bc5873e7d97087ed22f2753b51fc6880ec18
|
[
"BSD-3-Clause"
] | null | null | null |
windows/winobject/network.py
|
killvxk/PythonForWindows
|
b253bc5873e7d97087ed22f2753b51fc6880ec18
|
[
"BSD-3-Clause"
] | 1
|
2020-12-25T12:59:10.000Z
|
2020-12-25T12:59:10.000Z
|
import windows
import ctypes
import socket
import struct
from windows import winproxy
import windows.generated_def as gdef
from windows.com import interfaces as cominterfaces
from windows.generated_def.winstructs import *
from windows.generated_def.windef import *
class TCP4Connection(MIB_TCPROW_OWNER_PID):
    """A TCP4 socket (connected or listening)"""

    @property
    def established(self):
        """``True`` if connection is established else it's a listening socket"""
        return self.dwState == MIB_TCP_STATE_ESTAB

    @property
    def remote_port(self):
        """:type: :class:`int`"""
        # A listening socket has no meaningful remote endpoint.
        if not self.established:
            return None
        # Port fields are in network byte order.
        return socket.ntohs(self.dwRemotePort)

    @property
    def local_port(self):
        """:type: :class:`int`"""
        return socket.ntohs(self.dwLocalPort)

    @property
    def local_addr(self):
        """Local address IP (x.x.x.x)

        :type: :class:`str`"""
        return socket.inet_ntoa(struct.pack("<I", self.dwLocalAddr))

    @property
    def remote_addr(self):
        """remote address IP (x.x.x.x)

        :type: :class:`str`"""
        if not self.established:
            return None
        return socket.inet_ntoa(struct.pack("<I", self.dwRemoteAddr))

    @property
    def remote_proto(self):
        """Identification of the protocol associated with the remote port.
        Equals ``remote_port`` if no protocol is associated with it.

        :type: :class:`str` or :class:`int`
        """
        try:
            return socket.getservbyport(self.remote_port, 'tcp')
        except socket.error:
            # Unknown service: fall back to the raw port number.
            return self.remote_port

    @property
    def remote_host(self):
        """Identification of the remote hostname.
        Equals ``remote_addr`` if the resolution fails

        :type: :class:`str` or :class:`int`
        """
        try:
            return socket.gethostbyaddr(self.remote_addr)
        except socket.error:
            # Reverse resolution failed: fall back to the raw address.
            return self.remote_addr

    def close(self):
        """Close the connection <require elevated process>"""
        # SetTcpEntry with state DELETE_TCB removes the TCB matching this
        # (local addr/port, remote addr/port) tuple.
        closing = MIB_TCPROW()
        closing.dwState = MIB_TCP_STATE_DELETE_TCB
        closing.dwLocalAddr = self.dwLocalAddr
        closing.dwLocalPort = self.dwLocalPort
        closing.dwRemoteAddr = self.dwRemoteAddr
        closing.dwRemotePort = self.dwRemotePort
        return winproxy.SetTcpEntry(ctypes.byref(closing))

    def __repr__(self):
        if not self.established:
            return "<TCP IPV4 Listening socket on {0}:{1}>".format(self.local_addr, self.local_port)
        return "<TCP IPV4 Connection {s.local_addr}:{s.local_port} -> {s.remote_addr}:{s.remote_port}>".format(s=self)
class TCP6Connection(MIB_TCP6ROW_OWNER_PID):
    """A TCP6 socket (connected or listening)"""

    @staticmethod
    def _str_ipv6_addr(addr):
        # NOTE(review): str.encode('hex') is Python 2 only; under Python 3
        # this would need bytes.hex() / binascii.hexlify per byte.
        return ":".join(c.encode('hex') for c in addr)

    @property
    def established(self):
        """``True`` if connection is established else it's a listening socket"""
        return self.dwState == MIB_TCP_STATE_ESTAB

    @property
    def remote_port(self):
        """:type: :class:`int`"""
        # A listening socket has no meaningful remote endpoint.
        if not self.established:
            return None
        # Port fields are in network byte order.
        return socket.ntohs(self.dwRemotePort)

    @property
    def local_port(self):
        """:type: :class:`int`"""
        return socket.ntohs(self.dwLocalPort)

    @property
    def local_addr(self):
        """Local address IP

        :type: :class:`str`"""
        return self._str_ipv6_addr(self.ucLocalAddr)

    @property
    def remote_addr(self):
        """remote address IP

        :type: :class:`str`"""
        if not self.established:
            return None
        return self._str_ipv6_addr(self.ucRemoteAddr)

    @property
    def remote_proto(self):
        """Equals to ``self.remote_port`` for Ipv6"""
        return self.remote_port

    @property
    def remote_host(self):
        """Equals to ``self.remote_addr`` for Ipv6"""
        return self.remote_addr

    def close(self):
        raise NotImplementedError("Closing IPV6 connection non implemented")

    def __repr__(self):
        if not self.established:
            return "<TCP IPV6 Listening socket on {0}:{1}>".format(self.local_addr, self.local_port)
        return "<TCP IPV6 Connection {0}:{1} -> {2}:{3}>".format(self.local_addr, self.local_port, self.remote_addr, self.remote_port)
def get_MIB_TCPTABLE_OWNER_PID_from_buffer(buffer):
    """Reinterpret a raw GetExtendedTcpTable buffer with the real row count.

    Reads dwNumEntries from the generic MIB_TCPTABLE_OWNER_PID layout, then
    builds a structure whose ``table`` field is sized to that count so every
    row is accessible as a :class:`TCP4Connection`.
    """
    x = windows.generated_def.winstructs.MIB_TCPTABLE_OWNER_PID.from_buffer(buffer)
    nb_entry = x.dwNumEntries

    class _GENERATED_MIB_TCPTABLE_OWNER_PID(ctypes.Structure):
        _fields_ = [
            ("dwNumEntries", DWORD),
            ("table", TCP4Connection * nb_entry),
        ]

    return _GENERATED_MIB_TCPTABLE_OWNER_PID.from_buffer(buffer)
def get_MIB_TCP6TABLE_OWNER_PID_from_buffer(buffer):
    """Reinterpret a raw GetExtendedTcpTable (AF_INET6) buffer with the real row count.

    IPv6 counterpart of :func:`get_MIB_TCPTABLE_OWNER_PID_from_buffer`:
    resizes ``table`` to dwNumEntries rows of :class:`TCP6Connection`.
    """
    x = windows.generated_def.winstructs.MIB_TCP6TABLE_OWNER_PID.from_buffer(buffer)
    nb_entry = x.dwNumEntries

    # Struct _MIB_TCP6TABLE_OWNER_PID definitions
    class _GENERATED_MIB_TCP6TABLE_OWNER_PID(Structure):
        _fields_ = [
            ("dwNumEntries", DWORD),
            ("table", TCP6Connection * nb_entry),
        ]

    return _GENERATED_MIB_TCP6TABLE_OWNER_PID.from_buffer(buffer)
class Firewall(cominterfaces.INetFwPolicy2):
    """The windows firewall"""

    @property
    def rules(self):
        """The rules of the firewall

        :type: [:class:`FirewallRule`] -- A list of rule
        """
        ifw_rules = cominterfaces.INetFwRules()
        self.get_Rules(ifw_rules)
        nb_rules = gdef.LONG()
        ifw_rules.get_Count(nb_rules)
        # Enumerate the collection through IEnumVARIANT: each Next() call
        # yields a VARIANT wrapping an IDispatch for one rule.
        unknw = cominterfaces.IUnknown()
        ifw_rules.get__NewEnum(unknw)
        pVariant = cominterfaces.IEnumVARIANT()
        unknw.QueryInterface(pVariant.IID, pVariant)
        count = gdef.ULONG()
        var = windows.com.ImprovedVariant()
        rules = []
        for i in range(nb_rules.value):
            pVariant.Next(1, var, count)
            if not count.value:
                # Enumerator exhausted early; stop instead of reading junk.
                break
            rule = FirewallRule()
            idisp = var.asdispatch
            idisp.QueryInterface(rule.IID, rule)
            rules.append(rule)
        return rules

    @property
    def current_profile_types(self):
        """Mask of the profiles currently enabled

        :type: :class:`long`
        """
        cpt = gdef.LONG()
        self.get_CurrentProfileTypes(cpt)
        return cpt.value

    @property
    def enabled(self):
        """A mapping of the active firewall profiles

        {
            ``NET_FW_PROFILE_TYPE2_.NET_FW_PROFILE2_DOMAIN(0x1L)``: ``True`` or ``False``,
            ``NET_FW_PROFILE_TYPE2_.NET_FW_PROFILE2_PRIVATE(0x2L)``: ``True`` or ``False``,
            ``NET_FW_PROFILE_TYPE2_.NET_FW_PROFILE2_PUBLIC(0x4L)``: ``True`` or ``False``,
        }

        :type: :class:`dict`
        """
        profiles = [gdef.NET_FW_PROFILE2_DOMAIN, gdef.NET_FW_PROFILE2_PRIVATE, gdef.NET_FW_PROFILE2_PUBLIC]
        return {prof: self.enabled_for_profile_type(prof) for prof in profiles}

    def enabled_for_profile_type(self, profile_type):
        # Query whether the firewall is enabled for a single profile value.
        enabled = gdef.VARIANT_BOOL()
        self.get_FirewallEnabled(profile_type, enabled)
        return enabled.value
class FirewallRule(cominterfaces.INetFwRule):
    """A rule of the firewall"""

    @property
    def name(self):
        """Name of the rule

        :type: :class:`unicode`
        """
        name = gdef.BSTR()
        self.get_Name(name)
        return name.value

    @property
    def description(self):
        """Description of the rule

        :type: :class:`unicode`
        """
        description = gdef.BSTR()
        self.get_Description(description)
        return description.value

    @property
    def application_name(self):
        """Name of the application to which apply the rule

        :type: :class:`unicode`
        """
        applicationname = gdef.BSTR()
        self.get_ApplicationName(applicationname)
        return applicationname.value

    @property
    def service_name(self):
        """Name of the service to which apply the rule

        :type: :class:`unicode`
        """
        servicename = gdef.BSTR()
        self.get_ServiceName(servicename)
        return servicename.value

    @property
    def protocol(self):
        """Protocol to which apply the rule

        :type: :class:`long`
        """
        protocol = gdef.LONG()
        self.get_Protocol(protocol)
        return protocol.value

    @property
    def local_address(self):
        """Local address of the rule

        :type: :class:`unicode`
        """
        local_address = gdef.BSTR()
        self.get_LocalAddresses(local_address)
        return local_address.value

    @property
    def remote_address(self):
        """Remote address of the rule

        :type: :class:`unicode`
        """
        remote_address = gdef.BSTR()
        self.get_RemoteAddresses(remote_address)
        return remote_address.value

    @property
    def direction(self):
        """Direction of the rule, values might be:

        * ``NET_FW_RULE_DIRECTION_.NET_FW_RULE_DIR_IN(0x1L)``
        * ``NET_FW_RULE_DIRECTION_.NET_FW_RULE_DIR_OUT(0x2L)``

        subclass of :class:`long`
        """
        direction = gdef.NET_FW_RULE_DIRECTION()
        self.get_Direction(direction)
        return direction.value

    @property
    def interface_types(self):
        """Types of interface of the rule

        :type: :class:`unicode`
        """
        interface_type = gdef.BSTR()
        self.get_InterfaceTypes(interface_type)
        return interface_type.value

    @property
    def local_port(self):
        """Local port of the rule

        :type: :class:`unicode`
        """
        local_port = gdef.BSTR()
        self.get_LocalPorts(local_port)
        return local_port.value

    @property
    def remote_port(self):
        """Remote port of the rule

        :type: :class:`unicode`
        """
        remote_port = gdef.BSTR()
        self.get_RemotePorts(remote_port)
        return remote_port.value

    @property
    def action(self):
        """Action of the rule, values might be:

        * ``NET_FW_ACTION_.NET_FW_ACTION_BLOCK(0x0L)``
        * ``NET_FW_ACTION_.NET_FW_ACTION_ALLOW(0x1L)``

        subclass of :class:`long`
        """
        action = gdef.NET_FW_ACTION()
        self.get_Action(action)
        return action.value

    @property
    def enabled(self):
        """``True`` if rule is enabled"""
        enabled = gdef.VARIANT_BOOL()
        self.get_Enabled(enabled)
        return enabled.value

    @property
    def grouping(self):
        """Grouping of the rule

        :type: :class:`unicode`
        """
        grouping = gdef.BSTR()
        # BUGFIX: previously called get_RemotePorts (copy-paste), which
        # returned the remote ports instead of the rule's group name.
        self.get_Grouping(grouping)
        return grouping.value

    @property
    def icmp_type_and_code(self):
        """ICMP types and codes of the rule

        :type: :class:`unicode`
        """
        icmp_type_and_code = gdef.BSTR()
        # BUGFIX: previously called get_RemotePorts (copy-paste), which
        # returned the remote ports instead of the ICMP types/codes.
        self.get_IcmpTypesAndCodes(icmp_type_and_code)
        return icmp_type_and_code.value

    def __repr__(self):
        return u'<{0} "{1}">'.format(type(self).__name__, self.name).encode("ascii", errors='backslashreplace')
class Network(object):
    """Entry point for network information: firewall and TCP socket tables."""

    # CLSID used to instantiate the NetFwPolicy2 COM object.
    NetFwPolicy2 = windows.com.IID.from_string("E2B3C97F-6AE1-41AC-817A-F6F92166D7DD")

    @property
    def firewall(self):
        """The firewall of the system

        :type: :class:`Firewall`
        """
        windows.com.init()
        firewall = Firewall()
        windows.com.create_instance(self.NetFwPolicy2, firewall)
        return firewall

    @staticmethod
    def _get_tcp_ipv4_sockets():
        # Two-call size negotiation: the first call with a NULL buffer fails
        # but fills `size` with the required length; the second retrieves
        # the actual table.
        size = ctypes.c_uint(0)
        try:
            winproxy.GetExtendedTcpTable(None, ctypes.byref(size), ulAf=AF_INET)
        except winproxy.IphlpapiError:
            pass # Allow us to set size to the needed value
        buffer = (ctypes.c_char * size.value)()
        winproxy.GetExtendedTcpTable(buffer, ctypes.byref(size), ulAf=AF_INET)
        t = get_MIB_TCPTABLE_OWNER_PID_from_buffer(buffer)
        return list(t.table)

    @staticmethod
    def _get_tcp_ipv6_sockets():
        # Same two-call size negotiation as the IPv4 variant, for AF_INET6.
        size = ctypes.c_uint(0)
        try:
            winproxy.GetExtendedTcpTable(None, ctypes.byref(size), ulAf=AF_INET6)
        except winproxy.IphlpapiError:
            pass # Allow us to set size to the needed value
        buffer = (ctypes.c_char * size.value)()
        winproxy.GetExtendedTcpTable(buffer, ctypes.byref(size), ulAf=AF_INET6)
        t = get_MIB_TCP6TABLE_OWNER_PID_from_buffer(buffer)
        return list(t.table)

    ipv4 = property(lambda self: self._get_tcp_ipv4_sockets())
    """List of TCP IPv4 socket (connection and listening)

    :type: [:class:`TCP4Connection`]"""

    ipv6 = property(lambda self: self._get_tcp_ipv6_sockets())
    """List of TCP IPv6 socket (connection and listening)

    :type: [:class:`TCP6Connection`]
    """
| 28.756696
| 134
| 0.623613
| 1,492
| 12,883
| 5.175603
| 0.164879
| 0.047009
| 0.03108
| 0.022792
| 0.478244
| 0.402745
| 0.347967
| 0.29798
| 0.249806
| 0.192696
| 0
| 0.009764
| 0.268649
| 12,883
| 447
| 135
| 28.821029
| 0.809807
| 0.195607
| 0
| 0.416667
| 1
| 0.003968
| 0.037181
| 0.010188
| 0
| 0
| 0
| 0
| 0
| 1
| 0.174603
| false
| 0.007937
| 0.035714
| 0.007937
| 0.460317
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
b927180a3b55091e89983dcae5d96dd47f1373ae
| 4,172
|
py
|
Python
|
extras/amld/cloud/quickdraw_rnn/task.py
|
luyang1210/tensorflow
|
948324f4cafdc97ae51c0e44fc1c28677a6e2e8a
|
[
"Apache-2.0"
] | 1
|
2019-04-28T15:46:45.000Z
|
2019-04-28T15:46:45.000Z
|
extras/amld/cloud/quickdraw_rnn/task.py
|
luyang1210/tensorflow
|
948324f4cafdc97ae51c0e44fc1c28677a6e2e8a
|
[
"Apache-2.0"
] | null | null | null |
extras/amld/cloud/quickdraw_rnn/task.py
|
luyang1210/tensorflow
|
948324f4cafdc97ae51c0e44fc1c28677a6e2e8a
|
[
"Apache-2.0"
] | 1
|
2020-11-18T04:43:33.000Z
|
2020-11-18T04:43:33.000Z
|
"""Experiment wrapper for training on Cloud ML."""
import argparse, glob, os
import tensorflow as tf
# From this package.
import model
def generate_experiment_fn(data_dir, train_batch_size, eval_batch_size,
                           train_steps, eval_steps, cell_size, hidden,
                           **experiment_args):
    """Returns experiment_fn for a RNN classifier.

    Args:
      data_dir: Where {train,eval}-* tf.train.Example datasets can be found.
      train_batch_size: Batch size during training.
      eval_batch_size: Batch size during evaluation.
      train_steps: Number of training steps.
      eval_steps: Number of evaluation steps.
      cell_size: LSTM cell size.
      hidden: Number of units in hidden layers (note that None means "use default"
          which is equivalent to [] -- see code in model).
      experiment_args: Additional arguments when `tf.contrib.learn.Experiment`
          is instantiated.
    """
    # One label per line in labels.txt defines the class set.
    classes = tf.gfile.Open('%s/labels.txt' % data_dir).read().splitlines()
    n_classes = len(classes)
    params = tf.contrib.training.HParams(
        cell_size=cell_size,
        hidden=hidden or None,  # Default is empty list.
    )
    config = tf.contrib.learn.RunConfig()

    def _experiment_fn(output_dir):
        # Bundles the estimator with train/eval input functions and a
        # saved-model export strategy (keep only the latest export).
        return tf.contrib.learn.Experiment(
            model.build_estimator(output_dir, n_classes, params, config),
            train_input_fn=model.make_input_fn_stroke(
                files_pattern=os.path.join(data_dir, 'train-*'),
                batch_size=train_batch_size),
            eval_input_fn=model.make_input_fn_stroke(
                files_pattern=os.path.join(data_dir, 'eval-*'),
                batch_size=eval_batch_size),
            export_strategies=[
                tf.contrib.learn.utils.saved_model_export_utils.make_export_strategy(
                    model.serving_input_fn,
                    exports_to_keep=1)
            ],
            train_steps=train_steps,
            eval_steps=eval_steps,
            **experiment_args
        )
    return _experiment_fn
if __name__ == '__main__':
    tf.logging.set_verbosity(tf.logging.INFO)

    # Command-line interface; flag order matches the original help output.
    parser = argparse.ArgumentParser()
    parser.add_argument('--data_dir', required=True,
                        help='GCS or local path to training data')
    parser.add_argument('--train_batch_size', type=int, default=100,
                        help='Batch size for training steps')
    parser.add_argument('--eval_batch_size', type=int, default=100,
                        help='Batch size for evaluation steps')
    parser.add_argument('--train_steps', type=int, default=10000,
                        help='Steps to run the training job for.')
    parser.add_argument('--eval_steps', type=int, default=100,
                        help='Number of steps to run evalution for at each checkpoint')
    parser.add_argument('--output_dir', required=True,
                        help='GCS location to write checkpoints and export models')
    parser.add_argument('--job-dir', default='junk',
                        help='this model ignores this field, but it is required by gcloud')
    parser.add_argument('--eval_delay_secs', type=int, default=10,
                        help='How long to wait before running first evaluation')
    parser.add_argument('--min_eval_frequency', type=int, default=1,
                        help='Minimum number of training steps between evaluations')
    # Hyper parameters.
    parser.add_argument('--cell_size', type=int, default=256,
                        help='LSTM cell size.')
    parser.add_argument('--hidden', type=int, nargs='+', default=(),
                        help='Units in hidden layers.')

    arguments = vars(parser.parse_args())
    # Drop service-supplied flags that generate_experiment_fn doesn't accept.
    arguments.pop('job_dir', None)
    arguments.pop('job-dir', None)
    output_dir = arguments.pop('output_dir')

    # Run the training job.
    tf.contrib.learn.learn_runner.run(
        generate_experiment_fn(**arguments), output_dir)
| 28.972222
| 81
| 0.613375
| 497
| 4,172
| 4.921529
| 0.338028
| 0.051513
| 0.076451
| 0.025756
| 0.223222
| 0.123467
| 0.079313
| 0.079313
| 0.047424
| 0.047424
| 0
| 0.007092
| 0.290268
| 4,172
| 143
| 82
| 29.174825
| 0.81898
| 0.173298
| 0
| 0.216981
| 1
| 0
| 0.188087
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.018868
| false
| 0
| 0.028302
| 0.009434
| 0.066038
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
b92c7cbb70fbc4dd2dec20c24e021d0f6405bd12
| 19,900
|
py
|
Python
|
marshmallow_dataclass/__init__.py
|
dan-starkware/marshmallow_dataclass
|
25c3e041d8c6a87d740984e57a5bd29b768afbf8
|
[
"MIT"
] | null | null | null |
marshmallow_dataclass/__init__.py
|
dan-starkware/marshmallow_dataclass
|
25c3e041d8c6a87d740984e57a5bd29b768afbf8
|
[
"MIT"
] | null | null | null |
marshmallow_dataclass/__init__.py
|
dan-starkware/marshmallow_dataclass
|
25c3e041d8c6a87d740984e57a5bd29b768afbf8
|
[
"MIT"
] | null | null | null |
"""
This library allows the conversion of python 3.7's :mod:`dataclasses`
to :mod:`marshmallow` schemas.
It takes a python class, and generates a marshmallow schema for it.
Simple example::
from marshmallow import Schema
from marshmallow_dataclass import dataclass
@dataclass
class Point:
x:float
y:float
point = Point(x=0, y=0)
point_json = Point.Schema().dumps(point)
Full example::
from marshmallow import Schema
from dataclasses import field
from marshmallow_dataclass import dataclass
import datetime
@dataclass
class User:
birth: datetime.date = field(metadata= {
"required": True # A parameter to pass to marshmallow's field
})
website:str = field(metadata = {
"marshmallow_field": marshmallow.fields.Url() # Custom marshmallow field
})
Schema: ClassVar[Type[Schema]] = Schema # For the type checker
"""
import inspect
from enum import EnumMeta
from functools import lru_cache
from typing import (
Any,
Callable,
Dict,
List,
Mapping,
Optional,
Set,
Tuple,
Type,
TypeVar,
Union,
cast,
overload,
)
import dataclasses
import marshmallow
import typing_inspect
__all__ = ["dataclass", "add_schema", "class_schema", "field_for_schema", "NewType"]

NoneType = type(None)
_U = TypeVar("_U")

# Whitelist of dataclass members that will be copied to generated schema.
MEMBERS_WHITELIST: Set[str] = {"Meta"}

# Maximum number of schema classes that class_schema caches (via lru_cache);
# the cache also de-duplicates repeated generation for the same class.
MAX_CLASS_SCHEMA_CACHE_SIZE = 1024
# _cls should never be specified by keyword, so start it with an
# underscore. The presence of _cls is used to detect if this
# decorator is being called with parameters or not.
def dataclass(
    _cls: Type[_U] = None,
    *,
    repr: bool = True,
    eq: bool = True,
    order: bool = False,
    unsafe_hash: bool = False,
    frozen: bool = False,
    base_schema: Optional[Type[marshmallow.Schema]] = None,
):
    """
    This decorator does the same as dataclasses.dataclass, but also applies :func:`add_schema`.
    It adds a `.Schema` attribute to the class object
    :param base_schema: marshmallow schema used as a base class when deriving dataclass schema
    >>> @dataclass
    ... class Artist:
    ...    name: str
    >>> Artist.Schema
    <class 'marshmallow.schema.Artist'>
    >>> from typing import ClassVar
    >>> from marshmallow import Schema
    >>> @dataclass(order=True) # preserve field order
    ... class Point:
    ...   x:float
    ...   y:float
    ...   Schema: ClassVar[Type[Schema]] = Schema # For the type checker
    ...
    >>> Point.Schema().load({'x':0, 'y':0}) # This line can be statically type checked
    Point(x=0.0, y=0.0)
    """
    # dataclass's typing doesn't expect it to be called as a function, so ignore type check
    decorated = dataclasses.dataclass(  # type: ignore
        _cls, repr=repr, eq=eq, order=order, unsafe_hash=unsafe_hash, frozen=frozen
    )

    # Called directly on a class (bare `@dataclass`): decorate right away.
    if _cls is not None:
        return add_schema(decorated, base_schema)

    # Called with keyword options (`@dataclass(...)`): return a decorator.
    def wrap(cls):
        return add_schema(decorated(cls), base_schema)

    return wrap
# Typing overloads only: add_schema can be used bare (`@add_schema`), with
# arguments (`@add_schema(base_schema=...)`), or called as a plain function.
@overload
def add_schema(_cls: Type[_U]) -> Type[_U]:
    ...


@overload
def add_schema(
    base_schema: Type[marshmallow.Schema] = None,
) -> Callable[[Type[_U]], Type[_U]]:
    ...


@overload
def add_schema(
    _cls: Type[_U], base_schema: Type[marshmallow.Schema] = None
) -> Type[_U]:
    ...
def add_schema(_cls=None, base_schema=None):
    """
    This decorator adds a marshmallow schema as the 'Schema' attribute in a dataclass.
    It uses :func:`class_schema` internally.
    :param type cls: The dataclass to which a Schema should be added
    :param base_schema: marshmallow schema used as a base class when deriving dataclass schema
    >>> class BaseSchema(marshmallow.Schema):
    ...   def on_bind_field(self, field_name, field_obj):
    ...     field_obj.data_key = (field_obj.data_key or field_name).upper()
    >>> @add_schema(base_schema=BaseSchema)
    ... @dataclasses.dataclass
    ... class Artist:
    ...    names: Tuple[str, str]
    >>> artist = Artist.Schema().loads('{"NAMES": ["Martin", "Ramirez"]}')
    >>> artist
    Artist(names=('Martin', 'Ramirez'))
    """

    def attach(clazz: Type[_U]) -> Type[_U]:
        # Generate the schema and hang it off the class as `.Schema`.
        clazz.Schema = class_schema(clazz, base_schema)  # type: ignore
        return clazz

    # Bare decorator usage decorates immediately; parameterized usage
    # returns the decorator for later application.
    if _cls:
        return attach(_cls)
    return attach
def class_schema(
    clazz: type, base_schema: Optional[Type[marshmallow.Schema]] = None
) -> Type[marshmallow.Schema]:
    """
    Convert a class to a marshmallow schema
    :param clazz: A python class (may be a dataclass)
    :param base_schema: marshmallow schema used as a base class when deriving dataclass schema
    :return: A marshmallow Schema corresponding to the dataclass
    .. note::
        All the arguments supported by marshmallow field classes can
        be passed in the `metadata` dictionary of a field.
        If you want to use a custom marshmallow field
        (one that has no equivalent python type), you can pass it as the
        ``marshmallow_field`` key in the metadata dictionary.
    >>> import typing
    >>> Meters = typing.NewType('Meters', float)
    >>> @dataclasses.dataclass()
    ... class Building:
    ...   height: Optional[Meters]
    ...   name: str = dataclasses.field(default="anonymous")
    ...   class Meta:
    ...     ordered = True
    ...
    >>> class_schema(Building) # Returns a marshmallow schema class (not an instance)
    <class 'marshmallow.schema.Building'>
    >>> @dataclasses.dataclass()
    ... class City:
    ...   name: str = dataclasses.field(metadata={'required':True})
    ...   best_building: Building # Reference to another dataclasses. A schema will be created for it too.
    ...   other_buildings: List[Building] = dataclasses.field(default_factory=lambda: [])
    ...
    >>> citySchema = class_schema(City)()
    >>> city = citySchema.load({"name":"Paris", "best_building": {"name": "Eiffel Tower"}})
    >>> city
    City(name='Paris', best_building=Building(height=None, name='Eiffel Tower'), other_buildings=[])
    >>> citySchema.load({"name":"Paris"})
    Traceback (most recent call last):
        ...
    marshmallow.exceptions.ValidationError: {'best_building': ['Missing data for required field.']}
    >>> city_json = citySchema.dump(city)
    >>> city_json['best_building'] # We get an OrderedDict because we specified order = True in the Meta class
    OrderedDict([('height', None), ('name', 'Eiffel Tower')])
    >>> @dataclasses.dataclass()
    ... class Person:
    ...   name: str = dataclasses.field(default="Anonymous")
    ...   friends: List['Person'] = dataclasses.field(default_factory=lambda:[]) # Recursive field
    ...
    >>> person = class_schema(Person)().load({
    ...     "friends": [{"name": "Roger Boucher"}]
    ... })
    >>> person
    Person(name='Anonymous', friends=[Person(name='Roger Boucher', friends=[])])
    >>> @dataclasses.dataclass()
    ... class C:
    ...   important: int = dataclasses.field(init=True, default=0)
    ...    # Only fields that are in the __init__ method will be added:
    ...   unimportant: int = dataclasses.field(init=False, default=0)
    ...
    >>> c = class_schema(C)().load({
    ...     "important": 9, # This field will be imported
    ...     "unimportant": 9 # This field will NOT be imported
    ... }, unknown=marshmallow.EXCLUDE)
    >>> c
    C(important=9, unimportant=0)
    >>> @dataclasses.dataclass
    ... class Website:
    ...  url:str = dataclasses.field(metadata = {
    ...    "marshmallow_field": marshmallow.fields.Url() # Custom marshmallow field
    ...  })
    ...
    >>> class_schema(Website)().load({"url": "I am not a good URL !"})
    Traceback (most recent call last):
        ...
    marshmallow.exceptions.ValidationError: {'url': ['Not a valid URL.']}
    >>> @dataclasses.dataclass
    ... class NeverValid:
    ...     @marshmallow.validates_schema
    ...     def validate(self, data, **_):
    ...         raise marshmallow.ValidationError('never valid')
    ...
    >>> class_schema(NeverValid)().load({})
    Traceback (most recent call last):
        ...
    marshmallow.exceptions.ValidationError: {'_schema': ['never valid']}
    >>> # noinspection PyTypeChecker
    >>> class_schema(None)  # unsupported type
    Traceback (most recent call last):
      ...
    TypeError: None is not a dataclass and cannot be turned into one.
    >>> @dataclasses.dataclass
    ... class Anything:
    ...     name: str
    ...     @marshmallow.validates('name')
    ...     def validates(self, value):
    ...         if len(value) > 5: raise marshmallow.ValidationError("Name too long")
    >>> class_schema(Anything)().load({"name": "aaaaaargh"})
    Traceback (most recent call last):
    ...
    marshmallow.exceptions.ValidationError: {'name': ['Name too long']}
    """
    # Delegate to the lru_cache-d builder so repeated calls for the same
    # (clazz, base_schema) pair reuse the same generated schema class.
    return _proxied_class_schema(clazz, base_schema)
@lru_cache(maxsize=MAX_CLASS_SCHEMA_CACHE_SIZE)
def _proxied_class_schema(
    clazz: type, base_schema: Optional[Type[marshmallow.Schema]] = None
) -> Type[marshmallow.Schema]:
    """Build (and cache) the marshmallow schema class for `clazz`."""
    try:
        # noinspection PyDataclass
        fields: Tuple[dataclasses.Field, ...] = dataclasses.fields(clazz)
    except TypeError:  # Not a dataclass
        try:
            # Try to promote a plain class to a dataclass, then recurse.
            return class_schema(dataclasses.dataclass(clazz), base_schema)
        except Exception:
            raise TypeError(
                f"{getattr(clazz, '__name__', repr(clazz))} is not a dataclass and cannot be turned into one."
            )

    # Copy all marshmallow hooks and whitelisted members of the dataclass to the schema.
    attributes = {
        k: v
        for k, v in inspect.getmembers(clazz)
        if hasattr(v, "__marshmallow_hook__") or k in MEMBERS_WHITELIST
    }
    # Update the schema members to contain marshmallow fields instead of dataclass fields
    attributes.update(
        (
            field.name,
            field_for_schema(
                field.type, _get_field_default(field), field.metadata, base_schema
            ),
        )
        for field in fields
        # Only fields present in __init__ participate in (de)serialization.
        if field.init
    )
    # Create the schema class dynamically, deriving from _base_schema's
    # clazz-aware base so load() returns dataclass instances.
    schema_class = type(clazz.__name__, (_base_schema(clazz, base_schema),), attributes)
    return cast(Type[marshmallow.Schema], schema_class)
def _field_by_type(
    typ: Union[type, Any], base_schema: Optional[Type[marshmallow.Schema]]
) -> Optional[Type[marshmallow.fields.Field]]:
    """Look up the field class mapped to `typ`, preferring `base_schema`'s TYPE_MAPPING."""
    if base_schema:
        mapped = base_schema.TYPE_MAPPING.get(typ)
        if mapped:
            return mapped
    return marshmallow.Schema.TYPE_MAPPING.get(typ)
def _field_by_supertype(
    typ: Type,
    default: marshmallow.missing,
    newtype_supertype: Type,
    metadata: dict,
    base_schema: Optional[Type[marshmallow.Schema]],
) -> marshmallow.fields.Field:
    """
    Return a new field for fields based on a super field. (Usually spawned from NewType)
    """
    # Add the information coming from our custom NewType implementation
    typ_args = getattr(typ, "_marshmallow_args", {})

    # Handle multiple validators from both `typ` and `metadata`.
    # See https://github.com/lovasoa/marshmallow_dataclass/issues/91
    new_validators: List[Callable] = []
    for meta_dict in (typ_args, metadata):
        if "validate" in meta_dict:
            if marshmallow.utils.is_iterable_but_not_string(meta_dict["validate"]):
                new_validators.extend(meta_dict["validate"])
            elif callable(meta_dict["validate"]):
                new_validators.append(meta_dict["validate"])
    metadata["validate"] = new_validators if new_validators else None

    # NewType kwargs act as defaults; explicit field metadata wins on conflict.
    metadata = {"description": typ.__name__, **typ_args, **metadata}

    # A NewType may name an explicit marshmallow field class; use it directly,
    # otherwise recurse on the wrapped supertype.
    field = getattr(typ, "_marshmallow_field", None)
    if field:
        return field(**metadata)
    else:
        return field_for_schema(
            newtype_supertype,
            metadata=metadata,
            default=default,
            base_schema=base_schema,
        )
def field_for_schema(
    typ: type,
    default=marshmallow.missing,
    metadata: Mapping[str, Any] = None,
    base_schema: Optional[Type[marshmallow.Schema]] = None,
) -> marshmallow.fields.Field:
    """
    Get a marshmallow Field corresponding to the given python type.
    The metadata of the dataclass field is used as arguments to the marshmallow Field.
    :param typ: The type for which a field should be generated
    :param default: value to use for (de)serialization when the field is missing
    :param metadata: Additional parameters to pass to the marshmallow field constructor
    :param base_schema: marshmallow schema used as a base class when deriving dataclass schema
    >>> int_field = field_for_schema(int, default=9, metadata=dict(required=True))
    >>> int_field.__class__
    <class 'marshmallow.fields.Integer'>
    >>> int_field.default
    9
    >>> field_for_schema(str, metadata={"marshmallow_field": marshmallow.fields.Url()}).__class__
    <class 'marshmallow.fields.Url'>
    """
    metadata = {} if metadata is None else dict(metadata)
    if default is not marshmallow.missing:
        metadata.setdefault("default", default)
        # 'missing' must not be set for required fields.
        if not metadata.get("required"):
            metadata.setdefault("missing", default)
    else:
        # No default given: the field is required unless metadata says otherwise.
        metadata.setdefault("required", True)

    # If the field was already defined by the user
    predefined_field = metadata.get("marshmallow_field")
    if predefined_field:
        return predefined_field

    # Generic types specified without type arguments
    if typ is list:
        typ = List[Any]
    elif typ is dict:
        typ = Dict[Any, Any]

    # Base types
    field = _field_by_type(typ, base_schema)
    if field:
        return field(**metadata)

    if typ is Any:
        metadata.setdefault("allow_none", True)
        return marshmallow.fields.Raw(**metadata)

    # Generic types
    origin = typing_inspect.get_origin(typ)
    if origin:
        arguments = typing_inspect.get_args(typ, True)
        # Override base_schema.TYPE_MAPPING to change the class used for generic types below
        type_mapping = base_schema.TYPE_MAPPING if base_schema else {}

        if origin in (list, List):
            child_type = field_for_schema(arguments[0], base_schema=base_schema)
            list_type = type_mapping.get(List, marshmallow.fields.List)
            return list_type(child_type, **metadata)
        if origin in (tuple, Tuple):
            children = tuple(
                field_for_schema(arg, base_schema=base_schema) for arg in arguments
            )
            tuple_type = type_mapping.get(Tuple, marshmallow.fields.Tuple)
            return tuple_type(children, **metadata)
        elif origin in (dict, Dict):
            dict_type = type_mapping.get(Dict, marshmallow.fields.Dict)
            return dict_type(
                keys=field_for_schema(arguments[0], base_schema=base_schema),
                values=field_for_schema(arguments[1], base_schema=base_schema),
                **metadata,
            )
        elif typing_inspect.is_optional_type(typ):
            # Optional[X] is Union[X, None]; pick the non-None member.
            subtyp = next(t for t in arguments if t is not NoneType)  # type: ignore
            # Treat optional types as types with a None default
            metadata["default"] = metadata.get("default", None)
            metadata["missing"] = metadata.get("missing", None)
            metadata["required"] = False
            return field_for_schema(subtyp, metadata=metadata, base_schema=base_schema)
        elif typing_inspect.is_union_type(typ):
            from . import union_field

            return union_field.Union(
                [
                    (
                        subtyp,
                        field_for_schema(
                            subtyp, metadata=metadata, base_schema=base_schema
                        ),
                    )
                    for subtyp in arguments
                ],
                **metadata,
            )

    # typing.NewType returns a function with a __supertype__ attribute
    newtype_supertype = getattr(typ, "__supertype__", None)
    if newtype_supertype and inspect.isfunction(typ):
        return _field_by_supertype(
            typ=typ,
            default=default,
            newtype_supertype=newtype_supertype,
            metadata=metadata,
            base_schema=base_schema,
        )

    # enumerations
    if isinstance(typ, EnumMeta):
        import marshmallow_enum

        return marshmallow_enum.EnumField(typ, **metadata)

    # Nested marshmallow dataclass
    nested_schema = getattr(typ, "Schema", None)

    # Nested dataclasses
    forward_reference = getattr(typ, "__forward_arg__", None)
    # Fall back to generating (or referencing) a nested schema for the type.
    nested = (
        nested_schema or forward_reference or class_schema(typ, base_schema=base_schema)
    )
    return marshmallow.fields.Nested(nested, **metadata)
def _base_schema(
    clazz: type, base_schema: Optional[Type[marshmallow.Schema]] = None
) -> Type[marshmallow.Schema]:
    """
    Base schema factory that creates a schema for `clazz` derived either from `base_schema`
    or `BaseSchema`
    """

    # Remove `type: ignore` when mypy handles dynamic base classes
    # https://github.com/python/mypy/issues/2813
    class BaseSchema(base_schema or marshmallow.Schema):  # type: ignore
        def load(self, data: Mapping, *, many: bool = None, **kwargs):
            # Deserialize with marshmallow as usual, then instantiate the
            # dataclass from the loaded dict (or each dict when many=True).
            all_loaded = super().load(data, many=many, **kwargs)
            many = self.many if many is None else bool(many)
            if many:
                return [clazz(**loaded) for loaded in all_loaded]
            else:
                return clazz(**all_loaded)

    return BaseSchema
def _get_field_default(field: dataclasses.Field):
    """
    Return a marshmallow default value given a dataclass default value
    >>> _get_field_default(dataclasses.field())
    <marshmallow.missing>
    """
    # Remove `type: ignore` when https://github.com/python/mypy/issues/6910 is fixed
    factory = field.default_factory  # type: ignore
    if factory is not dataclasses.MISSING:
        # Hand the factory itself over; marshmallow calls callable defaults.
        return factory
    if field.default is not dataclasses.MISSING:
        return field.default
    return marshmallow.missing
def NewType(
    name: str,
    typ: Type[_U],
    field: Optional[Type[marshmallow.fields.Field]] = None,
    **kwargs,
) -> Callable[[_U], _U]:
    """NewType creates simple unique types
    to which you can attach custom marshmallow attributes.
    All the keyword arguments passed to this function will be transmitted
    to the marshmallow field constructor.
    >>> import marshmallow.validate
    >>> IPv4 = NewType('IPv4', str, validate=marshmallow.validate.Regexp(r'^([0-9]{1,3}\\.){3}[0-9]{1,3}$'))
    >>> @dataclass
    ... class MyIps:
    ...   ips: List[IPv4]
    >>> MyIps.Schema().load({"ips": ["0.0.0.0", "grumble grumble"]})
    Traceback (most recent call last):
    ...
    marshmallow.exceptions.ValidationError: {'ips': {1: ['String does not match expected pattern.']}}
    >>> MyIps.Schema().load({"ips": ["127.0.0.1"]})
    MyIps(ips=['127.0.0.1'])
    >>> Email = NewType('Email', str, field=marshmallow.fields.Email)
    >>> @dataclass
    ... class ContactInfo:
    ...   mail: Email = dataclasses.field(default="anonymous@example.org")
    >>> ContactInfo.Schema().load({})
    ContactInfo(mail='anonymous@example.org')
    >>> ContactInfo.Schema().load({"mail": "grumble grumble"})
    Traceback (most recent call last):
    ...
    marshmallow.exceptions.ValidationError: {'mail': ['Not a valid email address.']}
    """

    # Identity function, mirroring typing.NewType's behaviour at runtime.
    def new_type(x: _U):
        return x

    new_type.__name__ = name
    # Expose the wrapped type the same way typing.NewType does, so
    # field_for_schema can detect and unwrap it.
    new_type.__supertype__ = typ  # type: ignore
    # Optional explicit field class and extra field kwargs, consumed by
    # _field_by_supertype when the schema field is generated.
    new_type._marshmallow_field = field  # type: ignore
    new_type._marshmallow_args = kwargs  # type: ignore
    return new_type
if __name__ == "__main__":
    # Run this module's doctests when executed directly.
    import doctest

    doctest.testmod(verbose=True)
| 34.133791
| 110
| 0.647538
| 2,355
| 19,900
| 5.323142
| 0.163057
| 0.040683
| 0.021777
| 0.014359
| 0.238274
| 0.176292
| 0.131142
| 0.121809
| 0.096921
| 0.082403
| 0
| 0.004284
| 0.237638
| 19,900
| 582
| 111
| 34.19244
| 0.822029
| 0.499196
| 0
| 0.157895
| 1
| 0.004049
| 0.043687
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.064777
| false
| 0
| 0.040486
| 0.008097
| 0.226721
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
b9652ceb78b45d3bef98c61d48e3cd4630133615
| 19,317
|
py
|
Python
|
sdk/python/pulumi_google_native/testing/v1/test_matrix.py
|
AaronFriel/pulumi-google-native
|
75d1cda425e33d4610348972cd70bddf35f1770d
|
[
"Apache-2.0"
] | 44
|
2021-04-18T23:00:48.000Z
|
2022-02-14T17:43:15.000Z
|
sdk/python/pulumi_google_native/testing/v1/test_matrix.py
|
AaronFriel/pulumi-google-native
|
75d1cda425e33d4610348972cd70bddf35f1770d
|
[
"Apache-2.0"
] | 354
|
2021-04-16T16:48:39.000Z
|
2022-03-31T17:16:39.000Z
|
sdk/python/pulumi_google_native/testing/v1/test_matrix.py
|
AaronFriel/pulumi-google-native
|
75d1cda425e33d4610348972cd70bddf35f1770d
|
[
"Apache-2.0"
] | 8
|
2021-04-24T17:46:51.000Z
|
2022-01-05T10:40:21.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
from ._enums import *
from ._inputs import *
__all__ = ['TestMatrixArgs', 'TestMatrix']
@pulumi.input_type
class TestMatrixArgs:
    """Input arguments for constructing a TestMatrix resource (generated code;
    see __init__ for per-argument documentation)."""

    def __init__(__self__, *,
                 environment_matrix: pulumi.Input['EnvironmentMatrixArgs'],
                 result_storage: pulumi.Input['ResultStorageArgs'],
                 test_specification: pulumi.Input['TestSpecificationArgs'],
                 client_info: Optional[pulumi.Input['ClientInfoArgs']] = None,
                 fail_fast: Optional[pulumi.Input[bool]] = None,
                 flaky_test_attempts: Optional[pulumi.Input[int]] = None,
                 project: Optional[pulumi.Input[str]] = None,
                 request_id: Optional[pulumi.Input[str]] = None):
        """
        The set of arguments for constructing a TestMatrix resource.
        :param pulumi.Input['EnvironmentMatrixArgs'] environment_matrix: The devices the tests are being executed on.
        :param pulumi.Input['ResultStorageArgs'] result_storage: Where the results for the matrix are written.
        :param pulumi.Input['TestSpecificationArgs'] test_specification: How to run the test.
        :param pulumi.Input['ClientInfoArgs'] client_info: Information about the client which invoked the test.
        :param pulumi.Input[bool] fail_fast: If true, only a single attempt at most will be made to run each execution/shard in the matrix. Flaky test attempts are not affected. Normally, 2 or more attempts are made if a potential infrastructure issue is detected. This feature is for latency sensitive workloads. The incidence of execution failures may be significantly greater for fail-fast matrices and support is more limited because of that expectation.
        :param pulumi.Input[int] flaky_test_attempts: The number of times a TestExecution should be re-attempted if one or more of its test cases fail for any reason. The maximum number of reruns allowed is 10. Default is 0, which implies no reruns.
        :param pulumi.Input[str] project: The cloud project that owns the test matrix.
        """
        # Required inputs are set unconditionally.
        pulumi.set(__self__, "environment_matrix", environment_matrix)
        pulumi.set(__self__, "result_storage", result_storage)
        pulumi.set(__self__, "test_specification", test_specification)
        # Optional inputs are only stored when provided.
        if client_info is not None:
            pulumi.set(__self__, "client_info", client_info)
        if fail_fast is not None:
            pulumi.set(__self__, "fail_fast", fail_fast)
        if flaky_test_attempts is not None:
            pulumi.set(__self__, "flaky_test_attempts", flaky_test_attempts)
        if project is not None:
            pulumi.set(__self__, "project", project)
        if request_id is not None:
            pulumi.set(__self__, "request_id", request_id)

    @property
    @pulumi.getter(name="environmentMatrix")
    def environment_matrix(self) -> pulumi.Input['EnvironmentMatrixArgs']:
        """
        The devices the tests are being executed on.
        """
        return pulumi.get(self, "environment_matrix")

    @environment_matrix.setter
    def environment_matrix(self, value: pulumi.Input['EnvironmentMatrixArgs']):
        pulumi.set(self, "environment_matrix", value)

    @property
    @pulumi.getter(name="resultStorage")
    def result_storage(self) -> pulumi.Input['ResultStorageArgs']:
        """
        Where the results for the matrix are written.
        """
        return pulumi.get(self, "result_storage")

    @result_storage.setter
    def result_storage(self, value: pulumi.Input['ResultStorageArgs']):
        pulumi.set(self, "result_storage", value)

    @property
    @pulumi.getter(name="testSpecification")
    def test_specification(self) -> pulumi.Input['TestSpecificationArgs']:
        """
        How to run the test.
        """
        return pulumi.get(self, "test_specification")

    @test_specification.setter
    def test_specification(self, value: pulumi.Input['TestSpecificationArgs']):
        pulumi.set(self, "test_specification", value)

    @property
    @pulumi.getter(name="clientInfo")
    def client_info(self) -> Optional[pulumi.Input['ClientInfoArgs']]:
        """
        Information about the client which invoked the test.
        """
        return pulumi.get(self, "client_info")

    @client_info.setter
    def client_info(self, value: Optional[pulumi.Input['ClientInfoArgs']]):
        pulumi.set(self, "client_info", value)

    @property
    @pulumi.getter(name="failFast")
    def fail_fast(self) -> Optional[pulumi.Input[bool]]:
        """
        If true, only a single attempt at most will be made to run each execution/shard in the matrix. Flaky test attempts are not affected. Normally, 2 or more attempts are made if a potential infrastructure issue is detected. This feature is for latency sensitive workloads. The incidence of execution failures may be significantly greater for fail-fast matrices and support is more limited because of that expectation.
        """
        return pulumi.get(self, "fail_fast")

    @fail_fast.setter
    def fail_fast(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "fail_fast", value)

    @property
    @pulumi.getter(name="flakyTestAttempts")
    def flaky_test_attempts(self) -> Optional[pulumi.Input[int]]:
        """
        The number of times a TestExecution should be re-attempted if one or more of its test cases fail for any reason. The maximum number of reruns allowed is 10. Default is 0, which implies no reruns.
        """
        return pulumi.get(self, "flaky_test_attempts")

    @flaky_test_attempts.setter
    def flaky_test_attempts(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "flaky_test_attempts", value)

    @property
    @pulumi.getter
    def project(self) -> Optional[pulumi.Input[str]]:
        """
        The cloud project that owns the test matrix.
        """
        return pulumi.get(self, "project")

    @project.setter
    def project(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "project", value)

    @property
    @pulumi.getter(name="requestId")
    def request_id(self) -> Optional[pulumi.Input[str]]:
        # NOTE(review): the generator emitted no description for request_id;
        # presumably a duplicate-request/idempotency token — confirm against
        # the Cloud Testing API docs before documenting as fact.
        return pulumi.get(self, "request_id")

    @request_id.setter
    def request_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "request_id", value)
class TestMatrix(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
client_info: Optional[pulumi.Input[pulumi.InputType['ClientInfoArgs']]] = None,
environment_matrix: Optional[pulumi.Input[pulumi.InputType['EnvironmentMatrixArgs']]] = None,
fail_fast: Optional[pulumi.Input[bool]] = None,
flaky_test_attempts: Optional[pulumi.Input[int]] = None,
project: Optional[pulumi.Input[str]] = None,
request_id: Optional[pulumi.Input[str]] = None,
result_storage: Optional[pulumi.Input[pulumi.InputType['ResultStorageArgs']]] = None,
test_specification: Optional[pulumi.Input[pulumi.InputType['TestSpecificationArgs']]] = None,
__props__=None):
"""
Creates and runs a matrix of tests according to the given specifications. Unsupported environments will be returned in the state UNSUPPORTED. A test matrix is limited to use at most 2000 devices in parallel. May return any of the following canonical error codes: - PERMISSION_DENIED - if the user is not authorized to write to project - INVALID_ARGUMENT - if the request is malformed or if the matrix tries to use too many simultaneous devices.
Auto-naming is currently not supported for this resource.
Note - this resource's API doesn't support deletion. When deleted, the resource will persist
on Google Cloud even though it will be deleted from Pulumi state.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[pulumi.InputType['ClientInfoArgs']] client_info: Information about the client which invoked the test.
:param pulumi.Input[pulumi.InputType['EnvironmentMatrixArgs']] environment_matrix: The devices the tests are being executed on.
:param pulumi.Input[bool] fail_fast: If true, only a single attempt at most will be made to run each execution/shard in the matrix. Flaky test attempts are not affected. Normally, 2 or more attempts are made if a potential infrastructure issue is detected. This feature is for latency sensitive workloads. The incidence of execution failures may be significantly greater for fail-fast matrices and support is more limited because of that expectation.
:param pulumi.Input[int] flaky_test_attempts: The number of times a TestExecution should be re-attempted if one or more of its test cases fail for any reason. The maximum number of reruns allowed is 10. Default is 0, which implies no reruns.
:param pulumi.Input[str] project: The cloud project that owns the test matrix.
:param pulumi.Input[pulumi.InputType['ResultStorageArgs']] result_storage: Where the results for the matrix are written.
:param pulumi.Input[pulumi.InputType['TestSpecificationArgs']] test_specification: How to run the test.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: TestMatrixArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Creates and runs a matrix of tests according to the given specifications. Unsupported environments will be returned in the state UNSUPPORTED. A test matrix is limited to use at most 2000 devices in parallel. May return any of the following canonical error codes: - PERMISSION_DENIED - if the user is not authorized to write to project - INVALID_ARGUMENT - if the request is malformed or if the matrix tries to use too many simultaneous devices.
Auto-naming is currently not supported for this resource.
Note - this resource's API doesn't support deletion. When deleted, the resource will persist
on Google Cloud even though it will be deleted from Pulumi state.
:param str resource_name: The name of the resource.
:param TestMatrixArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(TestMatrixArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
                   resource_name: str,
                   opts: Optional[pulumi.ResourceOptions] = None,
                   client_info: Optional[pulumi.Input[pulumi.InputType['ClientInfoArgs']]] = None,
                   environment_matrix: Optional[pulumi.Input[pulumi.InputType['EnvironmentMatrixArgs']]] = None,
                   fail_fast: Optional[pulumi.Input[bool]] = None,
                   flaky_test_attempts: Optional[pulumi.Input[int]] = None,
                   project: Optional[pulumi.Input[str]] = None,
                   request_id: Optional[pulumi.Input[str]] = None,
                   result_storage: Optional[pulumi.Input[pulumi.InputType['ResultStorageArgs']]] = None,
                   test_specification: Optional[pulumi.Input[pulumi.InputType['TestSpecificationArgs']]] = None,
                   __props__=None):
    """Shared initializer behind both ``__init__`` overloads.

    Validates resource options, builds the property bag, and registers the
    resource with the Pulumi engine via ``super().__init__``.
    """
    # Normalize and validate resource options before anything else.
    if opts is None:
        opts = pulumi.ResourceOptions()
    if not isinstance(opts, pulumi.ResourceOptions):
        raise TypeError('Expected resource options to be a ResourceOptions instance')
    if opts.version is None:
        opts.version = _utilities.get_version()
    if opts.id is None:
        # Creating a new resource (no provider id supplied): build the
        # property bag from the keyword arguments.
        if __props__ is not None:
            raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
        # __new__ bypasses TestMatrixArgs.__init__ so required-arg checks can
        # be done here with URN-aware logic instead.
        __props__ = TestMatrixArgs.__new__(TestMatrixArgs)

        __props__.__dict__["client_info"] = client_info
        # Required properties may only be omitted when looking up by URN.
        if environment_matrix is None and not opts.urn:
            raise TypeError("Missing required property 'environment_matrix'")
        __props__.__dict__["environment_matrix"] = environment_matrix
        __props__.__dict__["fail_fast"] = fail_fast
        __props__.__dict__["flaky_test_attempts"] = flaky_test_attempts
        __props__.__dict__["project"] = project
        __props__.__dict__["request_id"] = request_id
        if result_storage is None and not opts.urn:
            raise TypeError("Missing required property 'result_storage'")
        __props__.__dict__["result_storage"] = result_storage
        if test_specification is None and not opts.urn:
            raise TypeError("Missing required property 'test_specification'")
        __props__.__dict__["test_specification"] = test_specification
        # Output-only properties start as None; the provider fills them in.
        __props__.__dict__["invalid_matrix_details"] = None
        __props__.__dict__["outcome_summary"] = None
        __props__.__dict__["state"] = None
        __props__.__dict__["test_executions"] = None
        __props__.__dict__["test_matrix_id"] = None
        __props__.__dict__["timestamp"] = None
    super(TestMatrix, __self__).__init__(
        'google-native:testing/v1:TestMatrix',
        resource_name,
        __props__,
        opts)
@staticmethod
def get(resource_name: str,
        id: pulumi.Input[str],
        opts: Optional[pulumi.ResourceOptions] = None) -> 'TestMatrix':
    """
    Get an existing TestMatrix resource's state with the given name, id, and optional extra
    properties used to qualify the lookup.

    :param str resource_name: The unique name of the resulting resource.
    :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
    :param pulumi.ResourceOptions opts: Options for the resource.
    """
    # Attach the provider id so __init__ takes the lookup path instead of
    # creating a new resource.
    opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

    # Every property starts as None; the engine populates them from the
    # existing resource's state.
    __props__ = TestMatrixArgs.__new__(TestMatrixArgs)

    __props__.__dict__["client_info"] = None
    __props__.__dict__["environment_matrix"] = None
    __props__.__dict__["fail_fast"] = None
    __props__.__dict__["flaky_test_attempts"] = None
    __props__.__dict__["invalid_matrix_details"] = None
    __props__.__dict__["outcome_summary"] = None
    __props__.__dict__["project"] = None
    __props__.__dict__["result_storage"] = None
    __props__.__dict__["state"] = None
    __props__.__dict__["test_executions"] = None
    __props__.__dict__["test_matrix_id"] = None
    __props__.__dict__["test_specification"] = None
    __props__.__dict__["timestamp"] = None
    return TestMatrix(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="clientInfo")
def client_info(self) -> pulumi.Output['outputs.ClientInfoResponse']:
    """
    Information about the client which invoked the test.
    """
    # Read-only output resolved by the provider.
    return pulumi.get(self, "client_info")
@property
@pulumi.getter(name="environmentMatrix")
def environment_matrix(self) -> pulumi.Output['outputs.EnvironmentMatrixResponse']:
    """
    The devices the tests are being executed on.
    """
    # Read-only output resolved by the provider.
    return pulumi.get(self, "environment_matrix")
@property
@pulumi.getter(name="failFast")
def fail_fast(self) -> pulumi.Output[bool]:
    """
    If true, only a single attempt at most will be made to run each execution/shard in the matrix. Flaky test attempts are not affected. Normally, 2 or more attempts are made if a potential infrastructure issue is detected. This feature is for latency sensitive workloads. The incidence of execution failures may be significantly greater for fail-fast matrices and support is more limited because of that expectation.
    """
    # Read-only output resolved by the provider.
    return pulumi.get(self, "fail_fast")
@property
@pulumi.getter(name="flakyTestAttempts")
def flaky_test_attempts(self) -> pulumi.Output[int]:
    """
    The number of times a TestExecution should be re-attempted if one or more of its test cases fail for any reason. The maximum number of reruns allowed is 10. Default is 0, which implies no reruns.
    """
    # Read-only output resolved by the provider.
    return pulumi.get(self, "flaky_test_attempts")
@property
@pulumi.getter(name="invalidMatrixDetails")
def invalid_matrix_details(self) -> pulumi.Output[str]:
    """
    Describes why the matrix is considered invalid. Only useful for matrices in the INVALID state.
    """
    # Read-only output resolved by the provider.
    return pulumi.get(self, "invalid_matrix_details")
@property
@pulumi.getter(name="outcomeSummary")
def outcome_summary(self) -> pulumi.Output[str]:
    """
    Output Only. The overall outcome of the test. Only set when the test matrix state is FINISHED.
    """
    # Read-only output resolved by the provider.
    return pulumi.get(self, "outcome_summary")
@property
@pulumi.getter
def project(self) -> pulumi.Output[str]:
    """
    The cloud project that owns the test matrix.
    """
    # Read-only output resolved by the provider.
    return pulumi.get(self, "project")
@property
@pulumi.getter(name="resultStorage")
def result_storage(self) -> pulumi.Output['outputs.ResultStorageResponse']:
    """
    Where the results for the matrix are written.
    """
    # Read-only output resolved by the provider.
    return pulumi.get(self, "result_storage")
@property
@pulumi.getter
def state(self) -> pulumi.Output[str]:
    """
    Indicates the current progress of the test matrix.
    """
    # Read-only output resolved by the provider.
    return pulumi.get(self, "state")
@property
@pulumi.getter(name="testExecutions")
def test_executions(self) -> pulumi.Output[Sequence['outputs.TestExecutionResponse']]:
    """
    The list of test executions that the service creates for this matrix.
    """
    # Read-only output resolved by the provider.
    return pulumi.get(self, "test_executions")
@property
@pulumi.getter(name="testMatrixId")
def test_matrix_id(self) -> pulumi.Output[str]:
    """
    Unique id set by the service.
    """
    # Read-only output resolved by the provider.
    return pulumi.get(self, "test_matrix_id")
@property
@pulumi.getter(name="testSpecification")
def test_specification(self) -> pulumi.Output['outputs.TestSpecificationResponse']:
    """
    How to run the test.
    """
    # Read-only output resolved by the provider.
    return pulumi.get(self, "test_specification")
@property
@pulumi.getter
def timestamp(self) -> pulumi.Output[str]:
    """
    The time this test matrix was initially created.
    """
    # Read-only output resolved by the provider.
    return pulumi.get(self, "timestamp")
| 50.436031
| 458
| 0.67671
| 2,305
| 19,317
| 5.431236
| 0.12321
| 0.050084
| 0.047048
| 0.031872
| 0.714754
| 0.6316
| 0.609394
| 0.596134
| 0.567058
| 0.544373
| 0
| 0.001756
| 0.233421
| 19,317
| 382
| 459
| 50.568063
| 0.843666
| 0.344567
| 0
| 0.412766
| 1
| 0
| 0.165417
| 0.038828
| 0
| 0
| 0
| 0
| 0
| 1
| 0.148936
| false
| 0.004255
| 0.034043
| 0.004255
| 0.285106
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
b9954284c404c9a5aed225965d5006c8735af349
| 1,717
|
py
|
Python
|
musa/migrations/0001_initial.py
|
ccsreenidhin/Music-Web-Django
|
9b8286914f9099b9ed56c712c7ca384846f189d1
|
[
"MIT"
] | null | null | null |
musa/migrations/0001_initial.py
|
ccsreenidhin/Music-Web-Django
|
9b8286914f9099b9ed56c712c7ca384846f189d1
|
[
"MIT"
] | null | null | null |
musa/migrations/0001_initial.py
|
ccsreenidhin/Music-Web-Django
|
9b8286914f9099b9ed56c712c7ca384846f189d1
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-03-29 06:43
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import musa.models
class Migration(migrations.Migration):
    """Initial schema for the musa app.

    Creates MusicCollection (uploaded audio documents) and UserProfile
    (per-user profile data), both keyed to the configured auth user model.
    """

    # First migration of this app.
    initial = True

    dependencies = [
        # Both models hold a FK/OneToOne to the (swappable) user model.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='MusicCollection',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(blank=True, max_length=70, null=True)),
                # Upload destination is computed per-instance by the app.
                ('document', models.FileField(upload_to=musa.models.get_upload_path)),
                ('uploaded_at', models.DateTimeField(auto_now_add=True, null=True)),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='UserProfile',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('fullname', models.CharField(blank=True, max_length=70)),
                ('favourite_music', models.CharField(blank=True, max_length=70)),
                ('about', models.TextField(blank=True, max_length=300)),
                ('picture', models.ImageField(default='/profile_images/avatar.jpeg', upload_to='profile_images')),
                # One profile per user.
                ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
| 40.880952
| 121
| 0.633663
| 188
| 1,717
| 5.606383
| 0.457447
| 0.030361
| 0.045541
| 0.068311
| 0.363378
| 0.363378
| 0.363378
| 0.263757
| 0.263757
| 0.263757
| 0
| 0.019055
| 0.235877
| 1,717
| 41
| 122
| 41.878049
| 0.784299
| 0.038439
| 0
| 0.30303
| 1
| 0
| 0.086165
| 0.016384
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.151515
| 0
| 0.272727
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
b999024320e50c940c8f273e6f0536039450c829
| 1,949
|
py
|
Python
|
config.py
|
jhattat/photoBooth
|
f6fe3ab418bb917792e10349597401ed34078766
|
[
"MIT"
] | null | null | null |
config.py
|
jhattat/photoBooth
|
f6fe3ab418bb917792e10349597401ed34078766
|
[
"MIT"
] | null | null | null |
config.py
|
jhattat/photoBooth
|
f6fe3ab418bb917792e10349597401ed34078766
|
[
"MIT"
] | null | null | null |
# --- Tumblr setup -------------------------------------------------------
# Replace the values below with your own information.
# OAuth keys can be generated from https://api.tumblr.com/console/calls/user/info
consumer_key = 'ShbOqI5zErQXOL7Qnd5XduXpY9XQUlBgJDpCLeq1OYqnY2KzSt'      # your key
consumer_secret = 'ulZradkbJGksjpl2MMlshAfJgEW6TNeSdZucykqeTp8jvwgnhu'   # your secret code
oath_token = 'uUcBuvJx8yhk4HJIZ39sfcYo0W4VoqcvUetR2EwcI5Sn8SLgNt'        # your oath token
oath_secret = 'iNJlqQJI6dwhAGmdNbMtD9u7VazmX2Rk5uW0fuIozIEjk97lz4'       # your oath secret code
tumblr_blog = 'soniaetjeremie'   # tumblr account name, without ".tumblr.com"
tagsForTumblr = "photobooth"     # comma-separated tags to attach to each post

# --- Photo-booth behavior ------------------------------------------------
monitor_w = 800                  # display monitor width (pixels)
monitor_h = 480                  # display monitor height (pixels)
file_path = '/home/pi/photobooth/pics/'  # where captured images are saved
clear_on_startup = False         # True: wipe previously stored photos at launch
debounce = 0.3                   # button debounce time (s); raise if it multi-triggers
post_online = True               # True: upload images; False: store locally only
capture_count_pics = True        # True: show a photo count between shots (False is faster)
make_gifs = True                 # True: build an animated gif; False: post 4 jpgs in one post
hi_res_pics = False              # True: save high-res pics from the camera.
# If also uploading, each image is converted to a smaller one before the gif
# is made. False first captures low-res pics, which is faster. Careful: each
# photo counts against your daily Tumblr upload max.
camera_iso = 400                 # adjust for lighting: normal 100-200, sort of dark 400, dark 800 max
# available options: 100, 200, 320, 400, 500, 640, 800
| 77.96
| 130
| 0.758338
| 278
| 1,949
| 5.255396
| 0.561151
| 0.032854
| 0.051335
| 0.02601
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.045541
| 0.188815
| 1,949
| 25
| 131
| 77.96
| 0.878558
| 0.653155
| 0
| 0
| 1
| 0
| 0.381902
| 0.345092
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
b99ee5dfe9849188796ff8d2b024b524adedb8d2
| 1,950
|
py
|
Python
|
django_mfa/migrations/0001_initial.py
|
timgates42/django-mfa
|
89eeb83f7da3ea24f205b40b13c7f9d33ea15b99
|
[
"MIT"
] | null | null | null |
django_mfa/migrations/0001_initial.py
|
timgates42/django-mfa
|
89eeb83f7da3ea24f205b40b13c7f9d33ea15b99
|
[
"MIT"
] | null | null | null |
django_mfa/migrations/0001_initial.py
|
timgates42/django-mfa
|
89eeb83f7da3ea24f205b40b13c7f9d33ea15b99
|
[
"MIT"
] | null | null | null |
# Generated by Django 2.1.5 on 2019-03-26 11:35
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial schema for django_mfa: U2F keys, OTP settings, recovery codes."""

    # First migration of this app.
    initial = True

    dependencies = [
        # U2FKey and UserOTP reference the (swappable) auth user model.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='U2FKey',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('last_used_at', models.DateTimeField(null=True)),
                ('public_key', models.TextField(unique=True)),
                ('key_handle', models.TextField()),
                ('app_id', models.TextField()),
                # A user may register several U2F keys.
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='u2f_keys', to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='UserOTP',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('otp_type', models.CharField(choices=[('HOTP', 'hotp'), ('TOTP', 'totp')], max_length=20)),
                ('secret_key', models.CharField(blank=True, max_length=100)),
                # PROTECT: a user with OTP configured cannot be deleted silently.
                ('user', models.OneToOneField(on_delete=django.db.models.deletion.PROTECT, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='UserRecoveryCodes',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('secret_code', models.CharField(max_length=10)),
                # Recovery codes hang off the OTP record, not the user directly.
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='django_mfa.UserOTP')),
            ],
        ),
    ]
| 41.489362
| 143
| 0.598974
| 205
| 1,950
| 5.521951
| 0.4
| 0.035336
| 0.04947
| 0.077739
| 0.421378
| 0.421378
| 0.394876
| 0.394876
| 0.310071
| 0.310071
| 0
| 0.016644
| 0.260513
| 1,950
| 46
| 144
| 42.391304
| 0.768377
| 0.023077
| 0
| 0.384615
| 1
| 0
| 0.090909
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.076923
| 0
| 0.179487
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
b9e96b262a690da4aaab0bf9584b51a15851826f
| 6,784
|
py
|
Python
|
demos/python/sdk_wireless_camera_control/open_gopro/demos/log_battery.py
|
Natureshadow/OpenGoPro
|
05110123cfbf6584288b813f2d4896d3a091480e
|
[
"MIT"
] | 210
|
2021-06-05T20:06:17.000Z
|
2022-03-31T18:13:17.000Z
|
demos/python/sdk_wireless_camera_control/open_gopro/demos/log_battery.py
|
Natureshadow/OpenGoPro
|
05110123cfbf6584288b813f2d4896d3a091480e
|
[
"MIT"
] | 73
|
2021-06-01T21:22:44.000Z
|
2022-03-31T18:33:24.000Z
|
demos/python/sdk_wireless_camera_control/open_gopro/demos/log_battery.py
|
Natureshadow/OpenGoPro
|
05110123cfbf6584288b813f2d4896d3a091480e
|
[
"MIT"
] | 70
|
2021-06-07T03:59:04.000Z
|
2022-03-26T10:51:15.000Z
|
# log_battery.py/Open GoPro, Version 2.0 (C) Copyright 2021 GoPro, Inc. (http://gopro.com/OpenGoPro).
# This copyright was auto-generated on Wed, Sep 1, 2021 5:05:45 PM
"""Example to continuously read the battery (with no Wifi connection)"""
import csv
import time
import logging
import argparse
import threading
from pathlib import Path
from datetime import datetime
from dataclasses import dataclass
from typing import Optional, Tuple, Literal, List
from rich.console import Console
from open_gopro import GoPro
from open_gopro.constants import StatusId
from open_gopro.util import setup_logging, set_logging_level
logger = logging.getLogger(__name__)  # reconfigured in main() via setup_logging()
console = Console()  # rich console printer
BarsType = Literal[0, 1, 2, 3]  # coarse battery level expressed as 0-3 "bars"
@dataclass
class Sample:
    """Simple class to store battery samples"""

    index: int       # monotonically increasing sample number
    percentage: int  # battery charge percentage
    bars: BarsType   # coarse battery level (0-3 bars)

    def __post_init__(self) -> None:
        # Timestamp each sample at the moment it is created.
        self.time = datetime.now()

    def __str__(self) -> str:  # pylint: disable=missing-return-doc
        return f"Index {self.index} @ time {self.time.strftime('%H:%M:%S')} --> bars: {self.bars}, percentage: {self.percentage}"
SAMPLE_INDEX = 0  # next sample index; advanced by both the polling and notification paths
SAMPLES: List[Sample] = []  # accumulated battery samples, dumped to CSV on exit
def dump_results_as_csv(location: Path) -> None:
    """Write all of the samples to a csv file

    Times are written as whole seconds elapsed since the first sample.

    Args:
        location (Path): File to write to
    """
    console.print(f"Dumping results as CSV to {location}")
    # newline="" is required when handing a file to csv.writer: the csv module
    # writes its own line terminators, and without it every row is followed by
    # a blank line on Windows.
    with open(location, mode="w", newline="") as f:
        w = csv.writer(f, delimiter=",", quotechar='"', quoting=csv.QUOTE_MINIMAL)
        w.writerow(["index", "time", "percentage", "bars"])
        # Report each sample's time relative to the first sample.
        initial_time = SAMPLES[0].time
        for s in SAMPLES:
            w.writerow([s.index, (s.time - initial_time).seconds, s.percentage, s.bars])
def process_battery_notifications(gopro: GoPro, initial_bars: BarsType, initial_percentage: int) -> None:
    """Separate thread to continuously check for and store battery notifications.

    If the CLI parameter was set to poll, this isn't used.

    Runs forever; the caller starts it as a daemon thread so it dies with the
    process.

    Args:
        gopro (GoPro): instance to get updates from
        initial_bars (BarsType): Initial bars level when notifications were enabled
        initial_percentage (int): Initial percentage when notifications were enabled
    """
    # A notification may carry only one of the two statuses, so carry the
    # last known value of each forward.
    last_percentage = initial_percentage
    last_bars = initial_bars

    while True:
        # Block until we receive an update
        notification = gopro.get_update()
        # Update data points if they have changed
        last_percentage = (
            notification.data[StatusId.INT_BATT_PER]
            if StatusId.INT_BATT_PER in notification.data
            else last_percentage
        )
        last_bars = (
            notification.data[StatusId.BATT_LEVEL] if StatusId.BATT_LEVEL in notification.data else last_bars
        )
        # Append and print sample
        global SAMPLE_INDEX
        SAMPLES.append(Sample(index=SAMPLE_INDEX, percentage=last_percentage, bars=last_bars))
        console.print(str(SAMPLES[-1]))
        SAMPLE_INDEX += 1
def main() -> int:
    """Main program functionality

    Connects to the camera over BLE (no WiFi) and records battery samples
    either via asynchronous notifications or by polling, until the camera
    dies or the user interrupts. Samples are dumped to CSV on exit.

    Returns:
        int: program return code (0 on success, 1 on error)
    """
    identifier, log_location, poll = parse_arguments()
    global logger
    logger = setup_logging(logger, log_location)
    global SAMPLE_INDEX
    gopro: Optional[GoPro] = None
    return_code = 0
    try:
        with GoPro(identifier, enable_wifi=False) as gopro:
            set_logging_level(logger, logging.ERROR)

            # # Setup notifications if we are not polling
            if poll is None:
                console.print("Configuring battery notifications...")
                # Enable notifications of the relevant battery statuses. Also store initial values.
                bars = gopro.ble_status.batt_level.register_value_update().flatten
                percentage = gopro.ble_status.int_batt_per.register_value_update().flatten
                # Start a thread to handle asynchronous battery level notifications
                threading.Thread(
                    target=process_battery_notifications, args=(gopro, bars, percentage), daemon=True
                ).start()
                with console.status("[bold green]Receiving battery notifications until it dies..."):
                    # Sleep forever, allowing notification handler thread to deal with battery level notifications
                    while True:
                        time.sleep(1)
            # Otherwise, poll
            else:
                with console.status("[bold green]Polling the battery until it dies..."):
                    while True:
                        SAMPLES.append(
                            Sample(
                                index=SAMPLE_INDEX,
                                percentage=gopro.ble_status.int_batt_per.get_value().flatten,
                                bars=gopro.ble_status.batt_level.get_value().flatten,
                            )
                        )
                        console.print(str(SAMPLES[-1]))
                        SAMPLE_INDEX += 1
                        time.sleep(poll)
    except Exception as e:  # pylint: disable=broad-except
        logger.error(repr(e))
        return_code = 1
    except KeyboardInterrupt:
        # KeyboardInterrupt derives from BaseException, so it is not swallowed
        # by the Exception handler above; treat Ctrl-C as a normal shutdown.
        logger.warning("Received keyboard interrupt. Shutting down...")
    finally:
        # Always dump whatever samples we collected and close the camera.
        if len(SAMPLES) > 0:
            csv_location = Path(log_location.parent) / "battery_results.csv"
            dump_results_as_csv(csv_location)
        if gopro is not None:
            gopro.close()
        console.print("Exiting...")

    return return_code  # pylint: disable=lost-exception
def parse_arguments() -> Tuple[str, Path, Optional[int]]:
    """Parse command line arguments

    Returns:
        Tuple[str, Path, Path]: (identifier, path to save log, path to VLC)
    """
    ap = argparse.ArgumentParser(
        description="Connect to the GoPro via BLE only and continuously read the battery (either by polling or notifications)."
    )
    # Optional camera identifier: last 4 digits of the serial number.
    ap.add_argument(
        "-i",
        "--identifier",
        type=str,
        default=None,
        help="Last 4 digits of GoPro serial number, which is the last 4 digits of the default camera SSID. \
If not used, first discovered GoPro will be connected to",
    )
    # Where to write the detailed log file.
    ap.add_argument(
        "-l",
        "--log",
        type=Path,
        default="log_battery.log",
        help="Location to store detailed log",
    )
    # Poll interval in seconds; absent means notification mode.
    ap.add_argument(
        "-p",
        "--poll",
        type=int,
        default=None,
        help="Set to poll the battery at a given interval. If not set, battery level will be notified instead. Defaults to notifications.",
    )
    parsed = ap.parse_args()
    return parsed.identifier, parsed.log, parsed.poll
# Run only when executed as a script, not when imported.
if __name__ == "__main__":
    main()
| 34.969072
| 139
| 0.627358
| 806
| 6,784
| 5.150124
| 0.320099
| 0.02385
| 0.009636
| 0.012527
| 0.104071
| 0.065526
| 0.052517
| 0.016863
| 0
| 0
| 0
| 0.006579
| 0.283019
| 6,784
| 193
| 140
| 35.150259
| 0.846834
| 0.205483
| 0
| 0.109375
| 1
| 0.023438
| 0.13274
| 0.006059
| 0
| 0
| 0
| 0
| 0
| 1
| 0.046875
| false
| 0
| 0.101563
| 0.007813
| 0.203125
| 0.039063
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
6a0593a2d9f168fbcc460c2d82964c99ec312e4a
| 911
|
py
|
Python
|
mayan/apps/metadata/migrations/0011_auto_20180917_0645.py
|
prezi/mayan-edms
|
e9bc10a056c3379b57115c6e83022f48c6298e1d
|
[
"Apache-2.0"
] | 4
|
2019-02-17T08:35:42.000Z
|
2019-03-28T06:02:11.000Z
|
mayan/apps/metadata/migrations/0011_auto_20180917_0645.py
|
zhoubear/mayan-edms
|
e9bc10a056c3379b57115c6e83022f48c6298e1d
|
[
"Apache-2.0"
] | 1
|
2018-10-11T13:01:34.000Z
|
2018-10-11T13:01:34.000Z
|
mayan/apps/metadata/migrations/0011_auto_20180917_0645.py
|
prezi/mayan-edms
|
e9bc10a056c3379b57115c6e83022f48c6298e1d
|
[
"Apache-2.0"
] | 3
|
2019-01-29T13:21:57.000Z
|
2019-10-27T03:20:15.000Z
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.11 on 2018-09-17 06:45
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Adjust metadata field definitions (help texts, db index, constraints)."""

    dependencies = [
        ('metadata', '0010_auto_20180823_2353'),
    ]

    operations = [
        migrations.AlterField(
            model_name='documentmetadata',
            name='value',
            # Indexed for lookup by metadata value.
            field=models.CharField(blank=True, db_index=True, help_text='The actual value stored in the metadata type field for the document.', max_length=255, null=True, verbose_name='Value'),
        ),
        migrations.AlterField(
            model_name='metadatatype',
            name='name',
            # Internal reference name; must be unique.
            field=models.CharField(help_text='Name used by other apps to reference this metadata type. Do not use python reserved words, or spaces.', max_length=48, unique=True, verbose_name='Name'),
        ),
    ]
| 35.038462
| 199
| 0.657519
| 112
| 911
| 5.196429
| 0.642857
| 0.068729
| 0.085911
| 0.099656
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.055874
| 0.233809
| 911
| 25
| 200
| 36.44
| 0.777937
| 0.075741
| 0
| 0.222222
| 1
| 0.055556
| 0.293206
| 0.027414
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.111111
| 0
| 0.277778
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
dbed5bb534715e304b67cd5a82e6d1e8cca605fa
| 1,693
|
py
|
Python
|
categories/migrations/0001_initial.py
|
snoop2head/exercise_curation_django
|
ba35bd32d8bc203d318cb8b6e0a1722f3aa26eda
|
[
"MIT"
] | 3
|
2020-09-30T04:44:39.000Z
|
2021-07-30T08:20:18.000Z
|
categories/migrations/0001_initial.py
|
snoop2head/exercise_curation_django
|
ba35bd32d8bc203d318cb8b6e0a1722f3aa26eda
|
[
"MIT"
] | 7
|
2021-03-30T13:09:55.000Z
|
2022-01-13T02:33:34.000Z
|
categories/migrations/0001_initial.py
|
snoop2head/exercise_curation_django
|
ba35bd32d8bc203d318cb8b6e0a1722f3aa26eda
|
[
"MIT"
] | 1
|
2022-03-31T12:01:38.000Z
|
2022-03-31T12:01:38.000Z
|
# Generated by Django 3.0.3 on 2020-03-24 09:59
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial schema for the categories app: Category and its Photo gallery."""

    # First migration of this app.
    initial = True

    dependencies = [
        # Category references exercises.Exercise created by this migration.
        ('exercises', '0018_photo_file'),
    ]

    operations = [
        migrations.CreateModel(
            name='Category',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('updated', models.DateTimeField(auto_now=True)),
                ('name', models.CharField(max_length=80)),
                ('description', models.TextField(blank=True)),
                ('exercises', models.ForeignKey(blank=True, on_delete=django.db.models.deletion.CASCADE, related_name='categories', to='exercises.Exercise')),
            ],
            options={
                # Inherited from an abstract timestamp base model.
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='Photo',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('updated', models.DateTimeField(auto_now=True)),
                ('image_url', models.URLField()),
                ('image_caption', models.CharField(blank=True, max_length=80)),
                ('category', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='photos', to='categories.Category')),
            ],
            options={
                'abstract': False,
            },
        ),
    ]
| 37.622222
| 158
| 0.569403
| 163
| 1,693
| 5.779141
| 0.398773
| 0.03397
| 0.097665
| 0.110403
| 0.433121
| 0.433121
| 0.433121
| 0.433121
| 0.433121
| 0.33121
| 0
| 0.019183
| 0.29179
| 1,693
| 44
| 159
| 38.477273
| 0.766472
| 0.02658
| 0
| 0.486486
| 1
| 0
| 0.119077
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.054054
| 0
| 0.162162
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
dbf810a25b7c035adf73121054a304443a683fb0
| 748
|
py
|
Python
|
core/migrations/0002_auto_20180702_1913.py
|
mertyildiran/echo
|
805db64e3fa9d31fd3c24390fac2e9bf7c91ad57
|
[
"Apache-2.0"
] | 5
|
2018-07-26T22:48:00.000Z
|
2021-05-02T01:59:51.000Z
|
core/migrations/0002_auto_20180702_1913.py
|
mertyildiran/echo
|
805db64e3fa9d31fd3c24390fac2e9bf7c91ad57
|
[
"Apache-2.0"
] | null | null | null |
core/migrations/0002_auto_20180702_1913.py
|
mertyildiran/echo
|
805db64e3fa9d31fd3c24390fac2e9bf7c91ad57
|
[
"Apache-2.0"
] | 1
|
2018-08-04T14:07:53.000Z
|
2018-08-04T14:07:53.000Z
|
# Generated by Django 2.0.6 on 2018-07-02 19:13
import core.models
from django.db import migrations, models
class Migration(migrations.Migration):
    """Rename Echo.owner to user and move file fields to app-defined upload paths."""

    dependencies = [
        ('core', '0001_initial'),
    ]

    operations = [
        # Pure rename: no data loss.
        migrations.RenameField(
            model_name='echo',
            old_name='owner',
            new_name='user',
        ),
        migrations.AlterField(
            model_name='echo',
            name='audio',
            # Upload destination computed per-instance by core.models.
            field=models.FileField(upload_to=core.models.echo_directory),
        ),
        migrations.AlterField(
            model_name='profile',
            name='picture',
            field=models.FileField(blank=True, null=True, upload_to=core.models.profile_directory),
        ),
    ]
| 24.933333
| 99
| 0.57754
| 77
| 748
| 5.480519
| 0.571429
| 0.07109
| 0.061611
| 0.137441
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.036609
| 0.30615
| 748
| 29
| 100
| 25.793103
| 0.776493
| 0.06016
| 0
| 0.304348
| 1
| 0
| 0.07418
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.086957
| 0
| 0.217391
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
dbfdb987e6de76d1f36bf0f8ce7f9d972b1cbaed
| 7,103
|
py
|
Python
|
venv/Lib/site-packages/CoolProp/constants.py
|
kubakoziczak/gasSteamPowerPlant
|
e6c036cc66ee2ff0b3f2fc923d0991bf57295d61
|
[
"MIT"
] | null | null | null |
venv/Lib/site-packages/CoolProp/constants.py
|
kubakoziczak/gasSteamPowerPlant
|
e6c036cc66ee2ff0b3f2fc923d0991bf57295d61
|
[
"MIT"
] | null | null | null |
venv/Lib/site-packages/CoolProp/constants.py
|
kubakoziczak/gasSteamPowerPlant
|
e6c036cc66ee2ff0b3f2fc923d0991bf57295d61
|
[
"MIT"
] | null | null | null |
# This file is automatically generated by the generate_constants_module.py script in wrappers/Python.
# DO NOT MODIFY THE CONTENTS OF THIS FILE!
from __future__ import absolute_import
from . import _constants
# Every name below simply re-exports the corresponding integer constant
# from the compiled CoolProp ``_constants`` extension module, so callers
# can ``from CoolProp.constants import ...``. (File is auto-generated;
# see the header warning.)

# --- Parameter indices ---
INVALID_PARAMETER = _constants.INVALID_PARAMETER
igas_constant = _constants.igas_constant
imolar_mass = _constants.imolar_mass
iacentric_factor = _constants.iacentric_factor
irhomolar_reducing = _constants.irhomolar_reducing
irhomolar_critical = _constants.irhomolar_critical
iT_reducing = _constants.iT_reducing
iT_critical = _constants.iT_critical
irhomass_reducing = _constants.irhomass_reducing
irhomass_critical = _constants.irhomass_critical
iP_critical = _constants.iP_critical
iP_reducing = _constants.iP_reducing
iT_triple = _constants.iT_triple
iP_triple = _constants.iP_triple
iT_min = _constants.iT_min
iT_max = _constants.iT_max
iP_max = _constants.iP_max
iP_min = _constants.iP_min
idipole_moment = _constants.idipole_moment
iT = _constants.iT
iP = _constants.iP
iQ = _constants.iQ
iTau = _constants.iTau
iDelta = _constants.iDelta
iDmolar = _constants.iDmolar
iHmolar = _constants.iHmolar
iSmolar = _constants.iSmolar
iCpmolar = _constants.iCpmolar
iCp0molar = _constants.iCp0molar
iCvmolar = _constants.iCvmolar
iUmolar = _constants.iUmolar
iGmolar = _constants.iGmolar
iHelmholtzmolar = _constants.iHelmholtzmolar
iSmolar_residual = _constants.iSmolar_residual
iDmass = _constants.iDmass
iHmass = _constants.iHmass
iSmass = _constants.iSmass
iCpmass = _constants.iCpmass
iCp0mass = _constants.iCp0mass
iCvmass = _constants.iCvmass
iUmass = _constants.iUmass
iGmass = _constants.iGmass
iHelmholtzmass = _constants.iHelmholtzmass
iviscosity = _constants.iviscosity
iconductivity = _constants.iconductivity
isurface_tension = _constants.isurface_tension
iPrandtl = _constants.iPrandtl
ispeed_sound = _constants.ispeed_sound
iisothermal_compressibility = _constants.iisothermal_compressibility
iisobaric_expansion_coefficient = _constants.iisobaric_expansion_coefficient
ifundamental_derivative_of_gas_dynamics = _constants.ifundamental_derivative_of_gas_dynamics
ialphar = _constants.ialphar
idalphar_dtau_constdelta = _constants.idalphar_dtau_constdelta
idalphar_ddelta_consttau = _constants.idalphar_ddelta_consttau
ialpha0 = _constants.ialpha0
idalpha0_dtau_constdelta = _constants.idalpha0_dtau_constdelta
idalpha0_ddelta_consttau = _constants.idalpha0_ddelta_consttau
iBvirial = _constants.iBvirial
iCvirial = _constants.iCvirial
idBvirial_dT = _constants.idBvirial_dT
idCvirial_dT = _constants.idCvirial_dT
iZ = _constants.iZ
iPIP = _constants.iPIP
ifraction_min = _constants.ifraction_min
ifraction_max = _constants.ifraction_max
iT_freeze = _constants.iT_freeze
iGWP20 = _constants.iGWP20
iGWP100 = _constants.iGWP100
iGWP500 = _constants.iGWP500
iFH = _constants.iFH
iHH = _constants.iHH
iPH = _constants.iPH
iODP = _constants.iODP
iPhase = _constants.iPhase
iundefined_parameter = _constants.iundefined_parameter

# --- Input pair identifiers ---
INPUT_PAIR_INVALID = _constants.INPUT_PAIR_INVALID
QT_INPUTS = _constants.QT_INPUTS
PQ_INPUTS = _constants.PQ_INPUTS
QSmolar_INPUTS = _constants.QSmolar_INPUTS
QSmass_INPUTS = _constants.QSmass_INPUTS
HmolarQ_INPUTS = _constants.HmolarQ_INPUTS
HmassQ_INPUTS = _constants.HmassQ_INPUTS
DmolarQ_INPUTS = _constants.DmolarQ_INPUTS
DmassQ_INPUTS = _constants.DmassQ_INPUTS
PT_INPUTS = _constants.PT_INPUTS
DmassT_INPUTS = _constants.DmassT_INPUTS
DmolarT_INPUTS = _constants.DmolarT_INPUTS
HmolarT_INPUTS = _constants.HmolarT_INPUTS
HmassT_INPUTS = _constants.HmassT_INPUTS
SmolarT_INPUTS = _constants.SmolarT_INPUTS
SmassT_INPUTS = _constants.SmassT_INPUTS
TUmolar_INPUTS = _constants.TUmolar_INPUTS
TUmass_INPUTS = _constants.TUmass_INPUTS
DmassP_INPUTS = _constants.DmassP_INPUTS
DmolarP_INPUTS = _constants.DmolarP_INPUTS
HmassP_INPUTS = _constants.HmassP_INPUTS
HmolarP_INPUTS = _constants.HmolarP_INPUTS
PSmass_INPUTS = _constants.PSmass_INPUTS
PSmolar_INPUTS = _constants.PSmolar_INPUTS
PUmass_INPUTS = _constants.PUmass_INPUTS
PUmolar_INPUTS = _constants.PUmolar_INPUTS
HmassSmass_INPUTS = _constants.HmassSmass_INPUTS
HmolarSmolar_INPUTS = _constants.HmolarSmolar_INPUTS
SmassUmass_INPUTS = _constants.SmassUmass_INPUTS
SmolarUmolar_INPUTS = _constants.SmolarUmolar_INPUTS
DmassHmass_INPUTS = _constants.DmassHmass_INPUTS
DmolarHmolar_INPUTS = _constants.DmolarHmolar_INPUTS
DmassSmass_INPUTS = _constants.DmassSmass_INPUTS
DmolarSmolar_INPUTS = _constants.DmolarSmolar_INPUTS
DmassUmass_INPUTS = _constants.DmassUmass_INPUTS
DmolarUmolar_INPUTS = _constants.DmolarUmolar_INPUTS

# --- Fluid type flags ---
FLUID_TYPE_PURE = _constants.FLUID_TYPE_PURE
FLUID_TYPE_PSEUDOPURE = _constants.FLUID_TYPE_PSEUDOPURE
FLUID_TYPE_REFPROP = _constants.FLUID_TYPE_REFPROP
FLUID_TYPE_INCOMPRESSIBLE_LIQUID = _constants.FLUID_TYPE_INCOMPRESSIBLE_LIQUID
FLUID_TYPE_INCOMPRESSIBLE_SOLUTION = _constants.FLUID_TYPE_INCOMPRESSIBLE_SOLUTION
FLUID_TYPE_UNDEFINED = _constants.FLUID_TYPE_UNDEFINED

# --- Phase indices ---
iphase_liquid = _constants.iphase_liquid
iphase_supercritical = _constants.iphase_supercritical
iphase_supercritical_gas = _constants.iphase_supercritical_gas
iphase_supercritical_liquid = _constants.iphase_supercritical_liquid
iphase_critical_point = _constants.iphase_critical_point
iphase_gas = _constants.iphase_gas
iphase_twophase = _constants.iphase_twophase
iphase_unknown = _constants.iphase_unknown
iphase_not_imposed = _constants.iphase_not_imposed

# --- Configuration keys ---
NORMALIZE_GAS_CONSTANTS = _constants.NORMALIZE_GAS_CONSTANTS
CRITICAL_WITHIN_1UK = _constants.CRITICAL_WITHIN_1UK
CRITICAL_SPLINES_ENABLED = _constants.CRITICAL_SPLINES_ENABLED
SAVE_RAW_TABLES = _constants.SAVE_RAW_TABLES
ALTERNATIVE_TABLES_DIRECTORY = _constants.ALTERNATIVE_TABLES_DIRECTORY
ALTERNATIVE_REFPROP_PATH = _constants.ALTERNATIVE_REFPROP_PATH
ALTERNATIVE_REFPROP_HMX_BNC_PATH = _constants.ALTERNATIVE_REFPROP_HMX_BNC_PATH
ALTERNATIVE_REFPROP_LIBRARY_PATH = _constants.ALTERNATIVE_REFPROP_LIBRARY_PATH
REFPROP_DONT_ESTIMATE_INTERACTION_PARAMETERS = _constants.REFPROP_DONT_ESTIMATE_INTERACTION_PARAMETERS
REFPROP_IGNORE_ERROR_ESTIMATED_INTERACTION_PARAMETERS = _constants.REFPROP_IGNORE_ERROR_ESTIMATED_INTERACTION_PARAMETERS
REFPROP_USE_GERG = _constants.REFPROP_USE_GERG
REFPROP_USE_PENGROBINSON = _constants.REFPROP_USE_PENGROBINSON
MAXIMUM_TABLE_DIRECTORY_SIZE_IN_GB = _constants.MAXIMUM_TABLE_DIRECTORY_SIZE_IN_GB
DONT_CHECK_PROPERTY_LIMITS = _constants.DONT_CHECK_PROPERTY_LIMITS
HENRYS_LAW_TO_GENERATE_VLE_GUESSES = _constants.HENRYS_LAW_TO_GENERATE_VLE_GUESSES
PHASE_ENVELOPE_STARTING_PRESSURE_PA = _constants.PHASE_ENVELOPE_STARTING_PRESSURE_PA
R_U_CODATA = _constants.R_U_CODATA
VTPR_UNIFAC_PATH = _constants.VTPR_UNIFAC_PATH
SPINODAL_MINIMUM_DELTA = _constants.SPINODAL_MINIMUM_DELTA
OVERWRITE_FLUIDS = _constants.OVERWRITE_FLUIDS
OVERWRITE_DEPARTURE_FUNCTION = _constants.OVERWRITE_DEPARTURE_FUNCTION
OVERWRITE_BINARY_INTERACTION = _constants.OVERWRITE_BINARY_INTERACTION
USE_GUESSES_IN_PROPSSI = _constants.USE_GUESSES_IN_PROPSSI
ASSUME_CRITICAL_POINT_STABLE = _constants.ASSUME_CRITICAL_POINT_STABLE
VTPR_ALWAYS_RELOAD_LIBRARY = _constants.VTPR_ALWAYS_RELOAD_LIBRARY
FLOAT_PUNCTUATION = _constants.FLOAT_PUNCTUATION
| 44.672956
| 120
| 0.887653
| 841
| 7,103
| 6.88585
| 0.261593
| 0.090658
| 0.01865
| 0.016059
| 0.082887
| 0.036609
| 0
| 0
| 0
| 0
| 0
| 0.004232
| 0.068563
| 7,103
| 158
| 121
| 44.955696
| 0.87107
| 0.01971
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.012987
| 0
| 0.012987
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
e02a89e62a53d61fc9086acef78dc03df26f1de7
| 2,140
|
py
|
Python
|
backend/listings/migrations/0001_initial.py
|
relaxxpls/Music-Control
|
76f5d10904f820607b3eb756850d5c5d7d89d875
|
[
"MIT"
] | null | null | null |
backend/listings/migrations/0001_initial.py
|
relaxxpls/Music-Control
|
76f5d10904f820607b3eb756850d5c5d7d89d875
|
[
"MIT"
] | null | null | null |
backend/listings/migrations/0001_initial.py
|
relaxxpls/Music-Control
|
76f5d10904f820607b3eb756850d5c5d7d89d875
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.2.3 on 2021-05-30 04:28
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
    """Initial migration for the listings app: creates the Listing model
    (one real-estate listing, linked to a realtor)."""

    # First migration of this app.
    initial = True

    dependencies = [
        ('realtors', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='Listing',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('slug', models.CharField(max_length=200, unique=True)),
                ('title', models.CharField(max_length=150)),
                ('address', models.CharField(default='', max_length=150)),
                ('city', models.CharField(max_length=100)),
                ('state', models.CharField(max_length=100)),
                ('zipcode', models.CharField(max_length=15)),
                ('description', models.TextField(blank=True)),
                ('sale_type', models.CharField(choices=[('For Sale', 'For Sale'), ('For Rent', 'For Rent')], default='For Sale', max_length=50)),
                ('price', models.IntegerField()),
                ('bedrooms', models.IntegerField()),
                ('bathrooms', models.DecimalField(decimal_places=1, max_digits=2)),
                ('home_type', models.CharField(choices=[('House', 'House'), ('Condo', 'Condo'), ('Townhouse', 'Townhouse')], default='House', max_length=50)),
                ('sqft', models.IntegerField()),
                ('open_house', models.BooleanField(default=False)),
                # Photos are stored under date-based directories.
                ('photo_main', models.ImageField(upload_to='photos/%Y/%m/%d')),
                ('photo_1', models.ImageField(blank=True, upload_to='photos/%Y/%m/%d')),
                ('photo_2', models.ImageField(blank=True, upload_to='photos/%Y/%m/%d')),
                ('is_published', models.BooleanField(default=True)),
                ('list_date', models.DateTimeField(blank=True, default=django.utils.timezone.now)),
                # DO_NOTHING: deleting a realtor leaves listings untouched.
                ('realtor', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='realtors.realtor')),
            ],
        ),
    ]
| 48.636364
| 158
| 0.583645
| 225
| 2,140
| 5.431111
| 0.444444
| 0.0982
| 0.07365
| 0.0982
| 0.135025
| 0.090835
| 0.090835
| 0.06874
| 0.06874
| 0.06874
| 0
| 0.027295
| 0.246729
| 2,140
| 43
| 159
| 49.767442
| 0.730769
| 0.021028
| 0
| 0
| 1
| 0
| 0.154802
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.083333
| 0
| 0.194444
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
e03bee15bfc41f500be41ba4168c4029ea4dba20
| 3,770
|
py
|
Python
|
scripts/beautify.py
|
lukaschoebel/POTUSgen
|
7b88ba63f0ddab199937df909c5af3271a833cf3
|
[
"MIT"
] | null | null | null |
scripts/beautify.py
|
lukaschoebel/POTUSgen
|
7b88ba63f0ddab199937df909c5af3271a833cf3
|
[
"MIT"
] | 5
|
2020-03-25T08:02:45.000Z
|
2020-04-08T20:07:42.000Z
|
scripts/beautify.py
|
lukaschoebel/POTUSgen
|
7b88ba63f0ddab199937df909c5af3271a833cf3
|
[
"MIT"
] | null | null | null |
import json
import re
import sys
def beautify(name):
    '''Load a JSON tweet dump, clean every tweet and save the result to a
    newly generated ``<name>_filtered.txt`` file, one JSON string per line.

    :type name: String -- basename without extension; reads ``<name>.json``
    :rtype: None -- writes ``<name>_filtered.txt`` as a side effect
    '''
    filename = name + '.json'
    output_name = name + "_filtered.txt"
    # Renamed handles: the original bound them to ``input``/``output``,
    # shadowing the builtins.
    with open(filename, "r", encoding="utf-8") as infile:
        with open(output_name, "w", encoding="utf-8") as outfile:
            document = json.load(infile)
            # Filter only the messages that are not retweeted
            # >> Version i): for tweets from archive "master_XXXX.json"
            # document = [x['full_text'] for x in document if x['user']['screen_name'] == 'realDonaldTrump' and 'full_text' in x]
            # >> Version ii): for self-scraped tweets via https://github.com/bpb27/twitter_scraping
            # document = [x['text'] for x in document if x['user']['screen_name'] == 'realDonaldTrump' and 'text' in x]
            # >> Version iii): Data set from https://github.com/MatthewWolff/MarkovTweets/
            document = [x['text'] for x in document]
            # Clean and only include not-retweeted messages; call
            # deep_clean once per tweet (the original called it twice).
            document = [c for c in (deep_clean(x) for x in document) if c is not None]
            # Preventing unicode characters by ensuring false ascii encoding
            for value in document:
                outfile.write(json.dumps(value, ensure_ascii=False) + "\n")
    # Fixed garbled/typo'd status line ("Sucessfully ... (unknown)").
    print(f">> Successfully cleaned {filename} and saved it to {output_name}")
def deep_clean(s):
    '''Deep-clean a single tweet string.

    Drops retweets, strips URLs, HTML ampersand entities and emoji,
    kills apostrophes/quotation marks/parentheses and normalises
    whitespace and case.

    :type s: String
    :rtype: String, or None for retweets
    '''
    # Return None if given tweet is a retweet
    if s[:2] == 'RT':
        return None
    # Delete all URLs because they don't make for interesting tweets.
    s = re.sub(r'http[\S]*', '', s)
    # Replace some common unicode symbols with raw character variants
    s = re.sub(r'\\u2026', '...', s)
    s = re.sub(r'…', '', s)
    s = re.sub(r'\\u2019', "'", s)
    s = re.sub(r'\\u2018', "'", s)
    # BUGFIX: this substitution used to be the no-op r"&" -> r"&";
    # decode the HTML-escaped ampersand entity instead.
    s = re.sub(r"&amp;", r"&", s)
    s = re.sub(r'\\n', r"", s)
    # Delete emoji modifying characters (skin-tone modifier U+1F3FC and
    # variation selector U+FE0F)
    s = re.sub(chr(127996), '', s)
    s = re.sub(chr(65039), '', s)
    # Kill apostrophes & punctuation because they confuse things.
    s = re.sub(r"'", r"", s)
    s = re.sub(r"“", r"", s)
    s = re.sub(r"”", r"", s)
    s = re.sub('[()]', r'', s)
    s = re.sub(r'"', r"", s)
    # Collapse multiples of certain chars
    s = re.sub('([.-])+', r'\1', s)
    # Pad sentence punctuation chars with whitespace
    s = re.sub('([^0-9])([.,!?])([^0-9])', r'\1 \2 \3', s)
    # Remove extra whitespace (incl. newlines) and lowercase
    s = ' '.join(s.split()).lower()
    # Define emoji_pattern (re caches compiled patterns, so recompiling
    # per call only costs a dict lookup)
    emoji_pattern = re.compile("["
                               u"\U0001F600-\U0001F64F"  # emoticons
                               u"\U0001F300-\U0001F5FF"  # symbols & pictographs
                               u"\U0001F680-\U0001F6FF"  # transport & map symbols
                               u"\U0001F1E0-\U0001F1FF"  # flags (iOS)
                               u"\U0001F1F2-\U0001F1F4"  # Macau flag
                               u"\U0001F1E6-\U0001F1FF"  # flags
                               u"\U0001F600-\U0001F64F"
                               u"\U00002702-\U000027B0"
                               u"\U000024C2-\U0001F251"
                               u"\U0001f926-\U0001f937"
                               u"\U0001F1F2"
                               u"\U0001F1F4"
                               u"\U0001F620"
                               u"\u200d"
                               u"\u2640-\u2642"
                               "]+", flags=re.UNICODE)
    s = emoji_pattern.sub(r'', s)
    # Care for a special case where the first char is a "."
    if len(s):
        return s[1:] if s[0] == "." else s
    return None
# CLI entry: ``python beautify.py <name>`` cleans ``<name>.json``; the
# truthy ``len(sys.argv) - 1`` check just requires at least one argument.
if __name__ == "__main__":
    if len(sys.argv) - 1: beautify(sys.argv[1])
| 33.963964
| 129
| 0.571088
| 512
| 3,770
| 4.158203
| 0.404297
| 0.022546
| 0.045092
| 0.042743
| 0.130108
| 0.107562
| 0.103335
| 0.086426
| 0.065759
| 0.065759
| 0
| 0.072727
| 0.270557
| 3,770
| 111
| 130
| 33.963964
| 0.700364
| 0.396286
| 0
| 0.070175
| 1
| 0
| 0.211026
| 0.105739
| 0
| 0
| 0
| 0
| 0
| 1
| 0.035088
| false
| 0
| 0.052632
| 0
| 0.140351
| 0.017544
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
e04601e1749bc51e7e5f74ca383f947dc25e7da9
| 562
|
py
|
Python
|
islam_fitz/survey/migrations/0005_auto_20210712_2132.py
|
OmarEhab177/Islam_fitz
|
6ad0eb21549895a6fe537e8413022b82bc530c57
|
[
"MIT"
] | null | null | null |
islam_fitz/survey/migrations/0005_auto_20210712_2132.py
|
OmarEhab177/Islam_fitz
|
6ad0eb21549895a6fe537e8413022b82bc530c57
|
[
"MIT"
] | 2
|
2022-03-01T12:17:05.000Z
|
2022-03-30T12:19:55.000Z
|
islam_fitz/survey/migrations/0005_auto_20210712_2132.py
|
OmarEhab177/Islam_fitz
|
6ad0eb21549895a6fe537e8413022b82bc530c57
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.1.12 on 2021-07-12 19:32
from django.db import migrations, models
class Migration(migrations.Migration):
    """Replace ``LastPage.whatsapp_button`` with a ``whatsapp_number``
    CharField."""

    dependencies = [
        ('survey', '0004_lastpage_whatsapp_button'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='lastpage',
            name='whatsapp_button',
        ),
        # default=1 only back-fills existing rows while migrating;
        # preserve_default=False removes the default from the field state.
        migrations.AddField(
            model_name='lastpage',
            name='whatsapp_number',
            field=models.CharField(default=1, max_length=50),
            preserve_default=False,
        ),
    ]
| 23.416667
| 61
| 0.592527
| 56
| 562
| 5.785714
| 0.678571
| 0.08642
| 0.104938
| 0.12963
| 0.179012
| 0
| 0
| 0
| 0
| 0
| 0
| 0.058524
| 0.300712
| 562
| 23
| 62
| 24.434783
| 0.765903
| 0.081851
| 0
| 0.235294
| 1
| 0
| 0.157588
| 0.05642
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.058824
| 0
| 0.235294
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
e05432743bd72af1411301793f19ae278f8a6b5a
| 485
|
py
|
Python
|
apps/vendors/migrations/0090_auto_20160610_2125.py
|
ExpoAshique/ProveBanking__s
|
f0b45fffea74d00d14014be27aa50fe5f42f6903
|
[
"MIT"
] | null | null | null |
apps/vendors/migrations/0090_auto_20160610_2125.py
|
ExpoAshique/ProveBanking__s
|
f0b45fffea74d00d14014be27aa50fe5f42f6903
|
[
"MIT"
] | null | null | null |
apps/vendors/migrations/0090_auto_20160610_2125.py
|
ExpoAshique/ProveBanking__s
|
f0b45fffea74d00d14014be27aa50fe5f42f6903
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2016-06-10 21:25
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Make ``Vendor.email`` optional (blank=True) with the standard
    254-character limit."""

    dependencies = [
        ('vendors', '0089_auto_20160602_2123'),
    ]

    operations = [
        migrations.AlterField(
            model_name='vendor',
            name='email',
            field=models.EmailField(blank=True, max_length=254, verbose_name='Email'),
        ),
    ]
| 23.095238
| 86
| 0.626804
| 55
| 485
| 5.327273
| 0.818182
| 0.061433
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.096419
| 0.251546
| 485
| 20
| 87
| 24.25
| 0.710744
| 0.138144
| 0
| 0
| 1
| 0
| 0.110843
| 0.055422
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.153846
| 0
| 0.384615
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
e05606e62a7f260ca58d2f3413562fa3ee898b64
| 1,000
|
py
|
Python
|
HackBitApp/migrations/0003_roadmap.py
|
SukhadaM/HackBit-Interview-Preparation-Portal
|
f4c6b0d7168a4ea4ffcf1569183b1614752d9946
|
[
"MIT"
] | null | null | null |
HackBitApp/migrations/0003_roadmap.py
|
SukhadaM/HackBit-Interview-Preparation-Portal
|
f4c6b0d7168a4ea4ffcf1569183b1614752d9946
|
[
"MIT"
] | null | null | null |
HackBitApp/migrations/0003_roadmap.py
|
SukhadaM/HackBit-Interview-Preparation-Portal
|
f4c6b0d7168a4ea4ffcf1569183b1614752d9946
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.1.7 on 2021-03-27 18:22
from django.db import migrations, models
class Migration(migrations.Migration):
    """Create the Roadmap model: up to three roadmap images keyed by a
    unique, indexed company name."""

    dependencies = [
        ('HackBitApp', '0002_company_photo'),
    ]

    operations = [
        migrations.CreateModel(
            name='Roadmap',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('company_name', models.CharField(db_index=True, max_length=200, unique=True)),
                # photo1 is required; photo2/photo3 are optional.
                ('photo1', models.ImageField(upload_to='photos/company/roadmap')),
                ('photo2', models.ImageField(blank=True, upload_to='photos/company/roadmap')),
                ('photo3', models.ImageField(blank=True, upload_to='photos/company/roadmap')),
            ],
            options={
                'verbose_name': 'roadmap',
                'verbose_name_plural': 'roadmaps',
                'ordering': ('company_name',),
            },
        ),
    ]
| 34.482759
| 114
| 0.571
| 98
| 1,000
| 5.673469
| 0.581633
| 0.059353
| 0.07554
| 0.113309
| 0.241007
| 0.190647
| 0.190647
| 0.190647
| 0.190647
| 0
| 0
| 0.035112
| 0.288
| 1,000
| 28
| 115
| 35.714286
| 0.745787
| 0.045
| 0
| 0
| 1
| 0
| 0.210913
| 0.069255
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.045455
| 0
| 0.181818
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
e05ea195ece947573587efca60ad05b204af43f6
| 1,095
|
py
|
Python
|
payment/migrations/0002_auto_20171125_0022.py
|
Littledelma/mofadog
|
5a7c6672da248e400a8a5746506a6e7b273c9510
|
[
"MIT"
] | null | null | null |
payment/migrations/0002_auto_20171125_0022.py
|
Littledelma/mofadog
|
5a7c6672da248e400a8a5746506a6e7b273c9510
|
[
"MIT"
] | 1
|
2021-06-08T03:28:08.000Z
|
2021-06-08T03:28:08.000Z
|
payment/migrations/0002_auto_20171125_0022.py
|
Littledelma/mofadog
|
5a7c6672da248e400a8a5746506a6e7b273c9510
|
[
"MIT"
] | 1
|
2021-06-08T03:23:34.000Z
|
2021-06-08T03:23:34.000Z
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.7 on 2017-11-24 16:22
from __future__ import unicode_literals
import datetime
from django.db import migrations, models
from django.utils.timezone import utc
class Migration(migrations.Migration):
    """Reset the default timestamps on history_order's date fields.

    NOTE: the datetime defaults below were frozen at the moment
    ``makemigrations`` ran — they are static values, not "now" at runtime.
    """

    dependencies = [
        ('payment', '0001_initial'),
    ]

    operations = [
        migrations.AlterField(
            model_name='history_order',
            name='dead_date',
            # NOTE(review): verbose_name 'daed_date' looks like a typo of
            # 'dead_date'; fixing it would require a follow-up migration.
            field=models.DateTimeField(default=datetime.datetime(2017, 11, 24, 16, 22, 1, 719840, tzinfo=utc), verbose_name='daed_date'),
        ),
        migrations.AlterField(
            model_name='history_order',
            name='order_date',
            field=models.DateTimeField(default=datetime.datetime(2017, 11, 24, 16, 22, 1, 719662, tzinfo=utc), verbose_name='order date'),
        ),
        migrations.AlterField(
            model_name='history_order',
            name='valid_date',
            field=models.DateTimeField(default=datetime.datetime(2017, 11, 24, 16, 22, 1, 719758, tzinfo=utc), verbose_name='valid_date'),
        ),
    ]
| 33.181818
| 138
| 0.63379
| 127
| 1,095
| 5.307087
| 0.393701
| 0.035608
| 0.047478
| 0.059347
| 0.514837
| 0.497033
| 0.497033
| 0.430267
| 0.284866
| 0.284866
| 0
| 0.093976
| 0.242009
| 1,095
| 32
| 139
| 34.21875
| 0.718072
| 0.0621
| 0
| 0.36
| 1
| 0
| 0.113281
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.16
| 0
| 0.28
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
e078ffec67d1b2046e248c3ee5d65b353731cbf4
| 1,479
|
py
|
Python
|
examples/basic/wire_feedthrough.py
|
souviksaha97/spydrnet-physical
|
b07bcc152737158ea7cbebf0ef844abe49d29c5e
|
[
"BSD-3-Clause"
] | null | null | null |
examples/basic/wire_feedthrough.py
|
souviksaha97/spydrnet-physical
|
b07bcc152737158ea7cbebf0ef844abe49d29c5e
|
[
"BSD-3-Clause"
] | null | null | null |
examples/basic/wire_feedthrough.py
|
souviksaha97/spydrnet-physical
|
b07bcc152737158ea7cbebf0ef844abe49d29c5e
|
[
"BSD-3-Clause"
] | null | null | null |
"""
==========================================
Genrating feedthrough from single instance
==========================================
This example demostrates how to generate a feedthrough wire connection for
a given scalar or vector wires.
**Initial Design**
.. hdl-diagram:: ../../../examples/basic/_initial_design.v
:type: netlistsvg
:align: center
:module: top
**Output1** ``wire0`` feedthough from ``inst_2_1``
.. hdl-diagram:: ../../../examples/basic/_output_wire.v
:type: netlistsvg
:align: center
:module: top
**Output2** ``bus_in`` feedthrough from ``inst_1_0``
.. hdl-diagram:: ../../../examples/basic/_output_bus.v
:type: netlistsvg
:align: center
:module: top
"""
from os import path
import spydrnet as sdn
import spydrnet_physical as sdnphy
# Load the bundled example netlist and grab the scalar wire and the
# instance we want to feed it through.
netlist = sdnphy.load_netlist_by_name('basic_hierarchy')
top = netlist.top_instance.reference
cable0 = next(top.get_cables("wire0"))
inst2 = next(top.get_instances("inst_2_0"))
# Write out the unmodified design first (used by the docs diagrams).
sdn.compose(netlist, '_initial_design.v', skip_constraints=True)
# Feed ``wire0`` through ``inst_2_0`` and emit the result.
top.create_feedthrough(inst2, cable0)
top.create_unconn_wires()
sdn.compose(netlist, '_output_wire.v', skip_constraints=True)
# Reload a fresh copy of the netlist and repeat with the vector cable
# ``bus_in`` fed through ``inst_1_0``.
netlist = sdnphy.load_netlist_by_name('basic_hierarchy')
top = netlist.top_instance.reference
bus_in = next(top.get_cables("bus_in"))
inst1 = next(top.get_instances("inst_1_0"))
cables = top.create_feedthrough(inst1, bus_in)
top.create_unconn_wires()
sdn.compose(netlist, '_output_bus.v', skip_constraints=True)
| 24.65
| 74
| 0.699797
| 194
| 1,479
| 5.087629
| 0.365979
| 0.020263
| 0.040527
| 0.069909
| 0.448835
| 0.343465
| 0.343465
| 0.237082
| 0.149949
| 0.149949
| 0
| 0.013688
| 0.110886
| 1,479
| 59
| 75
| 25.067797
| 0.736882
| 0.473969
| 0
| 0.333333
| 1
| 0
| 0.131339
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.166667
| 0
| 0.166667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
0edddd954e6572bd2613d0926da19b7e62f01353
| 346
|
py
|
Python
|
torrents/migrations/0011_auto_20190223_2345.py
|
2600box/harvest
|
57264c15a3fba693b4b58d0b6d4fbf4bd5453bbd
|
[
"Apache-2.0"
] | 9
|
2019-03-26T14:50:00.000Z
|
2020-11-10T16:44:08.000Z
|
torrents/migrations/0011_auto_20190223_2345.py
|
2600box/harvest
|
57264c15a3fba693b4b58d0b6d4fbf4bd5453bbd
|
[
"Apache-2.0"
] | 22
|
2019-03-02T23:16:13.000Z
|
2022-02-27T10:36:36.000Z
|
torrents/migrations/0011_auto_20190223_2345.py
|
2600box/harvest
|
57264c15a3fba693b4b58d0b6d4fbf4bd5453bbd
|
[
"Apache-2.0"
] | 5
|
2019-04-24T00:51:30.000Z
|
2020-11-06T18:31:49.000Z
|
# Generated by Django 2.1.7 on 2019-02-23 23:45
from django.db import migrations
class Migration(migrations.Migration):
    """Give the Realm model a default ordering by name."""

    dependencies = [
        ('torrents', '0010_auto_20190223_0326'),
    ]

    operations = [
        # Model-meta change only; no database schema alteration.
        migrations.AlterModelOptions(
            name='realm',
            options={'ordering': ('name',)},
        ),
    ]
| 19.222222
| 48
| 0.586705
| 35
| 346
| 5.714286
| 0.828571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 0.283237
| 346
| 17
| 49
| 20.352941
| 0.681452
| 0.130058
| 0
| 0
| 1
| 0
| 0.160535
| 0.076923
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.090909
| 0
| 0.363636
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
0ee18e4216ec08fa76991908f8a448c6f9b7427c
| 2,147
|
py
|
Python
|
widgets/ui_ShowResultDialog.py
|
JaySon-Huang/SecertPhotos
|
e741cc26c19a5b249d45cc70959ac6817196cb8a
|
[
"MIT"
] | null | null | null |
widgets/ui_ShowResultDialog.py
|
JaySon-Huang/SecertPhotos
|
e741cc26c19a5b249d45cc70959ac6817196cb8a
|
[
"MIT"
] | 3
|
2015-05-19T08:43:46.000Z
|
2015-06-10T17:55:28.000Z
|
widgets/ui_ShowResultDialog.py
|
JaySon-Huang/SecertPhotos
|
e741cc26c19a5b249d45cc70959ac6817196cb8a
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'src/ui_ShowResultDialog.ui'
#
# Created: Sat May 16 17:05:43 2015
# by: PyQt5 UI code generator 5.4
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_Dialog(object):
    """Qt Designer-generated UI for the result dialog: an expanding
    ImageLabel above a horizontally-centred "Save it" button.

    NOTE: generated from ui_ShowResultDialog.ui (see file header); edits
    here will be lost on regeneration.
    """
    def setupUi(self, Dialog):
        # Build the widget tree and layouts on ``Dialog``.
        Dialog.setObjectName("Dialog")
        Dialog.resize(400, 300)
        self.verticalLayout = QtWidgets.QVBoxLayout(Dialog)
        self.verticalLayout.setObjectName("verticalLayout")
        # Custom ImageLabel (imported at the bottom of this module) that
        # expands in both directions but never below 100x100.
        self.lb_image = ImageLabel(Dialog)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Expanding)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.lb_image.sizePolicy().hasHeightForWidth())
        self.lb_image.setSizePolicy(sizePolicy)
        self.lb_image.setMinimumSize(QtCore.QSize(100, 100))
        self.lb_image.setAlignment(QtCore.Qt.AlignCenter)
        self.lb_image.setObjectName("lb_image")
        self.verticalLayout.addWidget(self.lb_image)
        # Button row: spacer | save button | spacer centres the button.
        self.hLayout = QtWidgets.QHBoxLayout()
        self.hLayout.setObjectName("hLayout")
        spacerItem = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
        self.hLayout.addItem(spacerItem)
        self.btn_save = QtWidgets.QPushButton(Dialog)
        self.btn_save.setObjectName("btn_save")
        self.hLayout.addWidget(self.btn_save)
        spacerItem1 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
        self.hLayout.addItem(spacerItem1)
        self.verticalLayout.addLayout(self.hLayout)
        self.retranslateUi(Dialog)
        QtCore.QMetaObject.connectSlotsByName(Dialog)

    def retranslateUi(self, Dialog):
        # Install the translatable texts for the window and widgets.
        _translate = QtCore.QCoreApplication.translate
        Dialog.setWindowTitle(_translate("Dialog", "Dialog"))
        self.lb_image.setText(_translate("Dialog", "Image Label"))
        self.btn_save.setText(_translate("Dialog", "Save it"))
from widgets.ImageLabel import ImageLabel
| 43.816327
| 115
| 0.72054
| 230
| 2,147
| 6.63913
| 0.404348
| 0.041257
| 0.057629
| 0.074656
| 0.160445
| 0.128356
| 0.128356
| 0.128356
| 0.128356
| 0.128356
| 0
| 0.023177
| 0.17606
| 2,147
| 48
| 116
| 44.729167
| 0.840023
| 0.104797
| 0
| 0
| 1
| 0
| 0.04441
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.057143
| false
| 0
| 0.057143
| 0
| 0.142857
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
0ef760e6a3a5620b5876eba10c68bc7b0bb1b6c8
| 474
|
py
|
Python
|
buzzbox/restaurants/migrations/0002_restaurant_description.py
|
Danielvalev/kutiika
|
661b850163de942a137157a97d98d90553861044
|
[
"MIT"
] | null | null | null |
buzzbox/restaurants/migrations/0002_restaurant_description.py
|
Danielvalev/kutiika
|
661b850163de942a137157a97d98d90553861044
|
[
"MIT"
] | null | null | null |
buzzbox/restaurants/migrations/0002_restaurant_description.py
|
Danielvalev/kutiika
|
661b850163de942a137157a97d98d90553861044
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.2.9 on 2021-12-06 10:02
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add a required ``description`` CharField to Restaurant."""

    dependencies = [
        ('restaurants', '0001_initial'),
    ]

    operations = [
        # default='Description' back-fills existing rows during the
        # migration; preserve_default=False drops the default afterwards.
        migrations.AddField(
            model_name='restaurant',
            name='description',
            field=models.CharField(default='Description', max_length=255, verbose_name='Description'),
            preserve_default=False,
        ),
    ]
| 23.7
| 102
| 0.620253
| 48
| 474
| 6.020833
| 0.791667
| 0.103806
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.063218
| 0.265823
| 474
| 19
| 103
| 24.947368
| 0.767241
| 0.094937
| 0
| 0
| 1
| 0
| 0.154567
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.076923
| 0
| 0.307692
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
16214a743fb88fbf7d2c7ed97c9778c2fbeb46d1
| 4,764
|
py
|
Python
|
tools/pod-xml-to-geojson.py
|
24-timmarsseglingarna/app
|
0c028bd2eb284c6893cb16dd91bd093b2222338f
|
[
"Apache-2.0"
] | null | null | null |
tools/pod-xml-to-geojson.py
|
24-timmarsseglingarna/app
|
0c028bd2eb284c6893cb16dd91bd093b2222338f
|
[
"Apache-2.0"
] | 14
|
2017-08-24T12:46:58.000Z
|
2021-04-21T07:56:58.000Z
|
tools/pod-xml-to-geojson.py
|
24-timmarsseglingarna/app
|
0c028bd2eb284c6893cb16dd91bd093b2222338f
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# Converts a PoD XML file to a GeoJSON file.
#
# With the --javascript parameter, the generated file is a javascript
# file defining a variable 'basePodSpec'.
#
# Get the PoD XML file from http://dev.24-timmars.nu/PoD/xmlapi_app.php.
import xml.etree.ElementTree as etree
import argparse
import re
import json
import io
import sys
import os.path
import datetime
# Python 2 needs codecs.open for explicit-encoding output (see
# output_pod); Python 3's io.open handles encoding natively.
if sys.version < '3':
    import codecs
# points number 9000 and above are not real points; they are used to mark
# area borders
MAXPOINT=8999
def run():
    """Entry point: parse CLI options, read the PoD XML file and write
    the four feature collections to the output file."""
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument("-i", "--infile", help="input file")
    arg_parser.add_argument("-o", "--outfile", help="output file")
    arg_parser.add_argument("--id", help="id of terrain")
    arg_parser.add_argument("--javascript", action="store_true")
    opts = arg_parser.parse_args()

    xml_tree = etree.parse(opts.infile)
    all_points, start_points, turning_points = get_points(xml_tree)
    inshore_legs, offshore_legs = get_legs(xml_tree, all_points)

    # Order matters: this is the member order in the emitted document.
    feature_sets = [('startPoints', start_points),
                    ('turningPoints', turning_points),
                    ('inshoreLegs', inshore_legs),
                    ('offshoreLegs', offshore_legs)]
    output_pod(opts.outfile, opts.javascript, opts.id, feature_sets)
def output_pod(fname, javascript, id, features):
    """Write the feature collections to ``fname``.

    :param fname: output file path
    :param javascript: if True, wrap the JSON in a javascript variable
        declaration (``export var basePodSpec = ...``)
    :param id: terrain id, emitted as the top-level ``"id"`` member
    :param features: list of ``(name, feature_list)`` tuples, each
        written as a GeoJSON FeatureCollection member
    """
    # ``id`` shadows the builtin, but the name is part of the public
    # signature, so it is kept.
    if sys.version < '3':
        fd = codecs.open(fname, "w", encoding="utf-8")
    else:
        fd = io.open(fname, "w", encoding="utf-8")
    try:
        if javascript:
            fd.write(u'/* eslint-disable */\n')
            fd.write(u'export var basePodSpec = ')
        fd.write(u'{"id": %s, ' % id)
        flen = len(features)
        for i, (name, obj) in enumerate(features, start=1):
            fd.write(u'"%s": {"type": "FeatureCollection",'
                     '"crs": { "type": "name",'
                     '"properties": { "name": "urn:ogc:def:crs:OGC:1.3:CRS84" } },'
                     '"features":' % name)
            fd.write(json.dumps(obj, ensure_ascii=False))
            # The last collection closes without a separator; all the
            # others are followed by a comma and a newline.
            if i == flen:
                fd.write(u'}')
            else:
                fd.write(u'},\n')
        if javascript:
            fd.write(u'};\n')
        else:
            fd.write(u'}\n')
    finally:
        # The original never closed the handle; always release it.
        fd.close()
def get_points(tree):
    """Extract all points from the parsed PoD document.

    Returns (all_points, start_points, turning_points) where
    all_points maps point number (string) -> [lng, lat], and
    start_points / turning_points are lists of GeoJSON Feature dicts.
    Points numbered above MAXPOINT (area-border markers) are skipped.
    """
    doc = tree.getroot()
    # numbers of points that act as start points in some krets
    startnumbers = {}
    all_points = {}
    start_points = []
    turning_points = []
    for n in doc.findall("kretsar/krets/startpoints/number"):
        startnumbers[n.text] = True
    for p in doc.findall("points/point"):
        number = p.find("number").text
        if int(number) > MAXPOINT:
            continue  # area-border marker, not a real point
        name = p.find("name").text
        descr = p.find("descr").text
        lat = p.find("lat").text
        lng = p.find("long").text
        footnote = None
        footnoteelem = p.find("footnote")
        if footnoteelem is not None:
            footnote = footnoteelem.text
        properties = {"number": number,
                      "name": name,
                      "descr": descr}
        if footnote is not None:
            properties["footnote"] = footnote
        # GeoJSON coordinate order is [longitude, latitude]
        coordinates = [float(lng), float(lat)]
        geometry = {"type": "Point",
                    "coordinates": coordinates}
        # NOTE: the original built a one-element tuple by accident
        # (trailing comma) and used extend(); append() is equivalent.
        point = {"type": "Feature",
                 "properties": properties,
                 "geometry": geometry}
        if number in startnumbers:
            start_points.append(point)
        else:
            turning_points.append(point)
        all_points[number] = coordinates
    return all_points, start_points, turning_points
def get_legs(tree, all_points):
    """Extract all legs from the parsed PoD document.

    all_points maps point number -> coordinates (from get_points).
    Returns (coast, offshore): lists of GeoJSON LineString Features for
    inshore legs ("sea" == "0") and offshore legs respectively.
    Legs touching area-border points (above MAXPOINT) are skipped, as is
    one direction of each leg pair.
    """
    doc = tree.getroot()
    coast = []
    offshore = []
    for p in doc.findall("legs/leg"):
        src = p.find("from").text
        dst = p.find("to").text
        if int(src) > MAXPOINT or int(dst) > MAXPOINT:
            continue  # area-border markers are not real points
        if int(src) < int(dst):
            # since all legs are present twice (in both directions),
            # skip one direction
            continue
        dist = p.find("dist").text
        sea = p.find("sea").text
        addtime = p.find("addtime").text
        if dist is None:
            # BUGFIX: the original printed this error and then crashed
            # on float(None); report the broken leg and skip it instead.
            print("** error: no distance: src: %s dst: %s" % (src, dst))
            continue
        properties = {"src": src,
                      "dst": dst,
                      "dist": float(dist)}
        # mark zero-distance legs that carry an additional time penalty
        if properties["dist"] == 0 and addtime == "1":
            properties["addtime"] = True
        src_coords = all_points[src]
        dst_coords = all_points[dst]
        geometry = {"type": "LineString",
                    "coordinates": [src_coords, dst_coords]}
        leg = {"type": "Feature",
               "properties": properties,
               "geometry": geometry}
        if sea == "0":
            coast.append(leg)
        else:
            offshore.append(leg)
    return coast, offshore
# Script entry point: run the converter only when executed directly,
# not when imported as a module.
if __name__ == '__main__':
    run()
| 29.407407
| 79
| 0.553736
| 559
| 4,764
| 4.633274
| 0.316637
| 0.021236
| 0.02471
| 0.023166
| 0.136293
| 0.09305
| 0.037838
| 0
| 0
| 0
| 0
| 0.006959
| 0.306255
| 4,764
| 161
| 80
| 29.590062
| 0.776702
| 0.084173
| 0
| 0.145161
| 1
| 0.008065
| 0.146829
| 0.014476
| 0
| 0
| 0
| 0
| 0
| 1
| 0.032258
| false
| 0
| 0.072581
| 0
| 0.120968
| 0.008065
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.