hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
d9afca45a6adc9c41c0b981032c729d59e9db234
| 2,801
|
py
|
Python
|
examples/p02_budgets/budget_data_ingest/migrations/0001_initial.py
|
18F/data-federation-ingest
|
a896ef2da1faf3966f018366b26a338bb66cc717
|
[
"CC0-1.0"
] | 18
|
2019-07-26T13:43:01.000Z
|
2022-01-15T14:57:52.000Z
|
examples/p02_budgets/budget_data_ingest/migrations/0001_initial.py
|
18F/data-federation-ingest
|
a896ef2da1faf3966f018366b26a338bb66cc717
|
[
"CC0-1.0"
] | 96
|
2019-06-14T18:30:54.000Z
|
2021-08-03T09:25:02.000Z
|
examples/p02_budgets/budget_data_ingest/migrations/0001_initial.py
|
18F/data-federation-ingest
|
a896ef2da1faf3966f018366b26a338bb66cc717
|
[
"CC0-1.0"
] | 3
|
2020-01-23T04:48:18.000Z
|
2021-01-12T09:31:20.000Z
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.13 on 2018-06-08 22:54
from __future__ import unicode_literals
from django.conf import settings
import django.contrib.postgres.fields.jsonb
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='BudgetItem',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('year', models.IntegerField()),
('agency', models.TextField()),
('data_source', models.TextField()),
('category', models.TextField()),
('dollars_budgeted', models.DecimalField(decimal_places=2, max_digits=14)),
('dollars_spent', models.DecimalField(decimal_places=2, max_digits=14)),
('row_number', models.IntegerField()),
],
),
migrations.CreateModel(
name='Upload',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('file_metadata', django.contrib.postgres.fields.jsonb.JSONField(null=True)),
('file', models.FileField(upload_to='')),
('raw', models.BinaryField(null=True)),
('validation_results', django.contrib.postgres.fields.jsonb.JSONField(null=True)),
('status', models.CharField(choices=[('LOADING', 'Loading'), ('PENDING', 'Pending'), ('STAGED', 'Staged'), ('INSERTED', 'Inserted'), ('DELETED', 'Deleted')], default='LOADING', max_length=10)),
('status_changed_at', models.DateTimeField(null=True)),
('replaces', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='replaced_by', to='budget_data_ingest.Upload')),
('status_changed_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='+', to=settings.AUTH_USER_MODEL)),
('submitter', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
),
migrations.AddField(
model_name='budgetitem',
name='upload',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='budget_data_ingest.Upload'),
),
]
| 47.474576
| 209
| 0.611567
| 285
| 2,801
| 5.831579
| 0.396491
| 0.028881
| 0.042118
| 0.066185
| 0.465704
| 0.358604
| 0.358604
| 0.358604
| 0.247894
| 0.247894
| 0
| 0.012258
| 0.24277
| 2,801
| 58
| 210
| 48.293103
| 0.771334
| 0.024634
| 0
| 0.22
| 1
| 0
| 0.135581
| 0.018322
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.1
| 0
| 0.18
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
d9bd741cd9ad9e20eeb1069fce4709781f43edd4
| 6,476
|
py
|
Python
|
Qt_interface/add_subject.py
|
kithsirij/NLP-based-Syllabus-Coverage-Exam-paper-checker-Tool
|
b7b38a7b7c6d0a2ad5264df32acd75cdef552bd0
|
[
"MIT"
] | 1
|
2019-07-17T09:08:41.000Z
|
2019-07-17T09:08:41.000Z
|
Qt_interface/add_subject.py
|
kithsirij/NLP-based-Syllabus-Coverage-Exam-paper-checker-Tool
|
b7b38a7b7c6d0a2ad5264df32acd75cdef552bd0
|
[
"MIT"
] | null | null | null |
Qt_interface/add_subject.py
|
kithsirij/NLP-based-Syllabus-Coverage-Exam-paper-checker-Tool
|
b7b38a7b7c6d0a2ad5264df32acd75cdef552bd0
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'add_subject.ui'
#
# Created by: PyQt4 UI code generator 4.11.4
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class Ui_Dialog_add_subject(object):
def setupUi(self, Dialog_add_subject):
Dialog_add_subject.setObjectName(_fromUtf8("Dialog_add_subject"))
Dialog_add_subject.resize(568, 374)
font = QtGui.QFont()
font.setFamily(_fromUtf8("Times New Roman"))
font.setPointSize(10)
Dialog_add_subject.setFont(font)
Dialog_add_subject.setContextMenuPolicy(QtCore.Qt.CustomContextMenu)
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap(_fromUtf8("Qt_interface/SE_syllabus/4zIr6y.jpg")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
Dialog_add_subject.setWindowIcon(icon)
self.lbl_subject_name = QtGui.QLabel(Dialog_add_subject)
self.lbl_subject_name.setGeometry(QtCore.QRect(50, 235, 131, 21))
font = QtGui.QFont()
font.setFamily(_fromUtf8("Times New Roman"))
font.setPointSize(12)
self.lbl_subject_name.setFont(font)
self.lbl_subject_name.setObjectName(_fromUtf8("lbl_subject_name"))
self.label_add_subject = QtGui.QLabel(Dialog_add_subject)
self.label_add_subject.setGeometry(QtCore.QRect(220, 30, 151, 31))
font = QtGui.QFont()
font.setFamily(_fromUtf8("Times New Roman"))
font.setPointSize(14)
font.setBold(True)
font.setWeight(75)
self.label_add_subject.setFont(font)
self.label_add_subject.setObjectName(_fromUtf8("label_add_subject"))
self.lineEdit_subject_name = QtGui.QLineEdit(Dialog_add_subject)
self.lineEdit_subject_name.setGeometry(QtCore.QRect(190, 230, 321, 31))
font = QtGui.QFont()
font.setFamily(_fromUtf8("Times New Roman"))
font.setPointSize(12)
self.lineEdit_subject_name.setFont(font)
self.lineEdit_subject_name.setObjectName(_fromUtf8("lineEdit_subject_name"))
self.label_year = QtGui.QLabel(Dialog_add_subject)
self.label_year.setGeometry(QtCore.QRect(50, 95, 81, 21))
font = QtGui.QFont()
font.setFamily(_fromUtf8("Times New Roman"))
font.setPointSize(12)
self.label_year.setFont(font)
self.label_year.setObjectName(_fromUtf8("label_year"))
self.label_semester = QtGui.QLabel(Dialog_add_subject)
self.label_semester.setGeometry(QtCore.QRect(50, 165, 91, 21))
font = QtGui.QFont()
font.setFamily(_fromUtf8("Times New Roman"))
font.setPointSize(12)
self.label_semester.setFont(font)
self.label_semester.setObjectName(_fromUtf8("label_semester"))
self.pushButton_save = QtGui.QPushButton(Dialog_add_subject)
self.pushButton_save.setGeometry(QtCore.QRect(190, 290, 111, 31))
font = QtGui.QFont()
font.setFamily(_fromUtf8("Times New Roman"))
font.setPointSize(10)
self.pushButton_save.setFont(font)
icon1 = QtGui.QIcon()
icon1.addPixmap(QtGui.QPixmap(_fromUtf8("Qt_interface/SE_syllabus/Save-as.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.pushButton_save.setIcon(icon1)
self.pushButton_save.setIconSize(QtCore.QSize(20, 20))
self.pushButton_save.setObjectName(_fromUtf8("pushButton_save"))
self.pushButton_cancel = QtGui.QPushButton(Dialog_add_subject)
self.pushButton_cancel.setGeometry(QtCore.QRect(340, 290, 111, 31))
font = QtGui.QFont()
font.setFamily(_fromUtf8("Times New Roman"))
self.pushButton_cancel.setFont(font)
icon2 = QtGui.QIcon()
icon2.addPixmap(QtGui.QPixmap(_fromUtf8("Qt_interface/SE_syllabus/if_draw-08_725558.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.pushButton_cancel.setIcon(icon2)
self.pushButton_cancel.setIconSize(QtCore.QSize(20, 20))
self.pushButton_cancel.setObjectName(_fromUtf8("pushButton_cancel"))
self.comboBox_year = QtGui.QComboBox(Dialog_add_subject)
self.comboBox_year.setGeometry(QtCore.QRect(190, 91, 111, 31))
font = QtGui.QFont()
font.setFamily(_fromUtf8("Times New Roman"))
font.setPointSize(12)
self.comboBox_year.setFont(font)
self.comboBox_year.setObjectName(_fromUtf8("comboBox_year"))
self.comboBox_semester = QtGui.QComboBox(Dialog_add_subject)
self.comboBox_semester.setGeometry(QtCore.QRect(190, 160, 111, 31))
font = QtGui.QFont()
font.setFamily(_fromUtf8("Times New Roman"))
font.setPointSize(12)
self.comboBox_semester.setFont(font)
self.comboBox_semester.setObjectName(_fromUtf8("comboBox_semester"))
self.retranslateUi(Dialog_add_subject)
QtCore.QObject.connect(self.pushButton_cancel, QtCore.SIGNAL(_fromUtf8("clicked()")), self.lineEdit_subject_name.clear)
QtCore.QMetaObject.connectSlotsByName(Dialog_add_subject)
def retranslateUi(self, Dialog_add_subject):
Dialog_add_subject.setWindowTitle(_translate("Dialog_add_subject", "Dialog", None))
self.lbl_subject_name.setText(_translate("Dialog_add_subject", "SUBJECT NAME", None))
self.label_add_subject.setText(_translate("Dialog_add_subject", "ADD SUBJECT", None))
self.label_year.setText(_translate("Dialog_add_subject", "YEAR", None))
self.label_semester.setText(_translate("Dialog_add_subject", "SEMESTER", None))
self.pushButton_save.setText(_translate("Dialog_add_subject", "SAVE", None))
self.pushButton_cancel.setText(_translate("Dialog_add_subject", "CANCEL", None))
if __name__ == "__main__":
import sys
app = QtGui.QApplication(sys.argv)
Dialog_add_subject = QtGui.QDialog()
ui = Ui_Dialog_add_subject()
ui.setupUi(Dialog_add_subject)
Dialog_add_subject.show()
sys.exit(app.exec_())
| 48.691729
| 137
| 0.694719
| 761
| 6,476
| 5.654402
| 0.198423
| 0.092958
| 0.118987
| 0.041831
| 0.478039
| 0.408552
| 0.364397
| 0.263072
| 0.228213
| 0.206832
| 0
| 0.033788
| 0.195645
| 6,476
| 132
| 138
| 49.060606
| 0.792283
| 0.028567
| 0
| 0.293103
| 1
| 0
| 0.100634
| 0.022435
| 0
| 0
| 0
| 0
| 0
| 1
| 0.043103
| false
| 0
| 0.017241
| 0.025862
| 0.094828
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
8a3ff7ca606f5ce67c32533b5892e230c75d4eb8
| 413
|
py
|
Python
|
tables/migrations/0004_auto_20200901_2004.py
|
jarnoln/exposures
|
bbae3f79078048d25b77e178db6c0801ffe9f97e
|
[
"MIT"
] | null | null | null |
tables/migrations/0004_auto_20200901_2004.py
|
jarnoln/exposures
|
bbae3f79078048d25b77e178db6c0801ffe9f97e
|
[
"MIT"
] | null | null | null |
tables/migrations/0004_auto_20200901_2004.py
|
jarnoln/exposures
|
bbae3f79078048d25b77e178db6c0801ffe9f97e
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.1.1 on 2020-09-01 17:04
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('tables', '0003_exposure_category'),
]
operations = [
migrations.AlterField(
model_name='exposure',
name='location',
field=models.CharField(blank=True, default='', max_length=200),
),
]
| 21.736842
| 75
| 0.605327
| 44
| 413
| 5.590909
| 0.818182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.073333
| 0.273608
| 413
| 18
| 76
| 22.944444
| 0.746667
| 0.108959
| 0
| 0
| 1
| 0
| 0.120219
| 0.060109
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.083333
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
8a458f7c27c0535d07e4b642f5a00528aee12141
| 3,387
|
py
|
Python
|
main.py
|
DanielM24/Romanian-sub-dialect-identificator
|
78b3e00f8ee768eb0b1e8cf832a2dc0b8504b04d
|
[
"MIT"
] | null | null | null |
main.py
|
DanielM24/Romanian-sub-dialect-identificator
|
78b3e00f8ee768eb0b1e8cf832a2dc0b8504b04d
|
[
"MIT"
] | null | null | null |
main.py
|
DanielM24/Romanian-sub-dialect-identificator
|
78b3e00f8ee768eb0b1e8cf832a2dc0b8504b04d
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""Proiect.ipynb
Automatically generated by Colaboratory.
Original file is located at
https://colab.research.google.com/drive/1TR1Frf0EX4PtFZkLlVdGtMTINqhoQwRw
"""
# Importarea librariilor
import numpy as np
import pandas as pd # pandas pentru citirea fisierelor
from sklearn import preprocessing
from sklearn import svm # importarea modelului
from sklearn.feature_extraction.text import TfidfVectorizer # modelarea datelor pentru a obtine valori numerice din text
from sklearn.metrics import classification_report, confusion_matrix
# Incarcarea datelor
train_labels = pd.read_csv('train_labels.txt', sep='\t', header=None, engine='python')
train_labels = train_labels.to_numpy() # convertim data frame-ul intr-un vector
train_labels = train_labels[:,1] # pastram doar etichetele
train_samples = pd.read_csv('train_samples.txt', sep='\t', header=None, engine='python')
train_samples = train_samples.to_numpy()
train_samples = train_samples[:,1] # pastram doar cuvintele
validation_samples = pd.read_csv('validation_samples.txt', sep='\t', header=None, engine='python')
validation_samples = validation_samples.to_numpy()
validation_samples = validation_samples[:,1] # salvam cuvintele
validation_labels = pd.read_csv('validation_labels.txt', sep='\t', header=None, engine='python')
validation_labels = validation_labels.to_numpy()
validation_labels = validation_labels[:,1] # pastram doar etichetele
test_samples = pd.read_csv('test_samples.txt', sep='\t', header=None, engine='python')
test_samples = test_samples.to_numpy()
label = test_samples[:,0] # salvam etichetele
test_samples = test_samples[:,1] # salvam cuvintele
def normalize_data(train_data, test_data, type='l2'): # functia care intoarce datele normalizate
#tipul de normalizare este setat implicit la l2
scaler = None
if type == 'standard':
scaler = preprocessing.StandardScaler()
elif type == 'min_max':
scaler = preprocessing.MinMaxScaler()
elif type == 'l1' or type == 'l2':
scaler = preprocessing.Normalizer(norm = type)
if scaler is not None:
scaler.fit(train_data)
scaled_train_data = scaler.transform(train_data)
scaled_test_data = scaler.transform(test_data)
return scaled_train_data, scaled_test_data
else:
return train_data, test_data
# Modelarea datelor
vectorizer = TfidfVectorizer()
training_features = vectorizer.fit_transform(train_samples)
validation_features = vectorizer.transform(validation_samples)
testing_features = vectorizer.transform(test_samples)
# Normalizarea datelor
norm_train, norm_test = normalize_data(training_features, testing_features)
norm_validation, _ = normalize_data(validation_features, validation_features)
# Aplicam modelul SVM
model_svm = svm.SVC(kernel='linear', C=23, gamma=110) # definim modelul
model_svm.fit(norm_train, train_labels) # procesul de invatare
test_predictions = model_svm.predict(norm_test) # predictie pe datele de test
print("Classification report: ")
print(classification_report(validation_labels, model_svm.predict(norm_validation)))
print("Confusion matrix: ")
print(confusion_matrix(validation_labels, model_svm.predict(norm_validation)))
# Exportarea datelor in format CSV
test_export = {'id':label,'label':test_predictions}
data_f = pd.DataFrame(test_export)
data_f.to_csv('test_submission.csv',index=False)
| 38.05618
| 120
| 0.775613
| 439
| 3,387
| 5.756264
| 0.343964
| 0.050653
| 0.017808
| 0.025722
| 0.15829
| 0.117926
| 0.117926
| 0.082311
| 0
| 0
| 0
| 0.006741
| 0.124004
| 3,387
| 89
| 121
| 38.05618
| 0.844961
| 0.220254
| 0
| 0
| 1
| 0
| 0.086458
| 0.01645
| 0
| 0
| 0
| 0
| 0
| 1
| 0.018868
| false
| 0
| 0.113208
| 0
| 0.169811
| 0.075472
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
8a49e6407bf66d6fbb676497c6a102a344eeed6b
| 2,533
|
py
|
Python
|
apps/core/migrations/0001_initial.py
|
Visualway/Vitary
|
c7db9a25837fa7390b2177b9db48e73c6f1ab3c8
|
[
"BSD-3-Clause"
] | 4
|
2021-12-24T16:07:44.000Z
|
2022-03-04T02:30:20.000Z
|
apps/core/migrations/0001_initial.py
|
Visualway/Vitary
|
c7db9a25837fa7390b2177b9db48e73c6f1ab3c8
|
[
"BSD-3-Clause"
] | 4
|
2021-12-30T13:32:56.000Z
|
2022-03-15T03:58:48.000Z
|
apps/core/migrations/0001_initial.py
|
Visualway/Vitary
|
c7db9a25837fa7390b2177b9db48e73c6f1ab3c8
|
[
"BSD-3-Clause"
] | null | null | null |
# Generated by Django 4.0.2 on 2022-03-02 03:29
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('vit', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Badge',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=50)),
('description', models.TextField()),
('color', models.CharField(choices=[('success', 'Green'), ('info', 'Blue'), ('link', 'Purple'), ('primary', 'Turquoise'), ('warning', 'Yellow'), ('danger', 'Red'), ('dark', 'Black'), ('white', 'White')], max_length=50)),
('special', models.BooleanField(default=False)),
],
options={
'ordering': ['name'],
},
),
migrations.CreateModel(
name='Requirments',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=50)),
('description', models.TextField()),
('badge', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.badge')),
],
options={
'ordering': ['name'],
},
),
migrations.CreateModel(
name='Abuse',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('abuse_type', models.CharField(choices=[('ABUSE', 'Abuse'), ('INAPPROPRIATE', 'Inappropriate'), ('SPAM', 'Spam'), ('BULLYING', 'Bullying'), ('SEXUAL_CONTENT', 'Sexual Content'), ('OTHER', 'Other')], max_length=50)),
('description', models.TextField()),
('date', models.DateTimeField(auto_now_add=True)),
('to_vit', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='vit.vit')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
options={
'verbose_name_plural': 'Abuses',
'ordering': ['-date'],
},
),
]
| 42.932203
| 236
| 0.55073
| 236
| 2,533
| 5.788136
| 0.389831
| 0.029283
| 0.040996
| 0.064422
| 0.474378
| 0.474378
| 0.38287
| 0.38287
| 0.38287
| 0.38287
| 0
| 0.014803
| 0.279905
| 2,533
| 58
| 237
| 43.672414
| 0.734101
| 0.017766
| 0
| 0.490196
| 1
| 0
| 0.158488
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.058824
| 0
| 0.137255
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
8a4ccded7f4f9f9be895e48e8a31955a7046241e
| 4,371
|
py
|
Python
|
dddppp/settings.py
|
tysonclugg/dddppp
|
22f52d671ca71c2df8d6ac566a1626e5f05b3159
|
[
"MIT"
] | null | null | null |
dddppp/settings.py
|
tysonclugg/dddppp
|
22f52d671ca71c2df8d6ac566a1626e5f05b3159
|
[
"MIT"
] | null | null | null |
dddppp/settings.py
|
tysonclugg/dddppp
|
22f52d671ca71c2df8d6ac566a1626e5f05b3159
|
[
"MIT"
] | null | null | null |
"""
Django settings for dddppp project.
Generated by 'django-admin startproject' using Django 1.8.2.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
import pkg_resources
import pwd
PROJECT_NAME = 'dddppp'
# Enforce a valid POSIX environment
# Get missing environment variables via call to pwd.getpwuid(...)
_PW_CACHE = None
_PW_MAP = {
'LOGNAME': 'pw_name',
'USER': 'pw_name',
'USERNAME': 'pw_name',
'UID': 'pw_uid',
'GID': 'pw_gid',
'HOME': 'pw_dir',
'SHELL': 'pw_shell',
}
for _missing_env in set(_PW_MAP).difference(os.environ):
if _PW_CACHE is None:
_PW_CACHE = pwd.getpwuid(os.getuid())
os.environ[_missing_env] = str(getattr(_PW_CACHE, _PW_MAP[_missing_env]))
del _PW_CACHE, _PW_MAP, pwd
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'nfd_lvt=&k#h#$a^_l09j#5%s=mg+0aw=@t84ry$&rps43c33+'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = [
'localhost',
]
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'dddp',
'dddp.server',
'dddp.accounts',
'dddppp.slides',
]
for (requirement, pth) in [
('django-extensions', 'django_extensions'),
]:
try:
pkg_resources.get_distribution(requirement)
except (
pkg_resources.DistributionNotFound,
pkg_resources.VersionConflict,
):
continue
INSTALLED_APPS.append(pth)
MIDDLEWARE_CLASSES = [
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
#'django.middleware.security.SecurityMiddleware',
]
ROOT_URLCONF = 'dddppp.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'dddppp.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': os.environ.get('PGDATABASE', PROJECT_NAME),
'USER': os.environ.get('PGUSER', os.environ['LOGNAME']),
'PASSWORD': os.environ.get('DJANGO_DATABASE_PASSWORD', ''),
'HOST': os.environ.get('PGHOST', ''),
'PORT': os.environ.get('PGPORT', ''),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'en-au'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
STATICFILES_STORAGE = 'whitenoise.django.GzipManifestStaticFilesStorage'
# django-secure
# see: https://github.com/carljm/django-secure/ for more options
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
#SECURE_SSL_REDIRECT = True
SECURE_CONTENT_TYPE_NOSNIFF = True
SECURE_FRAME_DENY = True
SESSION_COOKIE_SECURE = True
SESSION_COOKIE_HTTPONLY = True
DDDPPP_CONTENT_TYPES = []
PROJ_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
| 26.981481
| 77
| 0.695722
| 524
| 4,371
| 5.620229
| 0.429389
| 0.052971
| 0.044822
| 0.050934
| 0.099491
| 0.07708
| 0.07708
| 0.07708
| 0.027165
| 0
| 0
| 0.008782
| 0.166324
| 4,371
| 161
| 78
| 27.149068
| 0.799396
| 0.260581
| 0
| 0
| 1
| 0.010101
| 0.389894
| 0.275733
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.010101
| 0.030303
| 0
| 0.030303
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
8aa50b5f8d204a63672c266b3319435ba3678601
| 2,686
|
py
|
Python
|
insight/migrations/0001_initial.py
|
leonhead/chess-insight
|
b893295719df21b4fee10d4e7b01639ded8b42b4
|
[
"MIT"
] | null | null | null |
insight/migrations/0001_initial.py
|
leonhead/chess-insight
|
b893295719df21b4fee10d4e7b01639ded8b42b4
|
[
"MIT"
] | null | null | null |
insight/migrations/0001_initial.py
|
leonhead/chess-insight
|
b893295719df21b4fee10d4e7b01639ded8b42b4
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.1 on 2020-09-08 07:43
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='OpeningSystem',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=40)),
],
),
migrations.CreateModel(
name='User',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=40)),
],
),
migrations.CreateModel(
name='Opening',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=40)),
('eco', models.CharField(max_length=3)),
('moves', models.TextField()),
('opening_system', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='insight.openingsystem')),
],
),
migrations.CreateModel(
name='Game',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('elo_mean', models.IntegerField(default=0)),
('elo_diff', models.IntegerField(default=0)),
('result', models.CharField(max_length=40)),
('timecontrol', models.CharField(max_length=40)),
('timestamp', models.DateTimeField()),
('raw', models.TextField()),
('opening', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='insight.opening')),
],
),
migrations.CreateModel(
name='Analyse',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('turnover_move', models.IntegerField(default=0)),
('turnover_evaluation', models.IntegerField(default=0)),
('unbalance_material', models.IntegerField(default=0)),
('unbalance_officers', models.IntegerField(default=0)),
('unbalance_exchange', models.IntegerField(default=0)),
('game', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='insight.game')),
],
),
]
| 41.323077
| 127
| 0.562919
| 252
| 2,686
| 5.873016
| 0.281746
| 0.085135
| 0.118243
| 0.122973
| 0.593243
| 0.487162
| 0.487162
| 0.487162
| 0.487162
| 0.487162
| 0
| 0.016869
| 0.293745
| 2,686
| 64
| 128
| 41.96875
| 0.76331
| 0.016009
| 0
| 0.491228
| 1
| 0
| 0.105642
| 0.007952
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.035088
| 0
| 0.105263
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
76dfdcc4b341cedf794e7489e27908f2ae58e24b
| 10,024
|
py
|
Python
|
mllib/nlp/seq2seq.py
|
pmaxit/dlnotebooks
|
5e5a161bbd9d0753850029be29e1488b8858ecd5
|
[
"Apache-2.0"
] | null | null | null |
mllib/nlp/seq2seq.py
|
pmaxit/dlnotebooks
|
5e5a161bbd9d0753850029be29e1488b8858ecd5
|
[
"Apache-2.0"
] | null | null | null |
mllib/nlp/seq2seq.py
|
pmaxit/dlnotebooks
|
5e5a161bbd9d0753850029be29e1488b8858ecd5
|
[
"Apache-2.0"
] | null | null | null |
# AUTOGENERATED! DO NOT EDIT! File to edit: nbs/01_seq2seq.ipynb (unless otherwise specified).
__all__ = ['Encoder', 'NewDecoder', 'Seq2Seq']
# Cell
from torch import nn
from torch import optim
import torch
import torch.nn.functional as F
from torch.nn.utils.rnn import pack_padded_sequence, pad_packed_sequence
# Cell
class Encoder(nn.Module):
    """Sequence encoder: embeds token ids and runs them through an LSTM.

    Only the final ``(hidden, cell)`` state is returned; the per-step
    outputs are discarded because the decoder consumes just the context
    vector.
    """

    def __init__(self, input_size, embedding_size, hidden_size, num_layers=2, p=0.1):
        super().__init__()
        self.hidden_size = hidden_size
        self.num_layers = num_layers
        self.dropout = nn.Dropout(p)
        self.embedding = nn.Embedding(input_size, embedding_size)
        self.rnn = nn.LSTM(embedding_size, hidden_size, num_layers,
                           dropout=p, batch_first=False)

    def forward(self, x, x_len):
        """x: (seq_len, batch) token ids; x_len: true lengths for packing."""
        embedded = self.dropout(self.embedding(x))  # (seq_len, batch, emb)
        # Pack so the LSTM skips padding; lengths must live on the CPU.
        packed = pack_padded_sequence(embedded, x_len.cpu(),
                                      batch_first=False, enforce_sorted=False)
        _, (hidden, cell) = self.rnn(packed)
        # Per-step outputs are irrelevant here — only the context matters.
        return hidden, cell
# Cell
class NewDecoder(nn.Module):
    """Single-step LSTM decoder.

    Each call decodes exactly one time step: embed the previous token,
    advance the LSTM state, and project to vocabulary logits.
    ``encoder_outputs`` is accepted for interface compatibility (e.g. with
    attention decoders) but is not used by this variant.
    """

    def __init__(self, hidden_size, embedding_size, output_size, n_layers=1, dropout_p=0.1):
        super().__init__()
        # Hyper-parameters kept as attributes for introspection.
        self.hidden_size = hidden_size
        self.output_size = output_size
        self.n_layers = n_layers
        self.dropout_p = dropout_p
        # Layers.
        self.embedding = nn.Embedding(output_size, embedding_size)
        self.dropout = nn.Dropout(dropout_p)
        self.rnn = nn.LSTM(embedding_size, hidden_size, n_layers,
                           dropout=dropout_p, batch_first=False)
        self.out = nn.Linear(hidden_size, output_size)

    def forward(self, word_input, last_hidden, encoder_outputs):
        """word_input: (batch,) previous token ids; last_hidden: LSTM state tuple."""
        step_input = word_input.unsqueeze(0)                 # (1, batch)
        embedded = self.dropout(self.embedding(step_input))  # (1, batch, emb)
        rnn_out, hidden = self.rnn(embedded, last_hidden)    # (1, batch, hidden)
        logits = self.out(rnn_out)                           # (1, batch, vocab)
        return logits, hidden
# Cell
import random
import pytorch_lightning as pl
import pytorch_lightning.metrics.functional as plfunc
from pytorch_lightning.loggers import TensorBoardLogger
# Cell
class Seq2Seq(pl.LightningModule):
    """Encoder-decoder PyTorch Lightning module for training a seq2seq model
    with teacher forcing.

    The module tries to learn a mapping from one token sequence to another.
    """
    @staticmethod
    def add_model_specific_args(parent_parser):
        """Return a child ArgumentParser extended with this model's flags."""
        # Fix: ArgumentParser was referenced but never imported in this file.
        from argparse import ArgumentParser
        parser = ArgumentParser(parents=[parent_parser], add_help=False)
        parser.add_argument("--emb_dim", type=int, default=32)
        parser.add_argument('--hidden_dim', type=int, default=64)
        parser.add_argument('--dropout', type=float, default=0.1)
        return parser
    def __init__(self,
                input_vocab_size,
                output_vocab_size,
                padding_index = 0,
                emb_dim = 8,
                hidden_dim=32,
                dropout=0.1,
                max_length=20,
                **kwargs):
        """Build encoder and decoder.

        :param input_vocab_size: size of the source-side vocabulary
        :param output_vocab_size: size of the target-side vocabulary
        :param padding_index: token id ignored by the loss
        :param max_length: fallback decode length used when no target is given
        """
        super().__init__()
        # dynamic, based on tokenizer vocab size defined in datamodule
        self.input_dim = input_vocab_size
        self.output_dim = output_vocab_size
        self.enc_emb_dim = emb_dim
        self.dec_emb_dim = emb_dim
        self.enc_hid_dim = hidden_dim
        self.dec_hid_dim = hidden_dim
        self.enc_dropout = dropout
        self.dec_dropout = dropout
        self.pad_idx = padding_index
        self.num_layers = 2
        # Fix: was hard-coded to 10, silently ignoring the max_length argument.
        self.max_length = max_length
        self.save_hyperparameters()
        self.max_epochs = kwargs.get('max_epochs', 5)
        self.learning_rate = 0.0005
        # Padding positions must not contribute to the loss.
        self._loss = nn.CrossEntropyLoss(ignore_index=self.pad_idx)
        self.encoder = Encoder(
            self.input_dim,
            self.enc_emb_dim,
            self.enc_hid_dim,
            self.num_layers,
            self.enc_dropout
        )
        self.decoder = NewDecoder(
            self.enc_hid_dim,
            self.dec_emb_dim,
            self.output_dim,
            self.num_layers,
            self.dec_dropout
        )
        self._init_weights()
    def _init_weights(self):
        """Normal(0, 0.01) for all weights, zeros for everything else."""
        for name, param in self.named_parameters():
            if "weight" in name:
                nn.init.normal_(param.data, mean=0, std=0.01)
            else:
                nn.init.constant_(param.data, 0)
    def create_mask(self, src):
        """Boolean mask of non-padding positions, shape (batch, src_len)."""
        mask = (src != self.pad_idx).permute(1, 0)
        return mask
    def forward(self, src_seq, source_len, trg_seq, teacher_force_ratio=0.5):
        """Greedy decoding with optional teacher forcing.

        teacher_force_ratio is the per-step probability of feeding the
        ground-truth token instead of the model's own prediction. When
        trg_seq is None the model decodes self.max_length steps without
        teacher forcing.
        """
        source = src_seq.transpose(0, 1)  # -> (src_len, batch)
        # Fix: `target` was unbound when trg_seq is None but could still be
        # evaluated in the teacher-forcing condition below.
        target = None
        target_len = self.max_length
        if trg_seq is not None:
            target = trg_seq.transpose(0, 1)  # -> (trg_len, batch)
            target_len = target.shape[0]
        batch_size = source.shape[1]
        target_vocab_size = self.output_dim
        outputs = torch.zeros(target_len, batch_size, target_vocab_size).to(self.device)
        encoder_hidden = self.encoder(source, source_len)
        # Start decoding from token id 1 — assumed to be <sos>; TODO confirm
        # against the tokenizer used by the datamodule.
        decoder_input = torch.ones(batch_size).long().to(self.device)
        decoder_hidden = encoder_hidden
        encoder_outputs = None  # this decoder variant does not use attention
        for t in range(target_len):
            decoder_output, decoder_hidden = self.decoder(decoder_input, decoder_hidden, encoder_outputs)
            outputs[t] = decoder_output
            # Greedy choice becomes the next input unless teacher forcing wins.
            topv, topi = decoder_output.topk(1)
            decoder_input = topi.squeeze().detach()
            if target is not None and random.random() < teacher_force_ratio:
                decoder_input = target[t]
        return outputs
    def loss(self, logits, target):
        """Cross-entropy over flattened (step * batch) logits."""
        return self._loss(logits, target)
    def configure_optimizers(self):
        """AdamW with a linear OneCycle schedule stepped every batch."""
        optimizer = optim.AdamW(self.parameters(), lr=self.learning_rate)
        lr_scheduler = {
            'scheduler': optim.lr_scheduler.OneCycleLR(
                optimizer,
                max_lr = self.learning_rate,
                # NOTE(review): hard-coded; should match len(train_dataloader).
                steps_per_epoch = 3379,
                epochs=self.max_epochs,
                anneal_strategy='linear',
                final_div_factor=1000,
                pct_start = 0.01
            ),
            "name": "learning_rate",
            "interval":"step",
            "frequency": 1
        }
        return [optimizer],[lr_scheduler]
    def training_step(self, batch, batch_idx):
        """One optimization step with teacher forcing (default ratio 0.5)."""
        src_seq, trg_seq, src_lengths = batch['src'],batch['trg'], batch['src_len']
        output = self.forward(src_seq, src_lengths,trg_seq)
        # do not know if this is a problem, loss will be computed with sos token
        output = output.view(-1, self.output_dim)
        trg_seq = trg_seq.transpose(0, 1)
        trg = trg_seq.reshape(-1)
        loss = self.loss(output, trg)
        self.log('train_loss',loss.item(),
                on_step = True,
                on_epoch=True,
                prog_bar = True,
                logger=True)
        return loss
    def validation_step(self, batch,batch_idx):
        """Validation runs with teacher forcing disabled (ratio 0)."""
        src_seq, trg_seq, src_lengths = batch['src'],batch['trg'], batch['src_len']
        outputs = self.forward(src_seq, src_lengths, trg_seq, 0)
        # Fix: trg_seq is batch-first; transpose to time-major before dropping
        # the first (sos) step — the old `trg_seq[1:]` sliced off a *batch
        # row* and misaligned targets with the (time-major) logits.
        trg_t = trg_seq.transpose(0, 1)
        logits = outputs[1:].view(-1, self.output_dim)
        trg = trg_t[1:].reshape(-1)
        loss = self.loss(logits, trg)
        pred_seq = outputs[1:].argmax(2) # seq_len*batch_size*vocab_size -> seq_len * batch_size
        # change layout: seq_len * batch_size -> batch_size * seq_len
        pred_seq = pred_seq.T
        trg_batch = trg_t[1:].T
        # compare list of predicted ids for all sequences in a batch to targets
        acc = plfunc.accuracy(pred_seq.reshape(-1), trg_batch.reshape(-1))
        # Fix: the next two bindings used `-` (a discarded subtraction)
        # instead of `=`, so validation crashed with NameError.
        predicted_ids = pred_seq.tolist()
        # each target reference sequence needs an extra dim for bleu_score:
        # [seq1=[[reference1]], seq2=[[reference1]], ...]
        target_ids = torch.unsqueeze(trg_batch, 1).tolist()
        bleu_score = plfunc.nlp.bleu_score(predicted_ids, target_ids, n_gram=3).to(self.device)
        self.log(
            'val_loss',
            loss,
            on_step=False,
            on_epoch=True,
            prog_bar=True,
            logger=True,
            sync_dist=True)
        self.log(
            "val_acc",
            acc,
            on_step=False,
            on_epoch=True,
            prog_bar=True,
            logger=True,
            sync_dist=True
        )
        self.log(
            "val_bleu_idx",
            bleu_score,
            on_step=False,
            on_epoch=True,
            prog_bar=True,
            logger=True,
            sync_dist=True
        )
        return loss, acc, bleu_score
| 32.025559
| 120
| 0.621409
| 1,303
| 10,024
| 4.537222
| 0.23561
| 0.012179
| 0.01184
| 0.010149
| 0.207037
| 0.139716
| 0.101996
| 0.092862
| 0.064107
| 0.064107
| 0
| 0.013053
| 0.289206
| 10,024
| 312
| 121
| 32.128205
| 0.816702
| 0.189346
| 0
| 0.142105
| 1
| 0
| 0.023146
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.068421
| false
| 0
| 0.047368
| 0.005263
| 0.178947
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
0a017ba6441979fea8dcb4bd6912e6e472b2970d
| 456
|
py
|
Python
|
brokenChains/migrations/0003_auto_20181106_1819.py
|
bunya017/brokenChains
|
3e20c834efd7f0ade8e3abe7acf547c093f76758
|
[
"MIT"
] | 1
|
2018-12-07T09:15:57.000Z
|
2018-12-07T09:15:57.000Z
|
brokenChains/migrations/0003_auto_20181106_1819.py
|
bunya017/brokenChains
|
3e20c834efd7f0ade8e3abe7acf547c093f76758
|
[
"MIT"
] | null | null | null |
brokenChains/migrations/0003_auto_20181106_1819.py
|
bunya017/brokenChains
|
3e20c834efd7f0ade8e3abe7acf547c093f76758
|
[
"MIT"
] | null | null | null |
# Generated by Django 2.1.1 on 2018-11-06 17:19
from django.conf import settings
from django.db import migrations
class Migration(migrations.Migration):
    """Enforce that a user cannot have two habits with the same name."""
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('brokenChains', '0002_auto_20181106_1723'),
    ]
    operations = [
        # Composite unique constraint: habit names are unique per owner.
        migrations.AlterUniqueTogether(
            name='habit',
            unique_together={('owner', 'name')},
        ),
    ]
| 22.8
| 66
| 0.64693
| 48
| 456
| 6
| 0.770833
| 0.069444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.089855
| 0.243421
| 456
| 19
| 67
| 24
| 0.744928
| 0.098684
| 0
| 0
| 1
| 0
| 0.119804
| 0.056235
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.153846
| 0
| 0.384615
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
0a03afbc022ab3ed1e3b4074455a3f3fdefc3a2e
| 1,189
|
py
|
Python
|
app/modules/ai_lab/migrations/0003_ailabcasestudy.py
|
nickmoreton/nhsx-website
|
2397d1308376c02b75323d30e6bc916af0daac9d
|
[
"MIT"
] | 50
|
2019-04-04T17:50:00.000Z
|
2021-08-05T15:08:37.000Z
|
app/modules/ai_lab/migrations/0003_ailabcasestudy.py
|
nickmoreton/nhsx-website
|
2397d1308376c02b75323d30e6bc916af0daac9d
|
[
"MIT"
] | 434
|
2019-04-04T18:25:32.000Z
|
2022-03-31T18:23:37.000Z
|
app/modules/ai_lab/migrations/0003_ailabcasestudy.py
|
nhsx-mirror/nhsx-website
|
2133b4e275ca35ff77f7d6874e809f139ec4bf86
|
[
"MIT"
] | 23
|
2019-04-04T09:52:07.000Z
|
2021-04-11T07:41:47.000Z
|
# Generated by Django 3.0.4 on 2020-07-14 11:00
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Create the ``AiLabCaseStudy`` page model (child of core.ArticlePage)."""
    dependencies = [
        ("core", "0026_auto_20200713_1535"),
        ("ai_lab", "0002_ailabusecase"),
    ]
    operations = [
        migrations.CreateModel(
            name="AiLabCaseStudy",
            fields=[
                (
                    "articlepage_ptr",
                    # Multi-table-inheritance parent link; doubles as the PK.
                    models.OneToOneField(
                        auto_created=True,
                        on_delete=django.db.models.deletion.CASCADE,
                        parent_link=True,
                        primary_key=True,
                        serialize=False,
                        to="core.ArticlePage",
                    ),
                ),
                (
                    "use_case",
                    # PROTECT: a use case cannot be deleted while case
                    # studies still reference it.
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.PROTECT,
                        to="ai_lab.AiLabUseCase",
                    ),
                ),
            ],
            options={"abstract": False,},
            bases=("core.articlepage", models.Model),
        ),
    ]
| 29
| 68
| 0.444071
| 91
| 1,189
| 5.659341
| 0.626374
| 0.062136
| 0.081553
| 0.128155
| 0.116505
| 0.116505
| 0
| 0
| 0
| 0
| 0
| 0.054348
| 0.458368
| 1,189
| 40
| 69
| 29.725
| 0.745342
| 0.037847
| 0
| 0.147059
| 1
| 0
| 0.127846
| 0.02014
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.058824
| 0
| 0.147059
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
0a114ea68c2fa1e2738f0d3ff99019e72e2ea941
| 1,074
|
py
|
Python
|
sitewebapp/migrations/0011_auto_20210130_0150.py
|
deucaleon18/debsoc-nitdgp-website
|
41bd6ade7f4af143ef34aff01848f830cc533add
|
[
"MIT"
] | 2
|
2020-12-05T05:34:56.000Z
|
2020-12-09T10:27:43.000Z
|
sitewebapp/migrations/0011_auto_20210130_0150.py
|
deucaleon18/debsoc-nitdgp-website
|
41bd6ade7f4af143ef34aff01848f830cc533add
|
[
"MIT"
] | 3
|
2021-06-28T16:47:23.000Z
|
2021-06-28T16:48:51.000Z
|
sitewebapp/migrations/0011_auto_20210130_0150.py
|
deucaleon18/debsoc-nitdgp-website
|
41bd6ade7f4af143ef34aff01848f830cc533add
|
[
"MIT"
] | 9
|
2021-01-29T17:06:30.000Z
|
2021-08-21T18:23:26.000Z
|
# Generated by Django 2.2.15 on 2021-01-29 20:20
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Replace the misspelled ``audtionRounds`` model with ``auditionRounds``."""
    dependencies = [
        ('sitewebapp', '0010_auditionanswers_auditionquestions_audtionrounds_candidates'),
    ]
    operations = [
        # Recreate the rounds model under the corrected name.
        migrations.CreateModel(
            name='auditionRounds',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('roundno', models.IntegerField(default=1)),
                ('candidate', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='inductees', to='sitewebapp.Candidates')),
            ],
        ),
        # Re-point questions at the new model before dropping the old one.
        migrations.AlterField(
            model_name='auditionquestions',
            name='round',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='round', to='sitewebapp.auditionRounds'),
        ),
        migrations.DeleteModel(
            name='audtionRounds',
        ),
    ]
| 34.645161
| 148
| 0.634078
| 102
| 1,074
| 6.558824
| 0.54902
| 0.047833
| 0.06278
| 0.098655
| 0.19133
| 0.19133
| 0.19133
| 0.19133
| 0.19133
| 0.19133
| 0
| 0.025862
| 0.243948
| 1,074
| 30
| 149
| 35.8
| 0.79803
| 0.042831
| 0
| 0.125
| 1
| 0
| 0.196881
| 0.106238
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.083333
| 0
| 0.208333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
0a1a359a4636f368d0f28057e4bf1af274c7fb79
| 3,332
|
py
|
Python
|
influxdb_service_sdk/model/container/resource_requirements_pb2.py
|
easyopsapis/easyops-api-python
|
adf6e3bad33fa6266b5fa0a449dd4ac42f8447d0
|
[
"Apache-2.0"
] | 5
|
2019-07-31T04:11:05.000Z
|
2021-01-07T03:23:20.000Z
|
influxdb_service_sdk/model/container/resource_requirements_pb2.py
|
easyopsapis/easyops-api-python
|
adf6e3bad33fa6266b5fa0a449dd4ac42f8447d0
|
[
"Apache-2.0"
] | null | null | null |
influxdb_service_sdk/model/container/resource_requirements_pb2.py
|
easyopsapis/easyops-api-python
|
adf6e3bad33fa6266b5fa0a449dd4ac42f8447d0
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: resource_requirements.proto
import sys
# Py2/Py3 shim: on Python 3 the serialized descriptor literals below are
# re-encoded to latin-1 bytes; on Python 2 they are used as-is.
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
# Default symbol table where the generated message class gets registered.
_sym_db = _symbol_database.Default()
# Cross-file dependency: the ResourceList message from resource_list.proto.
from influxdb_service_sdk.model.container import resource_list_pb2 as influxdb__service__sdk_dot_model_dot_container_dot_resource__list__pb2
# File-level descriptor; serialized_pb holds the compiled .proto content.
DESCRIPTOR = _descriptor.FileDescriptor(
  name='resource_requirements.proto',
  package='container',
  syntax='proto3',
  serialized_options=_b('ZCgo.easyops.local/contracts/protorepo-models/easyops/model/container'),
  serialized_pb=_b('\n\x1bresource_requirements.proto\x12\tcontainer\x1a\x38influxdb_service_sdk/model/container/resource_list.proto\"j\n\x14ResourceRequirements\x12\'\n\x06limits\x18\x01 \x01(\x0b\x32\x17.container.ResourceList\x12)\n\x08requests\x18\x02 \x01(\x0b\x32\x17.container.ResourceListBEZCgo.easyops.local/contracts/protorepo-models/easyops/model/containerb\x06proto3')
  ,
  dependencies=[influxdb__service__sdk_dot_model_dot_container_dot_resource__list__pb2.DESCRIPTOR,])
# Message descriptor for container.ResourceRequirements: two submessage
# fields, `limits` (field 1) and `requests` (field 2), both ResourceList.
_RESOURCEREQUIREMENTS = _descriptor.Descriptor(
  name='ResourceRequirements',
  full_name='container.ResourceRequirements',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='limits', full_name='container.ResourceRequirements.limits', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='requests', full_name='container.ResourceRequirements.requests', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=100,
  serialized_end=206,
)
# Resolve both fields to the cross-file ResourceList message type.
_RESOURCEREQUIREMENTS.fields_by_name['limits'].message_type = influxdb__service__sdk_dot_model_dot_container_dot_resource__list__pb2._RESOURCELIST
_RESOURCEREQUIREMENTS.fields_by_name['requests'].message_type = influxdb__service__sdk_dot_model_dot_container_dot_resource__list__pb2._RESOURCELIST
DESCRIPTOR.message_types_by_name['ResourceRequirements'] = _RESOURCEREQUIREMENTS
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
# Concrete message class, built at import time from the descriptor above.
ResourceRequirements = _reflection.GeneratedProtocolMessageType('ResourceRequirements', (_message.Message,), {
  'DESCRIPTOR' : _RESOURCEREQUIREMENTS,
  '__module__' : 'resource_requirements_pb2'
  # @@protoc_insertion_point(class_scope:container.ResourceRequirements)
  })
_sym_db.RegisterMessage(ResourceRequirements)
DESCRIPTOR._options = None
# @@protoc_insertion_point(module_scope)
| 40.144578
| 380
| 0.801921
| 396
| 3,332
| 6.358586
| 0.325758
| 0.02224
| 0.035743
| 0.038126
| 0.305798
| 0.289118
| 0.289118
| 0.250993
| 0.250993
| 0.250993
| 0
| 0.024228
| 0.095738
| 3,332
| 82
| 381
| 40.634146
| 0.811484
| 0.076831
| 0
| 0.258065
| 1
| 0.016129
| 0.165526
| 0.121538
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.096774
| 0
| 0.096774
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
0a3bec6c960ec5a80b8e4e32d4669b80255b605f
| 1,114
|
py
|
Python
|
app/rss_feeder_api/migrations/0003_auto_20200813_1623.py
|
RSaab/rss-scraper
|
9bf608878e7d08fea6508ae90b27f1c226b313f1
|
[
"MIT"
] | null | null | null |
app/rss_feeder_api/migrations/0003_auto_20200813_1623.py
|
RSaab/rss-scraper
|
9bf608878e7d08fea6508ae90b27f1c226b313f1
|
[
"MIT"
] | null | null | null |
app/rss_feeder_api/migrations/0003_auto_20200813_1623.py
|
RSaab/rss-scraper
|
9bf608878e7d08fea6508ae90b27f1c226b313f1
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.1 on 2020-08-13 16:23
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
    """Add created/updated timestamps to Entry and set default ordering."""
    dependencies = [
        ('rss_feeder_api', '0002_feed_subtitle'),
    ]
    operations = [
        # Newest-first ordering for both models; nicer admin names for Feed.
        migrations.AlterModelOptions(
            name='entry',
            options={'ordering': ('-updated_at',), 'verbose_name_plural': 'entries'},
        ),
        migrations.AlterModelOptions(
            name='feed',
            options={'ordering': ('-updated_at',), 'verbose_name': 'Feed', 'verbose_name_plural': 'Feeds'},
        ),
        # The one-off default backfills created_at for pre-existing rows.
        migrations.AddField(
            model_name='entry',
            name='created_at',
            field=models.DateTimeField(auto_now_add=True, default=django.utils.timezone.now),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='entry',
            name='updated_at',
            field=models.DateTimeField(auto_now=True),
        ),
        # Entries are unique per feed GUID.
        migrations.AlterUniqueTogether(
            name='entry',
            unique_together={('guid',)},
        ),
    ]
| 29.315789
| 107
| 0.577199
| 104
| 1,114
| 5.990385
| 0.528846
| 0.057785
| 0.060995
| 0.077047
| 0.333868
| 0.333868
| 0
| 0
| 0
| 0
| 0
| 0.022785
| 0.290844
| 1,114
| 37
| 108
| 30.108108
| 0.765823
| 0.0386
| 0
| 0.419355
| 1
| 0
| 0.172123
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.064516
| 0
| 0.16129
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
0a4049bea9cce33edfb9f0362df0cd2e91b7aa1a
| 335
|
py
|
Python
|
reo/migrations/0121_merge_20211001_1841.py
|
NREL/REopt_API
|
fbc70f3b0cdeec9ee220266d6b3b0c5d64f257a6
|
[
"BSD-3-Clause"
] | 7
|
2022-01-29T12:10:10.000Z
|
2022-03-28T13:45:20.000Z
|
reo/migrations/0121_merge_20211001_1841.py
|
NREL/reopt_api
|
fbc70f3b0cdeec9ee220266d6b3b0c5d64f257a6
|
[
"BSD-3-Clause"
] | 12
|
2022-02-01T18:23:18.000Z
|
2022-03-31T17:22:17.000Z
|
reo/migrations/0121_merge_20211001_1841.py
|
NREL/REopt_API
|
fbc70f3b0cdeec9ee220266d6b3b0c5d64f257a6
|
[
"BSD-3-Clause"
] | 3
|
2022-02-08T19:44:40.000Z
|
2022-03-12T11:05:36.000Z
|
# Generated by Django 3.1.13 on 2021-10-01 18:41
from django.db import migrations
class Migration(migrations.Migration):
    """Merge migration: joins three parallel branches; no schema changes."""
    dependencies = [
        ('reo', '0117_financialmodel_generator_fuel_escalation_pct'),
        ('reo', '0120_auto_20210927_2046'),
        ('reo', '0121_auto_20211012_0305')
    ]
    operations = [
    ]
| 20.9375
| 69
| 0.662687
| 40
| 335
| 5.275
| 0.85
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.198473
| 0.21791
| 335
| 15
| 70
| 22.333333
| 0.60687
| 0.137313
| 0
| 0
| 1
| 0
| 0.362369
| 0.33101
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.111111
| 0
| 0.444444
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
0a4b453e9f68bd48c8b434b43c7c61e7c47c248d
| 3,400
|
py
|
Python
|
modelflow/graph_viz_from_outputs.py
|
ModelFlow/modelflow
|
c2b720b2da8bb17462baff5c00bbe942644474b0
|
[
"MIT"
] | 6
|
2020-07-28T19:58:28.000Z
|
2021-05-01T18:51:37.000Z
|
modelflow/graph_viz_from_outputs.py
|
ModelFlow/modelflow
|
c2b720b2da8bb17462baff5c00bbe942644474b0
|
[
"MIT"
] | 81
|
2020-07-30T07:08:10.000Z
|
2021-07-28T02:17:43.000Z
|
modelflow/graph_viz_from_outputs.py
|
ModelFlow/modelflow
|
c2b720b2da8bb17462baff5c00bbe942644474b0
|
[
"MIT"
] | null | null | null |
import pandas as pd
import argparse
import json
try:
    from graphviz import Digraph
except ImportError:  # fix: was a bare `except:`, which masked unrelated errors
    print("Note: Optional graphviz not installed")
def generate_graph(df, graph_format='pdf'):
    """Render a ModelFlow dependency graph from a run-output DataFrame.

    Columns named ``state_<name>`` become ellipse nodes; every other column
    is assumed to look like ``<model>_state_<name>_<suffix>`` and contributes
    a box node plus edges whose direction follows the sign of the column's
    values (negative-only: state feeds model; positive-only: model feeds
    state; mixed: both directions).
    """
    g = Digraph('ModelFlow', filename='modelflow.gv', engine='neato', format=graph_format)
    g.attr(overlap='false')
    g.attr(splines='true')
    column_names = df.columns
    # Pass 1: state columns -> ellipse nodes.
    states = []
    g.attr('node', shape='ellipse')
    for col in column_names:
        if col[:6] == 'state_':
            states.append((col[6:], col))
            g.node(col[6:])
    # Pass 2: model columns -> box nodes keyed by the model prefix.
    models = []
    g.attr('node', shape='box')
    for col in column_names:
        if col[:6] != 'state_':
            models.append((col.split('_')[0], col))
            g.node(col.split('_')[0])
    # Pass 3: edge direction from the sign of each model column.
    for col in column_names:
        if col[:6] != 'state_':
            parts = col.split('_')
            state = '_'.join(parts[1:])[6:-7]
            lo = df[col].min()
            hi = df[col].max()
            print(parts[0], state, lo,
                  hi)
            if lo < 0 and hi <= 0:
                g.edge(state, parts[0])
            elif lo >= 0 and hi > 0:
                g.edge(parts[0], state)
            else:
                g.edge(parts[0], state)
                g.edge(state, parts[0])
    if graph_format == 'json':
        # TODO: THIS DOES NOT WORK FOR MULTIPLE MODELFLOWS
        with open('modelflow.gv.json', 'r') as f:
            return json.load(f)
    else:
        g.view()
def generate_react_flow_chart(outputs):
    """Build the react-flow chart description from raw ModelFlow outputs.

    ``outputs['output_states']`` maps column names to ``{'data': [...]}``
    records; these are collected into a DataFrame and delegated on.
    """
    frame = pd.DataFrame()
    for name, record in outputs['output_states'].items():
        frame[name] = record['data']
    return generate_react_flow_chart_from_df(frame)
def generate_react_flow_chart_from_df(df):
    """Translate run-output columns into a react-flow node/edge description.

    Returns ``{'nodes': [...], 'edges': [...]}`` where ``state_<name>``
    columns become 'elipse' nodes, model prefixes become 'box' nodes, and
    edge direction is inferred from the sign of each model column's values.
    """
    cols = df.columns
    nodes = {}
    # State columns ("state_<name>") become ellipse nodes keyed by name.
    for col in cols:
        if col.startswith('state_'):
            label = col[6:]
            nodes[label] = dict(name=label, kind='elipse')
    # Every other column contributes a box node for its model prefix.
    for col in cols:
        if not col.startswith('state_'):
            model = col.split('_')[0]
            nodes[model] = dict(name=model, kind='box')
    edges = []
    for col in cols:
        if col.startswith('state_'):
            continue
        pieces = col.split('_')
        model = pieces[0]
        state = '_'.join(pieces[1:])[6:-7]
        lo = df[col].min()
        hi = df[col].max()
        if lo < 0 and hi <= 0:
            edges.append([state, model, 'one_way'])   # strictly negative: state -> model
        elif lo >= 0 and hi > 0:
            edges.append([model, state, 'one_way'])   # strictly positive: model -> state
        else:
            edges.append([model, state, 'both'])      # mixed signs: bidirectional
    return dict(nodes=list(nodes.values()), edges=edges)
def main(args):
    """Entry point: load the CSV named on the command line and build the chart."""
    frame = pd.read_csv(args.output_file)
    # generate_graph(frame)  # optional graphviz rendering, intentionally disabled
    generate_react_flow_chart_from_df(frame)
if __name__ == '__main__':
    # CLI entry point: `python graph_viz_from_outputs.py -f <run_output.csv>`
    parser = argparse.ArgumentParser(description='Generate Graph Viz')
    parser.add_argument('-f', '--output_file', type=str,
                        help='The output file to generate a graph of', required=True)
    args = parser.parse_args()
    main(args)
| 32.380952
| 95
| 0.577941
| 446
| 3,400
| 4.188341
| 0.262332
| 0.192719
| 0.077088
| 0.04818
| 0.457173
| 0.413276
| 0.38651
| 0.314775
| 0.314775
| 0.314775
| 0
| 0.015783
| 0.273235
| 3,400
| 104
| 96
| 32.692308
| 0.740186
| 0.045
| 0
| 0.371795
| 1
| 0
| 0.087037
| 0
| 0
| 0
| 0
| 0.009615
| 0
| 1
| 0.051282
| false
| 0
| 0.051282
| 0
| 0.141026
| 0.025641
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
6a60c251c96da7b05351011b63ba88125eca7fb7
| 9,790
|
py
|
Python
|
sdk/python/pulumi_azure_native/storage/storage_account_static_website.py
|
sebtelko/pulumi-azure-native
|
711ec021b5c73da05611c56c8a35adb0ce3244e4
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_native/storage/storage_account_static_website.py
|
sebtelko/pulumi-azure-native
|
711ec021b5c73da05611c56c8a35adb0ce3244e4
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_native/storage/storage_account_static_website.py
|
sebtelko/pulumi-azure-native
|
711ec021b5c73da05611c56c8a35adb0ce3244e4
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['StorageAccountStaticWebsiteArgs', 'StorageAccountStaticWebsite']
@pulumi.input_type
class StorageAccountStaticWebsiteArgs:
    # NOTE: generated by the Pulumi SDK Generator — regenerate rather than
    # hand-editing; comments here are review annotations only.
    def __init__(__self__, *,
                 account_name: pulumi.Input[str],
                 resource_group_name: pulumi.Input[str],
                 error404_document: Optional[pulumi.Input[str]] = None,
                 index_document: Optional[pulumi.Input[str]] = None):
        """
        The set of arguments for constructing a StorageAccountStaticWebsite resource.
        :param pulumi.Input[str] account_name: The name of the storage account within the specified resource group.
        :param pulumi.Input[str] resource_group_name: The name of the resource group within the user's subscription. The name is case insensitive.
        :param pulumi.Input[str] error404_document: The absolute path to a custom webpage that should be used when a request is made which does not correspond to an existing file.
        :param pulumi.Input[str] index_document: The webpage that Azure Storage serves for requests to the root of a website or any sub-folder. For example, 'index.html'. The value is case-sensitive.
        """
        pulumi.set(__self__, "account_name", account_name)
        pulumi.set(__self__, "resource_group_name", resource_group_name)
        # Optional inputs are only recorded when the caller supplied them.
        if error404_document is not None:
            pulumi.set(__self__, "error404_document", error404_document)
        if index_document is not None:
            pulumi.set(__self__, "index_document", index_document)
    @property
    @pulumi.getter(name="accountName")
    def account_name(self) -> pulumi.Input[str]:
        """
        The name of the storage account within the specified resource group.
        """
        return pulumi.get(self, "account_name")
    @account_name.setter
    def account_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "account_name", value)
    @property
    @pulumi.getter(name="resourceGroupName")
    def resource_group_name(self) -> pulumi.Input[str]:
        """
        The name of the resource group within the user's subscription. The name is case insensitive.
        """
        return pulumi.get(self, "resource_group_name")
    @resource_group_name.setter
    def resource_group_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "resource_group_name", value)
    @property
    @pulumi.getter(name="error404Document")
    def error404_document(self) -> Optional[pulumi.Input[str]]:
        """
        The absolute path to a custom webpage that should be used when a request is made which does not correspond to an existing file.
        """
        return pulumi.get(self, "error404_document")
    @error404_document.setter
    def error404_document(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "error404_document", value)
    @property
    @pulumi.getter(name="indexDocument")
    def index_document(self) -> Optional[pulumi.Input[str]]:
        """
        The webpage that Azure Storage serves for requests to the root of a website or any sub-folder. For example, 'index.html'. The value is case-sensitive.
        """
        return pulumi.get(self, "index_document")
    @index_document.setter
    def index_document(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "index_document", value)
class StorageAccountStaticWebsite(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
account_name: Optional[pulumi.Input[str]] = None,
error404_document: Optional[pulumi.Input[str]] = None,
index_document: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
Enables the static website feature of a storage account.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] account_name: The name of the storage account within the specified resource group.
:param pulumi.Input[str] error404_document: The absolute path to a custom webpage that should be used when a request is made which does not correspond to an existing file.
:param pulumi.Input[str] index_document: The webpage that Azure Storage serves for requests to the root of a website or any sub-folder. For example, 'index.html'. The value is case-sensitive.
:param pulumi.Input[str] resource_group_name: The name of the resource group within the user's subscription. The name is case insensitive.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: StorageAccountStaticWebsiteArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Enables the static website feature of a storage account.
:param str resource_name: The name of the resource.
:param StorageAccountStaticWebsiteArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(StorageAccountStaticWebsiteArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
account_name: Optional[pulumi.Input[str]] = None,
error404_document: Optional[pulumi.Input[str]] = None,
index_document: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = StorageAccountStaticWebsiteArgs.__new__(StorageAccountStaticWebsiteArgs)
if account_name is None and not opts.urn:
raise TypeError("Missing required property 'account_name'")
__props__.__dict__["account_name"] = account_name
__props__.__dict__["error404_document"] = error404_document
__props__.__dict__["index_document"] = index_document
if resource_group_name is None and not opts.urn:
raise TypeError("Missing required property 'resource_group_name'")
__props__.__dict__["resource_group_name"] = resource_group_name
__props__.__dict__["container_name"] = None
super(StorageAccountStaticWebsite, __self__).__init__(
'azure-native:storage:StorageAccountStaticWebsite',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
        id: pulumi.Input[str],
        opts: Optional[pulumi.ResourceOptions] = None) -> 'StorageAccountStaticWebsite':
    """
    Get an existing StorageAccountStaticWebsite resource's state with the given name, id, and optional extra
    properties used to qualify the lookup.

    :param str resource_name: The unique name of the resulting resource.
    :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
    :param pulumi.ResourceOptions opts: Options for the resource.
    """
    # Merging the id into opts makes the constructor read the existing
    # resource instead of creating a new one (see _internal_init).
    opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

    # All properties start as None; the engine fills them from provider state.
    __props__ = StorageAccountStaticWebsiteArgs.__new__(StorageAccountStaticWebsiteArgs)

    __props__.__dict__["container_name"] = None
    __props__.__dict__["error404_document"] = None
    __props__.__dict__["index_document"] = None
    return StorageAccountStaticWebsite(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="containerName")
def container_name(self) -> pulumi.Output[str]:
    """
    The name of the container to upload blobs to.
    """
    # Output-only: set by the provider, never supplied by the caller.
    return pulumi.get(self, "container_name")
@property
@pulumi.getter(name="error404Document")
def error404_document(self) -> pulumi.Output[Optional[str]]:
    """
    The absolute path to a custom webpage that should be used when a request is made which does not correspond to an existing file.
    """
    return pulumi.get(self, "error404_document")
@property
@pulumi.getter(name="indexDocument")
def index_document(self) -> pulumi.Output[Optional[str]]:
    """
    The webpage that Azure Storage serves for requests to the root of a website or any sub-folder. For example, 'index.html'. The value is case-sensitive.
    """
    return pulumi.get(self, "index_document")
| 48.226601
| 199
| 0.674157
| 1,146
| 9,790
| 5.49651
| 0.140489
| 0.054136
| 0.066677
| 0.048897
| 0.653755
| 0.575806
| 0.548976
| 0.502461
| 0.480235
| 0.422448
| 0
| 0.008201
| 0.240245
| 9,790
| 202
| 200
| 48.465347
| 0.838666
| 0.309806
| 0
| 0.330645
| 1
| 0
| 0.130107
| 0.024199
| 0
| 0
| 0
| 0
| 0
| 1
| 0.137097
| false
| 0.008065
| 0.040323
| 0
| 0.258065
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
6a75c6bcf2a235fe76f46e51c4cc31283811626a
| 2,534
|
py
|
Python
|
simulation/dataset_G_1q_X_Z_N1.py
|
eperrier/QDataSet
|
383b38b9b4166848f72fac0153800525e66b477b
|
[
"MIT"
] | 42
|
2021-08-17T02:27:59.000Z
|
2022-03-26T16:00:57.000Z
|
simulation/dataset_G_1q_X_Z_N1.py
|
eperrier/QDataSet
|
383b38b9b4166848f72fac0153800525e66b477b
|
[
"MIT"
] | 1
|
2021-09-25T11:15:20.000Z
|
2021-09-27T04:18:25.000Z
|
simulation/dataset_G_1q_X_Z_N1.py
|
eperrier/QDataSet
|
383b38b9b4166848f72fac0153800525e66b477b
|
[
"MIT"
] | 6
|
2021-08-17T02:28:04.000Z
|
2022-03-22T07:11:48.000Z
|
##############################################
"""
This module generates the "G_1q_X_Z_N1" dataset: a single qubit with an
X-axis control Hamiltonian, a Z-axis drift term and Z-axis (type-1) noise.
"""
##############################################
# preamble
import numpy as np
from utilites import Pauli_operators, simulate, CheckNoise
################################################
# meta parameters
name = "G_1q_X_Z_N1"
################################################
# quantum parameters
dim = 2                                             # dimension of the system
Omega = 12                                          # qubit energy gap
static_operators = [0.5*Pauli_operators[3]*Omega]   # drift Hamiltonian (Z)
dynamic_operators = [0.5*Pauli_operators[1]]        # control Hamiltonian (X)
noise_operators = [0.5*Pauli_operators[3]]          # noise Hamiltonian (Z)
# Initial states of the qubit: density matrices for the six axial Bloch states.
initial_states = [
    np.array([[0.5, 0.5], [0.5, 0.5]]), np.array([[0.5, -0.5], [-0.5, 0.5]]),
    np.array([[0.5, -0.5j], [0.5j, 0.5]]), np.array([[0.5, 0.5j], [-0.5j, 0.5]]),
    np.array([[1, 0], [0, 0]]), np.array([[0, 0], [0, 1]])
]
measurement_operators = Pauli_operators[1:]         # measurement operators (X, Y, Z)
##################################################
# simulation parameters
T = 1               # evolution time
M = 1024            # number of time steps
num_ex = 10000      # number of examples
batch_size = 50     # batch size for TF
##################################################
# noise parameters
K = 2000            # number of noise realizations
noise_profile = [1] # noise type
###################################################
# control parameters
pulse_shape = "Gaussian"    # control pulse shape
num_pulses = 5              # number of pulses per sequence
####################################################
# Generate the dataset.
# Build the parameter dictionary explicitly instead of eval()-ing variable
# names: same keys, order and values, but safe, greppable and friendly to
# static analysis.
sim_parameters = {
    "name": name,
    "dim": dim,
    "Omega": Omega,
    "static_operators": static_operators,
    "dynamic_operators": dynamic_operators,
    "noise_operators": noise_operators,
    "measurement_operators": measurement_operators,
    "initial_states": initial_states,
    "T": T,
    "M": M,
    "num_ex": num_ex,
    "batch_size": batch_size,
    "K": K,
    "noise_profile": noise_profile,
    "pulse_shape": pulse_shape,
    "num_pulses": num_pulses,
}
CheckNoise(sim_parameters)
simulate(sim_parameters)
####################################################
| 56.311111
| 261
| 0.404893
| 228
| 2,534
| 4.359649
| 0.372807
| 0.030181
| 0.024145
| 0.024145
| 0.148893
| 0.123742
| 0.071429
| 0.071429
| 0.071429
| 0.071429
| 0
| 0.042815
| 0.327151
| 2,534
| 45
| 262
| 56.311111
| 0.540176
| 0.190608
| 0
| 0
| 1
| 0
| 0.108795
| 0.013681
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.08
| 0
| 0.08
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
6a7ebe45370c220d4cb3303c8715bdc2a5f264ae
| 7,074
|
py
|
Python
|
python/sdk/client/api/log_api.py
|
ashwinath/merlin
|
087a7fa6fb21e4c771d64418bd58873175226ca1
|
[
"Apache-2.0"
] | null | null | null |
python/sdk/client/api/log_api.py
|
ashwinath/merlin
|
087a7fa6fb21e4c771d64418bd58873175226ca1
|
[
"Apache-2.0"
] | null | null | null |
python/sdk/client/api/log_api.py
|
ashwinath/merlin
|
087a7fa6fb21e4c771d64418bd58873175226ca1
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
Merlin
API Guide for accessing Merlin's model management, deployment, and serving functionalities # noqa: E501
OpenAPI spec version: 0.7.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from client.api_client import ApiClient
class LogApi(object):
    """NOTE: This class is auto generated by the swagger code generator program.

    Do not edit the class manually.
    Ref: https://github.com/swagger-api/swagger-codegen
    """

    def __init__(self, api_client=None):
        # Fall back to a default-configured client when none is injected
        # (injection is useful for testing / custom configuration).
        if api_client is None:
            api_client = ApiClient()
        self.api_client = api_client

    def logs_get(self, name, pod_name, namespace, cluster, **kwargs):  # noqa: E501
        """Retrieve log from a container  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.logs_get(name, pod_name, namespace, cluster, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str name: (required)
        :param str pod_name: (required)
        :param str namespace: (required)
        :param str cluster: (required)
        :param str follow:
        :param str limit_bytes:
        :param str pretty:
        :param str previous:
        :param str since_seconds:
        :param str since_time:
        :param str tail_lines:
        :param str timestamps:
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Thin wrapper: always request only the response data, then delegate
        # to the *_with_http_info variant which does the actual work.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.logs_get_with_http_info(name, pod_name, namespace, cluster, **kwargs)  # noqa: E501
        else:
            (data) = self.logs_get_with_http_info(name, pod_name, namespace, cluster, **kwargs)  # noqa: E501
            return data

    def logs_get_with_http_info(self, name, pod_name, namespace, cluster, **kwargs):  # noqa: E501
        """Retrieve log from a container  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.logs_get_with_http_info(name, pod_name, namespace, cluster, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str name: (required)
        :param str pod_name: (required)
        :param str namespace: (required)
        :param str cluster: (required)
        :param str follow:
        :param str limit_bytes:
        :param str pretty:
        :param str previous:
        :param str since_seconds:
        :param str since_time:
        :param str tail_lines:
        :param str timestamps:
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """

        all_params = ['name', 'pod_name', 'namespace', 'cluster', 'follow', 'limit_bytes', 'pretty', 'previous', 'since_seconds', 'since_time', 'tail_lines', 'timestamps']  # noqa: E501
        # Framework-level keywords accepted in addition to endpoint parameters.
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Flatten kwargs into the locals() snapshot, rejecting any keyword
        # this endpoint does not understand.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method logs_get" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'name' is set
        if ('name' not in params or
                params['name'] is None):
            raise ValueError("Missing the required parameter `name` when calling `logs_get`")  # noqa: E501
        # verify the required parameter 'pod_name' is set
        if ('pod_name' not in params or
                params['pod_name'] is None):
            raise ValueError("Missing the required parameter `pod_name` when calling `logs_get`")  # noqa: E501
        # verify the required parameter 'namespace' is set
        if ('namespace' not in params or
                params['namespace'] is None):
            raise ValueError("Missing the required parameter `namespace` when calling `logs_get`")  # noqa: E501
        # verify the required parameter 'cluster' is set
        if ('cluster' not in params or
                params['cluster'] is None):
            raise ValueError("Missing the required parameter `cluster` when calling `logs_get`")  # noqa: E501

        collection_formats = {}

        path_params = {}

        # Every supplied parameter is sent as a query-string parameter.
        query_params = []
        if 'name' in params:
            query_params.append(('name', params['name']))  # noqa: E501
        if 'pod_name' in params:
            query_params.append(('pod_name', params['pod_name']))  # noqa: E501
        if 'namespace' in params:
            query_params.append(('namespace', params['namespace']))  # noqa: E501
        if 'cluster' in params:
            query_params.append(('cluster', params['cluster']))  # noqa: E501
        if 'follow' in params:
            query_params.append(('follow', params['follow']))  # noqa: E501
        if 'limit_bytes' in params:
            query_params.append(('limit_bytes', params['limit_bytes']))  # noqa: E501
        if 'pretty' in params:
            query_params.append(('pretty', params['pretty']))  # noqa: E501
        if 'previous' in params:
            query_params.append(('previous', params['previous']))  # noqa: E501
        if 'since_seconds' in params:
            query_params.append(('since_seconds', params['since_seconds']))  # noqa: E501
        if 'since_time' in params:
            query_params.append(('since_time', params['since_time']))  # noqa: E501
        if 'tail_lines' in params:
            query_params.append(('tail_lines', params['tail_lines']))  # noqa: E501
        if 'timestamps' in params:
            query_params.append(('timestamps', params['timestamps']))  # noqa: E501

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # Authentication setting
        auth_settings = ['Bearer']  # noqa: E501

        return self.api_client.call_api(
            '/logs', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None,  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
| 39.3
| 185
| 0.607577
| 837
| 7,074
| 4.946237
| 0.193548
| 0.050242
| 0.057488
| 0.055072
| 0.525604
| 0.454589
| 0.408696
| 0.390338
| 0.36715
| 0.342029
| 0
| 0.017327
| 0.290218
| 7,074
| 179
| 186
| 39.519553
| 0.80721
| 0.326124
| 0
| 0
| 1
| 0
| 0.221276
| 0.015197
| 0
| 0
| 0
| 0
| 0
| 1
| 0.033333
| false
| 0
| 0.044444
| 0
| 0.122222
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
6aa897704d8b8b96376b6c78aa9de27ecec18071
| 378
|
py
|
Python
|
app/django_first/news/migrations/0002_movies_year.py
|
vvuri/flask_pipeline
|
d3f283b8a6a6239e56d85e67dbe3edce55bcb980
|
[
"MIT"
] | null | null | null |
app/django_first/news/migrations/0002_movies_year.py
|
vvuri/flask_pipeline
|
d3f283b8a6a6239e56d85e67dbe3edce55bcb980
|
[
"MIT"
] | null | null | null |
app/django_first/news/migrations/0002_movies_year.py
|
vvuri/flask_pipeline
|
d3f283b8a6a6239e56d85e67dbe3edce55bcb980
|
[
"MIT"
] | null | null | null |
# Generated by Django 4.0.1 on 2022-01-19 23:58
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add the optional ``year`` field to the ``movies`` model."""

    dependencies = [
        ('news', '0001_initial'),
    ]

    operations = [
        migrations.AddField(
            model_name='movies',
            name='year',
            # Year stored as text (max 4 chars); null=True so existing rows
            # need no default value.
            field=models.CharField(max_length=4, null=True),
        ),
    ]
| 19.894737
| 60
| 0.582011
| 42
| 378
| 5.166667
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.075188
| 0.296296
| 378
| 18
| 61
| 21
| 0.740602
| 0.119048
| 0
| 0
| 1
| 0
| 0.07855
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.083333
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
6ac9be98a456dcdce40e3c4f391cc313ab62f054
| 13,522
|
py
|
Python
|
sdk/python/pulumi_google_native/healthcare/v1beta1/user_data_mapping.py
|
AaronFriel/pulumi-google-native
|
75d1cda425e33d4610348972cd70bddf35f1770d
|
[
"Apache-2.0"
] | 44
|
2021-04-18T23:00:48.000Z
|
2022-02-14T17:43:15.000Z
|
sdk/python/pulumi_google_native/healthcare/v1beta1/user_data_mapping.py
|
AaronFriel/pulumi-google-native
|
75d1cda425e33d4610348972cd70bddf35f1770d
|
[
"Apache-2.0"
] | 354
|
2021-04-16T16:48:39.000Z
|
2022-03-31T17:16:39.000Z
|
sdk/python/pulumi_google_native/healthcare/v1beta1/user_data_mapping.py
|
AaronFriel/pulumi-google-native
|
75d1cda425e33d4610348972cd70bddf35f1770d
|
[
"Apache-2.0"
] | 8
|
2021-04-24T17:46:51.000Z
|
2022-01-05T10:40:21.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
from ._inputs import *
__all__ = ['UserDataMappingArgs', 'UserDataMapping']
@pulumi.input_type
class UserDataMappingArgs:
    def __init__(__self__, *,
                 consent_store_id: pulumi.Input[str],
                 data_id: pulumi.Input[str],
                 dataset_id: pulumi.Input[str],
                 user_id: pulumi.Input[str],
                 location: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 project: Optional[pulumi.Input[str]] = None,
                 resource_attributes: Optional[pulumi.Input[Sequence[pulumi.Input['AttributeArgs']]]] = None):
        """
        The set of arguments for constructing a UserDataMapping resource.
        :param pulumi.Input[str] data_id: A unique identifier for the mapped resource.
        :param pulumi.Input[str] user_id: User's UUID provided by the client.
        :param pulumi.Input[str] name: Resource name of the User data mapping, of the form `projects/{project_id}/locations/{location_id}/datasets/{dataset_id}/consentStores/{consent_store_id}/userDataMappings/{user_data_mapping_id}`.
        :param pulumi.Input[Sequence[pulumi.Input['AttributeArgs']]] resource_attributes: Attributes of the resource. Only explicitly set attributes are displayed here. Attribute definitions with defaults set implicitly apply to these User data mappings. Attributes listed here must be single valued, that is, exactly one value is specified for the field "values" in each Attribute.
        """
        # Required inputs are stored unconditionally.
        pulumi.set(__self__, "consent_store_id", consent_store_id)
        pulumi.set(__self__, "data_id", data_id)
        pulumi.set(__self__, "dataset_id", dataset_id)
        pulumi.set(__self__, "user_id", user_id)
        # Optional inputs are stored only when explicitly provided.
        if location is not None:
            pulumi.set(__self__, "location", location)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if project is not None:
            pulumi.set(__self__, "project", project)
        if resource_attributes is not None:
            pulumi.set(__self__, "resource_attributes", resource_attributes)

    @property
    @pulumi.getter(name="consentStoreId")
    def consent_store_id(self) -> pulumi.Input[str]:
        return pulumi.get(self, "consent_store_id")

    @consent_store_id.setter
    def consent_store_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "consent_store_id", value)

    @property
    @pulumi.getter(name="dataId")
    def data_id(self) -> pulumi.Input[str]:
        """
        A unique identifier for the mapped resource.
        """
        return pulumi.get(self, "data_id")

    @data_id.setter
    def data_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "data_id", value)

    @property
    @pulumi.getter(name="datasetId")
    def dataset_id(self) -> pulumi.Input[str]:
        return pulumi.get(self, "dataset_id")

    @dataset_id.setter
    def dataset_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "dataset_id", value)

    @property
    @pulumi.getter(name="userId")
    def user_id(self) -> pulumi.Input[str]:
        """
        User's UUID provided by the client.
        """
        return pulumi.get(self, "user_id")

    @user_id.setter
    def user_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "user_id", value)

    @property
    @pulumi.getter
    def location(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "location")

    @location.setter
    def location(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "location", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        Resource name of the User data mapping, of the form `projects/{project_id}/locations/{location_id}/datasets/{dataset_id}/consentStores/{consent_store_id}/userDataMappings/{user_data_mapping_id}`.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def project(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "project")

    @project.setter
    def project(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "project", value)

    @property
    @pulumi.getter(name="resourceAttributes")
    def resource_attributes(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['AttributeArgs']]]]:
        """
        Attributes of the resource. Only explicitly set attributes are displayed here. Attribute definitions with defaults set implicitly apply to these User data mappings. Attributes listed here must be single valued, that is, exactly one value is specified for the field "values" in each Attribute.
        """
        return pulumi.get(self, "resource_attributes")

    @resource_attributes.setter
    def resource_attributes(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['AttributeArgs']]]]):
        pulumi.set(self, "resource_attributes", value)
class UserDataMapping(pulumi.CustomResource):
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 consent_store_id: Optional[pulumi.Input[str]] = None,
                 data_id: Optional[pulumi.Input[str]] = None,
                 dataset_id: Optional[pulumi.Input[str]] = None,
                 location: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 project: Optional[pulumi.Input[str]] = None,
                 resource_attributes: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['AttributeArgs']]]]] = None,
                 user_id: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        Creates a new User data mapping in the parent consent store.

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] data_id: A unique identifier for the mapped resource.
        :param pulumi.Input[str] name: Resource name of the User data mapping, of the form `projects/{project_id}/locations/{location_id}/datasets/{dataset_id}/consentStores/{consent_store_id}/userDataMappings/{user_data_mapping_id}`.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['AttributeArgs']]]] resource_attributes: Attributes of the resource. Only explicitly set attributes are displayed here. Attribute definitions with defaults set implicitly apply to these User data mappings. Attributes listed here must be single valued, that is, exactly one value is specified for the field "values" in each Attribute.
        :param pulumi.Input[str] user_id: User's UUID provided by the client.
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: UserDataMappingArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Creates a new User data mapping in the parent consent store.

        :param str resource_name: The name of the resource.
        :param UserDataMappingArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Dispatch between the two overloads above: args-object vs. keywords.
        resource_args, opts = _utilities.get_resource_args_opts(UserDataMappingArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)

    def _internal_init(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 consent_store_id: Optional[pulumi.Input[str]] = None,
                 data_id: Optional[pulumi.Input[str]] = None,
                 dataset_id: Optional[pulumi.Input[str]] = None,
                 location: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 project: Optional[pulumi.Input[str]] = None,
                 resource_attributes: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['AttributeArgs']]]]] = None,
                 user_id: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        # Shared initializer backing both __init__ overloads: validates the
        # options, enforces required inputs and registers the resource.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            # Default to this SDK's own version when the caller did not pin one.
            opts.version = _utilities.get_version()
        if opts.id is None:
            # No id supplied: we are creating a new resource, so assemble __props__.
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = UserDataMappingArgs.__new__(UserDataMappingArgs)

            # The following inputs are required unless rehydrating from a URN.
            if consent_store_id is None and not opts.urn:
                raise TypeError("Missing required property 'consent_store_id'")
            __props__.__dict__["consent_store_id"] = consent_store_id
            if data_id is None and not opts.urn:
                raise TypeError("Missing required property 'data_id'")
            __props__.__dict__["data_id"] = data_id
            if dataset_id is None and not opts.urn:
                raise TypeError("Missing required property 'dataset_id'")
            __props__.__dict__["dataset_id"] = dataset_id
            __props__.__dict__["location"] = location
            __props__.__dict__["name"] = name
            __props__.__dict__["project"] = project
            __props__.__dict__["resource_attributes"] = resource_attributes
            if user_id is None and not opts.urn:
                raise TypeError("Missing required property 'user_id'")
            __props__.__dict__["user_id"] = user_id
            # Output-only properties: populated by the provider after creation.
            __props__.__dict__["archive_time"] = None
            __props__.__dict__["archived"] = None
        super(UserDataMapping, __self__).__init__(
            'google-native:healthcare/v1beta1:UserDataMapping',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None) -> 'UserDataMapping':
        """
        Get an existing UserDataMapping resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        # Merging the id into opts makes the constructor read the existing
        # resource instead of creating a new one (see _internal_init).
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        # All properties start as None; the engine fills them from provider state.
        __props__ = UserDataMappingArgs.__new__(UserDataMappingArgs)

        __props__.__dict__["archive_time"] = None
        __props__.__dict__["archived"] = None
        __props__.__dict__["data_id"] = None
        __props__.__dict__["name"] = None
        __props__.__dict__["resource_attributes"] = None
        __props__.__dict__["user_id"] = None
        return UserDataMapping(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter(name="archiveTime")
    def archive_time(self) -> pulumi.Output[str]:
        """
        Indicates the time when this mapping was archived.
        """
        return pulumi.get(self, "archive_time")

    @property
    @pulumi.getter
    def archived(self) -> pulumi.Output[bool]:
        """
        Indicates whether this mapping is archived.
        """
        return pulumi.get(self, "archived")

    @property
    @pulumi.getter(name="dataId")
    def data_id(self) -> pulumi.Output[str]:
        """
        A unique identifier for the mapped resource.
        """
        return pulumi.get(self, "data_id")

    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        Resource name of the User data mapping, of the form `projects/{project_id}/locations/{location_id}/datasets/{dataset_id}/consentStores/{consent_store_id}/userDataMappings/{user_data_mapping_id}`.
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter(name="resourceAttributes")
    def resource_attributes(self) -> pulumi.Output[Sequence['outputs.AttributeResponse']]:
        """
        Attributes of the resource. Only explicitly set attributes are displayed here. Attribute definitions with defaults set implicitly apply to these User data mappings. Attributes listed here must be single valued, that is, exactly one value is specified for the field "values" in each Attribute.
        """
        return pulumi.get(self, "resource_attributes")

    @property
    @pulumi.getter(name="userId")
    def user_id(self) -> pulumi.Output[str]:
        """
        User's UUID provided by the client.
        """
        return pulumi.get(self, "user_id")
| 45.837288
| 400
| 0.654859
| 1,582
| 13,522
| 5.326802
| 0.112516
| 0.075709
| 0.071437
| 0.060045
| 0.702029
| 0.633915
| 0.59523
| 0.565088
| 0.546695
| 0.466477
| 0
| 0.000293
| 0.24205
| 13,522
| 294
| 401
| 45.993197
| 0.821934
| 0.278953
| 0
| 0.362245
| 1
| 0
| 0.111542
| 0.00786
| 0
| 0
| 0
| 0
| 0
| 1
| 0.142857
| false
| 0.005102
| 0.035714
| 0.020408
| 0.265306
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
6acb7ed968b97603aa5b744b910e0997b0f3f62d
| 561
|
py
|
Python
|
server/api/migrations/0002_auto_20201011_1053.py
|
ShahriarDhruvo/WebTech_Assignment2
|
845d198a91b1dcc8ed149362499754167fca419d
|
[
"MIT"
] | null | null | null |
server/api/migrations/0002_auto_20201011_1053.py
|
ShahriarDhruvo/WebTech_Assignment2
|
845d198a91b1dcc8ed149362499754167fca419d
|
[
"MIT"
] | null | null | null |
server/api/migrations/0002_auto_20201011_1053.py
|
ShahriarDhruvo/WebTech_Assignment2
|
845d198a91b1dcc8ed149362499754167fca419d
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.1.2 on 2020-10-11 10:53
from django.db import migrations, models
class Migration(migrations.Migration):
    """Adjust the ``author`` and ``deadline`` defaults on the ``task`` model."""

    dependencies = [
        ('api', '0001_initial'),
    ]

    operations = [
        migrations.AlterField(
            model_name='task',
            name='author',
            field=models.CharField(default='Anonymous', max_length=100),
        ),
        migrations.AlterField(
            model_name='task',
            name='deadline',
            # NOTE(review): the default is a fixed, naive datetime string
            # captured when the migration was generated — presumably an
            # intentional snapshot rather than "now"; confirm with the model.
            field=models.DateTimeField(default='2020-10-11 10:53'),
        ),
    ]
| 23.375
| 72
| 0.57041
| 58
| 561
| 5.448276
| 0.637931
| 0.037975
| 0.050633
| 0.063291
| 0.310127
| 0.234177
| 0
| 0
| 0
| 0
| 0
| 0.086957
| 0.30303
| 561
| 23
| 73
| 24.391304
| 0.721228
| 0.080214
| 0
| 0.352941
| 1
| 0
| 0.120623
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.058824
| 0
| 0.235294
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
End of preview. Expand
in Data Studio
README.md exists but content is empty.
- Downloads last month
- 10