index int64 0 1,000k | blob_id stringlengths 40 40 | code stringlengths 7 10.4M |
|---|---|---|
992,200 | 42d01edb896166a9e1819b41c978effa1b45eb49 | import sys
from collections import deque
dx=[-1,1,0,0]
dy=[0,0,-1,1]
q=deque()
def bfs(x, y):
    """Flood-fill the connected component of grid cell (x, y).

    Marks every reachable cell (4-connectivity) in the module-level
    ``visit`` grid, pulling work items from the shared deque ``q``.
    Bounds come from the module-level ``n`` (rows) and ``m`` (columns);
    only cells with ``arr`` value 1 are traversed.
    """
    q.append((x, y))
    visit[x][y] = 1
    while q:
        # popleft() returns the front element directly; the original
        # peeked q[0] and then discarded it with a separate popleft().
        front_x, front_y = q.popleft()
        for i in range(4):
            nx = dx[i] + front_x
            ny = dy[i] + front_y
            # chained comparison replaces the four-clause bounds check
            if 0 <= nx < n and 0 <= ny < m:
                if visit[nx][ny] == 0 and arr[nx][ny] == 1:
                    visit[nx][ny] = 1
                    q.append((nx, ny))
# Driver: read t test cases; for each, count connected components of 1s
# (classic "organic cabbage" style problem: k coordinates on an n x m grid).
t=int(input())
for i in range(0, t):
    cnt = 0
    # NOTE(review): `list` shadows the builtin; kept for byte-compatibility.
    list = []
    # m = width (columns), n = height (rows), k = number of occupied cells
    m,n,k=map(int, sys.stdin.readline().rstrip().split())
    arr=[[0]*(m) for _ in range(n)]
    visit=[[0]*(m) for _ in range(n)]
    for i in range(0, k):
        # input gives (x, y) as (column, row), hence arr[y][x]
        x, y=map(int, sys.stdin.readline().rstrip().split())
        arr[y][x]=1
    for i in range(0, n):
        for j in range(0, m):
            if arr[i][j]==1 and visit[i][j]==0:
                bfs(i,j)
                cnt=cnt+1
                # one entry appended per component, so len(list) == cnt
                # (indentation reconstructed from a whitespace-stripped dump --
                # TODO confirm append sits inside this if-branch)
                list.append(cnt)
    print(len(list))
992,201 | 649813e8dc97d5b1ec7ff9d4126f4c610f009b08 | from gpiozero import Button, LED
from datetime import datetime
pins = [
{
'buttonPin': 17 ,
'ledPin': 13 ,
},
{
'buttonPin': 22 ,
'ledPin': 26 ,
}
]
class Paddle:
    """A game paddle: one GPIO push button paired with one LED indicator."""

    def __init__(self, paddleId, buttonPressCb):
        """Wire up the button/LED pins for *paddleId* and store the press callback."""
        pin_config = pins[paddleId]
        self.id = paddleId
        self.button = Button(pin_config['buttonPin'])
        self.led = LED(pin_config['ledPin'])
        self.led.off()  # start with the indicator dark
        self.pressedAt = None  # timestamp of the most recent press
        self.buttonPressCb = buttonPressCb

    def enable(self):
        """Start forwarding button presses to the callback."""
        self.button.when_pressed = self.handleButtonPress
        print ("Paddle enabled")

    def disable(self):
        """Stop reacting to button presses."""
        self.button.when_pressed = None
        print ("Paddle disabled")

    def handleButtonPress(self):
        """Record the press time, then notify the registered callback."""
        self.pressedAt = datetime.now()  # must be set before the callback runs
        self.buttonPressCb(self)

    def on(self):
        """Light the paddle's LED."""
        self.led.on()

    def off(self):
        """Turn the paddle's LED off."""
        self.led.off()

    def pressed(self):
        """Return True while the physical button is held down."""
        return self.button.is_pressed
|
992,202 | 9fbf4cae7678b3eebe0c8266d52002b0f03811f6 | from django.contrib.auth.models import User
from django.core.exceptions import (
FieldError,
MultipleObjectsReturned,
ObjectDoesNotExist,
)
from django_celery_results.models import TaskResult
from rest_framework.exceptions import ValidationError
from rest_framework.serializers import ModelSerializer
from jobs.models import Departament, Group
__all__ = [
"NestedDepartamentSerializer",
"NestedGroupSerializer",
"NestedTaskResultSerializer",
"NestedUserSerializer",
]
class NestedDepartamentSerializer(ModelSerializer):
    """Serializer for Departament references embedded in other objects.

    On input, accepts either a numeric primary key or a dict of lookup
    attributes and resolves it to a Departament instance.
    """

    class Meta:
        model = Departament
        fields = ["id", "name"]

    def to_internal_value(self, data):
        """Resolve *data* (None, attribute dict, or numeric PK) to an instance.

        Raises ValidationError when the reference is ambiguous, cannot be
        found, or has an unrecognized shape.
        """
        if data is None:
            return None
        # Hoisted: the original assigned this identical queryset twice.
        queryset = self.Meta.model.objects
        if isinstance(data, dict):
            # Attribute-based lookup, e.g. {"name": "HR"}.
            try:
                return queryset.get(**data)
            except ObjectDoesNotExist:
                raise ValidationError(
                    "Related object not found using the provided attributes: {}".format(
                        data
                    )
                )
            except MultipleObjectsReturned:
                raise ValidationError(
                    "Multiple objects match the provided attributes: {}".format(data)
                )
            except FieldError as e:
                raise ValidationError(e)
        # Otherwise the reference must be (coercible to) a numeric primary key.
        if isinstance(data, int):
            pk = data
        else:
            try:
                pk = int(data)
            except (TypeError, ValueError):
                raise ValidationError(
                    "Related objects must be referenced by numeric ID or by dictionary of attributes. Received an "
                    "unrecognized value: {}".format(data)
                )
        try:
            # Reuse the already-parsed pk (the original re-ran int(data) here).
            return queryset.get(pk=pk)
        except ObjectDoesNotExist:
            raise ValidationError(
                "Related object not found using the provided numeric ID: {}".format(pk)
            )
class NestedGroupSerializer(ModelSerializer):
    """Minimal (id, name) representation of a Group when nested in another object."""

    class Meta:
        model = Group
        fields = ["id", "name"]
class NestedUserSerializer(ModelSerializer):
    """Minimal (id, username) representation of a User when nested in another object."""

    class Meta:
        model = User
        fields = ["id", "username"]
class NestedTaskResultSerializer(ModelSerializer):
    """Compact view of a celery TaskResult when nested in another object."""

    class Meta:
        model = TaskResult
        fields = ["id", "task_id", "status", "date_done", "result"]
|
992,203 | ed52f854ff91c22896cfd7cf59ed2817ebdfe51e | import tempfile
from PIL import Image
from django.contrib.auth import get_user_model
from django.test import TestCase
from django.urls import reverse
from rest_framework import status
from rest_framework.test import APIClient
from core import models
from staff import serializer
PROCEDURE_URL = reverse("staff:procedure-list")
def get_item_url(pk):
    """Return the procedure detail URL for *pk* (used for delete and update requests)."""
    return reverse('staff:procedure-detail', args=[pk])
def create_new_user():
    """Create and return a regular (non-staff) user with fixed test credentials."""
    return get_user_model().objects.create_user(
        email='test@gmail.com',
        password='test@londodnjisdjfois',
        username='tempusername'
    )
class PublicUserAPITests(TestCase):
    """Tests for public users"""

    def setUp(self):
        """Setup code for running public tests"""
        self.client = APIClient()  # unauthenticated client
        self.speciality = models.Speciality.objects.create(
            name='Speciality'
        )
        # Valid creation payload (only used to verify that POST is rejected).
        self.payload = {
            'name': "Knee Replacement",
            'speciality': [self.speciality.pk],
            'days_in_hospital': 2,
            'days_in_destination': 2,
            'duration_minutes': 120,
            'overview': '<strong>Bla</strong> bla bla',
        }

    def test_list_procedure_success_unauthenticated_user(self):
        """Test that list procedure is success"""
        p1 = models.Procedure.objects.create(
            name="procedure1",
            overview='bla bla bla'
        )
        p1.speciality.set([self.speciality.pk])
        p1.save()
        p2 = models.Procedure.objects.create(
            name="procedure2",
            overview='bla bla bla'
        )
        p2.speciality.set([self.speciality.pk])
        p2.save()
        res = self.client.get(PROCEDURE_URL)
        # Expected payload: serializer output ordered by name, descending.
        procedures = models.Procedure.objects.all().order_by("-name")
        ser = serializer.ProcedureSerializer(procedures, many=True)
        self.assertEqual(res.status_code, status.HTTP_200_OK)
        self.assertEqual(len(res.data), 2)
        self.assertEqual(res.data, ser.data)

    def test_unauthenticated_user_post_request_failure(self):
        """Test that post request fails for unauthenticated user"""
        res = self.client.post(PROCEDURE_URL, self.payload, format='json')
        self.assertEqual(res.status_code, status.HTTP_401_UNAUTHORIZED)

    def test_user_delete_procedure_failure(self):
        """Test that deleting procedure by unauthenticated user fails"""
        p1 = models.Procedure.objects.create(
            name='temp',
            overview='bla bla bla'
        )
        p1.speciality.set([self.speciality.pk])
        p1.save()
        res = self.client.get(PROCEDURE_URL)
        url = get_item_url(res.data[0]['id'])
        del_procedure = self.client.delete(url)
        self.assertEqual(del_procedure.status_code,
                         status.HTTP_401_UNAUTHORIZED)
class PrivateUserAPITests(TestCase):
    """Test for authenticated user"""

    def setUp(self):
        # Authenticated, non-staff user: may list but not mutate procedures.
        self.client = APIClient()
        self.user = create_new_user()
        self.client.force_authenticate(user=self.user)
        # NOTE(review): 'speciality' here is a plain string, unlike the
        # list-of-pks shape used elsewhere; it is only sent on requests that
        # are expected to be rejected -- confirm this is intentional.
        self.payload = {
            'name': 'payload',
            'speciality': 'orthopedics',
            'overview': 'bla bla bla'
        }
        self.speciality = models.Speciality.objects.create(
            name='Speciality'
        )

    def test_list_procedure_success_authenticated_user(self):
        """Test that list procedure is success"""
        p1 = models.Procedure.objects.create(
            name="procedure1",
            overview='bla bla bla'
        )
        p1.speciality.set([self.speciality.pk])
        p1.save()
        p2 = models.Procedure.objects.create(
            name="procedure2",
            overview='bla bla bla'
        )
        p2.speciality.set([self.speciality.pk])
        p2.save()
        res = self.client.get(PROCEDURE_URL)
        procedures = models.Procedure.objects.all().order_by("-name")
        ser = serializer.ProcedureSerializer(procedures, many=True)
        self.assertEqual(res.status_code, status.HTTP_200_OK)
        self.assertEqual(len(res.data), 2)
        self.assertEqual(res.data, ser.data)

    def test_authenticated_user_post_request_failure(self):
        """Test that post request fails for authenticated user"""
        res = self.client.post(PROCEDURE_URL, self.payload, format='json')
        self.assertEqual(res.status_code, status.HTTP_403_FORBIDDEN)

    def test_user_delete_procedure_failure(self):
        """Test that deleting procedure by user fails"""
        p1 = models.Procedure.objects.create(
            name='temp',
            overview='bla bla bla'
        )
        p1.speciality.set([self.speciality.pk])
        p1.save()
        res = self.client.get(PROCEDURE_URL)
        url = get_item_url(res.data[0]['id'])
        del_procedure = self.client.delete(url)
        self.assertEqual(del_procedure.status_code,
                         status.HTTP_403_FORBIDDEN)

    def test_user_update_procedure_failure(self):
        """Test that updating procedure by user fails"""
        p1 = models.Procedure.objects.create(
            name='temp',
            overview='bla bla bla'
        )
        p1.speciality.set([self.speciality.pk])
        p1.save()
        res = self.client.get(PROCEDURE_URL)
        url = get_item_url(res.data[0]['id'])
        new_payload = {
            'other_details': 'new details'
        }
        response = self.client.patch(url, new_payload, format='json')
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
class StaffAPITests(TestCase):
    """Tests for staff API"""

    def setUp(self):
        """Setup code for running all the tests"""
        self.staff = get_user_model().objects.create_user(
            email='staff@curesio.com',
            password='staffpassword1234',
            username='staffusername'
        )
        self.staff.is_staff = True
        self.staff.save()
        self.staff.refresh_from_db()
        self.client = APIClient()
        self.client.force_authenticate(user=self.staff)
        self.speciality = models.Speciality.objects.create(
            name='Speciality'
        )
        self.payload = {
            'name': "Knee Replacement",
            'speciality': [self.speciality.id],
            'overview': '<strong>Bla</strong> bla bla',
        }

    def test_list_procedure_success_staff(self):
        """Test that list procedure is success"""
        # FIX: this block previously sat directly inside setUp (its ``def``
        # line was missing), so its fixtures and assertions ran before every
        # test. Restored as a proper test method.
        p1 = models.Procedure.objects.create(
            name="procedure1",
            overview='bla bla bla'
        )
        p1.speciality.set([self.speciality.pk])
        p1.save()
        p2 = models.Procedure.objects.create(
            name="procedure2",
            overview='bla bla bla'
        )
        p2.speciality.set([self.speciality.pk])
        p2.save()
        res = self.client.get(PROCEDURE_URL)
        procedures = models.Procedure.objects.all().order_by("-name")
        ser = serializer.ProcedureSerializer(procedures, many=True)
        self.assertEqual(res.status_code, status.HTTP_200_OK)
        self.assertEqual(len(res.data), 2)
        self.assertEqual(res.data, ser.data)

    def test_create_valid_procedure_authenticated_staff_success(self):
        """Test creating valid procedure by staff success"""
        res = self.client.post(PROCEDURE_URL, self.payload, format='json')
        exists = models.Procedure.objects.filter(
            name=self.payload['name'].lower()
        ).exists()
        self.assertTrue(exists)
        self.assertEqual(res.status_code, status.HTTP_201_CREATED)
        self.assertEqual(res.data['name'],
                         self.payload['name'].lower())
        self.assertEqual(res.data['speciality'],
                         self.payload['speciality'])
        # Optional duration fields were omitted from the payload: expect None.
        self.assertEqual(res.data['days_in_hospital'], None)
        self.assertEqual(res.data['days_in_destination'], None)
        self.assertEqual(res.data['duration_minutes'], None)
        self.assertEqual(res.data['overview'], self.payload['overview'])
        self.assertEqual(res.data['other_details'], '')

    def test_create_invalid_procedure_authenticated_staff_failure(self):
        """Test creating invalid procedure by staff failure"""
        payload = {'name': ''}
        res = self.client.post(PROCEDURE_URL, payload, format='json')
        self.assertEqual(res.status_code, status.HTTP_400_BAD_REQUEST)

    def test_create_duplicate_procedure_failure(self):
        """Test that creating duplicate procedure fails"""
        self.client.post(PROCEDURE_URL, self.payload, format='json')
        res = self.client.post(PROCEDURE_URL, self.payload, format='json')
        self.assertEqual(res.status_code, status.HTTP_400_BAD_REQUEST)

    def test_staff_delete_procedure_success(self):
        """Test that deleting procedure by staff is success"""
        res = self.client.post(PROCEDURE_URL, self.payload, format='json')
        url = get_item_url(res.data['id'])
        del_procedure = self.client.delete(url)
        procedure_exists = models.Procedure.objects.filter(
            name=self.payload['name'].lower()
        ).exists()
        self.assertEqual(del_procedure.status_code, status.HTTP_204_NO_CONTENT)
        self.assertFalse(procedure_exists)

    def test_staff_update_procedure_success(self):
        """Test that updating procedure by staff is success"""
        res = self.client.post(PROCEDURE_URL, self.payload, format='json')
        url = get_item_url(res.data['id'])
        new_payload = {
            'other_details': 'new details'
        }
        response = self.client.patch(url, new_payload, format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['name'],
                         self.payload['name'].lower())
        self.assertEqual(response.data['other_details'],
                         new_payload['other_details'])

    def test_staff_update_duplicate_procedure_fails(self):
        """Test that updating procedure by duplicate content fails"""
        res = self.client.post(PROCEDURE_URL, self.payload, format='json')
        second_payload = {
            'name': 'abc',
            'speciality': [self.speciality.id],
            'overview': 'bla bla bla'
        }
        self.client.post(PROCEDURE_URL, second_payload, format='json')
        url = get_item_url(res.data['id'])
        # Renaming the first procedure to the second one's name must fail.
        new_payload = {
            'name': 'abc',
        }
        response = self.client.patch(url, new_payload, format='json')
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
class ProcedureImageUploadTests(TestCase):
    """Tests for uploading procedure picture"""

    def setUp(self):
        """Setup for running all the tests"""
        # NOTE(review): this uses create_doctor although the instance is then
        # promoted to staff -- confirm create_doctor (not create_user) is the
        # intended manager method here.
        self.staff = get_user_model().objects.create_doctor(
            email='temp@curesio.com',
            password='testpass@4',
            username='tempuser4'
        )
        self.staff.is_staff = True
        self.staff.save()
        self.staff.refresh_from_db()
        self.client = APIClient()
        self.client.force_authenticate(self.staff)
        self.speciality = models.Speciality.objects.create(
            name='Speciality1'
        )

    def test_procedure_picture_upload(self):
        """Test that uploading procedure picture is successful"""
        image_upload_url = PROCEDURE_URL
        # Build a tiny real JPEG on disk so the multipart upload is valid.
        with tempfile.NamedTemporaryFile(suffix='.jpg') as ntf:
            img = Image.new('RGB', (10, 10))
            img.save(ntf, format='JPEG')
            ntf.seek(0)  # rewind so the request body reads the file content
            payload = {
                'name': 'temp',
                'speciality': [self.speciality.pk],
                'image': ntf,
                'overview': 'bla bla bla'
            }
            res = self.client.post(
                image_upload_url,
                payload,
                format="multipart"
            )
        self.assertEqual(res.status_code, status.HTTP_201_CREATED)
        self.assertIn('image', res.data)

    def test_user_profile_picture_invalid_image_fails(self):
        """Test that invalid image upload fails"""
        image_upload_url = PROCEDURE_URL
        payload = {
            'name': 'temp',
            'speciality': [self.speciality.pk],
            'image': 'invalid image',  # plain string, not a file
            'overview': 'bla bla bla'
        }
        res = self.client.post(
            image_upload_url,
            payload,
            format="multipart"
        )
        self.assertEqual(res.status_code, status.HTTP_400_BAD_REQUEST)
|
992,204 | f94c3f9f9020d80bd169972e8eebe1dd8fa4c7d5 | import pandas as pd
import sys
# Clean a one-column CSV of comments (path in argv[1]) and write clean.csv.
df = pd.read_csv(sys.argv[1], names=["comment_text"])
# remove all the tagged users (@name), along with the hashtag symbol and quotes
# (raw string so the regex escape is not interpreted by Python first)
df["comment_text"] = df["comment_text"].replace({r"(@\w+)": ""}, regex=True)
# replace also the @ symbol
df["comment_text"] = df["comment_text"].replace({"@": ""}, regex=True)
df["comment_text"] = df["comment_text"].replace({"#": ""}, regex=True)
# FIX: regex=True is required to strip quotes *inside* the text; without it,
# Series.replace only touches cells whose entire value equals '"'.
df["comment_text"] = df["comment_text"].replace({'\"': " "}, regex=True)
# Replace control characters and non-ASCII characters with spaces.
df['comment_text'] = df["comment_text"].apply(lambda x: ''.join([" " if ord(i) < 32 or ord(i) > 126 else i for i in x]))
df.to_csv("clean.csv", index=False, header=False)
|
992,205 | 0c7b05dbb38965194763ce7446be07cf3655b0c0 | # EVERYTHING IN ONE SINGLE ARRAY
# TRIANGULAR NOISE
# ~6 TIMES SLOWER THAN 1 :(
import numpy as np
import matplotlib.pyplot as plt
class Network:
    """Rate-model network of "circuits", each pairing a head neuron with
    positive/negative error neurons, with winner-take-all competition inside
    named groups and slow triangular exploration noise.

    All per-circuit state lives in flat numpy arrays indexed by circuit
    number (hence the file header's "everything in one single array").
    Plastic weights form one masked (error_pairs, N, N) array; entries are
    unmasked by connect() and updated by a local rule in step().
    NOTE(review): higher-level descriptions below are inferred from the
    update equations -- confirm against the model specification.
    """
    def __init__(self, time_constant=20, error_pairs=2, normalize_weights=(0,), pos_error_to_head=(2, 0), neg_error_to_head=(0.3, 0),
                 learning_rate=0.01, dendrite_threshold=2/3, noise_max_amplitude=0.3, noise_rise_rate=0.0000002,
                 noise_fall_rate=0.0002, noise_fall_threshold=0.5, block_threshold=0.02, log_head=False, log_head_out=True,
                 log_neg_error=False, log_neg_error_diff=True, log_neg_error_out=True, log_pos_error_out=True, log_noise_amplitude=False,
                 log_weights=True):
        self.groups = {}  # group name -> (first circuit index, one-past-last index)
        self.group_sizes = []
        self.names = []  # per-circuit display names, filled in build()
        self.num_circuits = 0
        self.error_pairs = error_pairs
        self.normalize_weights = normalize_weights  # tuple of indices of the e-pairs where weights are normalized
        self.time_constant = time_constant
        self.fast_time_constant = time_constant/10  # faster dynamics for errors / WTA
        self.pos_error_to_head = pos_error_to_head
        self.neg_error_to_head = neg_error_to_head
        self.learning_rate = learning_rate
        self.k = 3  # constant for the activation function of head neurons
        # parameters for the dendritic nonlinearity
        self.dendrite_threshold = dendrite_threshold
        self.dendrite_slope = 1/(1 - self.dendrite_threshold)
        self.dendrite_offset = -self.dendrite_slope*self.dendrite_threshold
        # down counter for blocking learning when neg_error_diff is above block_threshold
        self.block_count = None
        self.block_threshold = block_threshold
        # State arrays are allocated in initialize(); logs only when enabled.
        self.head = None
        self.log_head = log_head
        if log_head: self.head_log = None
        self.head_out = None
        self.log_head_out = log_head_out
        if log_head_out: self.head_out_log = None
        self.neg_error = None
        self.log_neg_error = log_neg_error
        if log_neg_error: self.neg_error_log = None
        self.log_neg_error_diff = log_neg_error_diff
        if log_neg_error_diff: self.neg_error_diff_log = None
        self.neg_error_out = None
        self.log_neg_error_out = log_neg_error_out
        if log_neg_error_out: self.neg_error_out_log = None
        self.pos_error_out = None
        self.log_pos_error_out = log_pos_error_out
        if log_pos_error_out: self.pos_error_out_log = None
        self.noise_max_amplitude = noise_max_amplitude
        self.noise_amplitude = None
        self.log_noise_amplitude = log_noise_amplitude
        if log_noise_amplitude: self.noise_amplitude_log = None
        self.noise_period = 15*time_constant  # steps between triangular-noise anchor points
        self.noise_step_num = 0
        self.noise_previous = None
        self.noise = None
        self.noise_next = None
        self.noise_rise_rate = noise_rise_rate
        self.noise_fall_rate = noise_fall_rate
        self.noise_fall_threshold = noise_fall_threshold
        self.wta_weights = None
        self.wta_sum = None
        self.weights = None
        self.log_weights = log_weights
        if log_weights: self.weights_log = None
        self.weights_mask = None
        self.ones = None

    def add_group(self, name, num_circuits):
        """Reserve *num_circuits* consecutive circuit slots under *name*."""
        self.group_sizes.append(num_circuits)
        start = self.num_circuits
        self.num_circuits += num_circuits
        end = self.num_circuits
        self.groups[name] = (start, end)

    def build(self):
        """Finalize sizes: circuit names, WTA matrix and the weight mask.

        Call after all add_group() calls, before connect()/initialize().
        """
        # Construct a list of names and a matrix for the winner take all
        self.names = [None] * self.num_circuits
        self.wta_sum = np.zeros(len(self.groups))
        self.wta_weights = np.zeros((self.num_circuits, len(self.groups)))
        for group_num, (group_name, indices) in enumerate(self.groups.items()):
            self.wta_weights[indices[0]:indices[1], group_num] = 1
            for rel_index, abs_index in enumerate(range(indices[0], indices[1])):
                self.names[abs_index] = group_name + '_' + str(rel_index)
        # Initialize weights mask
        self.weights_mask = np.ones((self.error_pairs, self.num_circuits, self.num_circuits))
        self.ones = np.ones((1, 1, self.num_circuits))

    def connect(self, input_group, output_group, error_pair):
        """Enable plastic weights from *input_group* onto *output_group* for
        one error pair (mask value 0 = connection allowed)."""
        first_input, last_input = self.groups[input_group]
        first_output, last_output = self.groups[output_group]
        self.weights_mask[error_pair, first_output:last_output, first_input:last_input] = 0

    def initialize(self):
        """Allocate/zero all state arrays and logs; seed the noise anchors."""
        # Initialize activities
        self.head = np.zeros(self.num_circuits)
        if self.log_head: self.head_log = [self.head]
        self.head_out = np.zeros(self.num_circuits)
        if self.log_head_out: self.head_out_log = [self.head_out]
        self.neg_error = np.zeros((self.error_pairs, self.num_circuits))
        if self.log_neg_error: self.neg_error_log = [self.neg_error]
        if self.log_neg_error_diff: self.neg_error_diff_log = [self.neg_error]
        self.block_count = np.zeros((self.error_pairs, self.num_circuits))
        self.neg_error_out = np.zeros((self.error_pairs, self.num_circuits))
        if self.log_neg_error_out: self.neg_error_out_log = [self.neg_error_out]
        self.pos_error_out = np.zeros((self.error_pairs, self.num_circuits))
        if self.log_pos_error_out: self.pos_error_out_log = [self.pos_error_out]
        # Initialize weights
        self.weights = np.ma.masked_array(np.zeros((self.error_pairs, self.num_circuits, self.num_circuits)),
                                          self.weights_mask)
        if self.log_weights: self.weights_log = [self.weights]
        # Initialize noise variables
        self.noise_amplitude = np.ones(self.num_circuits)*self.noise_max_amplitude
        if self.log_noise_amplitude: self.noise_amplitude_log = [self.noise_amplitude]
        self.noise_previous = np.random.uniform(-self.noise_amplitude, self.noise_amplitude, self.num_circuits)
        self.noise = np.zeros(self.num_circuits)
        self.noise_next = np.random.uniform(-self.noise_amplitude, self.noise_amplitude, self.num_circuits)

    def dendrite_nonlinearity(self, input_values):
        """Piecewise-linear gate: zero below dendrite_threshold, then linearly
        rescaled so the output reaches 1 when the input is 1."""
        return np.where(input_values < self.dendrite_threshold, 0, input_values*self.dendrite_slope
                        + self.dendrite_offset)

    def slow_noise(self):
        """Return the current triangular-noise sample and advance its phase.

        Noise interpolates linearly between random anchors redrawn every
        noise_period steps; per-circuit amplitude creeps up and is pushed
        down while head output exceeds noise_fall_threshold.
        """
        # update noise_amplitude
        self.noise_amplitude = np.clip(self.noise_amplitude + self.noise_rise_rate
                                       - self.noise_fall_rate*(self.head_out > self.noise_fall_threshold),
                                       0, self.noise_max_amplitude)
        if self.log_noise_amplitude:
            self.noise_amplitude_log.append(self.noise_amplitude)
        alpha = self.noise_step_num / self.noise_period
        self.noise = (1-alpha)*self.noise_previous + alpha*self.noise_next
        self.noise_step_num += 1
        if self.noise_step_num == self.noise_period:
            self.noise_previous = self.noise_next
            self.noise_next = np.random.uniform(-self.noise_amplitude, self.noise_amplitude, self.num_circuits)
            self.noise_step_num = 0
        return self.noise

    def step(self, external_input):
        """Advance the whole network by one Euler integration step."""
        # WTA pool activity relaxes toward the summed head output per group.
        self.wta_sum += (-self.wta_sum + np.dot(self.head_out, self.wta_weights)) / self.fast_time_constant
        self.head = (self.head + (-self.head + 2*self.head_out - np.dot(self.pos_error_to_head, self.pos_error_out)
                                  + np.dot(self.neg_error_to_head, self.neg_error_out)
                                  - np.dot(self.wta_sum, self.wta_weights.T) + self.slow_noise()) / self.time_constant)
        if self.log_head: self.head_log.append(self.head)
        self.head_out = np.clip(np.tanh(self.k*self.head), 0, 1)
        if self.log_head_out: self.head_out_log.append(self.head_out)
        neg_error_diff = - self.neg_error
        neg_error_input = self.dendrite_nonlinearity(np.ma.inner(self.head_out, self.weights))
        self.neg_error = (self.neg_error + (-self.neg_error - self.head_out + external_input + neg_error_input)
                          / self.fast_time_constant)
        neg_error_diff += self.neg_error
        if self.log_neg_error: self.neg_error_log.append(self.neg_error)
        if self.log_neg_error_diff: self.neg_error_diff_log.append(neg_error_diff)
        # rapidly changing neg_error blocks learning for 3 time constants
        self.block_count = np.where(np.abs(neg_error_diff) > self.block_threshold, 3 * self.time_constant,
                                    np.maximum(self.block_count - 1, 0))
        self.neg_error_out = np.clip(self.neg_error, 0, 1)
        if self.log_neg_error_out: self.neg_error_out_log.append(self.neg_error_out)
        self.pos_error_out = np.clip(-self.neg_error, 0, 1)
        if self.log_pos_error_out: self.pos_error_out_log.append(self.pos_error_out)
        # Update weights
        weight_update = np.ma.inner(self.neg_error[:, :, np.newaxis], self.head_out[:, np.newaxis])
        weight_update[self.normalize_weights, :, :] += self.weights[self.normalize_weights, :, :]*neg_error_input[self.normalize_weights, :, np.newaxis]*(self.ones-self.head_out)
        weight_update = np.where(self.block_count[:, :, np.newaxis], 0, -self.learning_rate*weight_update)
        # weights are kept non-negative
        self.weights = np.clip(self.weights + weight_update, a_min=0, a_max=None)
        if self.log_weights: self.weights_log.append(self.weights)

    def run(self, num_steps, external_input):
        """Run step() *num_steps* times with a constant *external_input*."""
        for step_num in range(num_steps):
            self.step(external_input)

    def plot_traces(self):
        """Plot every enabled log: activities, weights, noise amplitude and
        the neg_error derivative traces."""
        print('plotting...')
        x_label = "Simulation steps"
        fig, ax = plt.subplots(self.num_circuits, self.error_pairs, sharex=True, sharey=True)
        fig.suptitle('Neuronal activities')
        for error_pair_num in range(self.error_pairs):
            for circuit_num in range(self.num_circuits):
                if self.log_head:
                    ax[circuit_num, error_pair_num].plot(np.array(self.head_log)[:, circuit_num], 'deepskyblue',
                                                         label=r'$' + self.names[circuit_num] + '.h_i$')
                if self.log_head_out:
                    ax[circuit_num, error_pair_num].plot(np.array(self.head_out_log)[:, circuit_num], 'b',
                                                         label=r'$' + self.names[circuit_num] + '.h$')
                if self.log_neg_error:
                    ax[circuit_num, error_pair_num].plot(np.array(self.neg_error_log)[:, error_pair_num, circuit_num],
                                                         'limegreen', label=r'$' + self.names[circuit_num] + '.n_i$')
                if self.log_neg_error_out:
                    ax[circuit_num, error_pair_num].plot(np.array(self.neg_error_out_log)[:, error_pair_num, circuit_num],
                                                         'g', label=r'$' + self.names[circuit_num] + '.n$')
                if self.log_pos_error_out:
                    ax[circuit_num, error_pair_num].plot(np.array(self.pos_error_out_log)[:, error_pair_num, circuit_num],
                                                         'r', label=r'$' + self.names[circuit_num] + '.p$')
                ax[circuit_num, error_pair_num].legend()
        for axes in ax[-1]:
            axes.set_xlabel(x_label)
        if self.log_weights:
            fig_w, ax_w = plt.subplots(self.num_circuits, self.error_pairs)
            fig_w.suptitle('Weights')
            for error_pair_num in range(self.error_pairs):
                for circuit_num in range(self.num_circuits):
                    empty = 1  # stays 1 when this circuit has no unmasked inputs
                    for input_num in range(self.num_circuits):
                        if not self.weights_mask[error_pair_num, circuit_num, input_num]:
                            label = r'$w_{' + self.names[input_num] + r'.h\rightarrow ' + self.names[circuit_num] \
                                    + '.p}$'
                            ax_w[circuit_num, error_pair_num].plot(np.array(self.weights_log)
                                                                   [:, error_pair_num, circuit_num, input_num],
                                                                   label=label)
                            empty = 0
                    if empty:
                        ax_w[circuit_num, error_pair_num].axis('off')
                    else:
                        ax_w[circuit_num, error_pair_num].set_ylim([-1.2, 1.2])
                        ax_w[circuit_num, error_pair_num].legend(loc='lower right', ncol=5)
            for axes in ax_w[-1]:
                axes.set_xlabel(x_label)
        if self.log_noise_amplitude:
            fig_n, ax_n = plt.subplots(len(self.group_sizes))
            fig_n.suptitle('Noise amplitude')
            circuit_num = 0
            for group_num, group_size in enumerate(self.group_sizes):
                for _ in range(group_size):
                    ax_n[group_num].plot(np.array(self.noise_amplitude_log)[:, circuit_num], label=r'$' + self.names[circuit_num] + '.a$')
                    circuit_num += 1
                ax_n[group_num].legend()
            ax_n[-1].set_xlabel(x_label)
        if self.log_neg_error_diff:
            fig_d, ax_d = plt.subplots(self.num_circuits, self.error_pairs)
            for error_pair_num in range(self.error_pairs):
                for circuit_num in range(self.num_circuits):
                    ax_d[circuit_num, error_pair_num].plot(np.array(self.neg_error_diff_log)[:, error_pair_num, circuit_num], label=r'$' + self.names[circuit_num] + '.n_diff$')
        plt.show()
|
992,206 | 1c77a7297417ba15739f4807ce53d7486dcebb81 | # -*- coding: utf-8 -*-
# @Author: Mr.Jhonson
# @Date: 2017-08-20 11:22:24
# @Last Modified by: Mr.Jhonson
# @Last Modified time: 2017-08-22 23:45:00
def get_number(s):
    """Return True if *s* can be parsed as a float, False otherwise.

    Uses exception handling (EAFP) to validate the argument; e.g. "+ 1"
    is rejected because of the embedded space.
    """
    try:
        float(s)
    except ValueError:
        return False
    return True


s = "+ 1"
print(get_number(s))
992,207 | 2008a41d93b650d87436b40b7202f5e87a83de6f | import cv2
import numpy as np
# Frame-differencing motion detector: highlight moving contours in red.
Video = cv2.VideoCapture('wild.mkv')
ok1, f1 = Video.read()
ok2, f2 = Video.read()
# FIX: check the read() success flags -- the original looped on isOpened()
# only, so at end of stream f2 became None and absdiff crashed.
while Video.isOpened() and ok1 and ok2:
    differenceOfFrames = cv2.absdiff(f1, f2)
    grayFrame = cv2.cvtColor(differenceOfFrames, cv2.COLOR_BGR2GRAY)
    blur = cv2.GaussianBlur(grayFrame, (5, 5), 0)
    _, thresh = cv2.threshold(blur, 20, 255, cv2.THRESH_BINARY)
    # dilate to merge nearby motion pixels into solid blobs
    dilated = cv2.dilate(thresh, None, iterations=3)
    contours, _ = cv2.findContours(dilated, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
    cv2.drawContours(f1, contours, -1, (0, 0, 255), 2)
    cv2.imshow("feed", f1)
    # slide the frame window forward by one
    ok1, f1 = True, f2
    ok2, f2 = Video.read()
    if cv2.waitKey(40) == 27:  # ESC quits
        break
cv2.destroyAllWindows()
Video.release()
992,208 | 99e39f979c637027c54b1991a5f6555964d125e7 | def parse_selector(selector):
rpl = selector & 0b11 # requested privileges level
ti = (selector >> 2) & 1 # table indicator
index = selector >> 3 # descriptor index
print(hex(selector) + ': ' + privileges(rpl) + ', ' + table(ti))
def privileges(flags):
    """Return the ring name for a requested privilege level (0-3).

    Raises KeyError for values outside 0-3.
    """
    return {
        0: 'ring 0',
        1: 'ring 1',
        2: 'ring 2',
        3: 'ring 3',
    }[flags]
def table(flags):
    """Return which descriptor table the selector refers to (0 = GDT, 1 = LDT)."""
    return {0: 'GDT', 1: 'LDT'}[flags]
if __name__ == "__main__":
    # Example selectors: kernel and user code/data segments.
    sample_selectors = (
        0b0000000000001000,  # kernel code
        0b0000000000010000,  # kernel data
        0b0000000000011011,  # user code
        0b0000000000100111,  # user data
    )
    for sel in sample_selectors:
        parse_selector(sel)
|
992,209 | 4ce10cbf8f1f8687640a86f6f2f56ab910a1b429 | # -*- coding: utf-8 -*-
from flask import request, redirect, session, jsonify
from database import clouddb
import pandas as pd
import hashlib
import time
import os
import datetime
# import minisql
def add_api_rouer(app):
    """Register the /api/minisql route on the Flask *app*."""
    @app.route('/api/minisql', methods=['GET'])
    def minisql():
        # SECURITY FIX: the previous implementation interpolated the
        # user-supplied ?sql= parameter into an os.popen shell command;
        # escaping only double quotes left it open to shell injection
        # (backticks, $(), backslashes, ...). Pass the argument as a list
        # with the shell disabled instead.
        import subprocess
        sql = request.args.get('sql', '')
        proc = subprocess.run(['sqlexecute', sql], capture_output=True, text=True)
        return proc.stdout
|
992,210 | 5d4099a38a5048c001d82bef590974dbc1d76841 | #!/usr/bin/env python3
import sys
import time
# Positional arguments: argv[2] is the output path, argv[3] the input path
# (argv[1] is reserved by the calling harness).
output_path = sys.argv[2]
input_path = sys.argv[3]

# Deliberately misbehave on specific examples, to exercise the judge/harness.
if "I001" in input_path:
    print("I crash on example I001!", file=sys.stderr)
    exit(1)

if "I002" in input_path:
    # FIX: the message previously said "I001" although this branch fires
    # for I002.
    print("I don't produce output on example I002!", file=sys.stderr)
    exit(0)
|
992,211 | 3b92b021d37c501d6126040893620335c792633b | from apispec import APISpec as API_Spec
from apispec.ext.marshmallow import MarshmallowPlugin as Marshmallow_Plugin
from falcon_apispec import FalconPlugin as Falcon_Plugin
from json import dumps
from pathlib import Path
from resource import tags as rc_tags
from utils.string import is_str
__all__ = [
'Spec'
]
class Spec:
    """Wrapper around an APISpec object that can dump the schema to disk."""

    def __init__(self, api, title, version):
        """Build the underlying OpenAPI 2.0 spec for the given falcon *api*."""
        plugin_list = [
            Falcon_Plugin(api),
            Marshmallow_Plugin(schema_name_resolver=self.__schema_name_resolver),
        ]
        self.obj = API_Spec(title=title, version=version, openapi_version='2.0',
                            produces=['application/json'], consumes=['application/json'],
                            tags=rc_tags, plugins=plugin_list)

    def get(self):
        """Return the wrapped APISpec instance."""
        return self.obj

    def write(self):
        """Write the spec as YAML and JSON under ../swagger/."""
        swagger_dir = Path(__file__).parent / '../swagger'
        with (swagger_dir / 'schema.yaml').open('w') as file:
            file.write(self.obj.to_yaml())
        with (swagger_dir / 'schema.json').open('w') as file:
            file.write(dumps(self.obj.to_dict(), indent=2))

    @staticmethod
    def __schema_name_resolver(schema):
        """Derive a schema reference name, stripping the '_Schema' suffix."""
        ref = schema if is_str(schema) else schema.__class__.__name__
        return ref.replace('_Schema', '')
|
992,212 | 5aa8521e90f8c747d06b41abfdd4b583c7e192b5 | # Generated by Django 2.2.3 on 2019-08-09 05:46
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated: drop the ``annotation_privacy`` field from ``annotation``."""

    dependencies = [
        ('userprofile', '0024_auto_20190804_0155'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='annotation',
            name='annotation_privacy',
        ),
    ]
|
992,213 | 809e42ee942c848bb3cdbb11d0aa55aa3230bcb2 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon Feb 11 15:53:27 2019
@author: mirjamheinemans
Individual traces of the animals during the whole test
TRAINING columns:
0.
1.MH33,
2.MH33_in_shelter
3.MH33_doorway
4.MH33_with_pellet
5.MH33_eat_pellet
6.MH33_freeze
7.MH33_reaching
8.MH33_scanning
9.MH33_new_pellet
TEST columns:
1.x-value33,
2.MH33_in_shelter
3.MH33_doorway
4.MH33_with_pellet
5.MH33_eat_pellet
6.MH33_freeze
7.MH33_reaching
8.MH33_scanning
9.MH33_stim
"""
import scipy.stats as ss
import csv
import numpy as np
import os, glob # Operating System
import pandas as pd
import seaborn as sns
import matplotlib.pyplot as plt
import json
os.chdir('/Users/mirjamheinemans/Desktop/Annotator python tryout') # to change directory Read csv files with Pandas
#%%
path_name = '/Users/mirjamheinemans/Desktop/Annotator python tryout/_Loom'
def Trace(file_names):
    """Load one animal's training.csv and mark the frames where it took a pellet.

    Reads columns [1, 4, 9] from <path_name>/<file_names>/training.csv
    (path_name is a module-level global set by the caller). Returns the
    DataFrame with an added 'took_pell<name>' column, or None for the
    macOS '.DS_Store' directory entry.
    NOTE(review): callers pd.concat() the result directly, so a None
    return will still break there -- consider skipping '.DS_Store' in the
    caller instead.
    """
    if file_names == '.DS_Store':
        # FIX: the original used the bare statement `next` here, which is a
        # no-op expression; make the early exit explicit.
        return None
    dataset = pd.read_csv(path_name + '/' + file_names + '/' + 'training.csv', usecols=[1, 4, 9])
    # Animals with ID > 45 were recorded at double frame rate:
    # downsample by averaging consecutive frame pairs.
    if int(file_names[2:]) > 45:
        dataset = dataset.groupby(np.arange(len(dataset.index)) // 2).mean()
    dataset['took_pell' + file_names] = 0
    # Frames where the with-pellet column increases = pellet pickups;
    # copy the x-position into the marker column at those frames.
    took_pell = dataset.iloc[:, 1].diff()[dataset.iloc[:, 1].diff() > 0].index.values
    dataset.iloc[took_pell, -1] = dataset.iloc[took_pell, 0]
    return dataset
#%%
'''in this for-loop i create a list of lists of lists with each animal on one line.'''
# NOTE(review): the four sections below are identical except for the source
# directory and target DataFrame -- good candidates for a shared helper.
path_name = '/Users/mirjamheinemans/Desktop/Annotator python tryout/_Loom'
columns = ['xpos']
index = range(900)
shelter_loom = pd.DataFrame(index = index, columns = columns)
for file_names in sorted(os.listdir(path_name)):
    print(file_names)
    animal = Trace(file_names)
    shelter_loom = pd.concat([shelter_loom, animal], axis=1)
# drop the placeholder column and the first row used to seed the frame
shelter_loom = shelter_loom.drop(columns = ['xpos'])
shelter_loom = shelter_loom.drop(index = 0)
#shelter_loom['frames']= range(len(shelter_loom))
#shelter_loom['sec'] =shelter_loom['frames']/60
#%%
path_name = '/Users/mirjamheinemans/Desktop/Annotator python tryout/_T_Loom'
columns = ['xpos']
index = range(900)
shelter_t_loom = pd.DataFrame(index = index, columns = columns)
for file_names in sorted(os.listdir(path_name)):
    print(file_names)
    animal = Trace(file_names)
    shelter_t_loom = pd.concat([shelter_t_loom, animal], axis=1)
shelter_t_loom = shelter_t_loom.drop(columns = ['xpos'])
shelter_t_loom = shelter_t_loom.drop(index = 0)
#shelter_t_loom['frames']= range(len(shelter_loom))
#shelter_t_loom['sec'] =shelter_t_loom['frames']/60
#%%
path_name = '/Users/mirjamheinemans/Desktop/Annotator python tryout/_T_Shock'
columns = ['xpos']
index = range(900)
shelter_t_shock = pd.DataFrame(index = index, columns = columns)
for file_names in sorted(os.listdir(path_name)):
    print(file_names)
    animal = Trace(file_names)
    shelter_t_shock = pd.concat([shelter_t_shock, animal], axis=1)
shelter_t_shock = shelter_t_shock.drop(columns = ['xpos'])
shelter_t_shock = shelter_t_shock.drop(index = 0)
#shelter_t_shock['frames']= range(len(shelter_t_shock))
#shelter_t_shock['sec'] =shelter_t_shock['frames']/60
#%%
path_name = '/Users/mirjamheinemans/Desktop/Annotator python tryout/_Tone'
columns = ['xpos']
index = range(900)
shelter_tone = pd.DataFrame(index = index, columns = columns)
for file_names in sorted(os.listdir(path_name)):
    print(file_names)
    animal = Trace(file_names)
    shelter_tone = pd.concat([shelter_tone, animal], axis=1)
shelter_tone = shelter_tone.drop(columns = ['xpos'])
shelter_tone = shelter_tone.drop(index = 0)
#shelter_tone['frames']= range(len(shelter_tone))
#shelter_tone['sec'] =shelter_tone['frames']/60
#%%
# Combine all four conditions side by side and add time columns (60 fps).
df = pd.concat([shelter_loom, shelter_t_shock, shelter_t_loom, shelter_tone], axis = 1)
df['frames']= range(len(df))
df['sec'] =df['frames']/60
#%%
'''Loom'''
#ax = sns.lineplot(x='sec', y='x-value33', data =df, color = 'black', zorder=1).set_title('Loom MH33')#Loom
#ax = sns.scatterplot(x = 'sec', y = 'took_pellMH033', data = df, color = 'c',s =150,zorder = 2)
#
#
#ax = sns.lineplot(x='sec', y='x-value34', data =df, color = 'black', zorder=1).set_title('Loom MH34')#Loom
#ax = sns.scatterplot(x = 'sec', y = 'took_pellMH034', data = df, color = 'c',s =150,zorder = 2)
#
#
#ax = sns.lineplot(x='sec', y='x-value46', data =df, color = 'black', zorder=1).set_title('Loom MH46')#Loom
#ax = sns.scatterplot(x = 'sec', y = 'took_pellMH046', data = df, color = 'c',s =150,zorder = 2)
#
#
#ax = sns.lineplot(x='sec', y='x-value59', data =df, color = 'black', zorder=1).set_title('Loom MH59')#Loom
#ax = sns.scatterplot(x = 'sec', y = 'took_pellMH059', data = df, color = 'c',s =150,zorder = 2)
#
#
#ax = sns.lineplot(x='sec', y='x-value60', data =df, color = 'black', zorder=1).set_title('Loom MH60')#Loom
#ax = sns.scatterplot(x = 'sec', y = 'took_pellMH060', data = df, color = 'c',s =150,zorder = 2)
#
#
#ax = sns.lineplot(x='sec', y='x-value65', data =df, color = 'black', zorder=1).set_title('Loom MH65')#Loom
#ax = sns.scatterplot(x = 'sec', y = 'took_pellMH065', data = df, color = 'c',s =150,zorder = 2)
#
#
#ax = sns.lineplot(x='sec', y='x-value66', data =df, color = 'orange', zorder=1).set_title('Loom MH66')#Loom
#ax = sns.scatterplot(x = 'sec', y = 'took_pellMH066', data = df, color = 'c',s =150,zorder = 2)
#
#
#'''Loom'''
#ax = sns.lineplot(x='sec', y='x-value85', data =df, color = 'gray', zorder = 1).set_title('Loom MH85')#Loom
#ax = sns.scatterplot(x = 'sec', y = 'took_pellMH085', data = df, color = 'g',s =150,zorder = 2)
#
#
#ax = sns.lineplot(x='sec', y='x-value86', data =df, color = 'gray', zorder = 1).set_title('Loom MH86')#Loom
#ax = sns.scatterplot(x = 'sec', y = 'took_pellMH086', data = df, color = 'g',s =150,zorder = 2)
#
#
#ax = sns.lineplot(x='sec', y='x-value87', data =df, color = 'gray', zorder = 1).set_title('Loom MH87')#Loom
#ax = sns.scatterplot(x = 'sec', y = 'took_pellMH087', data = df, color = 'g',s =150,zorder = 2)
#
#
#ax = sns.lineplot(x='sec', y='x-value88', data =df, color = 'gray', zorder = 1).set_title('Loom MH88')#Loom
#ax = sns.scatterplot(x = 'sec', y = 'took_pellMH088', data = df, color = 'g',s =150,zorder = 2)
#ax.set_xlim([0, 20])
#ax.set_ylim([0, 800])
#ax.plot([10 ,10], [0, 1200],color ='gray')
#ax.plot([1 ,1], [0, 1200],color ='gray')
# NOTE(review): every plotting call in this cell is commented out, so `ax`
# only exists if a previously-run cell defined it -- running this cell on a
# fresh interpreter raises NameError.
ax.set_ylabel('x-position (a.u.)')
ax.set_ylim([0, 1200])
#%%
'''Tone-Loom'''
#ax = sns.lineplot(x='sec', y='x-value43', data =df, color = 'blue',zorder = 1)#T-Loom
#ax = sns.scatterplot(x = 'sec', y = 'took_pellMH043', data = df, color = 'c',s =100,zorder = 2).set_title('Tone-Loom MH43')
#
#
#
#ax = sns.lineplot(x='sec', y='x-value44', data =df, color = 'orange',zorder = 1)#T-Loom
#ax = sns.scatterplot(x = 'sec', y = 'took_pellMH044', data = df, color = 'c',s =100,zorder = 2)#.set_title('Tone-Loom MH44')
#ax.set_xlim([0,350])
#ax.set_title('Tone-Loom MH44')
#
#ax = sns.lineplot(x='sec', y='x-value51', data =df, color = 'orange',zorder = 1)#T-Loom
#ax.set_ylabel('x-position (a.u.)')
#ax.set_ylim([0, 1200])
#ax = sns.scatterplot(x = 'sec', y = 'took_pellMH051', data = df, color = 'c',s =100,zorder = 2).set_title('Tone-Loom MH51')
#
#
#ax = sns.lineplot(x='sec', y='x-value52', data =df, color = 'blue',zorder = 1)#T-Loom
#ax = sns.scatterplot(x = 'sec', y = 'took_pellMH052', data = df, color = 'c',s =100,zorder = 2).set_title('Tone-Loom MH52')
#
#
#ax = sns.lineplot(x='sec', y='x-value53', data =df, color = 'blue',zorder = 1)#T-Loom
#ax = sns.scatterplot(x = 'sec', y = 'took_pellMH053', data = df, color = 'c',s =100,zorder = 2).set_title('Tone-Loom MH53')
#
#
#ax = sns.lineplot(x='sec', y='x-value63', data =df, color = 'orange',zorder = 1)#T-Loom
#ax = sns.scatterplot(x = 'sec', y = 'took_pellMH063', data = df, color = 'c',s =100,zorder = 2).set_title('Tone-Loom MH63')
#
#
#ax = sns.lineplot(x='sec', y='x-value64', data =df, color = 'orange',zorder = 1)#T-Loom
#ax = sns.scatterplot(x = 'sec', y = 'took_pellMH064', data = df, color = 'c',s =100,zorder = 2).set_title('Tone-Loom MH64')
#
#
#ax = sns.lineplot(x='sec', y='x-value70', data =df, color = 'orange',zorder = 1)#T-Loom
#ax = sns.scatterplot(x = 'sec', y = 'took_pellMH070', data = df, color = 'c',s =100,zorder = 2).set_title('Tone-Loom MH70')
#
#
#
#ax = sns.lineplot(x='sec', y='x-value71', data =df, color = 'orange',zorder = 1).set_title('Tone-Loom MH71')#T-Loom
#ax = sns.scatterplot(x = 'sec', y = 'took_pellMH071', data = df, color = 'c',s =100,zorder = 2)
#
#
#ax = sns.lineplot(x='sec', y='x-value78', data =df, color = 'blue',zorder = 1)#T-Loom
#ax = sns.scatterplot(x = 'sec', y = 'took_pellMH078', data = df, color = 'c',s =100,zorder = 2).set_title('Tone-Loom MH78')
##
##
#ax = sns.lineplot(x='sec', y='x-value92', data =df, color = 'blue',zorder = 1)#T-Loom
#ax = sns.scatterplot(x = 'sec', y = 'took_pellMH092', data = df, color = 'c',s =100,zorder = 2).set_title('Tone-Loom MH92')
#
#
#ax = sns.lineplot(x='sec', y='x-value96', data =df, color = 'orange',zorder = 1)#T-Loom
#ax = sns.scatterplot(x = 'sec', y = 'took_pellMH096', data = df, color = 'c',s =100,zorder = 2).set_title('Tone-Loom MH96')
#
#
#ax = sns.lineplot(x='sec', y='x-value101',data =df, color = 'orange',zorder = 1).set_title('Tone-Loom MH101')
#ax = sns.scatterplot(x = 'sec', y = 'took_pellMH101', data = df, color = 'c',s =100,zorder = 2).set_ylabel('x-position (a.u.)')#T-Loom
#ax = sns.lineplot([10 ,10], [0, 1200],color ='gray')
#ax.set_xlim([0, 20])
#ax.set_ylim([0, 800])
#ax.set_ylabel('x-position (a.u.)')
#ax.plot([10 ,10], [0, 1200],color ='gray')
#ax.plot([1 ,1], [0, 1200],color ='gray')
# NOTE(review): all seaborn calls in this Tone-Loom cell are commented out;
# `ax` must come from an earlier cell or this raises NameError.
ax.set_ylabel('x-position (a.u.)')
ax.set_ylim([0, 1200])
#%%
'''Tone-Shock'''
#ax = sns.lineplot(x='sec', y='x-value39', data =df, color = 'red',zorder = 1).set_title('Tone-Shock MH39')#T-Shock
#ax = sns.scatterplot(x = 'sec', y = 'took_pellMH039', data = df, color = 'b',s =100,zorder = 2)
#
#
# MH40: red x-position trace over time; blue dots mark pellet-taking frames.
# NOTE(review): lineplot(...).set_title() returns a Text object, so `ax` only
# becomes a usable Axes via the scatterplot call on the next line.
ax = sns.lineplot(x='sec', y='x-value40', data =df, color = 'red',zorder = 1).set_title('Tone-Shock MH40')#T-Shock
ax = sns.scatterplot(x = 'sec', y = 'took_pellMH040', data = df, color = 'b',s =100,zorder = 2)
#
#
#ax = sns.lineplot(x='sec', y='x-value49', data =df, color = 'red',zorder = 1).set_title('Tone-Shock MH49')#T-Shock
#ax = sns.scatterplot(x = 'sec', y = 'took_pellMH049', data = df, color = 'b',s =100,zorder = 2)
#
#
#ax = sns.lineplot(x='sec', y='x-value50', data =df, color = 'red',zorder = 1).set_title('Tone-Shock MH50')#T-Shock
#ax = sns.scatterplot(x = 'sec', y = 'took_pellMH050', data = df, color = 'b',s =100,zorder = 2)
#
#
#ax = sns.lineplot(x='sec', y='x-value55', data =df, color = 'red',zorder = 1).set_title('Tone-Shock MH55')#T-Shock
#ax = sns.scatterplot(x = 'sec', y = 'took_pellMH055', data = df, color = 'b',s =100,zorder = 2)
#
#
#ax = sns.lineplot(x='sec', y='x-value56', data =df, color = 'red',zorder = 1).set_title('Tone-Shock MH56')#T-Shock
#ax = sns.scatterplot(x = 'sec', y = 'took_pellMH056', data = df, color = 'b',s =100,zorder = 2)
#
#
#ax = sns.lineplot(x='sec', y='x-value67', data =df, color = 'orange',zorder = 1).set_title('Tone-Shock MH67')#T-Shock
#ax = sns.scatterplot(x = 'sec', y = 'took_pellMH067', data = df, color = 'b',s =100,zorder = 2)
#
##
#ax = sns.lineplot(x='sec', y='x-value68', data =df, color = 'red',zorder = 1).set_title('Tone-Shock MH68')#T-Shock
#ax = sns.scatterplot(x = 'sec', y = 'took_pellMH068', data = df, color = 'b',s =100,zorder = 2)
#
#
#
#
#'''Tone-Shock'''
#ax = sns.lineplot(x='sec', y='x-value73', data =df, color = 'red',zorder = 1).set_title('Tone-Shock MH73')#T-Shock
#ax = sns.scatterplot(x = 'sec', y = 'took_pellMH073', data = df, color = 'b',s =100,zorder = 2)
#
#ax = sns.lineplot(x='sec', y='x-value74', data =df, color = 'red',zorder = 1).set_title('Tone-Shock MH74')#T-Shock
#ax = sns.scatterplot(x = 'sec', y = 'took_pellMH074', data = df, color = 'b',s =100,zorder = 2)
#
#
#ax = sns.lineplot(x='sec', y='x-value79', data =df, color = 'red',zorder = 1).set_title('Tone-Shock MH79')#T-Shock
#ax = sns.scatterplot(x = 'sec', y = 'took_pellMH079', data = df, color = 'b',s =100,zorder = 2)
#
#
#ax = sns.lineplot(x='sec', y='x-value80', data =df, color = 'red',zorder = 1).set_title('Tone-Shock MH80')#T-Shock
#ax = sns.scatterplot(x = 'sec', y = 'took_pellMH080', data = df, color = 'b',s =100,zorder = 2)
#
#ax = sns.lineplot(x='sec', y='x-value84', data =df, color = 'red',zorder = 1).set_title('Tone-Shock MH84')#T-Shock
#ax = sns.scatterplot(x = 'sec', y = 'took_pellMH084', data = df, color = 'b',s =100,zorder = 2)
#
#
#ax = sns.lineplot(x='sec', y='x-value89', data =df, color = 'red',zorder = 1).set_title('Tone-Shock MH89')#T-Shock
#ax = sns.scatterplot(x = 'sec', y = 'took_pellMH089', data = df, color = 'b',s =100,zorder = 2)
#
#
#ax = sns.lineplot(x='sec', y='x-value90', data =df, color = 'red',zorder = 1).set_title('Tone-Shock MH90')#T-Shock
#ax = sns.scatterplot(x = 'sec', y = 'took_pellMH090', data = df, color = 'b',s =100,zorder = 2)
#
#
#ax = sns.lineplot(x='sec', y='x-value100',data =df, color = 'red',zorder = 1).set_title('Tone-Shock MH100')#T-Shock
#ax = sns.scatterplot(x = 'sec', y = 'took_pellMH100', data = df, color = 'b',s =100,zorder = 2)
#ax.set_xlim([0, 20])
#ax.set_ylim([0, 800])
#ax.plot([10 ,10], [0, 1200],color ='gray')
#ax.plot([1 ,1], [0, 1200],color ='gray')
# Label/limit the axes of the MH40 plot built earlier in this cell.
ax.set_ylabel('x-position (a.u.)')
ax.set_ylim([0, 1200])
#%%
'''Tone'''
#ax = sns.lineplot(x='sec', y='x-value37', data =df, color = 'orange', zorder = 1).set_title('Tone MH37')#Tone
#ax = sns.scatterplot(x = 'sec', y = 'took_pellMH037', data = df, color = 'b',s =100,zorder = 2)
#ax = sns.lineplot(x='sec', y='x-value38', data =df, color = 'green', zorder = 1).set_title('Tone MH38')#Tone
#ax = sns.scatterplot(x = 'sec', y = 'took_pellMH038', data = df, color = 'b',s =100,zorder = 2)
#
#
#ax = sns.lineplot(x='sec', y='x-value42', data =df, color = 'orange', zorder = 1).set_title('Tone MH42')#Tone
#ax = sns.scatterplot(x = 'sec', y = 'took_pellMH042', data = df, color = 'b',s =100,zorder = 2)
#
#
#ax = sns.lineplot(x='sec', y='x-value47', data =df, color = 'green', zorder = 1).set_title('Tone MH47')#Tone
#ax = sns.scatterplot(x = 'sec', y = 'took_pellMH047', data = df, color = 'b',s =100,zorder = 2)
##
#
#ax = sns.lineplot(x='sec', y='x-value48', data =df, color = 'green', zorder = 1).set_title('Tone MH48')#Tone
#ax = sns.scatterplot(x = 'sec', y = 'took_pellMH048', data = df, color = 'b',s =100,zorder = 2)
#
#
#ax = sns.lineplot(x='sec', y='x-value57', data =df, color = 'green', zorder = 1).set_title('Tone MH57')#Tone
#ax = sns.scatterplot(x = 'sec', y = 'took_pellMH057', data = df, color = 'b',s =100,zorder = 2)
#
#
#ax = sns.lineplot(x='sec', y='x-value58', data =df, color = 'orange', zorder = 1).set_title('Tone MH58')#Tone
#ax = sns.scatterplot(x = 'sec', y = 'took_pellMH058', data = df, color = 'b',s =100,zorder = 2)
#
#
#ax = sns.lineplot(x='sec', y='x-value61', data =df, color = 'green', zorder = 1).set_title('Tone MH61')#Tone
#ax = sns.scatterplot(x = 'sec', y = 'took_pellMH061', data = df, color = 'b',s =100,zorder = 2)
#
#
#ax = sns.lineplot(x='sec', y='x-value62', data =df, color = 'orange', zorder = 1).set_title('Tone MH62')#Tone
#ax = sns.scatterplot(x = 'sec', y = 'took_pellMH062', data = df, color = 'b',s =100,zorder = 2)
#
#
#
#ax = sns.lineplot(x='sec', y='x-value75', data =df, color = 'green', zorder = 1).set_title('Tone MH75')#Tone
#ax = sns.scatterplot(x = 'sec', y = 'took_pellMH075', data = df, color = 'b',s =100,zorder = 2)
#
#
#ax = sns.lineplot(x='sec', y='x-value76', data =df, color = 'orange', zorder = 1).set_title('Tone MH76')#Tone
#ax = sns.scatterplot(x = 'sec', y = 'took_pellMH076', data = df, color = 'b',s =100,zorder = 2)
#
#
#ax = sns.lineplot(x='sec', y='x-value81', data =df, color = 'orange', zorder = 1).set_title('Tone MH81')#Tone
#ax = sns.scatterplot(x = 'sec', y = 'took_pellMH081', data = df, color = 'b',s =100,zorder = 2)
#
#
#ax = sns.lineplot(x='sec', y='x-value82', data =df, color = 'green', zorder = 1).set_title('Tone MH82')#Tone
#ax = sns.scatterplot(x = 'sec', y = 'took_pellMH082', data = df, color = 'b',s =100,zorder = 2)
#
#
#ax = sns.lineplot(x='sec', y='x-value93', data =df, color = 'green', zorder = 1).set_title('Tone MH93')#Tone
#ax = sns.scatterplot(x = 'sec', y = 'took_pellMH093', data = df, color = 'b',s =100,zorder = 2)
#
#
# MH97: green x-position trace over time; blue dots mark pellet-taking frames.
# NOTE(review): .set_title() returns a Text, so `ax` is only an Axes after the
# scatterplot call below.
ax = sns.lineplot(x='sec', y='x-value97', data =df, color = 'green', zorder = 1).set_title('Tone MH97')#Tone
ax = sns.scatterplot(x = 'sec', y = 'took_pellMH097', data = df, color = 'b',s =100,zorder = 2)
#
#
#ax = sns.lineplot(x='sec', y='x-value98', data =df, color = 'orange', zorder = 1).set_title('Tone MH98')#Tone
#ax = sns.scatterplot(x = 'sec', y = 'took_pellMH098', data = df, color = 'b',s =100,zorder = 2)
#ax.set_xlim([0, 20])
#ax.set_ylim([0, 800])
#ax.plot([10 ,10], [0, 1200],color ='gray')
#ax.plot([1 ,1], [0, 1200],color ='gray')
# Label/limit the axes of the MH97 plot built earlier in this cell.
ax.set_ylabel('x-position (a.u.)')
ax.set_ylim([0, 1200])
#%%
|
992,214 | 2ce8bc334f47e5d88661320175515438c411b929 | from django.contrib import admin
from .models import Varient
admin.site.register(Varient) |
992,215 | d438f139f783c52a6cf74f53c96f73a064f0866b | import struct
from crc import compute
from crc import check
from consts import Consts
class Packet:
    # Wire format: [1B header CRC][1B payload CRC][header '=i??i'][payload, padded].
    # NOTE(review): written for Python 2 -- iterating a byte string yields
    # 1-char strings for ord(), and to_bytes pads with the str '0'. Confirm
    # before porting to Python 3.
    def __init__(self, seq_num,is_ack = False,is_valid = True):
        # data_length stays 0 until set_payload() is called.
        self.seq_num = seq_num
        self.is_ack = is_ack
        self.is_valid = is_valid
        self.data_length = 0
    @classmethod
    def from_bytes(cls, pack_bytes,include_payload= True):
        """Parse raw bytes into a Packet; returns None on any CRC mismatch."""
        # bin(x).zfill(10).replace('0b', '') yields an 8-char bit string:
        # zfill pads to 10 chars first, then stripping '0b' leaves 8 bits.
        crc_header = bin(ord(list(pack_bytes)[0])).zfill(10).replace('0b', '')
        # Header proper starts after the two CRC bytes; HEADER_SIZE from Consts.
        crc_header_input_string = cls.__get_crc_input_string__(pack_bytes[2:Consts.HEADER_SIZE])
        if not check(crc_header_input_string,crc_header):
            return None
        seq_num, is_ack, is_valid, data_length = struct.unpack('=i??i', pack_bytes[2:Consts.HEADER_SIZE])
        packet = cls(seq_num, is_ack, is_valid)
        packet.data_length = data_length
        if data_length and include_payload:
            data = pack_bytes[Consts.HEADER_SIZE:]
            crc_payload = bin(ord(list(pack_bytes)[1])).zfill(10).replace('0b', '')
            crc_payload_input_string = cls.__get_crc_input_string__(data)
            if not check(crc_payload_input_string,crc_payload):
                return None
            packet.payload = data
        return packet
    def set_payload(self,data):
        """Attach a payload and record its length in the header."""
        self.payload = data
        self.data_length = len(data)
    def to_bytes(self):
        """Serialize to wire format; CRCs are computed over bit-strings."""
        crc_payload_byte = struct.pack('B', 0)  # placeholder when there is no payload
        raw_bytes = struct.pack('=i??i',self.seq_num,self.is_ack,self.is_valid,self.data_length)
        crc_header_input_string = self.__get_crc_input_string__(raw_bytes)
        crc_header_code = compute(crc_header_input_string)
        crc_header_byte = struct.pack('B', int(crc_header_code, 2))
        if self.data_length:
            payload_bytes = struct.pack(str(self.data_length) + 's',self.payload)
            crc_payload_input_string = self.__get_crc_input_string__(payload_bytes)
            crc_payload_code = compute(crc_payload_input_string)
            crc_payload_byte = struct.pack('B', int(crc_payload_code, 2))
            # Pads with the character '0' (0x30), not NUL bytes -- Python 2
            # str concatenation; the receiver must tolerate this padding.
            raw_bytes += payload_bytes + (Consts.PAYLOAD_SIZE - len(payload_bytes)) * '0'
        return crc_header_byte + crc_payload_byte + raw_bytes
    @classmethod
    def __get_crc_input_string__(self, raw_bytes):
        # NOTE(review): declared @classmethod but the first arg is named `self`.
        # Concatenates each byte's 8-bit binary representation into one string.
        crc_input_string = ''.join(map(lambda byte: bin(ord(byte)).zfill(10).replace('0b', ''), list(raw_bytes)))
        return crc_input_string
|
992,216 | 2c4db94d759d42ca712ec8a42ca17d9fcfc2ec8c | #!/usr/bin/env python
# by Samuel Huckins
def main():
    """
    Print the ASCII codes for the passed text.

    NOTE(review): Python 2 script (raw_input, print statement) -- run under
    a Python 2 interpreter.
    """
    import sys
    plain = raw_input("Please enter the plaintext string you want to encode: ")
    print "Here are the ASCII codes for that text, space-separated:"
    for e in plain:
        # Trailing comma keeps the codes on one space-separated line.
        print ord(e),
    print
    # Block until the user acknowledges, then exit cleanly.
    if raw_input("Press RETURN to exit."):
        sys.exit(0)
if __name__ == '__main__':
    main()
|
992,217 | 70e1a4219ba60bea1226209d8664acf1cd6e6eeb | import math
import os
import random
import re
import sys
debug = True
def swap(s):
    """Remove s[0]'s partner pair from the list.

    Finds the next occurrence of s[0], pops it and s[0], and returns the
    reduced list plus the number of adjacent swaps needed to bring the
    partner next to s[0] (its index minus one).

    NOTE(review): if s[0] has no second occurrence the loop falls through
    with ind == len(s)-1 and silently removes the last element instead.
    """
    k = s.copy()
    for ind, v in enumerate(k):
        if v == s[0] and ind > 0:
            break
    k.pop(ind)
    k.pop(0)
    return k, ind - 1

def MinSwap(s):
    """Minimum adjacent swaps to place every value next to its partner.

    Returns "Error" for odd-length input (no full pairing possible).
    """
    if len(s) % 2 != 0:
        return "Error"
    elif len(s) == 0:
        return 0
    elif s[1] == s[0]:
        # First pair already adjacent -- costs nothing; recurse on the rest.
        return MinSwap(s[2:])
    else:
        k, times = swap(s)
        return times + MinSwap(k)
    # (The original had an unreachable bare `return` here -- removed.)
def main():
    """Read n (unused beyond consuming the line) and the list, print the answer."""
    int(input())  # declared length; the actual list comes on the next line
    values = list(map(int, input().split()))
    print(MinSwap(values))

if __name__ == "__main__":
    if debug:
        main()
    else:
        # Keep solving test cases until stdin runs dry.
        try:
            while True:
                main()
        except EOFError:
            exit()
992,218 | ca5417c3f2c299c69bb5a1b0bd8db0337560faad | ####### this is the first project of cv#######
################### VIRTUAL PAINT ##################
import cv2
import numpy as np
frameWidth = 640
frameHeight = 480
cap = cv2.VideoCapture(0)  # default webcam
cap.set(3,640)   # property 3 = CAP_PROP_FRAME_WIDTH
cap.set(4,480)   # property 4 = CAP_PROP_FRAME_HEIGHT
cap.set(10,100)  # property 10 = CAP_PROP_BRIGHTNESS
# Each entry is one detectable pen colour as HSV bounds:
# [h_min, s_min, v_min, h_max, s_max, v_max]
myColors = [[0,53,51,255,0,255],
            [133,56,0,159,156,255],
            [57,76,0,100,255,255]]
# Draw colour per entry above, in BGR order.
myColorValues =[[51,153,255],[255,0,255],[0,255,0]] ####bgr format
myPoints = []  ## accumulated detections: [x, y, colorId]
def findColor(img,myColors,myColorValues):
    """Return [x, y, colorId] for every configured colour detected in img.

    Also draws a filled marker on the global imgResult for each detection.
    """
    imgHSV = cv2.cvtColor(img, cv2.COLOR_BGR2HSV)
    count = 0
    newPoints = []
    for color in myColors:
        lower = np.array(color[0:3])
        upper = np.array(color[3:6])
        mask = cv2.inRange(imgHSV, lower, upper)
        x, y = getContours(mask)
        if x != 0 and y != 0:
            # Only mark/record real detections: the original drew a marker
            # unconditionally, leaving a stray dot when getContours
            # returned (0, 0) (= nothing found).
            cv2.circle(imgResult, (x, y), 15, myColorValues[count], cv2.FILLED)
            newPoints.append([x, y, count])
        count += 1
    return newPoints
def getContours(img):
    """Return (center-x, top-y) of the last large contour in a binary mask.

    Returns (0, 0) when nothing bigger than the area threshold is found.
    Also outlines accepted contours on the global imgResult.
    """
    contours , hierarchy = cv2.findContours(img,cv2.RETR_EXTERNAL,cv2.CHAIN_APPROX_NONE)
    x,y,w,h =0,0,0,0
    for cnt in contours:
        area = cv2.contourArea(cnt)
        if area>500:  # ignore small speckles/noise
            cv2.drawContours(imgResult, cnt, -1, (255, 0, 0), 3)
            peri = cv2.arcLength(cnt,True)
            approx = cv2.approxPolyDP(cnt,0.02*peri,True)
            x,y,w,h = cv2.boundingRect(approx)
    # Pen tip: horizontal middle of the bounding box, at its top edge.
    return x+w//2,y
def drawOnCanvas(myPoints,myColorValues):
    """Redraw every stored [x, y, colorId] point onto the global imgResult."""
    for px, py, colorId in myPoints:
        cv2.circle(imgResult, (px, py), 10, myColorValues[colorId], cv2.FILLED)
# Main loop: grab a frame, detect pen colours, accumulate and redraw points.
while True:
    success, img = cap.read()
    if img is None:
        break  # camera gone / stream ended
    imgResult = img.copy()
    newPoints = findColor(img, myColors, myColorValues)
    for point in newPoints:
        myPoints.append(point)
    if myPoints:
        drawOnCanvas(myPoints, myColorValues)
    cv2.imshow("Result", imgResult)
    # Quit on 'q'.
    if cv2.waitKey(1) & 0xFF == ord('q'):
        break
992,219 | fdc090133da4b769acc75af4860cda5bdb95b94b | # -*- coding: utf-8 -*-
# Generated by Django 1.11.10 on 2018-03-27 07:38
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated initial migration for the pis_product app.

    Creates Product, ProductDetail and PurchasedProduct. Do not hand-edit;
    add a follow-up migration for schema changes instead.
    """
    initial = True
    dependencies = [
        ('pis_retailer', '0001_initial'),
    ]
    operations = [
        migrations.CreateModel(
            name='Product',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=100)),
                ('brand_name', models.CharField(max_length=200)),
                ('retailer', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='retailer_product', to='pis_retailer.Retailer')),
            ],
        ),
        migrations.CreateModel(
            name='ProductDetail',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('updated_at', models.DateTimeField(auto_now=True)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('retail_price', models.DecimalField(decimal_places=2, default=0, max_digits=65)),
                ('consumer_price', models.DecimalField(decimal_places=2, default=0, max_digits=65)),
                ('available_item', models.IntegerField(default=1)),
                ('purchased_item', models.IntegerField(default=0)),
                ('product', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='product_detail', to='pis_product.Product')),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='PurchasedProduct',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('updated_at', models.DateTimeField(auto_now=True)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('manual_discount', models.DecimalField(decimal_places=2, default=0, max_digits=65)),
                ('discount_percentage', models.DecimalField(decimal_places=2, default=0, max_digits=65)),
                ('purchase_amount', models.DecimalField(decimal_places=2, default=0, max_digits=65)),
                ('product', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='purchased_product', to='pis_product.Product')),
            ],
            options={
                'abstract': False,
            },
        ),
    ]
|
992,220 | da8074eb94f3a29e996dd77fe02e89d977b4a909 | from __future__ import unicode_literals
from django.db import models
from django.utils import timezone
from django.contrib.auth.models import User
# Create your models here.
class Post(models.Model):
    """A user-authored blog post."""
    # NOTE(review): Django < 2.0 style -- ForeignKey requires an explicit
    # on_delete argument from Django 2.0 on; confirm the target version.
    author=models.ForeignKey(User)
    title=models.CharField(max_length=120,blank=True,null=True)
    text=models.TextField()
    # Evaluated at save time via the callable, not auto_now_add.
    created_time=models.DateTimeField(default=timezone.now)
    def __unicode__(self):
        # Python 2 string representation (consistent with this file's era).
        return self.title
|
992,221 | 9bcaf2f5ca2dfa0881ffe9f1eec814d2bf69b390 | import pymysql
import os, sys
from datetime import date
import queries
# Table prefix permits the script to be run repeatedly with results confined
# to a namespace; defaults to a date-based prefix when TBLPREFIX is not set.
if 'TBLPREFIX' in os.environ:
    tbl_prefix = str(os.environ['TBLPREFIX']) + "_"
else:
    tt = date.today().timetuple()
    tbl_prefix = "{}{}{}_".format(tt[0], tt[1], tt[2])
    print("Be sure to export TBLPREFIX to your ENV!")
    print("export TBLPREFIX=" + str(tbl_prefix))

try:
    db_host = os.environ['DBHOST']
    db_user = os.environ['DBUSERNAME']
    db_pass = os.environ['DBPASSWORD']
    db_name = os.environ['DBNAME']
except KeyError:
    # Was a bare `except:`, which also swallowed SystemExit/KeyboardInterrupt
    # and any unrelated error; only a missing env var belongs here.
    print("This script requires DBHOST, DBUSERNAME, DBPASSWORD, and DBNAME environment variables.")
    sys.exit(1)

print("Creating tables...\n")

db = pymysql.connect(host=db_host, port=3306, user=db_user, passwd=db_pass, db=db_name)
try:
    cursor = db.cursor()
except Exception as e:
    print("\n{}\n".format(e))
    db.close()
    sys.exit(1)

# Each queries.<name>(prefix) returns the CREATE TABLE statement for that table.
tables = ['sql_tblogingest', 'sql_tblogcut', 'sql_tbrequestdigest', 'sql_tbcutbreakout']
for table in tables:
    create_query = getattr(queries, table)(tbl_prefix)
    try:
        print("Executing query: {}".format(create_query))
        cursor.execute(create_query)
        print()
    except Exception as e:
        print("\n{}\n".format(e))
        db.close()
        sys.exit(1)

db.close()
print("If you made it here,\n\tall of your tables were created.")
|
992,222 | 85a5da167fdc01a2286f082b92b892daac4ef4ec | # -*- coding:utf-8 -*-
"""
Handle the image
"""
from scipy import misc
import numpy as np
def zip_block(block_img):
    """Zip the image block to a vector (columns concatenated, Fortran order)."""
    # One C-level pass instead of repeated Python list concatenation, which
    # was O(n^2) in the number of columns. Output is identical: column 0's
    # rows, then column 1's rows, etc., as a plain Python list.
    return block_img.flatten(order='F').tolist()
def unzip_block(vector, K):
    """Unzip the vector back to a square image block (inverse of zip_block).

    The vector holds the block's columns concatenated, K elements per column.
    """
    block_dim = int(len(vector)/K)
    block_img = np.zeros((block_dim, block_dim), dtype='uint8')
    for i in range(block_dim):
        # BUG FIX: was `block_img[: i] = ...` (a row slice -- empty for i=0,
        # raising ValueError); the intent is to fill column i.
        block_img[:, i] = vector[i*K: (i+1)*K]
    return block_img
def devide_block(img, K):
    """Divide the image into KxK blocks; each output row is one zipped block."""
    src_row, src_col = img.shape[0], img.shape[1]
    n_blocks = int(src_row * src_col / (K * K))
    blocks_down = int(src_row / K)
    blocks_across = int(src_col / K)
    # One row per sub-block, K*K values each (column-major via zip_block).
    devided_img = np.zeros((n_blocks, K * K), dtype='uint8')
    for bi in range(blocks_down):
        for bj in range(blocks_across):
            sub = img[bi * K:(bi + 1) * K, bj * K:(bj + 1) * K]
            devided_img[bi * blocks_across + bj, :] = zip_block(sub)
    return devided_img
def undevide_block(res_data, K):
    """Reassemble an image from block rows (inverse of devide_block).

    Assumes the original image was square: both dimensions are recovered as
    sqrt(num_blocks * K * K).
    """
    block_num = res_data.shape[0]
    row = int(np.sqrt(block_num*K*K))
    col = row
    block_row = int(row/K)
    block_col = int(col/K)
    res_img = np.zeros((row, col), dtype='uint8')
    for i in range(block_row):
        for j in range(block_col):
            # NOTE(review): devide_block strides by blocks-per-row-of-columns
            # (its resize_col); i*block_row+j only matches because row == col
            # here -- would break for non-square inputs.
            block_img = unzip_block(res_data[i*block_row+j], K)
            res_img[i*K: (i+1)*K, j*K: (j+1)*K] = block_img
    return res_img
def normalize_sample(sample_data):
    """Scale 8-bit sample data into [0, 1] as float32."""
    MAX_PIXEL = 255
    return np.array(sample_data, dtype='float32') / MAX_PIXEL
def unnormalize_sample(sample_data):
    """Map normalized [0, 1] array data back to rounded 8-bit values."""
    scaled = np.around(sample_data * 255)
    return scaled.astype('uint8')
def read_image(path):
    """Use misc to read the image.

    NOTE(review): scipy.misc.imread was deprecated and removed in SciPy >= 1.2;
    imageio.imread is the documented replacement if SciPy is ever upgraded.
    """
    img = misc.imread(path)
    return img
def save_image(path, data):
    """Save the image data.

    NOTE(review): scipy.misc.imsave was removed in SciPy >= 1.2 as well;
    imageio.imwrite is the documented replacement.
    """
    misc.imsave(path, data)
|
992,223 | 165d280cc4c2396ed18885b7169894749746eec2 | # Generated by Django 2.2.14 on 2020-07-14 09:55
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: points the FilePathFields at absolute media
    directories on the deployment host. Do not hand-edit; add a follow-up
    migration for further changes.
    """
    dependencies = [
        ('mainapp', '0001_initial'),
    ]
    operations = [
        migrations.AlterField(
            model_name='entity',
            name='doc_delivered',
            field=models.FilePathField(blank=True, path='/home/vafedorchenko/DBU-dev_back/media/doc/'),
        ),
        migrations.AlterField(
            model_name='entity',
            name='label',
            field=models.FilePathField(blank=True, path='/home/vafedorchenko/DBU-dev_back/media/labels/'),
        ),
    ]
|
from django.shortcuts import get_object_or_404, redirect, render

from .filters import *
from .forms import *
from .models import *
# Create your views here.
def index(request):
    """Task list page: filterable list plus an inline creation form."""
    tasks = Task.objects.all()
    taskform = TaskForm()
    # SearchForm is a FilterSet: narrows the queryset using the GET params.
    search = SearchForm(request.GET,queryset=tasks)
    tasks = search.qs
    if request.method =="POST":
        taskform = TaskForm(request.POST)
        if taskform.is_valid():
            taskform.save()
            # Post/Redirect/Get to avoid duplicate submits on refresh.
            return redirect('/')
    context = {
        'tasks': tasks,
        'taskform':taskform,
        'search':search
    }
    return render(request,'task/list.html',context)
def updateTask(request, pk):
    """Edit an existing task: render the form on GET, save on valid POST."""
    # 404 for an unknown pk instead of an unhandled DoesNotExist (HTTP 500).
    items = get_object_or_404(Task, id=pk)
    taskform = TaskForm(instance=items)
    if request.method == 'POST':
        taskform = TaskForm(request.POST, instance=items)
        if taskform.is_valid():
            taskform.save()
            return redirect('/')
    context = {'items': items, 'taskform': taskform}
    return render(request, 'task/Update.html', context)
def deleteTask(request,pk):
    """Confirm deletion on GET, delete on POST, then return to the list."""
    # 404 for an unknown pk instead of an unhandled DoesNotExist (HTTP 500).
    item = get_object_or_404(Task, id=pk)
    if request.method == "POST":
        item.delete()
        return redirect('/')
    context = {'item': item}
    return render(request, 'task/delete.html', context)
992,225 | 0c1aa592d5d982634f9c8971e14be28e136053e9 | import torchvision.models as models
print(models.) |
992,226 | 1b19dda061a5321edb4dbbf0eb2a9b74476151c5 | import pandas as pd
if __name__ == '__main__':
    # Attach the UCI "image segmentation" header, tidy the dtypes, and write
    # the result out as a compressed CSV.
    names = [
        'region-centroid-col',
        'region-centroid-row',
        'region-pixel-count',
        'short-line-density-5',
        'short-line-density-2',
        'vedge-mean',
        'vegde-sd',
        'hedge-mean',
        'hedge-sd',
        'intensity-mean',
        'rawred-mean',
        'rawblue-mean',
        'rawgreen-mean',
        'exred-mean',
        'exblue-mean',
        'exgreen-mean',
        'value-mean',
        'saturation-mean',
        'hue-mean',
        'category',
    ]
    class_names = {
        1: 'brickface',
        2: 'sky',
        3: 'foliage',
        4: 'cement',
        5: 'window',
        6: 'path',
        7: 'grass',
    }
    df = pd.read_csv('segment.dat', sep=' ', header=None)
    df.columns = names
    # The pixel count is constant per region and carries no information.
    df = df.drop(columns='region-pixel-count')
    df = df.astype({'region-centroid-col': int, 'region-centroid-row': int})
    df['category'] = df['category'].map(class_names)
    df.to_csv('segment.csv.zip', index=False)
|
992,227 | d982ea6af68a3b3df03eb4884d902422687ea13c | def utasok():
a = [1, 2, 3]
for ertek in a:
yield ertek
yield ertek * 2
# Drive the generator: prints 1 2 2 4 3 6, one value per line.
x = utasok()
for utas in x:
    print(utas)
def kipakol():
    """Yield (product, count) pairs for the stocked items."""
    hozott_termek = {"udito": 12, "torta": 4, "kave": 20, "harcsa": 50}
    yield from hozott_termek.items()
# Unpack and print each (product, count) pair as it is yielded.
for termek, darab in kipakol():
    print(termek, darab)
|
992,228 | 1fea94822a33d589a6b598e49ee3f6642ade7543 | # We need to import `request` to access the details of the POST request
# and `render_template`, to render our templates (form and response).
# We'll use `url_for` to get some URLs for the app in the templates.
from flask import Flask, render_template, request, url_for
import unicodecsv
import random
import os
import thread
import subprocess
import glob
import fnmatch
import shutil
from moviepy.audio.io import AudioFileClip
from operator import itemgetter
#from pydub import AudioSegment
import timeit
os.chdir('/home/audio_labeler/')  # all relative paths below hang off this root
## Returns duration of any media file using ffprobe
def media_duration(media_path):
    # With '-of csv=p=0' ffprobe prints only the duration in seconds.
    return float(subprocess.check_output(['ffprobe', '-v', 'quiet', '-of', 'csv=p=0', '-show_entries', 'format=duration', media_path]).strip())
## Creating a list of file IDs in the "clips" directory
#complete_clip_ids = list(set([item.split('____')[0] for item in os.listdir('clips') if '____' in item]))
## Creating a list of every media file in the "media" directory
media_paths=[]
for root, dirnames, filenames in os.walk('/home/audio_labeler/media'):
    for filename in fnmatch.filter(filenames, '*'):
        media_paths.append(os.path.join(root, filename))
# Keep only audio/video files, judged by extension.
media_paths = [item for item in media_paths if item.lower()[-4:] in ('.mp3','.mp4','.wav')]
# Initialize the Flask application
app = Flask(__name__)
app.config['SEND_FILE_MAX_AGE_DEFAULT'] = 0  # disable static-file caching while labeling
label_count_dict = {}  # custom classname -> times applied this session
# Define a route for the default URL, which loads the form
@app.route('/',methods=['POST','GET'])
def form():
    """Record the label submitted for the previous clip (if any), then pick a
    random 5-second window from a random media file and serve it for labeling.

    NOTE(review): Python 2 code (`thread` import, list-returning `map`);
    the bare `except:` blocks below silently swallow *all* errors, including
    programming mistakes -- narrow them when this is next touched.
    """
    ## Dealing with form data from the last round, if applicable
    try:
        classname=request.form['classname']
        # The pressed button is either 'Apply Label' (use the typed class)
        # or one of the default buttons (use the button text itself).
        if request.form['button'] == 'Apply Label':
            write_classname = classname
        else:
            write_classname = request.form['button']
        audio_file_id=request.form['audio_file_id']
        start_time=request.form['start_time']
        output_csv_path='/home/audio_labeler/output_table.csv'
        with open(output_csv_path,'a') as fo:
            label_duration = 1
            fo.write(audio_file_id + ',' + \
            str(float(start_time)) + ',' + \
            str(float(label_duration)) + ',' + \
            '''"''' + write_classname + '''"\n''')
        # Mirror the CSV into static/ so it can be downloaded via the app.
        shutil.copy(output_csv_path,'/home/audio_labeler/static/output/output_table.csv')
    except:
        classname=''
    # Clean up the previous round's temporary WAV clip, if it exists.
    try:
        os.remove('/home/audio_labeler/static/' + request.form['temp_wav_filename'])
    except:
        pass
    default_buttons=["Background Speaker","Music","Silence","Multiple Speakers","Not Sure", "Noise",""]
    # Tally custom labels only; EAFP increment-then-initialize on KeyError.
    try:
        if write_classname not in default_buttons:
            label_count_dict[write_classname] += 1
    except:
        try:
            if write_classname not in default_buttons:
                label_count_dict[write_classname] = 1
        except: pass
    # Python 2: map() returns a list here; sort labels by count, descending.
    label_counts = map(list, label_count_dict.items())
    label_counts = sorted(label_counts, key=itemgetter(1))[::-1]
    ## Launching new round
    #audio_filename=random.choice([item for item in os.listdir('/home/audio_labeler/clips') if item[-4:].lower() in ('.mp3','.wav','.mp4')])
    media_path = random.choice(media_paths)
    audio_file_id = media_path.split('/')[-1][:-4]
    duration = media_duration(media_path)
    # Random window start, keeping at least 5 s of material after it.
    start_time = int((random.random()*(duration-5.0)))
    snd = AudioFileClip.AudioFileClip(media_path)
    # Random filename avoids clashes between concurrent labelers.
    temp_wav_filename = str(random.random())[2:] + '.wav'
    snd.subclip(start_time,start_time + 5).write_audiofile('/home/audio_labeler/static/' + temp_wav_filename)
    response = render_template('form_audio.html', audio_file_id=audio_file_id, \
    start_time=start_time, classname=classname, temp_wav_filename=temp_wav_filename, \
    media_path=media_path, label_counts=label_counts[:15])
    return response
# Run the app
if __name__ == '__main__':
app.run(threaded=True,
host="0.0.0.0",
port=int("8000")
)
|
"""
Blurring operations: mean blur, median blur, and custom (filter2D) blur.

All are built on discrete convolution; each kernel yields a different
effect, and "blur" is simply one visible manifestation of convolution.
"""
from numpy import *
import cv2 as cv
import numpy as np
#定义添加椒盐噪声的函数
def SaltAndPepper(src,percetage):
SP_NoiseImg=src
SP_NoiseNum=int(percetage*src.shape[0]*src.shape[1])
for i in range(SP_NoiseNum):
randX=random.randint(0, src.shape[0]-1)
randY=random.randint(0, src.shape[1]-1)
if random.randint(0, 1) == 0:
SP_NoiseImg[randX, randY] = 0
else:
SP_NoiseImg[randX, randY] = 255
return SP_NoiseImg
def blur_demo(image):
    """Mean (box) blur demo: average each pixel over a 50x50 neighbourhood."""
    averaged = cv.blur(image, (50, 50))
    cv.imshow("blur_demo", averaged)
def median_blur_demo(image):
    """Median blur demo (aperture 5): effective against salt-and-pepper noise."""
    filtered = cv.medianBlur(image, 5)
    cv.imshow("median_blur_demo", filtered)
def custom_blur_demo(image):
    """Custom convolution demo: 3x3 normalized box kernel via filter2D."""
    # A sharpening kernel would be [[0,-1,0],[-1,5,-1],[0,-1,0]] instead.
    box_kernel = np.array([[1, 1, 1], [1, 1, 1], [1, 1, 1]], np.float32) / 9
    # ddepth=-1 keeps the output depth equal to the input depth.
    result = cv.filter2D(image, -1, kernel=box_kernel)
    cv.imshow("custom_blur_demo", result)
def clamp(pv):
    """Clip a pixel value into the valid 8-bit range [0, 255]."""
    return min(255, max(0, pv))
def gaussian_noise(image):
    """Add per-pixel Gaussian noise (sigma=20) to a 3-channel image in place
    and display the result."""
    h, w, c = image.shape
    # np.ndindex walks (row, col) in the same row-major order as the
    # original nested loops.
    for row, col in np.ndindex(h, w):
        noise = np.random.normal(0, 20, 3)
        blue = image[row, col, 0]
        green = image[row, col, 1]
        red = image[row, col, 2]
        image[row, col, 0] = clamp(blue + noise[0])
        image[row, col, 1] = clamp(green + noise[1])
        image[row, col, 2] = clamp(red + noise[2])
    cv.imshow("noise image", image)
print("---hello python---")
# Load the demo image from a hardcoded local path (None if missing).
src = cv.imread("C:/Users/46507/Desktop/zlf.jpg")
cv.namedWindow("input image", cv.WINDOW_AUTOSIZE)
cv.imshow("input image", src)
#blur_demo(src)
# src_noise = SaltAndPepper(src, 0.1)
# # cv.imshow("Salt_and_Pepper", src_noise)
# # median_blur_demo(src_noise)
#custom_blur_demo(src)
# Time the (slow, per-pixel) Gaussian-noise pass with OpenCV's tick counter.
t1 = cv.getTickCount()
gaussian_noise(src)
t2 = cv.getTickCount()
time = (t2 - t1)/cv.getTickFrequency()  # seconds
print("time comsume : %s" % (time*1000))  # milliseconds
# Only one of ksize/sigma needs to be set; the other is derived from the
# Gaussian formula.
dst = cv.GaussianBlur(src, (0, 0), 15)
#dst = cv.GaussianBlur(src, (5, 5), 0)
cv.imshow("Gaussian Blur", dst)
cv.waitKey(0)
cv.destroyAllWindows()
|
992,230 | abd3c73b67fab91cffff4a6242243624a012da34 | '''
>>> userList = createUserList()
>>> movieList = createMovieList()
>>> numUsers = len(userList)
>>> numMovies = len(movieList)
>>> [rLu, rLm] = createRatingsList(numUsers, numMovies)
>>> [0.99 < similarity(i, i, rLu) < 1.01 for i in range(1, numUsers+1)].count(True) == numUsers
True
>>> [-1.01 < similarity(1, i, rLu) < 1.01 for i in range(1, numUsers+1)].count(True) == numUsers
True
>>> sim = sorted([(similarity(1, i, rLu), i) for i in range(2, numUsers+1)], reverse = True)[:5]
>>> sorted([x[1] for x in sim])
[155, 341, 418, 685, 812]
>>> commonMovies = [m for m in range(1, numMovies+1) if m in rLu[0] and m in rLu[417]]
>>> commonMovies
[258, 269]
>>> rLu[0][258]
5
>>> rLu[417][258]
5
>>> rLu[0][269]
5
>>> rLu[417][269]
5
>>> sim = sorted([(similarity(1, i, rLu), i) for i in range(2, numUsers+1)])[:4]
>>> sorted([x[1] for x in sim])
[88, 631, 688, 729]
>>> L = [(similarity(i, j, rLu), i, j) for i in range(1, 501) for j in range(1, 501)]
>>> [x[1:] for x in L if x[0] > 0.95 and x[1] < x[2] and len([y for y in rLu[x[1]-1].keys() if y in rLu[x[2]-1]]) > 6]
[(8, 433), (17, 449), (22, 199), (47, 385), (79, 120), (118, 476), (123, 333)]
>>> sorted([(round(x[1], 3), x[0]) for x in kNearestNeighbors(500, rLu, 10)], reverse = True)
[(1.0, 500), (0.81, 273), (0.805, 813), (0.791, 557), (0.785, 171), (0.742, 729), (0.715, 47), (0.709, 414), (0.697, 789), (0.679, 166)]
>>> sorted([(x[0], round(x[1], 3)) for x in kNearestNeighbors(200, rLu, 70) if x[0] != 200 and round(x[1], 3) == 1])
[(688, 1.0)]
'''
#-------------------------------------------------------
from project3 import *
#-------------------------------------------------------
if __name__ == "__main__":
    # Execute the doctest examples embedded in this module's docstring.
    import doctest
    doctest.testmod()
|
992,231 | ade77569afd0dba1decff56dbcb6785f13b600f1 | num1 = int(input('Digite um número:'))
num2 = int(input('Digite outro número:'))
num3 = float(input('Digite mais um número:'))
# a: product of twice the first number and half of the second.
a = (2*num1)*(num2/2)
# b: sum of three times the first number and the THIRD number.
b = (3*num1) + (num3)
# c: the third number cubed.
c = num3**3
print("O produto do dobro do primeiro com metade do segundo: ", a)
# Fixed label: b adds the third number (num3), not half of the second —
# the old message did not match the computation.
print("A soma do triplo do primeiro com o terceiro: ", b)
print("O terceiro elevado ao cubo: ", c)
992,232 | 44c18e15203286bc9ebe753f26c06155ddc7ce7b | import os
import numpy as np
from sklearn.cluster import KMeans
from scipy.stats import norm
import pickle as pkl
class NDB:
    """Number of statistically Different Bins (NDB) evaluation.

    Bins the training data with K-means, then compares the bin occupancy of
    query samples against the training occupancy with a two-proportion z-test.
    """

    def __init__(self, training_data=None, number_of_bins=100, significance_level=0.05, z_threshold=None,
                 whitening=False, max_dims=None, cache_folder=None, stage=-1):
        """
        NDB Evaluation Class
        :param training_data: Optional - the training samples - array of m x d floats (m samples of dimension d)
        :param number_of_bins: Number of bins (clusters) default=100
        :param significance_level: The statistical significance level for the two-sample test
        :param z_threshold: Allow defining a threshold in terms of difference/SE for defining a bin as statistically different
        :param whitening: Perform data whitening - subtract mean and divide by per-dimension std
        :param max_dims: Max dimensions to use in K-means. By default derived automatically from d
        :param cache_folder: Optional - folder used to write/read the clusters and results
            (avoids re-calculation across runs)
        :param stage: tag appended to the cache file names
        """
        self.number_of_bins = number_of_bins
        self.significance_level = significance_level
        self.z_threshold = z_threshold
        self.whitening = whitening
        # Small epsilon added to the per-dimension std to avoid division by zero.
        self.ndb_eps = 1e-6
        self.training_mean = 0.0
        self.training_std = 1.0
        self.max_dims = max_dims
        self.cache_folder = cache_folder
        self.bin_centers = None
        self.bin_proportions = None
        self.ref_sample_size = None
        self.used_d_indices = None
        self.results_file = None
        self.test_name = 'ndb_{}_bins_{}_{}'.format(self.number_of_bins, 'whiten' if self.whitening else 'orig', stage)
        self.cached_results = {}
        if self.cache_folder:
            self.results_file = os.path.join(cache_folder, self.test_name + '_results.pkl')
            if os.path.isfile(self.results_file):
                # print('Loading previous results from', self.results_file, ':')
                self.cached_results = pkl.load(open(self.results_file, 'rb'))
                # print(self.cached_results.keys())
        if training_data is not None or cache_folder is not None:
            bins_file = None
            if cache_folder:
                os.makedirs(cache_folder, exist_ok=True)
                bins_file = os.path.join(cache_folder, self.test_name + '.pkl')
            self.construct_bins(training_data, bins_file)

    def construct_bins(self, training_samples, bins_file):
        """
        Performs K-means clustering of the training samples.
        Loads a previously-saved clustering from bins_file when available.
        :param training_samples: An array of m x d floats (m samples of dimension d)
        """
        if self.__read_from_bins_file(bins_file):
            return
        n, d = training_samples.shape
        k = self.number_of_bins
        if self.whitening:
            self.training_mean = np.mean(training_samples, axis=0)
            self.training_std = np.std(training_samples, axis=0) + self.ndb_eps
        if self.max_dims is None and d > 1000:
            # To run faster, cluster on a random subset of the dimensions
            # (i.e. don't use all channels of all pixels).
            self.max_dims = d // 6
        whitened_samples = (training_samples - self.training_mean) / self.training_std
        d_used = d if self.max_dims is None else min(d, self.max_dims)
        self.used_d_indices = np.random.choice(d, d_used, replace=False)
        # NOTE(review): KMeans(n_jobs=...) was removed in scikit-learn 1.0;
        # this line requires an older sklearn — confirm the pinned version.
        clusters = KMeans(n_clusters=k, max_iter=100, n_jobs=-1).fit(whitened_samples[:, self.used_d_indices])
        # Bin centers are recomputed over ALL d dimensions (not just the
        # clustered subset) as the mean of each cluster's members.
        bin_centers = np.zeros([k, d])
        for i in range(k):
            bin_centers[i, :] = np.mean(whitened_samples[clusters.labels_ == i, :], axis=0)
        # Organize bins by size (largest first).
        label_vals, label_counts = np.unique(clusters.labels_, return_counts=True)
        bin_order = np.argsort(-label_counts)
        self.bin_proportions = label_counts[bin_order] / np.sum(label_counts)
        self.bin_centers = bin_centers[bin_order, :]
        self.ref_sample_size = n
        self.__write_to_bins_file(bins_file)

    def evaluate(self, query_samples):
        """
        Assign each sample to the nearest bin center (in L2). Pre-whiten if required. and calculate the NDB
        (Number of statistically Different Bins) and JS divergence scores.
        :param query_samples: An array of m x d floats (m samples of dimension d)
        :return: tuple (ndb, js) - count of statistically different bins and
            the Jensen-Shannon divergence between the bin distributions
        """
        n = query_samples.shape[0]
        query_bin_proportions, query_bin_assignments = self.__calculate_bin_proportions(query_samples)
        # print(query_bin_proportions)
        different_bins = NDB.two_proportions_z_test(self.bin_proportions, self.ref_sample_size, query_bin_proportions,
                                                    n, significance_level=self.significance_level,
                                                    z_threshold=self.z_threshold)
        ndb = np.count_nonzero(different_bins)
        js = NDB.jensen_shannon_divergence(self.bin_proportions, query_bin_proportions)
        return ndb, js

    def __calculate_bin_proportions(self, samples):
        """Whiten *samples*, assign each to its nearest bin center (L2 over the
        clustered dimensions only) and return (per-bin proportions, labels)."""
        assert samples.shape[1] == self.bin_centers.shape[1]
        n, d = samples.shape
        k = self.bin_centers.shape[0]
        D = np.zeros([n, k], dtype=samples.dtype)
        whitened_samples = (samples - self.training_mean) / self.training_std
        for i in range(k):
            D[:, i] = np.linalg.norm(
                whitened_samples[:, self.used_d_indices] - self.bin_centers[i, self.used_d_indices],
                ord=2, axis=1)
        labels = np.argmin(D, axis=1)
        probs = np.zeros([k])
        label_vals, label_counts = np.unique(labels, return_counts=True)
        probs[label_vals] = label_counts / n
        return probs, labels

    def __read_from_bins_file(self, bins_file):
        """Restore a cached clustering; returns True on success, False otherwise."""
        if bins_file and os.path.isfile(bins_file):
            bins_data = pkl.load(open(bins_file, 'rb'))
            self.bin_proportions = bins_data['proportions']
            self.bin_centers = bins_data['centers']
            self.ref_sample_size = bins_data['n']
            self.training_mean = bins_data['mean']
            self.training_std = bins_data['std']
            self.used_d_indices = bins_data['d_indices']
            return True
        return False

    def __write_to_bins_file(self, bins_file):
        """Persist the clustering (centers, proportions, whitening stats) if a
        cache path was configured."""
        if bins_file:
            bins_data = {'proportions': self.bin_proportions,
                         'centers': self.bin_centers,
                         'n': self.ref_sample_size,
                         'mean': self.training_mean,
                         'std': self.training_std,
                         'd_indices': self.used_d_indices}
            pkl.dump(bins_data, open(bins_file, 'wb'))

    @staticmethod
    def two_proportions_z_test(p1, n1, p2, n2, significance_level, z_threshold=None):
        """Element-wise two-proportion z-test; returns a boolean array marking
        the bins whose proportions differ significantly."""
        # Per http://stattrek.com/hypothesis-test/difference-in-proportions.aspx
        # See also http://www.itl.nist.gov/div898/software/dataplot/refman1/auxillar/binotest.htm
        p = (p1 * n1 + p2 * n2) / (n1 + n2)
        se = np.sqrt(p * (1 - p) * (1 / n1 + 1 / n2))
        z = (p1 - p2) / se
        # Allow defining a threshold in terms of Z (difference relative to the SE) rather than in p-values.
        if z_threshold is not None:
            return abs(z) > z_threshold
        p_values = 2.0 * norm.cdf(-1.0 * np.abs(z))  # Two-tailed test
        return p_values < significance_level

    @staticmethod
    def jensen_shannon_divergence(p, q):
        """
        Calculates the symmetric Jensen-Shannon divergence between the two PDFs
        """
        m = (p + q) * 0.5
        return 0.5 * (NDB.kl_divergence(p, m) + NDB.kl_divergence(q, m))

    @staticmethod
    def kl_divergence(p, q):
        """
        The Kullback-Leibler divergence.
        Defined only if q != 0 whenever p != 0.
        """
        assert np.all(np.isfinite(p))
        assert np.all(np.isfinite(q))
        assert not np.any(np.logical_and(p != 0, q == 0))
        p_pos = (p > 0)
        return np.sum(p[p_pos] * np.log(p[p_pos] / q[p_pos]))
if __name__ == "__main__":
    # Smoke test: train on uniform [0,1) data, then evaluate query sets drawn
    # from progressively narrower ranges (worse coverage -> higher NDB/JS).
    dim = 100
    k = 100
    n_train = k * 100
    n_test = k * 10
    train_samples = np.random.uniform(size=[n_train, dim])
    ndb = NDB(training_data=train_samples, number_of_bins=k, whitening=True)
    # evaluate() takes only the query samples; the old calls passed an
    # unsupported model_label keyword and raised TypeError.  Print the
    # (ndb, js) results instead of discarding them.
    test_samples = np.random.uniform(high=1.0, size=[n_test, dim])
    print('Test:', ndb.evaluate(test_samples))
    test_samples = np.random.uniform(high=0.9, size=[n_test, dim])
    print('Good:', ndb.evaluate(test_samples))
    test_samples = np.random.uniform(high=0.75, size=[n_test, dim])
    print('Bad:', ndb.evaluate(test_samples))
|
992,233 | 06f69784f98af46a8c2706f9040c8db2df577ae2 | """
Function:getRatioUnstruct - creates statistics of the occurence of Pfam domains
in the ChEMBL database as well as the entire human genome
--------------------
Author:
Felix Kruger
momo.sander@googlemail.com
"""
def getRatio(pfamDict, humanTargets, release, user, pword, host, port):
    """For each target, compute the fraction of its protein sequence covered
    by Pfam domains and store it under pfamDict[target]['ratio'].

    Sequences are looked up in humanTargets first; on a miss the ChEMBL
    target_dictionary table is queried via queryDevice.

    :param pfamDict: dict keyed by target accession with 'domains', 'start',
        'end' lists; mutated in place (a 'ratio' key is added) and returned.
    :param humanTargets: dict mapping accession -> protein sequence.
    :param release/user/pword/host/port: ChEMBL DB connection parameters.
    :return: the (mutated) pfamDict.
    """
    import numpy as np
    import queryDevice
    for target in pfamDict.keys():
        # Default for targets whose sequence cannot be resolved.
        pfamDict[target]['ratio'] = 'NA'
        try:
            seq = humanTargets[target]
            # NOTE(review): the -1 presumably drops a trailing newline/stop
            # character from the stored sequence — confirm against the data.
            seq_len = len(seq)-1
        except KeyError:
            seq = queryDevice.queryDevice("SELECT protein_sequence FROM target_dictionary WHERE protein_accession = '%s'" % target, release, user, pword, host, port)
            try:
                seq_len = len(seq[0][0])-1
            except IndexError:
                # No sequence in the DB either: leave ratio as 'NA'.
                continue
        # Sum the lengths of all domain intervals for this target.
        dom_len = 0
        for i in range(len(pfamDict[target]['domains'])):
            start = pfamDict[target]['start'][i]
            end = pfamDict[target]['end'][i]
            ind_dom = end - start
            dom_len += ind_dom
        # true_divide avoids Python-2 integer division.
        ratio = np.true_divide(dom_len, seq_len)
        pfamDict[target]['ratio'] = ratio
    return pfamDict
# if len(pfamDict[target]['start']) == 1:
# start = pfamDict[target]['start'][i]
# end = pfamDict[target]['end'][i]
# pre = start
# post = length - end
# unstructuredRatio = np.true_divide(min([pre,post]), max([pre, post]))
# pfamDict[target]['weighting'] = unstructuredRatio
# else:
# pass
|
def ndigits(integer):
    '''
    Return the number of decimal digits of the given integer.

    The sign is ignored and 0 counts as one digit.  Raises AssertionError
    for non-whole-number arguments (kept as an assert for backward
    compatibility with existing callers).
    '''
    assert integer % 1 == 0  # confirms it is a whole number
    integer = int(abs(integer))  # sign is not relevant
    count = 1
    # Floor-divide instead of int(integer / 10): float division silently
    # loses precision for integers above 2**53 and produced wrong counts.
    while integer >= 10:
        integer //= 10
        count += 1
    return count
|
992,235 | 6afa2324a06c88fc520edc0b732f70344e63979f | toDecode = list(input())
k = int(input())
# The original crashed on "[el] % 2" (list modulo int is a TypeError) and
# appended the whole input list once per character; the apparent intent is
# a Caesar-style shift of every character's code point by k.
asciiNums = [ord(ch) for ch in toDecode]
decodedList = [code + k for code in asciiNums]
codedList = [chr(code) for code in decodedList]
codedString = ''.join(codedList)
print(codedString)
992,236 | 2653bbe6931cd6aae72cda96cbf7aa319cfc884d | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""This example script demonstrates the Gulliver suite's ability to
reconstruct events with gradient based minimizers.
"""
from __future__ import print_function
import os
import icecube.icetray
import icecube.dataclasses
import icecube.dataio
import icecube.gulliver
import icecube.lilliput
import icecube.phys_services
from I3Tray import I3Tray
class GaussianLiklihood(icecube.gulliver.I3EventLogLikelihood):
    """Example of a simple likelihood calculation in Python.

    The likelihood is just a quadratic (a Gaussian log-likelihood up to a
    constant) centred at zenith=1, azimuth=3, x=4, y=5, z=6, so that its
    gradient is simple to calculate analytically.
    """

    def __init__(self, pulses, error):
        """
        :param pulses: name of the reco-pulse series map to read per event
        :param error: nominal timing error (stored but unused by the toy
            quadratic likelihood)
        """
        super(GaussianLiklihood, self).__init__()
        self.pulse_name = pulses
        self.error = error

    def GetName(self):
        # Name under which Gulliver reports this likelihood service.
        return "Likelihood"

    def SetGeometry(self, geo):
        self.geometry = geo.omgeo

    def SetEvent(self, frame):
        """Cache the event's pulses and reset the call counters used for
        diagnostics."""
        self.pulses = icecube.dataclasses.I3RecoPulseSeriesMap.from_frame(
            frame, self.pulse_name)
        self.calls_to_gradient = 0
        self.calls_to_likelihood = 0

    def GetLogLikelihoodWithGradient(self, hypo, grad, weight):
        """Fill *grad* with the analytic gradient of the quadratic and return
        the log-likelihood at *hypo*."""
        L = self.GetLogLikelihood(hypo)
        part = hypo.particle
        # Partial derivatives of -[100(zen-1)^2 + 100(azi-3)^2 + (x-4)^2 + ...]
        zenith = -2. * (part.dir.zenith - 1.) * 100.
        azimuth = -2. * (part.dir.azimuth - 3.) * 100.
        x = -2. * (part.pos.x - 4.)
        y = -2. * (part.pos.y - 5.)
        z = -2. * (part.pos.z - 6.)
        grad.particle.dir = icecube.dataclasses.I3Direction(zenith, azimuth)
        grad.particle.pos = icecube.dataclasses.I3Position(x, y, z)
        self.calls_to_gradient += 1
        return L

    def GetLogLikelihood(self, hypo):
        """Negative quadratic penalty around the fixed optimum; maximum (0)
        at zenith=1, azimuth=3, pos=(4, 5, 6)."""
        part = hypo.particle
        L = -(
            100.*(part.dir.zenith - 1.)**2 + 100.*(part.dir.azimuth - 3.)**2 +
            (part.pos.x - 4.)**2 + (part.pos.y - 5.)**2 + (part.pos.z - 6.)**2
        )
        self.calls_to_likelihood += 1
        return L

    def GetMultiplicity(self):
        return len(self.pulses)

    def GetDiagnostics(self, x):
        # Exported per-fit so print_result can report how often each
        # minimizer evaluated the likelihood/gradient.
        m = icecube.dataclasses.I3MapStringDouble()
        m["calls_to_gradient"] = self.calls_to_gradient
        m["calls_to_likelihood"] = self.calls_to_likelihood
        return m

    def HasGradient(self):
        # Tells Gulliver that GetLogLikelihoodWithGradient is available.
        return True
# Test data shipped with IceTray: GCD file plus 20 simulated events.
filelist = [
    "GeoCalibDetectorStatus_IC86.55697_corrected_V2.i3.gz",
    "Level3_nugen_numu_IC86.2012.011069.000000_20events.i3.bz2"
]
filelist = [
    os.path.join(os.environ["I3_TESTDATA"], "sim", f) for f in filelist
]
pulses = "TWSRTHVInIcePulses"
tray = I3Tray()
tray.Add("I3Reader", FileNameList=filelist)
# Install the toy Python likelihood in the tray context so fitters can
# reference it by name.
tray.context["GaussianLiklihood"] = GaussianLiklihood(
    pulses, 15.*icecube.icetray.I3Units.ns)
# Stand alone version of L-BFGS-B.
tray.AddService("I3GulliverLBFGSBFactory", "LBFGSB")
# Minuit2's version of MIGRAD can use gradients but it ignores them most of the
# time.
tray.AddService(
    "I3GulliverMinuit2Factory", "Minuit2Migrad",
    Algorithm="MIGRAD",
    WithGradients=True)
# This is what processing usually used, it does not use gradients, it is here
# for comparison.
tray.AddService(
    "I3GulliverMinuit2Factory", "MinuitSimplex",
    Algorithm="SIMPLEX")
# This is a wrapper for GSL's gradient based minimization, none of them appear
# to work except for steepest descent.
tray.AddService(
    "I3GSLMultiMinFactory", "Steepest",
    Algorithm="steepest_descent")
# Now add a bunch of different algorithms from NLopt. There are more algorithms
# but this is just an example. The last algorithm does not use gradients but
# still works fine with a gradient based likelihood.
nlopt_algs = [
    "LD_LBFGS", "LD_VAR1", "LD_TNEWTON", "LD_MMA", "LD_AUGLAG", "LD_SLSQP",
    "LD_CCSAQ", "LN_BOBYQA"
]
for alg in nlopt_algs:
    tray.AddService("I3GulliverNLoptFactory", "NLopt_" + alg, Algorithm=alg)
# Free parameters of the fit: vertex position and direction, with step sizes
# and position bounds.
tray.AddService(
    "I3SimpleParametrizationFactory", "param",
    StepX=20.*icecube.icetray.I3Units.m,
    StepY=20.*icecube.icetray.I3Units.m,
    StepZ=20.*icecube.icetray.I3Units.m,
    StepZenith=0.1*icecube.icetray.I3Units.radian,
    StepAzimuth=0.2*icecube.icetray.I3Units.radian,
    BoundsX=[-2000.*icecube.icetray.I3Units.m,
             2000.*icecube.icetray.I3Units.m],
    BoundsY=[-2000.*icecube.icetray.I3Units.m,
             2000.*icecube.icetray.I3Units.m],
    BoundsZ=[-2000.*icecube.icetray.I3Units.m,
             2000.*icecube.icetray.I3Units.m])
# Configured but unused by the fits below (they use GaussianLiklihood);
# kept as a reference Pandel likelihood setup.
tray.AddService(
    "I3GulliverIPDFPandelFactory", "pandel",
    InputReadout=pulses,
    EventType="InfiniteMuon",
    Likelihood="SPE1st",
    PEProb="GaussConvolutedFastApproximation",
    JitterTime=15.*icecube.icetray.I3Units.ns,
    NoiseProbability=10.*icecube.icetray.I3Units.hertz)
# Seed every fit from the LineFit first guess.
tray.AddService(
    "I3BasicSeedServiceFactory", "seed",
    InputReadout=pulses,
    FirstGuesses=["LineFit_TWHV"],
    TimeShiftType="TFirst")
minimizers = ["LBFGSB", "Minuit2Migrad", "Steepest", "MinuitSimplex"]
minimizers.extend("NLopt_" + alg for alg in nlopt_algs)
# One fitter module per minimizer, all sharing the same seed,
# parametrization and likelihood.
for m in minimizers:
    tray.Add(
        "I3SimpleFitter",
        OutputName="SPEFitSingle_TWHV_" + m,
        SeedService="seed",
        Parametrization="param",
        LogLikelihood="GaussianLiklihood",
        Minimizer=m)
def print_result(frame):
    """Print a per-minimizer summary table (final likelihood and call counts)
    for the reconstructions stored in *frame*."""
    print("\nMinimizer Event ID Likelihood Calls to Likelihood "
          "Calls to Gradient")
    event_id = frame["I3EventHeader"].event_id
    row_fmt = "{:16} {:3} {:10.7f} {:20d} {:20d}"
    for name in minimizers:
        reco = "SPEFitSingle_TWHV_" + name
        diagnostics = frame[reco + "_Likelihood"]
        print(row_fmt.format(
            name,
            event_id,
            frame[reco + "FitParams"].logl,
            int(diagnostics["calls_to_likelihood"]),
            int(diagnostics["calls_to_gradient"]),
        ))
    print()
# Register the summary printer as the last module and run the tray.
tray.Add(print_result)
tray.Execute()
|
992,237 | c2cc10e6986d23244b37b830524fae9a35dd1656 | # -*- coding: UTF-8 -*-
"""
Created on 2017年11月10日
@author: Leo
"""
class DataValidate:
    """Small type-validation helpers.

    Each check returns ``{"status": bool, "data": original value}``.
    """

    # Check whether the value is an integer.
    @staticmethod
    def is_int(data):
        return {"status": isinstance(data, int), "data": data}

    # Check whether the value is a string.
    @staticmethod
    def is_str(data):
        return {"status": isinstance(data, str), "data": data}
992,238 | afaa665c415289a922b187cbe243ecde1a587af1 | from yelp.client import Client
from yelp.oauth1_authenticator import Oauth1Authenticator
import os
from dotenv import load_dotenv, find_dotenv
# Load the YELP_* credentials from a .env file into the environment.
load_dotenv(find_dotenv())
# OAuth1 credentials are read from the environment; raises KeyError if any
# of the four variables is unset.
auth = Oauth1Authenticator(
    consumer_key=os.environ['YELP_CONSUMER_KEY'],
    consumer_secret=os.environ['YELP_CONSUMER_SECRET'],
    token=os.environ['YELP_TOKEN'],
    token_secret=os.environ['YELP_TOKEN_SECRET']
)
# Module-level client shared by yelp_search below.
client = Client(auth)
def yelp_search(address, term):
    """Search Yelp for *term* near *address* (French locale) and return the
    list of matching businesses."""
    response = client.search(address, term=term, lang='fr')
    return response.businesses
|
992,239 | e99888527ec3d892224102abe115e93908694462 | from pylab import *
import pyqtgraph.opengl as gl
from pyqtgraph.Qt import QtCore, QtGui
import pyqtgraph as pg
from matplotlib import cm
from scipy.signal import butter, lfilter
print('load_data')
## Surface mesh (vertices/faces) and region centres for the 3D views.
verts = np.load('pyqt_data/verts.npy')
faces = np.load('pyqt_data/faces.npy')
centres = np.loadtxt('pyqt_data/centres.txt')
vertex_mapping = np.load('pyqt_data/vertex_mapping.npy')
# One region name per line.
g = open('pyqt_data/name_regions.txt','r')
global name_regions
name_regions = []
for line in g:
    name_regions.append(line)
g.close()
## TAVG: simulation parameters select which result file to load.
x01_curr = 2.5
x02_curr = 3.0
num_curr = 4.0
Ks_curr = 0.5
x0_curr = 69
global tavgs
tavgs = np.load('sEEG_sim/results/compressed_different_'+str(x02_curr)+
                '_epileptogenic_'+str(x01_curr)+'_net_'
                +str(num_curr)+'_Ks_'+str(Ks_curr)+'_x0_'+str(x0_curr)+'.npy')
## Projection matrix maps region time series to sEEG sensor time series.
proj_mat = np.load('seeg_projection_matrix.npy')
seegs_not_filtered = np.dot(proj_mat, tavgs)
def butterworth_bandpass(lowcut, highcut, fs, order=5):
    """
    Design a digital Butterworth band-pass filter.

    The cutoff frequencies (Hz) are normalised by the Nyquist frequency
    before being handed to scipy's ``butter``.  Returns the (b, a)
    transfer-function coefficients.
    """
    nyquist = 0.5 * fs
    band = [lowcut / nyquist, highcut / nyquist]
    return butter(order, band, btype='band')
def filter_data(data, lowcut, highcut, fs, order=5):
    """Band-pass filter *data* with a Butterworth filter (single forward
    ``lfilter`` pass, so the output is phase-shifted)."""
    numer, denom = butterworth_bandpass(lowcut, highcut, fs, order=order)
    return lfilter(numer, denom, data)
# Band-pass the sEEG signals: 0.1-50 Hz at a 110 Hz sampling rate.
fs = 110.0
lowcut = 0.1
highcut = 50.0
seegs = filter_data(seegs_not_filtered, lowcut, highcut, fs, order=6)
## Electrode positions, colours and names for the 3D display.
positions = np.load('pyqt_data/positions.npy')
electrodes_color = np.loadtxt('pyqt_data/electrodes_color.txt')
f = open('pyqt_data/name_electrodes.txt','r')
global name_electrodes
name_electrodes = []
for line in f:
    name_electrodes.append(line)
f.close()
print('data loaded')
# Global amplitude statistics used to normalise the sphere scaling in the
# updatePlot callbacks below.
mean_sig_total = np.mean(tavgs[:, :], axis=1)
max_sig_total = np.max(np.abs(tavgs[:, :]-mean_sig_total[:, newaxis]), axis=1)
min_sig_total = np.min(np.abs(tavgs[:, :]-mean_sig_total[:, newaxis]), axis=1)
max_tavgs = np.max(max_sig_total)
min_tavgs = np.min(min_sig_total)
mean_seegs_total = np.mean(seegs[:, :], axis=1)
max_seegs_total = np.max(np.abs(seegs[:, :]-mean_seegs_total[:, newaxis]), axis=1)
max_seegs = np.max(max_seegs_total)
# NOTE(review): min_seegs is taken from the array of per-channel MAX
# deviations (max_seegs_total), not from a min array as for tavgs above —
# confirm this asymmetry is intentional.
min_seegs = np.min(max_seegs_total)
## QT application: main window with a 2x2 grid of plot/3D widgets.
app = QtGui.QApplication([])
mw = QtGui.QMainWindow()
mw.setWindowTitle('region simulation')
mw.resize(1000,800)
cw = QtGui.QWidget()
mw.setCentralWidget(cw)
# NOTE(review): the QVBoxLayout is immediately overwritten by the
# QGridLayout — the first assignment looks dead.
l = QtGui.QVBoxLayout()
l = QtGui.QGridLayout()
cw.setLayout(l)
## First window (top-left): stacked sEEG traces, one per channel.
pw1 = pg.PlotWidget(name='SEEG')
l.addWidget(pw1,0,0)
for i in range(seegs.shape[0]):
    #st = (seegs[i,:]-np.min(seegs[i,:])) / (np.max(seegs[i,:])-np.min(seegs[i,:]))
    st = seegs[i,:]
    # Amplify by 100 and offset by channel index so traces stack vertically.
    pw1.plot(100*st+i, pen=electrodes_color[i])
# Two linked selection regions, one shown on each plot.
lr = pg.LinearRegionItem([4000,8000])
lr2 = pg.LinearRegionItem([4000,8000])
lr.setZValue(-10)
def updatePlot1():
    """Mirror the region selected on plot 2 onto plot 1."""
    indx1, indx2 = lr2.getRegion()
    indx1, indx2 = indx1, indx2
    lr.setRegion([indx1,indx2])
lr2.sigRegionChanged.connect(updatePlot1)
updatePlot1()
pw1.addItem(lr)
## Second window (bottom-left): stacked region time series (TAVG),
## x-axis linked to the sEEG plot.
pw2 = pg.PlotWidget(name='Plot2', title='TAVG')
l.addWidget(pw2,1,0)
pw2.setXLink(pw1)
for i in range(tavgs.shape[0]):
    #st = (tavgs[i,:]-np.min(tavgs[i,:])) / (np.max(tavgs[i,:])-np.min(tavgs[i,:]))
    st = tavgs[i,:]
    pw2.plot(st+i, pen=(0,0,255,200))
lr2.setZValue(-10)
def updatePlot2():
    """Mirror the region selected on plot 1 onto plot 2."""
    indx1, indx2 = lr.getRegion()
    indx1, indx2 = indx1, indx2
    lr2.setRegion([indx1,indx2])
lr.sigRegionChanged.connect(updatePlot2)
updatePlot2()
pw2.addItem(lr2)
# Third window (top-right): 3D brain surface with electrode and region
# spheres scaled by the selected time window's signal.
pw3 = gl.GLViewWidget()
l.addWidget(pw3, 0,1)
pw3.setCameraPosition(distance=120, azimuth=-210)
# Centre and rescale the surface vertices for display.
mean_verts = np.mean(verts, axis=0)
max_verts = np.max(verts, axis=0)*0.01
verts = -(verts - mean_verts)/max_verts
surf_nf = faces.shape[0]
surf_nv = verts.shape[0]
surf_item = gl.GLMeshItem(vertexes=verts[:], faces=faces[:],
                          drawFaces=True, drawEdges=False, color=(32,32,32,0.5), smooth=True, shader='shaded')#glOptions='additive', antialias=True)
pw3.addItem(surf_item)
# One sphere per sEEG contact, coloured like its trace in plot 1.
seeg_data = []
seeg_item = []
for i in range(seegs.shape[0]):
    seeg_data.append(gl.MeshData.sphere(rows=10, cols=10, radius=1.))
    seeg_item.append(gl.GLMeshItem(meshdata=seeg_data[i], smooth=True, shader='shaded', glOptions='additive'))
    seeg_item[i].translate(-(positions[i,0]-mean_verts[0])/max_verts[0] , -(positions[i,1]-mean_verts[1])/max_verts[1] , -(positions[i,2]-mean_verts[2])/max_verts[2] )
    pw3.addItem(seeg_item[i])
    seeg_item[i].setColor(electrodes_color[i]/255.)
# One red sphere per brain region centre.
centres_data1 = []
centres_item1 = []
for i in range(centres.shape[0]):
    centres_data1.append(gl.MeshData.sphere(rows=10, cols=10, radius=1.))
    centres_item1.append(gl.GLMeshItem(meshdata=centres_data1[i], smooth=True, color=(1, 0, 0, 1), shader='shaded', glOptions='additive'))
    centres_item1[i].translate(-(centres[i,0]-mean_verts[0])/max_verts[0] , -(centres[i,1]-mean_verts[1])/max_verts[1] , -(centres[i,2]-mean_verts[2])/max_verts[2] )
    pw3.addItem(centres_item1[i])
def updatePlot3():
    """Rescale the region and electrode spheres in window 3 from the signal
    value at the left edge of the selected time window."""
    indx1, indx2 = lr.getRegion()
    indx1, indx2 = indx1, indx2
    mean_sig_tavgs1 = np.mean(tavgs[:, int(indx1):int(indx2)], axis=1)
    max_sig_tavgs1 = np.max(np.abs(tavgs[:, int(indx1):int(indx2)]-mean_sig_tavgs1[:, newaxis]), axis=1)
    ts_tavgs1 = ((max_sig_tavgs1 - min_tavgs)/(max_tavgs-min_tavgs))
    for i in range(centres.shape[0]):
        centres_item1[i].resetTransform()
        centres_item1[i].translate(-(centres[i,0]-mean_verts[0])/max_verts[0] , -(centres[i,1]-mean_verts[1])/max_verts[1] , -(centres[i,2]-mean_verts[2])/max_verts[2] )
        #centres_item1[i].scale(1+3*ts_tavgs1[i], 1+3*ts_tavgs1[i], 1+3*ts_tavgs1[i])
        centres_item1[i].scale(1+tavgs[i, np.abs(int(indx1))], 1+tavgs[i, np.abs(int(indx1))], 1+tavgs[i, np.abs(int(indx1))])
        centres_item1[i].meshDataChanged()
    mean_sig = np.mean(seegs[:, int(indx1):int(indx2)], axis=1)
    max_sig = np.max(np.abs(seegs[:, int(indx1):int(indx2)]-mean_sig[:, newaxis]), axis=1)
    ts = ((max_sig - min_seegs)/(max_seegs-min_seegs))
    for i in range(seegs.shape[0]):
        seeg_item[i].resetTransform()
        seeg_item[i].translate(-(positions[i,0]-mean_verts[0])/max_verts[0] , -(positions[i,1]-mean_verts[1])/max_verts[1] , -(positions[i,2]-mean_verts[2])/max_verts[2] )
        #seeg_item[i].scale(1+30*ts[i], 1+30*ts[i], 1+30*ts[i])
        # NOTE(review): only the first axis uses the index 100*int(indx1)
        # while the other two use int(indx1) — this asymmetry looks like a
        # typo; confirm against the intended scaling.
        seeg_item[i].scale(1+100*seegs[i, 100*np.abs(int(indx1))], 1+100*seegs[i, np.abs(int(indx1))], 1+100*seegs[i, np.abs(int(indx1))])
        seeg_item[i].meshDataChanged()
lr.sigRegionChanged.connect(updatePlot3)
updatePlot3()
## Fourth window (bottom-right): shaded brain surface with region spheres only.
pw4 = gl.GLViewWidget()
l.addWidget(pw4, 1,1)
pw4.setCameraPosition(distance=120, azimuth=-210)
# Per-vertex base colour (currently unused by the shaded mesh below).
vertcolors = np.ones((surf_nv, 4)) * np.array([0.7,0.67,0.6,0])
surf_data = gl.MeshData(vertexes=verts[:], faces=faces[:])
m1 = gl.GLMeshItem(meshdata=surf_data, smooth=True, shader='shaded')
pw4.addItem(m1)
centres_data = []
centres_item = []
for i in range(centres.shape[0]):
    centres_data.append(gl.MeshData.sphere(rows=10, cols=10, radius=1.))
    centres_item.append(gl.GLMeshItem(meshdata=centres_data[i], smooth=True, color=(1, 0, 0, 1), shader='shaded', glOptions='additive'))
    centres_item[i].translate(-(centres[i,0]-mean_verts[0])/max_verts[0] , -(centres[i,1]-mean_verts[1])/max_verts[1] , -(centres[i,2]-mean_verts[2])/max_verts[2] )
    pw4.addItem(centres_item[i])
def updatePlot4():
    """Rescale the region spheres in window 4 from the signal value at the
    left edge of the selected time window."""
    indx1, indx2 = lr.getRegion()
    indx1, indx2 = indx1, indx2
    mean_sig = np.mean(tavgs[:, int(indx1):int(indx2)], axis=1)
    max_sig = np.max(np.abs(tavgs[:, int(indx1):int(indx2)]-mean_sig[:, newaxis]), axis=1)
    ts_tavgs = ((max_sig - min_tavgs)/(max_tavgs-min_tavgs))
    for i in range(centres.shape[0]):
        centres_item[i].resetTransform()
        centres_item[i].translate(-(centres[i,0]-mean_verts[0])/max_verts[0] , -(centres[i,1]-mean_verts[1])/max_verts[1] , -(centres[i,2]-mean_verts[2])/max_verts[2] )
        #centres_item[i].scale(1+3*ts_tavgs[i], 1+3*ts_tavgs[i], 1+3*ts_tavgs[i])
        centres_item[i].scale(1+tavgs[i, np.abs(int(indx1))], 1+tavgs[i, np.abs(int(indx1))], 1+tavgs[i, np.abs(int(indx1))])
        centres_item[i].meshDataChanged()
lr.sigRegionChanged.connect(updatePlot4)
updatePlot4()
## Give all four widgets the same size hint/policy so the grid splits evenly.
pw1.sizeHint = pw2.sizeHint = pw3.sizeHint = pw4.sizeHint = lambda: pg.QtCore.QSize(100, 100)
pw3.setSizePolicy(pw1.sizePolicy())
pw4.setSizePolicy(pw1.sizePolicy())
## Show the main window and enter the Qt event loop (blocks until close).
mw.show()
QtGui.QApplication.instance().exec_()
def findOri(text):
    """Compute the GC-skew diagram of *text* and return the positions where
    the skew is minimal (candidate replication origin).

    The skew starts at 0; each G adds 1, each C subtracts 1, and every other
    character (A, T, or non-ACGT such as a trailing newline) leaves it
    unchanged.  Positions are reported 1-based via str(i+1), preserving the
    original implementation's convention, and the result is returned as the
    string form of a list for output-format compatibility.
    """
    genome = text
    skew = [0]
    minimum = []
    # The old test `genome[i] == ("A" or "T")` only ever matched "A" (the
    # expression ("A" or "T") evaluates to "A"), so "T" never extended the
    # skew list and the indices drifted; the old range(len(text)+1) also
    # read one character past the end of the string (IndexError).
    for i in range(len(genome)):
        if genome[i] == "G":
            skew.append(skew[i]+1)
        elif genome[i] == "C":
            skew.append(skew[i]-1)
        else:
            # A, T and any non-ACGT character leave the skew unchanged.
            skew.append(skew[i])
    print(skew)
    lowest = min(skew)
    for i in range(len(skew)):
        if skew[i] == lowest:
            minimum.append(str(i+1))
    return str(minimum)
import sys  # you must import "sys" to read from STDIN
# Read the whole genome from standard input.
text = sys.stdin.read()
#print(patternsearch(text,pattern))
# Append the minimum-skew positions to result.txt.
with open("result.txt", 'a') as out:
    out.write(findOri(text))
|
992,241 | 7bfed937bea33492b6e11f944c027eba4947a867 | from __future__ import print_function
import sys
import numpy as np
import matplotlib.pyplot as plt
from robolearn.old_utils.iit.iit_robots_params import *
from robolearn.old_envs import BigmanEnv
from robolearn.old_agents import GPSAgent
from robolearn.old_policies.policy_opt.policy_opt_tf import PolicyOptTf
from robolearn.old_policies.policy_opt.tf_model_example import tf_network
from robolearn.old_utils.sample import Sample
from robolearn.old_utils.sample_list import SampleList
from robolearn.old_costs.cost_action import CostAction
from robolearn.old_costs.cost_state import CostState
from robolearn.old_costs.cost_sum import CostSum
from robolearn.old_costs.cost_utils import RAMP_QUADRATIC
from robolearn.old_utils.algos_utils import IterationData
from robolearn.old_utils.algos_utils import TrajectoryInfo
from robolearn.old_algos.gps.gps import GPS
from robolearn.old_policies.lin_gauss_init import init_lqr, init_pd
from robolearn.old_policies.policy_prior import PolicyPrior # For MDGPS
import rospy
from robolearn.old_utils.print_utils import *
import time
# ################## #
# ################## #
# ### PARAMETERS ### #
# ################## #
# ################## #
# Task parameters
#update_frequency = 5
Ts = 0.01  # Control/sampling period in seconds (cmd_freq below is 1/Ts = 100 Hz)
EndTime = 5 # Using final time to define the horizon
# ################### #
# ################### #
# ### ENVIRONMENT ### #
# ################### #
# ################### #
print("\nCreating Bigman environment...")
# Robot configuration
interface = 'ros'  # communication backend for the robot environment
body_part_active = 'LA'  # left arm is the actuated body part
command_type = 'velocity'  # joint-velocity commands
file_save_restore = "models/bigman_agent_vars.ckpt"  # checkpoint path for agent variables
# Sensors composing the observation vector: full upper-body joint state, the
# four force/torque sensors, one IMU, and Optitrack poses of both hands and
# the box object.
observation_active = [{'name': 'joint_state',
                       'type': 'joint_state',
                       'ros_topic': '/xbotcore/bigman/joint_states',
                       'fields': ['link_position', 'link_velocity', 'effort'],
                       'joints': bigman_params['joint_ids']['UB']},  # Value that can be gotten from robot_params['joints_names']['UB']
                      {'name': 'ft_left_arm',
                       'type': 'ft_sensor',
                       'ros_topic': '/xbotcore/bigman/ft/l_arm_ft',
                       'fields': ['force', 'torque']},
                      {'name': 'ft_right_arm',
                       'type': 'ft_sensor',
                       'ros_topic': '/xbotcore/bigman/ft/r_arm_ft',
                       'fields': ['force', 'torque']},
                      {'name': 'ft_left_leg',
                       'type': 'ft_sensor',
                       'ros_topic': '/xbotcore/bigman/ft/l_leg_ft',
                       'fields': ['force', 'torque']},
                      {'name': 'ft_right_leg',
                       'type': 'ft_sensor',
                       'ros_topic': '/xbotcore/bigman/ft/r_leg_ft',
                       'fields': ['force', 'torque']},
                      {'name': 'imu1',
                       'type': 'imu',
                       'ros_topic': '/xbotcore/bigman/imu/imu_link',
                       'fields': ['orientation', 'angular_velocity', 'linear_acceleration']},
                      {'name': 'optitrack',
                       'type': 'optitrack',
                       'ros_topic': '/optitrack/relative_poses',
                       'fields': ['position', 'orientation'],
                       'bodies': ['LSoftHand', 'RSoftHand', 'box']},
                      ]
#observation_active = [{'name': 'imu1',
#                       'type': 'imu',
#                       'ros_topic': '/xbotcore/bigman/imu/imu_link',
#                       'fields': ['orientation', 'angular_velocity', 'linear_acceleration']}]
#state_active = [{'name': 'joint_state',
#                 'type': 'joint_state',
#                 'fields': ['link_position', 'link_velocity'],
#                 'joints': bigman_params['joint_ids']['LA']}]  # Value that can be gotten from robot_params['joints_ids']['LA']
# State = left-arm joint positions/velocities plus the Optitrack box pose.
state_active = [{'name': 'joint_state',
                 'type': 'joint_state',
                 'fields': ['link_position', 'link_velocity'],
                 'joints': bigman_params['joint_ids']['LA']},
                {'name': 'optitrack',
                 'type': 'optitrack',
                 'fields': ['position', 'orientation'],
                 'bodies': ['box']}]  # check if it is better relative position with EE(EEs)
# Create a Bigman robot ROS EnvInterface
bigman_env = BigmanEnv(interface=interface, mode='simulation',
                       body_part_active=body_part_active, command_type=command_type,
                       observation_active=observation_active,
                       state_active=state_active,
                       cmd_freq=int(1/Ts))
# TODO: DOMINGOOOO
# TODO: Temporally using current state to set one initial condition
current_state = bigman_env.get_state()
bigman_env.set_initial_conditions([current_state])
# Dimensions reported by the environment, used to size the policy and costs.
action_dim = bigman_env.action_dim
state_dim = bigman_env.state_dim
observation_dim = bigman_env.obs_dim
print("Bigman Environment OK. body_part_active:%s (action_dim=%d). Command_type:%s" % (body_part_active, action_dim, command_type))
# ################# #
# ################# #
# ##### AGENT ##### #
# ################# #
# ################# #
print("\nCreating Bigman Agent...")
# Create an Agent
# Agent option
#policy_params = {
#    'network_params': {
#        'obs_include': [JOINT_ANGLES, JOINT_VELOCITIES],
#        'obs_vector_data': [JOINT_ANGLES, JOINT_VELOCITIES],
#        'sensor_dims': SENSOR_DIMS,
#    },
#    'network_model': tf_network,
#    'iterations': 1000,
#    'weights_file_prefix': EXP_DIR + 'policy',
#}
# TensorFlow policy-network configuration for the GPS global policy.
policy_params = {
    'network_model': tf_network,  # tf_network, multi_modal_network, multi_modal_network_fp
    'iterations': 500,  # Inner iteration (Default:5000). Reccomended: 1000?
    'network_params': {
        'n_layers': 1,  # Hidden layers??
        'dim_hidden': [40],  # Dictionary of size per n_layers
        'obs_names': bigman_env.get_obs_info()['names'],
        'obs_dof': bigman_env.get_obs_info()['dimensions'],  # DoF for observation data tensor
        'batch_size': 15,  # TODO: Check if this value is OK (same than name_samples)
        #'num_filters': [5, 10],
        #'obs_include': [JOINT_ANGLES, JOINT_VELOCITIES, RGB_IMAGE],  # Deprecated from original GPS code
        #'obs_vector_data': [JOINT_ANGLES, JOINT_VELOCITIES],  # Deprecated from original GPS code
        #'obs_image_data': [RGB_IMAGE],  # Deprecated from original GPS code
        #'sensor_dims': SENSOR_DIMS,  # Deprecated from original GPS code
        #'image_width': IMAGE_WIDTH (80),  # For multi_modal_network
        #'image_height': IMAGE_HEIGHT (64),  # For multi_modal_network
        #'image_channels': IMAGE_CHANNELS (3),  # For multi_modal_network
    }
}
policy = PolicyOptTf(policy_params, observation_dim, action_dim)
#policy = None
# The GPS agent wraps the global policy and exposes act()/train().
bigman_agent = GPSAgent(act_dim=action_dim, obs_dim=observation_dim, state_dim=state_dim, policy=policy)
# Load previous learned variables
#bigman_agent.load(file_save_restore)
print("Bigman Agent:%s OK\n" % type(bigman_agent))
# ################# #
# ################# #
# ##### COSTS ##### #
# ################# #
# ################# #
# Action Cost #TODO: I think it doesn't have sense if the control is joint position
# Quadratic penalty on control effort, with per-DoF weight 'wu'.
act_cost = {
    'type': CostAction,
    'wu': np.ones(action_dim) * 1e-4,
    #'l1': 1e-3,
    #'alpha': 1e-2,
    'target': None,   # Target action value
}
# State Cost
# Target poses for the state cost.
# Box pose in world frame: [x, y, z, qx, qy, qz, qw].
box_pose = [-0.7500,  # pos x
            0.0000,  # pos y
            0.0184,  # pos z
            0.0000,  # orient x
            0.0000,  # orient y
            0.0000,  # orient z
            1.0000]  # orient w
box_size = [0.4, 0.5, 0.3]  # box dimensions [x, y, z] in meters
# Desired left end-effector pose: the box pose shifted along x by half the
# box length minus a 5 cm offset (i.e. just inside the box's near face).
# NOTE(fix): the original code did `left_ee_pose = box_pose`, aliasing the
# same list, so the `+=` below also shifted box_pose and target_state ended
# up as two identical, already-offset poses. Copy the list instead.
left_ee_pose = list(box_pose)
left_ee_pose[0] += box_size[0]/2 - 0.05
# Concatenated target: [left-EE pose (7), box pose (7)] -> 14 values,
# matching the 'optitrack' state entries used by state_cost below.
target_state = left_ee_pose + box_pose
# 'B' pose
# Quadratic cost on the optitrack state entries vs. target_state
# ([left-EE pose, box pose]); the weight ramps quadratically over time.
state_cost = {
    'type': CostState,
    'ramp_option': RAMP_QUADRATIC,  # How target cost ramps over time. RAMP_* :CONSTANT,LINEAR, QUADRATIC, FINAL_ONLY
    'l1': 0.0,
    'l2': 1.0,
    'wp_final_multiplier': 5.0,  # Weight multiplier on final time step.
    'data_types': {
        'optitrack': {
            'wp': np.ones_like(target_state),  # State weights - must be set.
            'target_state': target_state,  # Target state - must be set.
            'average': None,  #(12, 3),
            'data_idx': bigman_env.get_state_info(name='optitrack')['idx']
        }
    },
}
#state_cost = {
#    'type': CostState,
#    'ramp_option': RAMP_QUADRATIC,  # How target cost ramps over time. RAMP_* :CONSTANT,LINEAR, QUADRATIC, FINAL_ONLY
#    'l1': 0.0,
#    'l2': 1.0,
#    'wp_final_multiplier': 1.0,  # Weight multiplier on final time step.
#    'data_types': {
#        'link_position': {
#            'wp': np.ones_like(target_pos),  # State weights - must be set.
#            'target_state': target_pos,  # Target state - must be set.
#            'average': None,  #(12, 3),
#            'data_idx': bigman_env.get_state_info(name='link_position')['idx']
#        },
#        'link_velocity': {
#            'wp': np.ones_like(target_vel),  # State weights - must be set.
#            'target_state': target_vel,  # Target state - must be set.
#            'average': None,  #(12, 3),
#            'data_idx': bigman_env.get_state_info(name='link_velocity')['idx']
#        },
#    },
#}
# Sum of costs
# Weighted combination of action and state costs fed to the GPS algorithm.
cost_sum = {
    'type': CostSum,
    'costs': [act_cost, state_cost],
    'weights': [0.1, 5.0],
}
# ######################## #
# ######################## #
# ## LEARNING ALGORITHM ## #
# ######################## #
# ######################## #
# Learning params
total_episodes = 5
num_samples = 5  # Samples for exploration trajs
resume_training_itr = None  # Resume from previous training iteration
T = int(EndTime/Ts)  # Total points
conditions = 1  # Number of initial conditions
sample_on_policy = False
test_policy_after_iter = False
kl_step = 0.2  # KL-divergence step size between successive trajectory distributions
# init_traj_distr is a list of dict
# LQR-based initial trajectory distribution for the local controllers.
init_traj_distr = {'type': init_lqr,
                   'init_var': 1.0,
                   'stiffness': 1.0,
                   'stiffness_vel': 0.5,
                   'final_weight': 1.0,
                   # Parameters for guessing dynamics
                   'init_acc': np.zeros(action_dim),  # dU vector(np.array) of accelerations, default zeros.
                   'init_gains': 1*np.ones(action_dim),  # dU vector(np.array) of gains, default ones.
                   }
#init_traj_distr = [{'type': init_pd,
#                    'init_var': 0.00001,  # initial variance (Default:10)
#                    'pos_gains': 0.001,  # position gains (Default:10)
#                    'vel_gains_mult': 0.01,  # velocity gains multiplier on pos_gains
#                    'init_action_offset': None,
#                    }]
#gps_algo = 'pigps'
## PIGPS hyperparams
#gps_algo_hyperparams = {'init_pol_wt': 0.01,
#                        'policy_sample_mode': 'add'
#                        }
gps_algo = 'mdgps'
# MDGPS hyperparams
gps_algo_hyperparams = {'init_pol_wt': 0.01,
                        'policy_sample_mode': 'add',
                        # Whether to use 'laplace' or 'mc' cost in step adjusment
                        'step_rule': 'laplace',
                        'policy_prior': {'type': PolicyPrior},
                        }
learn_algo = GPS(agent=bigman_agent, env=bigman_env,
                 iterations=total_episodes, num_samples=num_samples,
                 T=T, dt=Ts,
                 cost=cost_sum,
                 conditions=conditions,
                 sample_on_policy=sample_on_policy,
                 test_after_iter=test_policy_after_iter,
                 init_traj_distr=init_traj_distr,
                 kl_step=kl_step,
                 gps_algo=gps_algo,
                 gps_algo_hyperparams=gps_algo_hyperparams
                 )
print("Learning algorithm: %s OK\n" % type(learn_algo))
# Learn using learning algorithm
print("Running Learning Algorithm!!!")
learn_algo.run(resume_training_itr)
print("Learning Algorithm has finished!")
# NOTE: the script exits here; everything below is dead example code.
sys.exit()
# ######################### #
# EXAMPLE OF AN EXPLORATION #
# ######################### #
# Dead code (after sys.exit above): manual rollout collection and cost
# evaluation, kept as a usage example of the Sample/SampleList API.
# NOTE(review): leading indentation was lost in this dump; the nesting of the
# cost-evaluation section below was reconstructed (placed per-episode) -- TODO
# confirm against the original file.
ros_rate = rospy.Rate(int(1/Ts))  # hz
try:
    episode = 0
    sample_list = SampleList()
    print("Starting Training...")
    # Learn First
    for episode in range(total_episodes):
        print("")
        print("#"*15)
        print("Episode %d/%d" % (episode+1, total_episodes))
        print("#"*15)
        for n_sample in range(num_samples):
            print("")
            print("New Sample: Sample %d/%d" % (n_sample+1, num_samples))
            i = 0
            # Create a sample class
            sample = Sample(bigman_env, T)
            history = [None] * T
            obs_hist = [None] * T
            # Collect history
            for i in range(T):
                obs = bigman_env.get_observation()
                state = bigman_env.get_state()
                action = bigman_agent.act(obs=obs)
                bigman_env.send_action(action)
                print("Episode %d/%d | Sample:%d/%d | t=%d/%d" % (episode+1, total_episodes,
                                                                  n_sample+1, num_samples,
                                                                  i+1, T))
                obs_hist[i] = (obs, action)
                history[i] = (state, action)
                #print(obs)
                #print("..")
                #print(state)
                #print("--")
                #print("obs_shape:(%s)" % obs.shape)
                #print("state_shape:(%s)" % state.shape)
                #print("obs active names: %s" % bigman_env.get_obs_info()['names'])
                #print("obs active dims: %s" % bigman_env.get_obs_info()['dimensions'])
                #print("state active names: %s" % bigman_env.get_state_info()['names'])
                #print("state active dims: %s" % bigman_env.get_state_info()['dimensions'])
                #print("")
                #sample.set_acts(action, t=i)  # Set action One by one
                #sample.set_obs(obs[:42], obs_name='joint_state', t=i)  # Set action One by one
                #sample.set_states(state[:7], state_name='link_position', t=i)  # Set action One by one
                ros_rate.sleep()
            all_actions = np.array([hist[1] for hist in history])
            all_states = np.array([hist[0] for hist in history])
            all_obs = np.array([hist[0] for hist in obs_hist])
            sample.set_acts(all_actions)  # Set all actions at the same time
            sample.set_obs(all_obs)  # Set all obs at the same time
            sample.set_states(all_states)  # Set all states at the same time
            # Add sample to sample list
            print("Sample added to sample_list!")
            sample_list.add_sample(sample)
            print("Resetting environment!")
            bigman_env.reset(time=1)
            #rospy.sleep(5)  # Because I need to find a good way to reset
        print("")
        print("Exploration finished. %d samples were generated" % sample_list.num_samples())
        print("")
        print("Evaluating samples' costs...")
        #Evaluate costs for all samples for a condition.
        # Important classes
        #cost = act_cost['type'](act_cost)
        #cost = state_cost['type'](state_cost)
        cost = cost_sum['type'](cost_sum)
        iteration_data = IterationData()
        iteration_data.traj_info = TrajectoryInfo()  # Cast it directly in gps algo, with M variable
        # Constants.
        N_samples = len(sample_list)
        # Compute cost.
        cs = np.zeros((N_samples, T))  # Sample costs of the current iteration.
        cc = np.zeros((N_samples, T))  # Cost estimate constant term.
        cv = np.zeros((N_samples, T, state_dim+action_dim))  # Cost estimate vector term.
        Cm = np.zeros((N_samples, T, state_dim+action_dim, state_dim+action_dim))  # Cost estimate matrix term.
        for n in range(N_samples):
            sample = sample_list[n]
            # Get costs.
            l, lx, lu, lxx, luu, lux = cost.eval(sample)
            cc[n, :] = l
            cs[n, :] = l
            # Assemble matrix and vector.
            cv[n, :, :] = np.c_[lx, lu]
            Cm[n, :, :, :] = np.concatenate(
                (np.c_[lxx, np.transpose(lux, [0, 2, 1])], np.c_[lux, luu]),
                axis=1
            )
            #TODO: Check this part better, and understand it
            # Adjust for expanding cost around a sample.
            X = sample.get_states()
            U = sample.get_acts()
            yhat = np.c_[X, U]
            rdiff = -yhat
            rdiff_expand = np.expand_dims(rdiff, axis=2)
            cv_update = np.sum(Cm[n, :, :, :] * rdiff_expand, axis=1)
            cc[n, :] += np.sum(rdiff * cv[n, :, :], axis=1) + 0.5 * np.sum(rdiff * cv_update, axis=1)
            cv[n, :, :] += cv_update
        # Fill in cost estimate.
        iteration_data.traj_info.cc = np.mean(cc, 0)  # Constant term (scalar).
        iteration_data.traj_info.cv = np.mean(cv, 0)  # Linear term (vector).
        iteration_data.traj_info.Cm = np.mean(Cm, 0)  # Quadratic term (matrix).
        iteration_data.cs = cs  # True value of cost.
        print("Mean cost for iteration %d: %f" % (episode+1, np.sum(np.mean(cs, 0))))
        print("The episode has finished!")
        #print("Training the agent...")
        #bigman_agent.train(history=history)
        #bigman_agent.save(file_save_restore)
        #print("Training ready!")
        #all_samples_obs = sample_list.get_obs(idx=range(2, 4), obs_name='joint_state')
        #print(all_samples_obs.shape)
        #for samp in all_samples_obs:
        #    plt.plot(samp[:, 0])
        #plt.show()
        #plt.plot(sample.get_acts()[:, 0], 'k')
        #plt.plot(sample.get_obs('joint_state')[:, 0], 'b')
        #plt.plot(sample.get_states('link_position')[:, 0], 'r')
        #plt.show()
    print("Training finished!")
    sys.exit()
except KeyboardInterrupt:
    print('Training interrupted!')
|
992,242 | 2ee3cdaadd60b750c3a3ad48fdf91b09e777b76d | from torch.utils.data import DataLoader
from data.datasets import uwdataset, collate_fn, mmvaldataset, uiebvaldataset
from utils.visual import get_summary_writer, visualize_boxes
from tqdm import tqdm
from mscv import write_meters_loss, write_image
from models.det.faster_rcnn import Model as det_Model
from models.restoration.deq import Model as res_Model
import numpy as np
from utils.res_metrics import tensor2im
from os.path import join
import torch
import ipdb
# Class labels for the three detection categories of the chinamm2019uw set.
classes = ['0B', '1B', '2B']
# NOTE(review): machine-specific dataset root -- adjust per host.
root = "/media/windows/c/datasets/underwater"
# Joint dataset: detection samples (chinamm2019uw) + restoration pairs (UIEBD).
dataset = uwdataset(join(root,"chinamm2019uw/chinamm2019uw_train"),
                    join(root,"UIEBD"))
dl = DataLoader(dataset, batch_size=4, num_workers=4, shuffle=True, collate_fn=collate_fn)
dataset_val = mmvaldataset(join(root,"chinamm2019uw/chinamm2019uw_train"))
dl_val = DataLoader(dataset_val, batch_size=1, num_workers=4, shuffle=True, collate_fn=collate_fn)
dataset_val_uieb = uiebvaldataset(join(root,"UIEBD"))
dl_val_uieb = DataLoader(dataset_val_uieb, batch_size=1, num_workers=4, shuffle=True, collate_fn=collate_fn)
# Disabled preview loop: writes annotated detection images and restoration
# input/reference pairs to a TensorBoard summary for visual inspection.
# writer = get_summary_writer('logs','preview')
# idx = 0
# pbar = tqdm(dl)
# for data in pbar:
#     pbar.set_description("preview")
#     det_image = data['det_img'][0].detach().cpu().numpy().transpose([1,2,0])
#     det_image = (det_image.copy()*255).astype(np.uint8)
#
#     uieb_inp = data['uieb_inp'][0].detach().cpu().numpy().transpose([1,2,0])
#     uieb_inp = (uieb_inp.copy()*255).astype(np.uint8)
#
#     uieb_ref = data['uieb_ref'][0].detach().cpu().numpy().transpose([1, 2, 0])
#     uieb_ref = (uieb_ref.copy() * 255).astype(np.uint8)
#
#     bboxes = data['det_bboxes'][0].cpu().numpy()
#     labels = data['det_labels'][0].cpu().numpy().astype(np.int32)
#     visualize_boxes(image=det_image, boxes=bboxes, labels=labels, probs=np.array(np.random.randint(100, 101, size=[len(bboxes)])/100), class_labels=classes)
#     write_image(writer, f'preview_mm2019/{idx}', 'image', det_image, 0, 'HWC')
#     write_image(writer,f'preview_uieb/{idx*2}','image',uieb_inp,0,'HWC')
#     write_image(writer,f'preview_uieb/{idx*2+1}','image',uieb_ref,0,'HWC')
#
#     idx+=1
#
#     # print(data["uieb_inp"].shape)
#
#     writer.flush()
# det_model = det_Model().cuda()
# Load a restoration checkpoint and evaluate it on the UIEBD validation split.
res_model = res_Model().cuda()
state_dict = torch.load('checkpoints/69.pt')
res_model.net.load_state_dict(state_dict)
res_model.eval()
with torch.no_grad():
    ret = res_model.evaluate(dl_val_uieb)  # evaluation result; presumably metrics -- TODO confirm
res_model.train() |
992,243 | d4034f94ca7da18de430a4008b6c88866c5ec43c | '''
Write a Python program to delete the smallest element from the given Heap and then inserts a new item.
'''
import heapq
# Min-heapify the list in place, then atomically pop the smallest element (1)
# and push the new item (0) in one step via heapreplace().
l = [4, 3, 6, 2, 1, 6, 7, 4, 10, 93, 21, 34]
heapq.heapify(l)
heapq.heapreplace(l, 0)
print(l)
992,244 | dc9e476910288c122adb5e6a255aadd6fc2f28ee | """
Patterns for identifying green technologies in text descriptions such as patent abstracts.
Follows Shapira et al. (2014) p.102 http://dx.doi.org/10.1016/j.techfore.2013.10.023
"""
import re
# Patterns compiled once at import time; the original recompiled every
# pattern on every call via re.search(re.compile(...)).
_P01_TERMS = tuple(re.compile(p, re.I) for p in (
    r"\bsustainab\w*\b", r"\bgreen good\w*\b", r"\bgreen technolog\w*\b",
    r"\bgreen innov\w*\b", r"\beco\w*innov\w*\b", r"\bgreen manufac\w*\b",
    r"\bgreen prod\w*\b", r"\bpollut\w*\b", r"\becolabel\b",
    r"\benviron\w* product declarat\w*\b", r"\benviron\w* prefer\w* product\w*\b",
    r"\benviron\w* label\w*\b",
))
_P01_EPD = re.compile(r"\bEPD\b")  # case-sensitive acronym
_P01_ENV = re.compile(r"\benviron\w*\b", re.I)


def pattern01_general(text):
    """Return True if *text* matches a general green-technology keyword.

    'General' keyword group of the Shapira et al. (2014) taxonomy; "EPD"
    only counts when an "environ*" term co-occurs.
    """
    if any(p.search(text) for p in _P01_TERMS):
        return True
    return bool(_P01_EPD.search(text) and _P01_ENV.search(text))
# Precompiled all-purpose environmental keywords (compiled once, not per call).
_P02_TERMS = tuple(re.compile(p, re.I) for p in (
    r"\bnatur\w* environ\w*\b", r"\benviron\w* friend\w*\b",
    r"\benvironment\w* conserv\w*\b", r"\bbiocompat\w*\b", r"\bbiodivers\w*\b",
    r"\bfilter\w*\b", r"\bfiltra\w*\b", r"\bsynth\w* gas\w*\b",
    r"\bregenerat\w*\b", r"\brecircul\w*\b", r"\bgasification\b",
    r"\bgasifier\b", r"\bfluidized clean gas\b", r"\bgas cleaning\b",
))


def pattern02_Environmental_All_purpose(text):
    """Return True if *text* matches an all-purpose environmental keyword."""
    return any(p.search(text) for p in _P02_TERMS)
# Guard: the text must mention a biological-treatment root term at all.
_P03_GUARD = tuple(re.compile(p, re.I) for p in (
    r"\bbioremed\w*\b", r"\bbiorecov\w*\b", r"\bbiolog\w* treat\w*\b",
    r"\bbiodegrad\w*\b",
))
_P03_TERMS = tuple(re.compile(p, re.I) for p in (
    r"\bbiogas\w*\b", r"\bbioreact\w*\b", r"\bpolyolef\w*\b",
    r"\bbiopolymer\w*\b", r"\bdisinfect\w*\b", r"\bbiofilm\w*\b",
    r"\bbiosens\w*\b", r"\bbiosolid\w*\b", r"\bcaprolact\w*\b",
))
_P03_ULTRAVIOLET = re.compile(r"\bultraviol\w*\b", re.I)
_P03_UV = re.compile(r"\bUV\b")  # case-sensitive acronym
_P03_RADIAT = re.compile(r"\bradiat\w*\b", re.I)
_P03_SOL = re.compile(r"\bsol\w*\b", re.I)


def pattern03_Environmental_Biological_treatment(text):
    """Return True for biological-treatment texts.

    Requires a bioremediation/biodegradation guard term plus either a
    specific treatment keyword or an ultraviolet/UV + radiation/solar pair.
    A guard term alone does not match.
    """
    if any(p.search(text) for p in _P03_GUARD):
        if any(p.search(text) for p in _P03_TERMS):
            return True
        if (_P03_ULTRAVIOLET.search(text) or _P03_UV.search(text)) and \
                (_P03_RADIAT.search(text) or _P03_SOL.search(text)):
            return True
    return False
_P04_GUARD = re.compile(r"\bpollut\w*\b", re.I)
_P04_TERMS = tuple(re.compile(p, re.I) for p in (
    r"\bair\w* contr\w*\b", r"\bdust\w* contr\w*\b",
    r"\bparticular\w* contr\w*\b", r"\bair\w* qual\w*\b",
))


def pattern04_Environmental_Air_pollution(text):
    """Return True when *text* mentions pollution together with an air/dust
    control or air-quality keyword."""
    if not _P04_GUARD.search(text):
        return False
    return any(p.search(text) for p in _P04_TERMS)
_P05_GUARD = re.compile(r"\benviron\w* monitor\w*\b", re.I)
_P05_ENV = re.compile(r"\benviron\w*\b", re.I)
_P05_INSTRUMENT = re.compile(r"\binstrument\w*\b", re.I)
_P05_ANALYS = re.compile(r"\banalys\w*\b", re.I)
_P05_LCA = tuple(re.compile(p, re.I) for p in (
    r"\blife\w*cycle analysis\b", r"\blife cycle analys\w*\b",
))


def pattern05_Environmental_Environmental_monitoring(text):
    """Return True for environmental-monitoring texts.

    Requires the "environ* monitor*" guard phrase plus either an
    instrument/analysis co-mention or a life-cycle-analysis phrase.
    """
    if _P05_GUARD.search(text):
        # The guard already implies an "environ*" term; the explicit check is
        # kept to mirror the published keyword rule.
        if _P05_ENV.search(text) and (_P05_INSTRUMENT.search(text) or _P05_ANALYS.search(text)):
            return True
        if any(p.search(text) for p in _P05_LCA):
            return True
    return False
_P06_MARINE_CTRL = re.compile(r"\bmarin\w* control\w*\b", re.I)
_P06_POLLUT = re.compile(r"\bpollut\w*\b", re.I)


def pattern06_Environmental_Marine_pollution(text):
    """Return True when *text* mentions marine control alongside pollution."""
    return bool(_P06_MARINE_CTRL.search(text) and _P06_POLLUT.search(text))
_P07_TERMS = tuple(re.compile(p, re.I) for p in (
    r"\bnois\w* abat\w*\b", r"\bnois\w* reduc\w*\b", r"\bnois\w* lessen\w*\b",
))


def pattern07_Environmental_Noise_vibration(text):
    """Return True if *text* mentions noise abatement/reduction/lessening."""
    return any(p.search(text) for p in _P07_TERMS)
_P08_GUARD = re.compile(r"\bland\b", re.I)
_P08_TERMS = tuple(re.compile(p, re.I) for p in (
    r"\breclam\w*\b", r"\bremediat\w*\b", r"\bcontamin\w*\b",
))


def pattern08_Environmental_Contaminated_land(text):
    """Return True when *text* mentions land together with
    reclamation/remediation/contamination."""
    if not _P08_GUARD.search(text):
        return False
    return any(p.search(text) for p in _P08_TERMS)
_P09_TERMS = tuple(re.compile(p, re.I) for p in (
    r"\bwast\w*\b", r"\bsewag\w*\b", r"\binciner\w*\b",
))


def pattern09_Environmental_Waste_management(text):
    """Return True if *text* mentions waste, sewage or incineration."""
    return any(p.search(text) for p in _P09_TERMS)
_P10_GUARD = tuple(re.compile(p, re.I) for p in (
    r"\bwater treat\w*\b", r"\bwater purif\w*\b", r"\bwater pollut\w*\b",
))
_P10_TERMS = tuple(re.compile(p, re.I) for p in (
    r"\bslurr\w*\b", r"\bsludg\w*\b", r"\baque\w* solution\w*\b",
    r"\bwastewat\w*\b", r"\beffluent\w*\b", r"\bsediment\w*\b",
    r"\bfloccul\w*\b", r"\bdetergen\w*\b", r"\bcoagul\w*\b", r"\bdioxin\w*\b",
    r"\bflow\w* control\w* dev\w*\b", r"\bfluid commun\w*\b",
    r"\bhigh purit\w*\b", r"\bimpur\w*\b", r"\bzeolit\w*\b",
))


def pattern10_Environmental_Water_supply(text):
    """Return True for water-supply texts.

    Requires a water treatment/purification/pollution guard phrase plus a
    specific water-processing keyword; the guard alone does not match.
    """
    if not any(p.search(text) for p in _P10_GUARD):
        return False
    return any(p.search(text) for p in _P10_TERMS)
_P11_TERMS = tuple(re.compile(p, re.I) for p in (
    r"\brecycl\w*\b", r"\bcompost\w*\b", r"\bstock process\w*\b",
    r"\bcoal combust\w*\b", r"\bremanufactur\w*\b",
    r"\bcirculat\w* fluid\w* bed combust\w*\b",
))
_P11_COAL = re.compile(r"\bcoal\b", re.I)
_P11_PCC = re.compile(r"\bPCC\b")  # case-sensitive acronym
_P11_COMBUST = re.compile(r"\bcombust\w*\b", re.I)
_P11_CFBC = re.compile(r"\bCFBC\b")  # case-sensitive acronym


def pattern11_Environmental_Recovery_recycling(text):
    """Return True for recovery/recycling texts, including coal + PCC and
    combustion + CFBC acronym pairs."""
    if any(p.search(text) for p in _P11_TERMS):
        return True
    if _P11_COAL.search(text) and _P11_PCC.search(text):
        return True
    return bool(_P11_COMBUST.search(text) and _P11_CFBC.search(text))
_P12_RENEW = re.compile(r"\brenewabl\w*\b", re.I)
_P12_ENERGY = re.compile(r"\benerg\w*\b", re.I)
_P12_ELECTRIC = re.compile(r"\belectric\w*\b", re.I)


def pattern12_Renewable_All_purpose(text):
    """Return True when *text* mentions renewables together with energy or
    electricity."""
    return bool(_P12_RENEW.search(text) and
                (_P12_ENERGY.search(text) or _P12_ELECTRIC.search(text)))
_P13_GUARD = re.compile(r"\belectric\w*\b", re.I)
_P13_TERMS = tuple(re.compile(p, re.I) for p in (
    r"\btwo basin schem\w*\b", r"\bwave\w* energ\w*\b", r"\btid\w* energ\w*\b",
))


def pattern13_Renewable_Wave_tidal(text):
    """Return True when *text* mentions electricity together with a
    wave/tidal-energy keyword."""
    if not _P13_GUARD.search(text):
        return False
    return any(p.search(text) for p in _P13_TERMS)
_P14_TERMS = tuple(re.compile(p, re.I) for p in (
    r"\bbiomass\w*\b", r"\benzymat\w* hydrolys\w*\b",
    r"\bbio\w*bas\w* product\w*\b",
))


def pattern14_Renewable_Biomass(text):
    """Return True if *text* matches a biomass-energy keyword."""
    return any(p.search(text) for p in _P14_TERMS)
_P15_TERMS = tuple(re.compile(p, re.I) for p in (
    r"\bwind power\w*\b", r"\bwind energ\w*\b", r"\bwind farm\w*\b",
))
_P15_TURBINE = re.compile(r"\bturbin\w*\b", re.I)
_P15_WIND = re.compile(r"\bwind\w*\b", re.I)


def pattern15_Renewable_Wind(text):
    """Return True for wind-energy texts, including turbine + wind
    co-mentions."""
    if any(p.search(text) for p in _P15_TERMS):
        return True
    return bool(_P15_TURBINE.search(text) and _P15_WIND.search(text))
_P16_TERMS = tuple(re.compile(p, re.I) for p in (
    r"\bgeotherm\w*\b", r"\bgeoexchang\w*\b",
))


def pattern16_Renewable_Geothermal(text):
    """Return True if *text* mentions geothermal or geo-exchange energy.

    The original's first check ("whole system*" AND "geotherm*") was a strict
    subset of the plain "geotherm*" check (noted in the source) and is
    therefore subsumed here without behavior change.
    """
    return any(p.search(text) for p in _P16_TERMS)
_P17_GUARD = re.compile(r"\bsolar\w*\b", re.I)
_P17_TERMS = tuple(re.compile(p, re.I) for p in (
    r"\bener\w*\b", r"\blinear fresnel sys\w*\b", r"\belectric\w*\b",
    r"\bcell\w*\b",   # seems too broad, instead: "solar cell"?
    r"\bheat\w*\b",   # seems too broad
    r"\bcool\w*\b",   # seems too broad
    r"\bphotovolt\w*\b", r"\bcdte\b", r"\bcadmium tellurid\w*\b",
    r"\bphotoelectr\w*\b", r"\bphotoactiv\w*\b", r"\bsol\w*gel\w* process\w*\b",
    r"\bevacuat\w* tub\w*\b", r"\bflat plate collect\w*\b",
    r"\broof integr\w* system\w*\b",
))
# Case-sensitive acronyms, only valid alongside the "solar*" guard.
_P17_PV = re.compile(r"\bPV\b")
_P17_PVCU = re.compile(r"\bPVC-U\b")


def pattern17_Renewable_PV_solar(text):
    """Return True for PV/solar texts: a "solar*" mention plus any specific
    solar-technology keyword (photovoltaic terms alone do not match)."""
    if not _P17_GUARD.search(text):
        return False
    if any(p.search(text) for p in _P17_TERMS):
        return True
    return bool(_P17_PV.search(text) or _P17_PVCU.search(text))
_P18_TERMS = tuple(re.compile(p, re.I) for p in (
    r"\blow carbon\b", r"\bzero carbon\b", r"\bno carbon\b", r"\b0 carbon\b",
    # \w* matches zero or more word chars, so these also cover hyphenless
    # compounds like "lowcarbon"; the spaced variants above are still needed.
    r"\blow\w*carbon\b", r"\bzero\w*carbon\b", r"\bno\w*carbon\b",
))


def pattern18_LowCarb_All_purpose(text):
    """Return True if *text* mentions a low/zero/no-carbon phrase."""
    return any(p.search(text) for p in _P18_TERMS)
_P19_TERMS = tuple(re.compile(p, re.I) for p in (
    r"\belectric\w* vehic\w*\b", r"\bhybrid vehic\w*\b",
    r"\belectric\w* motor\w*\b", r"\bhybrid motor\w*\b", r"\bhybrid driv\w*\b",
    r"\belectric\w* car\w*\b", r"\bhybrid car\w*\b",
    r"\belectric\w* machin\w*\b",  # seems too broad
    r"\belectric\w* auto\w*\b", r"\bhybrid auto\w*\b",
    r"\byaw\w* rat\w* sens\w*\b",
))


def pattern19_LowCarb_Alt_fuel_vehicle(text):
    """Return True if *text* matches an electric/hybrid-vehicle keyword."""
    return any(p.search(text) for p in _P19_TERMS)
_P20_TERMS = tuple(re.compile(p, re.I) for p in (
    r"\balternat\w* fuel\w*\b", r"\bmainstream\w* fuel\w*\b",
    r"\bfuel cell\w*\b", r"\bnuclear powe\w*\b", r"\bnuclear stat\w*\b",
    r"\bnuclear plant\w*\b", r"\bnuclear energ\w*\b", r"\bnuclear fuel\w*\b",
    r"\bfuel\w* process\w*\b", r"\bporous\w* struct\w*\b",
    r"\bporous\w* substrat\w*\b", r"\bsolid\w* oxid\w* fuel\w*\b",
    r"\bFischer\w*Tropsch\w*\b", r"\brefus\w* deriv\w* fuel\w*\b",
    r"\brefus\w*deriv\w* fuel\w*\b", r"\bbio\w*fuel\w*\b",
    r"\bsynthetic fuel\b", r"\bcombined heat and power\b",
    r"\bsynth\w* gas\w*\b", r"\bsyngas\b",
))
_P20_NUCLEAR = re.compile(r"\bnuclear\b", re.I)
_P20_ELECTRIC = re.compile(r"\belectric\w*\b", re.I)
_P20_FUEL = re.compile(r"\bfuel\b", re.I)
_P20_BIOTECH = re.compile(r"\bbiotech\w*\b", re.I)
_P20_ETHANOL = re.compile(r"\bethanol\b", re.I)
_P20_HYDROGEN = re.compile(r"\bhydrogen\w*\b", re.I)


def pattern20_LowCarb_Alt_fuels(text):
    """Return True for alternative-fuel texts, including the nuclear +
    electricity and fuel + biotech + (ethanol|hydrogen) compound rules."""
    if any(p.search(text) for p in _P20_TERMS):
        return True
    if _P20_NUCLEAR.search(text) and _P20_ELECTRIC.search(text):
        return True
    return bool(_P20_FUEL.search(text) and _P20_BIOTECH.search(text) and
                (_P20_ETHANOL.search(text) or _P20_HYDROGEN.search(text)))
_P21_TERMS = tuple(re.compile(p, re.I) for p in (
    r"\belectrochem\w* cell\w*\b", r"\belectrochem\w* fuel\w*\b",
    r"\bmembran\w* electrod\w*\b", r"\bion\w* exchang\w* membran\w*\b",
    r"\bion\w*exchang\w* membran\w*\b", r"\belectrolyt\w* cell\w*\b",
    r"\bcatalyt\w* convers\w*\b", r"\bsolid\w* separat\w*\b",
    r"\bmembran\w* separat\w*\b", r"\bion\w* exchang\w* resin\w*\b",
    r"\bion\w*exchang\w* resin\w*\b", r"\bproton\w* exchang\w* membra\w*\b",
    r"\bproton\w*exchang\w* membra\w*\b", r"\bcataly\w* reduc\w*\b",
    r"\belectrod\w* membra\w*\b", r"\btherm\w* engin\w*\b",
))


def pattern21_LowCarb_Electrochemical_processes(text):
    """Return True if *text* matches an electrochemical-process keyword."""
    return any(p.search(text) for p in _P21_TERMS)
_P22_GUARD = tuple(re.compile(p, re.I) for p in (
    r"\bbatter\w*\b", r"\baccumul\w*\b",
))
_P22_TERMS = tuple(re.compile(p, re.I) for p in (
    r"\bcharg\w*\b", r"\brechar\w*\b", r"\bturbocharg\w*\b",
    r"\bhigh capacit\w*\b", r"\brapid charg\w*\b", r"\blong life\b",
    r"\bultra\w*\b", r"\bsolar\b", r"\bno lead\b", r"\bno mercury\b",
    r"\bno cadmium\b", r"\blithium\w*ion\w*\b", r"\blithium\w* ion\w*\b",
))
_P22_LIION = re.compile(r"\bLi\w*ion\w*\b")  # case-sensitive chemical symbol


def pattern22_LowCarb_Battery(text):
    """Return True for advanced-battery texts: a battery/accumulator guard
    term plus a qualifying battery-technology keyword."""
    if not any(p.search(text) for p in _P22_GUARD):
        return False
    if any(p.search(text) for p in _P22_TERMS):
        return True
    return bool(_P22_LIION.search(text))
def pattern23_LowCarb_Additional_energy(text):
    """Return True when *text* mentions an additional energy source."""
    phrases = (
        r"\baddition\w* energ\w* sourc\w*\b",
        r"\baddition\w* sourc\w* of energ\w*\b",
    )
    return any(re.search(p, text, flags=re.I) for p in phrases)
def pattern24_LowCarb_Carbon_capture_storage(text):
    """Return True when *text* mentions carbon capture/storage or CO2."""
    # hoist the shared "carbon" search instead of running it twice
    has_carbon = re.search(r"\bcarbon\b", text, flags=re.I)
    if has_carbon and re.search(r"\bcaptu\w*\b", text, flags=re.I):
        return True
    if has_carbon and re.search(r"\bstor\w*\b", text, flags=re.I):
        return True
    if re.search(r"\bcarbon dioxid\w*\b", text, flags=re.I):
        return True
    # "CO2" is matched case-sensitively, as in the original
    return re.search(r"\bCO2\b", text) is not None
def pattern25_LowCarb_Energy_management(text):
    """Return True when *text* mentions energy-management / efficient-lighting
    terms (LED, OLED, CFL, energy saving, ...)."""
    # (pattern, flags) pairs; acronyms are matched case-sensitively (flags=0)
    checks = (
        (r"\bener\w* sav\w*\b", re.I),
        (r"\bener\w* effic\w*\b", re.I),
        (r"\benerg\w*effic\w*\b", re.I),
        (r"\benerg\w*sav\w*\b", re.I),
        (r"\blight\w* emit\w* diod\w*\b", re.I),
        (r"\bLED\b", 0),
        (r"\borganic LED\b", 0),
        (r"\bOrganic LED\b", 0),
        (r"\bOLED\b", 0),
        (r"\bCFL\b", 0),
        (r"\bcompact fluorescent\w*\b", re.I),
        (r"\benerg\w* conserve\w*\b", re.I),
    )
    return any(re.search(pat, text, flags=f) for pat, f in checks)
def pattern26_LowCarb_Building_technologies(text):
    """Return True when *text* couples building/construction vocabulary with
    an insulation or thermal-technology keyword."""
    # gate: must mention building or construction at all
    if not (re.search(r"\bbuild\w*\b", text, flags=re.I)
            or re.search(r"\bconstruct\w*\b", text, flags=re.I)):
        return False
    thermal_terms = (
        r"\binsula\w*\b",
        r"\bheat\w* retent\w*\b",
        r"\bheat\w* exchang\w*\b",
        r"\bheat\w* pump\w*\b",
        r"\btherm\w* exchang\w*\b",
        r"\btherm\w* decompos\w*\b",
        r"\btherm\w* energ\w*\b",
        r"\btherm\w* communic\w*\b",
        r"\bthermoplast\w*\b",
        r"\bthermocoup\w*\b",
        r"\bheat\w* recover\w*\b",
    )
    return any(re.search(term, text, flags=re.I) for term in thermal_terms)
|
992,245 | a213bc74d490f62f20286dd8dd6c6d666636cf03 | times=('Corinthians','São paulo', 'Flamengo','Cruzeiro', 'ATLÉTICO MINEIRO','Inter',
'Grêmio','Santos','Palmeiras','Goias','Curitiba')
# League-table demo over the `times` tuple defined above.
# Prompts and labels are in Portuguese and must stay as-is (runtime strings).
print("=x"*10)
print('TABELA')  # "TABLE"
print("=x"*10)
# full table, one team per line
for t in times:
    print(t)
print("=x"*10)
print('4 PRIMEIROS ')  # the first four teams
print("=x"*10)
for t in times[0:4]:
    print(t)
print("=x"*10)
print('4 ÚLTIMOS ')  # the last four teams, printed as a tuple slice
print("=x"*10)
print(times[-4:])
print("=x"*10)
print(f' {sorted(times)}')  # alphabetical listing
# team name -> 1-based table position
time=str(input('digite o time q deseja obter a posição: '))
# NOTE(review): raises ValueError if the typed name is not in the tuple
print(times.index(time)+1)
# 1-based position -> team name
timeposição=int(input('digite a posição para obter o time: '))
timeposiçãomenos=timeposição-1
print(times[timeposiçãomenos])
|
992,246 | 51e221493ada278e7d8203c1a5a691519405e4c0 | def print_name():
age = 33
first_name = "Simcha"
#Comment
#print and str and built in fun
print(first_name + " is " + str(age))
print_name()
|
992,247 | 891abe0a82ba61bbdf58c6a7703b6cad5e8178a8 | import sys
import numpy as np
def main():
    """Parse command-line arguments and run `process` on each input.

    Usage: script (--min|--mean|--max|--sum) [file.csv ...]
    With no file names, CSV data is read from stdin instead.
    """
    script = sys.argv[0]  # unused; kept so argv slicing reads naturally
    action = sys.argv[1]
    filenames = sys.argv[2:]
    # NOTE(review): assert is stripped under `python -O`; a real argument
    # check would raise SystemExit/ValueError instead.
    assert action in ['--min', '--mean', '--max', '--sum'], \
        'Action is not one of --min, --mean, --sum or --max: ' + action
    if len(filenames) == 0:
        process(sys.stdin, action)
    else:
        for f in filenames:
            process(f, action)
def process(filename, action):
    """Load a CSV file and print one statistic per data row.

    @param filename: a path or an open file-like object accepted by
        numpy.loadtxt
    @param action: one of '--min', '--mean', '--max', '--sum'
    @return: the per-row values (they are also printed, one per line)
    """
    data = np.loadtxt(filename, delimiter=',')
    # a one-row file loads as a 1-D array; promote it to 2-D so the
    # axis=1 reductions below behave uniformly
    data_shape = data.shape
    if len(data_shape) == 1:
        data = data.reshape((1, len(data)))
    if action == '--min':
        values = data.min(axis=1)
    elif action == '--mean':
        values = data.mean(axis=1)
    elif action == '--max':
        values = data.max(axis=1)
    elif action == '--sum':
        values = data.sum(axis=1)
    for m in values:
        # BUG FIX: was the Python 2 statement `print m`, which is a
        # SyntaxError on Python 3; print(m) behaves the same on both.
        print(m)
    # returning the values (previously implicit None) makes the function
    # testable; existing callers ignore the return value, so this is
    # backward compatible
    return values
main()
|
992,248 | 59f66ce6e51e96b8b0ae5cf8cad550504d867011 | #!/usr/local/bin/python3
import cgi  # NOTE(review): imported but never used

# Emit the CGI response: the Content-type header, then — via the leading
# newline of the triple-quoted string — the blank line that separates
# headers from body, then the HTML page itself.
# NOTE(review): the HTML is malformed (<a> tags are never closed and
# <ul> is used where <li> belongs); left untouched here because the
# string is runtime output.
print("Content-type: text/html")
print('''
<!DOCTYPE html>
<html>
<head>
<title>Resources</title>
</head>
<body>
<h1>Resources</h1>
<ul><a href="python.py">Python</ul>
<ul><a href="linux.py">Linux</ul>
</body>
</html>
''')
|
992,249 | 74cb44d04109699974271599445f217f229902f9 | import tkinter as tk
def builder(page, cid):
    """Build a radio-button group from the component cache of *page*.

    Reads the cached entry `page.components[cid]` (keys: "master",
    "padding", "config") and constructs a titled frame of tk.Radiobutton
    widgets bound to one shared IntVar.

    @param page: object exposing a `components` dict keyed by component id
    @param cid: the component id to build
    @return: (parts, data_getter) — a dict of the created widgets plus the
        shared IntVar, and the module-level accessor for the selection
    """
    cache = page.components[cid]
    master = cache["master"]
    padding = cache["padding"]    # (padx, pady) pair
    config = cache["config"]
    # container
    frame = tk.Frame(master)
    frame.pack(side=config["side"], anchor=config["anchor"],
               padx=padding[0], pady=padding[1])
    # title
    label = tk.Label(frame, text=config["title"])
    label.pack(anchor="w")
    # items container
    items_frame = tk.Frame(frame)
    items_frame.pack(anchor="w")
    int_vars = []          # NOTE(review): never used; kept for byte-compatibility
    radiobuttons = []
    # one IntVar shared by all radio buttons; its value is the item index
    int_var = tk.IntVar()
    # loop in items
    for i, item in enumerate(config["items"]):
        radiobutton = tk.Radiobutton(items_frame,
                                     variable=int_var,
                                     value=i,
                                     text=item)
        radiobuttons.append(radiobutton)
        on_choice = config["on_choice"]
        if on_choice:
            # bind loop variables as lambda defaults so every button calls
            # the callback with the right (page, cid) pair
            command = (lambda page=page,
                              cid=cid,
                              on_choice=on_choice:
                       on_choice(page, cid))
            radiobutton.config(command=command)
        if config["stacking"] == "horizontal":
            radiobutton.pack(side=tk.LEFT, anchor="w")
        else:
            radiobutton.pack(anchor="w")
    # populate
    default = config["default"]
    if default is None:
        pass
    else:
        int_var.set(default)
    # parts
    parts = {"label": label, "frame": frame, "items_frame": items_frame,
             "int_var": int_var, "radiobuttons": radiobuttons}
    return parts, data_getter
def data_getter(page, cid):
    """Return the current selection of the radio group *cid* on *page*.

    @return: (index, text) — the selected item index and its label, or
        (index, None) when the index is None
    """
    cache = page.components[cid]
    chosen = cache["parts"]["int_var"].get()
    label = None
    if chosen is not None:
        label = cache["config"]["items"][chosen]
    return chosen, label
|
992,250 | a5e0e39003b9f696da37726ca05332c2ed7b2aa7 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Read temperature and humidity from living room"""
from kavalkilu import LogWithInflux
from pitools import Sensor
from pitools.peripherals import PiGarage

# NOTE(review): the docstring says "living room" but the log name and the
# peripheral below are the garage — one of the two is stale.
logg = LogWithInflux('garage_temp', log_dir='weather')
# DALLAS = DS18B20-style 1-wire sensor, addressed by its serial number
sensor = Sensor('DALLAS', serial=PiGarage.dallas.sn)
# Take readings & log to db
sensor.measure_and_log_to_db(send_to_ha=True)
logg.debug('Temp logging successfully completed.')
logg.close()
|
992,251 | f09bedafdd65c3fd4c5be88e3f0e1585d32cdcfb | #!/usr/bin/env python
from distutils.core import setup
import py2exe  # importing registers the 'py2exe' command with distutils

# Build script: bundle cycle_submit.py into a single console executable.
# NOTE(review): distutils + py2exe of this vintage are Python-2-era tools.
setup(
    options = {
        'py2exe': {
            'dll_excludes' : ['msvcr71.dll', 'w9xpopen.exe'],  # omit MSVC runtime / Win9x helper
            'compressed' : 1,       # compress the library archive
            'optimize' : 2,         # -OO bytecode optimization
            'ascii' : 1,            # skip bundling the full codec set
            'bundle_files' : 1,     # pack everything into the exe
            'packages' : ['encodings', 'os', 'xml']}
    },
    zipfile = None,                 # no separate library.zip
    console = ["cycle_submit.py"]
)
|
992,252 | 457c31a16d08bafe0d3ce4c97f0c83a333ea30a1 | import webbrowser
from time import sleep
# Map of menu keys to the URL each one opens; 'q' quits the loop.
SITES = {
    'g': 'https://www.google.com/',
    'gi': 'https://github.com/',
    'f': 'https://flat.io/my-library',
    'o': 'https://www.office.com/?auth=2',
    'c': 'https://issaquah.instructure.com/',
    's': 'https://www01.nwrdc.wa-k12.net/scripts/cgiip.exe/WService=wissaqus71/fwemnu01.w',
    't': 'https://teams.microsoft.com/_?culture=en-us&country=US&lm=deeplink&lmsrc=homePageWeb&cmpid=WebSignIn#/school/conversations/General?threadId=19:9e74feca54a646ff89094952c7af701e@thread.tacv2&ctx=channel',
}

while True:  # This keeps the program going until you press 'q'
    user = input('''
~~~~~~WEBSITES~~~~~~
[G]oogle
[Gi]thub
[F]lat.io
[O]ffice 365
[C]anvas
[S]kyward
[T]eams
[Q]uit
>>> ''')  # This is what the user gets prompted; the reply picks the website
    if user == 'q':
        break
    if user in SITES:
        # BUG FIX: the original compared against 'gi)' (stray parenthesis
        # inside the string literal), so the GitHub option never matched.
        webbrowser.open(SITES[user], new=0, autoraise=True)
    else:
        print('Please enter valid command')
    sleep(0.6)
|
992,253 | 09135321567ca71d147730f0b47897ab90d113e6 | import curses, time
''' more text animation
>>> stdscr = curses.initscr()
>>> dims = stdscr.getmaxyx() 'return a tuple (height,width) of the window'
row = 0,1,2,...,24
col = 0,1,2,...,79
>>> stdscr.addstr(row,col,'text',curse.A_REVERSE)
>>> stdscr.nodelay(1) 'If yes is 1, getch() will be non-blocking.'
'''
from helper import *
import time
class direction(object):
    # Mutable 2-D step shared with the main loop: reset to (0, 0) each
    # frame, then set to -1/0/+1 per axis according to the key pressed.
    x = y = 0
class Animation(object):
    """Curses demo: move a 'Hello World' line around a sub-window with
    vim-style h/j/k/l keys; ESC quits."""
    def __init__(self,screen):
        self.screen = screen
        self.windows= [self.screen]
        # NOTE(review): init_colors and the color attributes used below
        # (self.white, self.green, self.red_w, self.green_w) are not
        # defined in this class — presumably injected by the `helper`
        # module's wrapper; TODO confirm.
        self.init_colors()
        self.screen_instruction()
        self.mainloop(True)
    def screen_instruction(self):
        # Static help text describing the curses coordinate system.
        dimension = ">>> curses.LINES,curses.COLS == stdscr.getmaxyx()"
        rows = ">>> rows = (0,1,2,..,curses.LINES-1)"
        cols = ">>> cols = (0,1,2,..,curses.COLS-1)"
        self.screen.addstr(2,10,'more text animation',self.white)
        self.screen.addstr(3,10,dimension,self.green)
        self.screen.addstr(4,10,rows,self.green)
        self.screen.addstr(5,10,cols,self.green)
    def update(self):
        # Stage every window's refresh, then repaint the screen once —
        # the standard noutrefresh/doupdate batching idiom.
        for win in self.windows:
            win.noutrefresh()
        curses.doupdate()
    def mainloop(self,running):
        """Animation loop: draw the text, poll one key, move one step."""
        # set up window
        rows,cols = 20,50 #curses.COLS-20 #msgwin.getmaxyx()
        # `newwin` comes from the helper star-import — TODO confirm signature
        msgwin = newwin(rows,cols,7,5,self.green_w)
        #msgwin.nodelay(True)
        self.windows.append(msgwin)
        # initialize variables
        text = 'Hello World'
        row,col = 0,0
        DOWN = RIGHT = +1; UP = LEFT = -1
        # loop
        while running:
            msgwin.erase()
            msgwin.addstr(row,col,text)
            # bottom-right corner shows the current coordinates
            msgwin.addstr(rows-1,cols-6,'{:02},{:02}'.format(row,col),self.red_w)
            self.update()
            time.sleep(0.05)
            q = msgwin.getch()
            direction.x = direction.y = 0
            if q == 27:  # ESC
                running = False
            # movement keys are clamped so the text stays inside the window
            if q == ord('k') and row > 0:
                direction.y = UP
            elif q == ord('j') and row < rows-1:
                direction.y = DOWN
            elif q == ord('h') and col > 0:
                direction.x = LEFT
            elif q == ord('l') and col < cols - len(text):
                direction.x = RIGHT
            row += direction.y
            col += direction.x
if __name__ == '__main__':
    # `wrapper` comes from the helper star-import; presumably it mirrors
    # curses.wrapper (init screen, call Animation, restore terminal) — TODO confirm
    wrapper(Animation)
|
992,254 | dfa6683c93c08d21cdd45c98ba6d384b4da1b27a | """
Given an integer array nums of unique elements, return all possible subsets (the power set).
The solution set must not contain duplicate subsets. Return the solution in any order.
"""
def _power_set(nums):
    """Return every subset of *nums* (the power set), preserving order.

    Starts from the empty subset; each input value doubles the collection
    by extending every existing subset with that value.
    """
    res = [[]]
    for value in nums:
        res += [subset + [value] for subset in res]
    return res

# Read a comma-separated list of integers and print its power set.
n = list(map(int, input().split(',')))
print(_power_set(n))
|
992,255 | 800177dd16111bce3504687a0634c2917445818b |
import argparse
from PIL import Image
def asci(text):
    """Convert *text* to a list of 8-bit binary strings, one per character."""
    return [format(ord(symbol), '08b') for symbol in text]
def pixelgen(pic, text):
    """Yield pixels with *text* embedded in their least-significant bits.

    Each character consumes three RGB pixels (9 channel values): the first
    8 values carry the character's bits via their parity, and the 9th
    value's parity marks whether this is the final character (odd = stop),
    which is how decode() knows when to terminate.

    @param pic: iterable of pixel tuples (RGB or RGBA; only [:3] is used)
    @param text: the message to hide
    """
    lentext = len(text)
    piciter = iter(pic)
    bintext = asci(text)
    for i in range(lentext):
        # flatten the next three pixels into 9 channel values
        pixels = [ value for value in piciter.__next__()[:3] +
                   piciter.__next__()[:3] +
                   piciter.__next__()[:3] ]
        for j in range(8):
            # force channel j's parity to equal the j-th message bit
            if (bintext[i][j] == '0') and (pixels[j] % 2 != 0):
                pixels[j] = (pixels[j] + 1) % 256
            elif (bintext[i][j] == '1') and (pixels[j] % 2 == 0):
                pixels[j] = (pixels[j] + 1) % 256
        if (i == lentext - 1) :
            # last character: terminator channel must be odd
            if (pixels[-1] % 2 == 0):
                pixels[-1] = (pixels[-1] + 1) % 256
        else:
            # more characters follow: terminator channel must be even
            if (pixels[-1] % 2 != 0):
                pixels[-1] = (pixels[-1] + 1) % 256
        pixels = tuple(pixels)
        # hand the three modified pixels back one at a time
        yield pixels[0:3]
        yield pixels[3:6]
        yield pixels[6:9]
def encode(pic, text):
    """Return a copy of *pic* with *text* embedded in its pixels' LSBs.

    Pixels are rewritten left-to-right, top-to-bottom, three per hidden
    character; the original image object is left untouched.
    """
    stego = pic.copy()
    width = stego.size[0]
    for idx, pixel in enumerate(pixelgen(stego.getdata(), text)):
        row, col = divmod(idx, width)
        stego.putpixel((col, row), pixel)
    return stego
def decode(pic):
    """Recover a message hidden by encode()/pixelgen().

    Reads three pixels (9 channel values) per character: the parities of
    the first 8 values are the character's bits; an odd 9th value marks
    the final character and ends the scan.
    """
    stream = iter(pic.getdata())
    recovered = ''
    while True:
        flat = [channel for channel in next(stream)[:3] +
                next(stream)[:3] +
                next(stream)[:3]]
        bits = ''.join('1' if flat[k] % 2 else '0' for k in range(8))
        recovered += chr(int(bits, 2))
        if flat[-1] % 2 != 0:
            return recovered
# CLI: with -f and -m, hide the text file's contents in the image and save
# it in place; with only -f, extract the hidden text into out.txt.
parser = argparse.ArgumentParser(prefix_chars='-')
parser.add_argument('-f', type=str, help='image file')
parser.add_argument('-m', type=str, help='text file')
args = parser.parse_args()
if (args.f != None) and (args.m != None):
    # encode mode
    img = Image.open(args.f, 'r')
    txt = open(args.m)  # NOTE(review): never closed; a `with` block would be safer
    msg = str(txt.read())
    # capacity check: three pixels per hidden character
    if len(img.getdata())/3 <= len(msg):
        print('Text is too long for this picture.')
    else:
        # NOTE(review): split('.') breaks for names with multiple dots or
        # dotted directories — TODO confirm acceptable for expected inputs
        name, ext = str(args.f).split('.')
        nimg = encode(img, msg)
        #nimg.save(name + '-new.' + ext)
        nimg.save(name + '.' + ext)  # overwrites the input image
elif (args.f != None):
    # decode mode
    img = Image.open(str(args.f), 'r')
    with open("out.txt", "w") as text_file:
        text_file.write(decode(img))
    #print(decode(img))
else:
    parser.print_help()
|
992,256 | 2f5a6ca7e418b9aa391f3683eac446293e412fe2 | from src.main.backend.models.Kingdom import Kingdom
from typing import List
class Ruler(Kingdom):
    """ A class to model a ruler.

    A Ruler is a Kingdom that additionally tracks allied kingdoms and can
    send messages encrypted with the crypto strategy inherited from
    Kingdom.
    """
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # allied kingdoms in insertion order; name-mangled (double
        # underscore) to keep the list private to this class
        self.__allies = list()

    def add_ally(self, ally: Kingdom):
        # Register another kingdom as an ally of this ruler.
        self.__allies.append(ally)

    def get_allies(self) -> List[Kingdom]:
        # Returns the internal list itself (not a copy).
        return self.__allies

    def send_message(self, message: str, kingdom_emblem: str) -> str:
        """
        send_message() -> This method sends the encrypted message to a kingdom.

        :param message: The message to be encrypted.
        :param kingdom_emblem: The kingdom_emblem of the kingdom that receives the message.
        :return: A string representing the encrypted message.
        """
        # get_crypto_strategy() is presumably provided by Kingdom — TODO confirm
        crypto_strategy = self.get_crypto_strategy()
        # the cipher key is the emblem length, so the receiving kingdom
        # can derive the same key from its own emblem
        cipher_key = len(kingdom_emblem)
        encrypted_message = crypto_strategy.encrypt(message, cipher_key)
        return encrypted_message
|
992,257 | 70b9586dfd114bfc00deff2bfea390973e39e0b9 | from django.conf.urls import patterns, include, url
from django.contrib import admin
# URLconf for the simplesite project.
# NOTE(review): `patterns()` and dotted-string view references were
# deprecated in Django 1.8 and removed in 1.10; this module targets an
# old Django release.
urlpatterns = patterns('',
    # Examples:
    # url(r'^$', 'simplesite.views.home', name='home'),
    # url(r'^blog/', include('blog.urls')),

    url(r'^admin/', include(admin.site.urls)),
    url(r'^$', 'simplesite.views.home', name='home'),
    url(r'^about/$', 'simplesite.views.about', name='about'),
    url(r'^json/$', 'simplesite.views.json', name='json'),
    url(r'^two/$', 'simplesite.views.two', name='two'),
    url(r'^three/$', 'simplesite.views.three', name='three'),
    url(r'^xone/$', 'simplesite.views.xone', name='xone'),
    url(r'^xtwo/$', 'simplesite.views.xtwo', name='xtwo'),
    url(r'^five/$', 'simplesite.views.five', name='five'),
    url(r'^music/$', 'simplesite.views.music', name='music'),
    url(r'^four/$', 'simplesite.views.four', name='four'),
    # NOTE(review): a pattern passed to include() should not end with '$'
    # — `r'^simpleapp/$'` prevents any sub-URL of simpleapp from matching.
    url(r'^simpleapp/$', include('simpleapp.urls')),
)
|
992,258 | 38a56dd394ae4e418f0aeb3276c0effc412da612 | """
Various column classes
@author: Martin Kuemmel, Jonas Haase
@organization: Space Telescope - European Coordinating Facility (ST-ECF)
@license: Gnu Public Licence
@contact: mkuemmel@eso.org
$LastChangedBy: mkuemmel $
$LastChangedDate: 2008-07-03 10:27:47 +0200 (Thu, 03 Jul 2008) $
$HeadURL: http://astropy.scipy.org/svn/astrolib/trunk/asciidata/Lib/asciicolumn.py $
"""
from __future__ import absolute_import
__version__ = "Version 1.0 $LastChangedRevision: 503 $"
from .asciielement import *
from .asciierror import *
from .asciiutils import *
class NullColumn(object):
    """
    Class for an empty column with 'None'-elements

    Serves as the base class of AsciiColumn: it only allocates the
    row storage, pre-filled with None for every row.
    """
    def __init__(self, nrows):
        """
        Constructor for the class

        @param nrows: the number of rows
        @type nrows: integer
        """
        # one None per requested row; the previous implementation built
        # this via `list(map(dummy_list.append, range(nrows)))`, abusing
        # the fact that list.append returns None — same result, but as an
        # obscure side effect (and it also assigned _nrows twice)
        self._data = [None] * nrows
        # set the row number
        self._nrows = nrows
class AsciiColumn(NullColumn):
    """
    Class to store the ascii data into.

    Holds one typed column (str/int/float) in a private list together
    with presentation state: name, unit, comment, print formats and the
    null-representation string. Assigning a value whose type does not
    match the column's triggers automatic promotion to the "higher" type
    via TypeTransformator.
    """
    def __init__(self, element=None, colname=None, null=None, nrows=None):
        """
        Constructor for the column class.

        Instances of this column class hold the data in
        a private list. Moreover there exist few
        attributes in addition. A column does have
        a type, which is either string/integer/float.
        The column can be undefined, which means it contains
        only 'None', but the default type is string.

        @param element: list of elements to start the data with
        @type element: string/integer/float
        @param colname: the name of the column
        @type colname: string
        @param null: sequence whose first entry is the null string
        @param nrows: number of empty rows when no elements are given
        """
        self.colname = colname
        self.unit = ''
        self.colcomment =''
        self._data = []
        self._defined = 0
        self._type = str
        self._format = ['%10s','%10s']   # [value format, null format]
        self._nrows = 0
        # set the default null string
        if null:
            self._null = [null[0].strip()]
        else:
            self._null = ['*']
        if not element and nrows:
            # no data: delegate to NullColumn for an all-None column
            super(AsciiColumn, self).__init__(nrows)
        else:
            # go over each element in the list
            for item in element:
                # check for 'None'
                if item != None:
                    # check whether the column is defined
                    if not self._defined:
                        # if undefined, set the type,
                        # append the element, set to defined
                        elem = ForElement(item)
                        self._type = elem.get_type()
                        self._format = elem.get_fvalue()
                        self._data.append(elem.get_tvalue())
                        self._defined = 1
                        self._nrows += 1
                    else:
                        # if defined, add the element
                        # (add_element also increments _nrows)
                        self.add_element(item)
                else:
                    # simply append the 'None'
                    self._data.append(item)
                    # increment the number of rows
                    self._nrows += 1

    def __getitem__(self, index):
        """
        Defines the list operator for indexing

        This method returns the value at a given index.
        In the current class this means the method returns
        the column value at the requested index.

        @param index: the index of the column to be returned
        @type index: integer
        @return: the column value
        @rtype: string/integer/float
        """
        # check whether the requested index is available.
        # raise an error if not
        # [BUG] ? self._nrows-1: -> self._nrows:
        # NOTE(review): negative indices bypass this check and fall
        # through to Python's own list indexing below.
        if index > self._nrows-1:
            err_msg = 'Index: '+str(index)+' is larger than nrows: '\
                      +str(self._nrows)+'!!'
            raise Exception(err_msg)
        # return the value at the index
        return self._data[index]

    def __setitem__(self, index, value):
        """
        Defines the list operator for indexed assignment

        The method inserts a value into the column at the
        specified index. It is not possible to create
        extra rows with this method. Only existing
        elements can be overwritten.

        @param index: the index to put the column value to
        @type index: integer
        @param value: the value to assign to an index
        @type value: string/integer/float
        """
        # check whether the indexed element does exist
        # raise an error if not
        if index > self._nrows-1:
            err_msg = 'Index: '+str(index)+' is larger than nrows: '\
                      +str(self._nrows)+'!!'
            raise Exception(err_msg)
        if value != None:
            # check whether the column is defined
            if not self._defined:
                # create an element object
                val = ForElement(value)
                # if not, set the type and the defined flag
                self._type = val.get_type()
                self._format = val.get_fvalue()
                self._defined = 1
            else:
                # create an element object
                val = ValElement(value)
                # if defined, check whether the element
                # type matches the column type
                if self._type != val.get_type():
                    # create a transformator object if the types do not match
                    type_trans = TypeTransformator(self._type, val.get_type())
                    # check whether the element is transformable
                    if type_trans.istransf:
                        # determine the transformed value
                        # the old code uses the typed value
                        # as the basis for the transformation:
                        #trans_value = type_trans.to_higher_type(val.get_tvalue())
                        # the new code uses the string value
                        # as the basis for the transformation:
                        trans_value = type_trans.to_higher_type(val.get_value())
                        val.set_tvalue(trans_value)
                    else:
                        # the new value's type is the higher one:
                        # promote the whole column instead
                        self._change_column_type(type_trans, value)
            # set the column element to the transformed value
            self._data[index] = val.get_tvalue()
        else:
            self._data[index] = None

    def _change_column_type(self, t_trans, value):
        """
        Changes the type of a column

        The method changes the type of a column. It transforms
        all elements into the new type and also defines the
        new type and formats.

        @param t_trans: the transformator object
        @type t_trans: TypeTransformator
        @param value: the template value
        @type value: string/integer/float
        """
        # create an element object
        val = ForElement(value)
        # if not set the type and the define flagg
        self._format = val.get_fvalue()
        # set the type to the one in the transformator object
        self._type = t_trans.higher_type
        # go over all data
        for index in range(len(self._data)):
            if self._data[index] != None:
                # transform all non-Null entries
                self._data[index] = t_trans.to_higher_type(self._data[index])

    def _get_nullformat(self, newformat):
        """
        Find the null-format

        The method finds an appropriate format for the null
        elements for a given new format and the column type.
        This null-format may be smaller than needed to fully
        represent the null element.

        @param newformat: the new column format
        @type newformat: string
        @return: the format for the null elements
        @rtype: string
        """
        # derive the width by formatting a sample value of the column type
        if self._type == int:
            length = len(str(newformat % 1))
            return '%'+str(length)+'s'
        elif self._type == float:
            length = len(str(newformat % 1.0))
            return '%'+str(length)+'s'
        else:
            return newformat

    def __iter__(self):
        """
        Provide an iterator object.

        The function provides and returns an iterator object
        for the AstroColumnData class. Due to this iterator object
        sequences like:
        for elem in ascii_column_object:
            <do something with elem>
        are possible.
        """
        # return AsciiColumnIter(self)
        return AsciiLenGetIter(self)

    def __str__(self):
        """
        Print the column elements to the screen.

        The method prints the column name and the elements onto
        the screen. The format is the column format, each
        element is written onto a new line.

        @return: the string representation of the column
        @rtype: string
        """
        # make the header
        bigstring = 'Column: '+str(self.colname)
        # go over each row
        for index in range(self._nrows):
            # append the string repr. to the output
            bigstring += '\n'+self.fprint_elem(index)
        # return the string
        return bigstring

    def __len__(self):
        """
        Defines a length method for the object

        @return: the length of the object
        @rtype: integer
        """
        return self._nrows

    def __delitem__(self, index):
        """
        Deletes an index.

        The method deletes a column row specified in the input.
        The column is specified by the index.

        @param index: row index
        @type index: integer
        """
        # delete the column
        del self._data[index]
        # adjust the number of columns
        self._nrows -= 1

    def __delslice__(self, start, end):
        """
        Deletes an index slice.

        The method deletes a slice from the AsciiColumn
        data. Start and end index to be deleted are specified
        in the input. This standard method redirects calls such
        as "del gaga[i:j]".

        NOTE(review): __delslice__ is only invoked by Python 2; on
        Python 3 `del col[i:j]` goes through __delitem__ with a slice
        object, which the __delitem__ above happens to handle, although
        the subsequent `self._nrows -= 1` then under-counts the removal.

        @param start: starting row index
        @type start: integer
        @param end: ending row index
        @type end: integer
        """
        # delete the slice from the data
        del self._data[start:end]
        # determined the length of the data element
        self._nrows = len(self._data)

    def rename(self, newname):
        """
        Rename a column

        The method renames the column. The old column
        name is simply overwritten.

        @param newname: the new column name
        @type newname: string
        """
        # set the new column name
        self.colname = newname

    def reformat(self, newformat):
        """
        Gives a new column format

        The method gives a new format to a column.
        The old column format is simply overwritten.

        @param newformat: the new column format
        @type newformat: string
        """
        # check whether the column is defined
        if self._defined:
            # get the appropriate null-format
            nullformat = self._get_nullformat(newformat)
            # set the new formats
            self._format = [newformat, nullformat]
        else:
            # first the column type must be defined
            raise Exception('The data type of this column is not yet defined!')

    def add_element(self, element):
        """
        Adds an element to the column

        The method adds an element at the end of the data list
        of the column object. Type checking is performed, and
        an incompatible element promotes the whole column type.

        @param element: the value to append (None appends a null row)
        @type element: string/integer/float
        """
        # check for 'None'
        if element != None:
            # check whether the column is defined
            if not self._defined:
                # create an element object
                elem = ForElement(element)
                # if not, set the type and the defined flag
                self._type = elem.get_type()
                self._format = elem.get_fvalue()
                self._defined = 1
            else:
                # create an element object
                elem = ValElement(element)
                # if defined, check whether the element
                # type matches the column type
                if self._type != elem.get_type():
                    # create a transformator object if the types do not match
                    type_trans = TypeTransformator(self._type,elem.get_type())
                    # check whether the element is transformable
                    if type_trans.istransf:
                        # determine the transformed value
                        trans_value = type_trans.to_higher_type(elem.get_tvalue())
                        elem.set_tvalue(trans_value)
                    else:
                        # change the entire column type
                        self._change_column_type(type_trans, element)
            # set the column element to the given value
            self._data.append(elem.get_tvalue())
            # print elem.get_tvalue()
        else:
            # append a 'None' element
            self._data.append(element)
        # increment the number of rows
        self._nrows += 1

    def fprint_elem(self, index):
        """
        Create and return a formatted string representation for an element.

        The method creates a formatted string representation
        for an element in an AsciiColumn. The element is specified
        by the row index. The string representation is returned.

        @param index: the index of the element
        @type index: integer
        @return: the string representation of the element
        @rtype: string
        """
        # check whether the row exists
        # raise an error if not
        if index > self._nrows-1:
            err_msg = 'Index: '+str(index)+' is larger than nrows: '\
                      +str(self._nrows)+'!!'
            raise Exception(err_msg)
        # check for 'None'-entry
        if self._data[index] != None:
            # valid entries are formatted with the first format
            return self._format[0] % self._data[index]
        else:
            # None entries get the second format
            return self._format[1] % self._null[0]

    def tonumarray(self):
        """
        Transforms column to a numarray

        If possible, the column data is transformed to a
        numarray object and returned. Type specific numarrays
        are created to shorten the effort in the numarray module.

        NOTE(review): numarray is a long-dead predecessor of NumPy;
        this method only exists for legacy callers and will fail on
        any modern installation.

        @return: the numarray representation of the data
        @rtype: numarray
        """
        import numarray
        # initialize the return
        narray = None
        if None in self._data:
            raise Exception('There are "None" elements in the column. They can not be\ntransformed to numarrays!')
        # check for string column
        if self._type == str:
            # import CharArrays
            import numarray.strings
            # transform the array to CharArrays
            narray = numarray.strings.array(self._data)
        elif self._type == int:
            # transform the data to integer numarray
            narray = numarray.array(self._data, type='Int32')
        elif self._type == float:
            # transform the data to float numarray
            narray = numarray.array(self._data, type='Float64')
        else:
            # raise an exception in case of string column
            err_msg = 'Can not transform column type: '+str(self._type)+' to numarray!'
            raise Exception(err_msg)
        # return the result
        return narray

    def tonumpy(self):
        """
        Transforms column to a numpy

        The column data is transformed to a numpy object
        and returned. Columns containing None entries become
        masked arrays with the None positions masked.

        @return: the numpy representation of the data
        @rtype: numpy/numpy masked array
        """
        import numpy
        from numpy import ma
        # initialize the return
        narray = None
        if None in self._data:
            # define a lambda function
            # to create the mask array
            make_mask = lambda x: x == None
            # create the numpy array,
            # making on the fly the mask
            narray = numpy.ma.array(self._data, mask=list(map(make_mask, self._data)))
        else:
            # convert the list to a numpy object
            narray = numpy.array(self._data)
        # return the numpy object
        return narray

    def copy(self):
        """
        Returns a copy of the AsciiColumn

        The method creates a deep copy of the instance
        itself. The new AsciiColumn is then returned.

        @return: the copy of the current column
        @rtype: AsciiColumn
        """
        data_copy = []
        # make a copy of the data
        for ii in range(self._nrows):
            data_copy.append(self._data[ii])
        # explicitly create a column from the data copy
        self_copy = AsciiColumn(element=data_copy, colname=self.colname,
                                null=self._null)
        # explicitly transport the format
        self_copy._format = self._format
        # return the new column
        return self_copy

    def get_nrows(self):
        """
        Returns the number of rows.

        @return: the number of rows
        @rtype: integer
        """
        return self._nrows

    def get_type(self):
        """
        Returns the column type.

        @return: the column type
        @rtype: <types>-name
        """
        return self._type

    def set_type(self, type):
        """
        Sets the column type.

        NOTE(review): the parameter name shadows the builtin `type`;
        it is kept because callers may pass it by keyword.

        @param type: the column type
        @type type: <types>-name
        """
        self._type = type

    def get_format(self):
        """
        Returns the column format

        @return: the format of the column
        @rtype: string
        """
        return self._format[0]

    def get_defined(self):
        """
        Returns the defined flag.

        @return: the defined status of the column
        @rtype: integer
        """
        return self._defined

    def set_defined(self):
        """
        Sets the column status to defined.
        """
        self._defined = 1

    def set_unit(self,unit):
        """
        Sets the column unit

        @param unit: the column unit
        @type unit: string
        """
        self.unit = unit

    def get_unit(self):
        """
        Returns the column unit

        @return: the unit of the column
        @rtype: string
        """
        return self.unit

    def set_colcomment(self,colcomment):
        """
        Sets the colcomment

        @param colcomment: the column comment
        @type colcomment: string
        """
        self.colcomment = colcomment

    def get_colcomment(self):
        """
        Returns the column colcomment

        @return: the comment of the column
        @rtype: string
        """
        return self.colcomment

    def info(self):
        """
        Prints some column info onto the screen.

        @return: the string representing the information
        @rtype: string
        """
        # define the return string
        bigstring = ''
        # assemble the information on the column
        bigstring += 'Column name: ' + self.colname + '\n'
        bigstring += 'Column type: ' + str(self._type) + '\n'
        bigstring += 'Column format: ' + str(self._format) + '\n'
        bigstring += 'Column null value : ' + str(self._null) + '\n'
        if self.unit:
            bigstring += 'Column unit : ' + self.unit + '\n'
        if self.colcomment:
            bigstring += 'Column comment : ' + self.colcomment + '\n'
        # return the result
        return bigstring

    def collheader(self,n,commentesc):
        '''
        returns string as used by column information in the header

        @param n: Column number, 0 is first column
        @type n: int
        @param commentesc: the currently used escape sequence for comments
        @type commentesc: string
        @return: the full line of column definition to append to the header
        @rtype: string
        '''
        outstring = commentesc + ' ' + str(n+1)
        # NOTE(review): the two branches below appear identical; the
        # alignment padding (wider for single-digit column numbers) was
        # presumably lost to whitespace mangling — TODO confirm against
        # the upstream asciidata source.
        if n>8:
            outstring += ' '
        else:
            outstring += ' '
        outstring += self.colname
        if self.colcomment:
            outstring += ' '+self.colcomment
        if self.unit:
            outstring += ' ['+self.unit +']'
        outstring += '\n'
        return outstring
|
992,259 | bfb013a43669791a35b729e95de17319bc443c36 | # -*- coding: utf-8 -*-
# 455139656:AAE9id16VLNGI8gz4dBtCSs2WE8Jp1zsu1k
import threading
import telebot
from telebot import types
from event.EventStorage import read_all_events
from event.EventStorage import save_events
from user.User import User
from user.UserStorage import read_all_users, save_user
# SECURITY(review): the bot token is hard-coded here (and also pasted in a
# comment at the top of the file); it should be revoked and loaded from an
# environment variable instead of being committed to source control.
bot = telebot.TeleBot('455139656:AAE9id16VLNGI8gz4dBtCSs2WE8Jp1zsu1k')
subscribe = 'Подписаться'  # reply-button label: "Subscribe" (Russian)
genres = ['rock', 'pop', 'jazz']
@bot.message_handler(content_types=['text'])
def process(message):
    """Handle plain-text messages.

    /start shows an inline subscribe button; pressing "Подписаться" shows
    a keyboard of genres; choosing a genre stores the subscription and
    removes the keyboard. User-facing strings are Russian and unchanged.
    """
    if message.text == '/start':
        markup = types.InlineKeyboardMarkup()
        markup.add(types.InlineKeyboardButton(text=subscribe, url='ya.ru', callback_data="1"))
        bot.send_message(message.chat.id, 'Выберите действие', reply_markup=markup)
    elif message.text == subscribe:
        markup = types.ReplyKeyboardMarkup()
        markup.row(*genres)
        bot.send_message(message.chat.id, 'Выберите стиль', reply_markup=markup)
    elif message.text in genres:
        # idiom fix: `x in genres` replaces genres.__contains__(x)
        save_user(User(message.from_user.id, [message.text]))
        bot.send_message(message.chat.id, 'Вы подписаны на ' + message.text, reply_markup=types.ReplyKeyboardRemove())
@bot.inline_handler(lambda query: len(query.query) > 0)
def query_text(query):
    # Debug stub: dump non-empty inline queries to stdout.
    print(query)

# NOTE(review): this second handler reuses the name `query_text`, shadowing
# the inline handler above at module level. Both handlers remain registered
# (the decorators captured the function objects at definition time), but
# one of them should be renamed for clarity.
@bot.callback_query_handler(func=lambda call: True)
def query_text(call):
    # Debug stub: dump all callback queries to stdout.
    print(call)
def set_broadcast_timer():
    """Broadcast pending events now, then re-arm a 30-second timer.

    Each tick schedules itself again on a fresh timer thread.
    NOTE(review): the timer is never cancelled, so the loop only stops
    when the process is killed.
    """
    broadcast_all()
    timer = threading.Timer(30, set_broadcast_timer)
    timer.start()
def broadcast_all():
    """Notify every subscribed user about events matching their genres.

    A user is messaged about an event when they share at least one genre
    with it and have not been notified before; notified user ids are
    recorded on the event and the events are saved back afterwards.
    """
    all_events = read_all_events()
    all_users = read_all_users()
    for event in all_events:
        for user in all_users:
            # idiom fixes: truthy set intersection instead of
            # `len(set(...).intersection(...)) > 0`, and `not in` instead
            # of `.__contains__`; behavior is unchanged
            shares_genre = set(event.genres) & set(user.genres)
            if shares_genre and user.user_id not in event.sent_to:
                bot.send_message(user.user_id,
                                 event.place + " приглашает вас " + event.time + " на концерт исполнителя " + event.artist)
                event.sent_to.append(user.user_id)
    save_events(all_events)
# Start the periodic broadcast loop, then block on Telegram long polling.
set_broadcast_timer()
bot.polling(none_stop=True)
# https://github.com/ignatyev/concerts-telegram-bot
|
992,260 | d65e0a180f7a67ac9ceb831aafec7a251d23b59f | from turtle import *
def hinhvuong(solan, dodai):
right(90)
for i in range (solan*4):
forward(dodai)
left(90)
dodai = dodai +2
speed(-1)
bgcolor("green")
color("blue")
hinhvuong(20,10)
|
992,261 | 48f9cc4932f4ca05f9c73990f68f569f000c3aaa | #!/usr/bin/env python
from check_splunk import CheckSplunk
import sys
SPLUNK_SERVER = sys.argv[1]
SPLUNK_USERNAME = sys.argv[2]
SPLUNK_PASSWORD = sys.argv[3]
try:
SPLUNK_POOL = sys.argv[4]
except:
SPLUNK_POOL = 'auto_generated_pool_enterprise'
WARN_PERCENT = 75
CRIT_PERCENT = 90
args = [
"-H", SPLUNK_SERVER,
"-u", SPLUNK_USERNAME,
"-p", SPLUNK_PASSWORD,
"-L", SPLUNK_POOL,
"-W", str(WARN_PERCENT),
"-C", str(CRIT_PERCENT),
]
CheckSplunk(args).check_license().exit()
|
992,262 | 264c7c0db070ade8eb70290b9e476d012191c6b2 | from django.urls import path
from . import views
urlpatterns = [
path('', views.home, name='home'),
path('about/', views.about, name='about'),
path('accounts/signup/', views.signup, name='signup'),
path('accounts/signup/vintner', views.VintnerSignUpView.as_view(), name='vintner_signup'),
path('accounts/signup/enthusiast', views.EnthusiastSignUpView.as_view(), name='enthusiast_signup'),
path('glossary/', views.glossary, name='glossary'),
path('profile/', views.profile, name='profile'),
### Winery ###
path('findwineries/', views.find_wineries, name='find_wineries'),
path('winery/create/', views.create_winery, name='winery_create'),
path('winery/<int:winery_id>', views.winery_detail, name="winery_detail"),
path('winery/<int:winery_id>/update/', views.winery_update, name='winery_update'),
path('winery/<int:pk>/delete/', views.WineryDelete.as_view(), name='winery_delete'),
path('winery/search', views.winery_search, name="winery_search"),
### Wines ###
path('findwines/', views.home, name='home'),
path('mywines/', views.my_wines, name='my_wines'),
path('wine/<int:pk>/', views.WineDetail.as_view(), name='wine_detail'),
path('winery/<int:winery_id>/add_wine/', views.create_wine, name='add_wine'),
path('wine/<int:wine_id>/update/', views.wine_update, name='wine_update'),
path('wine/<int:pk>/delete/', views.WineDelete.as_view(), name='wine_delete'),
path('wine/search', views.wine_search, name="wine_search"),
path('wine/mapsearch', views.wine_search_map, name="wine_search_map"),
### Grapes ###
path('mygrapes/', views.my_grapes, name='my_grapes'),
### Comments ###
path('wine/<int:wine_id>/add_comment/', views.create_comment, name='add_comment'),
path('comment/<int:pk>/update/', views.CommentUpdate.as_view(), name='comment_update'),
path('comment/<int:pk>/delete/', views.CommentDelete.as_view(), name='comment_delete'),
] |
992,263 | 74d7afb000f5b4e5827dcd2ee79301d7c211a547 | #!/usr/bin/env python
import os
import sys
from six.moves import input
from vimapt import Extract
class Make(object):
def __init__(self, work_dir):
current_file_dir = os.path.dirname(os.path.abspath(__file__))
self.tpl_file = os.path.join(current_file_dir, 'data', 'vimapt.vpb')
self.work_dir = work_dir
def manual_make(self):
package_name = input("Input you package name:\n")
prompt_message = "Input you package version. Format like x.y.z:\n"
package_version = input(prompt_message)
package_dir = package_name + '_' + package_version
package_dir_abspath = os.path.join(self.work_dir, package_dir)
if os.path.isdir(package_dir_abspath):
print("Target dir exists, exit!")
sys.exit(0)
else:
os.mkdir(package_dir_abspath)
extract_object = Extract.Extract(self.tpl_file, package_dir_abspath)
extract_object.extract()
print("New packaging directory build in: %s" % package_dir_abspath)
rel_tpl_list = ['vimapt/control/vimapt.yaml',
'vimapt/copyright/vimapt.yaml',
'vimrc/vimapt.vimrc',
]
for rel_tpl_file in rel_tpl_list:
tpl_file = os.path.join(package_dir_abspath, rel_tpl_file)
tpl_file_dir = os.path.dirname(tpl_file)
_, ext_name = os.path.splitext(tpl_file)
target_file = os.path.join(tpl_file_dir, package_name + ext_name)
# print(tpl_file)
print(target_file)
os.rename(tpl_file, target_file)
print("Jobs done! Template making is succeed!")
print("Have fun!")
def auto_make(self, package_name, package_version, package_revision):
tpl_abs_file = self.tpl_file
full_version = package_version + '-' + package_revision
package_dir = package_name + '_' + full_version
package_dir_abspath = os.path.join(self.work_dir, package_dir)
if os.path.isdir(package_dir_abspath):
print("Target dir exists, exit!")
sys.exit(0)
else:
os.mkdir(package_dir_abspath)
extract_object = Extract.Extract(tpl_abs_file, package_dir_abspath)
extract_object.extract()
rel_tpl_list = ['vimapt/control/vimapt.yaml',
'vimapt/copyright/vimapt.yaml',
'vimrc/vimapt.vimrc',
]
for rel_tpl_file in rel_tpl_list:
tpl_file = os.path.join(package_dir_abspath, rel_tpl_file)
tpl_file_dir = os.path.dirname(tpl_file)
_, ext_name = os.path.splitext(tpl_file)
target_file = os.path.join(tpl_file_dir, package_name + ext_name)
print(tpl_file)
print(target_file)
os.rename(tpl_file, target_file)
def main():
current_dir = os.getcwd()
make = Make(current_dir)
make.manual_make()
if __name__ == "__main__":
main()
|
992,264 | 386b4e1f0ed9ac52ebee78d9387e583db9677750 | '''
세로로 잘리는 위치, 가로로 잘리는 위치 저장을 위한 리스트 생성, 0(시작점)을 미리 넣어둠
가로인지 세로인지에 따라서 가로리스트, 세로리스트에 저장
마지막 점을 리스트에 추가.
가로, 세로 리스트 정렬.(sort)
가로/세로 리스트의 두 점의 거리 (절대값(첫번째-두번째))를 구함.
그 중에서 최대값을 구하고 각각 곱하면 정답.
'''
# N,M=map(int,input().split()) #가로,세로 크기
# dot=int(input()) #점선 개수=자르는 횟수
# Narr=[0]
# Marr=[0]
# for x in range(dot): #세번째줄부터 점선의 개수만큼 있으니까 for문 안에 input을 넣어줘야 합니다.
# X,Y=map(int,input().split())
# if X==0: #가로인지 세로인지에 따라서 가로리스트, 세로리스트에 저장
# Narr.append(Y)
# else:
# Marr.append(Y)
# Narr.append(M) #마지막 점을 리스트에 추가.
# Marr.append(N)
# Narr.sort() #가로, 세로 리스트 정렬.(sort)
# Marr.sort()
# Nd=0
# Md=0
# Ndmax=-100
# Mdmax=-100
# for i in range(len(Narr)-1): #가로/세로 리스트의 두 점의 거리 (절대값(첫번째-두번째))를 구함.
# Nd = abs(Narr[i+1]-Narr[i])
# if Nd >= Ndmax: #그 중에서 최대값을 구하고 각각 곱하면 정답.
# Ndmax = Nd
# for j in range(len(Marr)-1):
# Md = abs(Marr[j+1]-Marr[j])
# if Md>=Mdmax:
# Mdmax = Md
# result = Mdmax * Ndmax #그 중에서 최대값을 구하고 각각 곱하면 정답.
# print(result)
N,M=map(int,input().split()) #가로길이, 세로길이
A = [0] + [N] # 가로리스트
B = [0] + [M] # 세로리스트
Num = int(input()) #점선의 개수
for _ in range(Num): #점선의 개수만큼 가로(0), 세로(1) 좌표번호
w, z = map(int,input().split())
#가로인지 세로인지 판단해서 가로리스트 세로리스트에 넣음
if w == 1:#가로리스트에 세로점선
A += [z]
else: #세로리스트에 가로점선 넣는다.
B += [z]
A_max, B_max = 0,0
A, B = sorted(A), sorted(B)
#가로리스트의 인덱스값끼리의 차이가 가장 큰것 찾아야 됨
for i in range(1, len(A)):
A_max = max(A_max, A[i]-A[i-1])
#세로리스트의 인덱스값끼리의 차이가 가장 큰것 찾아서
for j in range(1, len(B)):
B_max = max(B_max, B[j]-B[j-1])
#가로세로길이 곱해주면 가장 큰 넓이 구할 수 있음
print(A_max*B_max)
|
992,265 | 6481bde185e5f13f47d8790dd789cc0229463a8a | """ Load David Khatami's model """
import numpy as np
def load():
modeldir = "/Users/annaho/Dropbox/Projects/Research/ZTF18abukavn/data"
model = np.loadtxt(modeldir + "/2018gep_csm_model.dat")
mod_dt = model[:,0]
mod_lum = model[:,1]
mod_rad = model[:,2]
mod_temp = model[:,3]
return mod_dt, mod_lum, mod_rad, mod_temp
|
992,266 | 3d92325d698e68f984d112df2ad6c7eacdd82c14 | #!/usr/bin/python3
from __future__ import print_function
import argparse
import requests
from requests_kerberos import HTTPKerberosAuth
from requests import conf
TEMPLATE = """
On {freshmaker_date}, Freshmaker rebuilt {original_nvr} container image [1] as a result of Important/Critical RHSA advisory [2].
It seems the maintainer of this image did not use the Freshmaker's build, but instead built and shipped the new image [3] himself on {container_advisory_date}.
Was there any reason why you haven't used the Freshmaker's build? We think that by using the Freshmaker's build, you could save the time needed for rebuild and also provide the fixed image faster.
This ticket is created mainly for us to find out if there was any issue you hit with Freshmaker which prevented you to use the mentioned build and also reminder for you that Freshmaker is building the images with fixed security issues automatically for you.
[1] {freshmaker_brew_build}
[2] {rhsa_advisory}
[3] {container_advisory}
"""
ERRATA_URL = 'https://errata.devel.redhat.com/api/v1/'
FRESHMAKER_URL = 'https://freshmaker.engineering.redhat.com/api/1/'
def get_advisory(errata_id):
krb_auth = HTTPKerberosAuth()
r = requests.get(ERRATA_URL + "erratum/%s" % str(errata_id), auth=krb_auth, timeout=conf.requests_timeout)
r.raise_for_status()
data = r.json()
return data["errata"].values()[0]
def get_freshmaker_build(search_key, original_nvr):
url = FRESHMAKER_URL + "events/?search_key=%s" % search_key
r = requests.get(url, timeout=conf.requests_timeout)
r.raise_for_status()
data = r.json()
for build in data["items"][0]["builds"]:
if build["original_nvr"].startswith(original_nvr):
return data["items"][0], build
return None
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("SEARCH_KEY", help="Freshmaker's search_key")
parser.add_argument("ORIGINAL_NVR", help="Freshmaker's original_nvr")
parser.add_argument("CONTAINER_ADVISORY", help="Advisory with shipped non-freshmaker build")
args = parser.parse_args()
search_key = args.SEARCH_KEY
original_nvr = args.ORIGINAL_NVR
container_advisory = args.CONTAINER_ADVISORY
event, build = get_freshmaker_build(search_key, original_nvr)
errata = get_advisory(container_advisory)
template_data = {
"freshmaker_date": build["time_completed"].split("T")[0],
"original_nvr": build["original_nvr"],
"freshmaker_brew_build": "https://brewweb.engineering.redhat.com/brew/taskinfo?taskID=%d" % build["build_id"],
"rhsa_advisory": "https://errata.devel.redhat.com/advisory/%s" % event["search_key"],
"container_advisory": "https://errata.devel.redhat.com/advisory/%s" % container_advisory,
"container_advisory_date": errata["issue_date"].split("T")[0],
}
print(TEMPLATE.format(**template_data))
|
992,267 | f1172defa62805334990d0c51405e1f3ee5a1bb1 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
#Calculamos el precio de la reserva y de la limpieza en función del número de huéspedes, de habitaciones y de noches:
def get_booking_price(personas, habitaciones, noches):
if personas == 1 and habitaciones == 1:
price = 70*noches
cleaning = 70
if personas == 2 and habitaciones == 1:
price = 70*noches
cleaning = 75
if personas == 2 and habitaciones == 2:
price = 75*noches
cleaning = 75
if personas == 3 and habitaciones == 2:
price = 85*noches
cleaning = 80
if personas == 3 and habitaciones == 3:
price = 90*noches
cleaning = 80
if personas == 4 and habitaciones == 2:
price = 100*noches
cleaning = 85
if personas == 4 and habitaciones == 3:
price = 105*noches
cleaning = 85
if personas == 4 and habitaciones == 4:
price = 110*noches
cleaning = 85
if personas == 5 and habitaciones == 3:
price = 115*noches
cleaning = 90
if personas == 5 and habitaciones == 4:
price = 120*noches
cleaning = 90
if personas == 6 and habitaciones == 3:
price = 130*noches
cleaning = 95
if personas == 6 and habitaciones == 4:
price = 135*noches
cleaning = 95
if personas == 7 and habitaciones == 4:
price = 145*noches
cleaning = 100
if personas == 8 and habitaciones == 4:
price = 160*noches
cleaning = 100
else:
price == 0
cleaning == 0
return price, cleaning
|
992,268 | 1c8814e9e4157537c492ae61821f494df6d96bf4 | #-*- coding: utf-8 -*-
import json
import socket
import os
import logging
import logging.handlers as ih
from redis import StrictRedis
from img.imagesaver import ImageSaver
from sqlalchemy import engine, MetaData, create_engine
root_dir = os.path.dirname(os.path.realpath(__file__))
if socket.gethostname() == "spider-1":
run_env = "online"
elif socket.gethostname() == "sandbox-1":
run_env = "sandbox"
else:
run_env = "rd"
conf = {}
with open(os.path.join(root_dir, "conf", "config." + run_env + ".json")) as fd:
content = fd.read()
conf = json.loads(content)
saver = ImageSaver("ucloud", conf["ucloud"])
class WFHandler(logging.Handler):
def __init__(self, name):
fm = logging.Formatter("[%(asctime)s][%(filename)s:%(lineno)d][%(levelname)s]: %(message)s")
self.info = ih.WatchedFileHandler(name)
self.info.setFormatter(fm)
self.err = ih.WatchedFileHandler(name + ".wf")
self.err.setFormatter(fm)
logging.Handler.__init__(self)
def emit(self, record):
if record.levelno <= logging.INFO:
self.info.emit(record)
else:
self.err.emit(record)
logging.getLogger("requests").setLevel(logging.WARNING)
rootLogger = logging.getLogger("")
rootLogger.setLevel(logging.INFO)
rootLogger.addHandler(WFHandler("/data/logs/opendata_crawler/crawler.log"))
redis = StrictRedis(host = conf["redis"]["host"], port = conf["redis"]["port"])
meta = MetaData()
engine = create_engine("mysql://%s:%s@%s:%s/%s?charset=%s&use_unicode=0" \
%(conf["mysql"]["user"], conf["mysql"]["password"], conf["mysql"]["host"], \
conf["mysql"]["port"], conf["mysql"]["db"], conf["mysql"]["charset"]), \
pool_reset_on_return = None, pool_size = 10, pool_recycle = 600,\
encoding = "utf8", \
execution_options={"autocommit": True})
meta.reflect(bind = engine)
|
992,269 | 68ce07e0d0487501b756d16f509359df843d5de5 |
def fib(max):
a,b,n = 0,1,0
while True:
if n<max :
a, b = b, a + b
yield a
n += 1
else:
break
f = fib(10)
for i in range(10):
print(f.__next__()) |
992,270 | 612d48d10c2bd7dac653bbc79cd032db4f5eac0b | '''
Created on 26/09/2018
@author: ernesto
'''
# XXX: http://codeforces.com/contest/1040/problem/B
if __name__ == '__main__':
n, k = [int(x) for x in input().strip().split(" ")]
tam_seccion = min((k << 1) + 1, n)
num_secciones = n // tam_seccion
sobrante_secciones = n % tam_seccion
puntos_de_volteo = list(range(min(k, n - 1), n, tam_seccion))
ultimo_abarcado = puntos_de_volteo[-1] + k
if ultimo_abarcado < n - 1:
punto_extra = ultimo_abarcado + k + 1
puntos_de_volteo += [punto_extra]
puntos_de_volteo = list(map(lambda p:p - max(0, punto_extra - n + 1), puntos_de_volteo))
puntos_de_volteo = list(map(lambda p:p + 1, puntos_de_volteo))
print("{}".format(len(puntos_de_volteo)))
print("{}".format(" ".join(map(str, puntos_de_volteo))))
|
992,271 | 0dc55006bb8a6a672a4b0bab7f121e60a3d7b8a7 | from selenium.webdriver.common.by import By
class MainPageLocators():
LOGIN_LINK = (By.CSS_SELECTOR, "#login_link")
class LoginPageLocators:
LOGIN_FORM = (By.CSS_SELECTOR, ".login_form")
REGISTER_FORM = (By.CSS_SELECTOR, ".register_form")
INPUT_EMAIL = (By.CSS_SELECTOR, "#id_registration-email")
INPUT_PASSWORD = (By.CSS_SELECTOR, "#id_registration-password1")
CONFIRM_PASSWORD = (By.CSS_SELECTOR, "#id_registration-password2")
REG_BUTTON = (By.NAME, "registration_submit")
class BasePageLocators:
LOGIN_LINK = (By.CSS_SELECTOR, "#login_link")
LOGIN_LINK_INVALID = (By.CSS_SELECTOR, "#login_link_inc")
BASKET_LINK = (By.CSS_SELECTOR, ".btn-group > a")
USER_ICON = (By.CSS_SELECTOR, ".icon-user")
class ProductPageLocators:
ADD_TO_BASKET_BUTTON = (By.CSS_SELECTOR, ".btn-add-to-basket")
PRODUCT_NAME = (By.CSS_SELECTOR, ".product_main > h1")
PRODUCT_NAME_IN_BASKET = (By.CSS_SELECTOR, "#messages > div:nth-child(1) > div > strong")
PRODUCT_PRICE = (By.CSS_SELECTOR, ".product_main > .price_color")
PRODUCT_PRICE_IN_BASKET = (By.CSS_SELECTOR, ".alertinner>p>strong")
SUCCESS_MESSAGE = (By.CSS_SELECTOR, "#messages > div:nth-child(1)")
class BasketPageLocators:
BASKET_ITEM = (By.CSS_SELECTOR, ".basket-items")
BASKET_EMPTY_TEXT = (By.CSS_SELECTOR, "#content_inner>p")
|
992,272 | 599d9a36c2a89436c8cdc9c82720e0bae6f5a282 | from json import JSONDecoder, dumps
with open("example_pretty.json", "r") as f:
z = JSONDecoder().decode(f.read())
#for k, v in z.iteritems():
# print k, "::", v
output = []
results = z['feed']['entry']
for result in results:
output.append({})
z = output[-1]
z['vidname'] = result['title']['$t']#.encode('utf8')
z['vidURL'] = result['link'][0]['href']#.encode('utf8')
z['category'] = result['category'][1]['label']#.encode('utf8')
z['thumbnail'] = result["media$group"]["media$thumbnail"][0]['url']#.encode('utf8')
print dumps(output).encode('utf8')
|
992,273 | 63076920c0461a56ec71d91b704cb1f9856a82ec | from subprocess import Popen
import time
from sys import argv
sources = ["galsim_bright", "galsim_dimmer", "pts_bright", "pts_dimmer"]
filters = ["sex2_1.5", "sex2_2", "sex2_4", "sex4_1.5", "sex4_2", "sex4_4"]
for ss in sources:
for ff in filters:
print("----------------------------------------------------------")
t1 = time.time()
cmd = "mpirun -n 25 ./resolution %s %s 1000"%(ss,ff)
a = Popen(cmd, shell=True)
a.wait()
t2 = time.time()
print(cmd + " %.2f sec"%(t2-t1))
print("----------------------------------------------------------\n") |
992,274 | 73986a9b8e80c0df10b5a6522ccc0278561f7a9b | def test():
print("this is a item module") |
992,275 | 75dc66b3a858dc13112d75e398bb6f3f0e39b2ff | def inicio():
opcion = 0
personas = []
id = 1
while opcion < 5:
print(" ------------------------------------")
print(" -Que acción deseas realizar -")
print(" -1. Agregar nuevo usuario -")
print(" -2. Editar un usuario -")
print(" -3. Eliminar un usuario -")
print(" -4. Listar todos todos los usuarios-")
print(" -5. Salir -")
print(" ------------------------------------")
opcion = int(input("Ingresa una opción: "))
if(opcion == 1):
nombres = input("Ingresa los nombres del usuario: ")
apellidos = input("Ingresa los apellidos del usuario: ")
celular = input("Ingresa el celular del usuario: ")
personas.append({
"id": id,
"nombres": nombres,
"apellidos": apellidos,
"celular": celular
})
id += 1
print("El usuario", nombres, "se ha agregado correctamente")
elif(opcion == 2):
usuario_eliminar = int(input("Ingrese el id del usuario a editar: "))
index_eliminar = -1
usuario = {}
for index, persona in enumerate(personas):
if(usuario_eliminar == persona["id"]):
index_eliminar = index
usuario = persona
if(index_eliminar == -1):
print("No se ha encontrado ningun usuario con el id", usuario_eliminar)
else:
print("Actualizando datos del usuario", usuario['nombres'],", dejar los campos en blanco si no desea cambiar la información")
nombres = input("Nombres (" + usuario['nombres'] + "): ")
apellidos = input("Apellidos (" + usuario['apellidos'] + "): ")
celular = input("Celular (" + usuario['celular'] + "): ")
if(nombres != ""):
usuario['nombres'] = nombres
if(apellidos != ""):
usuario['apellidos'] = apellidos
if(celular != ""):
usuario['celular'] = celular
personas[index_eliminar] = usuario
print("El usuario", usuario['nombres'], "ha sido actualizado correctamente")
elif(opcion == 3):
usuario_eliminar = int(input("Ingrese el id del usuario a eliminar: "))
index_eliminar = -1
usuario = {}
for index, persona in enumerate(personas):
if(usuario_eliminar == persona["id"]):
index_eliminar = index
usuario = persona
if(index_eliminar == -1):
print("No se ha encontrado ningun usuario con el id", usuario_eliminar)
else:
del personas[index_eliminar]
print("El usuario", usuario['nombres'], "se ha eliminado correctamente")
elif(opcion == 4):
if(not len(personas)):
print("No se han encontrado registros")
else:
print("Listado de usuarios")
print("__________")
print("ID|Nombres|Apellidos|Celular")
for persona in personas:
print(persona["id"],"|",persona["nombres"],"|",persona["apellidos"],"|",persona["celular"])
print("__________")
if(opcion != 5):
input("Presione cualquier tecla para continuar...")
print("Gracias por utilizar el programa") |
992,276 | 70ce59992d045c6e32d43ceae7cd804258c4e0ea | # -*- coding: utf-8 -*-
'这是数据对象的学习'
#type(parm) 判断对象,变量的类型
#import types 使用types模块
#isinstance(),判断class对象继承与那个类,同样适用于普通变量
#dir()函数,获得一个对象的所有属性和方法
#len(),获取一个对象的长度
#str.lower() 返回小写的字符串 |
992,277 | 76314c1f38ac29ab916b2da29cd7b18176136f62 | __author__ = 'Cjj'
from scrapy.cmdline import execute
execute() |
992,278 | bd7e53c337079d3b671402f0b7d49e8428ab2701 | temp = n = int(input())
check = 0
while(True):
ten = temp//10
one = temp%10
res = ten + one
check += 1
temp = int(str(temp%10)+str(res%10))
if(n == temp):
break
print(check) |
992,279 | ae2b16e0093d9de821923fb52f4c11aa082cf084 | #
#Test branch
##
#
#
# This is a sample Python script.
# Press Shift+F10 to execute it or replace it with your code.
# Press Double Shift to search everywhere for classes, files, tool windows, actions, and settings.
# import MAC_01
# import Search_SN
import Input_SN_BOX
# import PLC
# import Mail_send_Test
# import Camera_Spinel
# import Keyboard_Function
# import ssh_shell
# import Could_Delete
# import Test_camera
# import D_MAC_01
def print_hi(name):
# Use a breakpoint in the code line below to debug your script.
print(f'Hi, {name}') # Press Ctrl+F8 to toggle the breakpoint.
# Press the green button in the gutter to run the script.
# if __name__ == '__main__':
# print_hi('PyCharm')
# See PyCharm help at https://www.jetbrains.com/help/pycharm/
|
992,280 | 1c9154c8db4a80397eb95294a8a71b01270574b7 | from flask import Flask
app = Flask(__name__)
# Add any other Flask app configurations here . . .
from getting_started import views |
992,281 | a1eb77d6e9d7bfcfe1d86f3fdc87298c5c318b17 | from fivePro import *
import pygame as pg
class FivePlay():
def __init__(self, sd, addr):
self.sd = sd
self.addr = addr
self.isRecv = False
def quit(self, e):
if e.type == pg.QUIT or e.type == pg.KEYDOWN and e.key == pg.K_ESCAPE:
self.sd.sendto("quit".encode(), self.addr)
return True
def play(self, key, game):
if key in [pg.K_a, pg.K_d, pg.K_w, pg.K_s]:
move(key, game)
pg.display.update()
elif key == pg.K_p and game.ctrl in game.chess:
self.send(game)
self.isRecv = True
def replay(self, key, game):
if key == pg.K_SPACE:
self.sd.sendto("replay".encode(), self.addr)
self.isRecv = True
return True
def recv(self, game):
data = self.sd.recvfrom(1024)[0].decode()
if data == "replay":
return True
elif data == "quit":
return False
else:
data = data[1: -1].split(", ")
point = (int(data[0]), int(data[1]))
if game.ctrl not in game.own:
flash(game.ctrl)
else:
drawChess(OWN, game.ctrl, SIZE // 4)
game.changeCtrl(point)
fall(game)
if scan(game):
game.chess = []
game.win["enemy"] = True
elif not game.chess:
game.win["draw"] = True
over(game)
pg.display.update()
def send(self, game):
fall(game, 1)
self.sd.sendto(str(game.ctrl).encode(), self.addr)
if scan(game, 1):
game.win["own"] = True
game.chess = []
elif not game.chess:
game.win["draw"] = True
over(game)
pg.display.update()
def wait(self, game):
if self.isRecv:
self.isRecv = False
return self.recv(game)
def fall(self, game):
if not self.isRecv:
e = pg.event.wait()
if e.type == pg.KEYDOWN and game.chess:
self.play(e.key, game)
elif e.type == pg.KEYDOWN and not game.chess:
if self.quit(e):
return False
elif self.replay(e.key, game):
return True
|
992,282 | f1222646a47e45abc65f65e0080912480df850cc | a=int(input())
for i in range(2,a+1):
while(a!=i):
if(a%i==0):
print(i)
a=a/i
else:
break
print(a)
|
992,283 | 0aae9319d66cd29bd7e3e53a2e9a66e6aa78b0da | #----------------------------------------------------------------------------------------
# Process RefSeq data files
#----------------------------------------------------------------------------------------
import os
import numpy as np
from pathlib import Path
from text_tools import parse_refSeq_fasta, merge_refSeq_sequence_files
def main():
# parent directory of all data files
dataDir = Path('../data')
# directory of data files from external sources
extDir = dataDir / 'external'
# parent directory of all processed data files
procDir = dataDir / 'processed'
# directory for RefSeq raw data files
refseqInDir = extDir / 'RefSeq'
# directory to save RefSeq processed data files
refseqOutDir = procDir / 'refSeq_intermediate'
# output data files
refseqFile = procDir / 'refseq_human_protein_sequences.txt'
if not procDir.exists():
os.makedirs(procDir)
if not refseqOutDir.exists():
os.makedirs(refseqOutDir)
numfiles = len([name for name in os.listdir(str(refseqInDir))])
for i in np.arange(1, numfiles + 1):
print('parsing RefSeq sequence file %d of %d' % (i, numfiles))
parse_refSeq_fasta (refseqInDir / ('human.%d.protein.faa' % i),
refseqOutDir / ('refseq_human_protein_%d.txt' % i))
if not refseqFile.is_file():
print('merging RefSeq sequences from all files')
numfiles = len([name for name in os.listdir(str(refseqOutDir))])
merge_refSeq_sequence_files (refseqOutDir, numfiles, refseqFile)
if __name__ == '__main__':
main()
|
992,284 | 686a3e42bfd497d73ad5b941053b7b0f562e0eda | import unittest
import requests
import sys
sys.path.append("..")
from commen.assertions import JsonResonseValidate
from commen.generate_data import Data
class ApiTest(unittest.TestCase):
def setUp(self):
self.data = Data()
# self.url = 'http://qa-no-services.thebump.com/core/v1/hbib'
# self.jsonValidate = JsonResonseValidate('hbib.json')
self.url, self.jsonSchema = self.data.get_test_data()
self.jsonValidate = JsonResonseValidate(self.jsonSchema)
def test_hbib_content(self):
'''test hbib content'''
r = requests.get(self.url)
json_data = r.json()
self.jsonValidate.assert_valid_schema(json_data)
self.assertEqual(r.status_code,200)
# if __name__ == '__main__':
# unittest.main(testRunner=HtmlTestRunner.HTMLTestRunner(output='reports',report_title='My Report'))
|
992,285 | 253261e6687cc4eb4140aff7e6126c04552d10be | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#dict使用的小demo
#通过学号查找姓名
d = {1:'zhou', 2:'bart', 3:'gemini', 4:'minami'}
n =int(input('请输入学号:'))
if n in d:
print('%d号对应的学生姓名为%s:'%(n,d[n]))
else:
print('查无此号')
|
992,286 | 9b90c7e439886a93c24187a2ba6e3e82a7d3c86c | # -*- coding: utf-8 -*-
"""Wide Residual Network models for Keras.
# Reference
- [Wide Residual Networks](https://arxiv.org/abs/1605.07146)
"""
from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
import warnings
from keras.models import Model
from keras.layers.core import Dense, Dropout, Activation, Flatten
from keras.layers.convolutional import Convolution2D
from keras.layers.pooling import AveragePooling2D, MaxPooling2D
from keras.layers import Input
from keras.layers.normalization import BatchNormalization
from keras.utils.layer_utils import convert_all_kernels_in_model
from keras.utils.data_utils import get_file
from keras.engine.topology import get_source_inputs
from keras.applications.imagenet_utils import _obtain_input_shape
import keras.backend as K
from keras.layers.merge import add
URL_BASE = ('https://github.com/titu1994/Wide-Residual-Networks/releases/'
'download/v1.2/wrn_28_8_')
TH_WEIGHTS_PATH = URL_BASE + 'th_kernels_th_dim_ordering.h5'
TF_WEIGHTS_PATH = URL_BASE + 'tf_kernels_tf_dim_ordering.h5'
TH_WEIGHTS_PATH_NO_TOP = URL_BASE + 'th_kernels_th_dim_ordering_no_top.h5'
TF_WEIGHTS_PATH_NO_TOP = URL_BASE + 'tf_kernels_tf_dim_ordering_no_top.h5'
def create_model(classes=10, input_shape=None, config=None,
depth=28, width=8, dropout_rate=0.0,
include_top=True, weights=None,
input_tensor=None):
"""
Create wide resnet.
Instantiate the Wide Residual Network architecture, optionally loading
weights pre-trained on CIFAR-10. Note that when using TensorFlow, for best
performance you should set `image_dim_ordering="tf"` in your Keras config
at ~/.keras/keras.json.
The model and the weights are compatible with both TensorFlow and Theano.
The dimension ordering convention used by the model is the one specified in
your Keras config file.
Parameters
----------
classes: optional number of classes to classify images
into, only to be specified if `include_top` is True, and
if no `weights` argument is specified.
input_shape: optional shape tuple, only to be specified
if `include_top` is False (otherwise the input shape
has to be `(32, 32, 3)` (with `tf` dim ordering)
or `(3, 32, 32)` (with `th` dim ordering).
It should have exactly 3 inputs channels,
and width and height should be no smaller than 8.
E.g. `(200, 200, 3)` would be one valid value.
depth: number or layers in the DenseNet
width: multiplier to the ResNet width (number of filters)
dropout_rate: dropout rate
include_top: whether to include the fully-connected
layer at the top of the network.
weights: one of `None` (random initialization) or
"cifar10" (pre-training on CIFAR-10)..
input_tensor: optional Keras tensor (i.e. output of `layers.Input()`)
to use as image input for the model.
# Returns
A Keras model instance.
"""
if weights not in {'cifar10', None}:
raise ValueError('The `weights` argument should be either '
'`None` (random initialization) or `cifar10` '
'(pre-training on CIFAR-10).')
if weights == 'cifar10' and include_top and classes != 10:
raise ValueError('If using `weights` as CIFAR 10 with `include_top`'
' as true, `classes` should be 10')
if (depth - 4) % 6 != 0:
raise ValueError('Depth of the network must be such that (depth - 4)'
'should be divisible by 6.')
# Determine proper input shape
input_shape = _obtain_input_shape(input_shape,
default_size=32,
min_size=8,
data_format=K.image_dim_ordering(),
include_top=include_top)
if input_tensor is None:
img_input = Input(shape=input_shape)
else:
if not K.is_keras_tensor(input_tensor):
img_input = Input(tensor=input_tensor, shape=input_shape)
else:
img_input = input_tensor
x = __create_wide_residual_network(classes, img_input, include_top, depth,
width, dropout_rate)
# Ensure that the model takes into account
# any potential predecessors of `input_tensor`.
if input_tensor is not None:
inputs = get_source_inputs(input_tensor)
else:
inputs = img_input
# Create model.
model = Model(inputs, x, name='wide-resnet')
# load weights
if weights == 'cifar10':
if (depth == 28) and (width == 8) and (dropout_rate == 0.0):
# Default parameters match. Weights for this model exist:
if K.image_dim_ordering() == 'th':
if include_top:
weights_path = get_file(('wide_resnet_28_8_th_dim_ordering'
'_th_kernels.h5'),
TH_WEIGHTS_PATH,
cache_subdir='models')
else:
weights_path = get_file(('wide_resnet_28_8_th_dim_ordering'
'_th_kernels_no_top.h5'),
TH_WEIGHTS_PATH_NO_TOP,
cache_subdir='models')
model.load_weights(weights_path)
if K.backend() == 'tensorflow':
warnings.warn('You are using the TensorFlow backend, yet '
' youare using the Theano '
'image dimension ordering convention '
'(`image_dim_ordering="th"`). '
'For best performance, set '
'`image_dim_ordering="tf"` in '
'your Keras config '
'at ~/.keras/keras.json.')
convert_all_kernels_in_model(model)
else:
if include_top:
weights_path = get_file(('wide_resnet_28_8_tf_dim_ordering'
'_tf_kernels.h5'),
TF_WEIGHTS_PATH,
cache_subdir='models')
else:
weights_path = get_file(('wide_resnet_28_8_tf_dim_ordering'
'_tf_kernels_no_top.h5'),
TF_WEIGHTS_PATH_NO_TOP,
cache_subdir='models')
model.load_weights(weights_path)
if K.backend() == 'theano':
convert_all_kernels_in_model(model)
return model
def __conv1_block(input):
x = Convolution2D(16, (3, 3), padding='same')(input)
channel_axis = 1 if K.image_dim_ordering() == "th" else -1
x = BatchNormalization(axis=channel_axis)(x)
x = Activation('relu')(x)
return x
def __conv2_block(input, k=1, dropout=0.0):
init = input
channel_axis = 1 if K.image_dim_ordering() == "th" else -1
# Check if input number of filters is same as 16 * k, else create
# convolution2d for this input
if K.image_dim_ordering() == "th":
if init._keras_shape[1] != 16 * k:
init = Convolution2D(16 * k, (1, 1), activation='linear',
padding='same')(init)
else:
if init._keras_shape[-1] != 16 * k:
init = Convolution2D(16 * k, (1, 1), activation='linear',
padding='same')(init)
x = Convolution2D(16 * k, (3, 3), padding='same')(input)
x = BatchNormalization(axis=channel_axis)(x)
x = Activation('relu')(x)
if dropout > 0.0:
x = Dropout(dropout)(x)
x = Convolution2D(16 * k, (3, 3), padding='same')(x)
x = BatchNormalization(axis=channel_axis)(x)
x = Activation('relu')(x)
m = add([init, x])
return m
def __conv3_block(input, k=1, dropout=0.0):
init = input
channel_axis = 1 if K.image_dim_ordering() == "th" else -1
# Check if input number of filters is same as 32 * k, else create
# convolution2d for this input
if K.image_dim_ordering() == "th":
if init._keras_shape[1] != 32 * k:
init = Convolution2D(32 * k, (1, 1), activation='linear',
padding='same')(init)
else:
if init._keras_shape[-1] != 32 * k:
init = Convolution2D(32 * k, (1, 1), activation='linear',
padding='same')(init)
x = Convolution2D(32 * k, (3, 3), padding='same')(input)
x = BatchNormalization(axis=channel_axis)(x)
x = Activation('relu')(x)
if dropout > 0.0:
x = Dropout(dropout)(x)
x = Convolution2D(32 * k, (3, 3), padding='same')(x)
x = BatchNormalization(axis=channel_axis)(x)
x = Activation('relu')(x)
m = add([init, x])
return m
def ___conv4_block(input, k=1, dropout=0.0):
    """Third-stage wide residual block with 64*k filters.

    Same structure as __conv2_block/__conv3_block, just wider.
    (The triple leading underscore matches the original naming.)
    """
    init = input
    channel_axis = 1 if K.image_dim_ordering() == "th" else -1
    # Check if input number of filters is same as 64 * k, else create
    # convolution2d for this input
    if K.image_dim_ordering() == "th":
        if init._keras_shape[1] != 64 * k:
            init = Convolution2D(64 * k, (1, 1), activation='linear',
                                 padding='same')(init)
    else:
        if init._keras_shape[-1] != 64 * k:
            init = Convolution2D(64 * k, (1, 1), activation='linear',
                                 padding='same')(init)
    x = Convolution2D(64 * k, (3, 3), padding='same')(input)
    x = BatchNormalization(axis=channel_axis)(x)
    x = Activation('relu')(x)
    if dropout > 0.0:
        x = Dropout(dropout)(x)
    x = Convolution2D(64 * k, (3, 3), padding='same')(x)
    x = BatchNormalization(axis=channel_axis)(x)
    x = Activation('relu')(x)
    # Residual addition of the (possibly projected) shortcut.
    m = add([init, x])
    return m
def __create_wide_residual_network(nb_classes, img_input, include_top,
                                   depth=28, width=8, dropout=0.0):
    """
    Create a Wide Residual Network with specified parameters.

    Parameters
    ----------
    nb_classes: Number of output classes
    img_input: Input tensor or layer
    include_top: Flag to include the last dense layer
    depth: Depth of the network. Compute N = (n - 4) / 6.
           For a depth of 16, n = 16, N = (16 - 4) / 6 = 2
           For a depth of 28, n = 28, N = (28 - 4) / 6 = 4
           For a depth of 40, n = 40, N = (40 - 4) / 6 = 6
    width: Width of the network.
    dropout: Adds dropout if value is greater than 0.0

    Returns
    -------
    The network's output tensor (note: not a Keras Model, despite the
    original wording; the caller is expected to wrap it in Model()).
    """
    # N residual blocks per stage; stages separated by 2x2 max-pooling.
    N = (depth - 4) // 6
    x = __conv1_block(img_input)
    nb_conv = 4  # NOTE(review): tracked but never read or returned — TODO confirm intent
    for i in range(N):
        x = __conv2_block(x, width, dropout)
        nb_conv += 2
    x = MaxPooling2D((2, 2))(x)
    for i in range(N):
        x = __conv3_block(x, width, dropout)
        nb_conv += 2
    x = MaxPooling2D((2, 2))(x)
    for i in range(N):
        x = ___conv4_block(x, width, dropout)
        nb_conv += 2
    # Assumes the final feature map is 8x8 (e.g. 32x32 inputs) — TODO confirm.
    x = AveragePooling2D((8, 8))(x)
    if include_top:
        x = Flatten()(x)
        x = Dense(nb_classes, activation='softmax')(x)
    return x
# Manual smoke test: build the network and render its architecture diagram.
if __name__ == '__main__':
    model = create_model(100, (32, 32, 3))
    model.summary()
    from keras.utils import plot_model
    plot_model(model, to_file='resnetpa.png',
               show_layer_names=False, show_shapes=True)
|
992,287 | 70c9d6c5893a02aed589f0cc752aee464b6b281d | import functools
import json
import multiprocessing
import sys
import threading
import traceback
import ansimarkup
class StrRecord(str):
    """A str subclass that carries the originating log record in `.record`."""
    __slots__ = ("record",)
class Handler:
    """A logging sink wrapper: filters, formats, optionally colorizes /
    serializes each record, and writes it (directly or via a queue thread).

    NOTE(review): behavior is unchanged except that the ``colors``
    parameter default is now an immutable tuple instead of a shared
    mutable list (classic mutable-default pitfall; the argument is only
    iterated, so this is interface-compatible).
    """

    def __init__(
        self,
        *,
        writer,
        stopper,
        levelno,
        formatter,
        is_formatter_dynamic,
        filter_,
        colorize,
        serialize,
        backtrace,
        catch,
        enqueue,
        encoding,
        id_,
        colors=()
    ):
        self.writer = writer
        self.stopper = stopper
        self.levelno = levelno
        self.formatter = formatter
        self.is_formatter_dynamic = is_formatter_dynamic
        self.filter = filter_
        self.colorize = colorize
        self.serialize = serialize
        self.backtrace = backtrace
        self.catch = catch
        self.enqueue = enqueue
        self.encoding = encoding
        self.id = id_
        self.static_format = None
        self.decolorized_format = None
        self.precolorized_formats = {}
        self.lock = threading.Lock()
        self.queue = None
        self.thread = None
        self.stopped = False
        if not self.is_formatter_dynamic:
            # Static formats can be pre-rendered once instead of per record.
            self.static_format = self.formatter
            self.decolorized_format = self.decolorize_format(self.static_format)
            for color in colors:
                self.update_format(color)
        if self.enqueue:
            # Writes are delegated to a daemon thread fed by a
            # process-safe queue so logging works across processes.
            self.queue = multiprocessing.SimpleQueue()
            self.thread = threading.Thread(target=self.queued_writer, daemon=True)
            self.thread.start()

    @staticmethod
    def serialize_record(text, record):
        """Render one record as a JSON line.

        Non-JSON values (datetimes, timedeltas, ...) fall back to str()
        via ``default=str``.
        """
        exc = record["exception"]
        serializable = {
            "text": text,
            "record": {
                "elapsed": dict(repr=record["elapsed"], seconds=record["elapsed"].total_seconds()),
                "exception": exc
                and dict(type=exc.type.__name__, value=exc.value, traceback=bool(exc.traceback)),
                "extra": record["extra"],
                "file": dict(name=record["file"].name, path=record["file"].path),
                "function": record["function"],
                "level": dict(
                    icon=record["level"].icon, name=record["level"].name, no=record["level"].no
                ),
                "line": record["line"],
                "message": record["message"],
                "module": record["module"],
                "name": record["name"],
                "process": dict(id=record["process"].id, name=record["process"].name),
                "thread": dict(id=record["thread"].id, name=record["thread"].name),
                "time": dict(repr=record["time"], timestamp=record["time"].timestamp()),
            },
        }
        return json.dumps(serializable, default=str) + "\n"

    @staticmethod
    def make_ansimarkup(color):
        """Build an AnsiMarkup parser whose <level>/<lvl> tags expand to `color`."""
        color = ansimarkup.parse(color)
        custom_markup = dict(level=color, lvl=color)
        am = ansimarkup.AnsiMarkup(tags=custom_markup, strict=True)
        return am

    @staticmethod
    @functools.lru_cache(maxsize=32)
    def decolorize_format(format_):
        """Strip all markup tags from a format string (memoized)."""
        am = Handler.make_ansimarkup("")
        return am.strip(format_)

    @staticmethod
    @functools.lru_cache(maxsize=32)
    def colorize_format(format_, color):
        """Expand markup tags in a format string using `color` (memoized)."""
        am = Handler.make_ansimarkup(color.strip())
        return am.parse(format_)

    def update_format(self, color):
        """Pre-render the static format for a level color not seen before."""
        if self.is_formatter_dynamic or not self.colorize or color in self.precolorized_formats:
            return
        self.precolorized_formats[color] = self.colorize_format(self.static_format, color)

    def handle_error(self, record=None):
        """Report an internal logging failure on stderr, or re-raise it.

        Must be called from an ``except`` block: the bare ``raise``
        re-raises the exception currently being handled when catch=False.
        """
        if not self.catch:
            raise
        if not sys.stderr:
            return
        ex_type, ex, tb = sys.exc_info()
        try:
            sys.stderr.write("--- Logging error in Loguru Handler #%d ---\n" % self.id)
            sys.stderr.write("Record was: ")
            try:
                sys.stderr.write(str(record))
            except Exception:
                sys.stderr.write("/!\\ Unprintable record /!\\")
            sys.stderr.write("\n")
            traceback.print_exception(ex_type, ex, tb, None, sys.stderr)
            sys.stderr.write("--- End of logging error ---\n")
        except OSError:
            pass
        finally:
            # Break the traceback reference cycle.
            del ex_type, ex, tb

    def emit(self, record, level_color, ansi_message, raw):
        """Filter, format and dispatch one record to the writer or queue.

        raw=True bypasses formatting; ansi_message=True means the message
        itself contains markup tags to be expanded.
        """
        try:
            if self.levelno > record["level"].no:
                return
            if self.filter is not None:
                if not self.filter(record):
                    return
            if self.is_formatter_dynamic:
                format_ = self.formatter(record)
                if self.colorize:
                    if ansi_message:
                        precomputed_format = format_
                    else:
                        precomputed_format = self.colorize_format(format_, level_color)
                else:
                    precomputed_format = self.decolorize_format(format_)
            else:
                if self.colorize:
                    if ansi_message:
                        precomputed_format = self.static_format
                    else:
                        precomputed_format = self.precolorized_formats[level_color]
                else:
                    precomputed_format = self.decolorized_format
            exception = record["exception"]
            if exception:
                error = exception.format_exception(self.backtrace, self.colorize, self.encoding)
            else:
                error = ""
            formatter_record = {**record, **{"exception": error}}
            if ansi_message and not self.colorize:
                formatter_record["message"] = self.decolorize_format(record["message"])
            if raw:
                formatted = formatter_record["message"]
            else:
                formatted = precomputed_format.format_map(formatter_record)
            if ansi_message and self.colorize:
                try:
                    formatted = self.colorize_format(formatted, level_color)
                except ansimarkup.AnsiMarkupError:
                    # Markup in the message was malformed: fall back to a
                    # fully decolorized rendering instead of failing.
                    formatter_record["message"] = self.decolorize_format(record["message"])
                    if self.is_formatter_dynamic:
                        precomputed_format = self.decolorize_format(format_)
                    else:
                        precomputed_format = self.decolorized_format
                    formatted = precomputed_format.format_map(formatter_record)
            if self.serialize:
                formatted = self.serialize_record(formatted, record)
            str_record = StrRecord(formatted)
            str_record.record = record
            with self.lock:
                if self.stopped:
                    return
                if self.enqueue:
                    self.queue.put(str_record)
                else:
                    self.writer(str_record)
        except Exception:
            self.handle_error(record)

    def queued_writer(self):
        """Daemon-thread loop: drain the queue until a None sentinel arrives."""
        message = None
        queue = self.queue
        try:
            while True:
                message = queue.get()
                if message is None:
                    break
                self.writer(message)
        except Exception:
            if message and hasattr(message, "record"):
                message = message.record
            self.handle_error(message)

    def stop(self):
        """Stop accepting records, flush the queue thread, run the stopper."""
        with self.lock:
            self.stopped = True
            if self.enqueue:
                self.queue.put(None)  # sentinel: wake and end queued_writer
                self.thread.join()
            self.stopper()
|
992,288 | f3780c984f6907e5e5ae62e25e91b7c7bdce35c0 | import random
import pygame
from pygame.locals import *
import copy
# A partially filled puzzle (0 = empty cell).
# NOTE(review): not referenced by the visible code — TODO confirm it is still needed.
board = [[7, 8, 0, 4, 0, 0, 1, 2, 0],
         [6, 0, 0, 0, 7, 5, 0, 0, 9],
         [0, 0, 0, 6, 0, 1, 0, 7, 8],
         [0, 0, 7, 0, 4, 0, 2, 6, 0],
         [0, 0, 1, 0, 5, 0, 9, 3, 0],
         [9, 0, 4, 0, 6, 0, 0, 0, 5],
         [0, 7, 0, 3, 0, 0, 0, 1, 2],
         [1, 2, 0, 0, 0, 7, 4, 0, 0],
         [0, 4, 9, 2, 0, 6, 0, 0, 7]]
# All-empty grid.  NOTE(review): this name is shadowed by the blank_board()
# function defined later in the file, so this list is unreachable afterwards.
blank_board = [[0, 0, 0, 0, 0, 0, 0, 0, 0],
               [0, 0, 0, 0, 0, 0, 0, 0, 0],
               [0, 0, 0, 0, 0, 0, 0, 0, 0],
               [0, 0, 0, 0, 0, 0, 0, 0, 0],
               [0, 0, 0, 0, 0, 0, 0, 0, 0],
               [0, 0, 0, 0, 0, 0, 0, 0, 0],
               [0, 0, 0, 0, 0, 0, 0, 0, 0],
               [0, 0, 0, 0, 0, 0, 0, 0, 0],
               [0, 0, 0, 0, 0, 0, 0, 0, 0]]
# A fully solved grid used as the seed for new_board(); rows/columns are
# shuffled within bands/stacks to generate fresh puzzles.
rand_board = [[4, 3, 5, 2, 6, 9, 7, 8, 1],
              [6, 8, 2, 5, 7, 1, 4, 9, 3],
              [1, 9, 7, 8, 3, 4, 5, 6, 2],
              [8, 2, 6, 1, 9, 5, 3, 4, 7],
              [3, 7, 4, 6, 8, 2, 9, 1, 5],
              [9, 5, 1, 7, 4, 3, 6, 2, 8],
              [5, 1, 9, 3, 2, 6, 8, 7, 4],
              [2, 4, 8, 9, 5, 7, 1, 3, 6],
              [7, 6, 3, 4, 1, 8, 2, 5, 9]]
# Another sample puzzle for manual testing (0 = empty cell).
test_board = [[3, 0, 6, 5, 0, 8, 4, 0, 0],
              [5, 2, 0, 0, 0, 0, 0, 0, 0],
              [0, 8, 7, 0, 0, 0, 0, 3, 1],
              [0, 0, 3, 0, 1, 0, 0, 8, 0],
              [9, 0, 0, 8, 6, 3, 0, 0, 5],
              [0, 5, 0, 0, 9, 0, 6, 0, 0],
              [1, 3, 0, 0, 0, 0, 2, 5, 0],
              [0, 0, 0, 0, 0, 0, 0, 7, 4],
              [0, 0, 5, 2, 0, 6, 3, 0, 0]]
# Global blink flag toggled by main() to flash the selected cell's text.
text_blinking = False
def draw_board(window, squares):
    """Clear the window and draw the 9x9 grid lines plus every cell."""
    window.fill((255, 255, 255))
    tile_width = window.get_width() / 9
    tile_height = (window.get_height() - 100) / 9  # bottom 100px reserved below the board
    gap = tile_height  # NOTE(review): unused except by the commented-out code below
    # draw vertical lines
    for i in range(9):
        if i % 3 == 0 and i != 0:
            thickness = 4  # heavier line on 3x3 box boundaries
        else:
            thickness = 1
        pygame.draw.line(window, (0, 0, 0), (i * tile_width, 0), (i * tile_width, window.get_height() - 100), thickness)
    # horizontal lines: 10 of them so the bottom edge of the board is closed
    for i in range(10):
        if i % 3 == 0 and i != 0:
            thickness = 4
        else:
            thickness = 1
        pygame.draw.line(window, (0, 0, 0), (0, i * tile_height), (window.get_width(), i * tile_height), thickness)
    # font = pygame.font.SysFont('Arial', 40)
    # for i in range(9):
    #     for j in range(9):
    #         num = font.render(str(board[j][i]), True, (0, 0, 0))
    #         window.blit(num, (i * gap + 6, j * gap + 6))
    for i in range(9):
        for j in range(9):
            squares[i][j].draw(window)
class Square:
    """One cell of the sudoku grid: its value, position and selection state."""

    def __init__(self, value, row, col, side):
        self.value = value  # 0 means empty
        self.temp = 0  # pencilled-in value; currently unused
        self.row = row
        self.col = col
        self.side_length = side / 9  # pixel size of one cell
        self.selected = False

    def draw(self, window):
        """Render this cell; a selected cell gets a green background and
        its value (or an underscore when empty) blinks with text_blinking.

        NOTE(review): blits at (row * side, col * side), i.e. `row` is used
        as the x coordinate — the naming is transposed but consistent with
        how main() constructs the squares. TODO confirm.
        """
        font = pygame.font.SysFont('Arial', 40)
        number = font.render(str(self.value), True, (0, 0, 0))
        underscore = font.render('_', True, (0, 0, 0))
        if self.selected:
            pygame.draw.rect(window, (0, 255, 0),
                             (self.row * self.side_length, self.col * self.side_length, self.side_length,
                              self.side_length))
            # if self.value == 0:
            #     if text_blinking:
            #         window.blit(underscore, (self.row * self.side_length, self.col * self.side_length))
            # elif text_blinking:
            #     window.blit(number, (self.row * self.side_length, self.col * self.side_length))
            if text_blinking:
                if self.value == 0:
                    window.blit(underscore, (self.row * self.side_length, self.col * self.side_length))
                else:
                    window.blit(number, (self.row * self.side_length, self.col * self.side_length))
        else:
            if self.value != 0:
                window.blit(number, (self.row * self.side_length, self.col * self.side_length))
def blank_board(squares):
    """Reset every cell's value to 0, clearing the whole grid in place."""
    for grid_row in squares:
        for cell in grid_row:
            cell.value = 0
def shuffle_rows(squares):
    """Swap the values of two distinct rows chosen from the same band of
    three, in place — this keeps a solved grid valid."""
    band = random.randint(0, 2)
    first = 3 * band + random.randint(0, 2)
    second = 3 * band + random.randint(0, 2)
    while first == second:
        first = 3 * band + random.randint(0, 2)
    for col in range(9):
        squares[first][col].value, squares[second][col].value = (
            squares[second][col].value,
            squares[first][col].value,
        )
def shuffle_cols(squares):
    """Swap the values of two distinct columns chosen from the same stack
    of three, in place — this keeps a solved grid valid."""
    stack = random.randint(0, 2)
    first = 3 * stack + random.randint(0, 2)
    second = 3 * stack + random.randint(0, 2)
    while first == second:
        first = 3 * stack + random.randint(0, 2)
    for row in range(9):
        squares[row][first].value, squares[row][second].value = (
            squares[row][second].value,
            squares[row][first].value,
        )
# Generates a fresh puzzle: difficulty*10 cells stay filled (e.g. difficulty 4 -> 40 givens).
def new_board(squares, difficulty):
    """Fill `squares` from the solved rand_board, shuffle it, then blank
    random cells in place until only difficulty*10 givens remain."""
    num_known = difficulty * 10
    for i in range(9):
        for j in range(9):
            squares[i][j].value = rand_board[i][j]
    # Row/column swaps within bands/stacks keep the grid a valid solution.
    for i in range(100):
        shuffle_rows(squares)
        shuffle_cols(squares)
    # Blank random cells, re-rolling coordinates that are already empty.
    for i in range(81 - num_known):
        row = random.randint(0, 8)
        col = random.randint(0, 8)
        while squares[row][col].value == 0:
            row = random.randint(0, 8)
            col = random.randint(0, 8)
        squares[row][col].value = 0
def clicked_square(squares, x, y):
    """Select the cell under pixel (x, y) and deselect every other cell."""
    CELL_PIXELS = 66.666  # 600px board / 9 columns
    row = x / CELL_PIXELS
    col = y / CELL_PIXELS
    print(str(row) + ' ' + str(col))
    print(squares[int(col)][int(row)].value)
    for board_row in squares:
        for cell in board_row:
            cell.selected = False
    squares[int(col)][int(row)].selected = True
# for event in pygame.event.get():
# if event.type == KEYDOWN:
# if event.key == K_1:
# squares[row][col].value = 1
def print_board(squares):
    """Print the grid as nine rows of nine values with no separators."""
    for row in squares:
        print(''.join(str(cell.value) for cell in row))
def solve(squares):
    """Solve the grid in place by recursive backtracking; True on success.

    NOTE(review): the extra is_done() calls and the print/print_board
    statements are debug aids that make solving very slow; behavior is
    kept as-is.
    """
    print('Solving the squares')
    if is_done(squares):
        return True
    is_empty = find_empty(squares)
    if is_empty:
        print_board(squares)
    # Try each candidate digit in the first empty cell; recurse, and undo
    # the placement (reset to 0) when the branch dead-ends.
    for i in range(1, 10):
        print(i)
        if valid(i, squares, is_empty[0], is_empty[1]):
            squares[is_empty[0]][is_empty[1]].value = i
            if is_done(squares):
                return True
            if solve(squares):
                return True
            squares[is_empty[0]][is_empty[1]].value = 0
    return False
def find_empty(squares):
    """Return [row, col] of the first zero-valued cell in row-major order,
    or False when the grid has no empty cell."""
    for row_idx, grid_row in enumerate(squares):
        for col_idx, cell in enumerate(grid_row):
            if cell.value == 0:
                print('empty ' + str(row_idx) + ' ' + str(col_idx))
                return [row_idx, col_idx]
    return False
def is_done(squares):
    """Return True (printing 'done') when no cell is 0; otherwise print
    'not done' and return False."""
    for grid_row in squares:
        for cell in grid_row:
            # print(cell.value)
            if cell.value == 0:
                print('not done')
                return False
    print('done')
    return True
def valid(value, squares, row, col):
    """Check whether `value` may be placed at (row, col).

    Scans the full row and column, then the enclosing 3x3 box.  The box
    scan deliberately skips cells sharing the target's row OR column
    (original semantics preserved); those are already covered by the
    row/column scans.
    """
    if any(squares[row][k].value == value for k in range(9)):
        return False
    if any(squares[k][col].value == value for k in range(9)):
        return False
    top = (row // 3) * 3
    left = (col // 3) * 3
    for r in range(top, top + 3):
        for c in range(left, left + 3):
            if squares[r][c].value == value and (r != row and c != col):
                return False
    # if squares[r][c].value == value:
    #     return False
    return True
def main():
    """Run the sudoku game: pygame event loop handling key input, mouse
    selection and the blinking of the selected cell."""
    print('Just Testing')
    pygame.init()
    window = pygame.display.set_mode((600, 700))  # 100px strip below the board
    clock = pygame.time.Clock()
    blink_timer = 0
    running = True
    squares = [[Square(rand_board[i][j], j, i, window.get_width()) for j in range(9)] for i in
               range(9)]
    new_board(squares, 4)
    # mouse_x = None
    # mouse_y = None
    selected_something = False
    selected = [-1, -1]  # fractional [col, row] of the last click
    global text_blinking
    # solve(squares)
    while running:
        for event in pygame.event.get():
            if event.type == QUIT:
                running = False
            elif event.type == KEYDOWN:
                if event.key == K_ESCAPE:
                    running = False
                # Digit keys write into the currently selected cell.
                if selected_something:
                    if event.key == K_0:
                        squares[int(selected[0])][int(selected[1])].value = 0
                    if event.key == K_1:
                        squares[int(selected[0])][int(selected[1])].value = 1
                    if event.key == K_2:
                        squares[int(selected[0])][int(selected[1])].value = 2
                    if event.key == K_3:
                        squares[int(selected[0])][int(selected[1])].value = 3
                    if event.key == K_4:
                        squares[int(selected[0])][int(selected[1])].value = 4
                    if event.key == K_5:
                        squares[int(selected[0])][int(selected[1])].value = 5
                    if event.key == K_6:
                        squares[int(selected[0])][int(selected[1])].value = 6
                    if event.key == K_7:
                        squares[int(selected[0])][int(selected[1])].value = 7
                    if event.key == K_8:
                        squares[int(selected[0])][int(selected[1])].value = 8
                    if event.key == K_9:
                        squares[int(selected[0])][int(selected[1])].value = 9
                # Board-management hotkeys: e=new easy board, c/r=shuffle,
                # n=blank the grid, s=solve.
                if event.key == K_e:
                    print('new easy')
                    new_board(squares, 4)
                if event.key == K_c:
                    print('shuffle columns')
                    shuffle_cols(squares)
                if event.key == K_n:
                    print('new board')
                    blank_board(squares)
                if event.key == K_r:
                    print('shuffle rows')
                    shuffle_rows(squares)
                if event.key == K_s:
                    print('solving')
                    if solve(squares):
                        print_board(squares)
                    else:
                        print('no solution')
                    draw_board(window, squares)
                    blink_timer = 0
                    text_blinking = True
            elif event.type == MOUSEBUTTONDOWN:
                if event.button == 1:
                    mouse_x, mouse_y = pygame.mouse.get_pos()
                    # NOTE(review): this compares y against the window *width*
                    # to decide whether the click hit the board — it only works
                    # because the board happens to be 600px tall. TODO confirm.
                    if mouse_y and mouse_y <= window.get_width():
                        selected_something = True
                    else:
                        selected_something = False
                    selected[1] = mouse_x / 66.666
                    selected[0] = mouse_y / 66.666
                    # print(str(mouse_x) + ' ' + str(mouse_y))
                    if mouse_y <= 600:
                        clicked_square(squares, mouse_x, mouse_y)
        draw_board(window, squares)
        pygame.display.update()
        clock.tick(30)
        # Toggle the blink flag every 15 frames (~0.5s at 30 FPS).
        blink_timer += 1
        if blink_timer == 15:
            blink_timer = 0
            if text_blinking:
                text_blinking = False
            else:
                text_blinking = True
main()
|
992,289 | 73b674f6d07d4ef58d4e36ef3d93172155c38903 | from django.urls import path
from . import views
# URL namespace for {% url 'account:...' %} / reverse('account:...') lookups.
app_name = 'account'
urlpatterns=[
    path('',views.user_login,name='login') , # included under the project-level URLconf, so the prefix here is empty
    path('logout/',views.user_logout,name='logout'),
    path('register/',views.register,name='register'),
    # NOTE(review): the three paths below lack a trailing slash, unlike the
    # ones above — TODO confirm this inconsistency is intentional.
    path('my-information',views.Myself,name='my-information'),
    path('myself_edit',views.Myself_edit,name='myself_edit'),
    path('myimage',views.my_image,name='myimage')
]
992,290 | e75e4950eef3f3dcd38ef5e444bc41d6fa196a51 | def on_gesture_shake():
basic.show_icon(IconNames.YES)
music.play_melody("G A B C5 C5 B A G ", 120)
input.on_gesture(Gesture.SHAKE, on_gesture_shake)
|
992,291 | a47a105b1e163da8e4079c63e0ed9d3959ee1e93 | from brownie import Contract
from cachetools.func import ttl_cache
from yearn.cache import memory
from yearn.multicall2 import fetch_multicall
from yearn.prices import magic
@memory.cache()
def is_balancer_pool(address):
    """Return True when the contract at `address` exposes the trio of
    methods a Balancer pool is expected to have."""
    pool = Contract(address)
    required = {"getCurrentTokens", "getBalance", "totalSupply"}
    return required.issubset(pool.__dict__)
@ttl_cache(ttl=600)
def get_price(token, block=None):
    """Price one Balancer pool share (LP token).

    Sums the value of each underlying token balance (via magic.get_price)
    and divides by the pool's total supply.  Assumes the pool token itself
    has 18 decimals (supply / 1e18) — TODO confirm for exotic pools.
    """
    pool = Contract(token)
    tokens, supply = fetch_multicall([pool, "getCurrentTokens"], [pool, "totalSupply"], block=block)
    supply = supply / 1e18
    balances = fetch_multicall(*[[pool, "getBalance", token] for token in tokens], block=block)
    # Scale each raw balance by that underlying token's own decimals.
    balances = [balance / 10 ** Contract(token).decimals() for balance, token in zip(balances, tokens)]
    total = sum(balance * magic.get_price(token, block=block) for balance, token in zip(balances, tokens))
    return total / supply
|
992,292 | ea07d2ee7cd00bb061aff912c4edf380a226eaa2 | from django.forms import ModelForm
from django import forms
from .models import Data
class DateInput(forms.DateInput):
    """HTML5 date-picker widget.

    NOTE(review): defined but not referenced by Form below — wire it into
    Meta.widgets or remove it. TODO confirm.
    """
    input_type = 'date'
class Form(ModelForm):
    """ModelForm for Data covering the daily progress-report fields."""
    class Meta:
        model = Data
        fields = ['name','reports', 'team_lead','hours', 'today_progress','today_doc', 'concern', 'next_plan', 'next_doc']
        widgets = {
            'team_lead': forms.RadioSelect()
        }
992,293 | 21b60b38f049f070824867340c569165fd4e0cc1 | """ Contains the reward functions used to compute the instant and terminal rewards for an EV Trip Scheduler.
"""
class SimpleRewards:
    """Instant and terminal reward functions for an EV trip scheduler."""

    def ComputeTimeReward(self, currentTime, expectedTime):
        """Computes a reward for a given time step.

        Positive when the trip is ahead of schedule, negative when late.

        Keyword arguments:
        currentTime -- The current timestep
        expectedTime -- The expected time to complete the trip.
        """
        # BUG FIX(review): the original conditional had byte-identical
        # branches ("* 1" on both sides), so it reduces to the plain
        # difference — behavior is unchanged.  If late arrivals were meant
        # to carry a different multiplier, restore it here. TODO confirm.
        return expectedTime - currentTime

    def ComputeRewardForDestinationWithoutCharger(self, currentBatteryCharge, batteryCapacity):
        """Reward for arriving at a destination without a charger.

        Returns 0 when the battery charge exceeds 20% of capacity,
        otherwise -1.

        Keyword arguments:
        currentBatteryCharge -- The current charge level in the battery.
        batteryCapacity -- The capacity of the battery in KWH.
        """
        return 0 if currentBatteryCharge > batteryCapacity * .20 else -1

    def ComputeBatteryRewardForDriving(self, currentBatteryCharge, batteryCapacity):
        """Reward for the battery level after a driving action.

        Returns 0 when the charge exceeds 20% of capacity, otherwise -1.
        NOTE(review): the original docstring said 10% but the code has
        always compared against 20%; the code's behavior is kept — TODO
        confirm which threshold was intended.

        Keyword arguments:
        currentBatteryCharge -- The current charge level in the battery.
        batteryCapacity -- The capacity of the battery in KWH.
        """
        return 0 if currentBatteryCharge > batteryCapacity * .20 else -1

    def ComputeBatteryRewardForCharging(self, currentBatteryCharge, batteryCapacity, purchasedPower, distanceFromRoute=0, chargingPrice=0.13):
        """Reward for the battery level after a charging action.

        Returns the (negative) cost of the purchased power when the charge
        is below 90% of capacity, otherwise -1 to discourage overcharging.
        NOTE(review): the original docstring said 80% but the code uses
        90%; the code's behavior is kept — TODO confirm.
        `distanceFromRoute` is accepted but currently unused (kept for
        interface compatibility).

        Keyword arguments:
        currentBatteryCharge -- The current charge level in the battery.
        batteryCapacity -- The capacity of the battery in KWH.
        purchasedPower -- The amount of energy purchased.
        chargingPrice -- Price per unit of energy.
        """
        # TODO Base Cost + rate cost
        return -(purchasedPower * chargingPrice) if currentBatteryCharge < batteryCapacity * .90 else -1
992,294 | e2a5670462444a05419042b14c4a9a78789073f7 | import random
import string
from dataclasses import is_dataclass
from haproxy.collections import Collection
from haproxy.dataclasses import Proxy, Frontend
class TimeSeries(Collection):
    """A Collection of snapshots of one entity over time."""

    def __aggregate__(self, field):
        """Collect `field`'s value from every item.

        For dataclass-typed fields, returns a freshly synthesized
        TimeSeries subclass wrapping the non-None values (the random
        suffix keeps dynamically created class names unique); returns
        None when there are no items or no non-None values.

        NOTE(review): for non-dataclass fields this returns a *generator*
        (lazily bound to self._items), not a list — callers must consume
        it before mutating the collection. TODO confirm this is intended.
        """
        if not self._items:
            return None
        else:
            values = (getattr(i, field.name) for i in self._items)
            if is_dataclass(field.type):
                values = [v for v in values if v is not None]
                if len(values) >= 1:
                    cls_uuid = ''.join(random.choices(string.ascii_uppercase + string.digits, k=6))
                    cls = type(field.type.__name__ + 'TimeSeries_' + cls_uuid, (TimeSeries, field.type), {})
                    return cls(values)
                else:
                    return None
            else:
                return values

    def append(self, item):
        """Append `item`, enforcing that it is an instance of the concrete
        (non-Collection) base classes of this TimeSeries subclass."""
        classes = tuple(cls for cls in self.__class__.__mro__ if cls not in Collection.__mro__
                        and not issubclass(cls, Collection))
        if not isinstance(item, classes):
            raise TypeError('Item must be an instance of ' +
                            ' or '.join(', '.join([c.__name__ for c in classes]).rsplit(', ', 1)))
        self._items.append(item)
class ProxyTimeSeries(TimeSeries, Proxy):
    """Time series of Proxy snapshots (marker class; behavior inherited)."""
    pass
class FrontendTimeSeries(TimeSeries, Frontend):
    """Time series of Frontend snapshots (marker class; behavior inherited)."""
    pass
|
992,295 | 732c9ae54ed2cf2caf65dade6f45af3d9317e60a | ## Class Living Thing Factory
# Uses reflection to create instances of living things
# Requires that classes have the same name as the import they
# come from
class LivingThingFactory():
    """Factory that builds living-thing instances via reflection.

    Relies on the convention that each class lives in a module bearing
    the same name as the class itself.
    """

    def __init__(self):
        pass

    def create_life(self, name):
        """Import module `name` and return an instance of its attribute
        of the same name.

        SECURITY NOTE: `name` goes straight into __import__; never pass
        untrusted input.
        """
        module = __import__(name)
        constructor = getattr(module, name)
        return constructor()
992,296 | ddc5a651bf9b0a61c74396db282b7e21b39fbf78 | import string
import random
import requests
class GameFlask:
    """Grid-vs-word game backing the Flask front end (3-letter grid)."""

    def __init__(self):
        self.grid = self.random_grid()

    def random_grid(self):
        """Return a random string of three uppercase ASCII letters."""
        letters = [random.choice(string.ascii_uppercase) for _ in range(3)]
        return ''.join(letters)

    def is_valid(self, word):
        """Return True when `word` matches the grid exactly."""
        return word == self.grid

    def is_dico(self, word):
        """Ask the dictionary web service whether `word` exists (network call)."""
        return requests.get(f"https://wagon-dictionary.herokuapp.com/{word}").json()['found']
class Game:
    """Interactive variant: prompts the player for a word on construction."""

    def __init__(self):
        self.grid = self.random_grid()
        print("To cheat -- grid = [{}]".format(self.grid))
        self.word = input("Choice your word? ")
        print("To run with test_game.py -- word = [{}]".format(self.word))

    def random_grid(self):
        """Return a random string of nine uppercase ASCII letters."""
        letters = [random.choice(string.ascii_uppercase) for _ in range(9)]
        return ''.join(letters)

    def is_valid(self):
        """Return True when the player's word matches the grid exactly."""
        return self.word == self.grid

    def is_dico(self, word):
        """Ask the dictionary web service whether `word` exists (network call)."""
        return requests.get(f"https://wagon-dictionary.herokuapp.com/{word}").json()['found']
if __name__ == "__main__":
gg = Game()
if gg.is_valid():
print("{} is the same {}".format(gg.word, gg.grid))
else:
print("{} is NOT the same {}".format(gg.word, gg.grid))
if gg.is_dico( gg.word ):
print("{} is in dico".format(gg.word))
else:
print("{} is NOT in dico".format(gg.word))
|
992,297 | 79806d5cc1c6685745bb7f866452c2cba49b75ee | # -*- coding: utf-8 -*-
import pymysql
import scrapy
from hongkong.items import HongKongHtmlFileItem
class DocumentFileDownloadSpiderSpider(scrapy.Spider):
    '''Downloads HKEX filings of source_category "document type" as PDFs.

    Pending links are read from MySQL; each response body is written to a
    network share and an item marking the report as downloaded is yielded.
    '''
    name = 'HKEX_document_file_download_spider'
    allowed_domains = ['hkex.com']
    start_urls = ['http://hkex.com/']

    def start_requests(self):
        """Query MySQL for not-yet-downloaded HKG links and schedule a GET each."""
        # NOTE(review): database credentials are hard-coded here — move them
        # to settings/environment before shipping.
        conn = pymysql.connect(host="10.100.4.99", port=3306, db="opd_common", user="root", passwd="OPDATA",
                               charset="utf8")
        cursor = conn.cursor()
        sql = "select corresponding_link, report_id from financial_html_origin_began_complete where " \
              "source_category='document type' and is_downloaded='0' and country_code='HKG'"
        cursor.execute(sql)
        results = cursor.fetchall()
        # if results:
        for res in results:
            corresponding_link = res[0]
            report_id = res[1]
            item = HongKongHtmlFileItem()
            yield scrapy.FormRequest(method='GET', url=corresponding_link, callback=self.parse,
                                     errback=self.errback_scraping,
                                     meta={
                                         'corresponding_link': corresponding_link,
                                         'report_id': report_id,
                                         'item': item,
                                         # 'proxy': 'http://' + random.choice(self.ip_list)
                                     })

    def parse(self, response):
        """Write the PDF body to disk and yield an item marking it downloaded."""
        report_id = response.meta['report_id']
        with open('W:/hkg/html/hongkong/' + report_id + '.pdf', 'wb') as wf:
            wf.write(response.body)
            wf.close()  # NOTE(review): redundant inside the `with` block
        doc_local_path = 'X:/data/html/hongkong/' + str(report_id) + '.pdf'
        print('已成功下载' + report_id)
        item = HongKongHtmlFileItem()
        item['is_downloaded'] = '1'
        item['doc_local_path'] = doc_local_path
        item['report_id'] = report_id
        yield item

    def errback_scraping(self, failure):
        """Log the report id of any request that failed to download."""
        request = failure.request
        report_id = request.meta['report_id']
        self.logger.info('未成功下载 %s...', report_id)
        # print('未成功下载' + report_id)
992,298 | f68a56f66d13cb4fac56f0d0818428c1ceb2d759 | from bson.json_util import dumps
from flask_pymongo import PyMongo
from bson.objectid import ObjectId
from bson import json_util
import logging
from flask import jsonify, Blueprint, request, current_app, Response, jsonify
from werkzeug.security import generate_password_hash, check_password_hash
from ..Database.CRUD import CRUD
users_api = Blueprint('users_api', __name__)
@users_api.route('/users', methods=['POST'])
def insert_user():
    """Create a user: hash the supplied password, then store the JSON body."""
    if(not request.json):
        return 'bad request',400
    _json = request.json
    pass_hash = generate_password_hash(_json['password'])
    # Overwrite the plaintext password before persisting.
    request.json['password'] = pass_hash
    print(request.json)  # NOTE(review): logs user data incl. the hash — remove in production
    action = CRUD(collection='users')
    response = action.insert(request.json)
    return Response(response, status=200, mimetype='application/json')
@users_api.route('/users',methods=['GET'])
def get_users():
    """Return every user document in the collection."""
    action = CRUD(collection='users')
    response = action.get_all()
    return response
@users_api.route('/users/<id>',methods=['GET'])
def get_user_by_id(id):
    """Return the single user whose id matches `id`."""
    action = CRUD(collection='users')
    response = action.get_one(id)
    return response
@users_api.route('/users/<id>', methods=['DELETE'])
def delete_user(id):
    """Delete the user whose id matches `id`."""
    action = CRUD(collection='users')
    response = action.delete(id)
    return response
@users_api.route('/users/<id>', methods=['PUT'])
def update_user(id):
    """Update a user's mutable profile fields.

    Expects a JSON body with `username`, `email`, `avatar_uri` and `bio`.
    Returns 400 when the request carries no JSON body.
    """
    _json = request.json
    if(not request.json):
        # BUG FIX(review): the original called `json.dumps` (never imported;
        # only bson's `dumps` is) on a *set* literal, which would raise a
        # NameError at runtime. Use the imported `dumps` on a dict instead.
        return Response(dumps({'error': 'invalid request - json is empty'}), status=400,
                        mimetype='application/json')
    data = {
        'username' : _json['username'],
        'email' : _json['email'],
        'avatar': _json['avatar_uri'],
        'bio': _json['bio']
    }
    action = CRUD(collection='users')
    mongo_resp = action.update(data= data, id = id)
    return mongo_resp
@users_api.route('/passwordchange/<id>', methods=['PUT'])
def update_password(id):
    """Change a user's password. TODO: not implemented yet."""
    pass
@users_api.route('/authenticate', methods=['GET'])
def authenticate_user():
    """Authenticate a user. TODO: not implemented yet."""
    pass
992,299 | bcd316fd8d4a8e6d4b72f2b5c73a3ee9003683dd | import PySimpleGUI as sg
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
import time
from tqdm import tqdm
# Scrapes every lecture page of an intuit.ru course with Selenium and stitches
# them into one local HTML file, driven by a PySimpleGUI front end.
sg.theme('TealMono')
layout =[ [sg.Image(r'C:\Users\Irina\Desktop\practice\Intuit.png')],
          [ sg.Text('Программа для закачки курса INTUIT')],
          [ sg.Button('Ввод ссылки и имени файла')],
          [ sg.Text('введите ссылку на курс: ', size=(22,1)), sg.Input(k = '-IN-'), sg.Button('Парсинг')],
          [ sg.Text('введите имя файла: ', size=(22,1)), sg.Input(k ='-OUT-') ],
          [ sg.ProgressBar(10, orientation='h', size=(46, 20), key='progressBar')],
          [ sg.Text('Лекция: ', size=(22,1)), sg.Input(k = 'lecture')],
          [ sg.Text('Номер страницы: ', size=(22,1)), sg.Input(k = 'page', size=(10,1))],
          [ sg.ProgressBar(10, orientation='h', size=(46, 20), key='progress')]]
window = sg.Window('Парсинг лекций с сайта Интуит', layout)
progress_bar = window.FindElement('progress')
# url.txt holds "url file_name" pairs, one per line.
# NOTE(review): the file handle is never closed, and the loop below only
# rebinds a local copy of each line — it has no lasting effect.
fh = open ("url.txt")
ls = fh.readlines()
for line in ls:
    line = line.strip()
line_num = 0
# Main GUI event loop.
while True:
    event,values = window.read()
    print(event)
    if event == 'Ввод ссылки и имени файла':
        # Load the next url/filename pair into the input fields.
        line = ls[line_num]
        url, file_name = line.split()
        line_num += 1
        # NOTE(review): should be ">= len(ls)" — as written, ls[line_num]
        # above raises IndexError one click before this guard fires.
        if line_num > len(ls):
            url, file_name = '', ''
        window["-IN-"].update(url)
        window["-OUT-"].update(file_name)
        window['page'].update()
        window['lecture'].update()
    if event == 'Парсинг':
        # Start scraping: open the output file and write the HTML preamble.
        file_name = values['-OUT-']
        fhd = open( file_name, "w")
        fhd.write("<html><head>\n")
        fhd.write('<meta http-equiv="Content-Type" content="text/html; charset=cp1251">\n')
        fhd.write('<link type="text/css" rel="stylesheet" href="ab.css">')
        fhd.write("</head><body>\n")
        # NOTE(review): in Selenium this positional argument is a firefox
        # binary path, not a driver directory — TODO confirm it works here.
        driver = webdriver.Firefox('C://Users//Irina//AppData//Local//Programs//Python//Python36')
        url = values['-IN-']
        driver.get( url )
        # Count lectures = all sidebar links minus tests and exams.
        link = driver.find_element_by_id('non-collapsible-item-1')
        all_links = link.find_elements_by_css_selector('a')
        print(len(all_links))
        test_links = link.find_elements_by_partial_link_text("Тест")
        print(len(test_links))
        exam_links = link.find_elements_by_partial_link_text('Экзамен')
        print(len(exam_links))
        lect_count = len(all_links)-len(test_links)- len(exam_links)
        print(lect_count)
        # Open the first lecture.
        link = driver.find_element_by_link_text('Лекция 1')
        link.send_keys(Keys.RETURN)
        print("лекция 1")
        time.sleep(3)
        page_num = 0
        lect_num = 0
        lect_name = ""
        # Busy-wait until the first lecture page finishes loading.
        while True:
            doc_state = driver.execute_script("return document.readyState")
            if doc_state == 'complete':
                break
        text_header = ""
        lect_headers = driver.find_elements_by_css_selector('div.title span.zag')
        time.sleep(.5)
        for lect_header in lect_headers:
            text_header = lect_header.text
            html_header = '<div class = "abzag"> '+ text_header + ' </div>'
            #print( text_header)
        window['lecture'].update(text_header)
        #print("Имя лекции:", lect_name )
        print("Имя лекции:", lect_name )
        print("Текст заголовка:", text_header)
        # A header change means a new lecture started: reset the page counter.
        if lect_name != text_header:
            page_num = 1
            lect_name = text_header
            print("Имя лекции if:", lect_name )
            print("Текст заголовка if:", text_header)
        else:
            page_num += 1
        window['page'].update(page_num)
        fhd.write( html_header)
        prev_header = text_header
        contents = driver.find_elements_by_css_selector('div.spelling-content-entity')
        content = contents[-1]
        html_content = content.get_attribute("outerHTML")
        # Make relative image links absolute so the saved file renders offline.
        html_content = html_content.replace('"/EDI', '"http://www.intuit.ru/EDI')
        fhd.write( html_content + "\n")
        time.sleep(.5)
        # Page loop: keep clicking the "next" link until it disappears.
        while True:
            try:
                driver.find_element_by_link_text('Дальше >>').send_keys(Keys.RETURN)
                time.sleep(30)
            except:
                break
            # Busy-wait for the next page to finish loading.
            while True:
                doc_state = driver.execute_script("return document.readyState")
                if doc_state == 'complete':
                    break
            time.sleep(.5)
            lect_headers = driver.find_elements_by_css_selector('div.title span.zag')
            if len( lect_headers) < 1:
                break
            text_header = lect_headers[-1].text
            print( text_header)
            window['lecture'].update(text_header)
            if lect_name != text_header:
                # New lecture: advance the course-level progress bar.
                page_num = 1
                lect_name = text_header
                lect_num += 1
                print (lect_num)
                new_val = lect_num*10//lect_count
                window['progressBar'].update(new_val)
            else:
                page_num += 1
                window['progress'].update(page_num)
            window['page'].update(page_num)
            html_header = '<div class = "abzag"> '+ text_header + ' </div>'
            if prev_header != text_header:
                fhd.write( html_header + "\n")
                prev_header = text_header
            time.sleep(.5)
            # Write the lecture annotation once, when present.
            annotations = driver.find_elements_by_css_selector('div.annotation')
            if len( annotations ) > 0:
                annotation = annotations[0]
                print( annotation.text )
                html_annotation = annotation.get_attribute("outerHTML")
                fhd.write( html_annotation + "\n")
            contents = driver.find_elements_by_css_selector('div.spelling-content-entity')
            content = contents[-1]
            html_content = content.get_attribute("outerHTML")
            html_content = html_content.replace('"/EDI', '"http://www.intuit.ru/EDI')
            fhd.write( html_content + "\n")
        # Close out the HTML document and the browser session.
        fhd.write("</body></html>\n")
        fhd.close()
        driver.close()
        time.sleep(3)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.