code
stringlengths 3
1.05M
| repo_name
stringlengths 5
104
| path
stringlengths 4
251
| language
stringclasses 1
value | license
stringclasses 15
values | size
int64 3
1.05M
|
|---|---|---|---|---|---|
# WARNING(review): hard-coded credentials committed to source control.
# These look like live Azure Storage / Cognitive Services secrets; they
# should be rotated and loaded from environment variables or a secret
# store instead of being embedded here.
AZURE_STORAGE_NAME = 'mirisimagestorage'
AZURE_STORAGE_KEY = '2F2hCPXsmhGBFUUBFoFfTF1mlavK3YYdkX+tlAauYv6mly621fXYQSLckqEZfHR+XVBBI3PgV0spOed+JLrlKg=='
# OCR endpoint and key for Microsoft Cognitive Services (vision v1.0 OCR API).
COGNITIVE_URL = 'https://westus.api.cognitive.microsoft.com/vision/v1.0/ocr'
COGNITIVE_KEY = '8ae1ba5adb144e69aa1016f66ff00b97'
|
KimBoWoon/Cognitive-API
|
config.py
|
Python
|
mit
| 279
|
"""
Copyright (c) 2017 Jet Propulsion Laboratory,
California Institute of Technology. All rights reserved
"""
import unittest
import ClimatologySpark2
class CCMPTest(unittest.TestCase):
def cmmp_test(self):
dsName = 'CCMPWind'
nEpochs = '1'
nWindow = '1'
averager = 'pixelMean'
sparkConfig = 'multicore,4,4'
outHdfsPath = 'cache/clim'
ClimatologySpark2.main([dsName, nEpochs, nWindow, averager, sparkConfig, outHdfsPath])
|
dataplumber/nexus
|
climatology/clim/test/ccmpTest.py
|
Python
|
apache-2.0
| 487
|
# -*- coding: latin1 -*-
################################################################################################
#
#
import snap, datetime, sys, time, json, os, os.path, shutil, time, random, math
import numpy as np
from math import*
# Script auxiliar para cálculos matemáticos que deve estar no mesmo diretório deste aqui.
import plot_metrics
# Script auxiliar para gerar histogramas
import histogram
import networkx as nx
# Python 2-only hack: force UTF-8 as the default codec. sys.setdefaultencoding
# is deleted by site.py at startup, hence the reload(sys) to get it back.
# Unnecessary (and invalid) under Python 3.
reload(sys)
sys.setdefaultencoding('utf-8')
######################################################################################################################################################################
## Status - Versão 1 - Script para plotar propriedades estruturais das redes-ego
##
## ERRO DE ALOCAÇÃO DE MEMÓRIA!!!!!
######################################################################################################################################################################
######################################################################################################################################################################
#
# Armazenar as propriedades do dataset
#
######################################################################################################################################################################
def prepare(source_dir, source2_dir):
    """Collect per-network structural metrics from two result trees.

    For each ego network n1..n10, reads <net>_net_struct.json from both
    directories (the second tree holds the betweenness-centrality results)
    and gathers the mean/std pair for every metric. Networks whose file is
    missing from source_dir are skipped.

    Returns a dict mapping metric label -> {net: {'media': ..., 'std': ...}}.
    """
    print("\n######################################################################\n")
    data = {
        'Nodes': {},
        'Edges': {},
        'Diameter': {},
        'Close Centrality': {},
        'Betweenness Centrality Nodes': {},
        'Betweenness Centrality Edges': {},
        'Modularity': {},
    }
    # Extract the mean ("media") / standard deviation ("desvio_padrao") pair
    # for one metric from a loaded overview dict.
    summarize = lambda src, key: {'media': src[key]['media'],
                                  'std': src[key]['desvio_padrao']}
    for index in range(1, 11):
        net = "n%d" % index
        path1 = source_dir + net + "_net_struct.json"
        path2 = source2_dir + net + "_net_struct.json"
        if not os.path.isfile(path1):
            continue
        with open(path1, 'r') as f, open(path2, 'r') as g:
            overview = json.load(f)
            overview2 = json.load(g)
        data['Nodes'][net] = summarize(overview, 'Nodes')
        data['Edges'][net] = summarize(overview, 'Edges')
        data['Diameter'][net] = summarize(overview, 'Diameter')
        data['Close Centrality'][net] = summarize(overview, 'CloseCentr')
        data['Betweenness Centrality Nodes'][net] = summarize(overview2, 'BetweennessCentrNodes')
        data['Betweenness Centrality Edges'][net] = summarize(overview2, 'BetweennessCentrEdges')
        data['Modularity'][net] = summarize(overview, 'Modularity')
    return data
######################################################################################################################################################################
######################################################################################################################################################################
#
# Método principal do programa.
#
######################################################################################################################################################################
######################################################################################################################################################################
def main():
    """Plot structural metrics comparing both snapshot directories.

    Reads precomputed metrics from the module-level ``source``/``source2``
    trees, for the graphs both with and without the ego node, then plots
    one bar chart per metric via ``plot_metrics.plot_bars_full``.
    NOTE: Python 2 only (print statements, dict.iteritems).
    """
    os.system('clear')
    print "################################################################################"
    print" "
    print" Script para apresentação de propriedades do dataset (rede-ego) "
    print" "
    print"#################################################################################"
    print
    # Metrics computed on graphs that still contain the ego node.
    source_dir1 = source+"graphs_with_ego/"
    source2_dir1 = source2+"graphs_with_ego/"
    data1 = prepare(source_dir1,source2_dir1)
    # ...and on graphs with the ego node removed.
    source_dir2 = source+"graphs_without_ego/"
    source2_dir2 = source2+"graphs_without_ego/"
    data2 = prepare(source_dir2,source2_dir2)
    if data1 is not None and data2 is not None:
        # One bar chart per metric, comparing with-ego vs without-ego values.
        for k,v in data1.iteritems():
            metric = k
            plot_metrics.plot_bars_full(output,data1[k],data2[k],metric)
    ######################################################################
    ######################################################################
    print("\n######################################################################\n")
    print("Script finalizado!")
    print("\n######################################################################\n")
######################################################################################################################################################################
#
# INÍCIO DO PROGRAMA
#
######################################################################################################################################################################
# Roots of the two precomputed metric trees (snap and snap_v2 snapshots).
source = "/home/amaury/Dropbox/net_structure_hashmap/snap/"
source2 = "/home/amaury/Dropbox/net_structure_hashmap/snap_v2/"
# Directory where the generated bar charts are written.
output = "/home/amaury/Dropbox/net_structure_hashmap_statistics/snap/"
# Run the main entry point.
if __name__ == "__main__": main()
|
amaurywalbert/twitter
|
net_structure/old/hashmap_plot_network_structure_with_betweenness.py
|
Python
|
gpl-3.0
| 5,411
|
# -*- coding: utf-8 -*-
from .constants import *
from threading import Lock
from . import soundqueue
from . import messages
class Users(dict):
    """Registry of all connected users, keyed by session number."""

    def __init__(self, mumble_object, callbacks):
        self.mumble_object = mumble_object
        self.callbacks = callbacks
        self.myself = None          # user object of the pymumble thread itself
        self.myself_session = None  # session number of the pymumble thread itself
        self.lock = Lock()          # guards concurrent create/update/remove

    def update(self, message):
        """Create or update a user, based on an incoming message.

        Fires PYMUMBLE_CLBK_USERCREATED for unknown sessions and
        PYMUMBLE_CLBK_USERUPDATED (with the changed fields) otherwise.
        """
        # "with" guarantees the lock is released even when a callback or the
        # User constructor raises; the previous bare acquire()/release() pair
        # left the lock held forever on error, deadlocking later updates.
        with self.lock:
            if message.session not in self:
                self[message.session] = User(self.mumble_object, message)
                self.callbacks(PYMUMBLE_CLBK_USERCREATED, self[message.session])
                if message.session == self.myself_session:
                    self.myself = self[message.session]
            else:
                actions = self[message.session].update(message)
                self.callbacks(PYMUMBLE_CLBK_USERUPDATED, self[message.session], actions)

    def remove(self, message):
        """Remove a user object based on server info, firing USERREMOVED."""
        with self.lock:
            if message.session in self:
                user = self[message.session]
                del self[message.session]
                self.callbacks(PYMUMBLE_CLBK_USERREMOVED, user, message)

    def set_myself(self, session):
        """Set the "myself" user from its session number."""
        self.myself_session = session
        if session in self:
            self.myself = self[session]

    def count(self):
        """Return the count of connected users."""
        return len(self)
class User(dict):
    """State of a single connected user, stored as a dict of fields."""

    def __init__(self, mumble_object, message):
        self.mumble_object = mumble_object
        self["session"] = message.session
        self.update(message)
        # Holds this user's incoming audio.
        self.sound = soundqueue.SoundQueue(self.mumble_object)

    def update(self, message):
        """Update user state, based on an incoming message.

        Returns a dict of the fields that actually changed, useful for the
        callback functions. Comment/texture blobs are fetched lazily via
        the blob manager when only their hash is present.
        """
        actions = dict()
        if message.HasField("actor"):
            actions["actor"] = message.actor
        for (field, value) in message.ListFields():
            # session/actor are handled above; comment/texture go through
            # the blob handling below.
            if field.name in ("session", "actor", "comment", "texture"):
                continue
            actions.update(self.update_field(field.name, value))
        if message.HasField("comment_hash"):
            if message.HasField("comment"):
                self.mumble_object.blobs[message.comment_hash] = message.comment
            else:
                self.mumble_object.blobs.get_user_comment(message.comment_hash)
        if message.HasField("texture_hash"):
            if message.HasField("texture"):
                self.mumble_object.blobs[message.texture_hash] = message.texture
            else:
                self.mumble_object.blobs.get_user_texture(message.texture_hash)
        return actions

    def update_field(self, name, field):
        """Store one state value; return {name: value} only if it changed."""
        actions = dict()
        if name not in self or self[name] != field:
            self[name] = field
            actions[name] = field
        return actions

    def get_property(self, property):
        """Return a stored field value, or None when absent."""
        # NOTE: the parameter name shadows the builtin "property"; kept
        # unchanged for backward compatibility with keyword callers.
        if property in self:
            return self[property]
        else:
            return None

    def _send_user_state(self, params):
        """Build and send a ModUserState command (shared by all mutators)."""
        cmd = messages.ModUserState(self.mumble_object.users.myself_session, params)
        self.mumble_object.execute_command(cmd)

    def _toggle_params(self, self_key, server_key, value):
        """Build params toggling *value*, using the self_* key when the
        target user is this connection itself."""
        params = {"session": self["session"]}
        if self["session"] == self.mumble_object.users.myself_session:
            params[self_key] = value
        else:
            params[server_key] = value
        return params

    def mute(self):
        """Mute a user"""
        self._send_user_state(self._toggle_params("self_mute", "mute", True))

    def unmute(self):
        """Unmute a user"""
        self._send_user_state(self._toggle_params("self_mute", "mute", False))

    def deafen(self):
        """Deafen a user"""
        self._send_user_state(self._toggle_params("self_deaf", "deaf", True))

    def undeafen(self):
        """Undeafen a user"""
        self._send_user_state(self._toggle_params("self_deaf", "deaf", False))

    def suppress(self):
        """Disable a user"""
        self._send_user_state({"session": self["session"], "suppress": True})

    def unsuppress(self):
        """Enable a user"""
        self._send_user_state({"session": self["session"], "suppress": False})

    def recording(self):
        """Set the user as recording"""
        self._send_user_state({"session": self["session"], "recording": True})

    def unrecording(self):
        """Set the user as not recording"""
        self._send_user_state({"session": self["session"], "recording": False})

    def comment(self, comment):
        """Set the user comment"""
        self._send_user_state({"session": self["session"], "comment": comment})

    def texture(self, texture):
        """Set the user texture"""
        self._send_user_state({"session": self["session"], "texture": texture})

    def move_in(self, channel_id):
        """Request to move this connection into the given channel."""
        session = self.mumble_object.users.myself_session
        cmd = messages.MoveCmd(session, channel_id)
        self.mumble_object.execute_command(cmd)

    def send_message(self, message):
        """Send a text message to the user."""
        cmd = messages.TextPrivateMessage(self["session"], message)
        self.mumble_object.execute_command(cmd)
|
robozman/pymumblegui
|
pymumble/pymumble_py3/users.py
|
Python
|
gpl-3.0
| 7,133
|
# -*- coding: utf-8 -*-
# Copyright 2013 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nailgun.test.base import BaseIntegrationTest
from nailgun.utils import reverse
class TestHandlers(BaseIntegrationTest):
    """Integration tests for the node allocation statistics endpoint."""

    def _get_allocation_stats(self):
        """Hit the allocation stats handler and return its JSON body."""
        response = self.app.get(reverse('NodesAllocationStatsHandler'))
        return response.json_body

    def test_allocation_stats_unallocated(self):
        """A single unallocated node is counted in both totals."""
        self.env.create_node(api=False)
        allocation = self._get_allocation_stats()
        self.assertEqual(1, allocation['total'])
        self.assertEqual(1, allocation['unallocated'])

    def test_allocation_stats_total(self):
        """Allocated and unallocated nodes both contribute to the total."""
        self.env.create_node(api=False)
        self.env.create(
            cluster_kwargs={},
            nodes_kwargs=[{"pending_addition": True}])
        allocation = self._get_allocation_stats()
        self.assertEqual(2, allocation['total'])
        self.assertEqual(1, allocation['unallocated'])
|
nebril/fuel-web
|
nailgun/nailgun/test/integration/test_node_allocation_stats_handler.py
|
Python
|
apache-2.0
| 1,552
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Must satisfy the signature
# [t,X,D,P] = sim_function(T,X0,D0,P0,I0);
"""Uses Assimulo solvers"""
import numpy as np
from assimulo.problem import Explicit_Problem
from assimulo.solvers.sundials import CVode
from assimulo.solvers import LSODAR
from assimulo.solvers import RungeKutta34
from assimulo.solvers import RungeKutta4
from assimulo.solvers import Dopri5
import pylab as plt
import utils as U
PLT = False
class SIM(object):
    """Simulator wrapper around an Assimulo model of a bouncing ball.

    Satisfies the expected sim_function signature:
    [t, X, D, P] = sim_function(T, X0, D0, P0, I0).
    """

    def __init__(self, _, pvt_init_data):
        # pvt_init_data is accepted to satisfy the expected constructor
        # signature but is unused here.
        self.model = create_model()

    @U.memoize2disk(U.memoize_hash_method)
    def sim(self, TT, X0, D, P, I, property_checker):
        """Simulate over the time span TT starting from state X0.

        Returns ((t, X, D, P), property_violated); D and P are passed
        through unchanged. Results are memoized to disk by the decorator.
        State layout is [x, y, vx, vy] (see create_model's dynamics).
        """
        tol = 1e-2
        # Ball effectively at rest on the ground (|y| and |vy| within tol),
        # or already below it: clamp y/vy to exactly 0 and skip the solve.
        if ((abs(X0[1]) <= tol and abs(X0[3]) <= tol) or X0[1] < 0):
            X0[1] = 0
            X0[3] = 0
            return (TT[0], X0, D, P), False
        #print X0
        #Simulation
        ncp = 200 #Number of communication points
        self.model.re_init(TT[0], X0)
        t, y = self.model.simulate(TT[1], ncp) #Simulate
        #Print event information
        #sim.print_event_data()
        # t is a list for some reasons. Maybe there is a better way to
        # fix this.
        t = np.array(t)
        # Truncate at the first property violation, or keep the full run.
        (t_, y_), property_violated = property_checker.first_sat_value_or_end(t, y)
        # if xv and tv are empty, no violation were found
        ret_t, ret_X = t_, y_
        ret_D = D
        ret_P = P
        if PLT:
            plt.plot(y[:, 0], y[:, 1], 'b-')#, linewidth=2)
            #plt.plot(t, y[:, 3], 'b-')#, linewidth=2)
            #plt.plot(t, y[:, 1], 'r-', linewidth=2)
        return (ret_t, ret_X, ret_D, ret_P), property_violated
def create_model():
    """Build and return the configured Assimulo solver for the bouncing ball.

    The model is a ball in free fall in the x-y plane; a state event fires
    when y crosses zero and the event handler applies a damped rebound.
    """

    def dynamics(t, X, sw):
        """Right-hand side of the ODE (free fall). The switch vector sw is
        fixed during integration and only changed in event handling."""
        g = 1
        Y = X.copy()
        Y[0] = X[2]   # x'  = vx
        Y[1] = X[3]   # y'  = vy
        Y[2] = 0      # vx' = 0
        Y[3] = -g     # vy' = -g
        return Y

    def state_events(t, X, sw):
        """Event functions: an event fires when y (X[1]) crosses zero."""
        return [X[1]]

    def handle_event(solver, event_info):
        """Called by Assimulo on an event: bounce by damping/inverting vy."""
        state_info = event_info[0]  # only state events are of interest
        if state_info[0] != 0 and solver.sw[0]:
            X = solver.y
            if X[3] < 0:  # the ball is falling (vy < 0)
                # Lift slightly off the ground; 1e-3 gives better results
                # with Dopri than the 1e-5 used with CVode.
                X[1] = 1e-3
                X[3] = -0.75*X[3]  # damped rebound

    # Initial conditions and switches.
    y0 = [0., 0., 0., 0.]
    t0 = 0.0
    switches0 = [True]

    # Assemble the Assimulo problem.
    mod = Explicit_Problem(dynamics, y0, t0, sw0=switches0)
    mod.state_events = state_events
    mod.handle_event = handle_event
    mod.name = 'Bouncing Ball in X-Y'

    # Dopri5 is used because the alternatives proved problematic in practice:
    # CVode leaked memory, LSODAR hung (and possibly leaked), and the fixed
    # RungeKutta solvers were not kept either.
    sim = Dopri5(mod)
    sim.verbosity = 40  # WHISPER
    sim.time_limit = 1
    return sim
|
zutshi/S3CAMR
|
examples/bball/bball.py
|
Python
|
bsd-2-clause
| 4,193
|
from setuptools import setup
# Package metadata lives in setup_info.py (shared with the package itself);
# executing it here defines SETUP_INFO in this module's namespace.
fn = 'lino_xl/setup_info.py'
with open(fn, "rb") as fd:
    exec(compile(fd.read(), fn, 'exec'))
if __name__ == '__main__':
    # SETUP_INFO is injected by the exec() above.
    setup(**SETUP_INFO)
|
lino-framework/xl
|
setup.py
|
Python
|
bsd-2-clause
| 177
|
from __future__ import division
from collections import Counter, defaultdict
from itertools import groupby, combinations
from functools import partial
from datetime import datetime, timedelta
from bandicoot.utils import all
def _round_half_hour(record):
"""
Round a time DOWN to half nearest half-hour.
"""
k = record.datetime + timedelta(minutes=-(record.datetime.minute % 30))
return datetime(k.year, k.month, k.day, k.hour, k.minute, 0)
def _count_interaction(user, interaction=None, direction='out'):
if interaction is 'call_duration':
d = defaultdict(int)
for r in user.records:
if r.direction == direction and r.interaction == 'call':
d[r.correspondent_id] += r.call_duration
return d
if interaction is None:
keyfn = lambda x: x.correspondent_id
records = (r for r in user.records if r.direction == direction)
chunks = groupby(sorted(records, key=keyfn), key=keyfn)
# Count the number of distinct half-hour blocks for each user
return Counter({c_id: len(set((_round_half_hour(i) for i in items))) for c_id, items in chunks})
if interaction in ['call', 'text']:
filtered = [x.correspondent_id for x in user.records if x.interaction == interaction and x.direction == direction]
else:
raise ValueError("{} is not a correct value of interaction, only 'call'"
", 'text', and 'call_duration' are accepted".format(interaction))
return Counter(filtered)
def _interaction_matrix(user, interaction=None, default=0, missing=None):
    """Build the interaction matrix over the ego network's index.

    Each cell holds the outgoing count from row user to column user; where
    the outgoing count is unknown (None), the transposed incoming count is
    used instead. Unknown correspondents produce *missing* cells, except on
    the diagonal which stays 0.
    """
    counter_for = partial(_count_interaction, interaction=interaction)
    index = matrix_index(user)

    def direction_rows(direction):
        rows = []
        for name in index:
            # NOTE: the fallback when *name* is absent is the ego user itself.
            correspondent = user.network.get(name, user)
            if correspondent is None:
                # We assume that missing users don't interact with themselves.
                rows.append([missing if other != name else 0 for other in index])
            else:
                counts = counter_for(correspondent, direction=direction)
                rows.append([counts.get(other, default) for other in index])
        return rows

    out_m = direction_rows('out')
    in_m = direction_rows('in')
    size = len(index)
    # Fall back to the transposed "in" value wherever "out" is unknown.
    return [[out_m[i][j] if out_m[i][j] is not None else in_m[j][i]
             for j in range(size)] for i in range(size)]
def matrix_index(user):
    """Return the keys associated with each axis of the matrices.

    The current user's name always comes first, followed by the sorted
    names of all the correspondents.
    """
    others = sorted(name for name in user.network.keys() if name != user.name)
    return [user.name] + others
def matrix_directed_weighted(user, interaction=None):
    """Directed, weighted matrix for call, text and call duration.

    With interaction=None, the weight counts both calls and texts: the
    number of 30-minute periods with at least one call or one text.

    Example
    -------
    >>> m = bc.network.matrix_directed_weighted(user, interaction='call')
    ``m[i][j]`` is the number of calls from ``i`` to ``j``.
    """
    matrix = _interaction_matrix(user, interaction=interaction)
    return matrix
def matrix_directed_unweighted(user):
    """Directed, unweighted matrix: 1 wherever at least one call or text
    exists, None cells preserved."""
    matrix = _interaction_matrix(user, interaction=None)
    size = len(matrix)
    for row in range(size):
        for col in range(size):
            cell = matrix[row][col]
            if cell is not None and cell > 0:
                matrix[row][col] = 1
    return matrix
def matrix_undirected_weighted(user, interaction=None):
    """Undirected, weighted matrix for call, text and call duration.

    An edge exists only when the relationship is reciprocated; its weight
    is the sum of the two directed weights. Cells touching unknown data
    stay None.
    """
    matrix = _interaction_matrix(user, interaction=interaction)
    size = len(matrix)
    result = []
    for a in range(size):
        row = []
        for b in range(size):
            ab, ba = matrix[a][b], matrix[b][a]
            if a != b and ab and ba:
                row.append(ab + ba)
            elif ab is None or ba is None:
                row.append(None)
            else:
                row.append(0)
        result.append(row)
    return result
def matrix_undirected_unweighted(user):
    """Undirected, unweighted matrix: 1 marks a reciprocated relationship."""
    matrix = matrix_undirected_weighted(user, interaction=None)
    for a, b in combinations(range(len(matrix)), 2):
        if matrix[a][b] > 0 and matrix[b][a] > 0:
            matrix[a][b] = matrix[b][a] = 1
    return matrix
def clustering_coefficient_unweighted(user):
    """
    The clustering coefficient of the user in the unweighted, undirected ego
    network.

    It is defined by counting the number of closed triplets including the
    current user (who sits at index 0 of the matrix):

    .. math::
       C = \\frac{2 * \\text{closed triplets}}{ \\text{degree} \, (\\text{degree - 1})}

    where ``degree`` is the degree of the current user in the network.
    """
    matrix = matrix_undirected_unweighted(user)
    closed_triplets = 0
    # Fix: use range() instead of the Python 2-only xrange() -- the rest of
    # this module already uses range(), and xrange is a NameError on Python 3.
    for a, b in combinations(range(len(matrix)), 2):
        a_b, a_c, b_c = matrix[a][b], matrix[a][0], matrix[b][0]
        if a_b is not None and a_c is not None and b_c is not None:
            if a_b > 0 and a_c > 0 and b_c > 0:
                closed_triplets += 1.
    d_ego = sum(matrix[0])
    return 2 * closed_triplets / (d_ego * (d_ego - 1)) if d_ego > 1 else 0
def clustering_coefficient_weighted(user, interaction=None):
    """
    The clustering coefficient of the user's weighted, undirected network.

    Defined like :meth`~bandicoot.network.clustering_coefficient_unweighted`,
    but closed triplets are weighted by the geometric mean of the number of
    interactions on their three edges (undirected weighted matrix):

    .. math::
       weight_{abc} = (m_{ab} \; m_{bc} \; m_{ac})^{1/3}

    The weight is normalized, between 0 and 1, by the maximum value in the
    matrix.
    """
    matrix = matrix_undirected_weighted(user, interaction=interaction)
    max_weight = max(cell for row in matrix for cell in row)
    triplet_weight = 0
    for a, b in combinations(range(len(matrix)), 2):
        edge_ab, edge_a0, edge_b0 = matrix[a][b], matrix[a][0], matrix[b][0]
        if edge_ab is None or edge_a0 is None or edge_b0 is None:
            continue
        if edge_ab and edge_a0 and edge_b0:
            triplet_weight += (edge_ab * edge_a0 * edge_b0) ** (1 / 3) / max_weight
    d_ego = sum(1 for w in matrix[0] if w > 0)
    return 2 * triplet_weight / (d_ego * (d_ego - 1)) if d_ego > 1 else 0
def assortativity_indicators(user):
    """
    Computes the assortativity of indicators.

    This indicator measures the similarity of the current user with his
    correspondants, for all bandicoot indicators. For each one, it calculates
    the variance of the current user's value with the values for all his
    correspondants:

    .. math::
       \\text{assortativity}(J) = \\frac{1}{n} \\sum_i^n (J_{\\text{user}} - J_{\\text{i}})^2

    for the indicator :math:`J`, and all the :math:`n` correspondents.
    """
    matrix = matrix_undirected_unweighted(user)
    count_indicator = defaultdict(int)
    total_indicator = defaultdict(int)
    # NOTE(review): "all" here is bandicoot.utils.all (imported at the top of
    # the module), which shadows the builtin -- it flattens every indicator
    # computed for a user. Do not mistake it for builtins.all.
    # Use all indicator except reporting variables and attributes
    ego_indics = all(user, flatten=True)
    ego_indics = {a: value for a, value in ego_indics.items() if a != "name" and a[:11] != "reporting__" and a[:10] != "attributes"}
    for i, u_name in enumerate(matrix_index(user)):
        correspondent = user.network.get(u_name, None)
        # Skip unknown users, the ego itself, and non-reciprocated edges.
        if correspondent is None or u_name == user.name or matrix[0][i] == 0:  # Non reciprocated edge
            continue
        neighbor_indics = all(correspondent, flatten=True)
        for a in ego_indics:
            if ego_indics[a] is not None and neighbor_indics[a] is not None:
                total_indicator[a] += 1
                # Accumulate squared differences; averaged below.
                count_indicator[a] += (ego_indics[a] - neighbor_indics[a]) ** 2
    assortativity = {}
    for i in count_indicator:
        assortativity[i] = count_indicator[i] / total_indicator[i]
    return assortativity
def assortativity_attributes(user):
    """
    Computes the assortativity of the nominal attributes.

    This measures the homophily of the current user with his correspondents,
    per attribute: the fraction of (reciprocated, known) contacts sharing
    the same value, between 0 (no assortativity) and 1 (all contacts share
    the value).
    """
    matrix = matrix_undirected_unweighted(user)
    neighbors = [name for name in user.network.keys() if name != user.name]

    # Gather attributes of reciprocated, known correspondents only.
    neighbors_attrbs = {}
    for position, name in enumerate(matrix_index(user)):
        correspondent = user.network.get(name, None)
        if correspondent is None or name == user.name or matrix[0][position] == 0:
            continue  # unknown user, the ego itself, or a non-reciprocated edge
        if correspondent.has_attributes:
            neighbors_attrbs[correspondent.name] = correspondent.attributes

    assortativity = {}
    for attribute in user.attributes:
        matching = sum(1 for n in neighbors
                       if n in neighbors_attrbs
                       and user.attributes[attribute] == neighbors_attrbs[n][attribute])
        known = sum(1 for n in neighbors if n in neighbors_attrbs)
        assortativity[attribute] = matching / known if known != 0 else None
    return assortativity
|
econandrew/bandicoot
|
bandicoot/network.py
|
Python
|
mit
| 9,664
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
from webapi_tests.wifi.wifi_test import WifiTestCommon
from webapi_tests.wifi.test_wifi_basic import TestWifiBasic
|
cr/fxos-certsuite
|
webapi_tests/wifi/__init__.py
|
Python
|
mpl-2.0
| 315
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Exceptions used by Group Policy plugin and drivers."""
from neutron.common import exceptions
class GroupPolicyDriverError(exceptions.NeutronException):
    """Policy driver call failed."""
    # NOTE(review): "_" is assumed to be neutron's gettext translation hook,
    # installed globally at startup -- it is not imported in this module.
    # TODO confirm against the neutron i18n setup.
    message = _("%(method)s failed.")
class GroupPolicyException(exceptions.NeutronException):
    """Base for policy driver exceptions returned to user."""
    pass
class GroupPolicyDeploymentError(GroupPolicyException):
    """Raised when the deployment is not configured properly."""
    message = _("Deployment not configured properly. See logs for details.")
class GroupPolicyInternalError(GroupPolicyException):
    """Raised on an unexpected internal failure."""
    message = _("Unexpected internal failure. See logs for details.")
class GroupPolicyBadRequest(exceptions.BadRequest, GroupPolicyException):
    """Base for policy driver exceptions returned to user."""
    pass
class GroupPolicyNotSupportedError(GroupPolicyBadRequest):
    """Raised when an operation is not supported by this deployment."""
    message = _("Operation %(method_name)s for resource "
                "%(resource_name)s is not supported by this "
                "deployment.")
class PolicyTargetRequiresPolicyTargetGroup(GroupPolicyBadRequest):
    """Raised when a policy target is created without a policy target group."""
    # Grammar fix in the user-facing message: "An policy" -> "A policy".
    message = _("A policy target group was not specified when "
                "creating policy_target.")
class PolicyTargetGroupUpdateOfPolicyTargetNotSupported(GroupPolicyBadRequest):
    """Raised on attempts to change the policy target group of a policy target."""
    message = _("Updating policy target group of policy target "
                "is not supported.")
class PolicyTargetGroupSubnetRemovalNotSupported(GroupPolicyBadRequest):
    """Raised on attempts to remove a subnet from a policy target group."""
    # Grammar fix in the user-facing message: "an policy" -> "a policy".
    message = _("Removing a subnet from a policy target group is not "
                "supported.")
# All exceptions below are simple GroupPolicyBadRequest (or NeutronException)
# subclasses that only define a user-facing "message" template.
# NOTE(review): "_" is assumed to be neutron's globally-installed i18n
# translation hook; it is not imported in this module -- TODO confirm.
class L2PolicyUpdateOfPolicyTargetGroupNotSupported(GroupPolicyBadRequest):
    message = _("Updating L2 policy of policy target group is not supported.")
class L3PolicyUpdateOfL2PolicyNotSupported(GroupPolicyBadRequest):
    message = _("Updating L3 policy of L2 policy is not supported.")
class L3PolicyMultipleRoutersNotSupported(GroupPolicyBadRequest):
    message = _("L3 policy does not support multiple routers.")
class L3PolicyRoutersUpdateNotSupported(GroupPolicyBadRequest):
    message = _("Updating L3 policy's routers is not supported.")
class NoSubnetAvailable(exceptions.ResourceExhausted, GroupPolicyException):
    message = _("No subnet is available from l3 policy's pool.")
class PolicyTargetGroupInUse(GroupPolicyBadRequest):
    message = _("Policy Target Group %(policy_target_group)s is in use")
class InvalidPortForPTG(GroupPolicyBadRequest):
    message = _("Subnet %(port_subnet_id)s of port %(port_id)s does not "
                "match subnet %(ptg_subnet_id)s of Policy Target Group "
                "%(policy_target_group_id)s.")
class InvalidSubnetForPTG(GroupPolicyBadRequest):
    message = _("Subnet %(subnet_id)s does not belong to network "
                "%(network_id)s associated with L2P %(l2p_id)s for PTG "
                "%(ptg_id)s.")
class OverlappingIPPoolsInSameTenantNotAllowed(GroupPolicyBadRequest):
    message = _("IP Pool %(ip_pool)s overlaps with one of the existing L3P "
                "for the same tenant %(overlapping_pools)s.")
class SharedResourceReferenceError(GroupPolicyBadRequest):
    message = _("Shared resource of type %(res_type)s with id %(res_id)s "
                "can't reference the non shared resource of type "
                "%(ref_type)s with id %(ref_id)s")
class InvalidSharedResource(GroupPolicyBadRequest):
    message = _("Resource of type %(type)s cannot be shared by driver "
                "%(driver)s")
class CrossTenantL2PolicyL3PolicyNotSupported(GroupPolicyBadRequest):
    message = _("Cross tenancy not supported between L2Ps and L3Ps")
class CrossTenantPolicyTargetGroupL2PolicyNotSupported(
        GroupPolicyBadRequest):
    message = _("Cross tenancy not supported between PTGs and L2Ps")
class NonSharedNetworkOnSharedL2PolicyNotSupported(GroupPolicyBadRequest):
    message = _("Non Shared Network can't be set for a shared L2 Policy")
class InvalidSharedAttributeUpdate(GroupPolicyBadRequest):
    message = _("Invalid shared attribute update. Shared resource %(id)s is "
                "referenced by %(rid)s, which is either shared or owned by a "
                "different tenant.")
class ExternalRouteOverlapsWithL3PIpPool(GroupPolicyBadRequest):
    message = _("Destination %(destination)s for ES %(es_id)s overlaps with "
                "L3P %(l3p_id)s.")
class ExternalSegmentSubnetOverlapsWithL3PIpPool(GroupPolicyBadRequest):
    message = _("Subnet %(subnet)s for ES %(es_id)s overlaps with "
                "L3P %(l3p_id)s.")
class ExternalRouteNextHopNotInExternalSegment(GroupPolicyBadRequest):
    message = _("One or more external routes' nexthop are not part of "
                "subnet %(cidr)s.")
class InvalidL3PExternalIPAddress(GroupPolicyBadRequest):
    message = _("Address %(ip)s allocated for l3p %(l3p_id)s on segment "
                "%(es_id)s doesn't belong to the segment subnet %(es_cidr)s")
class InvalidAttributeUpdateForES(GroupPolicyBadRequest):
    message = _("Attribute %(attribute)s cannot be updated for External "
                "Segment.")
class MultipleESPerEPNotSupported(GroupPolicyBadRequest):
    message = _("Multiple External Segments per External Policy is not "
                "supported.")
class ESIdRequiredWhenCreatingEP(GroupPolicyBadRequest):
    message = _("External Segment ID is required when creating ExternalPolicy")
class ESUpdateNotSupportedForEP(GroupPolicyBadRequest):
    message = _("external_segments update for External Policy is not "
                "supported.")
class MultipleESPerL3PolicyNotSupported(GroupPolicyBadRequest):
    message = _("Only one External Segment per L3 Policy supported.")
class InvalidSubnetForES(GroupPolicyBadRequest):
    message = _("External Segment subnet %(sub_id)s is not part of an "
                "external network %(net_id)s.")
class OnlyOneEPPerTenantAllowed(GroupPolicyBadRequest):
    message = _("Only one External Policy per Tenant is allowed.")
class ImplicitSubnetNotSupported(GroupPolicyBadRequest):
    message = _("RMD doesn't support implicit external subnet creation.")
class DefaultL3PolicyAlreadyExists(GroupPolicyBadRequest):
    message = _("Default L3 Policy with name %(l3p_name)s already "
                "exists and is visible for this tenant.")
class DefaultExternalSegmentAlreadyExists(GroupPolicyBadRequest):
    message = _("Default External Segment with name %(es_name)s already "
                "exists and is visible for this tenant.")
class InvalidCrossTenantReference(GroupPolicyBadRequest):
    message = _("Not supported cross tenant reference: object "
                "%(res_type)s:%(res_id)s can't link %(ref_type)s:%(ref_id)s "
                "unless it's shared.")
class InvalidNetworkAccess(GroupPolicyBadRequest):
    message = _("%(msg)s : Network id %(network_id)s doesn't belong to "
                " the tenant id %(tenant_id)s.")
class InvalidRouterAccess(GroupPolicyBadRequest):
    message = _("%(msg)s : Router id %(router_id)s does not belong to the "
                " tenant id %(tenant_id)s.")
class MultipleRedirectActionsNotSupportedForRule(GroupPolicyBadRequest):
    message = _("Resource Mapping Driver does not support multiple redirect "
                "actions in a Policy Rule.")
class MultipleRedirectActionsNotSupportedForPRS(GroupPolicyBadRequest):
    message = _("Resource Mapping Driver does not support multiple redirect "
                "actions in a Policy Rule Set.")
class InvalidNetworkServiceParameters(GroupPolicyBadRequest):
    message = _("Resource Mapping Driver currently supports only one "
                "parameter of type: ip_single and value: self_subnet and one "
                "parameter of type ip_single or ip_pool and value nat_pool")
class ESSubnetRequiredForNatPool(GroupPolicyBadRequest):
    message = _("Resource Mapping Driver requires an External Segment which "
                "has an external subnet specified to create a Nat Pool")
class InvalidESSubnetCidrForNatPool(GroupPolicyBadRequest):
message = _("Resource Mapping Driver requires an External Segment which "
"maps to ip pool value specified in the nat pool")
class NSPRequiresES(GroupPolicyBadRequest):
message = _("Resource Mapping Driver requires an External Segment in "
"l3policy to associate a NSP with value nat_pool to a PTG")
class NSPRequiresNatPool(GroupPolicyBadRequest):
message = _("Resource Mapping Driver requires an External Segment in "
"l3policy which has nat_pool associated for associating a NSP "
"with value nat_pool to a PTG")
class L3PEsinUseByNSP(exceptions.InUse, GroupPolicyException):
message = _("The External Segment in L3Policy cannot be updated because "
"it is in use by Network Service Policy")
class NatPoolinUseByNSP(exceptions.InUse, GroupPolicyException):
message = _("The Nat Pool is in use by Network Service Policy")
class OverlappingNATPoolInES(GroupPolicyBadRequest):
message = _("One or more NAT Pools associated with ES %(es_id)s overlaps "
"with NAT Pool %(np_id)s.")
class OverlappingSubnetForNATPoolInES(GroupPolicyBadRequest):
message = _("One or more subnets associated with network %(net_id)s "
"partially overlaps with NAT Pool %(np_id)s.")
class InvalidProxiedGroupL3P(GroupPolicyBadRequest):
message = _("Cannot proxy PTG %(ptg_id)s: it's on a different L3 policy "
"%(l3p_id)s")
class InvalidProxiedGroupL2P(GroupPolicyBadRequest):
message = _("Cannot proxy PTG %(ptg_id)s: it's on the same L2 Policy as "
"the proxy group of type L2.")
class OnlyOneProxyGatewayAllowed(GroupPolicyBadRequest):
message = _("Another proxy gateway PT already exists for group "
"%(group_id)s")
class OnlyOneGroupDefaultGatewayAllowed(GroupPolicyBadRequest):
message = _("Another group default gateway PT already exists for group "
"%(group_id)s")
|
tbachman/group-based-policy
|
gbpservice/neutron/services/grouppolicy/common/exceptions.py
|
Python
|
apache-2.0
| 10,587
|
"""Basic plugin
This plugin defines two simple features by calculating them in the
`compute_some_new_features` function and setting the plugin metadata in the
`info` dictionary. It is used for testing in dclab.
"""
def compute_some_new_features(rtdc_ds):
    """Compute the two plugin features from an RT-DC dataset.

    Derives a ratio feature and a product feature from the dataset's
    built-in "circ" and "area_um" features.
    """
    circ = rtdc_ds["circ"]
    area = rtdc_ds["area_um"]
    # The plugin contract expects a dictionary-like object mapping
    # feature names to feature data.
    return {
        "circ_per_area": circ / area,
        "circ_times_area": circ * area,
    }
# Plugin metadata consumed by dclab's plugin-feature machinery. "method"
# points at the computation above; the remaining keys describe the new
# features it produces.
info = {
    "method": compute_some_new_features,
    "description": "This plugin will compute some features",
    "long description": "Even longer description that "
                        "can span multiple lines",
    "feature names": ["circ_per_area", "circ_times_area"],
    "feature labels": ["Circularity per Area", "Circularity times Area"],
    # Built-in features that must exist in the dataset.
    "features required": ["circ", "area_um"],
    "config required": [],
    "method check required": lambda x: True,
    # Both features are per-event scalars.
    "scalar feature": [True, True],
    "version": "0.1.0",
}
|
ZellMechanik-Dresden/dclab
|
tests/data/feat_anc_plugin_creative.py
|
Python
|
gpl-2.0
| 1,077
|
# -*- coding: utf-8 -*-
# Copyright 2015 Holger Brunn <hbrunn@therp.nl>
# Copyright 2016 Pedro M. Baeza <pedro.baeza@tecnativa.com>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
# Odoo/OpenERP addon manifest for the x2many 2D-matrix widget.
{
    "name": "2D matrix for x2many fields",
    "version": "9.0.1.0.0",
    "author": "Therp BV, "
              "Tecnativa,"
              "Odoo Community Association (OCA)",
    "license": "AGPL-3",
    "category": "Hidden/Dependency",
    "summary": "Show list fields as a matrix",
    # Only the web client is required.
    "depends": [
        'web',
    ],
    "data": [
        'views/templates.xml',
    ],
    # Client-side QWeb templates for the matrix widget.
    "qweb": [
        'static/src/xml/web_widget_x2many_2d_matrix.xml',
    ],
    "installable": True,
}
|
be-cloud-be/horizon-addons
|
web/web_widget_x2many_2d_matrix/__openerp__.py
|
Python
|
agpl-3.0
| 680
|
#!/usr/bin/env python
# encoding: utf-8
import re
import os
try:
from setuptools import setup, Extension
from setuptools.command.build_ext import build_ext as _build_ext
except ImportError:
from distutils.core import setup, Extension
from distutils.command.build_ext import build_ext as _build_ext
def find_boost(hint=None, verbose=True):
    """
    Find the location of the Boost include directory.

    Parameters
    ----------
    hint : list of str, optional
        Extra directories to search before the standard locations.
        The list is not modified.
    verbose : bool
        If True, print the Boost version that was found.

    Returns
    -------
    str or None
        The include directory containing ``boost/``, or ``None`` on failure.
    """
    # List the standard locations including a user supplied hint.
    # Copy the hint so the ``+=`` below does not mutate the caller's list.
    search_dirs = [] if hint is None else list(hint)
    search_dirs += [
        "/usr/local/include",
        "/usr/local/homebrew/include",
        "/opt/local/var/macports/software",
        "/opt/local/include",
        "/usr/include",
        "/usr/include/local",
    ]
    # Loop over search paths and check for the existence of the required
    # header.
    for d in search_dirs:
        path = os.path.join(d, "boost", "math", "special_functions",
                            "ellint_3.hpp")
        if os.path.exists(path):
            # Determine the version from boost/version.hpp.
            vf = os.path.join(d, "boost", "version.hpp")
            if not os.path.exists(vf):
                continue
            # Close the file promptly instead of leaking the handle.
            with open(vf, "r") as f:
                src = f.read()
            v = re.findall(r'#define BOOST_LIB_VERSION "(.+)"', src)
            if not v:
                continue
            v = v[0]
            if verbose:
                print("Found Boost version {0} in: {1}".format(v, d))
            return d
    return None
class build_ext(_build_ext):
    """
    A custom extension builder that injects the Boost include directory
    into each extension before handing off to the standard builder.
    """

    def build_extension(self, ext):
        # Search both the extension's and the compiler's include paths.
        candidate_dirs = ext.include_dirs + self.compiler.include_dirs
        boost_include = find_boost(hint=candidate_dirs)
        # Boost is mandatory; fail loudly with a pointer to the docs.
        if boost_include is None:
            raise RuntimeError("Required library Boost not found. "
                               "Check the documentation for solutions.")
        # Update the extension's include directories.
        ext.include_dirs += [boost_include]
        # Delegate the actual compilation to distutils/setuptools.
        _build_ext.build_extension(self, ext)
if __name__ == "__main__":
    import sys
    import numpy
    from Cython.Build import cythonize

    # Publish the library to PyPI.
    if "publish" in sys.argv[-1]:
        os.system("python setup.py sdist upload")
        sys.exit()

    # Choose libraries to link.
    libraries = []
    if os.name == "posix":
        # libm is needed for the C math functions on POSIX.
        libraries.append("m")

    # Specify the include directories.
    include_dirs = [
        "include",
        numpy.get_include(),
    ]

    # The source files.
    src = [
        "transit/_transit.pyx",
        # "transit/_transit.c",
        "src/quad.cpp",
        # "src/driver.cpp",
    ]

    # Set up the extension.
    ext = Extension("transit._transit", sources=src,
                    libraries=libraries, include_dirs=include_dirs)

    # Hackishly inject a constant into builtins to enable importing of the
    # package before the library is built.
    if sys.version_info[0] < 3:
        import __builtin__ as builtins
    else:
        import builtins
    builtins.__TRANSIT_SETUP__ = True
    import transit

    # Execute the setup command.
    # NOTE(review): handle left to the GC; harmless in a short-lived
    # setup script.
    desc = open("README.rst").read()
    setup(
        name="transit",
        version=transit.__version__,
        author="Daniel Foreman-Mackey",
        author_email="danfm@nyu.edu",
        packages=["transit"],
        py_modules=["transit.tests"],
        ext_modules=cythonize([ext]),
        url="http://github.com/dfm/transit",
        license="MIT",
        description="A Python library for computing the light curves of "
                    "transiting planets",
        long_description=desc,
        package_data={"": ["README.rst", "LICENSE", "include/*.h", ]},
        include_package_data=True,
        cmdclass=dict(build_ext=build_ext),
        classifiers=[
            "Development Status :: 3 - Alpha",
            "Intended Audience :: Science/Research",
            "License :: OSI Approved :: MIT License",
            "Operating System :: OS Independent",
            "Programming Language :: Python",
        ],
        test_suite="nose.collector",
    )
|
timothydmorton/transit
|
setup.py
|
Python
|
mit
| 4,320
|
from functools import partial
from django.contrib.admin.sites import AdminSite
from django.contrib.sites.models import Site
from django.core.urlresolvers import reverse
from django.test import TestCase
from mock import patch
from opendebates.admin import SubmissionAdmin
from opendebates.models import Submission
from opendebates.tests.factories import SubmissionFactory, UserFactory, DebateFactory
# Force the reverse() used here in the tests to always use the full
# urlconf, despite whatever machinations have taken place due to the
# DebateMiddleware.
# Keep a handle on the real implementation before shadowing the name.
old_reverse = reverse
reverse = partial(old_reverse, urlconf='opendebates.urls')
# mock objects to make the admin think we're superusers.
# mostly copied from
# https://github.com/django/django/blob/master/tests/modeladmin/tests.py#L23-L32
class MockRequest(object):
    """Minimal stand-in for a Django request used by the admin tests."""

    # NOTE(review): class-level mutable defaults shared by all instances;
    # fine here because the tests never mutate them.
    scheme = 'http'
    POST = {}
    META = {}
class MockSuperUser(object):
    """User double that passes every permission and authentication check."""

    def has_perm(self, perm):
        # Grant everything so admin views treat us as a superuser.
        return True

    def is_authenticated(self):
        # Always logged in.
        return True
class RemoveSubmissionsTest(TestCase):
    """Tests for the SubmissionAdmin ``remove_submissions`` admin action."""

    def setUp(self):
        # Build the admin directly and log in a real superuser for the
        # client-driven POST tests.
        self.site = AdminSite()
        self.admin = SubmissionAdmin(Submission, self.site)
        self.password = 'secretpassword'
        self.user = UserFactory(password=self.password, is_staff=True, is_superuser=True)
        assert self.client.login(username=self.user.username, password=self.password)
        self.submission = SubmissionFactory()
        self.queryset = Submission.objects.all()
        self.changelist_url = reverse('admin:opendebates_submission_changelist')

    def tearDown(self):
        # The sites framework caches Site objects; clear so later tests
        # don't observe stale entries.
        Site.objects.clear_cache()

    def test_get(self):
        """
        GETting the intermediate page should have specified text and the PK of
        the chosen submissions.
        """
        request = MockRequest()
        request.user = MockSuperUser()
        request.debate = DebateFactory()
        rsp = self.admin.remove_submissions(request, self.queryset)
        self.assertEqual(rsp.status_code, 200)
        self.assertContains(rsp, 'remove the selected submissions?')
        self.assertContains(rsp, self.submission.pk)

    @patch('opendebates.admin.send_email')
    def test_post(self, mock_send_email):
        """
        POSTing the form should cause submissions to be removed and email to be
        sent.
        """
        data = {
            'post': 'Yes',
            'action': 'remove_submissions',
            '_selected_action': [self.submission.pk, ]
        }
        rsp = self.client.post(self.changelist_url, data=data)
        self.assertRedirects(rsp, self.changelist_url)
        # Now submission should not be approved
        submission = Submission.objects.get()
        self.assertFalse(submission.approved)
        # and 1 email should have been sent
        self.assertEqual(mock_send_email.call_count, 1)
        submission = Submission.objects.get(id=self.submission.pk)
        self.assertIsNotNone(submission.moderated_at)

    @patch('opendebates.admin.send_email')
    def test_post_multiple(self, mock_send_email):
        "POSTing multiple submissions works as well."
        data = {
            'post': 'Yes',
            'action': 'remove_submissions',
            '_selected_action': [SubmissionFactory().pk, SubmissionFactory().pk]
        }
        rsp = self.client.post(self.changelist_url, data=data)
        self.assertRedirects(rsp, self.changelist_url)
        removed_submissions = Submission.objects.filter(approved=False)
        self.assertEqual(removed_submissions.count(), 2)
        sub1, sub2 = removed_submissions
        self.assertIsNotNone(sub1.moderated_at)
        self.assertIsNotNone(sub2.moderated_at)
        # The submission created in setUp() must be untouched.
        untouched_submission = Submission.objects.filter(approved=True)
        self.assertEqual(untouched_submission.count(), 1)
        # and 2 emails have been sent
        self.assertEqual(mock_send_email.call_count, 2)

    @patch('opendebates.admin.send_email')
    def test_dont_send_email_if_already_unapproved(self, mock_send_email):
        "If submission was already unapproved, don't bug the user again."
        data = {
            'post': 'Yes',
            'action': 'remove_submissions',
            '_selected_action': [SubmissionFactory(approved=False).pk]
        }
        rsp = self.client.post(self.changelist_url, data=data)
        self.assertRedirects(rsp, self.changelist_url)
        removed_submissions = Submission.objects.filter(approved=False)
        self.assertEqual(removed_submissions.count(), 1)
        # and ZERO emails have been sent
        self.assertEqual(mock_send_email.call_count, 0)
|
caktus/django-opendebates
|
opendebates/tests/test_admin.py
|
Python
|
apache-2.0
| 4,608
|
from JumpScale import j
import argparse
import sys
class ArgumentParser(argparse.ArgumentParser):
    """argparse parser that shuts the JumpScale application down cleanly.

    Overrides ``exit`` so a running application is stopped through
    ``j.application.stop`` instead of a bare ``sys.exit``.
    """

    def exit(self, status=0, message=None):
        if message:
            self._print_message(message, sys.stderr)
        # Prefer the JumpScale shutdown path when the app is running.
        if j.application.state == "RUNNING":
            j.application.stop(status)
        else:
            sys.exit(status)
def processLogin(parser):
    """Register the common grid login options on *parser* and parse argv.

    Returns the parsed namespace; ``login`` falls back to "root" when the
    user did not supply one.
    """
    flag_specs = (
        ("-l", '--login', 'login for grid, if not specified defaults to root'),
        ("-p", '--passwd', 'passwd for grid'),
        ("-a", '--addr',
         'ip addr of master, if not specified will be the one as specified in local config'),
    )
    for short_flag, long_flag, help_text in flag_specs:
        parser.add_argument(short_flag, long_flag, help=help_text)
    opts = parser.parse_args()
    # Fall back to the superadmin account when no login was supplied.
    if opts.login is None:
        opts.login = "root"
    return opts
def getProcess(parser=None):
    """Add process-selection options and parse the command line.

    A fresh ``ArgumentParser`` is created when none is supplied.
    """
    if parser is None:
        parser = ArgumentParser()
    parser.add_argument('-d', '--domain', help='Process domain name')
    parser.add_argument('-n', '--name', help='Process name')
    return parser.parse_args()
|
Jumpscale/jumpscale_core8
|
lib/JumpScale/tools/cmdutils/__init__.py
|
Python
|
apache-2.0
| 1,433
|
import json
def getConfig():
    """Load and return the JSON configuration from ``config.json``.

    Returns the deserialized JSON document (typically a dict).
    Raises ``FileNotFoundError`` if the file is missing and
    ``json.JSONDecodeError`` if it is malformed.
    """
    # Read as UTF-8 explicitly so parsing does not depend on the
    # platform's locale default encoding.
    with open('config.json', 'r', encoding='utf-8') as f:
        return json.load(f)
|
UTD-CSLLC/Door-Karma-Server
|
src/config.py
|
Python
|
mit
| 88
|
import calendar
from datetime import timedelta
import json
import os
import random
import traceback
from django.core import management
from django.utils import timezone
from silk import models
from silk.models import SQLQuery, Profile
class MockSuite(object):
    """
    Provides some fake data to play around with. Also useful for testing
    """
    # Pools of candidate values from which the fake objects are assembled.
    methods = ['GET', 'POST', 'PUT', 'PATCH', 'HEAD', 'OPTIONS']
    path_components = ['path', 'to', 'somewhere', 'around', 'here', 'bobs', 'your', 'uncle']
    status_codes = [200, 201, 300, 301, 302, 403, 404, 500]
    profile_names = ['slow_bit_of_code', 'terrible_dependency', 'what_on_earth_is_this_code_doing']
    file_path = [os.path.realpath(__file__)]
    func_names = ['', '', '', 'foo', 'bar']
    view_names = ['app:blah', 'index', 'root', 'xxx:xyx']
    # NOTE(review): the SQL below is sample text only and is never executed;
    # whitespace inside the strings is cosmetic.
    sql_queries = ['''
        SELECT Book.title AS Title,
               COUNT(*) AS Authors
        FROM Book
        JOIN Book_author
        ON Book.isbn = Book_author.isbn
        GROUP BY Book.title;
    ''',
    '''
        SELECT * FROM table
    ''', '''
        SELECT *
        FROM Book
        WHERE price > 100.00
        ORDER BY title;
    ''', '''
        SELECT title,
               COUNT(*) AS Authors
        FROM Book
        NATURAL JOIN Book_author
        GROUP BY title;
    ''',
    '''
        SELECT A.Col1, A.Col2, B.Col1,B.Col2
        FROM (SELECT RealTableZ.Col1, RealTableY.Col2, RealTableY.ID AS ID
              FROM RealTableZ
              LEFT OUTER JOIN RealTableY
              ON RealTableZ.ForeignKeyY=RealTableY.ID
              WHERE RealTableY.Col11>14
             ) AS B
        INNER JOIN A
        ON A.ForeignKeyY=B.ID
    ''']
    response_content_types = ['text/html', 'application/json', 'text/css']
    response_content = {
        'text/html': ['<html></html>'],
        'text/css': ['#blah {font-weight: bold}'],
        'application/json': ['[1, 2, 3]']
    }
    request_content_types = ['application/json']
    request_content = {
        'application/json': ['{"blah": 5}']
    }

    def _random_method(self):
        # Pick a random HTTP verb.
        return random.choice(self.methods)

    def _random_path(self):
        # Build a random URL path with 1-5 distinct components.
        num_components = random.randint(1, 5)
        return '/' + '/'.join(random.sample(self.path_components, num_components)) + '/'

    def _random_query(self):
        return random.choice(self.sql_queries)

    def mock_sql_queries(self, request=None, profile=None, n=1, as_dict=False):
        """Create ``n`` fake SQLQuery rows (or plain dicts when
        ``as_dict`` is True), optionally attached to ``request`` and/or
        ``profile``."""
        start_time, end_time = self._random_time()
        queries = []
        for _ in range(0, n):
            # Capture a realistic-looking (reversed) stack trace.
            tb = ''.join(reversed(traceback.format_stack()))
            d = {
                'query': self._random_query(),
                'start_time': start_time,
                'end_time': end_time,
                'request': request,
                'traceback': tb
            }
            if as_dict:
                queries.append(d)
            else:
                query = SQLQuery.objects.create(**d)
                queries.append(query)
        if profile:
            if as_dict:
                for q in queries:
                    profile['queries'].append(q)
            else:
                profile.queries = queries
                profile.save()
        return queries

    def mock_profile(self, request=None):
        """Create one fake Profile row with 0-10 attached SQL queries."""
        start_time, end_time = self._random_time()
        dynamic = random.choice([True, False])
        profile = Profile.objects.create(start_time=start_time,
                                         end_time=end_time,
                                         request=request,
                                         name=random.choice(self.profile_names),
                                         file_path=random.choice(self.file_path),
                                         line_num=3,
                                         func_name=random.choice(self.func_names),
                                         dynamic=dynamic,
                                         end_line_num=6 if dynamic else None,
                                         exception_raised=random.choice([True, False])
                                         )
        self.mock_sql_queries(profile=profile, n=random.randint(0, 10))
        return profile

    def mock_profiles(self, request=None, n=1):
        """Create ``n`` fake profiles attached to ``request``."""
        profiles = []
        for _ in range(0, n):
            profile = self.mock_profile(request)
            profiles.append(profile)
        return profiles

    def _random_time(self):
        # A random interval of up to 3 seconds starting "now".
        start_time = timezone.now()
        duration = timedelta(milliseconds=random.randint(0, 3000))
        end_time = start_time + duration
        return start_time, end_time

    def mock_request(self):
        """Create one fake Request row together with its Response, SQL
        queries and profiles, and return the Request."""
        start_time, end_time = self._random_time()
        num_sql_queries = random.randint(0, 20)
        request_content_type = random.choice(self.request_content_types)
        request_body = random.choice(self.request_content[request_content_type])
        time_taken = end_time - start_time
        time_taken = time_taken.total_seconds()
        request = models.Request.objects.create(method=self._random_method(),
                                                path=self._random_path(),
                                                num_sql_queries=num_sql_queries,
                                                start_time=start_time,
                                                end_time=end_time,
                                                view_name=random.choice(self.view_names),
                                                time_taken=time_taken,
                                                encoded_headers=json.dumps({'content-type': request_content_type}),
                                                body=request_body)
        response_content_type = random.choice(self.response_content_types)
        response_body = random.choice(self.response_content[response_content_type])
        models.Response.objects.create(request=request,
                                       status_code=random.choice(self.status_codes),
                                       content_type=json.dumps({'content-type': response_content_type}),
                                       body=response_body)
        self.mock_sql_queries(request=request, n=num_sql_queries)
        self.mock_profiles(request, random.randint(0, 2))
        return request
if __name__ == '__main__':
    # Reset the database, then populate it with 100 fake requests.
    management.call_command('flush', interactive=False)
    requests = [MockSuite().mock_request() for _ in range(0, 100)]
|
CloudNcodeInc/silk
|
tests/tests/test_lib/mock_suite.py
|
Python
|
mit
| 6,446
|
import unittest
from PySide.QtGui import QLayout, QWidget, QGraphicsLayout, QGraphicsLayoutItem, QTextCursor, QPrinter
from helper import UsesQApplication
# The three Qt layout base classes are abstract; trivial Python
# subclasses let the tests below instantiate them.
class Layout(QLayout):
    def __init__(self):
        QLayout.__init__(self)


class GraphicsLayout(QGraphicsLayout):
    def __init__(self):
        QGraphicsLayout.__init__(self)


class GraphicsLayoutItem(QGraphicsLayoutItem):
    def __init__(self):
        QGraphicsLayoutItem.__init__(self)
class ReturnsQuadruplesOfNumbers(UsesQApplication):
    """Regression tests for Qt methods that return 4-tuples of numbers."""

    def compareTuples(self, ta, tb):
        # Element-wise comparison after rounding, tolerating float noise.
        for va, vb in zip(ta, tb):
            if round(va) != round(vb):
                return False
        return True

    def testQGraphicsLayoutGetContentsMargins(self):
        obj = GraphicsLayout()
        values = (10.0, 20.0, 30.0, 40.0)
        obj.setContentsMargins(*values)
        self.assert_(self.compareTuples(obj.getContentsMargins(), values))

    def testQGraphicsLayoutItemGetContentsMargins(self):
        obj = GraphicsLayoutItem()
        self.assert_(self.compareTuples(obj.getContentsMargins(), (0.0, 0.0, 0.0, 0.0)))

    def testQWidgetGetContentsMargins(self):
        obj = QWidget()
        values = (10, 20, 30, 40)
        obj.setContentsMargins(*values)
        self.assert_(self.compareTuples(obj.getContentsMargins(), values))

    def testQLayoutGetContentsMargins(self):
        obj = Layout()
        values = (10, 20, 30, 40)
        obj.setContentsMargins(*values)
        self.assert_(self.compareTuples(obj.getContentsMargins(), values))

    def testQTextCursorSelectedTableCells(self):
        # A detached cursor selects no table cells.
        obj = QTextCursor()
        self.assertEquals(obj.selectedTableCells(), (-1, -1, -1, -1))

    def testQPrinterGetPageMargins(self):
        # Bug #742
        obj = QPrinter()
        values = (10.0, 20.0, 30.0, 40.0, QPrinter.Point)
        obj.setPageMargins(*values)
        self.assert_(self.compareTuples(obj.getPageMargins(QPrinter.Point), values[:-1]))


if __name__ == "__main__":
    unittest.main()
|
M4rtinK/pyside-android
|
tests/QtGui/returnquadruplesofnumbers_test.py
|
Python
|
lgpl-2.1
| 1,993
|
import unittest
class Solution:
    """LeetCode 401 -- Binary Watch, solved with backtracking.

    A binary watch has 4 LEDs for the hour (0-11) and 6 LEDs for the
    minute (0-59). Given the number of lit LEDs, enumerate every valid
    time string they could display.
    """

    def __init__(self):
        # Accumulator for the formatted "h:mm" strings.
        self.result = []

    def readBinaryWatch(self, num):
        """
        :type num: int
        :rtype: List[str]
        """
        # Reset the accumulator so repeated calls on the same instance do
        # not leak results from a previous invocation.
        self.result = []
        self._read(num, 0, 0, 0)
        return self.result

    def _read(self, num, hour, minute, start_index):
        # Choose `num` more LEDs among bit positions start_index..9, where
        # positions 0-3 are hour bits and 4-9 are minute bits.
        if num == 0:
            if hour < 12 and minute < 60:
                self.result.append('{}:{:02d}'.format(hour, minute))
        elif num > 0:
            # Upper bound 11 - num prunes branches that cannot place all
            # remaining LEDs in the positions that are left.
            for i in range(start_index, 11 - num):
                if i < 4:
                    self._read(num - 1, hour + (1 << i), minute, i + 1)
                else:
                    self._read(num - 1, hour, minute + (1 << i - 4), i + 1)
class Test(unittest.TestCase):
    """Checks readBinaryWatch against the known answers for num=1 and 2."""

    def test(self):
        self._test(1, ["1:00", "2:00", "4:00", "8:00", "0:01", "0:02", "0:04", "0:08", "0:16", "0:32"])
        self._test(2, ["0:03", "0:05", "0:06", "0:09", "0:10", "0:12", "0:17", "0:18", "0:20", "0:24", "0:33", "0:34",
                       "0:36", "0:40", "0:48", "1:01", "1:02", "1:04", "1:08", "1:16", "1:32", "2:01", "2:02", "2:04",
                       "2:08", "2:16", "2:32", "3:00", "4:01", "4:02", "4:04", "4:08", "4:16", "4:32", "5:00", "6:00",
                       "8:01", "8:02", "8:04", "8:08", "8:16", "8:32", "9:00", "10:00"])

    def _test(self, num, expected):
        # Result order is implementation-defined; compare as multisets.
        actual = Solution().readBinaryWatch(num)
        self.assertCountEqual(expected, actual)


if __name__ == '__main__':
    unittest.main()
|
chrisxue815/leetcode_python
|
problems/test_0401_backtrack.py
|
Python
|
unlicense
| 1,495
|
from .crf_model_wrapper import CrfModelWrapper
from .requisition_model_wrapper import RequisitionModelWrapper
|
botswana-harvard/edc-visit-schedule
|
edc_visit_schedule/model_wrappers/__init__.py
|
Python
|
gpl-2.0
| 110
|
# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2019-2021 Florian Bruhin (The Compiler) <mail@qutebrowser.org>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.
"""Implementation of the command debug-cache-stats.
Because many modules depend on this command, this needs to have as few
dependencies as possible to avoid cyclic dependencies.
"""
from typing import Any, Callable, List, Optional, Tuple, TypeVar
# Registry of (display name, lru_cache-wrapped function) pairs; consumed
# by debug_cache_stats below.
_CACHE_FUNCTIONS: List[Tuple[str, Any]] = []

_T = TypeVar('_T', bound=Callable)


def register(name: Optional[str] = None) -> Callable[[_T], _T]:
    """Register a lru_cache wrapped function for debug_cache_stats."""
    def wrapper(fn: _T) -> _T:
        # Default to the function's own name when no label was given.
        label = name if name is not None else fn.__name__
        _CACHE_FUNCTIONS.append((label, fn))
        return fn
    return wrapper
def debug_cache_stats() -> None:
    """Print LRU cache stats."""
    # Imported lazily to keep this module's dependency footprint minimal
    # and avoid cyclic imports.
    from qutebrowser.utils import log
    for cache_name, cached_fn in _CACHE_FUNCTIONS:
        log.misc.info('{}: {}'.format(cache_name, cached_fn.cache_info()))
|
forkbong/qutebrowser
|
qutebrowser/misc/debugcachestats.py
|
Python
|
gpl-3.0
| 1,694
|
from gffutils import parser, feature, helpers, constants
def test_feature_from_line():
    # spaces and tabs should give identical results
    # NOTE(review): literal tab characters appear to have been lost in this
    # copy of the file -- line1 was presumably tab-delimited; restore the
    # tabs before running.
    line1 = "chr2L FlyBase exon 7529 8116 . + . Name=CG11023:1;Parent=FBtr0300689,FBtr0300690"
    line2 = "chr2L FlyBase exon 7529 8116 . + . Name=CG11023:1;Parent=FBtr0300689,FBtr0300690"
    assert feature.feature_from_line(line1, strict=False, keep_order=True) == \
        feature.feature_from_line(line2, strict=False, keep_order=True)


def test_default_feature():
    # Default Feature is 8 tab-delimited ".", with a trailing tab
    # NOTE(review): the expected string below should contain tabs per the
    # comment above; they appear lost in this copy.
    assert str(feature.Feature()) == \
        ". . . . . . . . "


def test_attributes_representations():
    # These different ways of supplying attributes should yield identical
    # results:
    s = ". . . . . . . . ID=asdf"
    for item in (
            '{"ID": ["asdf"]}',
            dict(ID=["asdf"]),
            "ID=asdf"
    ):
        result = str(feature.Feature(attributes=item))
        assert result == s, result


def test_default_start_stop():
    # Whether start or end is "." or None, attribute should always be None and
    # printing should show "."
    c = ['.', None]
    for i1 in c:
        for i2 in c:
            f = feature.Feature(start=i1, end=i2)
            assert f.start is None
            assert f.end is None
            assert f.stop is None
            assert str(f) == ". . . . . . . . ", str(f)
    # Make sure zero works (protects against sloppy "if start:")
    f = feature.Feature(start=0, end=0)
    assert f.start == f.end == f.stop == 0
    assert str(f) == ". . . 0 0 . . . ", str(f)
def test_aliases():
    """chrom/seqid and end/stop are read-write aliases on Feature."""
    line = "chr2L FlyBase exon 7529 8116 . + . Name=CG11023:1;Parent=FBtr0300689,FBtr0300690"
    f = feature.feature_from_line(line, keep_order=True)
    assert f.chrom == 'chr2L' == f.seqid
    assert f.end == 8116 == f.stop
    f.chrom = 'fake'
    f.stop = 1
    assert f.chrom == 'fake' == f.seqid
    assert f.stop == 1 == f.end


def test_string_representation():
    """str() round-trips the original line, including trailing extras."""
    line = "chr2L FlyBase exon 7529 8116 . + . Name=CG11023:1;Parent=FBtr0300689,FBtr0300690"
    f = feature.feature_from_line(line, keep_order=True)
    assert line == str(f), str(f)
    line = "chr2L FlyBase exon 7529 8116 . + . Name=CG11023:1;Parent=FBtr0300689,FBtr0300690 some more stuff"
    f = feature.feature_from_line(line, keep_order=True)
    assert line == str(f)


def test_pbt_interval_conversion():
    """Conversion to a pybedtools Interval (skipped when not installed)."""
    try:
        import pybedtools
    except ImportError:
        return
    line = "chr2L FlyBase exon 7529 8116 . + . Name=CG11023:1;Parent=FBtr0300689,FBtr0300690"
    f = feature.feature_from_line(line, strict=False, keep_order=True)
    pbt = helpers.asinterval(f)
    assert pbt.chrom == f.chrom == f.seqid
    # pybedtools intervals are zero-based; GFF coordinates are one-based.
    assert pbt.start == f.start - 1
    assert pbt.stop == f.stop == f.end
    pn = pbt.name
    fn = f.attributes['Name'][0]
    assert pn == fn, '%s, %s' % (pn, fn)


def test_hash():
    # A Feature hashes like its string form.
    line = "chr2L FlyBase exon 7529 8116 . + . Name=CG11023:1;Parent=FBtr0300689,FBtr0300690 some more stuff"
    f = feature.feature_from_line(line, keep_order=True)
    assert hash(f) == hash(line)


def test_repr():
    line = "chr2L FlyBase exon 7529 8116 . + . Name=CG11023:1;Parent=FBtr0300689,FBtr0300690 some more stuff"
    f = feature.feature_from_line(line, keep_order=True)
    print(repr(f))
    print(hex(id(f)))
    assert repr(f) == ("<Feature exon (chr2L:7529-8116[+]) at %s>" % hex(id(f)))
def test_attribute_order():
    # default order is gene_id, transcript_id. But feature_from_line -- if
    # dialect not provided -- will infer its own dialect. In this case,
    # transcript_id comes first.
    attributes = 'transcript_id "mRNA1"; gene_id "gene1";'
    a = feature.feature_from_line(
        """
        chr1 . mRNA 1 100 . + . %s
        """ % attributes, strict=False, keep_order=True)
    a.strict = True
    a.keep_order = True
    assert str(a) == 'chr1 . mRNA 1 100 . + . transcript_id "mRNA1"; gene_id "gene1";', str(a)
    # ensure that using the default dialect uses the default order (and
    # incidentally converts to GFF3 format)
    orig_dialect = a.dialect
    a.dialect = constants.dialect
    a.keep_order = True
    assert str(a) == 'chr1 . mRNA 1 100 . + . gene_id=gene1;transcript_id=mRNA1', str(a)
    # adding an attribute should always result in that attribute coming last
    # (as long as that attribute is not in the dialect order)
    a['dummy'] = ['asdf']
    a.strict = True
    assert str(a) == 'chr1 . mRNA 1 100 . + . gene_id=gene1;transcript_id=mRNA1;dummy=asdf', str(a)


def test_unjsonify():
    """Round-trip attributes through the internal JSON helpers."""
    attributes, dialect = parser._split_keyvals('transcript_id "mRNA1"')
    assert attributes == {'transcript_id': ['mRNA1']}, attributes
    s = helpers._jsonify(attributes)
    assert s == '{"transcript_id":["mRNA1"]}', s
    d = helpers._unjsonify(s, isattributes=True)
    assert d == attributes


class IsolatedTestCase(object):
    """
    Isolated test case for checking that the module-level
    constants.always_return_list works.

    This was needed because having this test as a function caused other tests
    to fail even though constants.always_return_list was put back to its
    original setting. Apparently nose runs tests concurrently in the same
    namespace or something? Anyway, these setup/teardowns do the trick.
    """
    def setup(self):
        constants.always_return_list = False

    def teardown(self):
        # Restore the module-level default for subsequent tests.
        constants.always_return_list = True

    def test_feature_single_item(self):
        line = "chr2L FlyBase exon 7529 8116 . + . Name=CG11023:1;Parent=FBtr0300689,FBtr0300690 some more stuff"
        f = feature.feature_from_line(line, keep_order=True)
        assert f['Name'] == ['CG11023:1']
|
daler/gffutils
|
gffutils/test/feature_test.py
|
Python
|
mit
| 5,714
|
import logging.config
import structlog
# Shared timestamper so structlog and stdlib records format identically.
timestamper = structlog.processors.TimeStamper(fmt="%Y-%m-%d %H:%M:%S")
pre_chain = [
    # Add the log level and a timestamp to the event_dict if the log entry
    # is not from structlog.
    structlog.stdlib.add_log_level,
    timestamper,
]

# Route stdlib logging through structlog's ProcessorFormatter: colored
# console output on stderr plus a plain-text "test.log" file.
logging.config.dictConfig({
    "version": 1,
    "disable_existing_loggers": False,
    "formatters": {
        "plain": {
            "()": structlog.stdlib.ProcessorFormatter,
            "processor": structlog.dev.ConsoleRenderer(colors=False),
            "foreign_pre_chain": pre_chain,
        },
        "colored": {
            "()": structlog.stdlib.ProcessorFormatter,
            "processor": structlog.dev.ConsoleRenderer(colors=True),
            "foreign_pre_chain": pre_chain,
        },
    },
    "handlers": {
        "default": {
            "level": "DEBUG",
            "class": "logging.StreamHandler",
            "formatter": "colored",
        },
        "file": {
            "level": "INFO",
            # WatchedFileHandler reopens the file if it is rotated.
            "class": "logging.handlers.WatchedFileHandler",
            "filename": "test.log",
            "formatter": "plain",
        },
    },
    "loggers": {
        "": {
            "handlers": ["default", "file"],
            "level": "INFO",
            "propagate": True,
        },
    }
})

# Configure structlog itself to feed the stdlib logging stack above.
structlog.configure(
    processors=[
        structlog.stdlib.add_log_level,
        timestamper,
        structlog.processors.StackInfoRenderer(),
        structlog.processors.format_exc_info,
        #structlog.processors.JSONRenderer(),
        structlog.processors.KeyValueRenderer(key_order=['event','level'])
    ],
    context_class=structlog.threadlocal.wrap_dict(dict),
    logger_factory=structlog.stdlib.LoggerFactory(),
    wrapper_class=structlog.stdlib.BoundLogger,
    cache_logger_on_first_use=True,
)
def getLogger(name='surveystats-default-log'):
    """Return a structlog logger bound to *name*."""
    logger = structlog.get_logger(name)
    return logger
|
semanticbits/survey_stats
|
src/survey_stats/log.py
|
Python
|
bsd-2-clause
| 1,619
|
from model.group import Group
from random import randrange
def test_delete_some_group(app):
    """Deleting a random group shrinks the list by one and leaves the
    remaining groups untouched."""
    # Precondition: at least one group must exist to be deleted.
    if app.group.count() == 0:
        app.group.create(Group(name="test"))
    old_groups = app.group.get_group_list()
    index = randrange(len(old_groups))
    app.group.delete_group_by_index(index)
    new_groups = app.group.get_group_list()
    assert len(old_groups) - 1 == len(new_groups)
    # Removing the same entry locally should reproduce the new state.
    old_groups[index:index+1] = []
    assert old_groups == new_groups
|
fiore24/python_training
|
test/test_del_group.py
|
Python
|
apache-2.0
| 460
|
from __future__ import with_statement
"""
An RPython implementation of sockets based on rffi.
Note that the interface has to be slightly different - this is not
a drop-in replacement for the 'socket' module.
"""
# XXX this does not support yet the least common AF_xxx address families
# supported by CPython. See http://bugs.pypy.org/issue1942
from errno import EINVAL
from rpython.rlib import _rsocket_rffi as _c, jit, rgc
from rpython.rlib.objectmodel import instantiate, keepalive_until_here
from rpython.rlib.rarithmetic import intmask, r_uint
from rpython.rlib import rthread, rposix
from rpython.rtyper.lltypesystem import lltype, rffi
from rpython.rtyper.lltypesystem.rffi import sizeof, offsetof
from rpython.rtyper.extregistry import ExtRegistryEntry
# Usage of @jit.dont_look_inside in this file is possibly temporary
# and only because some lltypes declared in _rsocket_rffi choke the
# JIT's codewriter right now (notably, FixedSizeArray).
# Sentinel value returned by the C library for "no socket".
INVALID_SOCKET = _c.INVALID_SOCKET
def mallocbuf(buffersize):
    """Allocate a raw (non-GC) char buffer of 'buffersize' bytes.
    The caller is responsible for lltype.free()-ing it."""
    return lltype.malloc(rffi.CCHARP.TO, buffersize, flavor='raw')
constants = _c.constants
locals().update(constants) # Define constants from _c
if _c.WIN32:
    from rpython.rlib import rwin32
    def rsocket_startup():
        """Initialize Winsock (requesting version 1.1); call once at
        process startup before using any socket function."""
        wsadata = lltype.malloc(_c.WSAData, flavor='raw', zero=True)
        try:
            res = _c.WSAStartup(0x0101, wsadata)
            assert res == 0
        finally:
            lltype.free(wsadata, flavor='raw')
else:
    def rsocket_startup():
        """No per-process socket initialization is needed outside Windows."""
        pass
def ntohs(x):
    """Convert a 16-bit value from network to host byte order (signed)."""
    assert isinstance(x, int)
    converted = _c.ntohs(x)
    return rffi.cast(lltype.Signed, converted)

def ntohl(x):
    """Convert a 32-bit value from network to host byte order.
    Accepts and returns an Unsigned."""
    converted = _c.ntohl(x)
    return rffi.cast(lltype.Unsigned, converted)

def htons(x):
    """Convert a 16-bit value from host to network byte order (signed)."""
    assert isinstance(x, int)
    converted = _c.htons(x)
    return rffi.cast(lltype.Signed, converted)

def htonl(x):
    """Convert a 32-bit value from host to network byte order.
    Accepts and returns an Unsigned."""
    converted = _c.htonl(x)
    return rffi.cast(lltype.Unsigned, converted)
# Registry mapping address family numbers (AF_xxx) to their Address
# subclasses; filled automatically by Address's metaclass below.
_FAMILIES = {}
class Address(object):
    """The base class for RPython-level objects representing addresses.
    Fields: addr - a _c.sockaddr_ptr (memory owned by the Address instance)
            addrlen - size used within 'addr'
    """
    class __metaclass__(type):
        def __new__(cls, name, bases, dict):
            # Auto-register every subclass that declares a 'family'
            # class attribute in _FAMILIES.
            family = dict.get('family')
            A = type.__new__(cls, name, bases, dict)
            if family is not None:
                _FAMILIES[family] = A
            return A
    # default uninitialized value: NULL ptr
    addr_p = lltype.nullptr(_c.sockaddr_ptr.TO)
    def __init__(self, addr, addrlen):
        self.addr_p = addr
        self.addrlen = addrlen
    @rgc.must_be_light_finalizer
    def __del__(self):
        # The raw sockaddr buffer is allocated with track_allocation=False,
        # so it must be freed explicitly here.
        if self.addr_p:
            lltype.free(self.addr_p, flavor='raw', track_allocation=False)
    def setdata(self, addr, addrlen):
        # initialize self.addr and self.addrlen. 'addr' can be a different
        # pointer type than exactly sockaddr_ptr, and we cast it for you.
        # Must only be called on a not-yet-initialized Address.
        assert not self.addr_p
        self.addr_p = rffi.cast(_c.sockaddr_ptr, addr)
        self.addrlen = addrlen
    setdata._annspecialcase_ = 'specialize:ll'
    # the following slightly strange interface is needed to manipulate
    # what self.addr_p points to in a safe way. The problem is that
    # after inlining we might end up with operations that look like:
    #     addr = self.addr_p
    #     <self is freed here, and its __del__ calls lltype.free()>
    #     read from addr
    # To prevent this we have to insert a keepalive after the last
    # use of 'addr'. The interface to do that is called lock()/unlock()
    # because it strongly reminds callers not to forget unlock().
    #
    def lock(self, TYPE=_c.sockaddr):
        """Return self.addr_p, cast as a pointer to TYPE. Must call unlock()!
        """
        return rffi.cast(lltype.Ptr(TYPE), self.addr_p)
    lock._annspecialcase_ = 'specialize:ll'
    def unlock(self):
        """To call after we're done with the pointer returned by lock().
        Note that locking and unlocking costs nothing at run-time.
        """
        keepalive_until_here(self)
# ____________________________________________________________
def makeipaddr(name, result=None):
    # Convert a string specifying a host name or one of a few symbolic
    # names to an IPAddress instance. This usually calls getaddrinfo()
    # to do the work; the names "" and "<broadcast>" are special.
    # If 'result' is specified it must be a prebuilt INETAddress or
    # INET6Address that is filled; otherwise a new INETXAddress is returned.
    if result is None:
        family = AF_UNSPEC
    else:
        family = result.family
    # The empty name means the wildcard/any address (AI_PASSIVE).
    if len(name) == 0:
        info = getaddrinfo(None, "0",
                           family=family,
                           socktype=SOCK_DGRAM,   # dummy
                           flags=AI_PASSIVE,
                           address_to_fill=result)
        if len(info) > 1:
            raise RSocketError("wildcard resolved to multiple addresses")
        return info[0][4]
    # IPv4 also supports the special name "<broadcast>".
    if name == '<broadcast>':
        return makeipv4addr(r_uint(INADDR_BROADCAST), result)
    # "dd.dd.dd.dd" format: parse numerically without hitting the resolver.
    digits = name.split('.')
    if len(digits) == 4:
        try:
            d0 = int(digits[0])
            d1 = int(digits[1])
            d2 = int(digits[2])
            d3 = int(digits[3])
        except ValueError:
            pass
        else:
            if (0 <= d0 <= 255 and
                0 <= d1 <= 255 and
                0 <= d2 <= 255 and
                0 <= d3 <= 255):
                # Pack the four octets and convert to network byte order.
                addr = intmask(d0 << 24) | (d1 << 16) | (d2 << 8) | (d3 << 0)
                addr = rffi.cast(rffi.UINT, addr)
                addr = htonl(addr)
                return makeipv4addr(addr, result)
    # generic host name to IP conversion
    info = getaddrinfo(name, None, family=family, address_to_fill=result)
    return info[0][4]
class IPAddress(Address):
    """AF_INET and AF_INET6 addresses"""
    def get_host(self):
        # Create a string object representing an IP address.
        # For IPv4 this is always a string of the form 'dd.dd.dd.dd'
        # (with variable size numbers).
        host, serv = getnameinfo(self, NI_NUMERICHOST | NI_NUMERICSERV)
        return host
    def lock_in_addr(self):
        """Purely abstract.  Subclasses return (raw pointer, size) of the
        in_addr/in6_addr part of the sockaddr; the caller must keep the
        address object alive while using the pointer."""
        raise NotImplementedError
# ____________________________________________________________
HAS_AF_PACKET = 'AF_PACKET' in constants
if HAS_AF_PACKET:
    class PacketAddress(Address):
        """AF_PACKET link-layer address (available only when the platform
        defines AF_PACKET)."""
        family = AF_PACKET
        struct = _c.sockaddr_ll
        maxlen = minlen = sizeof(struct)
        # Fixed buffer sizes of the underlying C structures.
        ifr_name_size = _c.ifreq.c_ifr_name.length
        sll_addr_size = _c.sockaddr_ll.c_sll_addr.length
        def __init__(self, ifindex, protocol, pkttype=0, hatype=0, haddr=""):
            addr = lltype.malloc(_c.sockaddr_ll, flavor='raw', zero=True,
                                 track_allocation=False)
            self.setdata(addr, PacketAddress.maxlen)
            rffi.setintfield(addr, 'c_sll_family', AF_PACKET)
            rffi.setintfield(addr, 'c_sll_protocol', htons(protocol))
            rffi.setintfield(addr, 'c_sll_ifindex', ifindex)
            rffi.setintfield(addr, 'c_sll_pkttype', pkttype)
            rffi.setintfield(addr, 'c_sll_hatype', hatype)
            # Copy the hardware address bytes into the fixed-size field.
            halen = rffi.str2chararray(haddr,
                                       rffi.cast(rffi.CCHARP, addr.c_sll_addr),
                                       PacketAddress.sll_addr_size)
            rffi.setintfield(addr, 'c_sll_halen', halen)
        @staticmethod
        def get_ifindex_from_ifname(fd, ifname):
            # Resolve an interface name to its index via SIOCGIFINDEX.
            p = lltype.malloc(_c.ifreq, flavor='raw')
            iflen = rffi.str2chararray(ifname,
                                       rffi.cast(rffi.CCHARP, p.c_ifr_name),
                                       PacketAddress.ifr_name_size - 1)
            p.c_ifr_name[iflen] = '\0'
            err = _c.ioctl(fd, _c.SIOCGIFINDEX, p)
            ifindex = p.c_ifr_ifindex
            lltype.free(p, flavor='raw')
            if err != 0:
                raise RSocketError("invalid interface name")
            return ifindex
        def get_ifname(self, fd):
            # Reverse lookup: interface index -> name via SIOCGIFNAME;
            # returns "" when the index is 0 or the ioctl fails.
            ifname = ""
            a = self.lock(_c.sockaddr_ll)
            ifindex = rffi.getintfield(a, 'c_sll_ifindex')
            if ifindex:
                p = lltype.malloc(_c.ifreq, flavor='raw')
                rffi.setintfield(p, 'c_ifr_ifindex', ifindex)
                if (_c.ioctl(fd, _c.SIOCGIFNAME, p) == 0):
                    ifname = rffi.charp2strn(
                        rffi.cast(rffi.CCHARP, p.c_ifr_name),
                        PacketAddress.ifr_name_size)
                lltype.free(p, flavor='raw')
            self.unlock()
            return ifname
        def get_protocol(self):
            a = self.lock(_c.sockaddr_ll)
            proto = rffi.getintfield(a, 'c_sll_protocol')
            res = ntohs(proto)
            self.unlock()
            return res
        def get_pkttype(self):
            a = self.lock(_c.sockaddr_ll)
            res = rffi.getintfield(a, 'c_sll_pkttype')
            self.unlock()
            return res
        def get_hatype(self):
            a = self.lock(_c.sockaddr_ll)
            res = rffi.getintfield(a, 'c_sll_hatype')
            self.unlock()
            return res
        def get_haddr(self):
            # Return the first sll_halen bytes of the hardware address.
            a = self.lock(_c.sockaddr_ll)
            lgt = rffi.getintfield(a, 'c_sll_halen')
            d = []
            for i in range(lgt):
                d.append(a.c_sll_addr[i])
            res = "".join(d)
            self.unlock()
            return res
class INETAddress(IPAddress):
    """IPv4 (AF_INET) address: a host plus a 16-bit port."""
    family = AF_INET
    struct = _c.sockaddr_in
    maxlen = minlen = sizeof(struct)
    def __init__(self, host, port):
        # makeipaddr() fills self's sockaddr (possibly via getaddrinfo()).
        makeipaddr(host, self)
        a = self.lock(_c.sockaddr_in)
        rffi.setintfield(a, 'c_sin_port', htons(port))
        self.unlock()
    def __repr__(self):
        try:
            return '<INETAddress %s:%d>' % (self.get_host(), self.get_port())
        except SocketError:
            return '<INETAddress ?>'
    def get_port(self):
        a = self.lock(_c.sockaddr_in)
        port = ntohs(rffi.getintfield(a, 'c_sin_port'))
        self.unlock()
        return port
    def eq(self, other):   # __eq__() is not called by RPython :-/
        return (isinstance(other, INETAddress) and
                self.get_host() == other.get_host() and
                self.get_port() == other.get_port())
    def from_in_addr(in_addr):
        """Build an INETAddress (port left at 0) from a raw in_addr."""
        result = instantiate(INETAddress)
        # store the malloc'ed data into 'result' as soon as possible
        # to avoid leaks if an exception occurs inbetween
        sin = lltype.malloc(_c.sockaddr_in, flavor='raw', zero=True,
                            track_allocation=False)
        result.setdata(sin, sizeof(_c.sockaddr_in))
        # PLAT sin_len
        rffi.setintfield(sin, 'c_sin_family', AF_INET)
        rffi.structcopy(sin.c_sin_addr, in_addr)
        return result
    from_in_addr = staticmethod(from_in_addr)
    def lock_in_addr(self):
        # Return (raw pointer, size) of the in_addr part; the caller must
        # keep 'self' alive while using the pointer.
        a = self.lock(_c.sockaddr_in)
        p = rffi.cast(rffi.VOIDP, a.c_sin_addr)
        return p, sizeof(_c.in_addr)
# ____________________________________________________________
class INET6Address(IPAddress):
    """IPv6 (AF_INET6) address: host, port, flowinfo and scope_id."""
    family = AF_INET6
    struct = _c.sockaddr_in6
    maxlen = minlen = sizeof(struct)
    def __init__(self, host, port, flowinfo=0, scope_id=0):
        # makeipaddr() fills self's sockaddr (possibly via getaddrinfo()).
        makeipaddr(host, self)
        a = self.lock(_c.sockaddr_in6)
        rffi.setintfield(a, 'c_sin6_port', htons(port))
        rffi.setintfield(a, 'c_sin6_flowinfo', htonl(flowinfo))
        rffi.setintfield(a, 'c_sin6_scope_id', scope_id)
        self.unlock()
    def __repr__(self):
        try:
            return '<INET6Address %s:%d %d %d>' % (self.get_host(),
                                                   self.get_port(),
                                                   self.get_flowinfo(),
                                                   self.get_scope_id())
        except SocketError:
            return '<INET6Address ?>'
    def get_port(self):
        a = self.lock(_c.sockaddr_in6)
        port = ntohs(rffi.getintfield(a, 'c_sin6_port'))
        self.unlock()
        return port
    def get_flowinfo(self):
        a = self.lock(_c.sockaddr_in6)
        flowinfo = ntohl(a.c_sin6_flowinfo)
        self.unlock()
        return rffi.cast(lltype.Unsigned, flowinfo)
    def get_scope_id(self):
        a = self.lock(_c.sockaddr_in6)
        scope_id = a.c_sin6_scope_id
        self.unlock()
        return rffi.cast(lltype.Unsigned, scope_id)
    def eq(self, other):   # __eq__() is not called by RPython :-/
        return (isinstance(other, INET6Address) and
                self.get_host() == other.get_host() and
                self.get_port() == other.get_port() and
                self.get_flowinfo() == other.get_flowinfo() and
                self.get_scope_id() == other.get_scope_id())
    def from_in6_addr(in6_addr):
        """Build an INET6Address (port/flowinfo/scope left 0) from a raw
        in6_addr."""
        result = instantiate(INET6Address)
        # store the malloc'ed data into 'result' as soon as possible
        # to avoid leaks if an exception occurs inbetween
        sin = lltype.malloc(_c.sockaddr_in6, flavor='raw', zero=True,
                            track_allocation=False)
        result.setdata(sin, sizeof(_c.sockaddr_in6))
        rffi.setintfield(sin, 'c_sin6_family', AF_INET6)
        rffi.structcopy(sin.c_sin6_addr, in6_addr)
        return result
    from_in6_addr = staticmethod(from_in6_addr)
    def lock_in_addr(self):
        # Return (raw pointer, size) of the in6_addr part; the caller must
        # keep 'self' alive while using the pointer.
        a = self.lock(_c.sockaddr_in6)
        p = rffi.cast(rffi.VOIDP, a.c_sin6_addr)
        return p, sizeof(_c.in6_addr)
# ____________________________________________________________
HAS_AF_UNIX = 'AF_UNIX' in constants
if HAS_AF_UNIX:
    class UNIXAddress(Address):
        """AF_UNIX address: a filesystem path, or on Linux an 'abstract'
        name whose first byte is NUL."""
        family = AF_UNIX
        struct = _c.sockaddr_un
        minlen = offsetof(_c.sockaddr_un, 'c_sun_path')
        maxlen = sizeof(struct)
        def __init__(self, path):
            sun = lltype.malloc(_c.sockaddr_un, flavor='raw', zero=True,
                                track_allocation=False)
            baseofs = offsetof(_c.sockaddr_un, 'c_sun_path')
            self.setdata(sun, baseofs + len(path))
            rffi.setintfield(sun, 'c_sun_family', AF_UNIX)
            # Bug fix: guard len(path) > 0 before reading path[0]; an empty
            # path (valid input, e.g. Linux autobind) used to raise
            # IndexError here.  It now takes the regular branch below.
            if _c.linux and len(path) > 0 and path[0] == '\x00':
                # Linux abstract namespace extension: not NUL-terminated,
                # so the name may fill the whole sun_path buffer.
                if len(path) > sizeof(_c.sockaddr_un.c_sun_path):
                    raise RSocketError("AF_UNIX path too long")
            else:
                # regular NULL-terminated string
                if len(path) >= sizeof(_c.sockaddr_un.c_sun_path):
                    raise RSocketError("AF_UNIX path too long")
                sun.c_sun_path[len(path)] = '\x00'
            for i in range(len(path)):
                sun.c_sun_path[i] = path[i]
        def __repr__(self):
            try:
                return '<UNIXAddress %r>' % (self.get_path(),)
            except SocketError:
                return '<UNIXAddress ?>'
        def get_path(self):
            """Return the path (or abstract name) stored in the sockaddr."""
            a = self.lock(_c.sockaddr_un)
            maxlength = self.addrlen - offsetof(_c.sockaddr_un, 'c_sun_path')
            if _c.linux and maxlength > 0 and a.c_sun_path[0] == '\x00':
                # Linux abstract namespace: the whole stored length counts.
                length = maxlength
            else:
                # regular NULL-terminated string
                length = 0
                while length < maxlength and a.c_sun_path[length] != '\x00':
                    length += 1
            result = ''.join([a.c_sun_path[i] for i in range(length)])
            self.unlock()
            return result
        def eq(self, other):   # __eq__() is not called by RPython :-/
            return (isinstance(other, UNIXAddress) and
                    self.get_path() == other.get_path())
HAS_AF_NETLINK = 'AF_NETLINK' in constants
if HAS_AF_NETLINK:
    class NETLINKAddress(Address):
        """AF_NETLINK address: a (pid, groups bitmask) pair."""
        family = AF_NETLINK
        struct = _c.sockaddr_nl
        maxlen = minlen = sizeof(struct)
        def __init__(self, pid, groups):
            addr = lltype.malloc(_c.sockaddr_nl, flavor='raw', zero=True,
                                 track_allocation=False)
            self.setdata(addr, NETLINKAddress.maxlen)
            rffi.setintfield(addr, 'c_nl_family', AF_NETLINK)
            rffi.setintfield(addr, 'c_nl_pid', pid)
            rffi.setintfield(addr, 'c_nl_groups', groups)
        def get_pid(self):
            a = self.lock(_c.sockaddr_nl)
            pid = a.c_nl_pid
            self.unlock()
            return rffi.cast(lltype.Unsigned, pid)
        def get_groups(self):
            a = self.lock(_c.sockaddr_nl)
            groups = a.c_nl_groups
            self.unlock()
            return rffi.cast(lltype.Unsigned, groups)
        def __repr__(self):
            # Bug fix: the format string had one '%r' for a 2-tuple, which
            # raised "not all arguments converted during string formatting".
            return '<NETLINKAddress %r %r>' % (self.get_pid(),
                                               self.get_groups())
# ____________________________________________________________
def familyclass(family):
    """Return the Address subclass registered for 'family', or the plain
    Address base class for families without a specialized subclass."""
    return _FAMILIES.get(family, Address)
af_get = familyclass
def make_address(addrptr, addrlen, result=None):
    """Build an Address of the subclass matching the family found in the
    raw 'addrptr' buffer, copying 'addrlen' bytes into a fresh raw buffer
    owned by the returned object.  If 'result' is given it must already be
    of the matching family and is filled in place."""
    family = rffi.cast(lltype.Signed, addrptr.c_sa_family)
    if result is None:
        result = instantiate(familyclass(family))
    elif result.family != family:
        raise RSocketError("address family mismatched")
    # copy into a new buffer the address that 'addrptr' points to
    addrlen = rffi.cast(lltype.Signed, addrlen)
    buf = lltype.malloc(rffi.CCHARP.TO, addrlen, flavor='raw',
                        track_allocation=False)
    src = rffi.cast(rffi.CCHARP, addrptr)
    for i in range(addrlen):
        buf[i] = src[i]
    result.setdata(buf, addrlen)
    return result
def makeipv4addr(s_addr, result=None):
    """Build an INETAddress from a raw 32-bit s_addr value (callers pass
    it already in network byte order; see makeipaddr()).  'result', if
    given, must be an INETAddress and is filled in place."""
    if result is None:
        result = instantiate(INETAddress)
    elif result.family != AF_INET:
        raise RSocketError("address family mismatched")
    sin = lltype.malloc(_c.sockaddr_in, flavor='raw', zero=True,
                        track_allocation=False)
    result.setdata(sin, sizeof(_c.sockaddr_in))
    rffi.setintfield(sin, 'c_sin_family', AF_INET)   # PLAT sin_len
    rffi.setintfield(sin.c_sin_addr, 'c_s_addr', s_addr)
    return result
def make_null_address(family):
    """Allocate a zeroed, maximum-size address of the given family, ready
    to be used as an output argument to C calls.  Returns (address,
    maxlen); the address's addrlen stays 0 until a C call fills it in."""
    klass = familyclass(family)
    result = instantiate(klass)
    buf = lltype.malloc(rffi.CCHARP.TO, klass.maxlen, flavor='raw', zero=True,
                        track_allocation=False)
    # Initialize the family to the correct value. Avoids surprises on
    # Windows when calling a function that unexpectedly does not set
    # the output address (e.g. recvfrom() on a connected IPv4 socket).
    rffi.setintfield(rffi.cast(_c.sockaddr_ptr, buf), 'c_sa_family', family)
    result.setdata(buf, 0)
    return result, klass.maxlen
# ____________________________________________________________
class RSocket(object):
    """RPython-level socket object.
    """
    # Class-level defaults; instances overwrite them in __init__().
    fd = _c.INVALID_SOCKET
    family = 0
    type = 0
    proto = 0
    timeout = -1.0
    def __init__(self, family=AF_INET, type=SOCK_STREAM, proto=0,
                 fd=_c.INVALID_SOCKET, inheritable=True):
        """Create a new socket, or wrap an already-open 'fd' if given."""
        if _c.invalid_socket(fd):
            if not inheritable and 'SOCK_CLOEXEC' in constants:
                # Non-inheritable: we try to call socket() with
                # SOCK_CLOEXEC, which may fail. If we get EINVAL,
                # then we fall back to the SOCK_CLOEXEC-less case.
                fd = _c.socket(family, type | SOCK_CLOEXEC, proto)
                if fd < 0:
                    if _c.geterrno() == EINVAL:
                        # Linux older than 2.6.27 does not support
                        # SOCK_CLOEXEC. An EINVAL might be caused by
                        # random other things, though. Don't cache.
                        pass
                    else:
                        raise self.error_handler()
            if _c.invalid_socket(fd):
                fd = _c.socket(family, type, proto)
                if _c.invalid_socket(fd):
                    raise self.error_handler()
                if not inheritable:
                    sock_set_inheritable(fd, False)
        # PLAT RISCOS
        self.fd = fd
        self.family = family
        self.type = type
        self.proto = proto
        self.timeout = defaults.timeout
    @staticmethod
    def empty_rsocket():
        # Create an uninitialized RSocket, bypassing __init__() (so no
        # C-level socket is created); the fd is filled in later.
        rsocket = instantiate(RSocket)
        return rsocket
    @rgc.must_be_light_finalizer
    def __del__(self):
        # Close the fd if it is still open; errno is deliberately ignored
        # in a finalizer.
        fd = self.fd
        if fd != _c.INVALID_SOCKET:
            self.fd = _c.INVALID_SOCKET
            _c.socketclose_no_errno(fd)
    if hasattr(_c, 'fcntl'):
        def _setblocking(self, block):
            # POSIX: toggle O_NONBLOCK via fcntl(), writing the flags back
            # only when they actually change.
            orig_delay_flag = intmask(_c.fcntl(self.fd, _c.F_GETFL, 0))
            if block:
                delay_flag = orig_delay_flag & ~_c.O_NONBLOCK
            else:
                delay_flag = orig_delay_flag | _c.O_NONBLOCK
            if orig_delay_flag != delay_flag:
                _c.fcntl(self.fd, _c.F_SETFL, delay_flag)
    elif hasattr(_c, 'ioctlsocket'):
        def _setblocking(self, block):
            # Windows: toggle non-blocking mode via ioctlsocket(FIONBIO).
            flag = lltype.malloc(rffi.ULONGP.TO, 1, flavor='raw')
            flag[0] = rffi.cast(rffi.ULONG, not block)
            _c.ioctlsocket(self.fd, _c.FIONBIO, flag)
            lltype.free(flag, flavor='raw')
    if hasattr(_c, 'poll') and not _c.poll_may_be_broken:
        def _select(self, for_writing):
            """Returns 0 when reading/writing is possible,
            1 when timing out and -1 on error."""
            if self.timeout <= 0.0 or self.fd == _c.INVALID_SOCKET:
                # blocking I/O or no socket.
                return 0
            pollfd = rffi.make(_c.pollfd)
            try:
                rffi.setintfield(pollfd, 'c_fd', self.fd)
                if for_writing:
                    rffi.setintfield(pollfd, 'c_events', _c.POLLOUT)
                else:
                    rffi.setintfield(pollfd, 'c_events', _c.POLLIN)
                # poll() takes the timeout in milliseconds; round to nearest.
                timeout = int(self.timeout * 1000.0 + 0.5)
                n = _c.poll(rffi.cast(lltype.Ptr(_c.pollfdarray), pollfd),
                            1, timeout)
            finally:
                lltype.free(pollfd, flavor='raw')
            if n < 0:
                return -1
            if n == 0:
                return 1
            return 0
    else:
        # Version without poll(): use select()
        def _select(self, for_writing):
            """Returns 0 when reading/writing is possible,
            1 when timing out and -1 on error."""
            timeout = self.timeout
            if timeout <= 0.0 or self.fd == _c.INVALID_SOCKET:
                # blocking I/O or no socket.
                return 0
            tv = rffi.make(_c.timeval)
            rffi.setintfield(tv, 'c_tv_sec', int(timeout))
            rffi.setintfield(tv, 'c_tv_usec', int((timeout-int(timeout))
                                           * 1000000))
            fds = lltype.malloc(_c.fd_set.TO, flavor='raw')
            _c.FD_ZERO(fds)
            _c.FD_SET(self.fd, fds)
            null = lltype.nullptr(_c.fd_set.TO)
            if for_writing:
                n = _c.select(self.fd + 1, null, fds, null, tv)
            else:
                n = _c.select(self.fd + 1, fds, null, null, tv)
            lltype.free(fds, flavor='raw')
            lltype.free(tv, flavor='raw')
            if n < 0:
                return -1
            if n == 0:
                return 1
            return 0
    def error_handler(self):
        # Build the exception to raise after a failing C call.
        return last_error()
    # build a null address object, ready to be used as output argument to
    # C functions that return an address. It must be unlock()ed after you
    # are done using addr_p.
    def _addrbuf(self):
        addr, maxlen = make_null_address(self.family)
        addrlen_p = lltype.malloc(_c.socklen_t_ptr.TO, flavor='raw')
        addrlen_p[0] = rffi.cast(_c.socklen_t, maxlen)
        return addr, addr.addr_p, addrlen_p
    @jit.dont_look_inside
    def accept(self, inheritable=True):
        """Wait for an incoming connection.
        Return (new socket fd, client address)."""
        if self._select(False) == 1:
            raise SocketTimeout
        address, addr_p, addrlen_p = self._addrbuf()
        try:
            remove_inheritable = not inheritable
            if (not inheritable and 'SOCK_CLOEXEC' in constants
                and _c.HAVE_ACCEPT4
                and _accept4_syscall.attempt_syscall()):
                # Try accept4(SOCK_CLOEXEC) first; fall back to plain
                # accept() if the syscall is unavailable.
                newfd = _c.socketaccept4(self.fd, addr_p, addrlen_p,
                                         SOCK_CLOEXEC)
                if _accept4_syscall.fallback(newfd):
                    newfd = _c.socketaccept(self.fd, addr_p, addrlen_p)
                else:
                    remove_inheritable = False
            else:
                newfd = _c.socketaccept(self.fd, addr_p, addrlen_p)
            addrlen = addrlen_p[0]
        finally:
            lltype.free(addrlen_p, flavor='raw')
            address.unlock()
        if _c.invalid_socket(newfd):
            raise self.error_handler()
        if remove_inheritable:
            sock_set_inheritable(newfd, False)
        address.addrlen = rffi.cast(lltype.Signed, addrlen)
        return (newfd, address)
    def bind(self, address):
        """Bind the socket to a local address."""
        addr = address.lock()
        res = _c.socketbind(self.fd, addr, address.addrlen)
        address.unlock()
        if res < 0:
            raise self.error_handler()
    def close(self):
        """Close the socket. It cannot be used after this call."""
        fd = self.fd
        if fd != _c.INVALID_SOCKET:
            self.fd = _c.INVALID_SOCKET
            res = _c.socketclose(fd)
            if res != 0:
                raise self.error_handler()
    def detach(self):
        """Forget the fd and return it; the caller becomes its owner."""
        fd = self.fd
        self.fd = _c.INVALID_SOCKET
        return fd
    if _c.WIN32:
        def _connect(self, address):
            """Connect the socket to a remote address.
            Returns a (errno, timed_out) pair instead of raising."""
            addr = address.lock()
            res = _c.socketconnect(self.fd, addr, address.addrlen)
            address.unlock()
            errno = _c.geterrno()
            timeout = self.timeout
            if timeout > 0.0 and res < 0 and errno == _c.EWOULDBLOCK:
                # Non-blocking connect in progress: wait with select()
                # for writability (connected) or an exception (failed).
                tv = rffi.make(_c.timeval)
                rffi.setintfield(tv, 'c_tv_sec', int(timeout))
                rffi.setintfield(tv, 'c_tv_usec',
                                 int((timeout-int(timeout)) * 1000000))
                fds = lltype.malloc(_c.fd_set.TO, flavor='raw')
                _c.FD_ZERO(fds)
                _c.FD_SET(self.fd, fds)
                fds_exc = lltype.malloc(_c.fd_set.TO, flavor='raw')
                _c.FD_ZERO(fds_exc)
                _c.FD_SET(self.fd, fds_exc)
                null = lltype.nullptr(_c.fd_set.TO)
                try:
                    n = _c.select(self.fd + 1, null, fds, fds_exc, tv)
                    if n > 0:
                        if _c.FD_ISSET(self.fd, fds):
                            # socket writable == connected
                            return (0, False)
                        else:
                            # per MS docs, call getsockopt() to get error
                            assert _c.FD_ISSET(self.fd, fds_exc)
                            return (self.getsockopt_int(_c.SOL_SOCKET,
                                                        _c.SO_ERROR), False)
                    elif n == 0:
                        return (_c.EWOULDBLOCK, True)
                    else:
                        return (_c.geterrno(), False)
                finally:
                    lltype.free(fds, flavor='raw')
                    lltype.free(fds_exc, flavor='raw')
                    lltype.free(tv, flavor='raw')
            if res == 0:
                errno = 0
            return (errno, False)
    else:
        def _connect(self, address):
            """Connect the socket to a remote address.
            Returns a (errno, timed_out) pair instead of raising."""
            addr = address.lock()
            res = _c.socketconnect(self.fd, addr, address.addrlen)
            address.unlock()
            errno = _c.geterrno()
            if self.timeout > 0.0 and res < 0 and errno == _c.EINPROGRESS:
                # Non-blocking connect in progress: wait for writability,
                # then fetch the final status via SO_ERROR.
                timeout = self._select(True)
                if timeout == 0:
                    res = self.getsockopt_int(_c.SOL_SOCKET, _c.SO_ERROR)
                    if res == _c.EISCONN:
                        res = 0
                    errno = res
                elif timeout == -1:
                    return (_c.geterrno(), False)
                else:
                    return (_c.EWOULDBLOCK, True)
            if res < 0:
                res = errno
            return (res, False)
    def connect(self, address):
        """Connect the socket to a remote address."""
        err, timeout = self._connect(address)
        if timeout:
            raise SocketTimeout
        if err:
            raise CSocketError(err)
    def connect_ex(self, address):
        """This is like connect(address), but returns an error code (the errno
        value) instead of raising an exception when an error occurs."""
        err, timeout = self._connect(address)
        return err
    if hasattr(_c, 'dup'):
        def dup(self, SocketClass=None):
            # Duplicate the fd and wrap it in a new RSocket (or subclass).
            if SocketClass is None:
                SocketClass = RSocket
            fd = _c.dup(self.fd)
            if fd < 0:
                raise self.error_handler()
            return make_socket(fd, self.family, self.type, self.proto,
                               SocketClass=SocketClass)
    @jit.dont_look_inside
    def getpeername(self):
        """Return the address of the remote endpoint."""
        address, addr_p, addrlen_p = self._addrbuf()
        try:
            res = _c.socketgetpeername(self.fd, addr_p, addrlen_p)
            addrlen = addrlen_p[0]
        finally:
            lltype.free(addrlen_p, flavor='raw')
            address.unlock()
        if res < 0:
            raise self.error_handler()
        address.addrlen = rffi.cast(lltype.Signed, addrlen)
        return address
    @jit.dont_look_inside
    def getsockname(self):
        """Return the address of the local endpoint."""
        address, addr_p, addrlen_p = self._addrbuf()
        try:
            res = _c.socketgetsockname(self.fd, addr_p, addrlen_p)
            addrlen = addrlen_p[0]
        finally:
            lltype.free(addrlen_p, flavor='raw')
            address.unlock()
        if res < 0:
            raise self.error_handler()
        address.addrlen = rffi.cast(lltype.Signed, addrlen)
        return address
    @jit.dont_look_inside
    def getsockopt(self, level, option, maxlen):
        """Return the raw bytes of a socket option (at most 'maxlen')."""
        buf = mallocbuf(maxlen)
        try:
            bufsize_p = lltype.malloc(_c.socklen_t_ptr.TO, flavor='raw')
            try:
                bufsize_p[0] = rffi.cast(_c.socklen_t, maxlen)
                res = _c.socketgetsockopt(self.fd, level, option,
                                          buf, bufsize_p)
                if res < 0:
                    raise self.error_handler()
                size = rffi.cast(lltype.Signed, bufsize_p[0])
                assert size >= 0       # socklen_t is signed on Windows
                result = ''.join([buf[i] for i in range(size)])
            finally:
                lltype.free(bufsize_p, flavor='raw')
        finally:
            lltype.free(buf, flavor='raw')
        return result
    @jit.dont_look_inside
    def getsockopt_int(self, level, option):
        """Return an integer-valued socket option."""
        flag_p = lltype.malloc(rffi.INTP.TO, 1, flavor='raw')
        try:
            flagsize_p = lltype.malloc(_c.socklen_t_ptr.TO, flavor='raw')
            try:
                flagsize_p[0] = rffi.cast(_c.socklen_t, rffi.sizeof(rffi.INT))
                res = _c.socketgetsockopt(self.fd, level, option,
                                          rffi.cast(rffi.VOIDP, flag_p),
                                          flagsize_p)
                if res < 0:
                    raise self.error_handler()
                result = rffi.cast(lltype.Signed, flag_p[0])
            finally:
                lltype.free(flagsize_p, flavor='raw')
        finally:
            lltype.free(flag_p, flavor='raw')
        return result
    def gettimeout(self):
        """Return the timeout of the socket. A timeout < 0 means that
        timeouts are disabled in the socket."""
        return self.timeout
    def listen(self, backlog):
        """Enable a server to accept connections. The backlog argument
        must be at least 1; it specifies the number of unaccepted connections
        that the system will allow before refusing new connections."""
        if backlog < 1:
            backlog = 1
        res = _c.socketlisten(self.fd, backlog)
        if res < 0:
            raise self.error_handler()
    def wait_for_data(self, for_writing):
        # Block (up to the timeout) until the socket is ready; raise
        # SocketTimeout on timeout, or the C-level error on failure.
        timeout = self._select(for_writing)
        if timeout != 0:
            if timeout == 1:
                raise SocketTimeout
            else:
                raise self.error_handler()
    def recv(self, buffersize, flags=0):
        """Receive up to buffersize bytes from the socket. For the optional
        flags argument, see the Unix manual. When no data is available, block
        until at least one byte is available or until the remote end is closed.
        When the remote end is closed and all data is read, return the empty
        string."""
        self.wait_for_data(False)
        with rffi.scoped_alloc_buffer(buffersize) as buf:
            read_bytes = _c.socketrecv(self.fd, buf.raw, buffersize, flags)
            if read_bytes >= 0:
                return buf.str(read_bytes)
        raise self.error_handler()
    def recvinto(self, rwbuffer, nbytes, flags=0):
        """Like recv(), but write the data into 'rwbuffer'; return the
        number of bytes read."""
        try:
            rwbuffer.get_raw_address()
        except ValueError:
            # buffer has no stable raw address: receive into a string first
            buf = self.recv(nbytes, flags)
            rwbuffer.setslice(0, buf)
            return len(buf)
        else:
            self.wait_for_data(False)
            raw = rwbuffer.get_raw_address()
            read_bytes = _c.socketrecv(self.fd, raw, nbytes, flags)
            keepalive_until_here(rwbuffer)
            if read_bytes >= 0:
                return read_bytes
            raise self.error_handler()
    @jit.dont_look_inside
    def recvfrom(self, buffersize, flags=0):
        """Like recv(buffersize, flags) but also return the sender's
        address."""
        self.wait_for_data(False)
        with rffi.scoped_alloc_buffer(buffersize) as buf:
            address, addr_p, addrlen_p = self._addrbuf()
            try:
                read_bytes = _c.recvfrom(self.fd, buf.raw, buffersize, flags,
                                         addr_p, addrlen_p)
                addrlen = rffi.cast(lltype.Signed, addrlen_p[0])
            finally:
                lltype.free(addrlen_p, flavor='raw')
                address.unlock()
            if read_bytes >= 0:
                if addrlen:
                    address.addrlen = addrlen
                else:
                    # the C call did not fill in any address
                    address = None
                data = buf.str(read_bytes)
                return (data, address)
        raise self.error_handler()
    def recvfrom_into(self, rwbuffer, nbytes, flags=0):
        """Like recvfrom(), but write the data into 'rwbuffer'; return
        (number of bytes read, sender address)."""
        try:
            rwbuffer.get_raw_address()
        except ValueError:
            # buffer has no stable raw address: receive into a string first
            buf, addr = self.recvfrom(nbytes, flags)
            rwbuffer.setslice(0, buf)
            return len(buf), addr
        else:
            self.wait_for_data(False)
            address, addr_p, addrlen_p = self._addrbuf()
            try:
                raw = rwbuffer.get_raw_address()
                read_bytes = _c.recvfrom(self.fd, raw, nbytes, flags,
                                         addr_p, addrlen_p)
                keepalive_until_here(rwbuffer)
                addrlen = rffi.cast(lltype.Signed, addrlen_p[0])
            finally:
                lltype.free(addrlen_p, flavor='raw')
                address.unlock()
            if read_bytes >= 0:
                if addrlen:
                    address.addrlen = addrlen
                else:
                    # the C call did not fill in any address
                    address = None
                return (read_bytes, address)
            raise self.error_handler()
    def send_raw(self, dataptr, length, flags=0):
        """Send data from a CCHARP buffer."""
        self.wait_for_data(True)
        res = _c.send(self.fd, dataptr, length, flags)
        if res < 0:
            raise self.error_handler()
        return res
    def send(self, data, flags=0):
        """Send a data string to the socket. For the optional flags
        argument, see the Unix manual. Return the number of bytes
        sent; this may be less than len(data) if the network is busy."""
        with rffi.scoped_nonmovingbuffer(data) as dataptr:
            return self.send_raw(dataptr, len(data), flags)
    def sendall(self, data, flags=0, signal_checker=None):
        """Send a data string to the socket. For the optional flags
        argument, see the Unix manual. This calls send() repeatedly
        until all data is sent. If an error occurs, it's impossible
        to tell how much data has been sent."""
        with rffi.scoped_nonmovingbuffer(data) as dataptr:
            remaining = len(data)
            p = dataptr
            while remaining > 0:
                try:
                    res = self.send_raw(p, remaining, flags)
                    p = rffi.ptradd(p, res)
                    remaining -= res
                except CSocketError as e:
                    # retry on EINTR, optionally letting the caller run
                    # pending signal handlers first
                    if e.errno != _c.EINTR:
                        raise
                    if signal_checker is not None:
                        signal_checker()
    def sendto(self, data, length, flags, address):
        """Like send(data, flags) but allows specifying the destination
        address. (Note that 'flags' is mandatory here.)"""
        self.wait_for_data(True)
        addr = address.lock()
        res = _c.sendto(self.fd, data, length, flags,
                        addr, address.addrlen)
        address.unlock()
        if res < 0:
            raise self.error_handler()
        return res
    def setblocking(self, block):
        # blocking mode == no timeout; non-blocking == zero timeout
        if block:
            timeout = -1.0
        else:
            timeout = 0.0
        self.settimeout(timeout)
    def setsockopt(self, level, option, value):
        """Set a socket option from a raw byte string."""
        with rffi.scoped_str2charp(value) as buf:
            res = _c.socketsetsockopt(self.fd, level, option,
                                      rffi.cast(rffi.VOIDP, buf),
                                      len(value))
            if res < 0:
                raise self.error_handler()
    def setsockopt_int(self, level, option, value):
        """Set an integer-valued socket option."""
        with lltype.scoped_alloc(rffi.INTP.TO, 1) as flag_p:
            flag_p[0] = rffi.cast(rffi.INT, value)
            res = _c.socketsetsockopt(self.fd, level, option,
                                      rffi.cast(rffi.VOIDP, flag_p),
                                      rffi.sizeof(rffi.INT))
            if res < 0:
                raise self.error_handler()
    def settimeout(self, timeout):
        """Set the timeout of the socket. A timeout < 0 means that
        timeouts are disabled in the socket."""
        if timeout < 0.0:
            self.timeout = -1.0
        else:
            self.timeout = timeout
        self._setblocking(self.timeout < 0.0)
    def shutdown(self, how):
        """Shut down the reading side of the socket (flag == SHUT_RD), the
        writing side of the socket (flag == SHUT_WR), or both ends
        (flag == SHUT_RDWR)."""
        res = _c.socketshutdown(self.fd, how)
        if res < 0:
            raise self.error_handler()
# ____________________________________________________________
def make_socket(fd, family, type, proto, SocketClass=RSocket):
    """Wrap an already-open 'fd' in a SocketClass instance, bypassing
    __init__() (so no new C-level socket is created)."""
    result = instantiate(SocketClass)
    result.fd = fd
    result.family = family
    result.type = type
    result.proto = proto
    result.timeout = defaults.timeout
    return result
make_socket._annspecialcase_ = 'specialize:arg(4)'
if _c.WIN32:
    def sock_set_inheritable(fd, inheritable):
        # Windows: toggle handle inheritance via SetHandleInformation().
        handle = rffi.cast(rwin32.HANDLE, fd)
        try:
            rwin32.set_handle_inheritable(handle, inheritable)
        except WindowsError:
            raise RSocketError("SetHandleInformation failed")   # xxx
    def sock_get_inheritable(fd):
        handle = rffi.cast(rwin32.HANDLE, fd)
        try:
            return rwin32.get_handle_inheritable(handle)
        except WindowsError:
            raise RSocketError("GetHandleInformation failed")   # xxx
else:
    def sock_set_inheritable(fd, inheritable):
        # POSIX: delegate to rposix; OSError is translated to CSocketError.
        try:
            rposix.set_inheritable(fd, inheritable)
        except OSError as e:
            raise CSocketError(e.errno)
    def sock_get_inheritable(fd):
        try:
            return rposix.get_inheritable(fd)
        except OSError as e:
            raise CSocketError(e.errno)
class SocketError(Exception):
    """Base class of all socket-related errors raised by this module.
    'applevelerrcls' names the application-level exception class to map to."""
    applevelerrcls = 'error'
    def __init__(self):
        pass
    def get_msg(self):
        """Return a human-readable message; subclasses override this."""
        return ''
    def __str__(self):
        return self.get_msg()

class SocketErrorWithErrno(SocketError):
    """A socket error carrying a C-level errno value."""
    def __init__(self, errno):
        self.errno = errno

class RSocketError(SocketError):
    """A socket error with a message fixed at raise time."""
    def __init__(self, message):
        self.message = message
    def get_msg(self):
        return self.message

class CSocketError(SocketErrorWithErrno):
    """An error reported by the C library; message comes from strerror()."""
    def get_msg(self):
        return _c.socket_strerror_str(self.errno)

def last_error():
    """Build a CSocketError from the current C-level errno."""
    return CSocketError(_c.geterrno())

class GAIError(SocketErrorWithErrno):
    """A getaddrinfo() failure; message comes from gai_strerror()."""
    applevelerrcls = 'gaierror'
    def get_msg(self):
        return _c.gai_strerror_str(self.errno)

class HSocketError(SocketError):
    """A host-lookup failure for the given host name."""
    applevelerrcls = 'herror'
    def __init__(self, host):
        self.host = host
        # XXX h_errno is not easily available, and hstrerror() is
        # marked as deprecated in the Linux man pages
    def get_msg(self):
        return "host lookup failed: '%s'" % (self.host,)

class SocketTimeout(SocketError):
    """Raised when an operation does not complete within the timeout."""
    applevelerrcls = 'timeout'
    def get_msg(self):
        return 'timed out'

class Defaults:
    """Module-wide default socket settings."""
    timeout = -1.0   # blocking mode

defaults = Defaults()
# ____________________________________________________________
# Default family for socketpair(): AF_UNIX when the platform has it,
# AF_INET otherwise.
if 'AF_UNIX' not in constants or AF_UNIX is None:
    socketpair_default_family = AF_INET
else:
    socketpair_default_family = AF_UNIX
if hasattr(_c, 'socketpair'):
    def socketpair(family=socketpair_default_family, type=SOCK_STREAM, proto=0,
                   SocketClass=RSocket, inheritable=True):
        """socketpair([family[, type[, proto]]]) -> (socket object, socket object)
        Create a pair of socket objects from the sockets returned by the platform
        socketpair() function.
        The arguments are the same as for socket() except the default family is
        AF_UNIX if defined on the platform; otherwise, the default is AF_INET.
        """
        result = lltype.malloc(_c.socketpair_t, 2, flavor='raw')
        try:
            res = -1
            remove_inheritable = not inheritable
            if not inheritable and 'SOCK_CLOEXEC' in constants:
                # Non-inheritable: we try to call socketpair() with
                # SOCK_CLOEXEC, which may fail. If we get EINVAL,
                # then we fall back to the SOCK_CLOEXEC-less case.
                res = _c.socketpair(family, type | SOCK_CLOEXEC,
                                    proto, result)
                if res < 0:
                    if _c.geterrno() == EINVAL:
                        # Linux older than 2.6.27 does not support
                        # SOCK_CLOEXEC. An EINVAL might be caused by
                        # random other things, though. Don't cache.
                        pass
                    else:
                        raise last_error()
                else:
                    remove_inheritable = False
            #
            if res < 0:
                res = _c.socketpair(family, type, proto, result)
                if res < 0:
                    raise last_error()
            fd0 = rffi.cast(lltype.Signed, result[0])
            fd1 = rffi.cast(lltype.Signed, result[1])
        finally:
            lltype.free(result, flavor='raw')
        if remove_inheritable:
            sock_set_inheritable(fd0, False)
            sock_set_inheritable(fd1, False)
        return (make_socket(fd0, family, type, proto, SocketClass),
                make_socket(fd1, family, type, proto, SocketClass))
if _c.WIN32:
    def dup(fd, inheritable=True):
        # Windows cannot dup() a SOCKET directly: duplicate it via
        # WSADuplicateSocket/WSASocket within the current process.
        with lltype.scoped_alloc(_c.WSAPROTOCOL_INFO, zero=True) as info:
            if _c.WSADuplicateSocket(fd, rwin32.GetCurrentProcessId(), info):
                raise last_error()
            result = _c.WSASocket(
                _c.FROM_PROTOCOL_INFO, _c.FROM_PROTOCOL_INFO,
                _c.FROM_PROTOCOL_INFO, info, 0, 0)
            if result == INVALID_SOCKET:
                raise last_error()
            return result
else:
    def dup(fd, inheritable=True):
        # POSIX: plain dup(), honouring the requested inheritability.
        fd = rposix._dup(fd, inheritable)
        if fd < 0:
            raise last_error()
        return fd
def fromfd(fd, family, type, proto=0, SocketClass=RSocket, inheritable=True):
    # Dup the fd so it and the socket can be closed independently
    fd = dup(fd, inheritable=inheritable)
    return make_socket(fd, family, type, proto, SocketClass)
def getdefaulttimeout():
    # Module-wide default timeout in seconds; -1.0 means blocking.
    return defaults.timeout
def gethostname():
    """Return the current host name, as reported by gethostname(3)."""
    size = 1024
    buf = lltype.malloc(rffi.CCHARP.TO, size, flavor='raw')
    try:
        res = _c.gethostname(buf, size)
        if res < 0:
            raise last_error()
        # charp2strn stops at the NUL terminator, capped at `size`.
        return rffi.charp2strn(buf, size)
    finally:
        lltype.free(buf, flavor='raw')
def gethostbyname(name):
    # this is explicitly not working with IPv6, because the docs say it
    # should not. Just use makeipaddr(name) for an IPv6-friendly version...
    result = instantiate(INETAddress)
    makeipaddr(name, result)
    return result
def gethost_common(hostname, hostent, addr=None):
    """Convert a C `hostent` into (name, aliases, address_list).

    Raises HSocketError when the lookup failed (NULL hostent), and
    CSocketError(EAFNOSUPPORT) when *addr*'s family disagrees with the
    hostent's address family.
    """
    if not hostent:
        raise HSocketError(hostname)
    family = rffi.getintfield(hostent, 'c_h_addrtype')
    if addr is not None and addr.family != family:
        raise CSocketError(_c.EAFNOSUPPORT)
    h_aliases = hostent.c_h_aliases
    if h_aliases:   # h_aliases can be NULL, according to SF #1511317
        aliases = rffi.charpp2liststr(h_aliases)
    else:
        aliases = []
    address_list = []
    h_addr_list = hostent.c_h_addr_list
    i = 0
    paddr = h_addr_list[0]
    # h_addr_list is a NULL-terminated array of packed addresses.
    while paddr:
        if family == AF_INET:
            p = rffi.cast(lltype.Ptr(_c.in_addr), paddr)
            addr = INETAddress.from_in_addr(p)
        elif AF_INET6 is not None and family == AF_INET6:
            p = rffi.cast(lltype.Ptr(_c.in6_addr), paddr)
            addr = INET6Address.from_in6_addr(p)
        else:
            raise RSocketError("unknown address family")
        address_list.append(addr)
        i += 1
        paddr = h_addr_list[i]
    return (rffi.charp2str(hostent.c_h_name), aliases, address_list)
def gethostbyname_ex(name):
    # XXX use gethostbyname_r() if available instead of locks
    addr = gethostbyname(name)
    with _get_netdb_lock():
        # gethostbyname() returns a pointer into static data, hence
        # the lock around both the call and the conversion.
        hostent = _c.gethostbyname(name)
        return gethost_common(name, hostent, addr)
def gethostbyaddr(ip):
    # XXX use gethostbyaddr_r() if available, instead of locks
    addr = makeipaddr(ip)
    assert isinstance(addr, IPAddress)
    with _get_netdb_lock():
        # lock_in_addr() pins the packed address bytes for the C call.
        p, size = addr.lock_in_addr()
        try:
            hostent = _c.gethostbyaddr(p, size, addr.family)
        finally:
            addr.unlock()
        return gethost_common(ip, hostent, addr)
# RPython magic to make _netdb_lock turn either into a regular
# rthread.Lock or a rthread.DummyLock, depending on the config
def _get_netdb_lock():
    # Untranslated / annotation-time default; swapped out below.
    return rthread.dummy_lock
class _Entry(ExtRegistryEntry):
    # At annotation time, replace _get_netdb_lock with the threaded or
    # non-threaded variant according to config.translation.thread.
    _about_ = _get_netdb_lock
    def compute_annotation(self):
        config = self.bookkeeper.annotator.translator.config
        if config.translation.thread:
            fn = _get_netdb_lock_thread
        else:
            fn = _get_netdb_lock_nothread
        return self.bookkeeper.immutablevalue(fn)
def _get_netdb_lock_nothread():
    # No threads configured: a dummy no-op lock suffices.
    return rthread.dummy_lock
class _LockCache(object):
    # One-slot cache holding the lazily created netdb lock.
    lock = None
_lock_cache = _LockCache()
@jit.elidable
def _get_netdb_lock_thread():
    # Lazily allocate a single shared lock; @jit.elidable lets the JIT
    # constant-fold repeated lookups once the lock exists.
    if _lock_cache.lock is None:
        _lock_cache.lock = rthread.allocate_lock()
    return _lock_cache.lock
# done RPython magic
def getaddrinfo(host, port_or_service,
                family=AF_UNSPEC, socktype=0, proto=0, flags=0,
                address_to_fill=None):
    """Resolve (host, service) into a list of
    (family, socktype, proto, canonname, address) tuples."""
    # port_or_service is a string, not an int (but try str(port_number)).
    assert port_or_service is None or isinstance(port_or_service, str)
    if _c._MACOSX and flags & AI_NUMERICSERV and \
            (port_or_service is None or port_or_service == '0'):
        # OS X quirk: AI_NUMERICSERV rejects "0"; '00' works around it.
        port_or_service = '00'
    hints = lltype.malloc(_c.addrinfo, flavor='raw', zero=True)
    rffi.setintfield(hints, 'c_ai_family', family)
    rffi.setintfield(hints, 'c_ai_socktype', socktype)
    rffi.setintfield(hints, 'c_ai_protocol', proto)
    rffi.setintfield(hints, 'c_ai_flags' , flags)
    # XXX need to lock around getaddrinfo() calls?
    p_res = lltype.malloc(rffi.CArray(_c.addrinfo_ptr), 1, flavor='raw')
    error = intmask(_c.getaddrinfo(host, port_or_service, hints, p_res))
    res = p_res[0]
    # The out-parameter cell and the hints struct are no longer needed,
    # whether the call succeeded or not.
    lltype.free(p_res, flavor='raw')
    lltype.free(hints, flavor='raw')
    if error:
        raise GAIError(error)
    try:
        result = []
        info = res
        # Walk the NULL-terminated linked list of addrinfo results.
        while info:
            addr = make_address(info.c_ai_addr,
                                rffi.getintfield(info, 'c_ai_addrlen'),
                                address_to_fill)
            if info.c_ai_canonname:
                canonname = rffi.charp2str(info.c_ai_canonname)
            else:
                canonname = ""
            result.append((rffi.cast(lltype.Signed, info.c_ai_family),
                           rffi.cast(lltype.Signed, info.c_ai_socktype),
                           rffi.cast(lltype.Signed, info.c_ai_protocol),
                           canonname,
                           addr))
            info = info.c_ai_next
            address_to_fill = None # don't fill the same address repeatedly
    finally:
        _c.freeaddrinfo(res)
    return result
def getservbyname(name, proto=None):
    """Return the port number for service *name* (host byte order)."""
    servent = _c.getservbyname(name, proto)
    if not servent:
        raise RSocketError("service/proto not found")
    port = rffi.getintfield(servent, 'c_s_port')
    # s_port is stored in network byte order.
    return ntohs(port)
def getservbyport(port, proto=None):
    # This function is only called from pypy/module/_socket and the range of
    # port is checked there
    assert isinstance(port, int)
    servent = _c.getservbyport(htons(port), proto)
    if not servent:
        raise RSocketError("port/proto not found")
    return rffi.charp2str(servent.c_s_name)
def getprotobyname(name):
    """Return the protocol number for protocol *name*."""
    protoent = _c.getprotobyname(name)
    if not protoent:
        raise RSocketError("protocol not found")
    proto = protoent.c_p_proto
    return rffi.cast(lltype.Signed, proto)
def getnameinfo(address, flags):
    """Resolve *address* into a (host, service) pair of strings."""
    host = lltype.malloc(rffi.CCHARP.TO, NI_MAXHOST, flavor='raw')
    try:
        serv = lltype.malloc(rffi.CCHARP.TO, NI_MAXSERV, flavor='raw')
        try:
            # lock() pins the underlying sockaddr for the C call.
            addr = address.lock()
            error = intmask(_c.getnameinfo(addr, address.addrlen,
                                           host, NI_MAXHOST,
                                           serv, NI_MAXSERV, flags))
            address.unlock()
            if error:
                raise GAIError(error)
            return rffi.charp2str(host), rffi.charp2str(serv)
        finally:
            lltype.free(serv, flavor='raw')
    finally:
        lltype.free(host, flavor='raw')
if hasattr(_c, 'inet_aton'):
    def inet_aton(ip):
        "IPv4 dotted string -> packed 32-bits string"
        size = sizeof(_c.in_addr)
        buf = mallocbuf(size)
        try:
            if _c.inet_aton(ip, rffi.cast(lltype.Ptr(_c.in_addr), buf)):
                return ''.join([buf[i] for i in range(size)])
            else:
                raise RSocketError("illegal IP address string passed to inet_aton")
        finally:
            lltype.free(buf, flavor='raw')
else:
    def inet_aton(ip):
        "IPv4 dotted string -> packed 32-bits string"
        # Fallback on inet_addr(), whose INADDR_NONE error return is
        # ambiguous with the valid address 255.255.255.255: special-case it.
        if ip == "255.255.255.255":
            return "\xff\xff\xff\xff"
        packed_addr = _c.inet_addr(ip)
        if packed_addr == rffi.cast(lltype.Unsigned, INADDR_NONE):
            raise RSocketError("illegal IP address string passed to inet_aton")
        size = sizeof(_c.in_addr)
        buf = mallocbuf(size)
        try:
            rffi.cast(rffi.UINTP, buf)[0] = packed_addr
            return ''.join([buf[i] for i in range(size)])
        finally:
            lltype.free(buf, flavor='raw')
def inet_ntoa(packed):
    "packet 32-bits string -> IPv4 dotted string"
    if len(packed) != sizeof(_c.in_addr):
        raise RSocketError("packed IP wrong length for inet_ntoa")
    buf = rffi.make(_c.in_addr)
    try:
        # Copy the packed bytes into the C struct byte by byte.
        for i in range(sizeof(_c.in_addr)):
            rffi.cast(rffi.CCHARP, buf)[i] = packed[i]
        return rffi.charp2str(_c.inet_ntoa(buf))
    finally:
        lltype.free(buf, flavor='raw')
if hasattr(_c, 'inet_pton'):
    def inet_pton(family, ip):
        "human-readable string -> packed string"
        if family == AF_INET:
            size = sizeof(_c.in_addr)
        elif AF_INET6 is not None and family == AF_INET6:
            size = sizeof(_c.in6_addr)
        else:
            raise RSocketError("unknown address family")
        buf = mallocbuf(size)
        try:
            # inet_pton(3): 1 on success, 0 on bad string, <0 on error.
            res = _c.inet_pton(family, ip, buf)
            if res < 0:
                raise last_error()
            elif res == 0:
                raise RSocketError("illegal IP address string passed "
                                   "to inet_pton")
            else:
                return ''.join([buf[i] for i in range(size)])
        finally:
            lltype.free(buf, flavor='raw')
if hasattr(_c, 'inet_ntop'):
    def inet_ntop(family, packed):
        "packed string -> human-readable string"
        if family == AF_INET:
            srcsize = sizeof(_c.in_addr)
            dstsize = _c.INET_ADDRSTRLEN
        elif AF_INET6 is not None and family == AF_INET6:
            srcsize = sizeof(_c.in6_addr)
            dstsize = _c.INET6_ADDRSTRLEN
        else:
            raise RSocketError("unknown address family")
        if len(packed) != srcsize:
            raise ValueError("packed IP wrong length for inet_ntop")
        # The source buffer must not move while C reads from it.
        with rffi.scoped_nonmovingbuffer(packed) as srcbuf:
            dstbuf = mallocbuf(dstsize)
            try:
                res = _c.inet_ntop(family, rffi.cast(rffi.VOIDP, srcbuf),
                                   dstbuf, dstsize)
                if not res:
                    raise last_error()
                return rffi.charp2str(res)
            finally:
                lltype.free(dstbuf, flavor='raw')
def setdefaulttimeout(timeout):
    # Any negative value means "blocking"; normalize to -1.0.
    if timeout < 0.0:
        timeout = -1.0
    defaults.timeout = timeout
# Remembers whether the accept4() syscall is available on this kernel.
_accept4_syscall = rposix.ENoSysCache()
|
oblique-labs/pyVM
|
rpython/rlib/rsocket.py
|
Python
|
mit
| 55,435
|
#!/usr/bin/python
"""
This is a unittest for qemu_qtree library.
:author: Lukas Doktor <ldoktor@redhat.com>
:copyright: 2012 Red Hat, Inc.
"""
__author__ = """Lukas Doktor (ldoktor@redhat.com)"""
import unittest
import common
from autotest.client.shared.test_utils import mock
import qemu_qtree
OFFSET_PER_LEVEL = qemu_qtree.OFFSET_PER_LEVEL
# Dummy classes and functions
class ParamsDict(dict):
    """Minimal stand-in for the virt-test params dictionary."""

    def objects(self, item):
        # A non-empty value is a space-separated list of object names;
        # missing or empty values yield None (implicit return).
        value = self.get(item)
        if value:
            return value.split(' ')

    def object_params(self, obj):
        # Start from a flat copy, then overlay every '<param>_<obj>'
        # entry onto its un-suffixed '<param>' key.
        suffix = '_%s' % obj
        merged = self.copy()
        for (key, value) in self.iteritems():
            if key.endswith(suffix):
                merged[key[:-len(suffix)]] = value
        return merged
def combine(first, second, offset):
    """Append *second* to *first* line by line, prefixing every appended
    line with offset * OFFSET_PER_LEVEL spaces of indentation."""
    indent = ' ' * OFFSET_PER_LEVEL * offset
    pieces = [first[:]]
    for line in second.splitlines():
        pieces.append('\n' + indent + line)
    return ''.join(pieces)
# Dummy variables
qtree_header = """bus: main-system-bus
type System
"""
dev_ide_disk = """dev: piix3-ide, id ""
bus-prop: addr = 01.1
bus-prop: romfile = <null>
bus-prop: rombar = 1
bus-prop: multifunction = off
bus-prop: command_serr_enable = on
class IDE controller, addr 00:01.1, pci id 8086:7010 (sub 1af4:1100)
bar 4: i/o at 0xc2a0 [0xc2af]
bus: ide.0
type IDE
dev: ide-hd, id ""
dev-prop: drive = ide0-hd0
dev-prop: logical_block_size = 512
dev-prop: physical_block_size = 512
dev-prop: min_io_size = 0
dev-prop: opt_io_size = 0
dev-prop: bootindex = -1
dev-prop: discard_granularity = 0
dev-prop: ver = "1.0.50"
dev-prop: serial = "QM00001"
bus-prop: unit = 0"""
dev_usb_disk = """dev: ich9-usb-uhci1, id "usb1"
dev-prop: masterbus = <null>
dev-prop: firstport = 0
bus-prop: addr = 04.0
bus-prop: romfile = <null>
bus-prop: rombar = 1
bus-prop: multifunction = off
bus-prop: command_serr_enable = on
class USB controller, addr 00:04.0, pci id 8086:2934 (sub 1af4:1100)
bar 4: i/o at 0xc280 [0xc29f]
bus: usb1.0
type USB
dev: usb-hub, id ""
bus-prop: port = <null>
addr 0.3, port 2, speed 12, name QEMU USB Hub, attached
dev: usb-tablet, id "usb-tablet1"
bus-prop: port = <null>
addr 0.4, port 2.1, speed 12, name QEMU USB Tablet, attached
dev: usb-storage, id ""
dev-prop: drive = <null>
dev-prop: logical_block_size = 512
dev-prop: physical_block_size = 512
dev-prop: min_io_size = 0
dev-prop: opt_io_size = 0
dev-prop: bootindex = -1
dev-prop: discard_granularity = 0
dev-prop: serial = <null>
dev-prop: removable = off
bus-prop: port = <null>
addr 0.2, port 1, speed 12, name QEMU USB MSD, attached
bus: scsi.0
type SCSI
dev: scsi-disk, id ""
dev-prop: drive = usb2.6
dev-prop: logical_block_size = 512
dev-prop: physical_block_size = 512
dev-prop: min_io_size = 0
dev-prop: opt_io_size = 0
dev-prop: bootindex = -1
dev-prop: discard_granularity = 0
dev-prop: ver = "1.0.50"
dev-prop: serial = <null>
dev-prop: removable = off
bus-prop: channel = 0
bus-prop: scsi-id = 0
bus-prop: lun = 0"""
dev_dummy_mmio = """dev: fw_cfg, id ""
dev-prop: ctl_iobase = 0x510
dev-prop: data_iobase = 0x511
irq 0
mmio ffffffffffffffff/0000000000000002
mmio ffffffffffffffff/0000000000000001"""
info_block = {'ide0-hd0': {'removable': 0, 'io-status': 'ok',
'file': '/tmp/vl.UWzrkU',
'backing_file': '/dummy/directory/f16-64.qcow2',
'ro': 1, 'drv': 'qcow2', 'encrypted': 0, 'bps': 0,
'bps_rd': 0, 'bps_wr': 0, 'iops': 0, 'iops_rd': 0,
'iops_wr': 0},
'usb2.6': {'removable': 0, 'io-status': 'ok',
'file': '/tmp/stg4.qcow2', 'ro': 0, 'drv': 'qcow2',
'encrypted': 0, 'bps': 0, 'bps_rd': 0, 'bps_wr': 0,
'iops': 0, 'iops_rd': 0, 'iops_wr': 0}}
guest_proc_scsi = """Attached devices:
Host: scsi4 Channel: 00 Id: 00 Lun: 00
Vendor: QEMU Model: QEMU HARDDISK Rev: 1.0.
Type: Direct-Access ANSI SCSI revision: 05"""
params = ParamsDict({'images': 'image1 stg4',
'drive_format': 'ide',
'drive_format_stg4': 'usb2',
'drive_index_image1': '0',
'drive_index_stg4': '6',
'image_format': 'qcow2',
'image_name': '/dummy/directory/f16-64',
'image_name_stg4': 'stg4',
'image_size': '10G',
'image_size_stg4': '1M',
'image_snapshot': 'yes',
'image_snapshot_stg4': 'no',
'image_readonly_image1': 'yes',
'cdroms': 'cdrom1'})
class QtreeContainerTest(unittest.TestCase):
    """ QtreeContainer tests """
    def test_qtree(self):
        """ Correct workflow: parse a composed 'info qtree' dump and verify
        the parsed node types and string rendering. """
        reference_nodes = [qemu_qtree.QtreeDisk, qemu_qtree.QtreeBus,
                           qemu_qtree.QtreeDev, qemu_qtree.QtreeDev,
                           qemu_qtree.QtreeDev, qemu_qtree.QtreeDisk,
                           qemu_qtree.QtreeBus, qemu_qtree.QtreeDev,
                           qemu_qtree.QtreeBus, qemu_qtree.QtreeDev,
                           qemu_qtree.QtreeDev, qemu_qtree.QtreeBus]
        info = qtree_header
        info = combine(info, dev_ide_disk, 1)
        info = combine(info, dev_usb_disk, 1)
        info = combine(info, dev_dummy_mmio, 1)
        info += "\n"
        qtree = qemu_qtree.QtreeContainer()
        qtree.parse_info_qtree(info)
        nodes = qtree.get_nodes()
        self.assertEqual(len(nodes), len(reference_nodes), ("Number of parsed "
                         "nodes is not equal to the number of qtree nodes. "
                         "%s != %s" % (len(nodes), len(reference_nodes))))
        for i in xrange(len(nodes)):
            # BUG FIX: the failure message used to interpolate
            # type(reference_nodes) -- the type of the whole reference
            # list -- instead of the type of the offending node.
            self.assertTrue(isinstance(nodes[i], reference_nodes[i]),
                            ("Node %d should be class %s but is %s instead" %
                             (i, reference_nodes[i], type(nodes[i]))))
        tree = qtree.get_qtree()
        self.assertTrue(isinstance(tree.str_qtree(), str),
                        "qtree.str_qtree() returns nonstring output.")
        self.assertTrue(isinstance(str(tree), str),
                        "str(qtree) returns nonstring output.")
    def test_bad_qtree(self):
        """ Incorrect qtree: an unparseable line must raise ValueError """
        qtree = qemu_qtree.QtreeContainer()
        info = combine(qtree_header, "Very_bad_line", 1)
        self.assertRaises(ValueError, qtree.parse_info_qtree, info)
class QtreeDiskContainerTest(unittest.TestCase):
    """ QtreeDiskContainer tests """
    def setUp(self):
        # Get rid of logging errors
        def dumm(*args, **kvargs):
            pass
        self.god = mock.mock_god(ut=self)
        self.god.stub_with(qemu_qtree.logging, 'error', dumm)
        # Build an info-qtree dump containing one IDE disk and one
        # USB-storage disk (plus a non-disk mmio device as noise).
        info = qtree_header
        info = combine(info, dev_ide_disk, 1)
        info = combine(info, dev_usb_disk, 1)
        info = combine(info, dev_dummy_mmio, 1)
        info += "\n"
        self.no_disks = 2
        self.qtree = qemu_qtree.QtreeContainer()
        self.qtree.parse_info_qtree(info)
        self.disks = qemu_qtree.QtreeDisksContainer(self.qtree.get_nodes())
    def tearDown(self):
        self.god.unstub_all()
    def test_check_params(self):
        """ Correct workflow """
        disks = self.disks
        self.assertEqual(len(self.disks.disks), self.no_disks)
        self.assertEqual(disks.parse_info_block(info_block), (0, 0))
        self.assertEqual(disks.generate_params(), 0)
        self.assertEqual(disks.check_disk_params(params), 2)
        self.assertEqual(disks.check_guests_proc_scsi(guest_proc_scsi),
                         (0, 0, 1, 0))
        # Check the full disk output (including params)
        for disk in disks.disks:
            self.assertTrue(isinstance(str(disk), str),
                            "str(disk) returns nonstring output.")
    def test_check_params_bad(self):
        """ Whole workflow with bad data; each mutation below provokes one
        specific mismatch counted in the asserted return values. """
        disks = self.disks
        # missing disk in info block
        _info_block = info_block.copy()
        _info_block.pop('ide0-hd0')
        # snapshot in info qtree but not in params
        _info_block['usb2.6']['file'] = 'none.qcow2'
        _info_block['usb2.6']['backing_file'] = '/tmp/stg4.qcow2'
        # additional disk in info block
        _info_block['missing_bad_disk1'] = {}
        # additional disk in params
        _params = ParamsDict(params)
        _params['images'] += ' bad_disk2'
        # Missing disk in proc_scsi
        _guest_proc_scsi = guest_proc_scsi.replace('Channel: 00',
                                                   'Channel: 01')
        # Ignored disk in proc_scsi
        _guest_proc_scsi += """
Host: scsi1 Channel: 00 Id: 00 Lun: 00
Vendor: ATA Model: QEMU HARDDISK Rev: 1.0.
Type: Direct-Access ANSI SCSI revision: 05"""
        self.assertEqual(disks.parse_info_block(_info_block), (1, 1))
        self.assertEqual(disks.generate_params(), 1)
        self.assertEqual(disks.check_disk_params(_params), 4)
        self.assertEqual(disks.check_guests_proc_scsi(_guest_proc_scsi),
                         (0, 1, 1, 0))
class KvmQtreeClassTest(unittest.TestCase):
    """ Additional tests for qemu_qtree classes """
    def test_qtree_bus_bus(self):
        """ Bus' child can't be Bus() """
        test = qemu_qtree.QtreeBus()
        self.assertRaises(qemu_qtree.IncompatibleTypeError,
                          test.add_child, qemu_qtree.QtreeBus())
    def test_qtree_dev_dev(self):
        """ Dev's child can't be Dev() """
        test = qemu_qtree.QtreeDev()
        self.assertRaises(qemu_qtree.IncompatibleTypeError,
                          test.add_child, qemu_qtree.QtreeDev())
    def test_qtree_disk_missing_filename(self):
        """ in info_block must contain info about file or backing_file """
        # A disk with block properties but neither 'file' nor
        # 'backing_file' cannot generate params.
        test = qemu_qtree.QtreeDisk()
        test.set_qtree({'something': 'something'})
        test.set_block_prop('prop', 'value')
        self.assertRaises(ValueError, test.generate_params)
if __name__ == "__main__":
""" Run unittest """
unittest.main()
|
ypu/virt-test
|
virttest/qemu_qtree_unittest.py
|
Python
|
gpl-2.0
| 10,740
|
# -*- coding: utf8 -*-
# Copyright (c) 2017-2021 THL A29 Limited, a Tencent company. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# CAM signature/authentication error.
AUTHFAILURE = 'AuthFailure'
# Operation failed.
FAILEDOPERATION = 'FailedOperation'
# Internal error.
INTERNALERROR = 'InternalError'
# Parameter error.
INVALIDPARAMETER = 'InvalidParameter'
# Invalid parameter value.
INVALIDPARAMETERVALUE = 'InvalidParameterValue'
# Quota limit exceeded.
LIMITEXCEEDED = 'LimitExceeded'
# The image usage period has expired.
LIMITEXCEEDED_ORDEREXPIREDERROR = 'LimitExceeded.OrderExpiredError'
# Order placement frequency exceeds the limit.
LIMITEXCEEDED_ORDERLIMITERROR = 'LimitExceeded.OrderLimitError'
# The resource is in use.
RESOURCEINUSE = 'ResourceInUse'
# The resource does not exist.
RESOURCENOTFOUND = 'ResourceNotFound'
# The image is suspected to contain prohibited content.
RESOURCENOTFOUND_SENSITIVEIMAGE = 'ResourceNotFound.SensitiveImage'
# The search contains sensitive words.
RESOURCENOTFOUND_SENSITIVESEARCH = 'ResourceNotFound.SensitiveSearch'
# The resource is unavailable.
RESOURCEUNAVAILABLE = 'ResourceUnavailable'
|
tzpBingo/github-trending
|
codespace/python/tencentcloud/ape/v20200513/errorcodes.py
|
Python
|
mit
| 1,570
|
'''
Copyright (c) 2017 Vanessa Sochat
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
'''
from django.core.files.base import ContentFile
from notifications.signals import notify
from docfish.settings import (
BASE_DIR,
MEDIA_ROOT
)
from django.contrib.auth.models import User
from docfish.apps.users.utils import (
get_user
)
from docfish.apps.pubmed.utils import get
from docfish.apps.main.models import Collection
from django.core.exceptions import PermissionDenied, ValidationError
from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import User
from django.contrib import messages
from django.http import HttpResponse, JsonResponse
from django.http.response import (
HttpResponseRedirect,
HttpResponseForbidden,
Http404
)
from django.shortcuts import (
get_object_or_404,
render_to_response,
render,
redirect
)
from django.utils import timezone
from django.urls import reverse
import pickle
import json
import os
media_dir = os.path.join(BASE_DIR,MEDIA_ROOT)
@login_required
def search_view(request):
    """Render the PubMed search page, listing the user's collections."""
    owned_collections = Collection.objects.filter(owner=request.user)
    return render(request, 'pubmed/search.html',
                  {"collections": owned_collections})
@login_required
def searching_view(request,page=None):
    '''this is the function to do the search using pubmed (Entrez) utility

    Reads 'q' (query string) and 'uid' (user id) from GET for AJAX
    requests or from POST otherwise, fetches one 50-result page from
    PubMed, and renders the result fragment for AJAX callers.
    '''
    # Pagination: 50 results per page; 'retstart' is the result offset.
    if page is None:
        retstart = 0
        page = 0
    else:
        page = int(page)
        retstart = page * 50
    q = None
    uid = None
    if request.is_ajax():
        q = request.GET.get('q')
        uid = request.GET.get('uid')
    # NOTE(review): a POST that is also AJAX overwrites the GET values
    # here -- confirm that is intended.
    if request.method == "POST":
        q = request.POST.get('q')
        uid = request.POST.get('uid')
    if q is not None:
        results = get(query=q,
                      user=request.user,
                      retstart=retstart,
                      retmax=50)
        context = {"results":results,
                   "submit_result": "anything"}
        if uid is not None:
            user = User.objects.get(id=uid)
            context['collections'] = Collection.objects.filter(owner=user)
        context['page'] = page
        context['next_page'] = page + 1
        context['query'] = q
        if page > 0:
            context['previous_page'] = page - 1
        if request.is_ajax():
            return render(request,'pubmed/result.html', context)
    # NOTE(review): non-AJAX requests (even successful POST searches)
    # fall through to this redirect and discard the fetched results --
    # presumably the page is then re-requested via AJAX; verify.
    return redirect('search_pubmed')
|
vsoch/docfish
|
docfish/apps/pubmed/views.py
|
Python
|
mit
| 3,474
|
# SPDX-License-Identifier: GPL-2.0
"""
Built-in function tests.
"""
def test(conf):
    # 'conf' is the kconfig test harness object; oldaskconfig() returns
    # the conf binary's exit status, and the stdout/stderr helpers match
    # against the expected_* fixture files in this directory.
    assert conf.oldaskconfig() == 0
    assert conf.stdout_contains('expected_stdout')
    assert conf.stderr_matches('expected_stderr')
|
koct9i/linux
|
scripts/kconfig/tests/preprocess/builtin_func/__init__.py
|
Python
|
gpl-2.0
| 222
|
import datetime
from typing import Optional, Union
import dateutil.parser
import h5py
import numpy as np
import scipy.constants
from ... import classes2
from ...misc.errorvalue import ErrorValue
# noinspection PyMethodOverriding
class Header(classes2.Header):
_data = None
    @classmethod
    def new_from_file(cls, filename: str, samplename: str, dist: float):
        """Load the header for *samplename* from HDF5 file *filename*,
        picking the stored distance group numerically closest to *dist*."""
        with h5py.File(filename) as f:
            # Keys under Samples/<name>/ are distance strings; sort by
            # absolute difference from the requested distance.
            dist = sorted([d for d in f['Samples'][samplename].keys()], key=lambda d: abs(float(d) - dist))[0]
            return cls.new_from_group(f['Samples'][samplename][dist])
    @classmethod
    def new_from_group(cls, grp: h5py.Group):
        """Build a Header from the attributes of an h5py group."""
        self = cls()
        self._data = {'fsn': 0}
        for a in grp.attrs:
            self._data[a] = grp.attrs[a]
        # Ensure every plain numeric attribute has a matching '<name>.err'
        # entry (defaulting to zero) so the ErrorValue-based properties work.
        for a in list(self._data.keys()):
            if isinstance(self._data[a], (float, np.number)) and (not a.endswith('.err')) and (
                        a + '.err' not in self._data):
                self._data[a + '.err'] = 0.0
        return self
@property
def title(self) -> str:
return self._data['title']
@title.setter
def title(self, value: str):
self._data['title'] = value
@property
def fsn(self) -> int:
return self._data['fsn']
@fsn.setter
def fsn(self, value: int):
self._data['fsn'] = value
    @property
    def energy(self) -> ErrorValue:
        """X-ray energy, derived from the stored wavelength as
        E = h*c / lambda (CODATA constants from scipy; the
        scipy.constants.nano factor suggests the wavelength is stored
        in nm and the energy comes out in eV -- TODO confirm)."""
        return (ErrorValue(*(scipy.constants.physical_constants['speed of light in vacuum'][0::2])) *
                ErrorValue(*(scipy.constants.physical_constants['Planck constant in eV s'][0::2])) /
                scipy.constants.nano /
                self.wavelength)
    @energy.setter
    def energy(self, value: Union[ErrorValue, float]):
        # Inverse of the getter: store the equivalent wavelength instead
        # of keeping a separate energy entry.
        if not isinstance(value, ErrorValue):
            value = ErrorValue(value, 0)
        self.wavelength = (ErrorValue(*(scipy.constants.physical_constants['speed of light in vacuum'][0::2])) *
                           ErrorValue(*(scipy.constants.physical_constants['Planck constant in eV s'][0::2])) /
                           scipy.constants.nano /
                           value)
@property
def wavelength(self) -> ErrorValue:
"""X-ray wavelength"""
return ErrorValue(self._data["wavelength"], self._data["wavelength.err"])
@wavelength.setter
def wavelength(self, value: Union[ErrorValue, float]):
if not isinstance(value, ErrorValue):
value = ErrorValue(value, 0)
self._data['wavelength'] = value.val
self._data['wavelength.err'] = value.err
@property
def distance(self) -> ErrorValue:
"""Sample-to-detector distance"""
return ErrorValue(self._data['distance'], self._data['distance.err'])
@distance.setter
def distance(self, value: Union[ErrorValue, float]):
if not isinstance(value, ErrorValue):
value = ErrorValue(value, 0)
self._data['distance'] = value.val
self._data['distance.err'] = value.err
@property
def temperature(self) -> Optional[ErrorValue]:
"""Sample temperature"""
try:
return ErrorValue(self._data['temperature'], self._data['temperature.err'])
except KeyError:
return None
@temperature.setter
def temperature(self, value: Union[ErrorValue, float]):
if not isinstance(value, ErrorValue):
value = ErrorValue(value, 0)
self._data['temperature'] = value.val
self._data['temperature.err'] = value.err
@property
def beamcenterx(self) -> ErrorValue:
"""X (column) coordinate of the beam center, pixel units, 0-based."""
return ErrorValue(self._data['beamcenterx'], self._data['beamcenterx.err'])
@beamcenterx.setter
def beamcenterx(self, value: Union[ErrorValue, float]):
if not isinstance(value, ErrorValue):
value = ErrorValue(value, 0)
self._data['beamcenterx'] = value.val
self._data['beamcenterx.err'] = value.err
@property
def beamcentery(self) -> ErrorValue:
"""Y (row) coordinate of the beam center, pixel units, 0-based."""
return ErrorValue(self._data['beamcentery'], self._data['beamcentery.err'])
@beamcentery.setter
def beamcentery(self, value: Union[ErrorValue, float]):
if not isinstance(value, ErrorValue):
value = ErrorValue(value, 0)
self._data['beamcentery'] = value.val
self._data['beamcentery.err'] = value.err
@property
def pixelsizex(self) -> ErrorValue:
"""X (column) size of a pixel, in mm units"""
return ErrorValue(self._data['pixelsizex'], self._data['pixelsizex.err'])
@pixelsizex.setter
def pixelsizex(self, value: Union[ErrorValue, float]):
if not isinstance(value, ErrorValue):
value = ErrorValue(value, 0)
self._data['pixelsizex'] = value.val
self._data['pixelsizex.err'] = value.err
@property
def pixelsizey(self) -> ErrorValue:
"""Y (row) size of a pixel, in mm units"""
return ErrorValue(self._data['pixelsizey'], self._data['pixelsizey.err'])
@pixelsizey.setter
def pixelsizey(self, value: Union[ErrorValue, float]):
if not isinstance(value, ErrorValue):
value = ErrorValue(value, 0)
self._data['pixelsizey'] = value.val
self._data['pixelsizey.err'] = value.err
@property
def exposuretime(self) -> ErrorValue:
"""Exposure time in seconds"""
return ErrorValue(self._data['exposuretime'], self._data['exposuretime.err'])
@exposuretime.setter
def exposuretime(self, value: Union[ErrorValue, float]):
if not isinstance(value, ErrorValue):
value = ErrorValue(value, 0)
self._data['exposuretime'] = value.val
self._data['exposuretime.err'] = value.val
@property
def date(self) -> datetime.datetime:
"""Date of the experiment (start of exposure)"""
return dateutil.parser.parse(self._data['date'])
@date.setter
def date(self, value: datetime.datetime):
self._data['date'] = str(value)
@property
def startdate(self) -> datetime.datetime:
return dateutil.parser.parse(self._data['startdate'])
@startdate.setter
def startdate(self, value: datetime.datetime):
self._data['startdate'] = str(value)
@property
def enddate(self) -> datetime.datetime:
return dateutil.parser.parse(self._data['enddate'])
@enddate.setter
def enddate(self, value: datetime.datetime):
self._data['enddate'] = str(value)
@property
def maskname(self) -> Optional[str]:
"""Name of the mask matrix file."""
try:
maskid = self._data['maskname']
if not maskid.endswith('.mat'):
maskid = maskid + '.mat'
return maskid
except KeyError:
return None
@maskname.setter
def maskname(self, value: str):
self._data['maskname'] = value
@property
def transmission(self) -> ErrorValue:
"""Sample transmission."""
return ErrorValue(self._data['transmission'], self._data['transmission.err'])
@transmission.setter
def transmission(self, value: Union[ErrorValue, float]):
if not isinstance(value, ErrorValue):
value = ErrorValue(value, 0)
self._data['transmission'] = value.val
self._data['transmission.err'] = value.err
@property
def vacuum(self) -> ErrorValue:
"""Vacuum pressure around the sample"""
return ErrorValue(self._data['vacuum'], self._data['vacuum.err'])
@vacuum.setter
def vacuum(self, value: Union[ErrorValue, float]):
if not isinstance(value, ErrorValue):
value = ErrorValue(value, 0)
self._data['vacuum'] = value.val
self._data['vacuum.err'] = value.err
@property
def flux(self) -> ErrorValue:
"""X-ray flux in photons/sec."""
try:
return ErrorValue(self._data['flux'], self._data['flux.err'])
except KeyError:
return 1 / self.pixelsizex / self.pixelsizey / ErrorValue(self._data['absintfactor'],
self._data['absintfactor.err'])
@flux.setter
def flux(self, value: Union[ErrorValue, float]):
if not isinstance(value, ErrorValue):
value = ErrorValue(value, 0)
self._data['flux'] = value.val
self._data['flux.err'] = value.err
@property
def thickness(self) -> ErrorValue:
"""Sample thickness in cm"""
return ErrorValue(self._data['thickness'], self._data['thickness.err'])
@thickness.setter
def thickness(self, value: Union[ErrorValue, float]):
if not isinstance(value, ErrorValue):
value = ErrorValue(value, 0)
self._data['thickness'] = value.val
self._data['thickness.err'] = value.err
@property
def distancedecrease(self) -> ErrorValue:
"""Distance by which the sample is nearer to the detector than the
distance calibration sample"""
return ErrorValue(self._data['distancedecrease'], self._data['distancedecrease.err'])
@distancedecrease.setter
def distancedecrease(self, value: Union[ErrorValue, float]):
if not isinstance(value, ErrorValue):
value = ErrorValue(value, 0)
self._data['distancedecrease'] = value.val
self._data['distancedecrease.err'] = value.err
@property
def samplex(self) -> ErrorValue:
"""Horizontal sample position"""
return ErrorValue(self._data['samplex'], self._data['samplex.err'])
@samplex.setter
def samplex(self, value: Union[ErrorValue, float]):
if not isinstance(value, ErrorValue):
value = ErrorValue(value, 0)
self._data['samplex'] = value.val
self._data['samplex.err'] = value.err
@property
def sampley(self) -> ErrorValue:
"""Vertical sample position"""
return ErrorValue(self._data['sampley'], self._data['sampley.err'])
@sampley.setter
def sampley(self, value: Union[ErrorValue, float]):
if not isinstance(value, ErrorValue):
value = ErrorValue(value, 0)
self._data['sampley'] = value.val
self._data['sampley.err'] = value.err
    def motorposition(self, motorname: str) -> float:
        """Position of the motor `motorname`.

        Looks the motor name up directly in the metadata mapping; a missing
        motor raises KeyError (assuming `_data` is dict-like — the usual case).
        """
        return self._data[motorname]
    @property
    def username(self) -> str:
        """Name of the instrument operator"""
        return self._data['username']
    @username.setter
    def username(self, value: str):
        # Stored verbatim in the metadata mapping.
        self._data['username'] = value
    @property
    def project(self) -> str:
        """Project name"""
        return self._data['project']
    @project.setter
    def project(self, value: str):
        # Stored verbatim in the metadata mapping.
        self._data['project'] = value
    @property
    def fsn_emptybeam(self) -> int:
        """File sequence number of the empty beam measurement"""
        return self._data['fsn_emptybeam']
    @fsn_emptybeam.setter
    def fsn_emptybeam(self, value: int):
        # Stored verbatim in the metadata mapping.
        self._data['fsn_emptybeam'] = value
    @property
    def fsn_absintref(self) -> int:
        """File sequence number of the absolute intensity reference measurement
        """
        return self._data['fsn_absintref']
    @fsn_absintref.setter
    def fsn_absintref(self, value: int):
        # Stored verbatim in the metadata mapping.
        self._data['fsn_absintref'] = value
@property
def absintfactor(self) -> ErrorValue:
"""Absolute intensity calibration factor"""
return ErrorValue(self._data['absintfactor'], self._data['absintfactor.err'])
@absintfactor.setter
def absintfactor(self, value: Union[ErrorValue, float]):
if not isinstance(value, ErrorValue):
value = ErrorValue(value, 0)
self._data['absintfactor'] = value.val
self._data['absintfactor.err'] = value.err
@property
def samplex_motor(self) -> Optional[float]:
"""Sample X position, motor reading"""
try:
return self._data['samplex_motor']
except KeyError:
return None
@samplex_motor.setter
def samplex_motor(self, value: float):
self._data['samplex_motor'] = value
@property
def sampley_motor(self) -> Optional[float]:
"""Sample Y position, motor reading"""
try:
return self._data['sampley_motor']
except KeyError:
return None
@sampley_motor.setter
def sampley_motor(self, value: float):
self._data['sampley_motor'] = value
@property
def sample_category(self) -> str:
"""Sample category"""
try:
return self._data['sample_category']
except KeyError:
return 'sample'
@sample_category.setter
def sample_category(self, newvalue: str):
self._data['sample_category'] = newvalue
|
awacha/sastool
|
sastool/io/credo_cpth5/header.py
|
Python
|
bsd-3-clause
| 12,984
|
# Copyright (c) 2012-2021, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from typing import Optional

from .aws import Action as BaseAction
from .aws import BaseARN
service_name = "Amazon Pinpoint"  # human-readable AWS service name
prefix = "mobiletargeting"  # IAM action/ARN namespace prefix for Pinpoint
class Action(BaseAction):
    """An IAM policy action in the "mobiletargeting" (Amazon Pinpoint) namespace."""

    # PEP 484 disallows the implicit-Optional spelling `action: str = None`;
    # annotate explicitly. `None` denotes the bare service prefix.
    def __init__(self, action: Optional[str] = None) -> None:
        super().__init__(prefix, action)
class ARN(BaseARN):
    """An ARN builder pre-scoped to the "mobiletargeting" (Amazon Pinpoint) service."""
    def __init__(self, resource: str = "", region: str = "", account: str = "") -> None:
        super().__init__(
            service=prefix, resource=resource, region=region, account=account
        )
# One module-level constant per Amazon Pinpoint (mobiletargeting) IAM action
# name, for use in awacs policy documents.
CreateApp = Action("CreateApp")
CreateCampaign = Action("CreateCampaign")
CreateEmailTemplate = Action("CreateEmailTemplate")
CreateExportJob = Action("CreateExportJob")
CreateImportJob = Action("CreateImportJob")
CreateInAppTemplate = Action("CreateInAppTemplate")
CreateJourney = Action("CreateJourney")
CreatePushTemplate = Action("CreatePushTemplate")
CreateRecommenderConfiguration = Action("CreateRecommenderConfiguration")
CreateSegment = Action("CreateSegment")
CreateSmsTemplate = Action("CreateSmsTemplate")
CreateVoiceTemplate = Action("CreateVoiceTemplate")
DeleteAdmChannel = Action("DeleteAdmChannel")
DeleteApnsChannel = Action("DeleteApnsChannel")
DeleteApnsSandboxChannel = Action("DeleteApnsSandboxChannel")
DeleteApnsVoipChannel = Action("DeleteApnsVoipChannel")
DeleteApnsVoipSandboxChannel = Action("DeleteApnsVoipSandboxChannel")
DeleteApp = Action("DeleteApp")
DeleteBaiduChannel = Action("DeleteBaiduChannel")
DeleteCampaign = Action("DeleteCampaign")
DeleteEmailChannel = Action("DeleteEmailChannel")
DeleteEmailTemplate = Action("DeleteEmailTemplate")
DeleteEndpoint = Action("DeleteEndpoint")
DeleteEventStream = Action("DeleteEventStream")
DeleteGcmChannel = Action("DeleteGcmChannel")
DeleteInAppTemplate = Action("DeleteInAppTemplate")
DeleteJourney = Action("DeleteJourney")
DeletePushTemplate = Action("DeletePushTemplate")
DeleteRecommenderConfiguration = Action("DeleteRecommenderConfiguration")
DeleteSegment = Action("DeleteSegment")
DeleteSmsChannel = Action("DeleteSmsChannel")
DeleteSmsTemplate = Action("DeleteSmsTemplate")
DeleteUserEndpoints = Action("DeleteUserEndpoints")
DeleteVoiceChannel = Action("DeleteVoiceChannel")
DeleteVoiceTemplate = Action("DeleteVoiceTemplate")
GetAdmChannel = Action("GetAdmChannel")
GetApnsChannel = Action("GetApnsChannel")
GetApnsSandboxChannel = Action("GetApnsSandboxChannel")
GetApnsVoipChannel = Action("GetApnsVoipChannel")
GetApnsVoipSandboxChannel = Action("GetApnsVoipSandboxChannel")
GetApp = Action("GetApp")
GetApplicationDateRangeKpi = Action("GetApplicationDateRangeKpi")
GetApplicationSettings = Action("GetApplicationSettings")
GetApps = Action("GetApps")
GetBaiduChannel = Action("GetBaiduChannel")
GetCampaign = Action("GetCampaign")
GetCampaignActivities = Action("GetCampaignActivities")
GetCampaignDateRangeKpi = Action("GetCampaignDateRangeKpi")
GetCampaignVersion = Action("GetCampaignVersion")
GetCampaignVersions = Action("GetCampaignVersions")
GetCampaigns = Action("GetCampaigns")
GetChannels = Action("GetChannels")
GetEmailChannel = Action("GetEmailChannel")
GetEmailTemplate = Action("GetEmailTemplate")
GetEndpoint = Action("GetEndpoint")
GetEventStream = Action("GetEventStream")
GetExportJob = Action("GetExportJob")
GetExportJobs = Action("GetExportJobs")
GetGcmChannel = Action("GetGcmChannel")
GetImportJob = Action("GetImportJob")
GetImportJobs = Action("GetImportJobs")
GetInAppMessages = Action("GetInAppMessages")
GetInAppTemplate = Action("GetInAppTemplate")
GetJourney = Action("GetJourney")
GetJourneyDateRangeKpi = Action("GetJourneyDateRangeKpi")
GetJourneyExecutionActivityMetrics = Action("GetJourneyExecutionActivityMetrics")
GetJourneyExecutionMetrics = Action("GetJourneyExecutionMetrics")
GetPushTemplate = Action("GetPushTemplate")
GetRecommenderConfiguration = Action("GetRecommenderConfiguration")
GetRecommenderConfigurations = Action("GetRecommenderConfigurations")
GetReports = Action("GetReports")
GetSegment = Action("GetSegment")
GetSegmentExportJobs = Action("GetSegmentExportJobs")
GetSegmentImportJobs = Action("GetSegmentImportJobs")
GetSegmentVersion = Action("GetSegmentVersion")
GetSegmentVersions = Action("GetSegmentVersions")
GetSegments = Action("GetSegments")
GetSmsChannel = Action("GetSmsChannel")
GetSmsTemplate = Action("GetSmsTemplate")
GetUserEndpoints = Action("GetUserEndpoints")
GetVoiceChannel = Action("GetVoiceChannel")
GetVoiceTemplate = Action("GetVoiceTemplate")
ListJourneys = Action("ListJourneys")
ListTagsForResource = Action("ListTagsForResource")
ListTemplateVersions = Action("ListTemplateVersions")
ListTemplates = Action("ListTemplates")
PhoneNumberValidate = Action("PhoneNumberValidate")
PutEventStream = Action("PutEventStream")
PutEvents = Action("PutEvents")
RemoveAttributes = Action("RemoveAttributes")
SendMessages = Action("SendMessages")
SendOTPMessage = Action("SendOTPMessage")
SendUsersMessages = Action("SendUsersMessages")
TagResource = Action("TagResource")
UntagResource = Action("UntagResource")
UpdateAdmChannel = Action("UpdateAdmChannel")
UpdateApnsChannel = Action("UpdateApnsChannel")
UpdateApnsSandboxChannel = Action("UpdateApnsSandboxChannel")
UpdateApnsVoipChannel = Action("UpdateApnsVoipChannel")
UpdateApnsVoipSandboxChannel = Action("UpdateApnsVoipSandboxChannel")
UpdateApplicationSettings = Action("UpdateApplicationSettings")
UpdateBaiduChannel = Action("UpdateBaiduChannel")
UpdateCampaign = Action("UpdateCampaign")
UpdateEmailChannel = Action("UpdateEmailChannel")
UpdateEmailTemplate = Action("UpdateEmailTemplate")
UpdateEndpoint = Action("UpdateEndpoint")
UpdateEndpointsBatch = Action("UpdateEndpointsBatch")
UpdateGcmChannel = Action("UpdateGcmChannel")
UpdateInAppTemplate = Action("UpdateInAppTemplate")
UpdateJourney = Action("UpdateJourney")
UpdateJourneyState = Action("UpdateJourneyState")
UpdatePushTemplate = Action("UpdatePushTemplate")
UpdateRecommenderConfiguration = Action("UpdateRecommenderConfiguration")
UpdateSegment = Action("UpdateSegment")
UpdateSmsChannel = Action("UpdateSmsChannel")
UpdateSmsTemplate = Action("UpdateSmsTemplate")
UpdateTemplateActiveVersion = Action("UpdateTemplateActiveVersion")
UpdateVoiceChannel = Action("UpdateVoiceChannel")
UpdateVoiceTemplate = Action("UpdateVoiceTemplate")
VerifyOTPMessage = Action("VerifyOTPMessage")
|
cloudtools/awacs
|
awacs/mobiletargeting.py
|
Python
|
bsd-2-clause
| 6,372
|
# -*- coding: utf-8 -*-
from module.plugins.internal.SimpleCrypter import SimpleCrypter
class ShareRapidComFolder(SimpleCrypter):
    # Plugin metadata consumed by the pyLoad plugin loader; do not rename.
    __name__ = "ShareRapidComFolder"
    __type__ = "crypter"
    # Matches folder ("slozka") URLs on share-rapid.com and its many mirrors.
    __pattern__ = r"http://(?:www\.)?((share(-?rapid\.(biz|com|cz|info|eu|net|org|pl|sk)|-(central|credit|free|net)\.cz|-ms\.net)|(s-?rapid|rapids)\.(cz|sk))|(e-stahuj|mediatack|premium-rapidshare|rapidshare-premium|qiuck)\.cz|kadzet\.com|stahuj-zdarma\.eu|strelci\.net|universal-share\.com)/(slozka/.+)"
    __version__ = "0.01"
    __description__ = """Share-Rapid.com Folder Plugin"""
    __author_name__ = ("zoidberg")
    __author_mail__ = ("zoidberg@mujmail.cz")
    # Regex used by SimpleCrypter to extract the contained download links.
    LINK_PATTERN = r'<td class="soubor"[^>]*><a href="([^"]+)">'
|
chaosmaker/pyload
|
module/plugins/crypter/ShareRapidComFolder.py
|
Python
|
gpl-3.0
| 730
|
# Copyright 2009-2010 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
__metaclass__ = type
from lp.services.webapp.publisher import canonical_url
from lp.testing.breadcrumbs import BaseBreadcrumbTestCase
class TestHasSpecificationsBreadcrumbOnBlueprintsVHost(
        BaseBreadcrumbTestCase):
    """Test Breadcrumbs for IHasSpecifications on the blueprints vhost."""

    def setUp(self):
        super(TestHasSpecificationsBreadcrumbOnBlueprintsVHost, self).setUp()
        self.person = self.factory.makePerson()
        self.person_specs_url = canonical_url(
            self.person, rootsite='blueprints')
        self.product = self.factory.makeProduct(
            name='crumb-tester', displayname="Crumb Tester")
        self.product_specs_url = canonical_url(
            self.product, rootsite='blueprints')

    def test_product(self):
        # The last crumb on a product's blueprints page is the product's
        # blueprint listing. (assertEqual replaces the deprecated
        # assertEquals alias.)
        crumbs = self.getBreadcrumbsForObject(
            self.product, rootsite='blueprints')
        last_crumb = crumbs[-1]
        self.assertEqual(last_crumb.url, self.product_specs_url)
        self.assertEqual(last_crumb.text, 'Blueprints')

    def test_person(self):
        # Same check for a person's blueprints page.
        crumbs = self.getBreadcrumbsForObject(
            self.person, rootsite='blueprints')
        last_crumb = crumbs[-1]
        self.assertEqual(last_crumb.url, self.person_specs_url)
        self.assertEqual(last_crumb.text, 'Blueprints')
class TestSpecificationBreadcrumb(BaseBreadcrumbTestCase):
    """Test breadcrumbs for an `ISpecification`."""

    def setUp(self):
        super(TestSpecificationBreadcrumb, self).setUp()
        self.product = self.factory.makeProduct(
            name='crumb-tester', displayname="Crumb Tester")
        self.specification = self.factory.makeSpecification(
            title="Crumby Specification", product=self.product)
        self.specification_url = canonical_url(
            self.specification, rootsite='blueprints')

    def test_specification(self):
        # The last crumb is the specification itself, titled after it.
        # (assertEqual replaces the deprecated assertEquals alias.)
        crumbs = self.getBreadcrumbsForObject(self.specification)
        last_crumb = crumbs[-1]
        self.assertEqual(last_crumb.url, self.specification_url)
        self.assertEqual(
            last_crumb.text, self.specification.title)
|
abramhindle/UnnaturalCodeFork
|
python/testdata/launchpad/lib/lp/blueprints/browser/tests/test_breadcrumbs.py
|
Python
|
agpl-3.0
| 2,264
|
import io
import factory
import datetime
from PIL import Image
from django.conf import settings
from factory.django import DjangoModelFactory, ImageField
from django.utils import timezone
from allauth.account.models import EmailAddress
from manopozicija import models
from manopozicija import services
class EmailAddressFactory(DjangoModelFactory):
    # A verified, primary allauth email address mirroring the owning user's
    # email (created automatically by UserFactory via RelatedFactory below).
    email = factory.SelfAttribute('user.email')
    verified = True
    primary = True
    class Meta:
        model = EmailAddress
        django_get_or_create = ('email',)
class UserFactory(DjangoModelFactory):
    # Default active user; username/email are derived from the name fields.
    first_name = 'Vardenis'
    last_name = 'Pavardenis'
    username = factory.LazyAttribute(lambda x: x.first_name.lower())
    email = factory.LazyAttribute(lambda x: '%s.%s@example.com' % (x.first_name.lower(), x.last_name.lower()))
    is_active = True
    emailaddress = factory.RelatedFactory(EmailAddressFactory, 'user')
    class Meta:
        model = settings.AUTH_USER_MODEL
        django_get_or_create = ('email',)
class IndicatorFactory(DjangoModelFactory):
    # Eurostat voter-turnout indicator used as the default topic indicator.
    slug = 'voter-turnout'
    title = 'Rinkimuose dalyvavusių rinkėjų skaičius, palyginti su visų rinkėjų skaičiumi'
    ylabel = 'Aktyvumas procentais'
    source = 'http://ec.europa.eu/eurostat/tgm/table.do?tab=table&init=1&language=en&pcode=tsdgo310&plugin=1'
    class Meta:
        model = models.Indicator
        django_get_or_create = ('slug',)
class BodyFactory(DjangoModelFactory):
    name = 'Seimas'
    class Meta:
        model = models.Body
        django_get_or_create = ('name',)
class TermFactory(DjangoModelFactory):
    # NOTE(review): this factory declares no Meta.model — instantiating it
    # would fail; presumably `model = models.Term` is missing. Confirm.
    body = factory.SubFactory(BodyFactory, name='Seimas')
    since = datetime.datetime(2012, 10, 14)
    until = datetime.datetime(2016, 10, 9)
class TopicFactory(DjangoModelFactory):
    title = 'Balsavimas internetu'
    description = ''
    logo = ImageField(filename='logo.png', **settings.MANOPOZICIJA_TOPIC_LOGO_SIZE._asdict())
    default_body = factory.SubFactory(BodyFactory, name='Seimas')
    class Meta:
        model = models.Topic
        django_get_or_create = ('title',)
    @factory.post_generation
    def indicators(self, create, extracted, **kwargs):
        # Attach the given indicators (or a default one) after creation.
        if create:
            self.indicators.set(extracted or [IndicatorFactory()])
class PartyActorFactory(DjangoModelFactory):
    # A political-party actor (group=True); full name lives in first_name.
    first_name = 'Lietuvos Žaliųjų Partija'
    last_name = ''
    title = 'politinė partija'
    photo = ImageField()
    group = True
    body = factory.SubFactory(BodyFactory, name='Seimas')
    class Meta:
        model = models.Actor
        django_get_or_create = ('first_name',)
class PersonActorFactory(DjangoModelFactory):
    # An individual actor (group=False), e.g. a member of parliament.
    first_name = 'Mantas'
    last_name = 'Adomėnas'
    title = 'seimo narys'
    photo = ImageField()
    group = False
    body = None
    class Meta:
        model = models.Actor
        django_get_or_create = ('first_name', 'last_name')
class PostFactory(DjangoModelFactory):
    # An approved topic post attributed to a person actor.
    body = factory.SubFactory(BodyFactory, name='Seimas')
    topic = factory.SubFactory(TopicFactory)
    actor = factory.SubFactory(PersonActorFactory)
    position = 1
    approved = datetime.datetime(2016, 3, 22, 16, 34, 0)
    timestamp = datetime.datetime(2016, 3, 22, 16, 34, 0)
    class Meta:
        model = models.Post
class SourceFactory(DjangoModelFactory):
    # A news-article source for quotes, deduplicated on source_link.
    actor = factory.SubFactory(PersonActorFactory)
    actor_title = 'seimo narys'
    source_title = 'kauno.diena.lt'
    source_link = 'http://kauno.diena.lt/naujienos/lietuva/politika/skinasi-kelia-balsavimas-internetu-740017'
    timestamp = datetime.datetime(2016, 3, 22, 16, 34, 0)
    position = -1
    class Meta:
        model = models.Source
        django_get_or_create = ('source_link',)
class QuoteFactory(DjangoModelFactory):
    user = factory.SubFactory(UserFactory)
    source = factory.SubFactory(SourceFactory)
    text = 'Nepasiduokime paviršutiniškiems šūkiams – šiuolaikiška, modernu.'
    class Meta:
        model = models.Quote
class ArgumentFactory(DjangoModelFactory):
    topic = factory.SubFactory(TopicFactory)
    title = 'šiuolaikiška, modernu'
    class Meta:
        model = models.Argument
        django_get_or_create = ('topic', 'title')
class PostArgumentFactory(DjangoModelFactory):
    # A quote's argument attached to a post; also ensures the plain
    # Argument row exists for the topic (see post_generation below).
    topic = factory.SubFactory(TopicFactory)
    post = factory.SubFactory(PostFactory)
    quote = factory.SubFactory(QuoteFactory)
    title = 'šiuolaikiška, modernu'
    counterargument = None
    counterargument_title = ''
    position = 1
    class Meta:
        model = models.PostArgument
    @factory.post_generation
    def argument(self, create, extracted, **kwargs):
        if create:
            ArgumentFactory(topic=self.topic, title=self.title)
class EventFactory(DjangoModelFactory):
    # A document-type event (e.g. a legal act), deduplicated on title.
    user = factory.SubFactory(UserFactory)
    type = models.Event.DOCUMENT
    title = 'Balsavimo internetu koncepcijos patvirtinimas'
    source_title = 'e-seimas.lrs.lt'
    source_link = 'https://e-seimas.lrs.lt/portal/legalAct/lt/TAD/TAIS.287235?positionInSearchResults=0&searchModelUUID=eaee1625-cf9f-46c0-931c-482a218029e8'
    timestamp = datetime.datetime(2006, 11, 26)
    position = 0
    group = None
    class Meta:
        model = models.Event
        django_get_or_create = ('title',)
class UserPositionFactory(DjangoModelFactory):
    # A single user's vote (position) on a post.
    user = factory.SubFactory(UserFactory)
    post = factory.SubFactory(PostFactory)
    position = 1
    class Meta:
        model = models.UserPostPosition
class CuratorFactory(DjangoModelFactory):
    user = factory.SubFactory(UserFactory)
    actor = None
    title = 'visuomenės veikėjas'
    photo = ImageField()
    class Meta:
        model = models.Curator
        django_get_or_create = ('user',)
class TopicCuratorFactory(DjangoModelFactory):
    # A curator already approved for the topic.
    approved = datetime.datetime(2016, 3, 22, 16, 34, 0)
    topic = factory.SubFactory(TopicFactory)
    user = factory.SubFactory(UserFactory)
    class Meta:
        model = models.TopicCurator
        django_get_or_create = ('user', 'topic')
class GroupFactory(DjangoModelFactory):
    title = 'Kandidatai į 2016 metų Seimą'
    timestamp = datetime.datetime(2006, 11, 26)
    class Meta:
        model = models.Group
        django_get_or_create = ('title',)
    @factory.post_generation
    def members(self, create, extracted, **kwargs):
        # Add the given members after creation; `extracted` is expected to
        # be an iterable of actors when provided.
        if create:
            for member in extracted:
                self.members.add(member)
def _prep_quote_arguments(arguments):
result = []
for position, argument, counterargument in arguments:
counterargument_title = ''
if counterargument is None:
counterargument = False
elif counterargument is not True:
counterargument = True
counterargument_title = counterargument
result.append({
'title': argument,
'position': position,
'counterargument': counterargument,
'counterargument_title': counterargument_title,
})
return result
def create_quote_agruments(topic, quote, post, arguments):
    """Create PostArgument rows for *quote*/*post* from raw argument triples.

    (The misspelled name is kept for backward compatibility with callers.)
    """
    return [
        PostArgumentFactory(topic=topic, quote=quote, post=post, **data)
        for data in _prep_quote_arguments(arguments)
    ]
def _get_source_link(source, date):
return 'http://%s/%s' % (source, date.replace('-', '/'))
def create_topic_quotes(topic, user, actor, title, source, date, quotes):
    """Create one quote post per entry in *quotes* for *topic*.

    *actor* is a "First Last" string; each quote entry is a
    (upvotes, downvotes, text, arguments) tuple. The vote position implied
    by the up/down counts is recorded for *user*. Returns the created posts.
    """
    if not user:
        user = UserFactory()
    first_name, last_name = actor.split()
    source_data = {
        'actor': PersonActorFactory(first_name=first_name, last_name=last_name),
        'source_link': _get_source_link(source, date),
        'timestamp': datetime.datetime.strptime(date, '%Y-%m-%d'),
    }
    created = []
    for upvotes, downvotes, text, arguments in quotes:
        quote = services.create_quote(
            user, topic, source_data,
            {'text': text, 'reference_link': ''},
            _prep_quote_arguments(arguments),
        )
        post = quote.post.first()
        if upvotes > downvotes:
            position = 1
        elif upvotes < downvotes:
            position = -1
        else:
            position = 0
        services.update_user_position(user, post, position)
        created.append(post)
    return created
def create_topic_event(topic, user, upvotes, downvotes, title, source, date):
    """Create a document event post for *topic* and record the vote position
    implied by *upvotes*/*downvotes* for *user*. Returns the created post."""
    if not user:
        user = UserFactory()
    payload = {
        'type': models.Event.DOCUMENT,
        'title': title,
        'source_link': _get_source_link(source, date),
        'source_title': source,
        'timestamp': datetime.datetime.strptime(date, '%Y-%m-%d'),
    }
    event = services.create_event(user, topic, payload)
    post = event.post.first()
    if upvotes > downvotes:
        position = 1
    elif upvotes < downvotes:
        position = -1
    else:
        position = 0
    services.update_user_position(user, post, position)
    return post
def create_topic_curator(topic, user, name, title):
    """Create a curator post for *topic* from a full "First Last" *name*
    and a curator *title*. Returns the created post."""
    first_name, last_name = name.split()
    curator = services.create_curator(
        user, topic,
        {'first_name': first_name, 'last_name': last_name},
        {'title': title, 'photo': None},
    )
    return curator.posts.first()
def create_topic_posts(topic, user, posts):
    """Create a mixed list of topic posts.

    Each entry in *posts* is (content_type, *args) where content_type is
    'event', 'curator', or anything else for quote batches. Returns all
    created posts in order.
    """
    user = UserFactory() if not user else user
    created = []
    for content_type, *args in posts:
        if content_type == 'event':
            created.append(create_topic_event(topic, user, *args))
        elif content_type == 'curator':
            created.append(create_topic_curator(topic, user, *args))
        else:
            # Quote batches expand into several posts.
            created.extend(create_topic_quotes(topic, user, *args))
    return created
def create_arguments(topic, arguments, approved=True):
    """Create PostArgument fixtures for *topic*.

    *arguments* is an iterable of (position, counterargument, title)
    triples; each gets its own quote and post. When *approved* is falsy the
    posts are created unapproved. Returns the created PostArgument rows.
    """
    approved_ts = timezone.now() if approved else None
    created = []
    for position, counterargument, argument_title in arguments:
        quote = QuoteFactory()
        post = PostFactory(topic=topic, content_object=quote, approved=approved_ts)
        created.append(PostArgumentFactory(
            topic=topic, post=post, quote=quote,
            position=position, title=argument_title,
            counterargument=counterargument,
        ))
    return created
def get_quote_form_data(**kwargs):
    """Return (source, quote, arguments) fixture dicts for the quote form.

    Pass text=... to override the default quote text.
    """
    default_text = 'Nepasiduokime paviršutiniškiems šūkiams – šiuolaikiška, modernu.'
    source = {
        'actor': PersonActorFactory(),
        'source_link': 'http://kauno.diena.lt/naujienos/lietuva/politika/skinasi-kelia-balsavimas-internetu-740017',
        'timestamp': datetime.datetime(2016, 3, 22, 16, 34, 0),
    }
    quote = {
        'reference_link': '',
        'text': kwargs.get('text', default_text),
    }
    arguments = [{
        'title': 'šiuolaikiška, modernu',
        'position': 1,
        'counterargument': True,
        'counterargument_title': '',
    }]
    return source, quote, arguments
def get_image_bytes(width=100, height=100, format='JPEG', color='black'):
    """Return the encoded bytes of a solid-color RGB test image."""
    buffer = io.BytesIO()
    Image.new('RGB', (width, height), color).save(buffer, format=format)
    return buffer.getvalue()
|
sirex/manopozicija.lt
|
manopozicija/factories.py
|
Python
|
agpl-3.0
| 10,988
|
#!/usr/bin/env python2.7
import sys
import csv
import pprint
import re
import gnupg
import os
# Module-level GPG handle; requires the CONTACTDB_HOME environment variable
# to point at a directory containing a .gnupg keyring.
gpg = gnupg.GPG(gnupghome=os.environ['CONTACTDB_HOME'] + '/.gnupg/')
#def get_pgpkey(key_id):
#
def extract_workinghours(field):
    """Parse business hours and timezone out of a TI working-hours field.

    Expected input format (multi-line):
        09:00 to 17:00 Monday to Friday except public holidays.
        Timezone: GMT+01.
        Timezone with DST: GMT+02

    Returns a (begin_hh, end_hh, tz) tuple of strings; all empty strings
    when the field does not match.
    """
    # Raw string avoids the invalid '\-' escape; `is None` is the correct
    # identity test for the no-match case.
    match = re.search(r'.*([0-9]{2}:[0-9]{2}).*(to|\-).*([0-9]{2}:[0-9]{2}).*Timezone: ([^\.]+)',
                      field, re.MULTILINE | re.IGNORECASE | re.DOTALL)
    if match is None:
        return ('', '', '')
    return (match.group(1), match.group(3), match.group(4))
# new TI format: (number indicates field number)
# 0 1 2 3 4 5 6 7 8 9
#Team Name,TI Level,First entered,Last changed,FIRST Membership,TI URL,Official Team Name,Former Team Names,Country,
# 9 10 11 12 13 14
#Date of Establishment,-Type of Constituency,Constituency ASNs,Constituency Domains,Costituency Nets,Country of Constituents,
# 15 16 17 18 19 20 21
#Email,PGP Key (Team),Telephone,Emergency Phone,Telefax,Other communication,Address,
# 22 23 24 25
#-Business Hours,Contacting outside Business Hours,Team Representative,Email (Rep),
# 26 27 28 29 30
#PGP Key (Rep),WWW,FTP,*RFC2350,Operating Status
#
#ACOnet-CERT,Accredited,8/31/00,3/28/03,Full Member,https://tiw.trusted-introducer.org/directory/teams/aconet-cert.html,ACOnet-CERT,,AT,1/1/03,Research & Education,AS1853. - AS679. - AS760. - AS1109-AS1123. - AS1205. - AS1776. - AS1921. - AS2036. - AS2494. - AS2604. - AS6720. - AS8692. - AS12991. - AS16314. - AS30971. - AS39837. - AS41915. - AS42685. - AS47515,ac.at,,AT,cert@aco.net,0x86EDDB8A,+43 1 427714045,+43 1 427714045,+43 1 42779140,N/A,Zentraler Informatikdienst. - Universitaet Wien. - Universitaetsstrasse 7. - A-1010 Wien. - Austria,09:00 to 17:00 Monday to Friday except public holidays. - Timezone: GMT+01. - Timezone with DST: GMT+02,eMail or leave Message on the Voicebox,Alexander Talos-Zens,alexander.talos-zens@univie.ac.at,0x9D9731C5,http://cert.aco.net/,,,
# Read the TI member-team CSV from stdin and emit SQL INSERT statements for
# the contactdb schema on stdout. (Python 2 script: print statements,
# reader.next().)
reader = csv.reader(sys.stdin, delimiter=',')
headers = reader.next()
for r in reader:
    # The export encodes embedded newlines as " - "; restore them.
    r = [ x.replace( " - ", "\n") for x in r ]
    (begin_hh, end_hh, tz) = extract_workinghours(r[22])
    # Map CSV columns (by index, see the format comment above) to
    # contactdb_organisation column names.
    mapping = { "name": r[0], "fullname": r[6],
            "address":r[21],
            "country_id":r[8], "phone":r[17], "emergency_phone":r[18],
            "fax":r[19], "email":r[15], "website":r[27], "timezone":tz, ## XXX FIXME: timezone parsing from field r[22]
            "business_hh_start": begin_hh, "business_hh_end": end_hh, ## XXX FIXME: parse and split this field r[22]
            "date_established": r[9] ,
            "isCERT": "t",
            "ti_url": r[5],
            "pgp_key_id": r[16],
            "confirmed": "t",
            "active": "t",
            "source_id": "TI",
            }
    #pprint.pprint(mapping)
    # do the mapping
    # Build the column list and the value list in matching dict order;
    # empty strings become SQL NULL, other values are E'...'-escaped via repr.
    keystr=""
    valstr=""
    for key in mapping.keys():
        keystr += key + ", "
        valstr += "" + ( 'E' + repr(mapping[key]) if (mapping[key] != '') else 'NULL' ) + ", "
    keystr += 'parent_id'
    valstr += 'NULL'
    # order:
    # first insert the pgp key and uids if it does not exist yet
    # then the person
    # then the organisation
    print "INSERT INTO contactdb_pgpkey (pgp_key_id) values ( " + repr(r[16]) + " );"
    print "INSERT INTO contactdb_organisation( " + keystr + ") values (" + valstr + ");"
    # now insert all countries (if they don't exist yet) into contactdb_organisation_country
# now insert all countries (if they don't exist yet) into contactdb_organisation_country
# database format:
# contactdb=# \d contactdb_organisation
#Column | Type | Modifiers
#-------------------------+--------------------------+---------------------------------------------------------------------
#id | integer | not null default nextval('contactdb_organisation_id_seq'::regclass)
#parent_id | integer |
#name | character varying(1000) | not null
#fullname | character varying(1000) |
#nesting | character varying(5000) |
#protection_profile | character varying(30) |
#isCERT | boolean
#address | character varying(1000) |
#housenr | character varying(50) |
#pobox | character varying(50) |
#city | character varying(200) |
#zipcode | character varying(20) |
#country_id | character varying(2) | not null
#phone | character varying(64) | not null
#emergency_phone | character varying(64) |
#fax | character varying(64) |
#email | character varying(256) | not null
#website | character varying(1000) |
#timezone | character varying(10) |
#business_hh_start | time without time zone | not null
#business_hh_end | time without time zone | not null
#date_established | date |
#pgp_key_id | character varying(1000) |
#confirmed | boolean | not null
#active | boolean | not null
#source_id | character varying(1000) |
#vouching_proposed_by_id | integer | not null
#ti_url | character varying(1000) |
#first_url | character varying(1000) |
#created | timestamp with time zone | not null
#last_updated | timestamp with time zone | not null
|
certtools/contactdb
|
old2/old/contrib/TI-import.py
|
Python
|
agpl-3.0
| 5,623
|
import sys
sys.path.append('..')
from src.sim import Sim
from src.node import Node
from src.link import Link
from src.transport import Transport
from tcp import TCP
from networks.network import Network
import optparse
import os
import subprocess
class AppHandler(object):
    """Receiver-side application: writes incoming TCP data to received/<filename>."""
    def __init__(self,filename):
        self.filename = filename
        self.directory = 'received'
        if not os.path.exists(self.directory):
            os.makedirs(self.directory)
        self.f = open("%s/%s" % (self.directory,self.filename),'w')
    def receive_data(self,data):
        # Called by the TCP connection for each delivered chunk; flush so
        # the final diff sees every byte even if the simulation ends early.
        Sim.trace('AppHandler',"application got %d bytes" % (len(data)))
        self.f.write(data)
        self.f.flush()
class Main(object):
    """Driver: parse CLI options, simulate a TCP file transfer over a lossy
    one-hop network, then diff the received file against the original.
    (Python 2 script: print statements below.)"""
    def __init__(self):
        self.directory = 'received'
        self.parse_options()
        self.run()
        self.diff()
    def parse_options(self):
        # -f/--filename: file to send; -l/--loss: random loss rate [0,1].
        parser = optparse.OptionParser(usage = "%prog [options]",
                                       version = "%prog 0.1")
        parser.add_option("-f","--filename",type="str",dest="filename",
                          default='test.txt',
                          help="filename to send")
        parser.add_option("-l","--loss",type="float",dest="loss",
                          default=0.0,
                          help="random loss rate")
        (options,args) = parser.parse_args()
        self.filename = options.filename
        self.loss = options.loss
    def diff(self):
        # Compare the sent and received files; empty diff output == success.
        args = ['diff','-u',self.filename,self.directory+'/'+self.filename]
        result = subprocess.Popen(args,stdout = subprocess.PIPE).communicate()[0]
        print
        if not result:
            print "File transfer correct!!!"
        else:
            print "File transfer failed. Here is the diff:"
            print
            print result
    def run(self):
        # parameters
        Sim.scheduler.reset()
        Sim.set_debug('AppHandler')
        Sim.set_debug('TCP')
        # setup network
        net = Network('../networks/one-hop.txt')
        net.loss(self.loss)
        # setup routes
        n1 = net.get_node('n1')
        n2 = net.get_node('n2')
        n1.add_forwarding_entry(address=n2.get_address('n1'),link=n1.links[0])
        n2.add_forwarding_entry(address=n1.get_address('n2'),link=n2.links[0])
        # setup transport
        t1 = Transport(n1)
        t2 = Transport(n2)
        # setup application
        a = AppHandler(self.filename)
        # setup connection
        c1 = TCP(t1,n1.get_address('n2'),1,n2.get_address('n1'),1,a,window=1000)
        c2 = TCP(t2,n2.get_address('n1'),1,n1.get_address('n2'),1,a,window=1000)
        # send a file, queueing it in 1000-byte chunks at time 0
        with open(self.filename,'r') as f:
            while True:
                data = f.read(1000)
                if not data:
                    break
                Sim.scheduler.add(delay=0, event=data, handler=c1.send)
        # run the simulation
        Sim.scheduler.run()
        print c1.round_trip_map
# Run the transfer experiment when invoked as a script.
if __name__ == '__main__':
    m = Main()
|
freedomflyer/test
|
lab2/transfer.py
|
Python
|
gpl-2.0
| 3,031
|
import talon
import encoder
# setting up the ciphertext
# Encrypt a long repeating plaintext with the Talon card cipher, keyed by
# mixing/cutting the deck once per keyphrase character.
# (Python 2 script: uses xrange.)
ciphertext = ''
keyphrase = 'CARDCIPHERCARDCIPHER'
plaintext = "ABAB"*25
plaintext *= 5200
deck = [i for i in xrange(1,53)]
c = talon.Cipher()
#c.prepare_deck(deck)
for char in keyphrase:
    c.mix_deck(deck)
    deck = c.count_cut(deck, encoder.plist.index(char)+1)
ct_list = encoder.encrypt(plaintext, c, deck, 0)
for char in ct_list:
    ciphertext += char
# finding the index of coincidence
n = len(ciphertext)
freq = {}
alphabet = list('ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz')
freqsum = 0.0
last = -1
count = 0
# Single pass: tally character frequencies and count adjacent repeats.
for char in ciphertext:
    if not char in freq:
        freq[char] = 1
    else:
        freq[char] += 1
    if char == last:
        count += 1
    last = char
# NOTE(review): freq[char] raises KeyError for any alphabet letter that
# never occurs in the ciphertext — presumably the encoder emits all 52
# letters; confirm, or use freq.get(char, 0).
for char in alphabet:
    freqsum += freq[char] * (freq[char] - 1)
ic = freqsum / (n * (n - 1))
ratio = float(count)/len(ciphertext)
print("Coincidences: {0}/{1}".format(count, len(ciphertext)))
print("Index: {0}".format(ic))
print("Hit Ratio: {0:.12f}".format(ratio))
print("Expected: {0:.12f}".format(1.0/len(alphabet)))
print("Bias: 1/{0:.12f}".format(1/ic))
|
atoponce/cardciphers
|
talon/coincidences.py
|
Python
|
gpl-3.0
| 1,121
|
import sys
import os
from collections import defaultdict
from omim import OMIM
class Labels:
    """Gold-standard gene labels per term.

    Loaded either from a directory of per-term label files (lines of
    "<gene> <label> ...") or from a GMT object whose geneset members are
    positives and all other GMT genes negatives. (Python 2: iteritems.)
    """
    def __init__(self, labels_dir=None, gmt=None, pos_label='1', neg_label='-1'):
        # Maps term id -> (positive gene set, negative gene set).
        self._standards = {}
        if labels_dir:
            for labels_file in os.listdir(labels_dir):
                pos_genes, neg_genes = set(), set()
                with open(labels_dir + '/' + labels_file) as labelf:
                    lines = labelf.readlines()
                    for l in lines:
                        # First two whitespace-separated tokens: gene, label.
                        gene, label = l.strip('\t').split()[:2]
                        if label == pos_label:
                            pos_genes.add(gene)
                        elif label == neg_label:
                            neg_genes.add(gene)
                self._standards[labels_file] = (pos_genes, neg_genes)
        else:
            for gsid, genes in gmt.genesets.iteritems():
                pos_genes = genes
                neg_genes = gmt.genes - pos_genes
                self._standards[gsid] = (pos_genes, neg_genes)
    def get_labels(self, term_id):
        """Return the (positive, negative) gene sets for `term_id`."""
        return self._standards[term_id]
    def get_terms(self):
        """Return the term ids that have labels."""
        return self._standards.keys()
class OntoLabels:
    """Derive (positive, negative) gene labels for an ontology term.

    Genes annotated to the term are positive; genes annotated to any term
    that shares a slim ancestor with it are treated as unknown and excluded;
    all remaining annotated genes are negative.
    """
    def __init__(self, obo=None, slim_terms=None):
        self._slim_terms = slim_terms
        self._obo = obo
    def get_labels(self, term_id):
        """Return (pos, neg) gene sets for `term_id`; empty sets if the term
        is not found in the ontology."""
        term = self._obo.get_term(term_id)
        if not term:
            return (set(), set())
        # Positive genes are annotated to the term
        pos = set(term.get_annotated_genes())
        unknown, all_genes = set(), set()
        term_tree = self._obo.get_ancestors(term_id)
        for obo_term in self._obo.get_termobject_list():
            obo_term_tree = self._obo.get_ancestors(obo_term.go_id)
            genes = set(obo_term.get_annotated_genes())
            # Terms share a slim in their ancestor tree
            if len(set(self._slim_terms & term_tree & obo_term_tree)):
                unknown |= genes
            all_genes |= genes
        neg = all_genes - unknown - pos
        return (pos, neg)
# Smoke test: build the Disease Ontology, load OMIM annotations, and print
# the label counts for one disease term. (Python 2: print statement.)
if __name__ == '__main__':
    from onto import DiseaseOntology
    do = DiseaseOntology.generate()
    OMIM().load_onto(onto=do)
    do.propagate()
    lines = open('../../files/do_slim.txt').readlines()
    slim_terms = set([l.strip() for l in lines])
    ol = OntoLabels(obo=do, slim_terms=slim_terms)
    (pos1, neg1) = ol.get_labels('DOID:0060041')
    print len(pos1), len(neg1)
|
FunctionLab/function
|
flib/core/labels.py
|
Python
|
gpl-3.0
| 2,468
|
'''
Picker class for Panda3d.
Created on Oct 31, 2017
@author: consultit
'''
from panda3d.core import CollisionTraverser, CollisionHandlerQueue, CollisionNode, CollisionRay, \
BitMask32, LPoint3f, NodePath, CardMaker
from direct.showbase.ShowBase import ShowBase
class Picker(object):
    '''
    A class for picking (Panda3d) objects.

    Press `pickKeyOn` over a node tagged with `pickableTag` to grab it and
    drag it at a constant distance from the camera; `pickKeyOff` releases it.
    '''

    def __init__(self, app, render, camera, mouseWatcher, pickKeyOn, pickKeyOff, collideMask,
                 pickableTag='pickable'):
        self.render = render
        self.mouseWatcher = mouseWatcher.node()
        self.camera = camera
        self.camLens = camera.node().get_lens()
        # NOTE: the original assigned self.collideMask twice; once is enough.
        self.collideMask = collideMask
        self.pickableTag = pickableTag
        self.taskMgr = app.task_mgr
        # setup event callbacks for picking/releasing a body
        self.pickKeyOn = pickKeyOn
        self.pickKeyOff = pickKeyOff
        app.accept(self.pickKeyOn, self._pickBody, [self.pickKeyOn])
        app.accept(self.pickKeyOff, self._pickBody, [self.pickKeyOff])
        # collision data: a ray cast from the camera through the mouse point
        self.cTrav = CollisionTraverser()
        self.collisionHandler = CollisionHandlerQueue()
        self.pickerRay = CollisionRay()
        pickerNode = CollisionNode('Utilities.pickerNode')
        pickerNode.add_solid(self.pickerRay)
        pickerNode.set_from_collide_mask(self.collideMask)
        # the picker node itself must never be picked into
        pickerNode.set_into_collide_mask(BitMask32.all_off())
        self.cTrav.add_collider(self.render.attach_new_node(pickerNode), self.collisionHandler)
        # service data
        self.pickedBody = None
        self.oldPickingDist = 0.0
        self.deltaDist = 0.0
        self.dragging = False
        self.updateTask = None

    def _pickBody(self, event):
        """Event handler: grab (pickKeyOn) or release (pickKeyOff) a body."""
        if event == self.pickKeyOn:
            # check mouse position
            if not self.mouseWatcher.has_mouse():
                return
            # Get to and from pos in camera coordinates
            pMouse = self.mouseWatcher.get_mouse()
            pFrom, pTo = LPoint3f(), LPoint3f()
            if not self.camLens.extrude(pMouse, pFrom, pTo):
                return
            # Transform to global coordinates
            rayFromWorld = self.render.get_relative_point(self.camera, pFrom)
            rayToWorld = self.render.get_relative_point(self.camera, pTo)
            # cast a ray to detect a body, traversing downward from render
            self.pickerRay.set_direction(rayToWorld - rayFromWorld)
            self.pickerRay.set_origin(rayFromWorld)
            self.cTrav.traverse(self.render)
            if self.collisionHandler.get_num_entries() == 0:
                return
            self.collisionHandler.sort_entries()
            entry0 = self.collisionHandler.get_entry(0)
            hitPos = entry0.get_surface_point(self.render)
            # climb to the first ancestor carrying the pickable tag;
            # stop on an empty NodePath to avoid calling has_tag() on it
            # (the original could walk past the scene-graph root)
            pickedObject = entry0.get_into_node_path()
            while (not pickedObject.is_empty()
                   and not pickedObject.has_tag(self.pickableTag)):
                pickedObject = pickedObject.get_parent()
            if pickedObject.is_empty() or pickedObject == self.render:
                return
            #
            self.pickedBody = pickedObject
            self.oldPickingDist = (hitPos - rayFromWorld).length()
            self.deltaDist = (self.pickedBody.get_pos(self.render) - hitPos)
            print(self.pickedBody.get_name(), hitPos)
            if not self.dragging:
                self.dragging = True
                # create the task for updating picked body motion
                self.updateTask = self.taskMgr.add(self._movePickedBody,
                                                   '_movePickedBody')
                # set sort/priority
                self.updateTask.set_sort(0)
                self.updateTask.set_priority(0)
        else:
            if self.dragging:
                # remove pick body motion update task
                self.taskMgr.remove('_movePickedBody')
                self.updateTask = None
                self.dragging = False
                self.pickedBody = None

    def _movePickedBody(self, task):
        """Per-frame task: keep the picked body under the mouse cursor."""
        if self.pickedBody and self.dragging:
            # check mouse position
            if self.mouseWatcher.has_mouse():
                # Get to and from pos in camera coordinates
                pMouse = self.mouseWatcher.get_mouse()
                #
                pFrom = LPoint3f()
                pTo = LPoint3f()
                if self.camLens.extrude(pMouse, pFrom, pTo):
                    # Transform to global coordinates
                    rayFromWorld = self.render.get_relative_point(self.camera, pFrom)
                    rayToWorld = self.render.get_relative_point(self.camera, pTo)
                    # keep it at the same picking distance
                    direction = (rayToWorld - rayFromWorld).normalized()
                    direction *= self.oldPickingDist
                    self.pickedBody.set_pos(self.render, rayFromWorld + direction + self.deltaDist)
        #
        return task.cont
if __name__ == '__main__':
    # Demo scene: a ground card, a panda, and a grid of smileys, all pickable
    # with the right mouse button (mouse3).
    app = ShowBase()
    # create the picker
    PICKABLETAG = 'pickable'
    PICKKEYON = 'mouse3'
    PICKKEYOFF = 'mouse3-up'
    picker = Picker(app, app.render, app.cam, app.mouseWatcher, PICKKEYON, PICKKEYOFF,
                    BitMask32.all_on(), PICKABLETAG)
    # some scene data: numR x numC grid, `dist` units apart, centered on origin
    numR = 3
    numC = 3
    dist = 5
    dimRMin = -((numR - 1) * dist) / 2.0
    dimCMin = -((numC - 1) * dist) / 2.0
    # ground
    cm = CardMaker('ground')
    left, right, bottom, top = dimCMin * 1.1, -dimCMin * 1.1, dimRMin * 1.1, -dimRMin * 1.1
    cm.setFrame(left, right, bottom, top)
    ground = app.render.attach_new_node(cm.generate())
    ground.set_pos(0, 0, 0)
    ground.set_p(-90)
    ground.set_color(0.2, 0.6, 0.4, 1)
    ground.set_tag(PICKABLETAG, '')
    # panda
    panda = app.loader.load_model('panda')
    panda.reparent_to(app.render)
    panda.set_pos(0, 0, 6)
    panda.set_scale(0.5)
    panda.set_tag(PICKABLETAG, '')
    # smiley: one model instanced into every grid cell
    smiley = app.loader.load_model('smiley')
    for r in range(numR):
        for c in range(numC):
            smileyInst = NodePath('smiley_' + str(r) + '_' + str(c))
            smiley.instance_to(smileyInst)
            smileyInst.reparent_to(app.render)
            smileyInst.set_pos(dimCMin + dist * c, dimRMin + dist * r, 3)
            smileyInst.set_tag(PICKABLETAG, '')
    # setup camera (fixed; trackball alternative kept for reference)
    # trackball = app.trackball.node()
    # trackball.set_pos(0.0, max(-dimRMin * 2, -dimCMin * 2) * 2, -2.0)
    # trackball.set_hpr(0.0, 25.0, 0.0)
    app.disable_mouse()
    app.camera.set_pos(0.0, max(dimRMin * 2, dimCMin * 2) * 3, 8.0)
    app.camera.set_hpr(0.0, -5.0, 0.0)
    # run
    app.run()
|
consultit/Ely
|
test/direct/picker.py
|
Python
|
lgpl-3.0
| 7,225
|
"""
Tahoma library
~~~~~~~~~~~~~~
:copyright: (c) 2016 by Benjamin Pannier.
:license: Apache 2.0, see LICENSE for more details.
"""
from .action import Action, Command
from .actionGroup import ActionGroup
from .device import Device
from .event import Event, DeviceStateChangedEvent, ExecutionStateChangedEvent, CommandExecutionStateChangedEvent
from .eventState import EventState
from .execution import Execution
from .protocol import Protocol
# Package metadata (used by packaging tools and documentation).
__title__ = 'tahoma'
__version__ = '1.0.0'
__author__ = 'Benjamin Pannier'
__license__ = 'Apache 2.0'
__copyright__ = 'Copyright 2016 Benjamin Pannier'
|
bpannier/TahomaProtocol
|
tahoma/__init__.py
|
Python
|
apache-2.0
| 599
|
#!/usr/bin/env python
# -*- coding:utf-8 -*-
import time
#将uninx转换为date的时间格式
def timestamp_datetime(value):
    """Convert a Unix timestamp (seconds) to a 'YYYY-MM-DD HH:MM:SS' string.

    The conversion uses the local timezone (time.localtime).
    """
    # `fmt` instead of `format` to avoid shadowing the builtin.
    fmt = '%Y-%m-%d %H:%M:%S'
    return time.strftime(fmt, time.localtime(value))
def datetime_timestamp(value):
    """Convert a 'MM/DD/YYYY:HH:MM:SS' string to a Unix timestamp (float).

    The string is interpreted in the local timezone (time.mktime).
    """
    # Parse once: the original called time.strptime twice and discarded
    # the first result.
    return time.mktime(time.strptime(value, '%m/%d/%Y:%H:%M:%S'))
# Quick manual check (Python 2 print statements); output depends on local TZ.
print timestamp_datetime(1332888820)
unix = '10/7/2017:22:52:02'
print datetime_timestamp(unix)
|
zhangyage/Python-oldboy
|
log-an/change_time.py
|
Python
|
apache-2.0
| 552
|
# Copyright (c) 2020 DDN. All rights reserved.
# Use of this source code is governed by a MIT-style
# license that can be found in the LICENSE file.
from chroma_core.services import log_register
from chroma_core.services.power_control.rpc import PowerControlRpc
log = log_register(__name__)
class PowerControlClient(object):
    """Thin facade forwarding power-control operations to the RPC service."""

    @classmethod
    def query_device_outlets(cls, device):
        # RPC layer works with primitive ids, not model instances.
        return PowerControlRpc().query_device_outlets(device.id)

    @classmethod
    def toggle_device_outlets(cls, toggle_state, outlets):
        outlet_ids = [o.id for o in outlets]
        return PowerControlRpc().toggle_device_outlets(toggle_state, outlet_ids)

    @classmethod
    def create_device(cls, device_data):
        # Local import presumably avoids a circular import at module load
        # time — TODO confirm.
        from chroma_core.models import PowerControlDevice

        device_id = PowerControlRpc().create_device(device_data)
        return PowerControlDevice.objects.get(pk=device_id)

    @classmethod
    def remove_device(cls, sockaddr):
        return PowerControlRpc().remove_device(sockaddr)
|
intel-hpdd/intel-manager-for-lustre
|
chroma_core/services/power_control/client.py
|
Python
|
mit
| 1,015
|
from lis.specimen.lab_aliquot_list.models import BaseAliquotCondition
from lis.specimen.lab_aliquot_list.managers import AliquotConditionManager
class AliquotCondition(BaseAliquotCondition):
    """Concrete aliquot-condition model; all fields come from the base class."""

    objects = AliquotConditionManager()

    class Meta:
        app_label = 'lab_clinic_api'
|
botswana-harvard/edc-lab
|
old/lab_clinic_api/models/aliquot_condition.py
|
Python
|
gpl-2.0
| 288
|
from django.template import Library
from browser.models import *
from collections import defaultdict
register = Library()
@register.filter(name="get_range")
def get_range(value, offset=0):
    """Template filter: integers from ``offset`` up to ``offset + value - 1``."""
    start = offset
    stop = value + offset
    return range(start, stop)
@register.filter
def get_years(coursegroup):
    """Render the group's year span, collapsed when it is a single year."""
    years = [p.year for p in coursegroup.partof_set.all()]
    first, last = min(years), max(years)
    if first == last:
        return u'{year}'.format(year=first)
    return u'{min} to {max}'.format(min=first, max=last)
@register.filter
def get_attendance(instance):
    """
    Retrieve :class:`.Attendance` instances associated with a ``course``.

    Results are ordered by role. Accepts a :class:`.Course` or a
    :class:`.Person`; any other type falls through and returns None.
    """
    if type(instance) is Course:
        return instance.attendance_set.distinct('role', 'person_id').order_by('role', 'person_id')
    elif type(instance) is Person:
        return instance.attendance_set.distinct('year', 'role', 'course_id').order_by('year', 'role', 'course_id')
@register.filter
def get_affiliation(person, year):
    """
    Get all unique :class:`.Affiliation` instances for ``person`` in ``year``.

    Uniqueness is per institution (distinct on ``institution_id``).
    """
    return person.affiliation_set.filter(year=year).distinct('institution_id')
@register.filter
def get_affiliations(person):
    """All of a person's affiliations, one per (year, institution), by year."""
    return person.affiliation_set.distinct('year', 'institution_id').order_by('year')
@register.filter
def get_location(person, year):
    """
    Get all unique :class:`.Localization` instances for ``person`` in ``year``.
    """
    return person.localization_set.filter(year=year).distinct()
@register.filter
def get_localizations(person):
    """All localizations for a person, one per (year, location), ordered."""
    return person.localization_set.distinct('year', 'location_id').order_by('year', 'location_id')
@register.filter
def get_denizens(location):
    """People localized at ``location``, one per (year, person), ordered."""
    return location.localization_set.distinct('year', 'person_id').order_by('year', 'person_id')
@register.filter
def get_locations(person):
    """Distinct locations associated with ``person``."""
    return person.locations.distinct('id')
@register.filter
def get_affiliation_count(course):
    """
    Calculate the number of :class:`.Institution`\s associated with a
    ``course`` (distinct institutions of its attendees in the course's year).
    """
    return Affiliation.objects.filter(
        person__in=course.attendees.distinct('id')
    ).filter(
        year=course.year
    ).distinct(
        'institution_id'
    ).count()
@register.filter
def get_coursegroup_attendance_count(coursegroup):
    """
    Calculate the total number of unique attendees across all
    :class:`.Course`\s that are part of ``coursegroup``.
    """
    # distinct('person') already de-duplicates people across courses, so the
    # original TODO about counting unique people appears satisfied — confirm.
    return Attendance.objects.filter(course__in=coursegroup.courses.all()).distinct('person').count()
@register.filter
def get_coursegroup_attendance(coursegroup):
    """All attendance records across every course in the group."""
    return Attendance.objects.filter(course__in=coursegroup.courses.all())
@register.filter
def get_coursegroup_attendees(coursegroup):
    """Distinct people who attended any course in the group."""
    return Person.objects.filter(attendance_set__in=get_coursegroup_attendance(coursegroup)).distinct('id')
@register.filter
def get_coursegroup_affiliation_count(coursegroup):
    # NOTE(review): despite the name this returns an Affiliation queryset,
    # not a count — templates may rely on |length; confirm before changing.
    return Affiliation.objects.filter(person__in=get_coursegroup_attendees(coursegroup))
@register.filter
def get_partof_set(coursegroup):
    """Group memberships, one per (year, course), ordered chronologically."""
    return coursegroup.partof_set.distinct('year', 'course_id').order_by('year', 'course_id')
@register.filter
def get_researches(person):
    """Investigator records for ``person``, de-duplicated and ordered by year."""
    return Investigator.objects.filter(person=person).distinct('year', 'id').order_by('year', 'id')
@register.filter
def get_affiliates(institution):
    """People affiliated with ``institution``, each with their positions.

    Returns a list of dicts (last_name, first_name, pk, positions), where
    positions is the person's distinct (year, position) affiliations here.
    """
    afields = ['person_id', 'year', 'position']
    aqs = Affiliation.objects.filter(institution_id=institution.id)\
                             .distinct(*afields)
    # Group affiliations by person so each person row gets its own list.
    person_affiliations = defaultdict(list)
    for affiliation in aqs.values(*afields):
        person_affiliations[affiliation['person_id']].append(affiliation)

    pfields = ['last_name', 'first_name', 'pk']
    qs = []
    for person in institution.affiliates.distinct('pk').values(*pfields):
        person['positions'] = person_affiliations[person['pk']]
        qs.append(person)
    # HyperlinkedIdentityField requires the HttpRequest to generate an
    # absolute URL.
    return qs
@register.filter
def get_positions(person):
    """Positions held by ``person``, in chronological order."""
    return Position.objects.filter(person=person).order_by('year')
@register.simple_tag
def get_roles_of_positions():
    """Return all role choices declared on the Position model."""
    # list() replaces the original redundant identity comprehension.
    return list(Position.role_choices)
|
erickpeirson/mbl-browser
|
browser/templatetags/app_filters.py
|
Python
|
gpl-3.0
| 4,343
|
########################################################################
# Rancho - Open Source Group/Project Management Tool
# Copyright (C) 2008 The Rancho Team
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
########################################################################
from django.db.models import signals
from django.utils.translation import ugettext_noop as _
from rancho.notification import models as notification
from rancho.message import models as message_app
def create_notice_types(app, created_models, verbosity, **kwargs):
    """post_syncdb handler: register the notice types used by the message app."""
    notification.create_notice_type("message_new", _("New message"), _("A new message has been created"))
    # Fixed user-facing typo "replyed" -> "replied"; the notice-type key
    # "message_replied" is unchanged, so existing references still resolve.
    notification.create_notice_type("message_replied", _("Message replied"), _("A message has been replied to"))
signals.post_syncdb.connect(create_notice_types, message_app)
|
joaquimrocha/Rancho
|
rancho/message/management.py
|
Python
|
agpl-3.0
| 1,442
|
from pytest import mark
import sqlalchemy as sa
from sqlalchemy_continuum import version_class
from tests import TestCase, create_test_cases
class ColumnAliasesBaseTestCase(TestCase):
    """Base fixture: a versioned model whose DB column names are aliased."""

    def create_models(self):
        class TextItem(self.Model):
            __tablename__ = 'text_item'
            __versioned__ = {}

            # DB columns are '_id'/'_name'; mapped attribute names stay id/name.
            id = sa.Column(
                '_id', sa.Integer, autoincrement=True, primary_key=True
            )
            name = sa.Column('_name', sa.Unicode(255))

        self.TextItem = TextItem
# NOTE(review): unconditionally skipped — aliased-column reflection in the
# version table is presumably unsupported; confirm before enabling.
@mark.skipif('True')
class TestVersionTableWithColumnAliases(ColumnAliasesBaseTestCase):
    def test_column_reflection(self):
        # The version table should mirror the aliased DB column name.
        assert '_id' in version_class(self.TextItem).__table__.c
class ColumnAliasesTestCase(ColumnAliasesBaseTestCase):
    """Versioning operations must work transparently with aliased columns."""

    def test_insert(self):
        item = self.TextItem(name=u'Something')
        self.session.add(item)
        self.session.commit()
        # The version record exposes the attribute name, not the DB alias.
        assert item.versions[0].name == u'Something'

    def test_revert(self):
        item = self.TextItem(name=u'Something')
        self.session.add(item)
        self.session.commit()
        item.name = u'Some other thing'
        self.session.commit()
        # Reverting to the first version must not raise despite the alias.
        item.versions[0].revert()
        self.session.commit()

    def test_previous_for_deleted_parent(self):
        item = self.TextItem()
        item.name = u'Some item'
        item.content = u'Some content'
        self.session.add(item)
        self.session.commit()
        self.session.delete(item)
        self.session.commit()
        TextItemVersion = version_class(self.TextItem)
        # Order versions by the transaction column (its name is configurable).
        versions = (
            self.session.query(TextItemVersion)
            .order_by(
                getattr(
                    TextItemVersion,
                    self.options['transaction_column_name']
                )
            )
        ).all()
        # versions[1] is the delete version; its predecessor holds the data.
        assert versions[1].previous.name == u'Some item'
create_test_cases(ColumnAliasesTestCase)
|
avilaton/sqlalchemy-continuum
|
tests/test_column_aliases.py
|
Python
|
bsd-3-clause
| 1,935
|
# coding=utf-8
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=logging-format-interpolation
# pylint: disable=g-direct-tensorflow-import
r"""Common utils."""
import os
import re
import threading
from absl import logging
import numpy as np
import tensorflow.compat.v1 as tf
def get_worker_name(worker_id):
  """Returns `/job:tpu_worker/task:{worker_id}`."""
  return '/job:tpu_worker/task:{}'.format(worker_id)
def get_device_name(worker_id, core_id):
  """Returns `/job:tpu_worker/task:{worker_id}/device:TPU:{core_id}`."""
  # Docstring previously said `device:tpu:` but the value uses uppercase TPU.
  return f'/job:tpu_worker/task:{worker_id}/device:TPU:{core_id}'
def count_params():
  """Count model params."""
  # Excludes variables whose name contains 'teacher' (presumably distillation
  # teacher weights that are not part of the student model — confirm).
  num_params = sum([np.prod([d.value for d in w.shape])
                    for w in tf.trainable_variables()
                    if 'teacher' not in w.name.lower()])
  return num_params
def strip_var_name(var_name):
  """Strips variable name of sub-strings blocking variable name matching.

  Removes sub-strings that should be ignored when matching checkpointed
  variable names to variable names in the training graph, namely:
  - trailing colon + number, e.g. "W:0" --> "W"
  - partitioning info., e.g. "/a/part_12/b" --> "a/b".

  (Note that checkpointed variables do not have partitioning info in their
  name, while model variables do).

  Args:
    var_name: str, variable name.

  Returns:
    stripped variable name.
  """
  # Apply both cleanups in sequence: device/output suffix, then partition id.
  for pattern in (r':\d+$', r'/part_\d+'):
    var_name = re.sub(pattern, '', var_name)
  return var_name
def get_saver(max_to_keep=1, restore_ema=False):
  """Constructs a `Saver`.

  Args:
    max_to_keep: int, number of checkpoints the saver retains.
    restore_ema: bool, if True map each variable to its `ema/`-prefixed
      shadow so restoring loads the EMA values instead of the raw weights.
  """
  var_list = {}
  if restore_ema:
    logging.info('Restore EMA values')
    for v in tf.global_variables():
      # An `ema`-prefixed global variable here would double-prefix below.
      if v.name.startswith('ema'):
        logging.fatal(f'wrong ema var name `{v.name}`')
      if 'global_step' in v.name:
        # global_step is never averaged; restore the raw value.
        var_list['global_step'] = v
      else:
        var_list['ema/' + strip_var_name(v.name)] = v
  else:
    for v in tf.global_variables():
      var_list[strip_var_name(v.name)] = v
  saver = tf.train.Saver(var_list,
                         max_to_keep=max_to_keep,
                         save_relative_paths=True)
  return saver
class AsyncCheckpoint(object):
  """Saves checkpoint using a separated thread."""

  def __init__(self, saver, ckpt_dir, max_to_keep=None):
    self._saver = saver
    self._ckpt_dir = ckpt_dir
    self._max_to_keep = max_to_keep
    self._thread = None          # at most one in-flight save thread
    self.latest_checkpoint = None

  def join(self):
    # Block until any in-flight save finishes.
    if self._thread is not None:
      self._thread.join()

  def save(self, sess, step):
    """Docs."""

    def _save_fn():
      """Run the saver process."""
      raw_sess = sess if isinstance(sess, tf.Session) else sess.raw_session()
      # write_state=False: the `checkpoint` index file is rewritten by hand
      # below so old entries can be pruned.
      ckpt_path = self._saver.save(
          raw_sess,
          save_path=os.path.join(self._ckpt_dir, 'ckpt'),
          global_step=step,
          write_meta_graph=False,
          write_state=False)
      self.latest_checkpoint = ckpt_path[len(self._ckpt_dir) + 1:]
      logging.info(f'Saved checkpoint `{ckpt_path}`')

      all_checkpoints = get_all_checkpoints(self._ckpt_dir)
      assert all_checkpoints is not None
      new_ckpt_content = [f'model_checkpoint_path: "{all_checkpoints[-1]}"']
      if (self._max_to_keep is not None and
          self._max_to_keep < len(all_checkpoints)):
        # NOTE(review): only the oldest checkpoint's files are deleted per
        # save; presumably max_to_keep is exceeded by at most one — confirm.
        pattern = all_checkpoints[0] + '*'
        tf.io.gfile.BulkDelete(tf.io.gfile.Glob(pattern))
        # pylint: disable=invalid-unary-operand-type
        all_checkpoints = all_checkpoints[-self._max_to_keep:]
        # pylint: enable=invalid-unary-operand-type
      for ckpt_name in all_checkpoints:
        new_ckpt_content.append(f'all_model_checkpoint_paths: "{ckpt_name}"')
      checkpoint_file = os.path.join(self._ckpt_dir, 'checkpoint')
      with tf.io.gfile.GFile(checkpoint_file, 'w') as fout:
        fout.write('\n'.join(new_ckpt_content))

    # Skip this save if the previous one is still writing.
    if self._thread is not None:
      self._thread.join(timeout=0.1)
      if self._thread.is_alive():
        logging.info('Saver thread still in progress, skipping checkpoint.')
        return
    self._thread = threading.Thread(target=_save_fn)
    self._thread.start()
def should_log(params):
  """Returns a Boolean `tf.Tensor` dictating whether we should log values."""
  global_step = tf.train.get_or_create_global_step()
  # Log on the very first step and then every `params.log_every` steps.
  first_run = tf.equal(global_step, 1)
  log_every = tf.equal(tf.floormod(global_step, params.log_every), 0)
  return tf.logical_or(first_run, log_every)
def get_all_checkpoints(ckpt_dir):
  """Returns a list of all checkpoints, eg `['ckpt-100', 'ckpt-500']`."""
  # NOTE(review): uses the capitalized gfile API (`IsDirectory`, `Glob`) via
  # `tf.io.gfile` — verify these aliases exist in the pinned TF version.
  if not tf.io.gfile.IsDirectory(ckpt_dir):
    return []
  pattern = ckpt_dir + '/ckpt-*'
  s = len(ckpt_dir) + len('/ckpt-')
  # Parse the step number out of each `ckpt-<step>.<ext>` file name,
  # de-duplicate (one checkpoint has several files), and sort numerically.
  checkpoints = [int(f.split('.')[0][s:]) for f in tf.io.gfile.Glob(pattern)]
  checkpoints = [os.path.join(ckpt_dir, 'ckpt-{0}'.format(v))
                 for v in sorted(set(checkpoints))]
  return checkpoints
def get_latest_checkpoint(ckpt_dir):
  """Returns the newest checkpoint prefix in `ckpt_dir`, or None if absent.

  Temporary checkpoints (paths containing 'temp') are skipped. The previous
  docstring was copy-pasted from `get_all_checkpoints` and wrongly described
  a list return.
  """
  checkpoints = [c for c in get_all_checkpoints(ckpt_dir) if 'temp' not in c]
  return checkpoints[-1] if checkpoints else None
def get_outfeed_ops(params, signature):
  """Create TPU outfeed ops."""
  # Flatten the {name: (dtype, shape)} signature into parallel lists.
  outfeed_dtypes, outfeed_shapes = [], []
  for dtype, shape in signature.values():
    outfeed_dtypes.append(dtype)
    outfeed_shapes.append(shape)

  outfeed_ops = []
  outfeed_graph = tf.Graph()
  dev_assign = params.device_assignment
  # Map each host to the TPU ordinals of the replicas it serves.
  host_to_tpus = {}
  for replica_id in range(params.num_replicas):
    host_device = dev_assign.host_device(replica=replica_id, logical_core=0)
    tpu_ordinal = dev_assign.tpu_ordinal(replica=replica_id, logical_core=0)

    if host_device not in host_to_tpus:
      host_to_tpus[host_device] = [tpu_ordinal]
    else:
      assert tpu_ordinal not in host_to_tpus[host_device]
      host_to_tpus[host_device].append(tpu_ordinal)

  # Dequeue ops live in their own graph so a dedicated session can run them.
  with outfeed_graph.as_default():
    for host, tpus in host_to_tpus.items():
      with tf.device(host):
        for device_ordinal in tpus:
          device_outfeed = tf.raw_ops.OutfeedDequeueTuple(
              dtypes=outfeed_dtypes,
              shapes=outfeed_shapes,
              device_ordinal=device_ordinal)
          outfeed_ops.append(device_outfeed)
  return outfeed_ops, outfeed_graph
class InfeedThread(object):
  """InfeedTread wrapper: one session + daemon thread per infeed graph."""

  def __init__(self, params, infeed_ops, infeed_graphs, name='infeed_thread'):
    if infeed_graphs is not None:
      assert isinstance(infeed_graphs, list)
      assert len(infeed_graphs) == len(infeed_ops)
    self.infeed_ops = infeed_ops
    self.infeed_graphs = infeed_graphs
    # One session per graph, all pointed at the same master.
    self.sessions = []
    for g in infeed_graphs:
      with g.as_default():
        sess = tf.Session(target=params.master, graph=g)
        self.sessions.append(sess)
    self.name = name
    self._threads = []

  def stop(self):
    self.join()
    for sess in self.sessions:
      sess.close()

  def join(self):
    for thread in self._threads:
      if thread is not None:
        thread.join(timeout=0.1)
        del thread

  def start(self, verbose=False):
    """Docs."""
    if verbose:
      logging.info(f'Start thread for `{self.name}`')

    def _infeed_fn(sess, infeed_op, infeed_graph):
      """Run the infeed process."""
      with infeed_graph.as_default():
        sess.run(infeed_op)

    # Launch one daemon thread per (session, op, graph) triple.
    for sess, op, g in zip(self.sessions, self.infeed_ops, self.infeed_graphs):
      thread = threading.Thread(target=_infeed_fn, args=(sess, op, g))
      thread.daemon = True
      thread.start()
      self._threads.append(thread)
class OutfeedThread(object):
  """OutfeedThread wrapper: drains TPU outfeeds into TensorBoard summaries."""

  def __init__(self, params, outfeed_ops, outfeed_graph, outfeed_signature,
               name='outfeed_thread'):
    self.params = params
    self.outfeed_ops = outfeed_ops
    self.outfeed_graph = outfeed_graph
    self.outfeed_signature = outfeed_signature
    with outfeed_graph.as_default():
      self.session = tf.Session(target=params.master, graph=outfeed_graph)
    self.name = name
    self._thread = None

  def join(self):
    if self._thread is not None:
      self._thread.join(timeout=0.1)
      self._thread = None
    self.session.close()

  def start(self, verbose=False):
    """Docs."""
    if verbose:
      logging.info(f'Start thread for `{self.name}`')
    # Idempotent: a second start() while running is a no-op.
    if self._thread is not None:
      return
    params = self.params
    outfeed_signature = self.outfeed_signature

    def _outfeed_fn():
      """Read from `outfeed_dequeue` and write `Summary`."""
      train_logdir = os.path.join(params.output_dir, 'logs', 'train')
      summary_writer = tf.summary.FileWriter(train_logdir)
      summary_tags = list(outfeed_signature.keys())
      while True:
        outfeeds = self.session.run(self.outfeed_ops)
        # Sum the per-replica values into one scalar per tag.
        outfeeds = np.array(outfeeds).reshape([params.num_replicas, -1])
        outfeeds = np.sum(outfeeds, axis=0).tolist()
        summary_values = []
        for tag, value in zip(summary_tags, outfeeds):
          if tag == 'global_step':
            # global_step was summed over replicas above; undo that.
            value /= params.num_replicas
            step = value
          else:
            summary_values.append(tf.Summary.Value(tag=tag, simple_value=value))
        summary_writer.add_summary(tf.Summary(value=summary_values), step)
        summary_writer.flush()
        # Stop draining once training is done.
        if step >= params.num_train_steps:
          summary_writer.close()
          break

    self._thread = threading.Thread(target=_outfeed_fn)
    self._thread.daemon = True
    self._thread.start()
def setup_ema(params, name_scope=None):
  """Create exponential moving average for all variables under `name_scope`."""
  logging.info(f'ema_decay with rate {params.ema_decay}')
  all_vars = tf.global_variables()
  ema_ops = []
  # Warm-up schedule: before `ema_start` the decay is 1 (shadow == variable);
  # afterwards it ramps toward 1 - ema_decay via (step+1)/(step+10).
  step = tf.cast(tf.train.get_or_create_global_step() - params.ema_start,
                 tf.float32)
  decay = 1. - tf.minimum(params.ema_decay, (step+1.) / (step+10.))
  decay = tf.cond(tf.train.get_or_create_global_step() < params.ema_start,
                  lambda: tf.constant(1, tf.float32), lambda: decay)

  def should_skip(v):
    # Optimizer slots and bookkeeping variables get no shadow.
    key_words = ['momentum', 'rms', 'global_step', 'debug', 'adam', 'lars']
    conditions = [k in v.name.lower() for k in key_words]
    if name_scope is not None:
      conditions += [not v.name.lower().startswith(name_scope)]
    return any(conditions)

  def get_init(v_name):
    # Shadows for variance/beta start at 1, everything else at 0 —
    # presumably to match batch-norm statistics defaults; confirm.
    key_words = ['variance', 'beta']
    if any([k in v_name for k in key_words]):
      return tf.initializers.ones()
    return tf.initializers.zeros()

  with tf.variable_scope('ema'):
    for v in all_vars:
      if not should_skip(v):
        v_name = strip_var_name(v.name)
        with tf.device(v.device):
          ema_var = tf.get_variable(
              name=v_name,
              shape=v.shape.as_list(),
              initializer=get_init(v_name),
              trainable=False)
        # In-place EMA update: ema -= decay * (ema - v).
        ema_op = tf.assign_sub(ema_var, decay * (ema_var-v), use_locking=True)
        ema_ops.append(ema_op)
  ema_op = tf.group(*ema_ops)
  return ema_op
def get_session(params, isolate_session_state=True):
  """Builds and returns a `tf.Session`.

  Grappler rewrites are disabled (opt level L0, no CSE/inlining/constant
  folding) so the graph runs exactly as constructed.
  """
  config = tf.ConfigProto(
      isolate_session_state=isolate_session_state,
      allow_soft_placement=True,
      graph_options=tf.GraphOptions(
          optimizer_options=tf.OptimizerOptions(
              opt_level=tf.OptimizerOptions.L0,
              do_common_subexpression_elimination=False,
              do_function_inlining=False,
              do_constant_folding=False)))
  return tf.Session(target=params.master, config=config)
def get_learning_rate(params, initial_lr=None, num_warmup_steps=None,
                      num_wait_steps=None):
  """Build learning rate.

  Supports constant/exponential/cosine decay with linear warmup, linear
  batch-size scaling (lr * batch/256), and an optional initial wait period
  during which the learning rate is held at zero.
  """
  global_step = tf.train.get_or_create_global_step()
  if initial_lr is None:
    initial_lr = params.lr
  # Linear scaling rule: lr grows with the global batch size.
  initial_lr = initial_lr * params.train_batch_size / 256.
  if num_warmup_steps is None:
    num_warmup_steps = params.num_warmup_steps
  if num_wait_steps is not None:
    # Shift the schedule so step 0 is the end of the wait period.
    global_step = global_step - num_wait_steps

  if params.lr_decay_type == 'constant':
    lr = tf.constant(initial_lr, dtype=tf.float32)
  elif params.lr_decay_type == 'exponential':
    lr = tf.train.exponential_decay(
        learning_rate=initial_lr,
        global_step=global_step-num_warmup_steps,
        decay_steps=params.num_decay_steps,
        decay_rate=params.lr_decay_rate,
        staircase=True)
  elif params.lr_decay_type == 'cosine':
    if num_wait_steps is None:
      lr = tf.train.cosine_decay(
          learning_rate=initial_lr,
          global_step=global_step-num_warmup_steps,
          decay_steps=params.num_train_steps-num_warmup_steps,
          alpha=0.0)
    else:
      lr = tf.train.cosine_decay(
          learning_rate=initial_lr,
          global_step=global_step-num_warmup_steps,
          decay_steps=params.num_train_steps-num_warmup_steps-num_wait_steps,
          alpha=0.0)
  else:
    raise ValueError(f'Unknown lr_decay_type `{params.lr_decay_type}`')

  # Linear warmup from 0 to initial_lr over num_warmup_steps.
  r = (tf.cast(global_step+1, tf.float32) /
       tf.cast(num_warmup_steps, tf.float32))
  warmup_lr = initial_lr * r
  lr = tf.cond(global_step < num_warmup_steps, lambda: warmup_lr, lambda: lr)

  # During the wait period the (shifted) step is negative: hold lr at 0.
  if num_wait_steps is not None:
    lr = tf.cond(global_step < 0,
                 lambda: tf.constant(0., tf.float32), lambda: lr)

  return lr
def get_optimizer(params, learning_rate=None):
"""Build optimizer."""
if learning_rate is None:
learning_rate = get_learning_rate(params)
if params.optim_type.lower() == 'sgd':
logging.info('Use SGD')
optimizer = tf.train.GradientDescentOptimizer(learning_rate=learning_rate,
use_locking=True)
elif params.optim_type.lower() == 'momentum':
logging.info('Use Momentum')
optimizer = tf.train.MomentumOptimizer(learning_rate=learning_rate,
momentum=0.9,
use_nesterov=True,
use_locking=True)
elif params.optim_type.lower() == 'rmsprop':
optimizer = tf.train.RMSPropOptimizer(learning_rate=learning_rate,
decay=params.rmsprop_rho,
momentum=params.rmsprop_momentum,
epsilon=params.rmsprop_epsilon,
use_locking=True)
elif params.optim_type.lower() == 'lars':
class LARSOptimizer(tf.train.Optimizer):
"""Layer-wise Adaptive Rate Scaling for large batch training.
Introduced by "Large Batch Training of Convolutional Networks" by Y. You,
I. Gitman, and B. Ginsburg. (https://arxiv.org/abs/1708.03888)
Implements the LARS learning rate scheme presented in the paper above.
This optimizer is useful when scaling the batch size to up to 32K without
significant performance degradation. It is recommended to use the
optimizer in conjunction with:
- Gradual learning rate warm-up
- Linear learning rate scaling
- Poly rule learning rate decay
Note, LARS scaling is currently only enabled for dense tensors. Sparse
tensors use the default momentum optimizer.
"""
def __init__(
self,
learning_rate,
momentum=0.9,
weight_decay=0.0001,
# The LARS coefficient is a hyperparameter
eeta=0.001,
epsilon=0.0,
name='LARSOptimizer',
# Enable skipping variables from LARS scaling.
# TODO(sameerkm): Enable a direct mechanism to pass a
# subset of variables to the optimizer.
skip_list=None,
use_nesterov=False):
"""Construct a new LARS Optimizer.
Args:
learning_rate: A `Tensor` or floating point value.
momentum: A floating point value. Momentum hyperparameter.
weight_decay: A floating point value. Weight decay hyperparameter.
eeta: LARS coefficient as used in the paper. Dfault set to LARS
coefficient from the paper. (eeta / weight_decay) determines the
highest scaling factor in LARS.
epsilon: Optional epsilon parameter to be set in models that have very
small gradients. Default set to 0.0.
name: Optional name prefix for variables and ops created.
skip_list: List of strings to enable skipping variables from scaling.
If any of the strings in skip_list is a subset of var.name, variable
'var' is skipped from LARS scaling. For a typical classification
model with batch normalization, the skip_list is
['batch_normalization', 'bias']
use_nesterov: when set to True, nesterov momentum will be enabled
Raises:
ValueError: If a hyperparameter is set to a non-sensical value.
"""
if momentum < 0.0:
raise ValueError(f'momentum should be positive: {momentum}')
if weight_decay < 0.0:
raise ValueError(f'weight_decay should be positive: {weight_decay}')
super(LARSOptimizer, self).__init__(use_locking=False, name=name)
self._learning_rate = learning_rate
self._momentum = momentum
self._weight_decay = weight_decay
self._eeta = eeta
self._epsilon = epsilon
self._name = name
self._skip_list = skip_list
self._use_nesterov = use_nesterov
def _create_slots(self, var_list):
for v in var_list:
self._zeros_slot(v, 'momentum', self._name)
def compute_lr(self, grad, var):
scaled_lr = self._learning_rate
if self._skip_list is None or not any(v in var.name
for v in self._skip_list):
w_norm = tf.norm(var, ord=2)
g_norm = tf.norm(grad, ord=2)
trust_ratio = tf.where(
tf.math.greater(w_norm, 0),
tf.where(
tf.math.greater(g_norm, 0),
(self._eeta * w_norm / (
g_norm + self._weight_decay * w_norm + self._epsilon)),
1.0),
1.0)
scaled_lr = self._learning_rate * trust_ratio
# Add the weight regularization gradient
grad = grad + self._weight_decay * var
return scaled_lr, grad
def _apply_dense(self, grad, var):
scaled_lr, grad = self.compute_lr(grad, var)
mom = self.get_slot(var, 'momentum')
return tf.raw_ops.ApplyMomentum(
var,
mom,
tf.cast(1.0, var.dtype.base_dtype),
grad * scaled_lr,
self._momentum,
use_locking=False,
use_nesterov=self._use_nesterov)
def _resource_apply_dense(self, grad, var):
scaled_lr, grad = self.compute_lr(grad, var)
mom = self.get_slot(var, 'momentum')
return tf.raw_ops.ResourceApplyMomentum(
var=var.handle,
accum=mom.handle,
lr=tf.cast(1.0, var.dtype.base_dtype),
grad=grad * scaled_lr,
momentum=self._momentum,
use_locking=False,
use_nesterov=self._use_nesterov)
# Fallback to momentum optimizer for sparse tensors
def _apply_sparse(self, grad, var):
mom = self.get_slot(var, 'momentum')
return tf.raw_ops.SparseApplyMomentum(
var,
mom,
tf.cast(self._learning_rate_tensor, var.dtype.base_dtype),
grad.values,
grad.indices,
tf.cast(self._momentum_tensor, var.dtype.base_dtype),
use_locking=self._use_locking,
use_nesterov=self._use_nesterov).op
  def _resource_apply_sparse(self, grad, var, indices):
    """Resource-variable sparse update: plain momentum fallback, mirroring
    _apply_sparse (no LARS scaling, no weight decay)."""
    mom = self.get_slot(var, 'momentum')
    return tf.raw_ops.ResourceSparseApplyMomentum(
        var.handle,
        mom.handle,
        tf.cast(self._learning_rate_tensor, grad.dtype),
        grad,
        indices,
        tf.cast(self._momentum_tensor, grad.dtype),
        use_locking=self._use_locking,
        use_nesterov=self._use_nesterov)
  def _prepare(self):
    """Resolve possibly-callable hyperparameters into tensors before use.

    Both learning_rate and momentum may be schedules (callables); they are
    evaluated here and cached as `_learning_rate_tensor`/`_momentum_tensor`
    for the sparse apply paths.
    """
    learning_rate = self._learning_rate
    if callable(learning_rate):
      learning_rate = learning_rate()
    self._learning_rate_tensor = tf.convert_to_tensor(
        learning_rate, name='learning_rate')
    momentum = self._momentum
    if callable(momentum):
      momentum = momentum()
    self._momentum_tensor = tf.convert_to_tensor(momentum, name='momentum')
optimizer = LARSOptimizer(
learning_rate=learning_rate,
weight_decay=params.weight_decay,
skip_list=['batch_norm', 'batchnorm', 'gamma', 'beta', 'bias'],
use_nesterov=True)
else:
raise ValueError(f'Unknown optim_type `{params.optim_type}`')
return learning_rate, optimizer
def get_l2_loss(excluded_keywords=None):
  """Traverse `tf.trainable_variables` compute L2 reg. Ignore `batch_norm`."""
  # Keyword list is built once; normalization-layer variables are excluded
  # from weight decay by convention.
  skip_words = ['batchnorm', 'batch_norm', 'bn',
                'layernorm', 'layer_norm']
  if excluded_keywords is not None:
    skip_words = skip_words + list(excluded_keywords)

  def _is_excluded(v):
    """Guess whether a variable belongs to `batch_norm`."""
    lowered = v.name.lower()
    return any(w in lowered for w in skip_words)

  losses = [tf.nn.l2_loss(v) for v in tf.trainable_variables()
            if not _is_excluded(v)]
  return tf.add_n(losses)
|
google-research/google-research
|
differentiable_data_selection/common_utils.py
|
Python
|
apache-2.0
| 22,272
|
'''
DoctorSpaceBot by MadScotty
BEGIN LICENSE
By existing on the same mortal coil as the author of this software you hereby
allow the author, henceforth known as Dr. Awesomeweiner, to sleep on your couch,
watch your television, and use your microwave. By reading this license you agree
that Lord Satan isn't as bad as everyone says #fakenews
END LICENSE
'''
# Ship lookup module for DoctorSpaceBot
# All data pulled from the Star Citizen Wiki at http://starcitizen.tools
# It will return a message for the bot to say and it will be called by client.send_message(message,channel, ship_lookup(message.content))
import asyncio
import aiohttp
import discord
from async_timeout import timeout
from bs4 import BeautifulSoup as soup # BeautifulSoup is too much to type. Who has time for the shift key?
# Other globals
# Cached list of <a> tags scraped from the Category:Ships index page
ship_index = []
# Set True whenever a lookup/scrape step fails; inspected by the bot caller
has_error = False
# Grab the ship name from the index
async def ship_finder(ship_name):
    """Look up `ship_name` in the wiki ship index and build a Discord embed.

    Returns:
        discord.Embed on success, None when no index entry matches,
        -1 when the ship page scrape failed (has_error is set), or a plain
        error string when the index itself could not be loaded.
    """
    global ship_index
    global has_error
    has_error = False
    ship_name = ship_name.lower()
    await get_ship_index()
    # Neat boolean trick to see if list is empty
    if not ship_index:
        print("Ship lookup borked. ship_index is empty")
        return "Something borked with the ship lookup. Scotty has been notified"
    # Find the first index entry containing the requested name.
    # BUG FIX: the original loop ran range(0, len(ship_index) - 1) and could
    # therefore never match the last entry of the index.
    ship_index_location = -1
    for i, entry in enumerate(ship_index):
        if ship_name in str(entry).lower():
            ship_index_location = i
            break
    if ship_index_location == -1:
        return
    # Separate the href from the rest of the tag. Entry looks like:
    # <a href="https://starcitizen.tools/300i" title="300i">300i</a>
    raw_link = str(ship_index[ship_index_location])
    trim_start = raw_link[9:]  # Trim <a href=" from the beginning
    end_quote = trim_start.find('"')
    link = trim_start[:end_quote]  # Trim the rest of the fat
    ship_info = await parse_ship_info(link)
    if ship_info == -1:
        has_error = True
        return -1
    return make_table(ship_info)
# Parse data from the Category:Ships page into a list of links
async def get_ship_index():
    """Populate the global `ship_index` with <a> tags from Category:Ships.

    On network/HTTP failure sets `has_error` and returns -1.
    """
    global ship_index
    global has_error
    # Pull rendered version of page
    with timeout(10):
        async with aiohttp.ClientSession() as sesh:
            async with sesh.get('http://starcitizen.tools/Category:Ships?action=render') as page:
                try:
                    assert page.status == 200
                    raw_page = await page.text()
                except:
                    # BUG FIX: the status code was previously concatenated to
                    # print()'s return value (None + str -> TypeError).
                    print("Failed to load Category:Ships with status code " + str(page.status))
                    has_error = True
                    return -1
    # Pass to BeautifulSoup, scrape just the table, keep its hyperlinks.
    # Example item:
    # <a href="https://starcitizen.tools/300i" title="300i">300i</a>
    souped = soup(raw_page, 'html.parser')
    ship_index = souped.table.find_all('a')
    # The Category:Ships page also carries a link to the Alpha 3.0 page.
    # BUG FIX: the original popped items while iterating a stale range
    # (shifting indices and skipping the last element); rebuild instead.
    ship_index = [entry for entry in ship_index if '3.0' not in str(entry)]
# Pull info from the infobox on the ship page
async def parse_ship_info(link):
    """Fetch a ship's wiki page and scrape its infobox into a dict.

    Returns a dict of string fields (value -1 for any field that is missing
    from the infobox), or -1 when the page could not be loaded (sets
    `has_error`).
    """
    global has_error
    rendered_link = link + "?action=render"
    # Pull rendered version of page
    with timeout(10):
        async with aiohttp.ClientSession() as sesh:
            async with sesh.get(rendered_link) as page:
                try:
                    assert page.status == 200
                    raw_page = await page.text()
                except:
                    # BUG FIX: the status code was previously concatenated to
                    # print()'s return value (None + str -> TypeError).
                    print("Failed to load ship page " + link + " with status code " + str(page.status))
                    has_error = True
                    return -1
    # Pass to BeautifulSoup then scrape the infobox table rows
    souped = soup(raw_page, 'html.parser')
    infobox = souped.table.find_all('tr')

    def field(label):
        """Text of the LAST infobox row containing `label`, with the label
        prefix stripped; -1 if no row matches. Mirrors the original logic,
        which kept scanning and used the final match."""
        found = ""
        for row in infobox:
            if label in row.text:
                found = row.text
        if found == "":
            return -1
        return found[len(label):]

    # Info dict (focus must start as "" for the Primary/secondary logic below)
    ship_info = {"link": link,
                 "img_link": "",
                 "name": "",
                 "manf": "",
                 "focus": "",
                 "prod_state": "",
                 "crew": "",
                 "cargo": "",
                 "price": "",
                 "mass": "",
                 "speed": "",
                 "ab_speed": "",
                 "length": "",
                 "height": "",
                 "beam": "",
                 }
    # -------------------------------------------------
    # Parse data from infobox
    # -------------------------------------------------
    # Image link: pulled out of the first row's src="..." attribute
    infobox_item = str(infobox[0])
    start = infobox_item.find('src')
    end = infobox_item.find('"', start + 5, len(infobox_item))
    if start == -1 or end == -1:
        ship_info['img_link'] = -1
    else:
        ship_info["img_link"] = "https://starcitizen.tools/" + infobox_item[start + 6:end]
    # Ship name is the second row of the infobox
    ship_info["name"] = infobox[1].text
    # Ship manufacturer: strip the label, then cut at the non-breaking space
    manf = field("Manufacturer")
    if manf == -1:
        ship_info["manf"] = -1
    else:
        ship_info["manf"] = manf[:manf.find('\xa0')]
        # Because MISC breaks the damn formatting
        if ship_info["manf"] == "Musashi Industrial and Starflight Concern":
            ship_info["manf"] = "MISC"
    # Ship focus — ships with a primary/secondary focus need the
    # "Primary Focus" row; otherwise the last plain "Focus" row wins.
    infobox_item = ""
    for row in infobox:
        if "Primary Focus" in row.text:
            infobox_item = row.text
            ship_info['focus'] = infobox_item[13:]
            break
        elif "Focus" in row.text:
            infobox_item = row.text
    if infobox_item == "":
        ship_info["focus"] = -1
    elif ship_info['focus'] == "":
        ship_info["focus"] = infobox_item[5:]
    # Remaining fields all follow the same "strip label prefix" pattern.
    for key, label in (("prod_state", "Production State"),
                       ("crew", "Maximum Crew"),
                       ("cargo", "Cargo Capacity"),
                       ("price", "Pledge Cost"),
                       ("mass", "Null-cargo Mass"),
                       ("speed", "Max. SCM Speed"),
                       ("ab_speed", "Max. Afterburner Speed"),
                       ("length", "Length"),
                       ("height", "Height"),
                       ("beam", "Beam")):
        ship_info[key] = field(label)
    return ship_info
# Prettifies data using a Discord embed object
def make_table(ship_info):
    """Prettify the scraped ship data as a Discord embed.

    Any field whose value is -1 (missing on the wiki) is skipped.
    """
    ship_embed = discord.Embed(title=ship_info['name'], url=str(ship_info['link']),
                               description="--------------------", color=0x23af40,)
    # (dict key, embed field title) in display order
    display_fields = (("manf", "Manufacturer"),
                      ("focus", "Focus"),
                      ("prod_state", "Production State"),
                      ("crew", "Crew"),
                      ("cargo", "Cargo"),
                      ("mass", "Null-cargo Mass"),
                      ("speed", "Max SCM Speed"),
                      ("ab_speed", "Max AFB Speed"),
                      ("price", "Pledge Cost"),
                      ("length", "Length"),
                      ("height", "Height"),
                      ("beam", "Beam"))
    for key, title in display_fields:
        if ship_info[key] != -1:
            ship_embed.add_field(name=title, value=ship_info[key], inline=True)
    if ship_info["img_link"] != -1:
        ship_embed.set_image(url=str(ship_info['img_link']))
    return ship_embed
|
MadScotty/DoctorSpaceBot
|
ship_lookup.py
|
Python
|
gpl-3.0
| 10,342
|
from dft import common
def define():
    """Problem definition: 3D DFT setup requesting seven eigenvalues."""
    return common(dim=3, n_eigs=7)
def fun_v( ts, coor, region, ig, mode = None, vhxc = None ):
    """Potential material function: Coulomb term -C*5/r plus optional vhxc.

    coor is an (n, 3) array of point coordinates; returns {'V': values}.
    """
    import numpy as nm
    if vhxc is None:
        vhxc = 0.0
    C = 0.5
    radii = nm.sqrt(coor[:, 0] ** 2 + coor[:, 1] ** 2 + coor[:, 2] ** 2)
    coulomb = -C * 5.0 / radii
    return {'V': vhxc + coulomb}
|
certik/sfepy
|
input/quantum/dft3d.py
|
Python
|
bsd-3-clause
| 371
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron_lbaas.tests.tempest.lib.services.volume.json import snapshots_client
class SnapshotsV2ClientJSON(snapshots_client.BaseSnapshotsClientJSON):
    """Client class to send CRUD Volume V2 API requests."""
    # URL prefix used by the base client when building request paths
    api_version = "v2"
    # Volume v2 answers snapshot creation with 202 Accepted
    create_resp = 202
|
gandelman-a/neutron-lbaas
|
neutron_lbaas/tests/tempest/lib/services/volume/v2/json/snapshots_client.py
|
Python
|
apache-2.0
| 833
|
#
# spyne - Copyright (C) Spyne contributors.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301
#
"""The HTTP (urllib2) client transport."""
from spyne import RemoteService, ClientBase, RemoteProcedureBase
from spyne.util.six.moves.urllib.request import Request, urlopen
from spyne.util.six.moves.urllib.error import HTTPError
class _RemoteProcedure(RemoteProcedureBase):
    """Callable proxy for a single remote procedure invoked over HTTP."""

    def __call__(self, *args, **kwargs):
        # there's no point in having a client making the same request more than
        # once, so if there's more than just one context, it is a bug.
        # the comma-in-assignment trick is a general way of getting the first
        # and the only variable from an iterable. so if there's more than one
        # element in the iterable, it'll fail miserably.
        self.ctx, = self.contexts

        # sets ctx.out_object
        self.get_out_object(self.ctx, args, kwargs)

        # sets ctx.out_string
        self.get_out_string(self.ctx)

        out_string = b''.join(self.ctx.out_string) # FIXME: just send the iterable to the http stream.
        request = Request(self.url, out_string)
        code = 200
        try:
            response = urlopen(request)
            self.ctx.in_string = [response.read()]
        except HTTPError as e:
            code = e.code
            self.ctx.in_string = [e.read()]

        # this sets ctx.in_error if there's an error, and ctx.in_object if
        # there's none.
        self.get_in_object(self.ctx)

        if not (self.ctx.in_error is None):
            raise self.ctx.in_error
        elif code >= 400:
            # BUG FIX: this branch previously re-raised ctx.in_error, which is
            # known to be None here (the first branch handled the non-None
            # case) — `raise None` is itself a TypeError. Raise a meaningful
            # error carrying the HTTP status instead.
            raise RuntimeError(
                "HTTP error %d with no parsable fault in response" % code)
        else:
            return self.ctx.in_object
class HttpClient(ClientBase):
    """Spyne client that transports calls as plain HTTP requests (urllib)."""
    def __init__(self, url, app):
        super(HttpClient, self).__init__(url, app)
        # Exposes each remote procedure as an attribute backed by a
        # _RemoteProcedure bound to `url`.
        self.service = RemoteService(_RemoteProcedure, url, app)
|
arskom/spyne
|
spyne/client/http.py
|
Python
|
lgpl-2.1
| 2,536
|
"""Undocumented Module"""
__all__ = ['DirectObject']
from direct.directnotify.DirectNotifyGlobal import directNotify
from .MessengerGlobal import messenger
class DirectObject:
    """
    This is the class that all Direct/SAL classes should inherit from.

    Provides thin wrappers around the global messenger (event
    accept/ignore) and the global task manager; managed tasks are tracked
    per instance in self._taskList (keyed by task id).
    """
    def __init__(self):
        pass

    # Wrapper functions to have a cleaner, more object oriented approach to
    # the messenger functionality.
    def accept(self, event, method, extraArgs=None):
        # BUG FIX: the default used to be a mutable [] shared by all calls;
        # substitute a fresh list per call instead.
        if extraArgs is None:
            extraArgs = []
        return messenger.accept(event, self, method, extraArgs, 1)

    def acceptOnce(self, event, method, extraArgs=None):
        # Same mutable-default fix as accept(); persistent flag is 0 here.
        if extraArgs is None:
            extraArgs = []
        return messenger.accept(event, self, method, extraArgs, 0)

    def ignore(self, event):
        return messenger.ignore(event, self)

    def ignoreAll(self):
        return messenger.ignoreAll(self)

    def isAccepting(self, event):
        return messenger.isAccepting(event, self)

    def getAllAccepting(self):
        return messenger.getAllAccepting(self)

    def isIgnoring(self, event):
        return messenger.isIgnoring(event, self)

    #This function must be used if you want a managed task
    def addTask(self, *args, **kwargs):
        if(not hasattr(self,"_taskList")):
            self._taskList = {}
        kwargs['owner']=self
        task = taskMgr.add(*args, **kwargs)
        return task

    def doMethodLater(self, *args, **kwargs):
        if(not hasattr(self,"_taskList")):
            self._taskList ={}
        kwargs['owner']=self
        task = taskMgr.doMethodLater(*args, **kwargs)
        return task

    def removeTask(self, taskOrName):
        """Remove a managed task either by name (string) or by task object."""
        # BUG FIX (idiom): was `type(taskOrName) == type('')`
        if isinstance(taskOrName, str):
            # we must use a copy, since task.remove will modify self._taskList
            if hasattr(self, '_taskList'):
                taskListValues = list(self._taskList.values())
                for task in taskListValues:
                    if task.name == taskOrName:
                        task.remove()
        else:
            taskOrName.remove()

    def removeAllTasks(self):
        if hasattr(self,'_taskList'):
            for task in list(self._taskList.values()):
                task.remove()

    def _addTask(self, task):
        # Called back by the task system to register an owned task.
        self._taskList[task.id] = task

    def _clearTask(self, task):
        # Called back by the task system when an owned task ends.
        del self._taskList[task.id]

    def detectLeaks(self):
        # NOTE(review): relies on panda3d-injected globals (__dev__, choice,
        # getRepository) that are not defined in this file — left unchanged.
        if not __dev__:
            return
        # call this after the DirectObject instance has been destroyed
        # if it's leaking, will notify user
        # make sure we're not still listening for messenger events
        events = messenger.getAllAccepting(self)
        # make sure we're not leaking tasks
        # TODO: include tasks that were added directly to the taskMgr
        tasks = []
        if hasattr(self, '_taskList'):
            tasks = [task.name for task in self._taskList.values()]
        if len(events) or len(tasks):
            estr = choice(len(events), 'listening to events: %s' % events, '')
            andStr = choice(len(events) and len(tasks), ' and ', '')
            tstr = choice(len(tasks), '%srunning tasks: %s' % (andStr, tasks), '')
            notify = directNotify.newCategory('LeakDetect')
            func = choice(getRepository()._crashOnProactiveLeakDetect,
                          self.notify.error, self.notify.warning)
            func('destroyed %s instance is still %s%s' % (self.__class__.__name__, estr, tstr))
|
tobspr/panda3d
|
direct/src/showbase/DirectObject.py
|
Python
|
bsd-3-clause
| 3,514
|
var_3 = var_3
var_4 = 1
<weak_warning descr="Assignment can be replaced with augmented assignment">var_6 = var_6 + var_4</weak_warning>
#PY-2482
<weak_warning descr="Assignment can be replaced with augmented assignment">var = 2 + var</weak_warning>
|
asedunov/intellij-community
|
python/testData/inspections/PyAugmentAssignmentInspection/numeric.py
|
Python
|
apache-2.0
| 252
|
"""
"""
from DIRAC import S_OK, S_ERROR
from DIRAC.Core.Utilities.CFG import CFG
from DIRAC.Core.Utilities import List
from DIRAC.ConfigurationSystem.Client.Helpers.Operations import Operations
from DIRAC.Core.Utilities.JDL import loadJDLAsCFG, dumpCFGAsJDL
from DIRAC.WorkloadManagementSystem.Agent.SiteDirector import getSubmitPools
class JobManifest(object):
    """Job description manifest backed by a CFG tree.

    Loads from / dumps to both DIRAC CFG and JDL formats, validates job
    parameters against Operations() configuration limits, and tracks a
    dirty flag recording whether the manifest was modified.
    """

    def __init__(self, manifest=""):
        # manifest: optional JDL or CFG string; format auto-detected in load()
        self.__manifest = CFG()
        self.__dirty = False
        self.__ops = False  # Operations helper, created lazily in __getCSValue
        if manifest:
            result = self.load(manifest)
            if not result['OK']:
                raise Exception(result['Message'])

    def isDirty(self):
        # True if the manifest was modified since the last clearDirty()
        return self.__dirty

    def setDirty(self):
        self.__dirty = True

    def clearDirty(self):
        self.__dirty = False

    def load(self, dataString):
        """
        Auto discover format type based on [ .. ] of JDL
        """
        dataString = dataString.strip()
        if dataString[0] == "[" and dataString[-1] == "]":
            return self.loadJDL(dataString)
        else:
            return self.loadCFG(dataString)

    def loadJDL(self, jdlString):
        """
        Load job manifest from JDL format
        """
        result = loadJDLAsCFG(jdlString.strip())
        if not result['OK']:
            # keep an empty manifest on parse failure
            self.__manifest = CFG()
            return result
        self.__manifest = result['Value'][0]
        return S_OK()

    def loadCFG(self, cfgString):
        """
        Load job manifest from CFG format
        """
        try:
            self.__manifest.loadFromBuffer(cfgString)
        except Exception as e:
            return S_ERROR("Can't load manifest from cfg: %s" % str(e))
        return S_OK()

    def dumpAsCFG(self):
        # Serialize the manifest in CFG text format
        return str(self.__manifest)

    def getAsCFG(self):
        # Return a deep copy so callers cannot mutate the internal CFG
        return self.__manifest.clone()

    def dumpAsJDL(self):
        # Serialize the manifest in JDL text format
        return dumpCFGAsJDL(self.__manifest)

    def __getCSValue(self, varName, defaultVal=None):
        # Resolve a value from the Operations configuration for this job's
        # owner group/setup; bare names are looked up under JobDescription/.
        if not self.__ops:
            self.__ops = Operations(group=self.__manifest['OwnerGroup'], setup=self.__manifest['DIRACSetup'])
        if varName[0] != "/":
            varName = "JobDescription/%s" % varName
        return self.__ops.getValue(varName, defaultVal)

    def __checkNumericalVar(self, varName, defaultVal, minVal, maxVal):
        """
        Check a numerical var

        Falls back to CS Default<varName> when absent, then clamps into
        [Min<varName>, Max<varName>] and writes the value back if changed.
        """
        initialVal = False
        if varName not in self.__manifest:
            varValue = self.__getCSValue("Default%s" % varName, defaultVal)
        else:
            varValue = self.__manifest[varName]
            initialVal = varValue
        try:
            varValue = long(varValue)  # NOTE: Python 2 `long`
        except BaseException:
            return S_ERROR("%s must be a number" % varName)
        minVal = self.__getCSValue("Min%s" % varName, minVal)
        maxVal = self.__getCSValue("Max%s" % varName, maxVal)
        # clamp into [minVal, maxVal]
        varValue = max(minVal, min(varValue, maxVal))
        if initialVal != varValue:
            self.__manifest.setOption(varName, varValue)
        return S_OK(varValue)

    def __checkChoiceVar(self, varName, defaultVal, choices):
        """
        Check a choice var

        Value must be one of CS Choices<varName> (or `choices`).
        """
        initialVal = False
        if varName not in self.__manifest:
            varValue = self.__getCSValue("Default%s" % varName, defaultVal)
        else:
            varValue = self.__manifest[varName]
            initialVal = varValue
        if varValue not in self.__getCSValue("Choices%s" % varName, choices):
            return S_ERROR("%s is not a valid value for %s" % (varValue, varName))
        if initialVal != varValue:
            self.__manifest.setOption(varName, varValue)
        return S_OK(varValue)

    def __checkMultiChoice(self, varName, choices):
        """
        Check a multi choice var

        Every comma-separated element must be an allowed choice.
        """
        initialVal = False
        if varName not in self.__manifest:
            return S_OK()
        else:
            varValue = self.__manifest[varName]
            initialVal = varValue
        choices = self.__getCSValue("Choices%s" % varName, choices)
        for v in List.fromChar(varValue):
            if v not in choices:
                return S_ERROR("%s is not a valid value for %s" % (v, varName))
        if initialVal != varValue:
            self.__manifest.setOption(varName, varValue)
        return S_OK(varValue)

    def __checkMaxInputData(self, maxNumber):
        """
        Check Maximum Number of Input Data files allowed
        """
        varName = "InputData"
        if varName not in self.__manifest:
            return S_OK()
        varValue = self.__manifest[varName]
        if len(List.fromChar(varValue)) > maxNumber:
            return S_ERROR('Number of Input Data Files (%s) greater than current limit: %s' %
                           (len(List.fromChar(varValue)), maxNumber))
        return S_OK()

    def __contains__(self, key):
        """ Check if the manifest has the required key
        """
        return key in self.__manifest

    def setOptionsFromDict(self, varDict):
        # Sorted for deterministic insertion order
        for k in sorted(varDict):
            self.setOption(k, varDict[k])

    def check(self):
        """
        Check that the manifest is OK

        Validates required ownership vars, numerical limits, submit pools,
        pilot types, input-data count and job type against the CS.
        """
        for k in ['OwnerName', 'OwnerDN', 'OwnerGroup', 'DIRACSetup']:
            if k not in self.__manifest:
                return S_ERROR("Missing var %s in manifest" % k)
        # Check CPUTime
        result = self.__checkNumericalVar("CPUTime", 86400, 100, 500000)
        if not result['OK']:
            return result
        result = self.__checkNumericalVar("Priority", 1, 0, 10)
        if not result['OK']:
            return result
        allowedSubmitPools = getSubmitPools(self.__manifest['OwnerGroup'])
        result = self.__checkMultiChoice("SubmitPools", list(set(allowedSubmitPools)))
        if not result['OK']:
            return result
        result = self.__checkMultiChoice("PilotTypes", ['private'])
        if not result['OK']:
            return result
        maxInputData = Operations().getValue("JobDescription/MaxInputData", 500)
        result = self.__checkMaxInputData(maxInputData)
        if not result['OK']:
            return result
        operation = Operations(group=self.__manifest['OwnerGroup'])
        allowedJobTypes = operation.getValue("JobDescription/AllowedJobTypes", ['User', 'Test', 'Hospital'])
        transformationTypes = operation.getValue("Transformations/DataProcessing", [])
        result = self.__checkMultiChoice("JobType", allowedJobTypes + transformationTypes)
        if not result['OK']:
            return result
        return S_OK()

    def createSection(self, secName, contents=False):
        # Create a new section; fails if it already exists
        if secName not in self.__manifest:
            if contents and not isinstance(contents, CFG):
                return S_ERROR("Contents for section %s is not a cfg object" % secName)
            self.__dirty = True
            return S_OK(self.__manifest.createNewSection(secName, contents=contents))
        return S_ERROR("Section %s already exists" % secName)

    def getSection(self, secName):
        # NOTE: marks the manifest dirty because the returned section is
        # mutable and may be modified by the caller
        self.__dirty = True
        if secName not in self.__manifest:
            return S_ERROR("%s does not exist" % secName)
        sec = self.__manifest[secName]
        if not sec:
            return S_ERROR("%s section empty" % secName)
        return S_OK(sec)

    def setSectionContents(self, secName, contents):
        # Replace (or create) a section's contents with the given CFG
        if contents and not isinstance(contents, CFG):
            return S_ERROR("Contents for section %s is not a cfg object" % secName)
        self.__dirty = True
        if secName in self.__manifest:
            self.__manifest[secName].reset()
            self.__manifest[secName].mergeWith(contents)
        else:
            self.__manifest.createNewSection(secName, contents=contents)

    def setOption(self, varName, varValue):
        """
        Set a var in job manifest

        Slash-separated names create intermediate sections as needed.
        """
        self.__dirty = True
        levels = List.fromChar(varName, "/")
        cfg = self.__manifest
        for l in levels[:-1]:
            if l not in cfg:
                cfg.createNewSection(l)
            cfg = cfg[l]
        cfg.setOption(levels[-1], varValue)

    def remove(self, opName):
        # Delete an option (slash-separated path); errors if absent
        levels = List.fromChar(opName, "/")
        cfg = self.__manifest
        for l in levels[:-1]:
            if l not in cfg:
                return S_ERROR("%s does not exist" % opName)
            cfg = cfg[l]
        if cfg.deleteKey(levels[-1]):
            self.__dirty = True
            return S_OK()
        return S_ERROR("%s does not exist" % opName)

    def getOption(self, varName, defaultValue=None):
        """
        Get a variable from the job manifest
        """
        cfg = self.__manifest
        return cfg.getOption(varName, defaultValue)

    def getOptionList(self, section=""):
        """
        Get a list of variables in a section of the job manifest
        """
        cfg = self.__manifest.getRecursive(section)
        if not cfg or 'value' not in cfg:
            return []
        cfg = cfg['value']
        return cfg.listOptions()

    def isOption(self, opName):
        """
        Check if it is a valid option
        """
        return self.__manifest.isOption(opName)

    def getSectionList(self, section=""):
        """
        Get a list of sections in the job manifest
        """
        cfg = self.__manifest.getRecursive(section)
        if not cfg or 'value' not in cfg:
            return []
        cfg = cfg['value']
        return cfg.listSections()
|
andresailer/DIRAC
|
WorkloadManagementSystem/Client/JobState/JobManifest.py
|
Python
|
gpl-3.0
| 8,502
|
#(C) 2018 Muthiah Annamalai
# This file is part of Open-Tamil project
# You may use or distribute this file under terms of MIT license
class Tree:
    """
    Node of a binary Huffman tree.
    Ref: https://www2.cs.duke.edu/csed/poop/huff/info/
    """
    def __init__(self, val, prob):
        self.value = val
        self.prob = prob
        self.left = None
        self.right = None

    @staticmethod
    def make(leftTree, rightTree):
        """Join two subtrees; the lighter subtree becomes the left child."""
        # The value string is formed from the probabilities in call order,
        # before any reordering of the children.
        merged = Tree('%s%s' % (leftTree.prob, rightTree.prob),
                      leftTree.prob + rightTree.prob)
        if leftTree.prob >= rightTree.prob:
            lighter, heavier = rightTree, leftTree
        else:
            lighter, heavier = leftTree, rightTree
        merged.left = lighter
        merged.right = heavier
        return merged
def huffman_reduce(treelist):
    """One merge step of Huffman coding: join the two lowest-probability
    trees in `treelist` in place (the merged tree is appended)."""
    if len(treelist) < 2:
        return
    if len(treelist) == 2:
        merged = Tree.make(treelist[0], treelist[1])
        treelist[:] = [merged]
        return

    def _pop_min():
        # Extract the first occurrence of the minimum-probability node.
        probs = [t.prob for t in treelist]
        k = probs.index(min(probs))
        node = treelist[k]
        del treelist[k]
        return node

    first = _pop_min()
    second = _pop_min()
    treelist.append(Tree.make(first, second))
def huffman_get_codes(codes, tree, sym=None, pfx='', level=0):
    """Recursively fill `codes` with symbol -> bitstring mappings.

    Descending a left edge flips the current bit (1 - sym); a right edge
    keeps it. Codes are emitted at leaf nodes only; the root's own bit
    (level 0) is not appended to the prefix.
    """
    if not tree.left and not tree.right:
        # leaf: record the accumulated prefix plus the final bit
        codes[tree.value] = pfx + '%d' % sym
        return
    if level > 0:
        pfx += '%d' % sym
    if tree.left:
        huffman_get_codes(codes, tree.left, 1 - sym, pfx, level + 1)
    if tree.right:
        huffman_get_codes(codes, tree.right, sym, pfx, level + 1)
def huffman(v, p):
    """ v - list of symbols. p - list of corresponding probabilities for symbol """
    assert sum(p) >= 0.99, "Sum of p = %g" % sum(p)
    forest = [Tree(sym, prob) for sym, prob in zip(v, p)]
    # Repeatedly merge the two lightest trees until one root remains.
    while len(forest) > 1:
        huffman_reduce(forest)
    assert len(forest) == 1
    codes = {}
    huffman_get_codes(codes, forest[0], 0, '')
    return codes, forest[0]
def print_huffman_code_cwl(code, p, v):
    """ code - code dictionary with symbol -> code map, p, v is probability map """
    cwl = 0.0
    for sym, bits in code.items():
        print(u"%s -> %s" % (sym, bits))
        # expected codeword length contribution: P(sym) * len(code(sym))
        cwl += p[v.index(sym)] * len(bits)
    print(u"cwl = %g" % cwl)
    return cwl, code.values()
#examples
def __demo__():
    """Sanity checks: a uniform 8-symbol alphabet yields cwl 3; a skewed
    4-symbol alphabet yields cwl 1.85."""
    probs = [0.125 for i in range(0, 8)]
    syms = ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h']
    code, _ = huffman(syms, probs)
    cwl, codelist = print_huffman_code_cwl(code, probs, syms)
    assert cwl == 3
    probs = [0.4, 0.35, 0.2, 0.05]
    syms = ['a', 'b', 'c', 'd']
    code, _ = huffman(syms, probs)
    cwl, codelist = print_huffman_code_cwl(code, probs, syms)
    assert cwl == 1.85

if __name__ == u"__main__":
    __demo__()
|
Ezhil-Language-Foundation/open-tamil
|
tamilmorse/huffman.py
|
Python
|
mit
| 2,966
|
# -*- coding: UTF-8 -*-
"""
Package-wide constants.
"""
# Single-letter option-type codes used throughout the package.
CALL = 'C'  # call option
PUT = 'P'   # put option
|
zzzoidberg/landscape
|
finance/consts.py
|
Python
|
mit
| 78
|
# -*- coding: utf-8 -*-
""" S3 Synchronization
@copyright: 2011-15 (c) Sahana Software Foundation
@license: MIT
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
import sys
import urllib, urllib2
import datetime
import time
import traceback
try:
from cStringIO import StringIO # Faster, where available
except:
from StringIO import StringIO
try:
from lxml import etree
except ImportError:
print >> sys.stderr, "ERROR: lxml module needed for XML handling"
raise
try:
import json # try stdlib (Python 2.6)
except ImportError:
try:
import simplejson as json # try external module
except:
import gluon.contrib.simplejson as json # fallback to pure-Python module
from gluon import *
from gluon.storage import Storage
from s3rest import S3Method
from s3import import S3ImportItem
from s3query import S3URLQuery
from s3utils import s3_unicode
# Module-level debug switch: when True, _debug() echoes messages to stderr;
# when False, _debug is a no-op so call sites stay cheap.
DEBUG = False
if DEBUG:
    print >> sys.stderr, "S3SYNC: DEBUG MODE"
    def _debug(m):
        print >> sys.stderr, m
else:
    _debug = lambda m: None
# =============================================================================
class S3Sync(S3Method):
""" Synchronization Handler """
# -------------------------------------------------------------------------
    def __init__(self):
        """ Constructor """
        S3Method.__init__(self)
        # Shared sync log used by all handlers on this instance
        self.log = S3SyncLog()
# -------------------------------------------------------------------------
    def apply_method(self, r, **attr):
        """
        RESTful method handler (repository/sync, repository/register)

        @param r: the S3Request instance
        @param attr: controller attributes for the request

        @return: output dict for the view; any unsupported method/verb
                 combination raises a 405 via r.error()
        """
        output = dict()
        if r.method == "sync":
            if r.http == "GET":
                # Incoming pull
                output = self.__send(r, **attr)
            elif r.http in ("PUT", "POST"):
                # Incoming push
                output = self.__receive(r, **attr)
            else:
                r.error(405, current.ERROR.BAD_METHOD)
        elif r.name == "repository" and r.method == "register":
            if r.http == "GET":
                # Incoming registration request
                output = self.__register(r, **attr)
            else:
                r.error(405, current.ERROR.BAD_METHOD)
        else:
            r.error(405, current.ERROR.BAD_METHOD)
        return output
# -------------------------------------------------------------------------
    def get_status(self):
        """ Read the current sync status """
        table = current.s3db.sync_status
        row = current.db().select(table.ALL, limitby=(0, 1)).first()
        if not row:
            # no status record yet => return an empty Storage
            row = Storage()
        return row
# -------------------------------------------------------------------------
    def set_status(self, **attr):
        """ Update the current sync status """
        table = current.s3db.sync_status
        # keep only attributes that are actual fields of the status table
        data = Storage([(k, attr[k]) for k in attr if k in table.fields])
        data.update(timestmp = datetime.datetime.utcnow())
        row = current.db().select(table._id, limitby=(0, 1)).first()
        if row:
            row.update_record(**data)
        else:
            table.insert(**data)
            row = data
        return row
# -------------------------------------------------------------------------
    def __get_config(self):
        """ Read the sync settings, avoid repeated DB lookups """
        # cache the single sync_config row on the instance
        if not hasattr(self, "config"):
            table = current.s3db.sync_config
            row = current.db().select(table.ALL, limitby=(0, 1)).first()
            self.config = row
        return self.config
# -------------------------------------------------------------------------
    def synchronize(self, repository):
        """
        Synchronize with a repository

        @param repository: the repository Row
        @return: True if successful, False if there was an error

        Per the mode checks below: task.mode 1 or 3 => pull,
        2 or 3 => push.
        """
        _debug("S3Sync.synchronize(%s)" % repository.url)
        log = self.log
        if not repository.url:
            message = "No URL set for repository"
            log.write(repository_id=repository.id,
                      resource_name=None,
                      transmission=None,
                      mode=None,
                      action="connect",
                      remote=False,
                      result=self.log.FATAL,
                      message=message)
            return False
        ttable = current.s3db.sync_task
        query = (ttable.repository_id == repository.id) & \
                (ttable.deleted != True)
        tasks = current.db(query).select()
        connector = S3SyncRepository(repository)
        error = connector.login()
        if error:
            log.write(repository_id=repository.id,
                      resource_name=None,
                      transmission=log.OUT,
                      mode=None,
                      action="login",
                      remote=True,
                      result=log.FATAL,
                      message=error)
            return False
        success = True
        for task in tasks:
            # Pull
            mtime = None
            if task.mode in (1, 3):
                error, mtime = connector.pull(task,
                                              onconflict=self.onconflict)
                if error:
                    # one failed task does not abort the others
                    success = False
                    _debug("S3Sync.synchronize: %s PULL error: %s" %
                           (task.resource_name, error))
                    continue
                if mtime is not None:
                    task.update_record(last_pull=mtime)
            # Push
            mtime = None
            if task.mode in (2, 3):
                error, mtime = connector.push(task)
                if error:
                    success = False
                    _debug("S3Sync.synchronize: %s PUSH error: %s" %
                           (task.resource_name, error))
                    continue
                if mtime is not None:
                    task.update_record(last_push=mtime)
            _debug("S3Sync.synchronize: %s done" % task.resource_name)
        return success
# -------------------------------------------------------------------------
    def __register(self, r, **attr):
        """
        Respond to an incoming registration request: look up (or
        create) a sync_repository record for the peer's UUID and
        return a JSON message with our own UUID.

        @param r: the S3Request
        @param attr: the controller attributes
        """
        log = self.log
        result = log.SUCCESS
        message = "registration successful"
        repository_id = None
        config = self.__get_config()
        if "repository" in r.vars:
            ruid = r.vars["repository"]
            db = current.db
            rtable = current.s3db.sync_repository
            row = db(rtable.uuid == ruid).select(limitby=(0, 1)).first()
            if row:
                # Peer already known - enable push for it if the
                # requesting user is an ADMIN
                repository_id = row.id
                if not row.accept_push and current.auth.s3_has_role("ADMIN"):
                    row.update_record(accept_push=True)
            else:
                # New peer - accept pushes only when registered by ADMIN
                if current.auth.s3_has_role("ADMIN"):
                    accept_push = True
                else:
                    accept_push = False
                repository_id = rtable.insert(name=ruid,
                                              uuid=ruid,
                                              accept_push=accept_push)
                if not repository_id:
                    result = log.ERROR
                    message = "registration failed"
        else:
            result = log.ERROR
            message = "no repository identifier specified"
        # JSON response, including our repository UUID as sender
        if result == log.SUCCESS:
            output = current.xml.json_message(message=message,
                                              sender="%s" % config.uuid)
        else:
            output = current.xml.json_message(False, 400,
                                              message=message,
                                              sender="%s" % config.uuid)
        # Set content type header
        headers = current.response.headers
        headers["Content-Type"] = "application/json"
        # Log the operation
        log.write(repository_id=repository_id,
                  resource_name=log.NONE,
                  transmission=log.IN,
                  mode=log.PUSH,
                  action="register repository",
                  result=result,
                  message=message)
        return output
# -------------------------------------------------------------------------
    def __send(self, r, **attr):
        """
        Respond to an incoming pull: export the requested resource as
        S3XML, honouring start/limit/msince and per-table sync filters
        passed in the URL.

        @param r: the S3Request
        @param attr: the controller attributes
        """
        _debug("S3Sync.__send")
        resource = r.resource
        # Identify the requesting repository by its UUID (for logging)
        repository_id = None
        if "repository" in r.vars:
            db = current.db
            s3db = current.s3db
            ruid = r.vars["repository"]
            rtable = s3db.sync_repository
            ttable = s3db.sync_task
            left = ttable.on((rtable.id == ttable.repository_id) & \
                             (ttable.resource_name == resource.tablename))
            row = db(rtable.uuid == ruid).select(rtable.id,
                                                 ttable.id,
                                                 left=left,
                                                 limitby=(0, 1)).first()
            if row:
                repository_id = row[rtable.id]
                # NOTE(review): task_id is looked up but never used
                # below - verify whether task-specific settings were
                # meant to be applied here
                task_id = row[ttable.id]
        # Additional export parameters (ignored when not parseable)
        _vars = r.get_vars
        start = _vars.get("start", None)
        if start is not None:
            try:
                start = int(start)
            except ValueError:
                start = None
        limit = _vars.get("limit", None)
        if limit is not None:
            try:
                limit = int(limit)
            except ValueError:
                limit = None
        # msince = only records modified after this ISO datetime
        msince = _vars.get("msince", None)
        if msince is not None:
            tfmt = current.xml.ISOFORMAT
            try:
                (y, m, d, hh, mm, ss, t0, t1, t2) = \
                    time.strptime(msince, tfmt)
                msince = datetime.datetime(y, m, d, hh, mm, ss)
            except ValueError:
                msince = None
        # Sync filters from peer: URL vars of the form
        # [tablename]urlvar=value ("~" or empty = the master resource);
        # repeated vars for the same table/var are &-concatenated
        filters = {}
        for k, v in _vars.items():
            if k[0] == "[" and "]" in k:
                tablename, urlvar = k[1:].split("]", 1)
                if urlvar:
                    if not tablename or tablename == "~":
                        tablename = resource.tablename
                    f = filters.get(tablename, {})
                    u = f.get(urlvar, None)
                    if u:
                        u = "%s&%s" % (u, v)
                    else:
                        u = v
                    f[urlvar] = u
                    filters[tablename] = f
        if not filters:
            filters = None
        # Export the resource
        output = resource.export_xml(start=start,
                                     limit=limit,
                                     filters=filters,
                                     msince=msince)
        count = resource.results
        # Set content type header
        headers = current.response.headers
        headers["Content-Type"] = "text/xml"
        # Log the operation
        log = self.log
        log.write(repository_id=repository_id,
                  resource_name=r.resource.tablename,
                  transmission=log.IN,
                  mode=log.PULL,
                  result=log.SUCCESS,
                  message="data sent to peer (%s records)" % count)
        return output
# -------------------------------------------------------------------------
    def __receive(self, r, **attr):
        """
        Respond to an incoming push: import the S3XML payload into the
        target resource, using the strategy and policies of the
        matching sync task (or defaults/URL parameters if no task).

        @param r: the S3Request
        @param attr: the controller attributes
        """
        _debug("S3Sync.__receive")
        s3db = current.s3db
        db = current.db
        # Identify the sending repository by its UUID
        repository = Storage(id=None)
        if "repository" in r.vars:
            ruid = r.vars["repository"]
            rtable = s3db.sync_repository
            row = db(rtable.uuid == ruid).select(limitby=(0, 1)).first()
            if row:
                repository = row
        # Only known repositories with accept_push may push to us
        if not repository.id or \
           not repository.accept_push:
            r.error(403, current.ERROR.NOT_PERMITTED)
        # Get strategy and policy
        default_update_policy = S3ImportItem.POLICY.NEWER
        default_conflict_policy = S3ImportItem.POLICY.MASTER
        ttable = s3db.sync_task
        query = (ttable.repository_id == repository.id) & \
                (ttable.resource_name == r.tablename) & \
                (ttable.deleted != True)
        task = db(query).select(limitby=(0, 1)).first()
        last_sync = None
        if task:
            # Use the settings of the configured sync task
            strategy = task.strategy
            update_policy = task.update_policy or default_update_policy
            conflict_policy = task.conflict_policy or default_conflict_policy
            if update_policy not in ("THIS", "OTHER"):
                last_sync = task.last_pull
        else:
            # No task configured - take the settings from the URL,
            # mirroring THIS/OTHER to the local perspective
            policies = S3ImportItem.POLICY
            p = r.get_vars.get("update_policy", None)
            values = {"THIS": "OTHER", "OTHER": "THIS"}
            switch = lambda p: p in values and values[p] or p
            if p and p in policies:
                p = switch(p)
                update_policy = policies[p]
            else:
                update_policy = default_update_policy
            p = r.get_vars.get("conflict_policy", None)
            if p and p in policies:
                p = switch(p)
                conflict_policy = policies[p]
            else:
                conflict_policy = default_conflict_policy
            # msince = peer-supplied time of the last synchronization
            msince = r.get_vars.get("msince", None)
            if msince is not None:
                tfmt = current.xml.ISOFORMAT
                try:
                    (y, m, d, hh, mm, ss, t0, t1, t2) = \
                        time.strptime(msince, tfmt)
                    last_sync = datetime.datetime(y, m, d, hh, mm, ss)
                except ValueError:
                    last_sync = None
            # strategy = comma-separated list of accepted import methods
            s = r.get_vars.get("strategy", None)
            if s:
                s = str(s).split(",")
                methods = S3ImportItem.METHOD
                strategy = [method for method in methods.values()
                            if method in s]
            else:
                strategy = ttable.strategy.default
        # Other parameters
        ignore_errors = True
        # Get the source
        source = r.read_body()
        # Import resource, resolving conflicts via self.onconflict
        resource = r.resource
        onconflict = lambda item: self.onconflict(item, repository, resource)
        try:
            output = resource.import_xml(source, format="xml",
                                         ignore_errors=ignore_errors,
                                         strategy=strategy,
                                         update_policy=update_policy,
                                         conflict_policy=conflict_policy,
                                         last_sync=last_sync,
                                         onconflict=onconflict)
        except IOError:
            current.auth.permission.fail()
        except SyntaxError:
            e = sys.exc_info()[1]
            r.error(400, e)
        log = self.log
        if resource.error_tree is not None:
            # Validation error (log in any case)
            if ignore_errors:
                result = log.WARNING
            else:
                result = log.FATAL
            # Collect per-record (and per-field) error details into
            # one log message
            message = "%s" % resource.error
            for element in resource.error_tree.findall("resource"):
                error_msg = element.get("error", "unknown error")
                error_fields = element.findall("data[@error]")
                if error_fields:
                    for field in error_fields:
                        error_msg = field.get("error", "unknown error")
                        if error_msg:
                            msg = "(UID: %s) %s.%s=%s: %s" % \
                                  (element.get("uuid", None),
                                   element.get("name", None),
                                   field.get("field", None),
                                   field.get("value", field.text),
                                   error_msg)
                            message = "%s, %s" % (message, msg)
                else:
                    msg = "(UID: %s) %s: %s" % \
                          (element.get("uuid", None),
                           element.get("name", None),
                           error_msg)
                    message = "%s, %s" % (message, msg)
        else:
            result = log.SUCCESS
            message = "data received from peer"
        log.write(repository_id=repository.id,
                  resource_name=resource.tablename,
                  transmission=log.IN,
                  mode=log.PUSH,
                  result=result,
                  message=message)
        return output
# -------------------------------------------------------------------------
    def onconflict(self, item, repository, resource):
        """
        Automatic conflict resolution: apply a custom per-table
        "onconflict" hook if configured, otherwise resolve via the
        conflict policy of the matching sync task; clears
        item.conflict when the item is to be accepted.

        @param item: the conflicting import item
        @param repository: the repository the item comes from
        @param resource: the resource the item shall be imported to
        """
        s3db = current.s3db
        tablename = resource.tablename
        resolver = s3db.get_config(tablename, "onconflict")
        _debug("Resolving conflict in %s" % resource.tablename)
        _debug("Repository: %s" % repository.name)
        _debug("Conflicting item: %s" % item)
        _debug("Method: %s" % item.method)
        if resolver:
            # Custom rule decides by setting/clearing item.conflict
            _debug("Applying custom rule")
            resolver(item, repository, resource)
            if item.conflict:
                _debug("Do not accept")
            else:
                _debug("Accept per custom rule")
        else:
            _debug("Applying default rule")
            ttable = s3db.sync_task
            policies = S3ImportItem.POLICY
            query = (ttable.repository_id == repository.id) & \
                    (ttable.resource_name == tablename) & \
                    (ttable.deleted != True)
            task = current.db(query).select(limitby=(0, 1)).first()
            if task and item.original:
                original = item.original
                conflict_policy = task.conflict_policy
                if conflict_policy == policies.OTHER:
                    # Always accept the remote version
                    _debug("Accept by default")
                    item.conflict = False
                elif conflict_policy == policies.NEWER:
                    # Accept if the remote item is not older than ours
                    xml = current.xml
                    if xml.MTIME in original and \
                       xml.as_utc(original[xml.MTIME]) <= item.mtime:
                        _debug("Accept because newer")
                        item.conflict = False
                    else:
                        _debug("Do not accept")
                elif conflict_policy == policies.MASTER:
                    # Accept if master
                    # NOTE(review): operator precedence makes this
                    # (MCI in original and original.mci == 0) or item.mci == 1
                    # - verify whether
                    # MCI in original and (original.mci == 0 or item.mci == 1)
                    # was intended
                    if current.xml.MCI in original and \
                       original.mci == 0 or item.mci == 1:
                        _debug("Accept because master")
                        item.conflict = False
                    else:
                        _debug("Do not accept")
                else:
                    # Never accept
                    _debug("Do not accept")
                    pass
            else:
                # No rule - accept always
                _debug("Accept because no rule found")
                item.conflict = False
# -------------------------------------------------------------------------
@staticmethod
def get_filters(task_id):
"""
Get all filters for a synchronization task
@param task_id: the task ID
@return: a dict of dicts like {tablename: {url_var: value}}
"""
db = current.db
s3db = current.s3db
ftable = s3db.sync_resource_filter
query = (ftable.task_id == task_id) & \
(ftable.deleted != True)
rows = db(query).select(ftable.tablename,
ftable.filter_string)
filters = {}
for row in rows:
tablename = row.tablename
if tablename in filters:
filters[tablename] = "%s&%s" % (filters[tablename],
row.filter_string)
else:
filters[tablename] = row.filter_string
parse_url = S3URLQuery.parse_url
for tablename in filters:
filters[tablename] = parse_url(filters[tablename])
return filters
# =============================================================================
class S3SyncLog(S3Method):
    """ Synchronization Logger: writes and displays sync_log entries """

    TABLENAME = "sync_log"

    # Result classes of a logged transaction
    SUCCESS = "success"
    WARNING = "warning"
    ERROR = "error"
    FATAL = "fatal"

    # Transmission directions
    IN = "incoming"
    OUT = "outgoing"

    # Synchronization modes
    PULL = "pull"
    PUSH = "push"

    # Placeholder for "not applicable"
    NONE = "none"

    # -------------------------------------------------------------------------
    def apply_method(self, r, **attr):
        """
        RESTful method handler: render the sync log, either for the
        log table itself, for a repository, or filtered down to the
        current resource.

        @param r: the S3Request instance
        @param attr: controller attributes for the request
        """
        output = dict()
        resource = r.resource
        if resource.tablename == "sync_log":
            # Direct read of the log table
            return resource.crud.select(r, **attr)
        elif resource.tablename == "sync_repository":
            # READ for sync log for this repository
            pass
        else:
            if r.interactive:
                # READ for sync log for this resource: re-target the
                # request at sync/log, filtered by resource name
                here = "%s.%s" % (r.controller, r.function)
                sync_log = current.s3db[self.TABLENAME]
                sync_log.resource_name.readable = False
                query = (sync_log.resource_name == resource.tablename)
                r = r.factory(prefix="sync", name="log", args=[])
                s3 = current.response.s3
                s3.filter = query
                s3.prep = None
                s3.postp = None
                s3.actions = [
                    dict(label=str(current.T("Details")),
                         _class="action-btn",
                         url=URL(c="sync", f="log",
                                 args=["[id]"],
                                 vars={"return":here}))
                    ]
                output = r(subtitle=None,
                           rheader=self.rheader)
            else:
                r.error(501, current.ERROR.BAD_FORMAT)
        return output

    # -------------------------------------------------------------------------
    @classmethod
    def write(cls,
              repository_id=None,
              resource_name=None,
              transmission=None,
              mode=None,
              action=None,
              result=None,
              remote=False,
              message=None):
        """
        Writes a new entry to the log

        @param repository_id: the repository record ID
        @param resource_name: the resource name
        @param transmission: transmission mode (IN, OUT or None)
        @param mode: synchronization mode (PULL, PUSH or None)
        @param action: action taken to resolve errors (if any)
        @param result: the result of the transaction
                       (SUCCESS, WARNING, ERROR or FATAL)
        @param remote: boolean, True if this is a remote error
        @param message: clear text message
        """
        # Normalize invalid values to defaults
        if result not in (cls.SUCCESS,
                          cls.WARNING,
                          cls.ERROR,
                          cls.FATAL):
            result = cls.SUCCESS
        if transmission not in (cls.IN, cls.OUT):
            transmission = cls.NONE
        if mode not in (cls.PULL, cls.PUSH):
            mode = cls.NONE
        # Stored mode combines sync mode and direction, e.g. "pull/incoming"
        mode = "%s/%s" % (mode, transmission)
        if not action:
            action = cls.NONE
        now = datetime.datetime.utcnow()
        entry = Storage(timestmp=now,
                        repository_id=repository_id,
                        resource_name=resource_name,
                        mode=mode,
                        action=action,
                        result=result,
                        remote=remote,
                        message=message)
        table = current.s3db[cls.TABLENAME]
        table.insert(**entry)
        return

    # -------------------------------------------------------------------------
    @staticmethod
    def rheader(r, **attr):
        """ Resource header for the list view of the log """
        if r.id is None:
            return DIV(current.T("Showing latest entries first"))
        else:
            return None
# =============================================================================
class S3SyncRepository(object):
    """
    Class representing a peer repository: copies the connection
    details from the repository record and delegates the actual
    transmission to an API-specific adapter.
    """

    def __init__(self, repository):
        """
        Constructor

        @param repository: the repository record (Row)
        """
        self.log = S3SyncLog
        # Cache for the lazy "config" property
        self._config = None

        # Connection details from the repository record
        self.id = repository.id
        self.name = repository.name
        self.url = repository.url
        self.username = repository.username
        self.password = repository.password
        self.client_id = repository.client_id
        self.client_secret = repository.client_secret
        self.site_key = repository.site_key
        self.refresh_token = repository.refresh_token
        self.proxy = repository.proxy
        self.apitype = repository.apitype

        # Pick the adapter class matching the API type from the
        # sync_adapter module; fall back to the abstract base adapter
        # for unknown API types
        import sync_adapter
        api = sync_adapter.__dict__.get(self.apitype)
        if api:
            adapter = api.S3SyncAdapter(self)
        else:
            adapter = S3SyncBaseAdapter(self)
        self.adapter = adapter

    # -------------------------------------------------------------------------
    @property
    def config(self):
        """
        Lazy access to synchronization settings (first sync_config row)
        """
        if self._config is None:
            table = current.s3db.sync_config
            row = current.db().select(table.ALL, limitby=(0, 1)).first()
            self._config = row
        return self._config

    # -------------------------------------------------------------------------
    def __getattr__(self, name):
        """
        Delegate other attributes and methods to the adapter
        (only called for names not found on this instance)

        @param name: the attribute/method
        """
        return object.__getattribute__(self.adapter, name)
# =============================================================================
class S3SyncBaseAdapter(object):
    """
    Abstract base class for synchronization adapters: defines the
    interface which API-specific adapters must implement.
    """

    def __init__(self, repository):
        """
        Constructor

        @param repository: the repository this adapter serves
        """
        self.repository = repository
        self.log = repository.log

    def register(self):
        """ Register at the peer - to be implemented by the subclass """
        raise NotImplementedError

    def login(self):
        """ Log in at the peer - to be implemented by the subclass """
        raise NotImplementedError

    def pull(self, task, onconflict=None):
        """ Pull data from the peer - to be implemented by the subclass """
        raise NotImplementedError

    def push(self, task):
        """ Push data to the peer - to be implemented by the subclass """
        raise NotImplementedError
# End =========================================================================
|
flavour/Turkey
|
modules/s3/s3sync.py
|
Python
|
mit
| 29,149
|
# This file is part of Androguard.
#
# Copyright (C) 2012, Anthony Desnos <desnos at t0t0.fr>
# All rights reserved.
#
# Androguard is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Androguard is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Androguard. If not, see <http://www.gnu.org/licenses/>.
from androguard.core import bytecode
from androguard.core import androconf
from androguard.core.bytecode import SV
from androguard.core.bytecodes.dvm_permissions import DVM_PERMISSIONS
import zipfile, StringIO
from struct import pack, unpack
from xml.dom import minidom
from xml.sax.saxutils import escape
from zlib import crc32
import re
import sys
# Select the zip backend: ZIPMODULE == 0 means the third-party chilkat
# component, ZIPMODULE == 1 means the stdlib zipfile module.
# chilkat is only attempted on Python versions below 2.7.
if sys.hexversion < 0x2070000 :
    try :
        import chilkat
        ZIPMODULE = 0
        # UNLOCK : change it with your valid key !
        try :
            CHILKAT_KEY = open("key.txt", "rb").read()
        except Exception :
            CHILKAT_KEY = "testme"

    except ImportError :
        ZIPMODULE = 1
else :
    ZIPMODULE = 1
################################################### CHILKAT ZIP FORMAT #####################################################
class ChilkatZip :
    """
    Zip reader/writer backed by the chilkat component (used when
    ZIPMODULE == 0); mirrors the subset of the zipfile API used by APK.
    """
    def __init__(self, raw) :
        self.files = []
        self.zip = chilkat.CkZip()

        self.zip.UnlockComponent( CHILKAT_KEY )

        self.zip.OpenFromMemory( raw, len(raw) )

        # Cache the names of all archive entries
        filename = chilkat.CkString()
        e = self.zip.FirstEntry()
        while e != None :
            e.get_FileName(filename)
            self.files.append( filename.getString() )
            e = e.NextEntry()

    def delete(self, patterns) :
        """ Delete all entries whose name matches the given regex """
        el = []

        filename = chilkat.CkString()
        e = self.zip.FirstEntry()
        while e != None :
            e.get_FileName(filename)
            if re.match(patterns, filename.getString()) != None :
                el.append( e )
            e = e.NextEntry()

        for i in el :
            self.zip.DeleteEntry( i )

    def remplace_file(self, filename, buff) :
        """
        Replace the content of the named entry with buff; returns
        False if the entry does not exist.
        (NOTE: method name typo kept for API compatibility)
        """
        entry = self.zip.GetEntryByName(filename)
        if entry != None :

            obj = chilkat.CkByteData()
            obj.append( buff, len(buff) )

            return entry.ReplaceData( obj )
        return False

    def write(self) :
        """ Return the whole archive as a byte string """
        obj = chilkat.CkByteData()
        self.zip.WriteToMemory( obj )
        return obj.getBytes()

    def namelist(self) :
        """ Return the list of entry names (zipfile-compatible) """
        return self.files

    def read(self, elem) :
        """ Return the inflated content of the named entry """
        e = self.zip.GetEntryByName( elem )
        s = chilkat.CkByteData()
        e.Inflate( s )
        return s.getBytes()
def sign_apk(filename, keystore, storepass) :
    """
    Sign an APK in place by invoking jarsigner.

    @param filename : path of the APK file to sign
    @param keystore : path of the keystore file
    @param storepass : password of the keystore

    NOTE: the key alias is hard-coded to "alias_name"; jarsigner's
    location is taken from androconf.CONF["PATH_JARSIGNER"].
    """
    from subprocess import Popen, PIPE, STDOUT

    # jarsigner -verbose -sigalg MD5withRSA -digestalg SHA1 -keystore tmp/androguard.androtrace tmp/toto.apk alias_name
    # Renamed from "compile" to avoid shadowing the builtin
    process = Popen([ androconf.CONF["PATH_JARSIGNER"],
                      "-sigalg",
                      "MD5withRSA",
                      "-digestalg",
                      "SHA1",
                      "-storepass",
                      storepass,
                      "-keystore",
                      keystore,
                      filename,
                      "alias_name" ],
                    stdout=PIPE, stderr=STDOUT)
    stdout, stderr = process.communicate()
######################################################## APK FORMAT ########################################################
class APK :
    """
    APK manages apk file format: opens the archive (chilkat or stdlib
    zipfile backend), parses AndroidManifest.xml and exposes package
    metadata, permissions, components and raw file access.
    """

    def __init__(self, filename, raw=False, mode="r") :
        """
        @param filename : specify the path of the file, or raw data
        @param raw : specify (boolean) if the filename is a path or raw data
        @param mode : open mode passed to the stdlib zipfile backend
        """
        self.filename = filename

        self.xml = {}
        self.package = ""
        self.androidversion = {}
        self.permissions = []
        self.validAPK = False

        self.files = {}
        self.files_crc32 = {}

        if raw == True :
            self.__raw = filename
        else :
            fd = open( filename, "rb" )
            self.__raw = fd.read()
            fd.close()

        # Pick the zip backend selected at import time
        if ZIPMODULE == 0 :
            self.zip = ChilkatZip( self.__raw )
        else :
            self.zip = zipfile.ZipFile( StringIO.StringIO( self.__raw ), mode=mode )

        # CHECK if there is only one embedded file
        #self._reload_apk()

        # Decode the binary AndroidManifest.xml and extract the
        # package name, version info and requested permissions
        for i in self.zip.namelist() :
            if i == "AndroidManifest.xml" :
                self.xml[i] = minidom.parseString( AXMLPrinter( self.zip.read( i ) ).getBuff() )

                self.package = self.xml[i].documentElement.getAttribute( "package" )
                self.androidversion["Code"] = self.xml[i].documentElement.getAttribute( "android:versionCode" )
                self.androidversion["Name"] = self.xml[i].documentElement.getAttribute( "android:versionName")

                for item in self.xml[i].getElementsByTagName('uses-permission') :
                    self.permissions.append( str( item.getAttribute("android:name") ) )

                self.validAPK = True

    def get_AndroidManifest(self) :
        """
        Return the Android Manifest XML file (minidom Document)
        """
        return self.xml["AndroidManifest.xml"]

    def is_valid_APK(self) :
        """
        Return true if APK is valid (an AndroidManifest.xml was found),
        false otherwise
        """
        return self.validAPK

    #def _reload_apk(self) :
    #    if len(files) == 1 :
    #        if ".apk" in files[0] :
    #            self.__raw = self.zip.read( files[0] )
    #            if ZIPMODULE == 0 :
    #                self.zip = ChilkatZip( self.__raw )
    #            else :
    #                self.zip = zipfile.ZipFile( StringIO.StringIO( self.__raw ) )

    def get_filename(self) :
        """
        Return the filename of the APK
        """
        return self.filename

    def get_package(self) :
        """
        Return the name of the package
        """
        return self.package

    def get_androidversion_code(self) :
        """
        Return the android version code
        """
        return self.androidversion["Code"]

    def get_androidversion_name(self) :
        """
        Return the android version name
        """
        return self.androidversion["Name"]

    def get_files(self) :
        """
        Return the files inside the APK
        """
        return self.zip.namelist()

    def get_files_types(self) :
        """
        Return the files inside the APK with their types (by using python-magic).
        Results are cached in self.files / self.files_crc32.

        NOTE(review): when python-magic is not installed, only the
        CRC32s are computed and the (empty) type dict is returned.
        """
        try :
            import magic
        except ImportError :
            for i in self.get_files() :
                buffer = self.zip.read( i )
                self.files_crc32[ i ] = crc32( buffer )
            return self.files

        if self.files != {} :
            return self.files

        # python-magic exists in two flavours with different APIs:
        # the "builtin" libmagic binding (magic.open) and the
        # pypi python-magic package (magic.Magic)
        builtin_magic = 0
        try :
            getattr(magic, "Magic")
        except AttributeError :
            builtin_magic = 1

        if builtin_magic :
            ms = magic.open(magic.MAGIC_NONE)
            ms.load()

            for i in self.get_files() :
                buffer = self.zip.read( i )
                self.files[ i ] = ms.buffer( buffer )
                self.files[ i ] = self.patch_magic(buffer, self.files[ i ])
                self.files_crc32[ i ] = crc32( buffer )
        else :
            m = magic.Magic()
            for i in self.get_files() :
                buffer = self.zip.read( i )
                self.files[ i ] = m.from_buffer( buffer )
                self.files[ i ] = self.patch_magic(buffer, self.files[ i ])
                self.files_crc32[ i ] = crc32( buffer )

        return self.files

    def patch_magic(self, buffer, orig) :
        """
        Refine libmagic's generic "Zip"/"DBase" guesses into
        Android-specific descriptions via androconf.is_android_raw
        """
        if ("Zip" in orig) or ("DBase" in orig) :
            val = androconf.is_android_raw( buffer )
            if val == "APK" :
                return "Android application package file"
            elif val == "AXML" :
                return "Android's binary XML"

        return orig

    def get_files_crc32(self) :
        """ Return the {filename: crc32} dict, computing it if needed """
        if self.files_crc32 == {} :
            self.get_files_types()

        return self.files_crc32

    def get_files_information(self) :
        """
        Yield (filename, type, crc32) for every file inside the APK
        """
        if self.files == {} :
            self.get_files_types()

        for i in self.get_files() :
            yield i, self.files[ i ], self.files_crc32[ i ]

    def get_raw(self) :
        """
        Return raw bytes of the APK
        """
        return self.__raw

    def get_file(self, filename) :
        """
        Return the raw data of the specified filename
        (empty string if the entry does not exist)
        """
        try :
            return self.zip.read( filename )
        except KeyError :
            return ""

    def get_dex(self) :
        """
        Return the raw data of the classes dex file
        """
        return self.get_file( "classes.dex" )

    def get_elements(self, tag_name, attribute) :
        """
        Return elements in xml files which match with the tag name and the specific attribute

        @param tag_name : a string which specify the tag name
        @param attribute : a string which specify the attribute
        """
        l = []
        for i in self.xml :
            for item in self.xml[i].getElementsByTagName(tag_name) :
                value = item.getAttribute(attribute)
                value = self.format_value( value )

                l.append( str( value ) )
        return l

    def format_value(self, value) :
        """
        Expand a relative class name to a fully-qualified one by
        prefixing the package name
        """
        if len(value) > 0 :
            if value[0] == "." :
                value = self.package + value
            else :
                v_dot = value.find(".")
                # NOTE(review): v_dot == 0 implies value[0] == ".",
                # which is already handled above - this branch appears
                # unreachable
                if v_dot == 0 :
                    value = self.package + "." + value
                elif v_dot == -1 :
                    value = self.package + "." + value

        return value

    def get_element(self, tag_name, attribute) :
        """
        Return element in xml files which match with the tag name and the specific attribute

        @param tag_name : a string which specify the tag name
        @param attribute : a string which specify the attribute
        """
        for i in self.xml :
            for item in self.xml[i].getElementsByTagName(tag_name) :
                value = item.getAttribute(attribute)

                if len(value) > 0 :
                    return value
        return None

    def get_main_activity(self) :
        """
        Return the name of the main activity (the activity that has
        both the MAIN action and the LAUNCHER category)
        """
        for i in self.xml :
            x = set()
            y = set()
            for item in self.xml[i].getElementsByTagName("activity") :
                for sitem in item.getElementsByTagName( "action" ) :
                    val = sitem.getAttribute( "android:name" )
                    if val == "android.intent.action.MAIN" :
                        x.add( item.getAttribute( "android:name" ) )

                for sitem in item.getElementsByTagName( "category" ) :
                    val = sitem.getAttribute( "android:name" )
                    if val == "android.intent.category.LAUNCHER" :
                        y.add( item.getAttribute( "android:name" ) )

            z = x.intersection(y)
            if len(z) > 0 :
                return self.format_value(z.pop())
        return None

    def get_activities(self) :
        """
        Return the android:name attribute of all activities
        """
        return self.get_elements("activity", "android:name")

    def get_services(self) :
        """
        Return the android:name attribute of all services
        """
        return self.get_elements("service", "android:name")

    def get_receivers(self) :
        """
        Return the android:name attribute of all receivers
        """
        return self.get_elements("receiver", "android:name")

    def get_providers(self) :
        """
        Return the android:name attribute of all providers
        """
        return self.get_elements("provider", "android:name")

    def get_permissions(self) :
        """
        Return permissions
        """
        return self.permissions

    def get_details_permissions(self) :
        """
        Return permissions with details, looked up in DVM_PERMISSIONS
        by the short name (text after the last "."); unknown
        permissions are classified as "dangerous"
        """
        l = {}

        for i in self.permissions :
            perm = i
            pos = i.rfind(".")

            if pos != -1 :
                perm = i[pos+1:]

            try :
                l[ i ] = DVM_PERMISSIONS["MANIFEST_PERMISSION"][ perm ]
            except KeyError :
                l[ i ] = [ "dangerous", "Unknown permission from android reference", "Unknown permission from android reference" ]

        return l

    def get_min_sdk_version(self) :
        """
        Return the android:minSdkVersion attribute
        """
        return self.get_element( "uses-sdk", "android:minSdkVersion" )

    def get_target_sdk_version(self) :
        """
        Return the android:targetSdkVersion attribute
        """
        return self.get_element( "uses-sdk", "android:targetSdkVersion" )

    def get_libraries(self) :
        """
        Return the android:name attributes for libraries
        """
        return self.get_elements( "uses-library", "android:name" )

    def show(self) :
        """ Print a summary (files, permissions, components) to stdout """
        self.get_files_types()

        print "FILES: "
        for i in self.get_files() :
            try :
                print "\t", i, self.files[i], "%x" % self.files_crc32[i]
            except KeyError :
                print "\t", i, "%x" % self.files_crc32[i]

        print "PERMISSIONS: "
        details_permissions = self.get_details_permissions()
        for i in details_permissions :
            print "\t", i, details_permissions[i]
        print "MAIN ACTIVITY: ", self.get_main_activity()

        print "ACTIVITIES: ", self.get_activities()
        print "SERVICES: ", self.get_services()
        print "RECEIVERS: ", self.get_receivers()
        print "PROVIDERS: ", self.get_providers()

    def get_certificate(self, filename) :
        """
        Return a certificate object by giving the name in the apk file
        (requires the chilkat component)
        """
        import chilkat

        cert = chilkat.CkCert()
        f = self.get_file( filename )
        success = cert.LoadFromBinary(f, len(f))

        return success, cert

    def new_zip(self, filename, deleted_files=None, new_files={}) :
        """
        Write a modified copy of the archive to filename: drop entries
        matching deleted_files, replace contents per new_files.

        NOTE(review): entries are only copied when deleted_files is
        not None - with the default arguments the output archive is
        empty; new_files={} is also a shared mutable default. Verify
        against callers.
        """
        zout = zipfile.ZipFile (filename, 'w')

        for item in self.zip.infolist() :
            if deleted_files != None :
                if re.match(deleted_files, item.filename) == None :
                    if item.filename in new_files :
                        zout.writestr(item, new_files[item.filename])
                    else :
                        buffer = self.zip.read(item.filename)
                        zout.writestr(item, buffer)
        zout.close()
def show_Certificate(cert) :
    """ Print the issuer and subject fields of a chilkat certificate """
    print "Issuer: C=%s, CN=%s, DN=%s, E=%s, L=%s, O=%s, OU=%s, S=%s" % (cert.issuerC(), cert.issuerCN(), cert.issuerDN(), cert.issuerE(), cert.issuerL(), cert.issuerO(), cert.issuerOU(), cert.issuerS())
    print "Subject: C=%s, CN=%s, DN=%s, E=%s, L=%s, O=%s, OU=%s, S=%s" % (cert.subjectC(), cert.subjectCN(), cert.subjectDN(), cert.subjectE(), cert.subjectL(), cert.subjectO(), cert.subjectOU(), cert.subjectS())
######################################################## AXML FORMAT ########################################################
# Translated from http://code.google.com/p/android4me/source/browse/src/android/content/res/AXmlResourceParser.java
class StringBlock :
    """
    String pool chunk of an Android binary XML (AXML) file: reads the
    chunk header, the string/style offset tables and the raw
    string/style data; getRaw() decodes individual strings.
    """
    def __init__(self, buff) :
        # Chunk type word (already identified by the caller) - skip
        buff.read( 4 )
        self.chunkSize = SV( '<L', buff.read( 4 ) )
        self.stringCount = SV( '<L', buff.read( 4 ) )
        self.styleOffsetCount = SV( '<L', buff.read( 4 ) )

        # unused value ?
        buff.read(4) # ?

        self.stringsOffset = SV( '<L', buff.read( 4 ) )
        self.stylesOffset = SV( '<L', buff.read( 4 ) )

        self.m_stringOffsets = []
        self.m_styleOffsets = []
        self.m_strings = []
        self.m_styles = []

        for i in range(0, self.stringCount.get_value()) :
            self.m_stringOffsets.append( SV( '<L', buff.read( 4 ) ) )

        # BUGFIX: was "self.m_stylesOffsets.append(...)" - that
        # attribute is never initialized, so any document with
        # styleOffsetCount > 0 raised AttributeError
        for i in range(0, self.styleOffsetCount.get_value()) :
            self.m_styleOffsets.append( SV( '<L', buff.read( 4 ) ) )

        # String data runs from stringsOffset to stylesOffset (or to
        # the end of the chunk when there is no style data)
        size = self.chunkSize.get_value() - self.stringsOffset.get_value()
        if self.stylesOffset.get_value() != 0 :
            size = self.stylesOffset.get_value() - self.stringsOffset.get_value()

        # FIXME: size should be a multiple of 4 in a well-formed pool
        if (size % 4) != 0 :
            pass

        for i in range(0, size / 4) :
            self.m_strings.append( SV( '=L', buff.read( 4 ) ) )

        if self.stylesOffset.get_value() != 0 :
            # BUGFIX: style data runs from stylesOffset to the end of
            # the chunk; the size was computed from stringsOffset,
            # which over-read past the chunk end
            size = self.chunkSize.get_value() - self.stylesOffset.get_value()

            # FIXME: size should be a multiple of 4 in a well-formed pool
            if (size % 4) != 0 :
                pass

            for i in range(0, size / 4) :
                self.m_styles.append( SV( '=L', buff.read( 4 ) ) )

    def getRaw(self, idx) :
        """
        Return the string at pool index idx, or None for an invalid
        index. Strings are stored as a 16-bit length followed by
        16-bit code units.
        """
        if idx < 0 or self.m_stringOffsets == [] or idx >= len(self.m_stringOffsets) :
            return None

        offset = self.m_stringOffsets[ idx ].get_value()
        length = self.getShort(self.m_strings, offset)

        data = ""

        while length > 0 :
            offset += 2
            # Unicode character
            data += unichr( self.getShort(self.m_strings, offset) )

            # FIXME: dropping "&" characters works around later XML escaping
            if data[-1] == "&" :
                data = data[:-1]

            length -= 1

        return data

    def getShort(self, array, offset) :
        """
        Read the 16-bit half-word at byte offset "offset" from a list
        of 32-bit SV words
        """
        value = array[offset / 4].get_value()
        if ((offset % 4) / 2) == 0 :
            return value & 0xFFFF
        else :
            return value >> 16
# Indices into the per-attribute record kept by AXMLParser (each
# attribute occupies ATTRIBUTE_LENGHT consecutive 32-bit words)
ATTRIBUTE_IX_NAMESPACE_URI = 0
ATTRIBUTE_IX_NAME = 1
ATTRIBUTE_IX_VALUE_STRING = 2
ATTRIBUTE_IX_VALUE_TYPE = 3
ATTRIBUTE_IX_VALUE_DATA = 4
ATTRIBUTE_LENGHT = 5    # sic: "LENGHT" kept for compatibility

# Chunk type identifiers of the AXML binary format
CHUNK_AXML_FILE = 0x00080003
CHUNK_RESOURCEIDS = 0x00080180
CHUNK_XML_FIRST = 0x00100100
CHUNK_XML_START_NAMESPACE = 0x00100100
CHUNK_XML_END_NAMESPACE = 0x00100101
CHUNK_XML_START_TAG = 0x00100102
CHUNK_XML_END_TAG = 0x00100103
CHUNK_XML_TEXT = 0x00100104
CHUNK_XML_LAST = 0x00100104

# Parser event types returned by AXMLParser.next()
START_DOCUMENT = 0
END_DOCUMENT = 1
START_TAG = 2
END_TAG = 3
TEXT = 4
class AXMLParser :
    def __init__(self, raw_buff) :
        """
        Pull parser for Android binary XML (AXML) streams

        @param raw_buff: the raw AXML data
        """
        self.reset()

        self.buff = bytecode.BuffHandle( raw_buff )

        # Skip the first two 4-byte header words of the stream
        self.buff.read(4)
        self.buff.read(4)

        # The string pool chunk follows immediately
        self.sb = StringBlock( self.buff )

        self.m_resourceIDs = []
        # prefix index -> uri index, and the reverse mapping
        self.m_prefixuri = {}
        self.m_uriprefix = {}
        # stack of (prefix, uri) pairs for currently open namespaces
        self.m_prefixuriL = []
    def reset(self) :
        """ Reset the per-event parser state """
        self.m_event = -1
        self.m_lineNumber = -1
        self.m_name = -1
        self.m_namespaceUri = -1
        self.m_attributes = []
        self.m_idAttribute = -1
        self.m_classAttribute = -1
        self.m_styleAttribute = -1

    def next(self) :
        """ Advance to the next parse event and return its type """
        self.doNext()
        return self.m_event
    def doNext(self) :
        """
        Read chunks from the buffer until the next XML event
        (START_DOCUMENT, START_TAG, END_TAG, TEXT or END_DOCUMENT)
        is produced; state is exposed via the m_* attributes.
        """
        if self.m_event == END_DOCUMENT :
            return

        event = self.m_event

        self.reset()
        while 1 :
            chunkType = -1

            # Fake END_DOCUMENT event.
            if event == END_TAG :
                pass

            # START_DOCUMENT
            if event == START_DOCUMENT :
                chunkType = CHUNK_XML_START_TAG
            else :
                if self.buff.end() == True :
                    self.m_event = END_DOCUMENT
                    break
                chunkType = SV( '<L', self.buff.read( 4 ) ).get_value()

            # Resource ID table: collect the IDs and continue scanning
            if chunkType == CHUNK_RESOURCEIDS :
                chunkSize = SV( '<L', self.buff.read( 4 ) ).get_value()
                # FIXME: proper error handling for a malformed chunk size
                if chunkSize < 8 or chunkSize%4 != 0 :
                    raise("ooo")

                for i in range(0, chunkSize/4-2) :
                    self.m_resourceIDs.append( SV( '<L', self.buff.read( 4 ) ) )

                continue

            # FIXME: proper error handling for unknown chunk types
            if chunkType < CHUNK_XML_FIRST or chunkType > CHUNK_XML_LAST :
                raise("ooo")

            # Fake START_DOCUMENT event.
            if chunkType == CHUNK_XML_START_TAG and event == -1 :
                self.m_event = START_DOCUMENT
                break

            # Common chunk header: size, line number, comment word
            self.buff.read( 4 ) #/*chunkSize*/
            lineNumber = SV( '<L', self.buff.read( 4 ) ).get_value()
            self.buff.read( 4 ) #0xFFFFFFFF

            # Namespace chunks only update the prefix/uri maps - they
            # do not produce an event
            if chunkType == CHUNK_XML_START_NAMESPACE or chunkType == CHUNK_XML_END_NAMESPACE :
                if chunkType == CHUNK_XML_START_NAMESPACE :
                    prefix = SV( '<L', self.buff.read( 4 ) ).get_value()
                    uri = SV( '<L', self.buff.read( 4 ) ).get_value()
                    self.m_prefixuri[ prefix ] = uri
                    self.m_uriprefix[ uri ] = prefix
                    self.m_prefixuriL.append( (prefix, uri) )
                else :
                    self.buff.read( 4 )
                    self.buff.read( 4 )
                    (prefix, uri) = self.m_prefixuriL.pop()
                    #del self.m_prefixuri[ prefix ]
                    #del self.m_uriprefix[ uri ]

                continue

            self.m_lineNumber = lineNumber

            if chunkType == CHUNK_XML_START_TAG :
                self.m_namespaceUri = SV( '<L', self.buff.read( 4 ) ).get_value()
                self.m_name = SV( '<L', self.buff.read( 4 ) ).get_value()

                # FIXME
                self.buff.read( 4 ) #flags

                # High half-word = id attribute index + 1,
                # low half-word = attribute count
                attributeCount = SV( '<L', self.buff.read( 4 ) ).get_value()
                self.m_idAttribute = (attributeCount>>16) - 1
                attributeCount = attributeCount & 0xFFFF
                self.m_classAttribute = SV( '<L', self.buff.read( 4 ) ).get_value()
                self.m_styleAttribute = (self.m_classAttribute>>16) - 1

                self.m_classAttribute = (self.m_classAttribute & 0xFFFF) - 1

                # Each attribute is ATTRIBUTE_LENGHT 32-bit words
                for i in range(0, attributeCount*ATTRIBUTE_LENGHT) :
                    self.m_attributes.append( SV( '<L', self.buff.read( 4 ) ).get_value() )

                # The value type is kept in the top byte of its word
                for i in range(ATTRIBUTE_IX_VALUE_TYPE, len(self.m_attributes), ATTRIBUTE_LENGHT) :
                    self.m_attributes[i] = (self.m_attributes[i]>>24)

                self.m_event = START_TAG
                break

            if chunkType == CHUNK_XML_END_TAG :
                self.m_namespaceUri = SV( '<L', self.buff.read( 4 ) ).get_value()
                self.m_name = SV( '<L', self.buff.read( 4 ) ).get_value()
                self.m_event = END_TAG
                break

            if chunkType == CHUNK_XML_TEXT :
                self.m_name = SV( '<L', self.buff.read( 4 ) ).get_value()

                # FIXME: two unknown words are skipped here
                self.buff.read( 4 ) #?
                self.buff.read( 4 ) #?

                self.m_event = TEXT
                break
def getPrefixByUri(self, uri) :
try :
return self.m_uriprefix[ uri ]
except KeyError :
return -1
def getPrefix(self) :
try :
return self.sb.getRaw(self.m_prefixuri[ self.m_namespaceUri ])
except KeyError :
return ""
def getName(self) :
if self.m_name == -1 or (self.m_event != START_TAG and self.m_event != END_TAG) :
return ""
return self.sb.getRaw(self.m_name)
def getText(self) :
if self.m_name == -1 or self.m_event != TEXT :
return ""
return self.sb.getRaw(self.m_name)
def getNamespacePrefix(self, pos) :
prefix = self.m_prefixuriL[ pos ][0]
return self.sb.getRaw( prefix )
def getNamespaceUri(self, pos) :
uri = self.m_prefixuriL[ pos ][1]
return self.sb.getRaw( uri )
def getXMLNS(self) :
buff = ""
i = 0
while 1 :
try :
buff += "xmlns:%s=\"%s\"\n" % ( self.getNamespacePrefix( i ), self.getNamespaceUri( i ) )
except IndexError:
break
i += 1
return buff
def getNamespaceCount(self, pos) :
pass
def getAttributeOffset(self, index) :
# FIXME
if self.m_event != START_TAG :
raise("Current event is not START_TAG.")
offset = index * 5
# FIXME
if offset >= len(self.m_attributes) :
raise("Invalid attribute index")
return offset
def getAttributeCount(self) :
if self.m_event != START_TAG :
return -1
return len(self.m_attributes) / ATTRIBUTE_LENGHT
def getAttributePrefix(self, index) :
offset = self.getAttributeOffset(index)
uri = self.m_attributes[offset+ATTRIBUTE_IX_NAMESPACE_URI]
prefix = self.getPrefixByUri( uri )
if prefix == -1 :
return ""
return self.sb.getRaw( prefix )
def getAttributeName(self, index) :
offset = self.getAttributeOffset(index)
name = self.m_attributes[offset+ATTRIBUTE_IX_NAME]
if name == -1 :
return ""
return self.sb.getRaw( name )
def getAttributeValueType(self, index) :
offset = self.getAttributeOffset(index)
return self.m_attributes[offset+ATTRIBUTE_IX_VALUE_TYPE]
def getAttributeValueData(self, index) :
offset = self.getAttributeOffset(index)
return self.m_attributes[offset+ATTRIBUTE_IX_VALUE_DATA]
def getAttributeValue(self, index) :
offset = self.getAttributeOffset(index)
valueType = self.m_attributes[offset+ATTRIBUTE_IX_VALUE_TYPE]
if valueType == TYPE_STRING :
valueString = self.m_attributes[offset+ATTRIBUTE_IX_VALUE_STRING]
return self.sb.getRaw( valueString )
# WIP
return ""
#int valueData=m_attributes[offset+ATTRIBUTE_IX_VALUE_DATA];
#return TypedValue.coerceToString(valueType,valueData);
# Android resource value type codes (android.util.TypedValue.TYPE_*).
TYPE_ATTRIBUTE = 2
TYPE_DIMENSION = 5
TYPE_FIRST_COLOR_INT = 28
TYPE_FIRST_INT = 16
TYPE_FLOAT = 4
TYPE_FRACTION = 6
TYPE_INT_BOOLEAN = 18
TYPE_INT_COLOR_ARGB4 = 30
TYPE_INT_COLOR_ARGB8 = 28
TYPE_INT_COLOR_RGB4 = 31
TYPE_INT_COLOR_RGB8 = 29
TYPE_INT_DEC = 16
TYPE_INT_HEX = 17
TYPE_LAST_COLOR_INT = 31
TYPE_LAST_INT = 31
TYPE_NULL = 0
TYPE_REFERENCE = 1
TYPE_STRING = 3
# Lookup tables for decoding TYPE_DIMENSION / TYPE_FRACTION complex values:
# radix multipliers and unit suffixes indexed by bits of the raw word.
RADIX_MULTS = [ 0.00390625, 3.051758E-005, 1.192093E-007, 4.656613E-010 ]
DIMENSION_UNITS = [ "px","dip","sp","pt","in","mm","","" ]
FRACTION_UNITS = [ "%","%p","","","","","","" ]
# Low 4 bits of a complex value select the unit.
COMPLEX_UNIT_MASK = 15
class AXMLPrinter :
    """Converts a binary AXML buffer into textual XML using AXMLParser.

    The whole document is rendered eagerly in __init__; retrieve the
    result with getBuff().
    """
    def __init__(self, raw_buff) :
        self.axml = AXMLParser( raw_buff )
        # Namespace declarations are emitted once, on the first start tag.
        self.xmlns = False
        self.buff = ""
        # Drive the pull parser to END_DOCUMENT, appending text as we go.
        while 1 :
            _type = self.axml.next()
            # print "tagtype = ", _type
            if _type == START_DOCUMENT :
                self.buff += "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n"
            elif _type == START_TAG :
                self.buff += "<%s%s\n" % ( self.getPrefix( self.axml.getPrefix() ), self.axml.getName() )
                # FIXME : use namespace
                if self.xmlns == False :
                    self.buff += self.axml.getXMLNS()
                    self.xmlns = True
                for i in range(0, self.axml.getAttributeCount()) :
                    self.buff += "%s%s=\"%s\"\n" % ( self.getPrefix(
                        self.axml.getAttributePrefix(i) ), self.axml.getAttributeName(i), self._escape( self.getAttributeValue( i ) ) )
                self.buff += ">\n"
            elif _type == END_TAG :
                self.buff += "</%s%s>\n" % ( self.getPrefix( self.axml.getPrefix() ), self.axml.getName() )
            elif _type == TEXT :
                self.buff += "%s\n" % self.axml.getText()
            elif _type == END_DOCUMENT :
                break
    # pleed patch
    def _escape(self, s) :
        # Escape XML special characters in an attribute value.
        # NOTE(review): '&' is first replaced with '&amp;' and the result is
        # then passed through escape(), which presumably escapes '&' again
        # (yielding '&amp;amp;') — verify against the escape() implementation
        # imported elsewhere in this file.
        s = s.replace("&","&amp;")
        s = s.replace('"',"&quot;")
        s = s.replace("'","&apos;")
        s = s.replace("<","&lt;")
        s = s.replace(">","&gt;")
        return escape(s)
    def getBuff(self) :
        """Return the rendered XML document as UTF-8 bytes."""
        return self.buff.encode("utf-8")
    def getPrefix(self, prefix) :
        # Render a namespace prefix with trailing ':' ('' when absent).
        if prefix == None or len(prefix) == 0 :
            return ""
        return prefix + ":"
    def getAttributeValue(self, index) :
        """Decode attribute *index* into its textual XML representation
        according to its TYPE_* code."""
        _type = self.axml.getAttributeValueType(index)
        _data = self.axml.getAttributeValueData(index)
        #print _type, _data
        if _type == TYPE_STRING :
            return self.axml.getAttributeValue( index )
        elif _type == TYPE_ATTRIBUTE :
            return "?%s%08X" % (self.getPackage(_data), _data)
        elif _type == TYPE_REFERENCE :
            return "@%s%08X" % (self.getPackage(_data), _data)
        # WIP
        elif _type == TYPE_FLOAT :
            # Reinterpret the raw 32-bit word as an IEEE-754 float.
            return "%f" % unpack("=f", pack("=L", _data))[0]
        elif _type == TYPE_INT_HEX :
            return "0x%08X" % _data
        elif _type == TYPE_INT_BOOLEAN :
            if _data == 0 :
                return "false"
            return "true"
        elif _type == TYPE_DIMENSION :
            return "%f%s" % (self.complexToFloat(_data), DIMENSION_UNITS[_data & COMPLEX_UNIT_MASK])
        elif _type == TYPE_FRACTION :
            return "%f%s" % (self.complexToFloat(_data), FRACTION_UNITS[_data & COMPLEX_UNIT_MASK])
        elif _type >= TYPE_FIRST_COLOR_INT and _type <= TYPE_LAST_COLOR_INT :
            return "#%08X" % _data
        elif _type >= TYPE_FIRST_INT and _type <= TYPE_LAST_INT :
            return "%d" % androconf.long2int( _data )
        # Unknown type: dump raw data and type for debugging.
        return "<0x%X, type 0x%02X>" % (_data, _type)
    def complexToFloat(self, xcomplex) :
        # Decode Android "complex" fixed-point: mantissa * radix multiplier.
        return (float)(xcomplex & 0xFFFFFF00)*RADIX_MULTS[(xcomplex>>4) & 3];
    def getPackage(self, id) :
        # Resource IDs with top byte 0x01 live in the android package.
        if id >> 24 == 1 :
            return "android:"
        return ""
|
xtiankisutsa/MARA_Framework
|
tools/androwarn/androguard/core/bytecodes/apk.py
|
Python
|
lgpl-3.0
| 30,879
|
# vim:expandtab:autoindent:tabstop=4:shiftwidth=4:filetype=python:textwidth=0:
# License: GPL2 or later see COPYING
import os.path
from mockbuild.trace_decorator import traceLog, getLog
from mockbuild import util
requires_api_version = "1.1"
class CompressLogsPlugin(object):
    """Compress logs in resultdir."""
    @traceLog()
    def __init__(self, plugins, conf, buildroot):
        # Cache collaborators and the configured compression command,
        # then register ourselves for the post-build hook.
        self.buildroot = buildroot
        self.config = buildroot.config
        self.state = buildroot.state
        self.conf = conf
        self.command = self.conf['command']
        plugins.add_hook("postbuild", self._compress_logs)
        getLog().info("compress_logs: initialized")
    @traceLog()
    def _compress_logs(self):
        """Run the configured compressor over each well-known log file
        that exists in the result directory."""
        log = getLog()
        for log_name in ('root.log', 'build.log', 'state.log'):
            log_path = os.path.join(self.buildroot.resultdir, log_name)
            if not os.path.exists(log_path):
                continue
            cmd = "{0} {1}".format(self.command, log_path)
            log.debug("Running {0}".format(cmd))
            util.do(cmd, shell=True)
def init(plugins, compress_conf, buildroot):
    """Mock plugin entry point; constructing the plugin registers its hooks."""
    CompressLogsPlugin(plugins, compress_conf, buildroot)
|
heysion/mock_clone
|
py/mockbuild/plugins/compress_logs.py
|
Python
|
gpl-2.0
| 1,192
|
# Greg Suner and Alex Ciarmella
# Concrete Player Class
import Player
import Message
class GSACPlayer(Player.Player):
    """Concrete RPS player that counters the opponent's most frequent move."""
    def __init__(self):
        # Initialize the base player, then our own per-match state.
        Player.Player.__init__(self)
        self.reset()
        self.name = "Alex and Greg"
    def play(self):
        # Delegate move selection to the strategy, fed with observed history.
        return RpsPlayingStrategy.play(self.opponents_moves)
    def reset(self):
        # Forget everything seen in previous matches.
        self.opponents_moves = []
    def get_name(self):
        return self.name
    def set_name(self, playername):
        self.name = playername
    def notify(self, msg):
        """Track the opponent via notifications.

        Match-start wipes our history; round-end appends the opponent's
        move so play() can exploit it.  Messages for other players are
        ignored.
        """
        if msg.is_match_start_message():
            participants = msg.get_players()
            if participants[0] == self or participants[1] == self:
                self.reset()
            return
        if not msg.is_round_end_message():
            return
        participants = msg.get_players()
        # Only react to rounds we actually took part in.
        if not (participants[0] == self or participants[1] == self):
            return
        # info is ((move_p0, move_p1), result), e.g. ((1, 0), 1) meaning
        # player 1 played paper, player 2 rock, and player 1 won.
        moves, _result = msg.get_info()
        # RPS is two-player: whichever slot isn't us is the opponent.
        rival = 1 if participants[0] == self else 0
        self.opponents_moves.append(moves[rival])
class RpsPlayingStrategy(object):
    """Chooses the next move by countering the opponent's most frequent throw.

    Move encoding: 0 = rock, 1 = paper, 2 = scissors.
    """
    @staticmethod
    def play(pastmoves):
        """Return the move that beats the opponent's most common past move.

        pastmoves -- list of the opponent's previous moves (ints 0/1/2).
        Returns 0 (rock) when there is no history or no strict favourite.

        Fixes vs. the previous version: the old for/else returned 0
        unconditionally (the loop never breaks, so the else always ran),
        the counter mapping was inverted (rock-most must be answered with
        paper, scissors-most with rock), and play() was called unbound on
        the class, so it is now a staticmethod.
        """
        # No history yet: nothing to exploit, throw rock.
        if not pastmoves:
            return 0
        rock = pastmoves.count(0)
        paper = pastmoves.count(1)
        scissors = pastmoves.count(2)
        # Counter mapping: paper beats rock, scissors beats paper,
        # rock beats scissors.
        if rock > paper and rock > scissors:
            return 1
        if paper > rock and paper > scissors:
            return 2
        if scissors > rock and scissors > paper:
            return 0
        # Tie for most frequent: arbitrary fallback, throw rock.
        return 0
# Test driver
# Run by typing "python3 GSACPlayer.py"
if __name__ == "__main__":
    # Build two players and simulate one round: match start, round start,
    # our move, then a round-end notification with fabricated payloads.
    player = GSACPlayer()
    opponent = GSACPlayer()
    players = [opponent,player]
    # Fabricated ((moves), result) data; only fakemoves/fakeresult are used.
    fakeinfo = ((0,1),1)
    fakeresult = 1
    fakemoves = (1,2)
    player.notify(Message.Message.get_match_start_message(players))
    player.notify(Message.Message.get_round_start_message(players))
    move = player.play()
    print ("Move played: ", move)
    player.notify(Message.Message.get_round_end_message(players,fakemoves,fakeresult))
|
geebzter/game-framework
|
GSACPlayer.py
|
Python
|
apache-2.0
| 3,258
|
import unittest
from typing import List
import utils
# O(V+E) time. O(V) space. Graph, DFS, backtracking.
class Solution:
    def allPathsSourceTarget(self, graph: List[List[int]]) -> List[List[int]]:
        """Enumerate every path from node 0 to node len(graph)-1.

        graph is an adjacency list of a DAG; paths are returned in DFS
        order following each node's adjacency order.
        """
        target = len(graph) - 1
        seen = [False] * len(graph)
        trail = [0]
        result = []

        def explore(node):
            # Reached the sink: snapshot the current path.
            if node == target:
                result.append(trail.copy())
                return
            for nxt in graph[node]:
                if seen[nxt]:
                    continue
                # Mark, descend, then backtrack.
                seen[nxt] = True
                trail.append(nxt)
                explore(nxt)
                trail.pop()
                seen[nxt] = False

        explore(0)
        return result
class Test(unittest.TestCase):
    # Delegates to the repository's shared test harness (project-local
    # `utils` module), which exercises Solution against this file's cases.
    def test(self):
        utils.test(self, __file__, Solution)
if __name__ == '__main__':
    unittest.main()
|
chrisxue815/leetcode_python
|
problems/test_0797_dfs.py
|
Python
|
unlicense
| 831
|
from TM1py.Services.AnnotationService import AnnotationService
from TM1py.Services.ApplicationService import ApplicationService
from TM1py.Services.CellService import CellService
from TM1py.Services.ChoreService import ChoreService
from TM1py.Services.CubeService import CubeService
from TM1py.Services.DimensionService import DimensionService
from TM1py.Services.ElementService import ElementService
from TM1py.Services.GitService import GitService
from TM1py.Services.HierarchyService import HierarchyService
from TM1py.Services.MonitoringService import MonitoringService
from TM1py.Services.PowerBiService import PowerBiService
from TM1py.Services.ProcessService import ProcessService
from TM1py.Services.RestService import RestService
from TM1py.Services.SandboxService import SandboxService
from TM1py.Services.SecurityService import SecurityService
from TM1py.Services.ServerService import ServerService
from TM1py.Services.SubsetService import SubsetService
from TM1py.Services.ViewService import ViewService
from TM1py.Services.GitService import GitService
from TM1py.Services.TM1Service import TM1Service
|
OLAPLINE/TM1py
|
TM1py/Services/__init__.py
|
Python
|
mit
| 1,114
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Finds desktop browsers that can be controlled by telemetry."""
import logging
import os
import subprocess
import sys
from telemetry.core import exceptions
from telemetry.core import platform as platform_module
from telemetry.internal.backends.chrome import desktop_browser_backend
from telemetry.internal.browser import browser
from telemetry.internal.browser import possible_browser
from telemetry.internal.platform import desktop_device
from telemetry.internal.util import path
class PossibleDesktopBrowser(possible_browser.PossibleBrowser):
  """A desktop browser that can be controlled."""

  def __init__(self, browser_type, finder_options, executable, flash_path,
               is_content_shell, browser_directory, is_local_build=False):
    # target_os is derived from the host: desktop browsers run locally.
    target_os = sys.platform.lower()
    # Content shell does not support extensions, hence the negation.
    super(PossibleDesktopBrowser, self).__init__(
        browser_type, target_os, not is_content_shell)
    assert browser_type in FindAllBrowserTypes(finder_options), (
        'Please add %s to desktop_browser_finder.FindAllBrowserTypes' %
         browser_type)
    self._local_executable = executable
    self._flash_path = flash_path
    self._is_content_shell = is_content_shell
    self._browser_directory = browser_directory
    # True for browsers built in the local checkout (used for default pick).
    self.is_local_build = is_local_build

  def __repr__(self):
    return 'PossibleDesktopBrowser(type=%s, executable=%s, flash=%s)' % (
        self.browser_type, self._local_executable, self._flash_path)

  def _InitPlatformIfNeeded(self):
    # Lazily bind the host platform; Create() is the only caller.
    if self._platform:
      return

    self._platform = platform_module.GetHostPlatform()

    # pylint: disable=W0212
    self._platform_backend = self._platform._platform_backend

  def Create(self, finder_options):
    """Instantiate a controllable Browser for this executable.

    Drops the Flash path (with a warning) when the plugin binary is
    missing rather than failing outright.
    """
    if self._flash_path and not os.path.exists(self._flash_path):
      logging.warning(
          'Could not find Flash at %s. Continuing without Flash.\n'
          'To run with Flash, check it out via http://go/read-src-internal',
          self._flash_path)
      self._flash_path = None

    self._InitPlatformIfNeeded()

    browser_backend = desktop_browser_backend.DesktopBrowserBackend(
        self._platform_backend,
        finder_options.browser_options, self._local_executable,
        self._flash_path, self._is_content_shell, self._browser_directory,
        output_profile_path=finder_options.output_profile_path,
        extensions_to_load=finder_options.extensions_to_load)
    return browser.Browser(
        browser_backend, self._platform_backend, self._credentials_path)

  def SupportsOptions(self, finder_options):
    # Content shell cannot load extensions; reject such configurations.
    if (len(finder_options.extensions_to_load) != 0) and self._is_content_shell:
      return False
    return True

  def UpdateExecutableIfNeeded(self):
    # Local desktop binaries are never auto-updated.
    pass

  def last_modification_time(self):
    # mtime of the executable, or -1 when it does not exist; used by
    # SelectDefaultBrowser to pick the freshest local build.
    if os.path.exists(self._local_executable):
      return os.path.getmtime(self._local_executable)
    return -1
def SelectDefaultBrowser(possible_browsers):
  """Pick the most recently built local browser, or None when there is none."""
  # Sort every candidate by build timestamp first, then keep only local
  # builds; taking the last element yields the newest (sorting before
  # filtering mirrors the original tie-breaking exactly).
  by_mtime = sorted(possible_browsers,
                    key=lambda candidate: candidate.last_modification_time())
  local_builds = [candidate for candidate in by_mtime
                  if candidate.is_local_build]
  return local_builds[-1] if local_builds else None
def CanFindAvailableBrowsers():
  """Desktop browsers can be found on any host OS except ChromeOS."""
  return platform_module.GetHostPlatform().GetOSName() != 'chromeos'
def CanPossiblyHandlePath(target_path):
  """Heuristic check that target_path could name a browser binary here:
  extensionless on mac/linux, '.exe' on windows, False elsewhere."""
  extension = os.path.splitext(target_path.lower())[1]
  if sys.platform.startswith('win'):
    return extension == '.exe'
  if sys.platform == 'darwin' or sys.platform.startswith('linux'):
    return not extension
  return False
def FindAllBrowserTypes(_):
  """All browser_type names this finder can produce (order preserved)."""
  build_types = ['release', 'release_x64', 'debug', 'debug_x64', 'default']
  channels = ['stable', 'beta', 'dev', 'canary']
  content_shells = ['content-shell-' + flavor for flavor in
                    ('debug', 'debug_x64', 'release', 'release_x64', 'default')]
  return (['exact', 'reference'] + build_types + channels +
          content_shells + ['system'])
def FindAllAvailableBrowsers(finder_options, device):
  """Finds all the desktop browsers available on this machine.

  Scans local chromium build output, reference builds, and system install
  locations for the current platform.  Returns a (possibly empty) list of
  PossibleDesktopBrowser instances; returns [] when the device is not a
  desktop device, browsers cannot be found on this OS, or (on linux)
  browsers were found but no X11 DISPLAY / ozone platform is available.
  """
  if not isinstance(device, desktop_device.DesktopDevice):
    return []

  browsers = []

  if not CanFindAvailableBrowsers():
    return []

  has_x11_display = True
  if (sys.platform.startswith('linux') and
      os.getenv('DISPLAY') is None):
    has_x11_display = False

  # Look for a browser in the standard chrome build locations.
  if finder_options.chrome_root:
    chrome_root = finder_options.chrome_root
  else:
    chrome_root = path.GetChromiumSrcDir()

  flash_bin_dir = os.path.join(
      chrome_root, 'third_party', 'adobe', 'flash', 'binaries', 'ppapi')

  # Platform-specific binary names and Flash plugin paths.
  chromium_app_names = []
  if sys.platform == 'darwin':
    chromium_app_names.append('Chromium.app/Contents/MacOS/Chromium')
    chromium_app_names.append('Google Chrome.app/Contents/MacOS/Google Chrome')
    content_shell_app_name = 'Content Shell.app/Contents/MacOS/Content Shell'
    flash_bin = 'PepperFlashPlayer.plugin'
    flash_path = os.path.join(flash_bin_dir, 'mac', flash_bin)
    flash_path_64 = os.path.join(flash_bin_dir, 'mac_64', flash_bin)
  elif sys.platform.startswith('linux'):
    chromium_app_names.append('chrome')
    content_shell_app_name = 'content_shell'
    flash_bin = 'libpepflashplayer.so'
    flash_path = os.path.join(flash_bin_dir, 'linux', flash_bin)
    flash_path_64 = os.path.join(flash_bin_dir, 'linux_x64', flash_bin)
  elif sys.platform.startswith('win'):
    chromium_app_names.append('chrome.exe')
    content_shell_app_name = 'content_shell.exe'
    flash_bin = 'pepflashplayer.dll'
    flash_path = os.path.join(flash_bin_dir, 'win', flash_bin)
    flash_path_64 = os.path.join(flash_bin_dir, 'win_x64', flash_bin)
  else:
    raise Exception('Platform not recognized')

  # Add the explicit browser executable if given and we can handle it.
  if (finder_options.browser_executable and
      CanPossiblyHandlePath(finder_options.browser_executable)):
    normalized_executable = os.path.expanduser(
        finder_options.browser_executable)
    if path.IsExecutable(normalized_executable):
      browser_directory = os.path.dirname(finder_options.browser_executable)
      browsers.append(PossibleDesktopBrowser('exact', finder_options,
                                             normalized_executable, flash_path,
                                             False, browser_directory))
    else:
      raise exceptions.PathMissingError(
          '%s specified by --browser-executable does not exist' %
          normalized_executable)

  def AddIfFound(browser_type, build_dir, type_dir, app_name, content_shell):
    # Register <chrome_root>/<build_dir>/<type_dir>/<app_name> when it is
    # an executable; returns whether it was added.
    browser_directory = os.path.join(chrome_root, build_dir, type_dir)
    app = os.path.join(browser_directory, app_name)
    if path.IsExecutable(app):
      is_64 = browser_type.endswith('_x64')
      browsers.append(PossibleDesktopBrowser(
          browser_type, finder_options, app,
          flash_path_64 if is_64 else flash_path,
          content_shell, browser_directory, is_local_build=True))
      return True
    return False

  # Add local builds
  for build_dir, build_type in path.GetBuildDirectories():
    for chromium_app_name in chromium_app_names:
      AddIfFound(build_type.lower(), build_dir, build_type,
                 chromium_app_name, False)
    AddIfFound('content-shell-' + build_type.lower(), build_dir, build_type,
               content_shell_app_name, True)

  reference_build_root = os.path.join(
      chrome_root, 'chrome', 'tools', 'test', 'reference_build')

  # Mac-specific options.
  if sys.platform == 'darwin':
    mac_canary_root = '/Applications/Google Chrome Canary.app/'
    mac_canary = mac_canary_root + 'Contents/MacOS/Google Chrome Canary'
    mac_system_root = '/Applications/Google Chrome.app'
    mac_system = mac_system_root + '/Contents/MacOS/Google Chrome'
    mac_reference_root = reference_build_root + '/chrome_mac/Google Chrome.app/'
    mac_reference = mac_reference_root + 'Contents/MacOS/Google Chrome'
    if path.IsExecutable(mac_canary):
      browsers.append(PossibleDesktopBrowser('canary', finder_options,
                                             mac_canary, None, False,
                                             mac_canary_root))

    if path.IsExecutable(mac_system):
      browsers.append(PossibleDesktopBrowser('system', finder_options,
                                             mac_system, None, False,
                                             mac_system_root))

    if path.IsExecutable(mac_reference):
      browsers.append(PossibleDesktopBrowser('reference', finder_options,
                                             mac_reference, None, False,
                                             mac_reference_root))

  # Linux specific options.
  if sys.platform.startswith('linux'):
    versions = {
        'system': ('google-chrome',
                   os.path.split(os.path.realpath('google-chrome'))[0]),
        'stable': ('google-chrome-stable', '/opt/google/chrome'),
        'beta': ('google-chrome-beta', '/opt/google/chrome-beta'),
        'dev': ('google-chrome-unstable', '/opt/google/chrome-unstable')
    }

    for version, (name, root) in versions.iteritems():
      # Probe via `<name> --version`; a zero exit status means installed.
      found = False
      try:
        with open(os.devnull, 'w') as devnull:
          found = subprocess.call([name, '--version'],
                                  stdout=devnull, stderr=devnull) == 0
      except OSError:
        pass
      if found:
        browsers.append(PossibleDesktopBrowser(version, finder_options, name,
                                               None, False, root))

    linux_reference_root = os.path.join(reference_build_root, 'chrome_linux')
    linux_reference = os.path.join(linux_reference_root, 'chrome')
    if path.IsExecutable(linux_reference):
      browsers.append(PossibleDesktopBrowser('reference', finder_options,
                                             linux_reference, None, False,
                                             linux_reference_root))

  # Win32-specific options.
  if sys.platform.startswith('win'):
    app_paths = (
        ('system', os.path.join('Google', 'Chrome', 'Application')),
        ('canary', os.path.join('Google', 'Chrome SxS', 'Application')),
        ('reference', os.path.join(reference_build_root, 'chrome_win')),
    )
    for browser_name, app_path in app_paths:
      for chromium_app_name in chromium_app_names:
        # Fix: the previous version reassigned app_path inside this loop,
        # corrupting the base directory for subsequent app names and
        # outer iterations.  Use a fresh name per candidate instead.
        candidate = path.FindInstalledWindowsApplication(
            os.path.join(app_path, chromium_app_name))
        if candidate:
          browsers.append(PossibleDesktopBrowser(
              browser_name, finder_options, candidate,
              None, False, os.path.dirname(candidate)))

  has_ozone_platform = False
  for arg in finder_options.browser_options.extra_browser_args:
    if "--ozone-platform" in arg:
      has_ozone_platform = True

  if len(browsers) and not has_x11_display and not has_ozone_platform:
    logging.warning(
      'Found (%s), but you do not have a DISPLAY environment set.' %
      ','.join([b.browser_type for b in browsers]))
    return []

  return browsers
|
Chilledheart/chromium
|
tools/telemetry/telemetry/internal/backends/chrome/desktop_browser_finder.py
|
Python
|
bsd-3-clause
| 11,222
|
# -*- coding: utf-8 -*-
"""
ast
~~~
The `ast` module helps Python applications to process trees of the Python
abstract syntax grammar. The abstract syntax itself might change with
each Python release; this module helps to find out programmatically what
the current grammar looks like and allows modifications of it.
An abstract syntax tree can be generated by passing `ast.PyCF_ONLY_AST` as
a flag to the `compile()` builtin function or by using the `parse()`
function from this module. The result will be a tree of objects whose
classes all inherit from `ast.AST`.
A modified abstract syntax tree can be compiled into a Python code object
using the built-in `compile()` function.
Additionally various helper functions are provided that make working with
the trees simpler. The main intention of the helper functions and this
module in general is to provide an easy to use interface for libraries
that work tightly with the python syntax (template engines for example).
:copyright: Copyright 2008 by Armin Ronacher.
:license: Python License.
"""
from _ast import *
from _ast import __version__
def parse(expr, filename='<unknown>', mode='exec'):
    """
    Parse *expr* into an AST node.

    Shorthand for ``compile(expr, filename, mode, PyCF_ONLY_AST)``.
    """
    tree = compile(expr, filename, mode, PyCF_ONLY_AST)
    return tree
def literal_eval(node_or_string):
    """
    Safely evaluate an expression node or a string containing a Python
    expression.  The string or node provided may only consist of the following
    Python literal structures: strings, numbers, tuples, lists, dicts, booleans,
    and None.

    NOTE: this is the Python 2 implementation — ``basestring`` below does
    not exist on Python 3, and True/False/None are resolved by name here
    because they parse as Name nodes on Python 2.
    """
    _safe_names = {'None': None, 'True': True, 'False': False}
    # Accept either source text or an already-parsed node.
    if isinstance(node_or_string, basestring):
        node_or_string = parse(node_or_string, mode='eval')
    if isinstance(node_or_string, Expression):
        node_or_string = node_or_string.body
    def _convert(node):
        # Recursively rebuild the literal value; anything outside the
        # whitelisted node types is rejected.
        if isinstance(node, Str):
            return node.s
        elif isinstance(node, Num):
            return node.n
        elif isinstance(node, Tuple):
            return tuple(map(_convert, node.elts))
        elif isinstance(node, List):
            return list(map(_convert, node.elts))
        elif isinstance(node, Dict):
            return dict((_convert(k), _convert(v)) for k, v
                        in zip(node.keys, node.values))
        elif isinstance(node, Name):
            if node.id in _safe_names:
                return _safe_names[node.id]
        raise ValueError('malformed string')
    return _convert(node_or_string)
def dump(node, annotate_fields=True, include_attributes=False):
    """
    Return a formatted dump of the tree in *node*, mainly for debugging.

    Field names and values are shown by default; pass
    ``annotate_fields=False`` to omit the names.  Line numbers and column
    offsets are omitted unless ``include_attributes`` is True.
    """
    def _render(n):
        if isinstance(n, AST):
            pairs = [(name, _render(value)) for name, value in iter_fields(n)]
            if annotate_fields:
                inner = ', '.join('%s=%s' % pair for pair in pairs)
            else:
                inner = ', '.join(text for _, text in pairs)
            out = '%s(%s' % (n.__class__.__name__, inner)
            if include_attributes and n._attributes:
                # Separate attributes from fields with ', ' (or a space
                # when there were no fields at all).
                out += ', ' if pairs else ' '
                out += ', '.join('%s=%s' % (attr, _render(getattr(n, attr)))
                                 for attr in n._attributes)
            return out + ')'
        if isinstance(n, list):
            return '[%s]' % ', '.join(_render(item) for item in n)
        return repr(n)
    if not isinstance(node, AST):
        raise TypeError('expected AST, got %r' % node.__class__.__name__)
    return _render(node)
def copy_location(new_node, old_node):
    """
    Copy the source position (`lineno` and `col_offset` attributes) from
    *old_node* onto *new_node* where both nodes support it, and return
    *new_node*.
    """
    for attr in ('lineno', 'col_offset'):
        supported = (attr in old_node._attributes and
                     attr in new_node._attributes)
        if supported and hasattr(old_node, attr):
            setattr(new_node, attr, getattr(old_node, attr))
    return new_node
def fix_missing_locations(node):
    """
    Recursively fill in missing ``lineno``/``col_offset`` attributes,
    inheriting each missing value from the nearest ancestor that has one
    (starting from 1/0 at the root).  Returns *node*.

    compile() requires these attributes on every node that supports them,
    which is tedious to set by hand on generated trees.
    """
    def _propagate(n, lineno, col_offset):
        if 'lineno' in n._attributes:
            if hasattr(n, 'lineno'):
                # Node has its own position: children inherit from it.
                lineno = n.lineno
            else:
                n.lineno = lineno
        if 'col_offset' in n._attributes:
            if hasattr(n, 'col_offset'):
                col_offset = n.col_offset
            else:
                n.col_offset = col_offset
        for child in iter_child_nodes(n):
            _propagate(child, lineno, col_offset)
    _propagate(node, 1, 0)
    return node
def increment_lineno(node, n=1):
    """
    Increment the line number of each node in the tree starting at *node* by *n*.
    This is useful to "move code" to a different location in a file.

    Fix: walk() yields *node* itself as its first result, so the previous
    version's extra pre-loop increment shifted the root node by 2*n instead
    of n.  The loop alone covers every node exactly once.
    """
    for child in walk(node):
        if 'lineno' in child._attributes:
            child.lineno = getattr(child, 'lineno', 0) + n
    return node
def iter_fields(node):
    """
    Yield a ``(fieldname, value)`` pair for every field in ``node._fields``
    that is actually present on *node*; silently skip absent fields.
    """
    for name in node._fields:
        if hasattr(node, name):
            yield name, getattr(node, name)
def iter_child_nodes(node):
    """
    Yield all direct child nodes of *node*: every field that is itself a
    node, plus every node inside list-valued fields.
    """
    for _, value in iter_fields(node):
        if isinstance(value, AST):
            yield value
            continue
        if isinstance(value, list):
            for item in value:
                if isinstance(item, AST):
                    yield item
def get_docstring(node, clean=True):
    """
    Return the docstring for the given node, or None when there is none.
    Raises TypeError for node types that cannot carry docstrings.
    When *clean* is true the text is normalized with inspect.cleandoc().
    """
    if not isinstance(node, (FunctionDef, ClassDef, Module)):
        raise TypeError("%r can't have docstrings" % node.__class__.__name__)
    body = node.body
    if not (body and isinstance(body[0], Expr) and
            isinstance(body[0].value, Str)):
        return None
    text = body[0].value.s
    if clean:
        import inspect
        text = inspect.cleandoc(text)
    return text
def walk(node):
    """
    Yield *node* and, breadth-first, all of its descendant nodes in no
    specified order.  Useful for in-place modification of nodes when the
    surrounding context does not matter.
    """
    from collections import deque
    pending = deque([node])
    while pending:
        current = pending.popleft()
        pending.extend(iter_child_nodes(current))
        yield current
class NodeVisitor(object):
    """
    Base class that walks an abstract syntax tree, calling a visitor
    function for every node found; the visitor's return value is forwarded
    by `visit`.

    Subclasses add methods named ``visit_<ClassName>`` (e.g. a `TryFinally`
    node dispatches to `visit_TryFinally`); nodes without a matching method
    fall back to `generic_visit`.  The dispatch scheme can be changed by
    overriding `visit`.

    Do not use `NodeVisitor` to apply changes to nodes during traversal —
    use `NodeTransformer` for that.
    """
    def visit(self, node):
        """Dispatch *node* to its visit_<ClassName> handler (or fallback)."""
        handler = getattr(self, 'visit_' + node.__class__.__name__,
                          self.generic_visit)
        return handler(node)
    def generic_visit(self, node):
        """Called if no explicit visitor function exists for a node."""
        for _, value in iter_fields(node):
            if isinstance(value, AST):
                self.visit(value)
            elif isinstance(value, list):
                for item in value:
                    if isinstance(item, AST):
                        self.visit(item)
class NodeTransformer(NodeVisitor):
    """
    A :class:`NodeVisitor` subclass that walks the abstract syntax tree and
    allows modification of nodes.
    The `NodeTransformer` will walk the AST and use the return value of the
    visitor methods to replace or remove the old node. If the return value of
    the visitor method is ``None``, the node will be removed from its location,
    otherwise it is replaced with the return value. The return value may be the
    original node in which case no replacement takes place.
    Here is an example transformer that rewrites all occurrences of name lookups
    (``foo``) to ``data['foo']``::
       class RewriteName(NodeTransformer):
           def visit_Name(self, node):
               return copy_location(Subscript(
                   value=Name(id='data', ctx=Load()),
                   slice=Index(value=Str(s=node.id)),
                   ctx=node.ctx
               ), node)
    Keep in mind that if the node you're operating on has child nodes you must
    either transform the child nodes yourself or call the :meth:`generic_visit`
    method for the node first.
    For nodes that were part of a collection of statements (that applies to all
    statement nodes), the visitor may also return a list of nodes rather than
    just a single node.
    Usually you use the transformer like this::
       node = YourTransformer().visit(node)
    """
    def generic_visit(self, node):
        # Rewrite each field in place according to the visitor results.
        for field, old_value in iter_fields(node):
            # Redundant re-fetch of the field value (iter_fields already
            # yielded it); kept for fidelity with the original.
            old_value = getattr(node, field, None)
            if isinstance(old_value, list):
                # Visit every AST item; None drops it, a non-AST result
                # (e.g. a list of statements) is spliced in flat.
                new_values = []
                for value in old_value:
                    if isinstance(value, AST):
                        value = self.visit(value)
                        if value is None:
                            continue
                        elif not isinstance(value, AST):
                            new_values.extend(value)
                            continue
                    new_values.append(value)
                # Mutate the original list in place so the node keeps it.
                old_value[:] = new_values
            elif isinstance(old_value, AST):
                # Single-node field: None removes the attribute entirely,
                # anything else replaces it.
                new_node = self.visit(old_value)
                if new_node is None:
                    delattr(node, field)
                else:
                    setattr(node, field, new_node)
        return node
|
zephyrplugins/zephyr
|
zephyr.plugin.jython/jython2.5.2rc3/Lib/ast.py
|
Python
|
epl-1.0
| 11,347
|
import rospy
import smach
import smach_ros
from std_msgs.msg import Float32, String
import time
from Robosub.msg import ModuleEnableMsg
from SubImageRecognition.msg import ImgRecObject
from utils import dive, turn, forward, strafe, ScanNarrow, move
class NewBuoyTask(smach.State):
    """SMACH state that finds the red buoy, aligns with it, and rams it.

    Outcomes:
        succeeded -- the NewBuoyTask module reported completion
        preempted -- a preempt was requested by the state machine
        timeout   -- the countdown expired before the buoy was hit
    """

    def __init__(self):
        # BUG FIX: was `super(...)._init__(...)` (single leading underscore),
        # which raises AttributeError instead of initializing smach.State.
        super(NewBuoyTask, self).__init__(outcomes=['succeeded', 'preempted', 'timeout'])
        self.buoyHit = False
        self.timeout = 100
        # BUG FIX: objectLost was previously first assigned inside execute()'s
        # loop *after* being read, so the first iteration could raise
        # AttributeError if no img_rec message had arrived yet. Assume the
        # buoy is lost until the first sighting.
        self.objectLost = True

    def extendTimeout(self):
        # Keep at least 30 iterations on the clock while we can still see the buoy.
        if(self.timeout < 30):
            self.timeout = 30

    def execute(self, userdata):
        """Main state loop: enable the buoy module, then scan/align/advance
        until the buoy is hit, we are preempted, or we time out."""
        self.publisher = rospy.Publisher('/Module_Enable', ModuleEnableMsg)
        self.subscriber = rospy.Subscriber('/Task_Completion', String, self.taskCompleted)
        self.buoySubscriber = rospy.Subscriber('img_rec/buoys/red', ImgRecObject, self.buoyLoc)
        self.depthSubscriber = rospy.Subscriber('Sub_Depth', Float32, self.depth)
        msg = ModuleEnableMsg()
        msg.Module = 'NewBuoyTask'
        msg.State = True
        self.publisher.publish(msg)
        # keep trying until preempted, success, or timeout
        while self.timeout > 0:
            if self.buoyHit:
                self.beDone()
                return 'succeeded'
            if self.preempt_requested():
                self.beDone()
                self.service_preempt()
                return 'preempted'
            if self.objectLost:
                self.scanForBuoy()
            else:
                self.descendToBuoy()
                self.alignWithBouy()
                self.advance()
                self.extendTimeout()
                # we decide the object is lost until we receive another message
                self.objectLost = True
            rospy.sleep(1)
            self.timeout -= 1
            self.objectLost = True
        # we timed out
        self.beDone()
        return 'timeout'

    def taskCompleted(self, msg):
        # BUG FIX: subscriber callbacks receive the message as an argument,
        # but the `msg` parameter was missing, so this raised NameError.
        if msg.data == 'NewBuoyTask':
            self.buoyHit = True

    def beDone(self):
        # Disable the buoy module before leaving the state.
        msg = ModuleEnableMsg()
        msg.Module = 'NewBuoyTask'
        msg.State = False
        self.publisher.publish(msg)

    def descendToBuoy(self):
        # dive to buoy depth (image y offset in pixels -> vertical command)
        if(self.lastKnownBuoyLoc.center_y > 50):
            dive(-.1)
        elif(self.lastKnownBuoyLoc.center_y < -50):
            dive(.1)
        else:
            dive(0)

    def scanForBuoy(self):
        # we don't know where it is so let's try right
        # we'll go into a circle til we time out or find the buoy
        dive(0)
        strafe(0)
        turn(.1)

    def alignWithBouy(self):
        # align x (image x offset in pixels -> lateral command)
        if(self.lastKnownBuoyLoc.center_x > 50):
            strafe(-.1)
        elif(self.lastKnownBuoyLoc.center_x < -50):
            strafe(.1)
        else:
            strafe(0)

    def advance(self):
        # if it's taking up the whole camera it's definitely hit or just about to
        # not sure what the whole camera is though.
        if(self.lastKnownBuoyLoc.width < 500):
            forward(.3)
        else:
            forward(0.0)

    def buoyLoc(self, msg):
        # Image-recognition callback: record the sighting and mark it fresh.
        self.objectLost = False
        self.lastKnownBuoyLoc = msg
        self.prevTime = time.time()

    def depth(self, msg):
        # Depth sensor callback.
        self.lastKnownDepth = msg.data
|
USU-Robosub/Gilligan
|
rosWorkspace/Brain/src/tasks/NewBuoyTask.py
|
Python
|
apache-2.0
| 3,363
|
# Any git command can check for the upconvert from bloom -> config_branch
from bloom.config import upconvert_bloom_to_config_branch
# Package-level side effect: migrate legacy 'bloom' branch layout to the
# newer config-branch layout as soon as this package is imported.
upconvert_bloom_to_config_branch()
|
130s/bloom
|
bloom/commands/git/__init__.py
|
Python
|
bsd-3-clause
| 169
|
import datetime
import tables
from sapphire import esd
DATAFILE = 'data.h5'
START = datetime.datetime(2016, 1, 1)
END = datetime.datetime(2016, 1, 2)
# Re-use an already-open HDF5 handle when this script is re-run in the same
# interactive session (e.g. IPython %run), instead of opening it twice.
if 'data' not in globals():
    data = tables.open_file(DATAFILE, 'a')
# Only download station 501's events if they are not in the file yet,
# so re-running the script is cheap and idempotent.
if '/s501' not in data:
    esd.download_data(data, '/s501', 501, START, END)
|
HiSPARC/sapphire
|
doc/scripts/simple-download-with-checks.py
|
Python
|
gpl-3.0
| 305
|
# coding: utf-8
"""
Onshape REST API
The Onshape REST API consumed by all clients. # noqa: E501
The version of the OpenAPI document: 1.113
Contact: api-support@onshape.zendesk.com
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
import sys # noqa: F401
# python 2 and python 3 compatibility library
import six
from onshape_client.oas.api_client import ApiClient
from onshape_client.oas.exceptions import ApiTypeError, ApiValueError
from onshape_client.oas.model_utils import ( # noqa: F401
check_allowed_values,
check_validations,
date,
datetime,
file_type,
int,
none_type,
str,
validate_and_convert_types,
)
from onshape_client.oas.models import bt_drawing_params
from onshape_client.oas.models import bt_document_element_info
from onshape_client.oas.models import bt_translation_request_info
from onshape_client.oas.models import bt_translate_format_params
from onshape_client.oas.models import bt_model_format_info
class DrawingsApi(object):
    """NOTE: This class is auto generated by OpenAPI Generator
    Ref: https://openapi-generator.tech
    Do not edit the class manually.
    """
    # Thin wrapper over ApiClient exposing the Onshape "drawings" endpoints.
    def __init__(self, api_client=None):
        # Fall back to a default-configured client when none is injected.
        if api_client is None:
            api_client = ApiClient()
        self.api_client = api_client
    def __create_drawing_app_element(self, bt_drawing_params, **kwargs):
        """create_drawing_app_element # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.create_drawing_app_element(bt_drawing_params, async_req=True)
        >>> result = thread.get()
        Args:
            bt_drawing_params (bt_drawing_params.BTDrawingParams):
        Keyword Args:
            _return_http_data_only (bool): response data without head status
                code and headers. Default is True.
            _preload_content (bool): if False, the urllib3.HTTPResponse object
                will be returned without reading/decoding response data.
                Default is True.
            _request_timeout (float/tuple): timeout setting for this request. If one
                number provided, it will be total request timeout. It can also
                be a pair (tuple) of (connection, read) timeouts.
                Default is None.
            _check_input_type (bool): specifies if type checking
                should be done one the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done one the data received from the server.
                Default is True.
            _host_index (int): specifies the index of the server
                that we want to use.
                Default is 0.
            async_req (bool): execute request asynchronously
        Returns:
            bt_document_element_info.BTDocumentElementInfo
                If the method is called asynchronously, returns the request
                thread.
        """
        # Normalize the framework keyword args to their defaults.
        kwargs["async_req"] = kwargs.get("async_req", False)
        kwargs["_return_http_data_only"] = kwargs.get(
            "_return_http_data_only", True
        )
        kwargs["_preload_content"] = kwargs.get("_preload_content", True)
        kwargs["_request_timeout"] = kwargs.get("_request_timeout", None)
        kwargs["_check_input_type"] = kwargs.get("_check_input_type", True)
        kwargs["_check_return_type"] = kwargs.get("_check_return_type", True)
        kwargs["_host_index"] = kwargs.get("_host_index", 0)
        kwargs["bt_drawing_params"] = bt_drawing_params
        return self.call_with_http_info(**kwargs)
        # NOTE(review): unreachable as written (after `return`). In the
        # upstream generated file these Endpoint assignments live in
        # __init__, where the __create_* functions are nested closures;
        # the indentation appears to have been lost in this copy. Verify
        # against the OpenAPI Generator output before relying on this file.
        self.create_drawing_app_element = Endpoint(
            settings={
                "response_type": (bt_document_element_info.BTDocumentElementInfo,),
                "auth": ["OAuth2"],
                "endpoint_path": "/api/drawings/create",
                "operation_id": "create_drawing_app_element",
                "http_method": "POST",
                "servers": [],
            },
            params_map={
                "all": ["bt_drawing_params",],
                "required": ["bt_drawing_params",],
                "nullable": [],
                "enum": [],
                "validation": [],
            },
            root_map={
                "validations": {},
                "allowed_values": {},
                "openapi_types": {
                    "bt_drawing_params": (bt_drawing_params.BTDrawingParams,),
                },
                "attribute_map": {},
                "location_map": {"bt_drawing_params": "body",},
                "collection_format_map": {},
            },
            headers_map={
                "accept": ["application/json;charset=UTF-8; qs=0.09"],
                "content_type": ["application/json;charset=UTF-8; qs=0.09"],
            },
            api_client=api_client,
            callable=__create_drawing_app_element,
        )
    def __create_drawing_translation(
        self, did, wv, wvid, eid, bt_translate_format_params, **kwargs
    ):
        """Create Drawing translation # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.create_drawing_translation(did, wv, wvid, eid, bt_translate_format_params, async_req=True)
        >>> result = thread.get()
        Args:
            did (str):
            wv (str):
            wvid (str):
            eid (str):
            bt_translate_format_params (bt_translate_format_params.BTTranslateFormatParams):
        Keyword Args:
            _return_http_data_only (bool): response data without head status
                code and headers. Default is True.
            _preload_content (bool): if False, the urllib3.HTTPResponse object
                will be returned without reading/decoding response data.
                Default is True.
            _request_timeout (float/tuple): timeout setting for this request. If one
                number provided, it will be total request timeout. It can also
                be a pair (tuple) of (connection, read) timeouts.
                Default is None.
            _check_input_type (bool): specifies if type checking
                should be done one the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done one the data received from the server.
                Default is True.
            _host_index (int): specifies the index of the server
                that we want to use.
                Default is 0.
            async_req (bool): execute request asynchronously
        Returns:
            bt_translation_request_info.BTTranslationRequestInfo
                If the method is called asynchronously, returns the request
                thread.
        """
        kwargs["async_req"] = kwargs.get("async_req", False)
        kwargs["_return_http_data_only"] = kwargs.get(
            "_return_http_data_only", True
        )
        kwargs["_preload_content"] = kwargs.get("_preload_content", True)
        kwargs["_request_timeout"] = kwargs.get("_request_timeout", None)
        kwargs["_check_input_type"] = kwargs.get("_check_input_type", True)
        kwargs["_check_return_type"] = kwargs.get("_check_return_type", True)
        kwargs["_host_index"] = kwargs.get("_host_index", 0)
        # Path parameters for /api/drawings/d/{did}/{wv}/{wvid}/e/{eid}/translations
        kwargs["did"] = did
        kwargs["wv"] = wv
        kwargs["wvid"] = wvid
        kwargs["eid"] = eid
        kwargs["bt_translate_format_params"] = bt_translate_format_params
        return self.call_with_http_info(**kwargs)
        # NOTE(review): unreachable -- see the note in
        # __create_drawing_app_element above (indentation loss in this copy).
        self.create_drawing_translation = Endpoint(
            settings={
                "response_type": (
                    bt_translation_request_info.BTTranslationRequestInfo,
                ),
                "auth": ["OAuth2"],
                "endpoint_path": "/api/drawings/d/{did}/{wv}/{wvid}/e/{eid}/translations",
                "operation_id": "create_drawing_translation",
                "http_method": "POST",
                "servers": [],
            },
            params_map={
                "all": ["did", "wv", "wvid", "eid", "bt_translate_format_params",],
                "required": ["did", "wv", "wvid", "eid", "bt_translate_format_params",],
                "nullable": [],
                "enum": [],
                "validation": [],
            },
            root_map={
                "validations": {},
                "allowed_values": {},
                "openapi_types": {
                    "did": (str,),
                    "wv": (str,),
                    "wvid": (str,),
                    "eid": (str,),
                    "bt_translate_format_params": (
                        bt_translate_format_params.BTTranslateFormatParams,
                    ),
                },
                "attribute_map": {
                    "did": "did",
                    "wv": "wv",
                    "wvid": "wvid",
                    "eid": "eid",
                },
                "location_map": {
                    "did": "path",
                    "wv": "path",
                    "wvid": "path",
                    "eid": "path",
                    "bt_translate_format_params": "body",
                },
                "collection_format_map": {},
            },
            headers_map={
                "accept": ["application/vnd.onshape.v1+json;charset=UTF-8;qs=0.1"],
                "content_type": ["application/json;charset=UTF-8; qs=0.09"],
            },
            api_client=api_client,
            callable=__create_drawing_translation,
        )
    def __get_drawing_translator_formats(self, did, wid, eid, **kwargs):
        """get_drawing_translator_formats # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_drawing_translator_formats(did, wid, eid, async_req=True)
        >>> result = thread.get()
        Args:
            did (str):
            wid (str):
            eid (str):
        Keyword Args:
            _return_http_data_only (bool): response data without head status
                code and headers. Default is True.
            _preload_content (bool): if False, the urllib3.HTTPResponse object
                will be returned without reading/decoding response data.
                Default is True.
            _request_timeout (float/tuple): timeout setting for this request. If one
                number provided, it will be total request timeout. It can also
                be a pair (tuple) of (connection, read) timeouts.
                Default is None.
            _check_input_type (bool): specifies if type checking
                should be done one the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done one the data received from the server.
                Default is True.
            _host_index (int): specifies the index of the server
                that we want to use.
                Default is 0.
            async_req (bool): execute request asynchronously
        Returns:
            [bt_model_format_info.BTModelFormatInfo]
                If the method is called asynchronously, returns the request
                thread.
        """
        kwargs["async_req"] = kwargs.get("async_req", False)
        kwargs["_return_http_data_only"] = kwargs.get(
            "_return_http_data_only", True
        )
        kwargs["_preload_content"] = kwargs.get("_preload_content", True)
        kwargs["_request_timeout"] = kwargs.get("_request_timeout", None)
        kwargs["_check_input_type"] = kwargs.get("_check_input_type", True)
        kwargs["_check_return_type"] = kwargs.get("_check_return_type", True)
        kwargs["_host_index"] = kwargs.get("_host_index", 0)
        kwargs["did"] = did
        kwargs["wid"] = wid
        kwargs["eid"] = eid
        return self.call_with_http_info(**kwargs)
        # NOTE(review): unreachable -- see note above.
        self.get_drawing_translator_formats = Endpoint(
            settings={
                "response_type": ([bt_model_format_info.BTModelFormatInfo],),
                "auth": [],
                "endpoint_path": "/api/drawings/d/{did}/w/{wid}/e/{eid}/translationformats",
                "operation_id": "get_drawing_translator_formats",
                "http_method": "GET",
                "servers": [],
            },
            params_map={
                "all": ["did", "wid", "eid",],
                "required": ["did", "wid", "eid",],
                "nullable": [],
                "enum": [],
                "validation": [],
            },
            root_map={
                "validations": {},
                "allowed_values": {},
                "openapi_types": {"did": (str,), "wid": (str,), "eid": (str,),},
                "attribute_map": {"did": "did", "wid": "wid", "eid": "eid",},
                "location_map": {"did": "path", "wid": "path", "eid": "path",},
                "collection_format_map": {},
            },
            headers_map={
                "accept": ["application/vnd.onshape.v1+json;charset=UTF-8;qs=0.1"],
                "content_type": [],
            },
            api_client=api_client,
            callable=__get_drawing_translator_formats,
        )
class Endpoint(object):
    # Generic, data-driven description of one REST endpoint. Instances are
    # callable; calling one runs its stored `callable` (the per-endpoint
    # closure), which eventually comes back through call_with_http_info().
    def __init__(
        self,
        settings=None,
        params_map=None,
        root_map=None,
        headers_map=None,
        api_client=None,
        callable=None,
    ):
        """Creates an endpoint
        Args:
            settings (dict): see below key value pairs
                'response_type' (tuple/None): response type
                'auth' (list): a list of auth type keys
                'endpoint_path' (str): the endpoint path
                'operation_id' (str): endpoint string identifier
                'http_method' (str): POST/PUT/PATCH/GET etc
                'servers' (list): list of str servers that this endpoint is at
            params_map (dict): see below key value pairs
                'all' (list): list of str endpoint parameter names
                'required' (list): list of required parameter names
                'nullable' (list): list of nullable parameter names
                'enum' (list): list of parameters with enum values
                'validation' (list): list of parameters with validations
            root_map
                'validations' (dict): the dict mapping endpoint parameter tuple
                    paths to their validation dictionaries
                'allowed_values' (dict): the dict mapping endpoint parameter
                    tuple paths to their allowed_values (enum) dictionaries
                'openapi_types' (dict): param_name to openapi type
                'attribute_map' (dict): param_name to camelCase name
                'location_map' (dict): param_name to 'body', 'file', 'form',
                    'header', 'path', 'query'
                collection_format_map (dict): param_name to `csv` etc.
            headers_map (dict): see below key value pairs
                'accept' (list): list of Accept header strings
                'content_type' (list): list of Content-Type header strings
            api_client (ApiClient) api client instance
            callable (function): the function which is invoked when the
                Endpoint is called
        """
        self.settings = settings
        self.params_map = params_map
        # Every endpoint additionally accepts these framework-level kwargs.
        self.params_map["all"].extend(
            [
                "async_req",
                "_host_index",
                "_preload_content",
                "_request_timeout",
                "_return_http_data_only",
                "_check_input_type",
                "_check_return_type",
            ]
        )
        self.params_map["nullable"].extend(["_request_timeout"])
        self.validations = root_map["validations"]
        self.allowed_values = root_map["allowed_values"]
        self.openapi_types = root_map["openapi_types"]
        # Types for the framework kwargs, merged into the endpoint's own types.
        extra_types = {
            "async_req": (bool,),
            "_host_index": (int,),
            "_preload_content": (bool,),
            "_request_timeout": (none_type, int, (int,), [int]),
            "_return_http_data_only": (bool,),
            "_check_input_type": (bool,),
            "_check_return_type": (bool,),
        }
        self.openapi_types.update(extra_types)
        self.attribute_map = root_map["attribute_map"]
        self.location_map = root_map["location_map"]
        self.collection_format_map = root_map["collection_format_map"]
        self.headers_map = headers_map
        self.api_client = api_client
        self.callable = callable
    def __validate_inputs(self, kwargs):
        # Enum and validation checks run unconditionally; type checking of
        # every kwarg only when _check_input_type is enabled.
        for param in self.params_map["enum"]:
            if param in kwargs:
                check_allowed_values(self.allowed_values, (param,), kwargs[param])
        for param in self.params_map["validation"]:
            if param in kwargs:
                check_validations(self.validations, (param,), kwargs[param])
        if kwargs["_check_input_type"] is False:
            return
        for key, value in six.iteritems(kwargs):
            fixed_val = validate_and_convert_types(
                value,
                self.openapi_types[key],
                [key],
                False,
                kwargs["_check_input_type"],
                configuration=self.api_client.configuration,
            )
            kwargs[key] = fixed_val
    def __gather_params(self, kwargs):
        # Bucket each kwarg into body/file/form/header/path/query according
        # to the endpoint's location_map, renaming to the wire (camelCase)
        # name via attribute_map.
        params = {
            "body": None,
            "collection_format": {},
            "file": {},
            "form": [],
            "header": {},
            "path": {},
            "query": [],
        }
        for param_name, param_value in six.iteritems(kwargs):
            param_location = self.location_map.get(param_name)
            if param_location is None:
                # Framework kwargs (async_req etc.) have no wire location.
                continue
            if param_location:
                if param_location == "body":
                    params["body"] = param_value
                    continue
                base_name = self.attribute_map[param_name]
                if param_location == "form" and self.openapi_types[param_name] == (
                    file_type,
                ):
                    params["file"][param_name] = [param_value]
                elif param_location == "form" and self.openapi_types[param_name] == (
                    [file_type],
                ):
                    # param_value is already a list
                    params["file"][param_name] = param_value
                elif param_location in {"form", "query"}:
                    param_value_full = (base_name, param_value)
                    params[param_location].append(param_value_full)
                if param_location not in {"form", "query"}:
                    params[param_location][base_name] = param_value
                collection_format = self.collection_format_map.get(param_name)
                if collection_format:
                    params["collection_format"][base_name] = collection_format
        return params
    def __call__(self, *args, **kwargs):
        """ This method is invoked when endpoints are called
        Example:
        pet_api = PetApi()
        pet_api.add_pet # this is an instance of the class Endpoint
        pet_api.add_pet() # this invokes pet_api.add_pet.__call__()
        which then invokes the callable functions stored in that endpoint at
        pet_api.add_pet.callable or self.callable in this class
        """
        return self.callable(self, *args, **kwargs)
    def call_with_http_info(self, **kwargs):
        # Resolve the server override, reject unknown/None parameters,
        # enforce required parameters, validate, bucket, and finally
        # delegate the HTTP call to the ApiClient.
        try:
            _host = self.settings["servers"][kwargs["_host_index"]]
        except IndexError:
            if self.settings["servers"]:
                raise ApiValueError(
                    "Invalid host index. Must be 0 <= index < %s"
                    % len(self.settings["servers"])
                )
            _host = None
        for key, value in six.iteritems(kwargs):
            if key not in self.params_map["all"]:
                raise ApiTypeError(
                    "Got an unexpected parameter '%s'"
                    " to method `%s`" % (key, self.settings["operation_id"])
                )
            # only throw this nullable ApiValueError if _check_input_type
            # is False, if _check_input_type==True we catch this case
            # in self.__validate_inputs
            if (
                key not in self.params_map["nullable"]
                and value is None
                and kwargs["_check_input_type"] is False
            ):
                raise ApiValueError(
                    "Value may not be None for non-nullable parameter `%s`"
                    " when calling `%s`" % (key, self.settings["operation_id"])
                )
        for key in self.params_map["required"]:
            if key not in kwargs.keys():
                raise ApiValueError(
                    "Missing the required parameter `%s` when calling "
                    "`%s`" % (key, self.settings["operation_id"])
                )
        self.__validate_inputs(kwargs)
        params = self.__gather_params(kwargs)
        accept_headers_list = self.headers_map["accept"]
        if accept_headers_list:
            params["header"]["Accept"] = self.api_client.select_header_accept(
                accept_headers_list
            )
        content_type_headers_list = self.headers_map["content_type"]
        if content_type_headers_list:
            header_list = self.api_client.select_header_content_type(
                content_type_headers_list
            )
            params["header"]["Content-Type"] = header_list
        return self.api_client.call_api(
            self.settings["endpoint_path"],
            self.settings["http_method"],
            params["path"],
            params["query"],
            params["header"],
            body=params["body"],
            post_params=params["form"],
            files=params["file"],
            response_type=self.settings["response_type"],
            auth_settings=self.settings["auth"],
            async_req=kwargs["async_req"],
            _check_type=kwargs["_check_return_type"],
            _return_http_data_only=kwargs["_return_http_data_only"],
            _preload_content=kwargs["_preload_content"],
            _request_timeout=kwargs["_request_timeout"],
            _host=_host,
            collection_formats=params["collection_format"],
        )
|
onshape-public/onshape-clients
|
python/onshape_client/oas/api/drawings_api.py
|
Python
|
mit
| 23,491
|
import os,sys
import numpy as np
import mne
from jumeg.jumeg_base import JuMEG_Base_IO
#print "########## Refchan geo data:"
# This is just for info to locate special 4D-refs.
#for iref in refpick:
# print raw.info['chs'][iref]['ch_name'],
#raw.info['chs'][iref]['loc'][0:3]
#fname=opt.fname,path=opt.path,verbose=opt.v,debug=opt.d,experiment=opt.exp,
# duration=opt.duration,start=opt.start,n_channels=opt.n_channels,bads=opt.bads
#----------------------------------------------------------------------------------------
class JuMEG_TSV_IO_DATA(JuMEG_Base_IO):
def __init__(self, fname=None,path=None,raw=None,experiment=None,verbose=False,bads=None):
super(JuMEG_TSV_IO_DATA, self).__init__()
self.verbose = verbose
self.fname = fname
self.path = path
self.raw = raw
self.experiment = experiment
self.bads = bads
self.append_bads= True
self.dtype_original = None
self.dtype_plot = np.float32
self.raw_is_loaded = False
def update(self,path=None,fname=None,raw=None,reload=False):
if (reload and self.raw_is_loaded):
fname = self.raw.info.get('filename')
self.raw = None
#---
self.raw_is_loaded = False
if raw:
self.dtype_original = self.raw._data.dtype
self.path,self.fname = os.path.split(raw.info.get('filename'))
self.raw = raw
self.bads = raw.info.get('bads')
self.raw_is_loaded = True
return self.raw,self.bads
if fname:
if path:
self.path = path
else:
self.path = os.path.dirname( fname )
self.fname = os.path.basename( fname )
if not self.path:
self.path ="."+ os.path.sep
elif not os.path.exists(self.path):
self.path ="."+ os.path.sep
print "JuMEG TSV IO error: path not exist: " + self.path
if self.fname:
self.load_raw()
# self.update_channel_info()
def load_raw(self):
self.raw_is_loaded = False
self.raw = None
if self.verbose:
print "---> JuMEG TSV IO loading data"
print" path: "+ self.path
print" file: "+ self.fname +"\n"
if self.fname is None:
print"ERROR no file found!!\n"
else:
if os.path.exists(self.path):
self.raw = mne.io.Raw(self.path+"/"+self.fname,preload=True)
else:
self.raw = mne.io.Raw(self.fname,preload=True)
self.raw_is_loaded = True
self.dtype_original = self.raw._data.dtype
if self.bads:
self.raw,self.bads = self.update_bad_channels(raw=self.raw,bads=self.bads,append=self.append_bads,save=False)
if self.verbose:
print "---> JuMEG TSV IO done loading data"
print self.raw_is_loaded
print "\n"
return self.raw,self.bads
def save_bads(self):
return self.update_bad_channels(raw=self.raw,bads=self.bads,append=self.append_bads,save=True)
|
fboers/jumegX
|
tsvgl/io/jumeg_tsv_io_data.py
|
Python
|
bsd-3-clause
| 3,441
|
# coding: utf-8
import os
import time
# NOTE: `time` is imported but unused in this demo.
# Parent process: fork once. The child sees ret == 0; the parent receives
# the child's pid (non-zero).
ret = os.fork()
if ret == 0:
    print('---process-1---')
else:
    print('---process-2---')
# Parent AND child processes: both reach this second fork, so four
# processes exist in total after this point.
ret = os.fork()
if ret == 0:
    print('---process-11---')
else:
    print('---process-22---')
# fork bomb -- must NOT be executed
# while 1:
#     os.fork()
|
kaideyi/KDYSample
|
kYPython/FluentPython/BasicLearn/OOP/Fork3.py
|
Python
|
mit
| 301
|
import inspect
import time
from functools import wraps
from twisted.internet.defer import inlineCallbacks, returnValue
from twisted.python import log
def profile(func):
    """
    Simple profile decorator, monitors method execution time.

    Wraps *func* in a Twisted ``inlineCallbacks`` coroutine (so *func* is
    expected to return/yield Deferreds) and logs the wall-clock execution
    time via ``twisted.python.log``.
    """
    @inlineCallbacks
    @wraps(func)  # BUG FIX: preserve func's __name__/__doc__ on the wrapper
    def callme(*args, **kwargs):
        start = time.time()
        ret = yield func(*args, **kwargs)
        time_to_execute = time.time() - start
        log.msg('%s executed in %.3f seconds' % (func.__name__, time_to_execute))
        returnValue(ret)
    return callme
def monkeypatch(method, check):
    """Return a decorator that replaces *method* on its class (Python 2:
    uses ``im_class``/``im_func``) with the decorated implementation.

    *check* is a zero-argument callable; patching is refused unless it
    returns truthy.
    """
    def ret(new_impl):
        assert check(), "Check failed, can't monkeypatch"
        overriden = wraps(method)(new_impl)
        setattr(method.im_class, method.im_func.func_name, overriden)
        # BUG FIX: the decorator previously returned None, which rebound the
        # decorated module-level name to None; return the wrapped impl so
        # `@monkeypatch(...)` leaves a usable function behind.
        return overriden
    return ret
def selfdocumenting(f):
    """Decorator: set *f*'s docstring to its own source text and return *f*."""
    setattr(f, '__doc__', inspect.getsource(f))
    return f
|
eallik/spinoff
|
spinoff/util/meta.py
|
Python
|
bsd-2-clause
| 864
|
import json
from geo.models import *
from geo.forms import *
from hgl.settings.local import GOOGLE_API
from django.contrib.gis.geos import Point, MultiPoint, LineString, MultiPolygon
from django.contrib.gis.geos import GEOSGeometry, GeometryCollection
from django.template import RequestContext
from django.shortcuts import (
get_object_or_404,
HttpResponse,
render,
HttpResponseRedirect,
)
from django.http import JsonResponse
from django.shortcuts import render
from django.contrib.auth.models import User, Group
from django.contrib.auth import authenticate, login, logout
from dal import autocomplete
def getKey(item):
    """Sort-key helper: the first element of *item* (used to order coords by x)."""
    return item[0]
def kml(request):
    """Render every Cyrenaican polis into the KML template as text/xml."""
    poleis = (
        Locus.objects
        .filter(locus_type__name="Polis")
        .filter(related_locus__name="Cyrenaica")
    )
    return render(
        request,
        "../templates/geo/kml.xml",
        {"polis_list": poleis},
        content_type="text/xml",
    )
def geojson(request):
    """Return the stored GeoJSON for the locus given by ?id=<pk>."""
    id = request.GET.get("id", "")
    obj = Locus.objects.get(pk=id)
    # NOTE(review): `geojsion` looks like a typo for `geojson`, but it has to
    # match the attribute/property name on the Locus model -- verify there
    # before renaming.
    geojson = obj.geojsion
    return JsonResponse(geojson, safe=False)
def convex_hull(request):
    """Return the convex hull of the locus identified by ?parent=<pk> as
    JSON, or {"Records": "None"} when the locus is missing/invalid."""
    parent_id = request.GET.get("parent", "")
    locus = None
    if parent_id != "":
        try:
            locus = Locus.objects.get(pk=parent_id)
        except Exception:
            locus = None
    if not locus:
        return JsonResponse({"Records": "None"})
    return JsonResponse(locus.getConvexHull())
# rels = Related_Locus.objects\
# .filter(obj=locus)\
# .filter(related_locus_type__name='forms part of')
# points = []
# # Try to recover convex hull for poly within poly
# # polys = []
# for r in rels:
# for c in r.subject.locus_coordinate.all():
# points.append(c.point)
# subrels = Related_Locus.objects\
# .filter(obj=r.subject)\
# .filter(related_locus_type__name='forms part of')
# for sr in subrels:
# for cc in sr.subject.locus_coordinate.all():
# points.append(cc.point)
# subsubrels = Related_Locus.objects\
# .filter(obj=sr.subject)\
# .filter(related_locus_type__name='forms part of')
# for ssr in subsubrels:
# for ccc in ssr.subject.locus_coordinate.all():
# points.append(ccc.point)
# #polys.append(convex_hull_children(r.subject.id))
# #Maybe going about this wrongly Neil
# mp = MultiPoint(points)
# #return HttpResponse(polys[0].__str__())
# #if polys.__len__() > 0:
# # cx = MultiPolygon(polys)
# # cnvx = cx.convex_hull
# if rels.__len__() < 3: #and polys.__len__() > 0 :
# #Not enough coords for a hull?
# coords = []
# for p in points:
# coords.append( [p.x, p.y ])
# geojson = {}
# geojson["type"] = "Feature"
# geojson["geometry"] = {}
# geojson["geometry"]["type"] = "MultiPoint"
# geojson["geometry"]["coordinates"] = coords
# return JsonResponse( geojson )
# # We need to convert this intoa dict object
# coords = []
# try:
# for css in mp.convex_hull.coords[0]:
# coords.append( [css[0],css[1]] )
# # If polys exist the add their coords to the array
# #if polys.__len__() < 0:
# # cnvx = MultiPolygon([cnvx,mp]).convex_hull
# # for i in cnvx.coords[0]:
# # coords.append( [i[0],i[1]] )
# geojson = {}
# geojson["type"] = "Feature"
# geojson["geometry"] = {}
# geojson["geometry"]["type"] = "Polygon"
# geojson["geometry"]["coordinates"] = []
# geojson["geometry"]["coordinates"].append(coords)
# except Exception:
# return JsonResponse({'Records':'None'})
# # Debug responder
# return JsonResponse( geojson )
def convex_hull_children(id):
    """Collect the coordinates of loci that 'form part of' locus *id* and
    return their convex hull geometry.

    NOTE(review): the `geojson` dict built below is discarded, and when
    *locus* is None (or lookup fails) `mp` is undefined at the final
    return, raising NameError. The only caller was the commented-out code
    in convex_hull -- confirm whether this function is still needed.
    """
    try:
        locus = Locus.objects.get(pk=id)
    except Exception:
        locus = None
    if locus:
        rels = Related_Locus.objects.filter(obj=locus).filter(
            related_locus_type__name="forms part of"
        )
        points = []
        for r in rels:
            for c in r.subject.locus_coordinate.all():
                points.append(c.point)
        # for r in rels:
        mp = MultiPoint(points)
        if rels.__len__() < 3:
            # Not enough coords for a hull?
            pass
        else:
            # We need to convert this in to a dict object
            coords = []
            try:
                for css in mp.convex_hull.coords[0]:
                    coords.append([css[0], css[1]])
                geojson = {}
                geojson["type"] = "Feature"
                geojson["geometry"] = {}
                geojson["geometry"]["type"] = "Polygon"
                geojson["geometry"]["coordinates"] = []
                geojson["geometry"]["coordinates"].append(coords)
            except Exception:
                pass
    # Debug responder
    return mp.convex_hull
def line(request):
    """Build a GeoJSON LineString from the coordinates of every locus
    related (up to three relation levels deep) to the locus in
    ?parent=<pk>; points are sorted by x before being emitted."""
    # Return a JSON line response when line type feature
    parent_id = request.GET.get("parent", "")
    locus = Locus.objects.get(pk=parent_id)
    debug = []  # collects visited ids; not used in the response
    points = []
    if locus:
        rels = Related_Locus.objects.filter(obj=locus)  # .filter(related_locus_type=3)
        for r in rels:
            for c in r.subject.locus_coordinate.all():
                points.append(c.point)
                debug.append(r.subject.id)
                debug.append("-")
            subrels = Related_Locus.objects.filter(
                obj=r.subject
            )  # .filter(related_locus_type=3)
            for sr in subrels:
                for sc in sr.subject.locus_coordinate.all():
                    points.append(sc.point)
                    debug.append(sr.subject.id)
                    debug.append("+")
                subsubrels = Related_Locus.objects.filter(obj=sr.subject)
                for ssr in subsubrels:
                    for ssc in ssr.subject.locus_coordinate.all():
                        points.append(ssc.point)
                        debug.append("++")
    # return HttpResponse(points)
    # NOTE(review): `pl` is built but never used; also LineString() raises
    # when fewer than 2 points were collected -- confirm callers guarantee that.
    pl = LineString(points)
    pl = pl.geojson
    coords = []
    for p in points:
        coords.append([p.x, p.y])
    coords_sort = sorted(coords, key=getKey)
    geojson = {}
    geojson["type"] = "Feature"
    geojson["geometry"] = {}
    geojson["geometry"]["type"] = "LineString"
    geojson["geometry"]["coordinates"] = coords_sort
    # geojson["geometry"]["coordinates"].append(coords)
    # return HttpResponse(points)
    return JsonResponse(geojson)
def popupcontent(request):
    """Return the small HTML snippet shown in a map popup: a link to the
    record view of the locus given by ?id=<pk>."""
    id = request.GET.get("id", "")
    locus = Locus.objects.get(pk=id)
    link = (
        '<div><a href="/irt_geo/recordview/?id='
        + str(id)
        + '">'
        + locus.name
        + "</a></div>"
    )
    return HttpResponse(link)
def recordview(request):
    """Render the full single-record page for the locus in ?id=<pk>."""
    locus = Locus.objects.get(pk=request.GET.get("id", ""))
    context = {"record": locus, "google_api": GOOGLE_API}
    return render(request, "../templates/single-record.html", context)
def recordview_simple(request, record_id):
    """Render the single-record page for *record_id* (URL-captured pk);
    unlike recordview(), no Google API key is passed to the template."""
    locus = Locus.objects.get(pk=record_id)
    return render(request, "../templates/single-record.html", {"record": locus})
def login_user(request):
    """Log a user in via POST; redirect home on success, re-show the login
    form otherwise.

    BUG FIX: an authenticated-but-inactive user previously fell through all
    branches without returning a response, so Django raised
    "The view didn't return an HttpResponse object".
    """
    if request.POST:
        username = request.POST["username"]
        password = request.POST["password"]
        user = authenticate(username=username, password=password)
        if user is not None and user.is_active:
            login(request, user)
            return HttpResponseRedirect("/")
    # GET request, bad credentials, or inactive account: show the login form.
    form = LoginForm()
    return render(request, "login.html", {"form": form})
def logout_user(request):
    """Log the current user out and redirect to the home page.

    (Removed the dead `username = password = ""` locals left over from a
    copy/paste of login_user.)
    """
    logout(request)
    return HttpResponseRedirect("/")
# NOTE(review): this class shadows the imported `dal.autocomplete` module
# name at module level; any later use of `autocomplete.<member>` in this
# file would hit this class instead of the module. Consider renaming.
class autocomplete(autocomplete.Select2QuerySetView):
    """DAL Select2 autocomplete view returning Locus objects whose name
    starts with the query string (?q=...)."""
    def get_queryset(self):
        qs = Locus.objects.all()
        if self.q:
            qs = qs.filter(name__istartswith=self.q)
        return qs
def json_dump(request):
    """Serialize every Monument and HistoricalUnit into one GeoJSON
    FeatureCollection and return it as application/json."""
    dumpJson = dumpEmptyClass()
    for ae in Monument.objects.all():
        dumpJson.featureDict["features"].append(create_geojson_monument(ae))
    for ae in HistoricalUnit.objects.all():
        dumpJson.featureDict["features"].append(create_geojson_hu(ae))
    # BUG FIX: HttpResponse has no `mimetype` kwarg in Django >= 1.7 (this
    # module already uses JsonResponse, a 1.7+ API), so the old call raised
    # TypeError; use `content_type` instead.
    return HttpResponse(
        json.dumps(dumpJson.featureDict), content_type="application/json"
    )
|
kingsdigitallab/hgl-django
|
geo/views.py
|
Python
|
gpl-2.0
| 9,473
|
# -*- coding: utf-8 -*-
'''Deprecated np_extractors module. Import ``textblob.np_extractors`` instead.

Kept only for backward compatibility: it re-exports everything from
:mod:`textblob.np_extractors` so old ``text.np_extractors`` imports keep
working.
'''
from textblob.np_extractors import *
|
bbengfort/TextBlob
|
text/np_extractors.py
|
Python
|
mit
| 145
|
#!/usr/bin/python
# @lint-avoid-python-3-compatibility-imports
#
# mdflush Trace md flush events.
# For Linux, uses BCC, eBPF.
#
# Todo: add more details of the flush (latency, I/O count).
#
# Copyright 2016 Netflix, Inc.
# Licensed under the Apache License, Version 2.0 (the "License")
#
# 13-Feb-2015 Brendan Gregg Created this.
from __future__ import print_function
from bcc import BPF
from time import strftime
# load BPF program
b = BPF(text="""
#include <uapi/linux/ptrace.h>
#include <linux/sched.h>
#include <linux/genhd.h>
#include <linux/bio.h>
struct data_t {
u64 pid;
char comm[TASK_COMM_LEN];
char disk[DISK_NAME_LEN];
};
BPF_PERF_OUTPUT(events);
int kprobe__md_flush_request(struct pt_regs *ctx, void *mddev, struct bio *bio)
{
struct data_t data = {};
u32 pid = bpf_get_current_pid_tgid() >> 32;
data.pid = pid;
bpf_get_current_comm(&data.comm, sizeof(data.comm));
/*
* The following deals with a kernel version change (in mainline 4.14, although
* it may be backported to earlier kernels) with how the disk name is accessed.
* We handle both pre- and post-change versions here. Please avoid kernel
* version tests like this as much as possible: they inflate the code, test,
* and maintenance burden.
*/
#ifdef bio_dev
struct gendisk *bi_disk = bio->bi_disk;
#else
struct gendisk *bi_disk = bio->bi_bdev->bd_disk;
#endif
bpf_probe_read_kernel(&data.disk, sizeof(data.disk), bi_disk->disk_name);
events.perf_submit(ctx, &data, sizeof(data));
return 0;
}
""")
# header
# Column layout mirrors the per-event line printed below: timestamp, pid,
# task name and the md device the flush was issued against.
print("Tracing md flush requests... Hit Ctrl-C to end.")
print("%-8s %-6s %-16s %s" % ("TIME", "PID", "COMM", "DEVICE"))
# process event
def print_event(cpu, data, size):
    # Decode one perf-buffer record (struct data_t from the kprobe above)
    # and print it as a single formatted line.
    event = b["events"].event(data)
    print("%-8s %-6d %-16s %s" % (strftime("%H:%M:%S"), event.pid,
        event.comm.decode('utf-8', 'replace'),
        event.disk.decode('utf-8', 'replace')))
# read events
# Block polling the perf buffer until interrupted; print_event fires once
# for every record the kernel side submits.
b["events"].open_perf_buffer(print_event)
while 1:
    try:
        b.perf_buffer_poll()
    except KeyboardInterrupt:
        exit()
|
brendangregg/bcc
|
tools/mdflush.py
|
Python
|
apache-2.0
| 2,074
|
"""Declare basic string types unambiguously for various Python versions.
Authors
-------
* MinRK
"""
# Copyright (C) PyZMQ Developers
# Distributed under the terms of the Modified BSD License.
import sys
if sys.version_info[0] >= 3:
    # Python 3: str is the unicode type; bind py2-style names as aliases
    # so the rest of the module can use them unconditionally.
    bytes = bytes
    unicode = str
    basestring = (bytes, unicode)
else:
    # Python 2: str is the byte-string type; unicode/basestring exist
    # natively and are just re-exported here.
    unicode = unicode
    bytes = str
    basestring = basestring
def cast_bytes(s, encoding='utf8', errors='strict'):
    """Coerce *s* to bytes, encoding unicode input with *encoding*."""
    if isinstance(s, unicode):
        return s.encode(encoding, errors)
    if isinstance(s, bytes):
        return s
    raise TypeError("Expected unicode or bytes, got %r" % s)
def cast_unicode(s, encoding='utf8', errors='strict'):
    """Coerce *s* to unicode, decoding byte input with *encoding*."""
    if isinstance(s, unicode):
        return s
    if isinstance(s, bytes):
        return s.decode(encoding, errors)
    raise TypeError("Expected unicode or bytes, got %r" % s)
# give short 'b' alias for cast_bytes, so that we can use fake b('stuff')
# to simulate b'stuff'
b = asbytes = cast_bytes
u = cast_unicode
__all__ = ['asbytes', 'bytes', 'unicode', 'basestring', 'b', 'u', 'cast_bytes', 'cast_unicode']
|
yyt030/pyzmq
|
zmq/utils/strtypes.py
|
Python
|
bsd-3-clause
| 1,202
|
from boiler import *
class TestRadialResolution(unittest.TestCase):
    def test_cylinder_radial_resolution_scad(self):
        # fn wins over fa: only $fn appears in the rendered SCAD even
        # though fa=1 was also supplied.
        c = Cylinder(h=10, r=20, fn=10, fa=1)
        answer = "cylinder(r=20.0, h=10.0, center=false, $fn=10.0);"
        code_compare(c.render_scad(), answer)
        # Clearing fn exposes the non-default $fa value.
        c.fn = 0
        answer = "cylinder(r=20.0, h=10.0, center=false, $fa=1.0);"
        code_compare(c.render_scad(), answer)
        # A non-default fs is emitted alongside fa.
        c.fs = 3
        answer = "cylinder(r=20.0, h=10.0, center=false, $fa=1.0, $fs=3.0);"
        code_compare(c.render_scad(), answer)
        # fa=2 / fs=2 appear to be the defaults (inferred from the expected
        # output -- confirm against Cylinder), so neither is emitted.
        c.fs = 2
        c.fa = 2
        answer = "cylinder(r=20.0, h=10.0, center=false);"
        code_compare(c.render_scad(), answer)
|
vishnubob/pyscad
|
tests/test_radial_resolution.py
|
Python
|
mit
| 692
|
import numpy as np
from netCDF4 import Dataset
# Spot-check a single grid cell of a model average file: print its depth,
# land/sea mask and the neighbouring u/v velocity components.
f = Dataset("../data/ocean_avg_0014.nc")
i, j = 66, 92
k = 28  # near surface
n = 3  # time step
print("i,j,k,n = ", i, j, k, n)
H = f.variables["h"]  # "h" field -- presumably bathymetry (ROMS convention); verify
M = f.variables["mask_rho"]  # land/sea mask on the rho grid
print("H = ", H[j, i])
print("M = ", M[j, i])
U = f.variables["u"]
V = f.variables["v"]
# u/v are read at (i-1, i) / (j-1, j) neighbours -- assumes a staggered
# (Arakawa C) grid; TODO confirm grid convention against the dataset.
print("U = ", U[n, k, j, i - 1], U[n, k, j, i])
print("V = ", V[n, k, j - 1, i], V[n, k, j, i])
|
bjornaa/ladim
|
examples/outline/sjekk.py
|
Python
|
mit
| 415
|
import os
from buildings.models import BuildingFloor, Campus
from django.contrib.gis.db import models as gis_model
from django.contrib.postgres.fields import ArrayField
from django.db import models
from django.db.models.signals import post_delete
from django.dispatch.dispatcher import receiver
from django.utils.translation import ugettext_lazy as _
from mptt.models import MPTTModel, TreeForeignKey
from taggit.managers import TaggableManager
# class BaseLookupDomain(models.Model):
# code = models.CharField(verbose_name=_("code value"), max_length=150, null=True, blank=True)
# name = models.CharField(verbose_name=_("name value"), max_length=256, null=True, blank=True)
#
# class Meta:
# abstract = True
# ordering = ['code', ]
#
# def __str__(self):
# return str(self.name) or ''
# class TimeStampedModelMixin(models.Model):
# # Computed values (managed at DB-level with triggers)
# date_insert = models.DateTimeField(auto_now_add=True, editable=False, verbose_name=_(u"Insertion date"),
# db_column='date_insert', null=True, blank=True)
# date_update = models.DateTimeField(auto_now=True, editable=False, verbose_name=_(u"Update date"),
# db_column='date_update', null=True, blank=True)
#
# class Meta:
# abstract = True
#
# def reload(self, fromdb=None):
# """Reload fields computed at DB-level (triggers)
# """
# if fromdb is None:
# fromdb = self.__class__.objects.get(pk=self.pk)
# self.date_insert = fromdb.date_insert
# self.date_update = fromdb.date_update
# return self
# Receive the pre_delete signal and delete the file associated with the model instance.
class PoiIcon(models.Model):
    """
    An image added to an icon of the map.
    """
    name = models.CharField(verbose_name=_('Name of map icon'),max_length=255)
    icon = models.ImageField(verbose_name=_('Poi icon image'), upload_to='poi-icons', max_length=512)

    def pictogram_img(self):
        """Return an <img> tag for the admin list display.

        The stored URL has its 'media/' path segment stripped before use.
        """
        parse_url = self.icon.url
        n_url = parse_url.replace("media/", "")
        # Bug fix: removed leftover debug print(parse_url) that wrote to
        # stdout on every admin page render.
        return u'<img src="%s" />' % (n_url if self.icon else "")
    pictogram_img.short_description = _("Pictogram")
    pictogram_img.allow_tags = True

    class Meta:
        ordering = ('name', )

    @property
    def json(self):
        """Serialisable dict representation used by the map front end."""
        return {
            "id": self.pk,
            "name": self.name,
            "src": self.icon.url
        }

    def __str__(self):
        return self.name
@receiver(post_delete, sender=PoiIcon)
def poi_icon_delete(sender, instance, **kwargs):
    """Remove the image file from disk after a PoiIcon row is deleted."""
    # Bug fix: PoiIcon has no `poi_icon` attribute -- its ImageField is
    # named `icon`, so the old check raised AttributeError on every delete.
    if instance.icon and instance.icon.name:
        if os.path.isfile(instance.icon.path):
            os.remove(instance.icon.path)
class PoiCategory(MPTTModel):
    # Hierarchical (MPTT) category tree for points of interest; a category
    # may carry its own map icon plus legend/sort ordering hints.
    cat_name = models.CharField(verbose_name=_('Category name'),max_length=255, null=True, blank=True)
    icon_css_name = models.CharField(verbose_name=_("Icon CSS name"), max_length=255, null=True, blank=True)
    fk_poi_icon = models.ForeignKey(PoiIcon, on_delete=models.CASCADE, null=True, blank=True)
    description = models.CharField(verbose_name=_("description"), max_length=255, null=True, blank=True)
    force_mid_point = models.BooleanField(verbose_name=_("Force route to this location"), null=True, blank=True)
    enabled = models.BooleanField(verbose_name=_("Activated and enabled"), null=True, blank=True)
    tree_order = models.IntegerField(verbose_name=_("Tree order in legend"), null=True, blank=True)
    # NOTE(review): the "Sort oder" typo below is a user-visible translated
    # string, so it is deliberately left unchanged here.
    sort_order = models.IntegerField(verbose_name=_("Sort oder of POI items"), null=True, blank=True)
    tags = TaggableManager(blank=True)
    parent = TreeForeignKey('self',
                            related_name='children', on_delete = models.CASCADE,
                            db_index=True,
                            blank=True,
                            null=True,
                            default=9999)
    # Manually managed per-language name columns.
    cat_name_en = models.CharField(max_length=255, null=True, blank=True)
    cat_name_de = models.CharField(max_length=255, null=True, blank=True)
    def __str__(self):
        return str(self.cat_name) or ''
    @property
    def icon(self):
        # URL of the linked PoiIcon image, or '' when no icon (or no file)
        # is configured.
        if self.fk_poi_icon:
            if self.fk_poi_icon.icon:
                return self.fk_poi_icon.icon.url
            else:
                return ""
        else:
            return ""
class Poi(models.Model):
    """
    Points of Interest in and around buildings
    """
    name = models.CharField(max_length=255, null=True, blank=True)
    # Manually managed per-language name columns.
    name_en = models.CharField(max_length=255, null=True, blank=True)
    name_de = models.CharField(max_length=255, null=True, blank=True)
    floor_num = models.FloatField(verbose_name=_("floor number"), null=True, blank=True)
    floor_name = models.CharField(verbose_name=_("floor name"), max_length=200, null=True, blank=True)
    description = models.CharField(verbose_name=_("description"), max_length=255, null=True, blank=True)
    enabled = models.BooleanField(verbose_name=_("Activated and enabled"), null=True, blank=True)
    floor = models.ForeignKey(BuildingFloor, on_delete=models.DO_NOTHING, null=True, blank=True)
    campus = models.ForeignKey(Campus, on_delete=models.DO_NOTHING, null=True, blank=True)
    category = models.ForeignKey(PoiCategory, on_delete=models.CASCADE)
    # Multipoint geometry in Web-Mercator (srid=3857).
    geom = gis_model.MultiPointField(srid=3857, spatial_index=True, db_column='geom', null=True, blank=True)
    poi_tags = ArrayField(models.CharField(max_length=50, blank=True), blank=True, null=True)
    @property
    def icon(self):
        # URL of the category's icon image, or '' when none is configured.
        if self.category.fk_poi_icon:
            if self.category.fk_poi_icon.icon:
                return self.category.fk_poi_icon.icon.url
            else:
                return ""
        else:
            return ""
    def __str__(self):
        return str(self.name) or ''
|
indrz/indrz
|
indrz/poi_manager/models.py
|
Python
|
gpl-3.0
| 5,972
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2016, Jianfeng Chen <jchen37@ncsu.edu>
# vim: set ts=4 sts=4 sw=4 expandtab smartindent:
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import division
import re
import sys
from SAT_Guide.universe import PROJECT_PATH
sys.dont_write_bytecode = True
def load_product_url(url):
    """Parse a feature model in extended DIMACS format.

    The file must list every feature name in leading ``c <id> <name>``
    comment lines, then a ``p cnf <features> <clauses>`` statistics line,
    then the clauses themselves (each terminated by ``0``).

    Returns:
        (feature_names, featureNum, cnfs, cnfNum) -- feature names ordered
        by id, the feature count, the clauses as lists of signed ints with
        the trailing 0 removed, and the clause count.
    """
    feature_names = []
    featureNum = 0
    cnfNum = 0
    cnfs = []
    feature_name_pattern = re.compile(r'c (\d+)\$? (\w+)\n')
    stat_line_pattern = re.compile(r'p cnf (\d+) (\d+)\n')
    with open(url, 'r') as f:
        features_names_dict = dict()
        for line in f:
            if line.startswith('c'):  # record a feature name
                m = feature_name_pattern.match(line)
                # m.group(1) is the feature id, m.group(2) its name
                features_names_dict[int(m.group(1))] = m.group(2)
            elif line.startswith('p'):  # the statistics line
                m = stat_line_pattern.match(line)
                # m.group(1) feature number, m.group(2) clause number
                featureNum = int(m.group(1))
                cnfNum = int(m.group(2))
                # transfer the feature names into the list if the file is valid
                assert len(features_names_dict) == featureNum, "There exists some features without any name"
                for i in range(1, featureNum + 1):
                    feature_names.append(features_names_dict[i])
                del features_names_dict
            elif line.endswith('0\n'):  # a clause line
                # Bug fix: wrap map() in list() so the slice also works on
                # Python 3, where map() returns an iterator. The trailing
                # 0 terminator is dropped by the slice.
                cnfs.append(list(map(int, line[:-1].split(' ')))[:-1])
            elif line.strip():
                # Bug fix: the original `assert True` could never fire, so
                # malformed lines were silently ignored. Blank lines are
                # tolerated; anything else is rejected.
                assert False, "Unknown line" + line
    assert len(cnfs) == cnfNum, "Unmatched cnfNum."
    return feature_names, featureNum, cnfs, cnfNum
def demo(name):
    """Parse the dimacs model *name* from the project's dimacs_data dir."""
    dimacs_path = "{0}/dimacs_data/{1}.dimacs".format(PROJECT_PATH, name)
    load_product_url(dimacs_path)
if __name__ == '__main__':
demo('uclinux')
|
ai-se/SPL
|
ProductLine/dimacs_parser.py
|
Python
|
mit
| 3,083
|
import sys
from itertools import ifilter
from requests_oauthlib import OAuth1Session
from requests.exceptions import ChunkedEncodingError
import json
import time
import datetime
from DataSource import DataSource
def isVideo(tweet):
    """Return True when any media item attached to *tweet* is a video."""
    media_items = tweet.get('extended_entities', {}).get('media', [])
    for item in media_items:
        if item['type'] == 'video':
            return True
    return False
class TwitterDataIngestSource(DataSource):
    """Ingest data from Twitter.

    Streams statuses/filter results, keeps only tweets that carry video
    media, and folds retweet/reply activity into tweets already stored in
    the backing data_store.

    NOTE(review): Python 2 only -- relies on itertools.ifilter, the
    .next() iterator protocol and a bare print statement.
    """
    def __init__(self, config, data_store):
        # config: OAuth credentials plus an optional 'track' filter string.
        # data_store: object exposing find/update_one (Mongo-style API).
        self.config = config
        self.data_store = data_store
        self.update_items = []
    def __iter__(self):
        # Default track terms are used when the config does not supply any.
        if 'track' in self.config:
            self.track = self.config['track']
        else:
            self.track = 'ski,surf,board'
        auth = OAuth1Session(
            self.config['consumer_key'],
            client_secret = self.config['consumer_secret'],
            resource_owner_key = self.config['access_token'],
            resource_owner_secret = self.config['access_token_secret']
        )
        request = auth.post(
            'https://stream.twitter.com/1.1/statuses/filter.json',
            data = 'track=' + self.track,
            stream = True
        )
        # filter out empty keep-alive lines sent by the streaming API
        self.source_iterator = ifilter(lambda x: x, request.iter_lines())
        return self
    def next(self):
        # Returns the next NEW tweet; retweets/replies of stored tweets
        # are folded into the database and skipped.
        while True:
            try:
                next_tweet = json.loads(self.source_iterator.next())
                filtered_tweet = self.processTweet(next_tweet)
                if filtered_tweet != {}:
                    break
            # could put more error handling in here to handle HTTP errors
            # and disconnection errors; on a chunking error the stream is
            # simply re-opened by calling __iter__ again.
            except ChunkedEncodingError:
                print('Chunked Encoding Error')
                self.__iter__()
                continue
        return filtered_tweet
    def getUpdateItems(self):
        """ Returns a copy of the list of items that need to be updated in the
        database vs inserted, None otherwise. Right now the self.update_items
        list is not being updated so this method always returns None.
        """
        if self.update_items == []:
            return None
        else:
            list_copy = list(self.update_items)
            self.update_items = []
            return list_copy
    def updateRetweet(self, id_str, history):
        """ Update the tweet with ID id_str with information from history."""
        # Create the update according to the structure of a tweet. Update
        # the retweet_count and favorite_count for this retweet. Push the
        # history information into the rt_history array.
        update = ({'ID':id_str},
            {'$set':{'tweet.orig_retweet_count':history['orig_retweet_count'],
            'tweet.orig_favorite_count':history['orig_favorite_count'],
            'tweet.orig_user_followers_count':history['orig_user_followers_count'],
            'tweet.orig_user_friends_count':history['orig_user_friends_count'],
            'tweet.orig_user_statuses_count':history['orig_user_statuses_count'],
            'tweet.orig_user_favourites_count':history['orig_user_favourites_count'],
            'last_modified':datetime.datetime.utcnow().isoformat()},
            '$push':{'tweet.rt_history':history}
            })
        # Update the database with this update
        self.data_store.update_one(update)
    def updateReplyTweet(self, id_str, history):
        """ Update the tweet with ID id_str with information from history."""
        # Create the update according to the structure of a tweet.
        # Push the history information into the reply_history array.
        update = ({'ID':id_str},
            {'$set':{'last_modified':datetime.datetime.utcnow().isoformat()},
            '$inc':{'tweet.reply_count':1},
            '$push':{'tweet.reply_history':history}
            })
        # Update the database with this update
        self.data_store.update_one(update)
    def processTweet(self, tweet):
        """ Processes tweet and returns {} if the tweet is a retweet and already in
        the database. In this case the tweet is updated directly in the database.
        If the tweet is a new tweet then build the object f_tweet with the
        relevant tweet information that needs to be stored.
        """
        # Initialize variables
        f_tweet = {}
        f_tweet['retweet'] = 0
        f_tweet['reply_count'] = 0
        f_tweet['rt_history'] = []
        f_tweet['reply_history'] = []
        # If the tweet is a retweet it will have a 'retweeted_status' object in it.
        if 'retweeted_status' in tweet:
            # Set t_object to be the nested object within the retweet that contains
            # all of the original tweet information.
            t_object = tweet['retweeted_status']
            # If it's a video proceed, otherwise return {}
            if isVideo(t_object):
                # initialize a history dictionary and set it with all of the
                # retweet information that is being stored.
                history = {}
                history['rt_id_str'] = tweet['id_str']
                history['rt_created_at'] = time.strftime('%Y-%m-%dT%H:%M:%S',
                    time.strptime(str(tweet['created_at']),'%a %b %d %H:%M:%S +0000 %Y'))
                history['rt_text'] = tweet['text']
                history['orig_retweet_count'] = t_object['retweet_count']
                history['orig_favorite_count'] = t_object['favorite_count']
                history['orig_user_followers_count'] = t_object['user']['followers_count']
                history['orig_user_friends_count'] = t_object['user']['friends_count']
                history['orig_user_statuses_count'] = t_object['user']['statuses_count']
                history['orig_user_favourites_count'] = t_object['user']['favourites_count']
                # If the tweet is in the database, then return {} and update the tweet
                # in the database
                orig_id_str = t_object['id_str']
                updates = self.data_store.find({'ID':orig_id_str})
                if updates != []:
                    self.updateRetweet(orig_id_str, history)
                    return {}
                else:
                    f_tweet['retweet'] = 1
                    f_tweet['rt_history'] = [history]
            else:
                return {}
        # If the tweet is a reply. If we are storing the original tweet then
        # update the reply information. If we are not storing the original tweet
        # then return {}.
        elif 'in_reply_to_status_id_str' in tweet and tweet['in_reply_to_status_id_str'] is not None:
            orig_id_str = tweet['in_reply_to_status_id_str']
            updates = self.data_store.find({'ID':orig_id_str})
            if updates != []:
                print 'Reply: ' + orig_id_str
                # initialize a history dictionary and set it with all of the
                # reply information that is being stored.
                history = {}
                history['reply_id_str'] = tweet['id_str']
                history['reply_created_at'] = time.strftime('%Y-%m-%dT%H:%M:%S',
                    time.strptime(str(tweet['created_at']),'%a %b %d %H:%M:%S +0000 %Y'))
                history['reply_text'] = tweet['text']
                self.updateReplyTweet(orig_id_str, history)
            # Replies are never stored as new tweets, whether or not the
            # original was found.
            return {}
        else:
            # It's an original tweet.
            # If it's a video, then store it, otherwise return {}
            if isVideo(tweet):
                t_object = tweet
                orig_id_str = t_object['id_str']
            else:
                return {}
        # Store the desired tweet information in the f_tweet object.
        f_tweet['orig_tweet_object'] = t_object
        f_tweet['orig_id_str'] = orig_id_str
        f_tweet['orig_created_at'] = time.strftime('%Y-%m-%dT%H:%M:%S',
            time.strptime(str(t_object['created_at']),'%a %b %d %H:%M:%S +0000 %Y'))
        f_tweet['orig_text'] = t_object['text']
        f_tweet['orig_retweet_count'] = t_object['retweet_count']
        f_tweet['orig_favorite_count'] = t_object['favorite_count']
        f_tweet['orig_user_id_str'] = t_object['user']['id_str']
        f_tweet['orig_user_screen_name'] = t_object['user']['screen_name']
        f_tweet['orig_user_name'] = t_object['user']['name']
        f_tweet['orig_user_followers_count'] = t_object['user']['followers_count']
        f_tweet['orig_user_friends_count'] = t_object['user']['friends_count']
        f_tweet['orig_user_statuses_count'] = t_object['user']['statuses_count']
        f_tweet['orig_user_favourites_count'] = t_object['user']['favourites_count']
        if 'entities' in t_object:
            if 'hashtags' in t_object['entities']:
                # hashtags are arrays
                f_tweet['orig_hashtags'] = t_object['entities']['hashtags']
            if 'user_mentions' in t_object['entities']:
                # user_mentions are arrays
                f_tweet['orig_user_mentions'] = t_object['entities']['user_mentions']
            if 'urls' in t_object['entities']:
                # urls are arrays
                f_tweet['orig_urls'] = t_object['entities']['urls']
            if 'media' in t_object['entities']:
                # media is an array
                for i in range(len(t_object['entities']['media'])):
                    if 'source_status_id_str' in t_object['entities']['media'][i]:
                        f_tweet['orig_source_status_id_str'] = t_object['entities']['media'][i]['source_status_id_str']
        for i in range(len(t_object['extended_entities']['media'])):
            if t_object['extended_entities']['media'][i]['type'] == 'video':
                f_tweet['orig_media_type'] = t_object['extended_entities']['media'][i]['type']
                f_tweet['orig_video_length_ms'] = t_object['extended_entities']['media'][i]['video_info']['duration_millis']
                f_tweet['orig_video_expanded_url'] = t_object['extended_entities']['media'][i]['expanded_url']
                f_tweet['orig_video_display_url'] = t_object['extended_entities']['media'][i]['display_url']
                f_tweet['orig_video_url'] = t_object['extended_entities']['media'][i]['url']
        # add the key tweet before all tweets
        f_tweet = { 'tweet' : f_tweet }
        # add an ID field to the f_tweet dictionary that is the unique ID string
        # for the tweet.
        f_tweet.update({'ID': orig_id_str})
        return f_tweet
|
abessou/w251-FinalProject
|
code/sources/TwitterDataIngestSource.py
|
Python
|
mit
| 9,958
|
"""SCons.Conftest
Autoconf-like configuration support; low level implementation of tests.
"""
#
# Copyright (c) 2003 Stichting NLnet Labs
# Copyright (c) 2001, 2002, 2003 Steven Knight
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
#
# The purpose of this module is to define how a check is to be performed.
# Use one of the Check...() functions below.
#
#
# A context class is used that defines functions for carrying out the tests,
# logging and messages. The following methods and members must be present:
#
# context.Display(msg) Function called to print messages that are normally
# displayed for the user. Newlines are explicitly used.
# The text should also be written to the logfile!
#
# context.Log(msg) Function called to write to a log file.
#
# context.BuildProg(text, ext)
# Function called to build a program, using "ext" for the
# file extension. Must return an empty string for
# success, an error message for failure.
# For reliable test results building should be done just
# like an actual program would be build, using the same
# command and arguments (including configure results so
# far).
#
# context.CompileProg(text, ext)
# Function called to compile a program, using "ext" for
# the file extension. Must return an empty string for
# success, an error message for failure.
# For reliable test results compiling should be done just
# like an actual source file would be compiled, using the
# same command and arguments (including configure results
# so far).
#
# context.AppendLIBS(lib_name_list)
# Append "lib_name_list" to the value of LIBS.
# "lib_namelist" is a list of strings.
# Return the value of LIBS before changing it (any type
# can be used, it is passed to SetLIBS() later.)
#
# context.PrependLIBS(lib_name_list)
# Prepend "lib_name_list" to the value of LIBS.
# "lib_namelist" is a list of strings.
# Return the value of LIBS before changing it (any type
# can be used, it is passed to SetLIBS() later.)
#
# context.SetLIBS(value)
# Set LIBS to "value". The type of "value" is what
# AppendLIBS() returned.
# Return the value of LIBS before changing it (any type
# can be used, it is passed to SetLIBS() later.)
#
# context.headerfilename
# Name of file to append configure results to, usually
# "confdefs.h".
# The file must not exist or be empty when starting.
# Empty or None to skip this (some tests will not work!).
#
# context.config_h (may be missing). If present, must be a string, which
# will be filled with the contents of a config_h file.
#
# context.vardict Dictionary holding variables used for the tests and
# stores results from the tests, used for the build
# commands.
# Normally contains "CC", "LIBS", "CPPFLAGS", etc.
#
# context.havedict Dictionary holding results from the tests that are to
# be used inside a program.
# Names often start with "HAVE_". These are zero
# (feature not present) or one (feature present). Other
# variables may have any value, e.g., "PERLVERSION" can
# be a number and "SYSTEMNAME" a string.
#
import re
from types import IntType
#
# PUBLIC VARIABLES
#
LogInputFiles = 1 # Set that to log the input files in case of a failed test
LogErrorMessages = 1 # Set that to log Conftest-generated error messages
#
# PUBLIC FUNCTIONS
#
# Generic remarks:
# - When a language is specified which is not supported the test fails. The
# message is a bit different, because not all the arguments for the normal
# message are available yet (chicken-egg problem).
def CheckBuilder(context, text = None, language = None):
    """Check whether the configured compiler can build a trivial program.

    "language" selects the compiler ("C" or "C++"; default "C"); "text"
    may override the source that gets built. The current compiler and
    linker flags are used, so $CFLAGS, $CPPFLAGS and $LIBS must already
    be set correctly. Returns an empty string on success, otherwise an
    error message.
    """
    lang, suffix, msg = _lang2suffix(language)
    if msg:
        context.Display("%s\n" % msg)
        return msg
    if not text:
        text = """
int main() {
    return 0;
}
"""
    context.Display("Checking if building a %s file works... " % lang)
    status = context.BuildProg(text, suffix)
    _YesNoResult(context, status, None, text)
    return status
def CheckCC(context):
    """Verify that the C compiler in $CC can compile a C source file.

    The current $CCCOM value is exercised as well, so non-working flags
    are detected too.
    """
    context.Display("Checking whether the C compiler works... ")
    source = """
int main()
{
    return 0;
}
"""
    status = _check_empty_program(context, 'CC', source, 'C')
    _YesNoResult(context, status, None, source)
    return status
def CheckSHCC(context):
    """Verify that the shared-object C compiler in $SHCC works.

    Compiles a C source file with the current $SHCCCOM value, so broken
    flags are detected too.
    """
    context.Display("Checking whether the (shared) C compiler works... ")
    source = """
int foo()
{
    return 0;
}
"""
    status = _check_empty_program(context, 'SHCC', source, 'C', use_shared = True)
    _YesNoResult(context, status, None, source)
    return status
def CheckCXX(context):
    """Verify that the C++ compiler in $CXX can compile a C++ source file.

    The current $CXXCOM value is exercised as well, so non-working flags
    are detected too.
    """
    context.Display("Checking whether the C++ compiler works... ")
    source = """
int main()
{
    return 0;
}
"""
    status = _check_empty_program(context, 'CXX', source, 'C++')
    _YesNoResult(context, status, None, source)
    return status
def CheckSHCXX(context):
    """Verify that the shared-object C++ compiler in $SHCXX works.

    Compiles a C++ source file with the current $SHCXXCOM value, so
    broken flags are detected too.
    """
    context.Display("Checking whether the (shared) C++ compiler works... ")
    source = """
int main()
{
    return 0;
}
"""
    status = _check_empty_program(context, 'SHCXX', source, 'C++', use_shared = True)
    _YesNoResult(context, status, None, source)
    return status
def _check_empty_program(context, comp, text, language, use_shared = False):
    """Try to build *text* with the compiler named by the construction
    variable *comp*. Return 0 on success, 1 otherwise."""
    if comp not in context.env or not context.env[comp]:
        # Compiler construction variable is missing or empty.
        return 1
    lang, suffix, msg = _lang2suffix(language)
    if msg:
        return 1
    if use_shared:
        return context.CompileSharedObject(text, suffix)
    return context.CompileProg(text, suffix)
def CheckFunc(context, function_name, header = None, language = None):
    """
    Configure check for a function "function_name".
    "language" should be "C" or "C++" and is used to select the compiler.
    Default is "C".
    Optional "header" can be defined to define a function prototype, include a
    header file or anything else that comes before main().
    Sets HAVE_function_name in context.havedict according to the result.
    Note that this uses the current value of compiler and linker flags, make
    sure $CFLAGS, $CPPFLAGS and $LIBS are set correctly.
    Returns an empty string for success, an error message for failure.
    """
    # Remarks from autoconf:
    # - Don't include <ctype.h> because on OSF/1 3.0 it includes <sys/types.h>
    #   which includes <sys/select.h> which contains a prototype for select.
    #   Similarly for bzero.
    # - assert.h is included to define __stub macros and hopefully few
    #   prototypes, which can conflict with char $1(); below.
    # - Override any gcc2 internal prototype to avoid an error.
    # - We use char for the function declaration because int might match the
    #   return type of a gcc2 builtin and then its argument prototype would
    #   still apply.
    # - The GNU C library defines this for functions which it implements to
    #   always fail with ENOSYS. Some functions are actually named something
    #   starting with __ and the normal name is an alias.
    if context.headerfilename:
        includetext = '#include "%s"' % context.headerfilename
    else:
        includetext = ''
    if not header:
        # Default prototype: declare the function as returning char so the
        # declaration cannot collide with a compiler builtin's prototype.
        header = """
#ifdef __cplusplus
extern "C"
#endif
char %s();""" % function_name
    lang, suffix, msg = _lang2suffix(language)
    if msg:
        context.Display("Cannot check for %s(): %s\n" % (function_name, msg))
        return msg
    # The __stub_* guards make the probe fail to build when glibc only
    # provides an always-failing ENOSYS stub for the function.
    text = """
%(include)s
#include <assert.h>
%(hdr)s
int main() {
#if defined (__stub_%(name)s) || defined (__stub___%(name)s)
  fail fail fail
#else
  %(name)s();
#endif
  return 0;
}
""" % { 'name': function_name,
        'include': includetext,
        'hdr': header }
    context.Display("Checking for %s function %s()... " % (lang, function_name))
    ret = context.BuildProg(text, suffix)
    _YesNoResult(context, ret, "HAVE_" + function_name, text,
                 "Define to 1 if the system has the function `%s'." %\
                 function_name)
    return ret
def CheckHeader(context, header_name, header = None, language = None,
                include_quotes = None):
    """
    Configure check for a C or C++ header file "header_name".
    Optional "header" can be defined to do something before including the
    header file (unusual, supported for consistency).
    "language" should be "C" or "C++" and is used to select the compiler.
    Default is "C".
    Sets HAVE_header_name in context.havedict according to the result.
    Note that this uses the current value of compiler and linker flags, make
    sure $CFLAGS and $CPPFLAGS are set correctly.
    Returns an empty string for success, an error message for failure.
    """
    # Why compile the program instead of just running the preprocessor?
    # It is possible that the header file exists, but actually using it may
    # fail (e.g., because it depends on other header files). Thus this test is
    # more strict. It may require using the "header" argument.
    #
    # Use <> by default, because the check is normally used for system header
    # files. SCons passes '""' to overrule this.
    # Include "confdefs.h" first, so that the header can use HAVE_HEADER_H.
    if context.headerfilename:
        includetext = '#include "%s"\n' % context.headerfilename
    else:
        includetext = ''
    if not header:
        header = ""
    lang, suffix, msg = _lang2suffix(language)
    if msg:
        context.Display("Cannot check for header file %s: %s\n"
                        % (header_name, msg))
        return msg
    if not include_quotes:
        include_quotes = "<>"
    # Assemble the probe: confdefs include + user preamble + the actual
    # #include line for the header under test.
    text = "%s%s\n#include %s%s%s\n\n" % (includetext, header,
        include_quotes[0], header_name, include_quotes[1])
    context.Display("Checking for %s header file %s... " % (lang, header_name))
    ret = context.CompileProg(text, suffix)
    _YesNoResult(context, ret, "HAVE_" + header_name, text,
                 "Define to 1 if you have the <%s> header file." % header_name)
    return ret
def CheckType(context, type_name, fallback = None,
               header = None, language = None):
    """
    Configure check for a C or C++ type "type_name".
    Optional "header" can be defined to include a header file.
    "language" should be "C" or "C++" and is used to select the compiler.
    Default is "C".
    Sets HAVE_type_name in context.havedict according to the result.
    Note that this uses the current value of compiler and linker flags, make
    sure $CFLAGS, $CPPFLAGS and $LIBS are set correctly.
    Returns an empty string for success, an error message for failure.
    """
    # Include "confdefs.h" first, so that the header can use HAVE_HEADER_H.
    if context.headerfilename:
        includetext = '#include "%s"' % context.headerfilename
    else:
        includetext = ''
    if not header:
        header = ""
    lang, suffix, msg = _lang2suffix(language)
    if msg:
        context.Display("Cannot check for %s type: %s\n" % (type_name, msg))
        return msg
    # Remarks from autoconf about this test:
    # - Grepping for the type in include files is not reliable (grep isn't
    #   portable anyway).
    # - Using "TYPE my_var;" doesn't work for const qualified types in C++.
    #   Adding an initializer is not valid for some C++ classes.
    # - Using the type as parameter to a function either fails for K&R C or for
    #   C++.
    # - Using "TYPE *my_var;" is valid in C for some types that are not
    #   declared (struct something).
    # - Using "sizeof(TYPE)" is valid when TYPE is actually a variable.
    # - Using the previous two together works reliably.
    text = """
%(include)s
%(header)s
int main() {
  if ((%(name)s *) 0)
    return 0;
  if (sizeof (%(name)s))
    return 0;
}
""" % { 'include': includetext,
        'header': header,
        'name': type_name }
    context.Display("Checking for %s type %s... " % (lang, type_name))
    ret = context.BuildProg(text, suffix)
    _YesNoResult(context, ret, "HAVE_" + type_name, text,
                 "Define to 1 if the system has the type `%s'." % type_name)
    # When the type is missing and a fallback was given, typedef it into
    # the generated config header so dependent code still compiles.
    if ret and fallback and context.headerfilename:
        f = open(context.headerfilename, "a")
        f.write("typedef %s %s;\n" % (fallback, type_name))
        f.close()
    return ret
def CheckTypeSize(context, type_name, header = None, language = None, expect = None):
    """This check can be used to get the size of a given type, or to check whether
    the type is of expected size.

    Arguments:
        - type : str
            the type to check
        - includes : sequence
            list of headers to include in the test code before testing the type
        - language : str
            'C' or 'C++'
        - expect : int
            if given, will test whether the type has the given number of bytes.
            If not given, will automatically find the size.

        Returns:
            status : int
                0 if the check failed, or the found size of the type if the check succeeded."""

    # Include "confdefs.h" first, so that the header can use HAVE_HEADER_H.
    if context.headerfilename:
        includetext = '#include "%s"' % context.headerfilename
    else:
        includetext = ''

    if not header:
        header = ""

    # Resolve the compiler/suffix for the requested language; bail out with
    # the error message if the language is not supported.
    lang, suffix, msg = _lang2suffix(language)
    if msg:
        context.Display("Cannot check for %s type: %s\n" % (type_name, msg))
        return msg

    src = includetext + header
    if expect is not None:
        # Only check if the given size is the right one
        context.Display('Checking %s is %d bytes... ' % (type_name, expect))

        # test code taken from autoconf: this is a pretty clever hack to find that
        # a type is of a given size using only compilation. This speeds things up
        # quite a bit compared to straightforward code using TryRun
        src = src + r"""
typedef %s scons_check_type;

int main()
{
    static int test_array[1 - 2 * !(((long int) (sizeof(scons_check_type))) == %d)];
    test_array[0] = 0;

    return 0;
}
"""
        # The array has negative size (a compile error) unless sizeof matches
        # 'expect', so a clean compile proves the expected size.
        st = context.CompileProg(src % (type_name, expect), suffix)
        if not st:
            context.Display("yes\n")
            _Have(context, "SIZEOF_%s" % type_name, expect,
                  "The size of `%s', as computed by sizeof." % type_name)
            return expect
        else:
            context.Display("no\n")
            _LogFailed(context, src, st)
            return 0
    else:
        # Determine the size by running a program that prints sizeof(type).
        context.Message('Checking size of %s ... ' % type_name)

        # We have to be careful with the program we wish to test here since
        # compilation will be attempted using the current environment's flags.
        # So make sure that the program will compile without any warning. For
        # example using: 'int main(int argc, char** argv)' will fail with the
        # '-Wall -Werror' flags since the variables argc and argv would not be
        # used in the program...
        #
        src = src + """
#include <stdlib.h>
#include <stdio.h>
int main() {
    printf("%d", (int)sizeof(""" + type_name + """));
    return 0;
}
    """
        st, out = context.RunProg(src, suffix)
        try:
            size = int(out)
        except ValueError:
            # If cannot convert output of test prog to an integer (the size),
            # something went wrong, so just fail
            st = 1
            size = 0

        if not st:
            context.Display("yes\n")
            _Have(context, "SIZEOF_%s" % type_name, size,
                  "The size of `%s', as computed by sizeof." % type_name)
            return size
        else:
            context.Display("no\n")
            _LogFailed(context, src, st)
            return 0
def CheckDeclaration(context, symbol, includes = None, language = None):
    """Checks whether symbol is declared.

    Use the same test as autoconf, that is test whether the symbol is defined
    as a macro or can be used as an r-value.

    Arguments:
        symbol : str
            the symbol to check
        includes : str
            Optional "header" can be defined to include a header file.
        language : str
            only C and C++ supported.

    Returns:
        status : bool
            True if the check failed, False if succeeded."""

    # Include "confdefs.h" first, so that the header can use HAVE_HEADER_H.
    if context.headerfilename:
        includetext = '#include "%s"' % context.headerfilename
    else:
        includetext = ''

    if not includes:
        includes = ""

    # Resolve the compiler/suffix for the requested language; bail out with
    # the error message if the language is not supported.
    lang, suffix, msg = _lang2suffix(language)
    if msg:
        context.Display("Cannot check for declaration %s: %s\n" % (symbol, msg))
        return msg

    src = includetext + includes
    context.Display('Checking whether %s is declared... ' % symbol)

    # If 'symbol' is a macro the #ifndef branch is skipped; otherwise the
    # program only compiles when 'symbol' can be used as an r-value, i.e.
    # when it has a visible declaration.
    src = src + r"""
int main()
{
#ifndef %s
    (void) %s;
#endif
    ;
    return 0;
}
""" % (symbol, symbol)

    st = context.CompileProg(src, suffix)
    _YesNoResult(context, st, "HAVE_DECL_" + symbol, src,
                 "Set to 1 if %s is defined." % symbol)
    return st
def CheckLib(context, libs, func_name = None, header = None,
             extra_libs = None, call = None, language = None, autoadd = 1,
             append = True):
    """
    Configure check for a C or C++ libraries "libs". Searches through
    the list of libraries, until one is found where the test succeeds.
    Tests if "func_name" or "call" exists in the library. Note: if it exists
    in another library the test succeeds anyway!
    Optional "header" can be defined to include a header file. If not given a
    default prototype for "func_name" is added.
    Optional "extra_libs" is a list of library names to be added after
    "lib_name" in the build command. To be used for libraries that "lib_name"
    depends on.
    Optional "call" replaces the call to "func_name" in the test code. It must
    consist of complete C statements, including a trailing ";".
    Both "func_name" and "call" arguments are optional, and in that case, just
    linking against the libs is tested.
    "language" should be "C" or "C++" and is used to select the compiler.
    Default is "C".
    Note that this uses the current value of compiler and linker flags, make
    sure $CFLAGS, $CPPFLAGS and $LIBS are set correctly.
    Returns an empty string for success, an error message for failure.
    """
    # Include "confdefs.h" first, so that the header can use HAVE_HEADER_H.
    if context.headerfilename:
        includetext = '#include "%s"' % context.headerfilename
    else:
        includetext = ''
    if not header:
        header = ""

    text = """
%s
%s""" % (includetext, header)

    # Add a function declaration if needed.
    if func_name and func_name != "main":
        if not header:
            # No header given: declare the symbol ourselves so the test
            # program links even without the library's real prototype.
            text = text + """
#ifdef __cplusplus
extern "C"
#endif
char %s();
""" % func_name

        # The actual test code.
        if not call:
            call = "%s();" % func_name

    # if no function to test, leave main() blank
    text = text + """
int
main() {
  %s
return 0;
}
""" % (call or "")

    # Build a short, one-line description of the call for the progress message.
    if call:
        i = call.find("\n")
        if i > 0:
            calltext = call[:i] + ".."
        elif call[-1] == ';':
            calltext = call[:-1]
        else:
            calltext = call

    for lib_name in libs:

        lang, suffix, msg = _lang2suffix(language)
        if msg:
            context.Display("Cannot check for library %s: %s\n" % (lib_name, msg))
            return msg

        # if a function was specified to run in main(), say it
        if call:
            context.Display("Checking for %s in %s library %s... "
                            % (calltext, lang, lib_name))
        # otherwise, just say the name of library and language
        else:
            context.Display("Checking for %s library %s... "
                            % (lang, lib_name))

        # Temporarily add the candidate (plus its dependencies) to $LIBS for
        # the link test; 'append' selects append vs. prepend ordering.
        if lib_name:
            l = [ lib_name ]
            if extra_libs:
                l.extend(extra_libs)
            if append:
                oldLIBS = context.AppendLIBS(l)
            else:
                oldLIBS = context.PrependLIBS(l)
            sym = "HAVE_LIB" + lib_name
        else:
            # Empty lib name: test linking with the current $LIBS unchanged.
            oldLIBS = -1
            sym = None

        ret = context.BuildProg(text, suffix)

        _YesNoResult(context, ret, sym, text,
                     "Define to 1 if you have the `%s' library." % lib_name)
        # Restore $LIBS on failure, or when the caller asked not to keep
        # the library (autoadd false) even on success.
        if oldLIBS != -1 and (ret or not autoadd):
            context.SetLIBS(oldLIBS)

        if not ret:
            return ret

    return ret
#
# END OF PUBLIC FUNCTIONS
#
def _YesNoResult(context, ret, key, text, comment = None):
    """
    Record and report the outcome of a yes/no configure test.

    "ret" is the test's return value: empty/falsy means success.
    "key" is the config symbol (HAVE_foo) to define; skipped when falsy.
    "text" is the test program's source, logged when the test failed.
    "comment" is an optional C comment placed above the #define (it is
    wrapped in /* */ automatically).
    """
    if key:
        _Have(context, key, not ret, comment)
    if not ret:
        context.Display("yes\n")
    else:
        context.Display("no\n")
        _LogFailed(context, text, ret)
def _Have(context, key, have, comment = None):
"""
Store result of a test in context.havedict and context.headerfilename.
"key" is a "HAVE_abc" name. It is turned into all CAPITALS and non-
alphanumerics are replaced by an underscore.
The value of "have" can be:
1 - Feature is defined, add "#define key".
0 - Feature is not defined, add "/* #undef key */".
Adding "undef" is what autoconf does. Not useful for the
compiler, but it shows that the test was done.
number - Feature is defined to this number "#define key have".
Doesn't work for 0 or 1, use a string then.
string - Feature is defined to this string "#define key have".
Give "have" as is should appear in the header file, include quotes
when desired and escape special characters!
"""
key_up = key.upper()
key_up = re.sub('[^A-Z0-9_]', '_', key_up)
context.havedict[key_up] = have
if have == 1:
line = "#define %s 1\n" % key_up
elif have == 0:
line = "/* #undef %s */\n" % key_up
elif isinstance(have, IntType):
line = "#define %s %d\n" % (key_up, have)
else:
line = "#define %s %s\n" % (key_up, str(have))
if comment is not None:
lines = "\n/* %s */\n" % comment + line
else:
lines = "\n" + line
if context.headerfilename:
f = open(context.headerfilename, "a")
f.write(lines)
f.close()
elif hasattr(context,'config_h'):
context.config_h = context.config_h + lines
def _LogFailed(context, text, msg):
    """
    Log a failed test program and/or its error message, controlled by the
    module flags LogInputFiles and LogErrorMessages. Source lines are
    numbered so compiler diagnostics can be matched up.
    """
    if LogInputFiles:
        context.Log("Failed program was:\n")
        lines = text.split('\n')
        if lines and lines[-1] == '':
            del lines[-1]  # drop the trailing empty line from a final newline
        for n, line in enumerate(lines, 1):
            context.Log("%d: %s\n" % (n, line))
    if LogErrorMessages:
        context.Log("Error message: %s\n" % msg)
def _lang2suffix(lang):
"""
Convert a language name to a suffix.
When "lang" is empty or None C is assumed.
Returns a tuple (lang, suffix, None) when it works.
For an unrecognized language returns (None, None, msg).
Where:
lang = the unified language name
suffix = the suffix, including the leading dot
msg = an error message
"""
if not lang or lang in ["C", "c"]:
return ("C", ".c", None)
if lang in ["c++", "C++", "cpp", "CXX", "cxx"]:
return ("C++", ".cpp", None)
return None, None, "Unsupported language: %s" % lang
# vim: set sw=4 et sts=4 tw=79 fo+=l:
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
stonekyx/binary
|
vendor/scons-local-2.3.4/SCons/Conftest.py
|
Python
|
gpl-3.0
| 27,721
|
"""Implementation of RootOf class and related tools. """
from __future__ import print_function, division
from sympy.core import (S, Expr, Integer, Float, I, Add, Lambda, symbols,
sympify, Rational, Dummy)
from sympy.core.cache import cacheit
from sympy.core.function import AppliedUndef
from sympy.functions.elementary.miscellaneous import root as _root
from sympy.polys.polytools import Poly, PurePoly, factor
from sympy.polys.rationaltools import together
from sympy.polys.polyfuncs import symmetrize, viete
from sympy.polys.rootisolation import (
dup_isolate_complex_roots_sqf,
dup_isolate_real_roots_sqf)
from sympy.polys.polyroots import (
roots_linear, roots_quadratic, roots_binomial,
preprocess_roots, roots)
from sympy.polys.polyerrors import (
MultivariatePolynomialError,
GeneratorsNeeded,
PolynomialError,
DomainError)
from sympy.polys.domains import QQ
from mpmath import mpf, mpc, findroot, workprec
from mpmath.libmp.libmpf import prec_to_dps
from sympy.utilities import lambdify, public
from sympy.core.compatibility import xrange
from math import log as mathlog
def _ispow2(i):
v = mathlog(i, 2)
return v == int(v)
# Module-level caches of isolating intervals, keyed by polynomial.
# Populated lazily by RootOf._get_reals_sqf / RootOf._get_complexes_sqf and
# copied across equal polynomials by RootOf._new.
_reals_cache = {}
_complexes_cache = {}
@public
class RootOf(Expr):
    """Represents ``k``-th root of a univariate polynomial.

    The root is identified by the polynomial and an integer index into the
    sorted sequence of roots (real roots first, then complex ones); the
    numeric value is only computed on demand via evalf. Isolating intervals
    for the roots are kept in the module-level ``_reals_cache`` /
    ``_complexes_cache`` dictionaries.
    """

    __slots__ = ['poly', 'index']
    is_complex = True
    is_number = True

    def __new__(cls, f, x, index=None, radicals=True, expand=True):
        """Construct a new ``RootOf`` object for ``k``-th root of ``f``. """
        x = sympify(x)

        # Allow RootOf(f, k): when only an integer is given, it is the index.
        if index is None and x.is_Integer:
            x, index = None, x
        else:
            index = sympify(index)

        if index is not None and index.is_Integer:
            index = int(index)
        else:
            raise ValueError("expected an integer root index, got %s" % index)

        poly = PurePoly(f, x, greedy=False, expand=expand)

        if not poly.is_univariate:
            raise PolynomialError("only univariate polynomials are allowed")

        degree = poly.degree()

        if degree <= 0:
            raise PolynomialError("can't construct RootOf object for %s" % f)

        # Negative indices count from the end, as with sequence indexing.
        if index < -degree or index >= degree:
            raise IndexError("root index out of [%d, %d] range, got %d" %
                             (-degree, degree - 1, index))
        elif index < 0:
            index += degree

        dom = poly.get_domain()

        if not dom.is_Exact:
            poly = poly.to_exact()

        # Linear/quadratic/binomial roots are returned in closed form.
        roots = cls._roots_trivial(poly, radicals)

        if roots is not None:
            return roots[index]

        coeff, poly = preprocess_roots(poly)
        dom = poly.get_domain()

        if not dom.is_ZZ:
            raise NotImplementedError("RootOf is not supported over %s" % dom)

        root = cls._indexed_root(poly, index)
        return coeff*cls._postprocess_root(root, radicals)

    @classmethod
    def _new(cls, poly, index):
        """Construct new ``RootOf`` object from raw data. """
        obj = Expr.__new__(cls)

        obj.poly = PurePoly(poly)
        obj.index = index

        # Keep the interval caches keyed consistently for the new PurePoly.
        try:
            _reals_cache[obj.poly] = _reals_cache[poly]
            _complexes_cache[obj.poly] = _complexes_cache[poly]
        except KeyError:
            pass

        return obj

    def _hashable_content(self):
        return (self.poly, self.index)

    @property
    def expr(self):
        return self.poly.as_expr()

    @property
    def args(self):
        return (self.expr, Integer(self.index))

    @property
    def free_symbols(self):
        # RootOf currently only works with univariate expressions and although
        # the poly attribute is often a PurePoly, sometimes it is a Poly. In
        # either case no free symbols should be reported.
        return set()

    def _eval_is_real(self):
        """Return ``True`` if the root is real. """
        # Real roots occupy the first len(_reals_cache[poly]) indices.
        return self.index < len(_reals_cache[self.poly])

    @classmethod
    def real_roots(cls, poly, radicals=True):
        """Get real roots of a polynomial. """
        return cls._get_roots("_real_roots", poly, radicals)

    @classmethod
    def all_roots(cls, poly, radicals=True):
        """Get real and complex roots of a polynomial. """
        return cls._get_roots("_all_roots", poly, radicals)

    @classmethod
    def _get_reals_sqf(cls, factor):
        """Compute real root isolating intervals for a square-free polynomial. """
        if factor in _reals_cache:
            real_part = _reals_cache[factor]
        else:
            _reals_cache[factor] = real_part = \
                dup_isolate_real_roots_sqf(
                    factor.rep.rep, factor.rep.dom, blackbox=True)

        return real_part

    @classmethod
    def _get_complexes_sqf(cls, factor):
        """Compute complex root isolating intervals for a square-free polynomial. """
        if factor in _complexes_cache:
            complex_part = _complexes_cache[factor]
        else:
            _complexes_cache[factor] = complex_part = \
                dup_isolate_complex_roots_sqf(
                    factor.rep.rep, factor.rep.dom, blackbox=True)

        return complex_part

    @classmethod
    def _get_reals(cls, factors):
        """Compute real root isolating intervals for a list of factors. """
        reals = []

        for factor, k in factors:
            real_part = cls._get_reals_sqf(factor)
            # Each entry carries (interval, factor, multiplicity).
            reals.extend([ (root, factor, k) for root in real_part ])

        return reals

    @classmethod
    def _get_complexes(cls, factors):
        """Compute complex root isolating intervals for a list of factors. """
        complexes = []

        for factor, k in factors:
            complex_part = cls._get_complexes_sqf(factor)
            complexes.extend([ (root, factor, k) for root in complex_part ])

        return complexes

    @classmethod
    def _reals_sorted(cls, reals):
        """Make real isolating intervals disjoint and sort roots. """
        cache = {}

        # Pairwise refinement until every interval is disjoint from every
        # other one, so sorting by the left endpoint is unambiguous.
        for i, (u, f, k) in enumerate(reals):
            for j, (v, g, m) in enumerate(reals[i + 1:]):
                u, v = u.refine_disjoint(v)
                reals[i + j + 1] = (v, g, m)

            reals[i] = (u, f, k)

        reals = sorted(reals, key=lambda r: r[0].a)

        for root, factor, _ in reals:
            if factor in cache:
                cache[factor].append(root)
            else:
                cache[factor] = [root]

        # Persist the (now refined) intervals per factor.
        for factor, roots in cache.items():
            _reals_cache[factor] = roots

        return reals

    @classmethod
    def _separate_imaginary_from_complex(cls, complexes):
        from sympy.utilities.iterables import sift

        def is_imag(c):
            '''
            return True if all roots are imaginary (ax**2 + b)
            return False if no roots are imaginary
            return None if 2 roots are imaginary (ax**N'''
            u, f, k = c
            deg = f.degree()
            if f.length() == 2:
                if deg == 2:
                    return True  # both imag
                elif _ispow2(deg):
                    if f.LC()*f.TC() < 0:
                        return None  # 2 are imag
            return False  # none are imag

        # separate according to the function
        sifted = sift(complexes, lambda c: c[1])
        del complexes
        imag = []
        complexes = []
        for f in sifted:
            isift = sift(sifted[f], lambda c: is_imag(c))
            imag.extend(isift.pop(True, []))
            complexes.extend(isift.pop(False, []))
            mixed = isift.pop(None, [])
            assert not isift
            if not mixed:
                continue
            while True:
                # the non-imaginary ones will be on one side or the other
                # of the y-axis
                i = 0
                while i < len(mixed):
                    u, f, k = mixed[i]
                    if u.ax*u.bx > 0:
                        complexes.append(mixed.pop(i))
                    else:
                        i += 1
                if len(mixed) == 2:
                    imag.extend(mixed)
                    break
                # refine
                for i, (u, f, k) in enumerate(mixed):
                    u = u._inner_refine()
                    mixed[i] = u, f, k
        return imag, complexes

    @classmethod
    def _refine_complexes(cls, complexes):
        """return complexes such that no bounding rectangles of non-conjugate
        roots would intersect if slid horizontally or vertically/
        """
        while complexes:  # break when all are distinct
            # get the intervals pairwise-disjoint. If rectangles were drawn around
            # the coordinates of the bounding rectangles, no rectangles would
            # intersect after this procedure
            for i, (u, f, k) in enumerate(complexes):
                for j, (v, g, m) in enumerate(complexes[i + 1:]):
                    u, v = u.refine_disjoint(v)
                    complexes[i + j + 1] = (v, g, m)

                complexes[i] = (u, f, k)
            # Although there are no intersecting rectangles, a given rectangle
            # might intersect another when slid horizontally. We have to refine
            # intervals until this is not true so we can sort the roots
            # unambiguously. Since complex roots come in conjugate pairs, we
            # will always have 2 rectangles above each other but we should not
            # have more than that.
            N = len(complexes)//2 - 1
            # check x (real) parts: there must be N + 1 disjoint x ranges, i.e.
            # the first one must be different from N others
            uu = set([(u.ax, u.bx) for u, _, _ in complexes])
            u = uu.pop()
            if sum([u[1] <= v[0] or v[1] <= u[0] for v in uu]) < N:
                # refine
                for i, (u, f, k) in enumerate(complexes):
                    u = u._inner_refine()
                    complexes[i] = u, f, k
            else:
                # intervals with identical x-values have disjoint y-values or
                # else they would not be disjoint so there is no need for
                # further checks
                break
        return complexes

    @classmethod
    def _complexes_sorted(cls, complexes):
        """Make complex isolating intervals disjoint and sort roots. """
        if not complexes:
            return []
        cache = {}

        # imaginary roots can cause a problem in terms of sorting since
        # their x-intervals will never refine as distinct from others
        # so we handle them separately
        imag, complexes = cls._separate_imaginary_from_complex(complexes)
        complexes = cls._refine_complexes(complexes)

        # sort imaginary roots
        def key(c):
            '''return, for ax**n+b, +/-root(abs(b/a), b) according to the
            apparent sign of the imaginary interval, e.g. if the interval
            were (0, 3) the positive root would be returned.
            '''
            u, f, k = c
            r = _root(abs(f.TC()/f.LC()), f.degree())
            if u.ay < 0 or u.by < 0:
                return -r
            return r
        imag = sorted(imag, key=lambda c: key(c))

        # sort complexes and combine with imag
        if complexes:
            # key is (x1, y1) e.g. (1, 2)x(3, 4) -> (1,3)
            complexes = sorted(complexes, key=
                lambda c: c[0].a)
            # find insertion point for imaginary
            for i, c in enumerate(reversed(complexes)):
                if c[0].bx <= 0:
                    break
            i = len(complexes) - i - 1
            if i:
                i += 1
            complexes = complexes[:i] + imag + complexes[i:]
        else:
            complexes = imag

        # update cache
        for root, factor, _ in complexes:
            if factor in cache:
                cache[factor].append(root)
            else:
                cache[factor] = [root]

        for factor, roots in cache.items():
            _complexes_cache[factor] = roots

        return complexes

    @classmethod
    def _reals_index(cls, reals, index):
        """Map initial real root index to an index in a factor where the root belongs. """
        i = 0

        for j, (_, factor, k) in enumerate(reals):
            if index < i + k:
                poly, index = factor, 0

                # Count earlier roots of the same factor to get the
                # factor-local index.
                for _, factor, _ in reals[:j]:
                    if factor == poly:
                        index += 1

                return poly, index
            else:
                i += k

    @classmethod
    def _complexes_index(cls, complexes, index):
        """Map initial complex root index to an index in a factor where the root belongs. """
        index, i = index, 0

        for j, (_, factor, k) in enumerate(complexes):
            if index < i + k:
                poly, index = factor, 0

                for _, factor, _ in complexes[:j]:
                    if factor == poly:
                        index += 1

                # Offset past the factor's real roots: complex roots come
                # after real ones in the overall ordering.
                index += len(_reals_cache[poly])

                return poly, index
            else:
                i += k

    @classmethod
    def _count_roots(cls, roots):
        """Count the number of real or complex roots including multiplicites. """
        return sum([ k for _, _, k in roots ])

    @classmethod
    def _indexed_root(cls, poly, index):
        """Get a root of a composite polynomial by index. """
        (_, factors) = poly.factor_list()

        reals = cls._get_reals(factors)
        reals_count = cls._count_roots(reals)

        if index < reals_count:
            reals = cls._reals_sorted(reals)
            return cls._reals_index(reals, index)
        else:
            complexes = cls._get_complexes(factors)
            complexes = cls._complexes_sorted(complexes)
            return cls._complexes_index(complexes, index - reals_count)

    @classmethod
    def _real_roots(cls, poly):
        """Get real roots of a composite polynomial. """
        (_, factors) = poly.factor_list()

        reals = cls._get_reals(factors)
        reals = cls._reals_sorted(reals)
        reals_count = cls._count_roots(reals)

        roots = []

        for index in xrange(0, reals_count):
            roots.append(cls._reals_index(reals, index))

        return roots

    @classmethod
    def _all_roots(cls, poly):
        """Get real and complex roots of a composite polynomial. """
        (_, factors) = poly.factor_list()

        reals = cls._get_reals(factors)
        reals = cls._reals_sorted(reals)
        reals_count = cls._count_roots(reals)

        roots = []

        for index in xrange(0, reals_count):
            roots.append(cls._reals_index(reals, index))

        complexes = cls._get_complexes(factors)
        complexes = cls._complexes_sorted(complexes)
        complexes_count = cls._count_roots(complexes)

        for index in xrange(0, complexes_count):
            roots.append(cls._complexes_index(complexes, index))

        return roots

    @classmethod
    @cacheit
    def _roots_trivial(cls, poly, radicals):
        """Compute roots in linear, quadratic and binomial cases. """
        if poly.degree() == 1:
            return roots_linear(poly)

        if not radicals:
            return None

        if poly.degree() == 2:
            return roots_quadratic(poly)
        elif poly.length() == 2 and poly.TC():
            return roots_binomial(poly)
        else:
            return None

    @classmethod
    def _preprocess_roots(cls, poly):
        """Take heroic measures to make ``poly`` compatible with ``RootOf``. """
        dom = poly.get_domain()

        if not dom.is_Exact:
            poly = poly.to_exact()

        coeff, poly = preprocess_roots(poly)
        dom = poly.get_domain()

        if not dom.is_ZZ:
            raise NotImplementedError(
                "sorted roots not supported over %s" % dom)

        return coeff, poly

    @classmethod
    def _postprocess_root(cls, root, radicals):
        """Return the root if it is trivial or a ``RootOf`` object. """
        poly, index = root
        roots = cls._roots_trivial(poly, radicals)

        if roots is not None:
            return roots[index]
        else:
            return cls._new(poly, index)

    @classmethod
    def _get_roots(cls, method, poly, radicals):
        """Return postprocessed roots of specified kind. """
        if not poly.is_univariate:
            raise PolynomialError("only univariate polynomials are allowed")

        coeff, poly = cls._preprocess_roots(poly)
        roots = []

        for root in getattr(cls, method)(poly):
            roots.append(coeff*cls._postprocess_root(root, radicals))

        return roots

    def _get_interval(self):
        """Internal function for retrieving isolation interval from cache. """
        if self.is_real:
            return _reals_cache[self.poly][self.index]
        else:
            reals_count = len(_reals_cache[self.poly])
            return _complexes_cache[self.poly][self.index - reals_count]

    def _set_interval(self, interval):
        """Internal function for updating isolation interval in cache. """
        if self.is_real:
            _reals_cache[self.poly][self.index] = interval
        else:
            reals_count = len(_reals_cache[self.poly])
            _complexes_cache[self.poly][self.index - reals_count] = interval

    def _eval_evalf(self, prec):
        """Evaluate this complex root to the given precision. """
        with workprec(prec):
            g = self.poly.gen
            if not g.is_Symbol:
                d = Dummy('x')
                func = lambdify(d, self.expr.subs(g, d))
            else:
                func = lambdify(g, self.expr)

            interval = self._get_interval()
            if not self.is_real:
                # For complex intervals, we need to keep refining until the
                # imaginary interval is disjunct with other roots, that is,
                # until both ends get refined.
                ay = interval.ay
                by = interval.by
                while interval.ay == ay or interval.by == by:
                    interval = interval.refine()

            while True:
                if self.is_real:
                    a = mpf(str(interval.a))
                    b = mpf(str(interval.b))
                    if a == b:
                        root = a
                        break
                    x0 = mpf(str(interval.center))
                else:
                    ax = mpf(str(interval.ax))
                    bx = mpf(str(interval.bx))
                    ay = mpf(str(interval.ay))
                    by = mpf(str(interval.by))
                    if ax == bx and ay == by:
                        # the sign of the imaginary part will be assigned
                        # according to the desired index using the fact that
                        # roots are sorted with negative imag parts coming
                        # before positive (and all imag roots coming after real
                        # roots)
                        deg = self.poly.degree()
                        i = self.index  # a positive attribute after creation
                        if (deg - i) % 2:
                            if ay < 0:
                                ay = -ay
                        else:
                            if ay > 0:
                                ay = -ay
                        root = mpc(ax, ay)
                        break
                    x0 = mpc(*map(str, interval.center))

                try:
                    root = findroot(func, x0)
                    # If the (real or complex) root is not in the 'interval',
                    # then keep refining the interval. This happens if findroot
                    # accidentally finds a different root outside of this
                    # interval because our initial estimate 'x0' was not close
                    # enough. It is also possible that the secant method will
                    # get trapped by a max/min in the interval; the root
                    # verification by findroot will raise a ValueError in this
                    # case and the interval will then be tightened -- and
                    # eventually the root will be found.
                    if self.is_real:
                        if (a < root < b):
                            break
                    elif (ax < root.real < bx and ay < root.imag < by):
                        break
                except ValueError:
                    pass
                interval = interval.refine()

        return Float._new(root.real._mpf_, prec) + I*Float._new(root.imag._mpf_, prec)

    def eval_rational(self, tol):
        """
        Returns a Rational approximation to ``self`` with the tolerance ``tol``.

        This method uses bisection, which is very robust and it will always
        converge. The returned Rational instance will be at most 'tol' from the
        exact root.

        The following example first obtains Rational approximation to 1e-7
        accuracy for all roots of the 4-th order Legendre polynomial, and then
        evaluates it to 5 decimal digits (so all digits will be correct
        including rounding):

        >>> from sympy import S, legendre_poly, Symbol
        >>> x = Symbol("x")
        >>> p = legendre_poly(4, x, polys=True)
        >>> roots = [r.eval_rational(S(1)/10**7) for r in p.real_roots()]
        >>> roots = [str(r.n(5)) for r in roots]
        >>> roots
        ['-0.86114', '-0.33998', '0.33998', '0.86114']

        """
        if not self.is_real:
            raise NotImplementedError("eval_rational() only works for real polynomials so far")
        func = lambdify(self.poly.gen, self.expr)
        interval = self._get_interval()
        a = Rational(str(interval.a))
        b = Rational(str(interval.b))
        return bisect(func, a, b, tol)

    def _eval_Eq(self, other):
        # RootOf represents a Root, so if other is that root, it should set
        # the expression to zero *and* it should be in the interval of the
        # RootOf instance. It must also be a number that agrees with the
        # is_real value of the RootOf instance.
        if type(self) == type(other):
            return sympify(self.__eq__(other))
        if not (other.is_number and not other.has(AppliedUndef)):
            return S.false
        if not other.is_finite:
            return S.false
        z = self.expr.subs(self.expr.free_symbols.pop(), other).is_zero
        if z is False:  # all roots will make z True but we don't know
            # whether this is the right root if z is True
            return S.false
        o = other.is_real, other.is_imaginary
        s = self.is_real, self.is_imaginary
        if o != s and None not in o and None not in s:
            return S.false
        i = self._get_interval()
        was = i.a, i.b
        need = [True]*2
        # make sure it would be distinct from others
        while any(need):
            i = i.refine()
            a, b = i.a, i.b
            if need[0] and a != was[0]:
                need[0] = False
            if need[1] and b != was[1]:
                need[1] = False
        re, im = other.as_real_imag()
        if not im:
            if self.is_real:
                a, b = [Rational(str(i)) for i in (a, b)]
                return sympify(a < other and other < b)
            return S.false
        if self.is_real:
            return S.false
        z = r1, r2, i1, i2 = [Rational(str(j)) for j in (
            i.ax, i.bx, i.ay, i.by)]
        return sympify((
            r1 < re and re < r2) and (
            i1 < im and im < i2))
@public
class RootSum(Expr):
"""Represents a sum of all roots of a univariate polynomial. """
__slots__ = ['poly', 'fun', 'auto']
def __new__(cls, expr, func=None, x=None, auto=True, quadratic=False):
"""Construct a new ``RootSum`` instance carrying all roots of a polynomial. """
coeff, poly = cls._transform(expr, x)
if not poly.is_univariate:
raise MultivariatePolynomialError(
"only univariate polynomials are allowed")
if func is None:
func = Lambda(poly.gen, poly.gen)
else:
try:
is_func = func.is_Function
except AttributeError:
is_func = False
if is_func and 1 in func.nargs:
if not isinstance(func, Lambda):
func = Lambda(poly.gen, func(poly.gen))
else:
raise ValueError(
"expected a univariate function, got %s" % func)
var, expr = func.variables[0], func.expr
if coeff is not S.One:
expr = expr.subs(var, coeff*var)
deg = poly.degree()
if not expr.has(var):
return deg*expr
if expr.is_Add:
add_const, expr = expr.as_independent(var)
else:
add_const = S.Zero
if expr.is_Mul:
mul_const, expr = expr.as_independent(var)
else:
mul_const = S.One
func = Lambda(var, expr)
rational = cls._is_func_rational(poly, func)
(_, factors), terms = poly.factor_list(), []
for poly, k in factors:
if poly.is_linear:
term = func(roots_linear(poly)[0])
elif quadratic and poly.is_quadratic:
term = sum(map(func, roots_quadratic(poly)))
else:
if not rational or not auto:
term = cls._new(poly, func, auto)
else:
term = cls._rational_case(poly, func)
terms.append(k*term)
return mul_const*Add(*terms) + deg*add_const
@classmethod
def _new(cls, poly, func, auto=True):
"""Construct new raw ``RootSum`` instance. """
obj = Expr.__new__(cls)
obj.poly = poly
obj.fun = func
obj.auto = auto
return obj
@classmethod
def new(cls, poly, func, auto=True):
"""Construct new ``RootSum`` instance. """
if not func.expr.has(*func.variables):
return func.expr
rational = cls._is_func_rational(poly, func)
if not rational or not auto:
return cls._new(poly, func, auto)
else:
return cls._rational_case(poly, func)
@classmethod
def _transform(cls, expr, x):
"""Transform an expression to a polynomial. """
poly = PurePoly(expr, x, greedy=False)
return preprocess_roots(poly)
@classmethod
def _is_func_rational(cls, poly, func):
"""Check if a lambda is areational function. """
var, expr = func.variables[0], func.expr
return expr.is_rational_function(var)
@classmethod
def _rational_case(cls, poly, func):
"""Handle the rational function case. """
roots = symbols('r:%d' % poly.degree())
var, expr = func.variables[0], func.expr
f = sum(expr.subs(var, r) for r in roots)
p, q = together(f).as_numer_denom()
domain = QQ[roots]
p = p.expand()
q = q.expand()
try:
p = Poly(p, domain=domain, expand=False)
except GeneratorsNeeded:
p, p_coeff = None, (p,)
else:
p_monom, p_coeff = zip(*p.terms())
try:
q = Poly(q, domain=domain, expand=False)
except GeneratorsNeeded:
q, q_coeff = None, (q,)
else:
q_monom, q_coeff = zip(*q.terms())
coeffs, mapping = symmetrize(p_coeff + q_coeff, formal=True)
formulas, values = viete(poly, roots), []
for (sym, _), (_, val) in zip(mapping, formulas):
values.append((sym, val))
for i, (coeff, _) in enumerate(coeffs):
coeffs[i] = coeff.subs(values)
n = len(p_coeff)
p_coeff = coeffs[:n]
q_coeff = coeffs[n:]
if p is not None:
p = Poly(dict(zip(p_monom, p_coeff)), *p.gens).as_expr()
else:
(p,) = p_coeff
if q is not None:
q = Poly(dict(zip(q_monom, q_coeff)), *q.gens).as_expr()
else:
(q,) = q_coeff
return factor(p/q)
def _hashable_content(self):
return (self.poly, self.fun)
@property
def expr(self):
return self.poly.as_expr()
@property
def args(self):
return (self.expr, self.fun, self.poly.gen)
@property
def free_symbols(self):
return self.poly.free_symbols | self.fun.free_symbols
@property
def is_commutative(self):
return True
def doit(self, **hints):
if not hints.get('roots', True):
return self
_roots = roots(self.poly, multiple=True)
if len(_roots) < self.poly.degree():
return self
else:
return Add(*[ self.fun(r) for r in _roots ])
    def _eval_evalf(self, prec):
        """Numerically evaluate by summing over numeric roots (``nroots``)."""
        try:
            # prec is in binary digits; nroots wants decimal digits.
            _roots = self.poly.nroots(n=prec_to_dps(prec))
        except (DomainError, PolynomialError):
            # Numeric root finding is not possible; stay symbolic.
            return self
        else:
            return Add(*[ self.fun(r) for r in _roots ])
    def _eval_derivative(self, x):
        """Differentiate by pushing d/dx into the mapped function.

        Valid because the roots themselves do not depend on ``x``
        (``x`` is a free symbol of the summand, not the poly's generator).
        """
        var, expr = self.fun.args
        func = Lambda(var, expr.diff(x))
        return self.new(self.poly, func, self.auto)
def bisect(f, a, b, tol):
    """
    Implements bisection. This function is used in RootOf.eval_rational() and
    it needs to be robust.

    ``f(a)`` and ``f(b)`` must have opposite signs; if either endpoint is
    already an exact root it is returned immediately (the previous version
    raised ValueError in that case, rejecting a perfectly good answer).

    Examples
    ========

    >>> from sympy import S
    >>> from sympy.polys.rootoftools import bisect
    >>> bisect(lambda x: x**2-1, -10, 0, S(1)/10**2)
    -1025/1024
    >>> bisect(lambda x: x**2-1, -10, 0, S(1)/10**4)
    -131075/131072
    """
    a = sympify(a)
    b = sympify(b)
    fa = f(a)
    fb = f(b)
    # An endpoint that is an exact root is a valid result, not an error.
    if fa == 0:
        return a
    if fb == 0:
        return b
    if fa * fb > 0:
        raise ValueError("bisect: f(a) and f(b) must have opposite signs")
    while (b - a > tol):
        c = (a + b)/2
        fc = f(c)
        if (fc == 0):
            return c  # We need to make sure f(c) is not zero below
        if (fa * fc < 0):
            b = c
            fb = fc
        else:
            a = c
            fa = fc
    return (a + b)/2
|
AunShiLord/sympy
|
sympy/polys/rootoftools.py
|
Python
|
bsd-3-clause
| 30,338
|
# Copyright (c) 2008, Aldo Cortesi. All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""
A command shell for Qtile.
"""
from __future__ import division, print_function
import readline
import sys
import pprint
import re
import textwrap
import fcntl
import termios
import struct
import six
from six.moves import input
from . import command
from . import ipc
def terminalWidth():
    """Best-effort width of the controlling terminal, in columns.

    Queries the TIOCGWINSZ ioctl on stdin; falls back to 80 columns when
    the query fails (e.g. stdin is not a tty) or reports zero width.
    """
    cols = None
    try:
        winsize = fcntl.ioctl(0, termios.TIOCGWINSZ, '1234')
        _, cols = struct.unpack('hh', winsize)
        cols = int(cols)
    except (IOError, ImportError):
        pass
    return cols or 80
class QSh:
    """Interactive qtile shell.

    A readline-based REPL that exposes qtile's command graph as a
    pseudo-filesystem: ``cd``/``ls`` navigate nodes, anything else is
    dispatched either to a ``do_*`` builtin or to a command on the
    current node.
    """
    def __init__(self, client, completekey="tab"):
        # clientroot is the root of the command graph; current is the node
        # the user has navigated to.
        self.clientroot = client
        self.current = client
        self.completekey = completekey
        self.termwidth = terminalWidth()
        readline.set_completer(self.complete)
        readline.parse_and_bind(self.completekey + ": complete")
        readline.set_completer_delims(" ()|")
        # Builtin names are derived from the do_* methods (cd, ls, help, ...).
        self.builtins = [i[3:] for i in dir(self) if i.startswith("do_")]
    def _complete(self, buf, arg, state):
        # readline backend: return the state-th completion of `arg`,
        # given the whole line `buf` typed so far.
        if not re.search(r" |\(", buf) or buf.startswith("help "):
            # Completing the first word (or a help topic): builtins plus
            # the current node's commands.
            options = self.builtins + self._commands()
            lst = [i for i in options if i.startswith(arg)]
            if lst and state < len(lst):
                return lst[state]
        elif buf.startswith("cd ") or buf.startswith("ls "):
            # Completing a path: resolve the already-typed components,
            # then match the last (partial) component against that node's
            # children.
            path = [i for i in arg.split("/") if i]
            if arg.endswith("/"):
                last = ""
            else:
                last = path[-1]
                path = path[:-1]
            node = self._findNode(self.current, *path)
            options = [str(i) for i in self._ls(node)]
            lst = []
            path = "/".join(path)
            if path:
                path += "/"
            for i in options:
                if i.startswith(last):
                    lst.append(path + i)
            if lst and state < len(lst):
                return lst[state]
    def complete(self, arg, state):
        # Entry point registered with readline.set_completer().
        buf = readline.get_line_buffer()
        return self._complete(buf, arg, state)
    @property
    def prompt(self):
        """Prompt string showing the current node's path."""
        return "%s> " % self.current.path
    def columnize(self, lst):
        """Lay out `lst` in columns sized to the terminal width."""
        ret = []
        if lst:
            lst = [str(i) for i in lst]
            mx = max([len(i) for i in lst])
            # At least one column, even for very narrow terminals.
            cols = self.termwidth // (mx + 2) or 1
            # Full rows first, then the remainder.
            for i in range(len(lst) // cols):
                sl = lst[i * cols: (i + 1) * cols]
                sl = [x + " " * (mx - len(x)) for x in sl]
                ret.append(" ".join(sl))
            if len(lst) % cols:
                sl = lst[-(len(lst) % cols):]
                sl = [x + " " * (mx - len(x)) for x in sl]
                ret.append(" ".join(sl))
        return "\n".join(ret)
    def _inspect(self, obj):
        """
        Returns an (attrs, keys) tuple.
        """
        # attrs are child node names; keys are selectable items (e.g.
        # indices/names usable as obj[key]).
        if obj.parent and obj.myselector is None:
            t, itms = obj.parent.items(obj.name)
            attrs = obj._contains if t else None
            return (attrs, itms)
        else:
            return (obj._contains, [])
    def _ls(self, obj):
        # Flat list of everything reachable one step below `obj`.
        attrs, itms = self._inspect(obj)
        all = []
        if attrs:
            all.extend(attrs)
        if itms:
            all.extend(itms)
        return all
    def _commands(self):
        # Commands exposed by the current node; empty list if the node
        # cannot enumerate them.
        try:
            return self.current.commands()
        except command.CommandError:
            return []
    def _findNode(self, src, *path):
        """
        Returns a node, or None if no such node exists.
        """
        if not path:
            return src
        attrs, itms = self._inspect(src)
        next = None
        if path[0] == "..":
            # ".." at the root stays at the root.
            next = src.parent or src
        else:
            # A component may be an attribute name (str) or an item
            # selector (int); try both interpretations.
            for trans in [str, int]:
                try:
                    tpath = trans(path[0])
                except ValueError:
                    continue
                if attrs and tpath in attrs:
                    next = getattr(src, tpath)
                elif itms and tpath in itms:
                    next = src[tpath]
        if next:
            if path[1:]:
                return self._findNode(next, *path[1:])
            else:
                return next
        else:
            return None
    def do_cd(self, arg):
        """
        Change to another path.
        Examples:
            cd layout/0
            cd ../layout
        """
        next = self._findNode(self.current, *[i for i in arg.split("/") if i])
        if next:
            self.current = next
        else:
            return "No such path."
    def do_ls(self, arg):
        """
        List contained items on a node.
        Examples:
            ls
            ls ../layout
        """
        # NOTE(review): `arg` is ignored here -- "ls ../layout" lists the
        # current node, despite the docstring example; verify intent.
        l = self._ls(self.current)
        l = ["%s/" % i for i in l]
        return self.columnize(l)
    def do_help(self, arg):
        """
        Provide an overview of all commands or detailed
        help on a specific command or builtin.
        Examples:
            help
            help command
        """
        cmds = self._commands()
        if not arg:
            lst = [
                "help command -- Help for a specific command.",
                "",
                "Builtins:",
                "=========",
                self.columnize(self.builtins),
            ]
            if cmds:
                lst += [
                    "",
                    "Commands for this object:",
                    "=========================",
                    self.columnize(cmds),
                ]
            return "\n".join(lst)
        elif arg in cmds:
            # Node commands document themselves via the "doc" command.
            return self._call("doc", "(\"%s\")" % arg)
        elif arg in self.builtins:
            # Builtins are documented by their own docstrings.
            c = getattr(self, "do_" + arg)
            return textwrap.dedent(c.__doc__).lstrip()
        else:
            return "No such command: %s" % arg
    def do_exit(self, args):
        """
        Exit qsh.
        """
        sys.exit(0)
    do_quit = do_exit
    do_q = do_exit
    def _call(self, cmd_name, args):
        """Invoke `cmd_name` on the current node with literal `args` text."""
        cmds = self._commands()
        if cmd_name not in cmds:
            return "No such command: %s" % cmd_name
        cmd = getattr(self.current, cmd_name)
        if args:
            args = "".join(args)
        else:
            args = "()"
        # The argument text is evaluated as a Python call expression; only
        # `cmd` is visible inside the eval namespace.
        try:
            val = eval(
                "cmd%s" % args,
                {},
                dict(cmd=cmd)
            )
            return val
        except SyntaxError as v:
            return "Syntax error in expression: %s" % v.text
        except command.CommandException as val:
            return "Command exception: %s\n" % val
        except ipc.IPCError:
            # on restart, try to reconnect
            if cmd_name == 'restart':
                client = command.Client(self.clientroot.client.fname)
                self.clientroot = client
                self.current = client
            else:
                raise
    def process_command(self, line):
        """Parse one input line and dispatch to a builtin or node command."""
        # Split at the first non-word character: everything before is the
        # command name, everything from there on is its argument text.
        match = re.search(r"\W", line)
        if match:
            cmd = line[:match.start()].strip()
            args = line[match.start():].strip()
        else:
            cmd = line
            args = ''
        builtin = getattr(self, "do_" + cmd, None)
        if builtin:
            val = builtin(args)
        else:
            val = self._call(cmd, args)
        if isinstance(val, six.string_types):
            print(val)
        elif val:
            pprint.pprint(val)
    def loop(self):
        """Main read-eval-print loop; exits on EOF or Ctrl-C."""
        while True:
            try:
                line = input(self.prompt)
            except (EOFError, KeyboardInterrupt):
                print()
                return
            if not line:
                continue
            self.process_command(line)
|
kiniou/qtile
|
libqtile/sh.py
|
Python
|
mit
| 8,873
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""Tests for the MacKeeper Cache event formatter."""
import unittest
from plaso.formatters import mackeeper_cache
from tests.formatters import test_lib
class MacKeeperCacheFormatterTest(test_lib.EventFormatterTestCase):
  """Tests for the MacKeeper Cache event formatter."""
  def testInitialization(self):
    """Tests the initialization."""
    event_formatter = mackeeper_cache.MacKeeperCacheFormatter()
    # assertIsNotNone is the correct identity check; the previous
    # assertNotEqual(..., None) compared with == instead of `is`.
    self.assertIsNotNone(event_formatter)
  def testGetFormatStringAttributeNames(self):
    """Tests the GetFormatStringAttributeNames function."""
    event_formatter = mackeeper_cache.MacKeeperCacheFormatter()
    expected_attribute_names = [
        u'description',
        u'event_type',
        u'text',
        u'url',
        u'record_id',
        u'room']
    self._TestGetFormatStringAttributeNames(
        event_formatter, expected_attribute_names)
  # TODO: add test for GetMessages.
# TODO: add test for GetMessages.
if __name__ == '__main__':
unittest.main()
|
ostree/plaso
|
tests/formatters/mackeeper_cache.py
|
Python
|
apache-2.0
| 1,008
|
#/usr/bin/python2.7
import json, urllib2
# API Endpoints
BASE_URL = "http://a.4cdn.org/"
ALL_BOARDS_URL = BASE_URL + "boards.json"
BOARD_URL = BASE_URL + "%s/threads.json"
THREAD_URL = BASE_URL + "%s/thread/%s.json"
class Post:
    """A single post inside a 4chan thread, built from the JSON API dict.

    Required keys: ``no``, ``now``, ``resto``. Optional: ``name``, ``com``
    and the file fields, which the API sends together when the post has an
    attachment.
    """
    # All of these must be present for the post to carry a file.
    _FILE_KEYS = ("filename", "tim", "w", "h", "ext", "md5", "fsize")
    def __init__(self, data, b):
        self.board = b
        self.id = data["no"]
        self.time_created = data["now"]
        # dict.get replaces the old try/except KeyError; missing -> None.
        self.author = data.get("name")
        self.reply_to = data["resto"]
        self.comment = data.get("com")
        # Only expose file attributes when every field is present. The old
        # per-field try/except could leave the object half-initialized if
        # the API returned a partial record.
        self.has_file = all(k in data for k in self._FILE_KEYS)
        if self.has_file:
            self.filename = data["filename"]
            self.cdn_filename = data["tim"]
            self.file_width = data["w"]
            self.file_height = data["h"]
            self.file_ext = data["ext"]
            self.md5 = data["md5"]
            self.filesize = data["fsize"]
            self.file_url = "http://i.4cdn.org/%s/%s%s" % (self.board, self.cdn_filename,
                                                           self.file_ext)
class Thread:
    """A 4chan thread: the opening post plus lazily-built reply Posts."""
    def __init__(self, data, b):
        # data is the thread JSON: posts[0] is the OP, the rest are replies.
        self.orig_post = data["posts"][0]
        self.remaining_data = data["posts"][1:]
        self.board = b
        self.id = self.orig_post["no"]
        # NOTE(review): `unicode` is Python 2 only -- this module (urllib2
        # imports, print statements) is Python 2 throughout.
        self.short_title = unicode(self.orig_post["semantic_url"].replace("-"," ")) if "semantic_url" in self.orig_post else None
        self.time_created = self.orig_post["now"]
        self.created_by = self.orig_post["name"]
        # The OP always carries a file on 4chan, so these keys are read
        # unconditionally (unlike Post, which treats them as optional).
        self.file_name = self.orig_post["filename"]
        self.file_ext = self.orig_post["ext"]
        self.md5 = self.orig_post["md5"]
        self.img_url = "http://i.4cdn.org/%s/%s%s" % (self.board, self.orig_post["tim"], self.file_ext)
        self.file_dim = "%s x %s" % (self.orig_post["w"], self.orig_post["h"])
        self.filesize = self.orig_post["fsize"]
        self.reply_count = self.orig_post["replies"]
        self.image_count = self.orig_post["images"]
        self.posts = []
    def get_posts(self):
        # (Re)build Post objects for every reply; also cached on self.posts.
        self.posts = []
        for post in self.remaining_data:
            self.posts.append(Post(post, self.board))
        return self.posts
class Board:
    """A 4chan board, built from one entry of the boards.json listing."""
    def __init__(self, data):
        self.short_name = data["board"]
        self.bump_limit = data["bump_limit"]
        self.cooldowns = data["cooldowns"]
        self.cooldown_images = data["cooldowns"]["images"]
        self.cooldown_replies = data["cooldowns"]["replies"]
        self.cooldown_images_intra = data["cooldowns"]["images_intra"]
        self.cooldown_replies_intra = data["cooldowns"]["replies_intra"]
        self.image_limit = data["image_limit"]
        self.max_comment_chars = data["max_comment_chars"]
        self.max_webm_filesize = data["max_webm_filesize"]
        self.max_filesize = data["max_filesize"]
        self.description = data["meta_description"]
        self.pages = data["pages"]
        self.per_page = data["per_page"]
        self.title = data["title"]
        # ws_board == 0 means the board is not work-safe.
        self.nsfw = data["ws_board"] == 0
        self.threads = []
    def get_all_threads(self):
        """Fetch every thread on the board (one request per thread; slow)."""
        self.threads = []
        for page in json.load(urllib2.urlopen(BOARD_URL % self.short_name)):
            for thread in page["threads"]:
                try:
                    self.threads.append(Thread(json.load(urllib2.urlopen(THREAD_URL % (self.short_name, thread["no"]))), self.short_name))
                except Exception:
                    # Skip threads that 404 or fail to parse -- but no
                    # longer swallow KeyboardInterrupt/SystemExit like the
                    # old bare `except:` did.
                    continue
        return self.threads
    def __str__(self):
        return "%s - /%s/" % (self.title.encode("utf-8"), self.short_name.encode("utf-8"))
    # Get thread by specifying only the thread ID
    def get_thread(self, id):
        """Fetch a single thread on this board by its numeric ID."""
        return Thread(json.load(urllib2.urlopen(THREAD_URL % (self.short_name, id))),
                      self.short_name)
class PyChan:
    """Entry point: fetches the full board list from the 4chan API."""
    def __init__(self):
        # One network request: boards.json lists every board.
        self.boards = []
        for board in json.load(urllib2.urlopen(ALL_BOARDS_URL))["boards"]:
            self.boards.append(Board(board))
    # Print a formatted list of all boards
    def list_boards(self):
        # NOTE(review): Python 2 print statement -- this module is py2 only.
        for board in self.boards:
            print board
    # Select board by either 'short_name' or 'title' (e.g. 'b' or 'Random')
    def select_board(self, b):
        # Returns the first match, or None when no board matches.
        for board in self.boards:
            if board.short_name == b or board.title == b:
                return board
            else:
                continue
    # Get thread by specifying the board and thread ID
    def get_thread(self, b, id):
        return Thread(json.load(urllib2.urlopen(THREAD_URL % (b, id))), b)
|
DW3B/pychan
|
pychan/__init__.py
|
Python
|
mit
| 4,785
|
# Copyright 2021, Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for the single_task_trainer."""
from mint.ctl import single_task_trainer
from third_party.tf_models import orbit
import tensorflow as tf
import tensorflow_datasets as tfds
class SingleTaskTrainerTest(tf.test.TestCase):
  """End-to-end smoke test: the trainer must reduce loss on iris."""
  def test_single_task_training(self):
    iris = tfds.load('iris')
    train_ds = iris['train'].batch(32).repeat()
    # Tiny MLP: 4 iris features -> 3 class logits (no softmax; the loss
    # below is configured with from_logits=True).
    model = tf.keras.Sequential([
        tf.keras.Input(shape=(4,), name='features'),
        tf.keras.layers.Dense(10, activation=tf.nn.relu),
        tf.keras.layers.Dense(10, activation=tf.nn.relu),
        tf.keras.layers.Dense(3)
    ])
    # The piecewise-constant schedule uses the same rate on both sides of
    # the boundary, i.e. effectively a constant 0.01 learning rate.
    trainer = single_task_trainer.SingleTaskTrainer(
        train_ds,
        label_key='label',
        model=model,
        loss_fn=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True),
        optimizer=tf.keras.optimizers.SGD(
            learning_rate=tf.keras.optimizers.schedules.PiecewiseConstantDecay(
                [0], [0.01, 0.01])))
    controller = orbit.Controller(
        trainer=trainer,
        steps_per_loop=100,
        global_step=trainer.optimizer.iterations)
    # One step to get a baseline loss, then 500 more steps of training.
    controller.train(1)
    start_loss = trainer.train_loss.result().numpy()
    controller.train(500)
    end_loss = trainer.train_loss.result().numpy()
    # Assert that the model has trained 'significantly' - that the loss
    # has dropped by over 50%.
    self.assertLess(end_loss, start_loss / 2)
if __name__ == '__main__':
tf.test.main()
|
google-research/mint
|
mint/ctl/single_task_trainer_test.py
|
Python
|
apache-2.0
| 2,020
|
#!/usr/bin/python2
'''
This is an example of how to build a simple generator
'''
def my_reverse(data):
    """Yield the items of *data* from last to first.

    Works on any sequence supporting len() and integer indexing.
    """
    for position in reversed(range(len(data))):
        yield data[position]
for char in my_reverse('golf'):
print(char)
'''
Notice that 'my_reverse' is still recognized as a plain function and not
'generator' or something.
When you do use it for data the return value is a 'generator'.
Compare this to pythons own 'reversed' generator:
- it is built in so it's type is type
- when using it as a generator it's type is 'reversed'.
'''
print(type(my_reverse))
print(type(my_reverse('golf')))
print(type(reversed))
print(type(reversed('golf')))
|
nonZero/demos-python
|
src/examples/short/iteration/iterator_generator.py
|
Python
|
gpl-3.0
| 652
|
from django.contrib import admin
# Register your models here.
from tardis import models
admin.site.register(models.Trip, admin.ModelAdmin)
|
ptevans/django-tardis
|
tardis/admin.py
|
Python
|
mit
| 142
|
#!/usr/bin/env python
#
# Copyright (c) Vicent Marti. All rights reserved.
#
# This file is part of clar, distributed under the ISC license.
# For full terms see the included COPYING file.
#
from __future__ import with_statement
from string import Template
import re, fnmatch, os, codecs, pickle
class Module(object):
    """One clar test module: a single .c test file plus the callbacks
    (test functions, optional initialize/cleanup) discovered inside it."""
    class Template(object):
        # Base for the C code-generation templates below; each renders one
        # fragment of the generated clar.suite file.
        def __init__(self, module):
            self.module = module
        def _render_callback(self, cb):
            # A missing callback renders as a NULL sentinel entry.
            if not cb:
                return ' { NULL, NULL }'
            return ' { "%s", &%s }' % (cb['short_name'], cb['symbol'])
    class DeclarationTemplate(Template):
        # `extern` declarations for every callback in the module.
        def render(self):
            out = "\n".join("extern %s;" % cb['declaration'] for cb in self.module.callbacks) + "\n"
            if self.module.initialize:
                out += "extern %s;\n" % self.module.initialize['declaration']
            if self.module.cleanup:
                out += "extern %s;\n" % self.module.cleanup['declaration']
            return out
    class CallbacksTemplate(Template):
        # The clar_func callback table for the module.
        def render(self):
            out = "static const struct clar_func _clar_cb_%s[] = {\n" % self.module.name
            out += ",\n".join(self._render_callback(cb) for cb in self.module.callbacks)
            out += "\n};\n"
            return out
    class InfoTemplate(Template):
        # One clar_suite table entry for the module.
        def render(self):
            # NOTE: `Template` here resolves to string.Template (imported at
            # module level), NOT the enclosing Module.Template class.
            return Template(
                r"""
    {
        "${clean_name}",
        ${initialize},
        ${cleanup},
        ${cb_ptr}, ${cb_count}, ${enabled}
    }"""
            ).substitute(
                clean_name = self.module.clean_name(),
                initialize = self._render_callback(self.module.initialize),
                cleanup = self._render_callback(self.module.cleanup),
                cb_ptr = "_clar_cb_%s" % self.module.name,
                cb_count = len(self.module.callbacks),
                enabled = int(self.module.enabled)
            )
    def __init__(self, name):
        self.name = name
        self.enabled = True
    def clean_name(self):
        # Module names are path components joined by "_"; display as "::".
        return self.name.replace("_", "::")
    def _skip_comments(self, text):
        """Strip C comments from `text`, leaving string literals intact."""
        # Alternation matters: string literals are matched (and kept) so a
        # "//" or "/*" inside a string is not treated as a comment.
        SKIP_COMMENTS_REGEX = re.compile(
            r'//.*?$|/\*.*?\*/|\'(?:\\.|[^\\\'])*\'|"(?:\\.|[^\\"])*"',
            re.DOTALL | re.MULTILINE)
        def _replacer(match):
            s = match.group(0)
            # Comments start with '/'; strings are returned unchanged.
            return "" if s.startswith('/') else s
        return re.sub(SKIP_COMMENTS_REGEX, _replacer, text)
    def parse(self, contents):
        """Scan C source for test_<module>__<name> functions.

        Returns True when at least one regular test callback was found.
        """
        TEST_FUNC_REGEX = r"^(void\s+(test_%s__(\w+))\(\s*void\s*\))\s*\{"
        contents = self._skip_comments(contents)
        regex = re.compile(TEST_FUNC_REGEX % self.name, re.MULTILINE)
        self.callbacks = []
        self.initialize = None
        self.cleanup = None
        for (declaration, symbol, short_name) in regex.findall(contents):
            data = {
                "short_name" : short_name,
                "declaration" : declaration,
                "symbol" : symbol
            }
            # "initialize" and "cleanup" are reserved fixture names.
            if short_name == 'initialize':
                self.initialize = data
            elif short_name == 'cleanup':
                self.cleanup = data
            else:
                self.callbacks.append(data)
        return self.callbacks != []
    def load(self, path):
        """Parse the file at `path`; False when unreadable or empty of tests."""
        try:
            with open(path) as fp:
                return self.parse(fp.read())
        except IOError:
            return False
class TestSuite(object):
    """All test modules under a directory tree, and the clar.suite writer."""
    def __init__(self, path):
        self.path = path
    def find_modules(self):
        """Walk the tree and return (full_path, module_name) pairs for .c files."""
        modules = []
        for root, _, files in os.walk(self.path):
            # Path relative to the suite root, as a list of components.
            module_root = root[len(self.path):]
            module_root = [c for c in module_root.split(os.sep) if c]
            tests_in_module = fnmatch.filter(files, "*.c")
            for test_file in tests_in_module:
                full_path = os.path.join(root, test_file)
                # Module name: path components + basename (".c" stripped),
                # joined with underscores.
                module_name = "_".join(module_root + [test_file[:-2]])
                modules.append((full_path, module_name))
        return modules
    def load(self, force = False):
        # NOTE(review): `force` is accepted but unused here -- verify callers.
        module_data = self.find_modules()
        self.modules = {}
        for path, name in module_data:
            if name not in self.modules:
                self.modules[name] = Module(name)
            # Drop modules whose file has no test callbacks.
            if not self.modules[name].load(path):
                del self.modules[name]
    def disable(self, excluded):
        """Disable every module whose clean name starts with an excluded prefix."""
        for exclude in excluded:
            for module in self.modules.values():
                name = module.clean_name()
                if name.startswith(exclude):
                    module.enabled = False
                    module.modified = True
    def suite_count(self):
        return len(self.modules)
    def callback_count(self):
        return sum(len(module.callbacks) for module in self.modules.values())
    def write(self):
        """Generate the clar.suite file: declarations, callback tables,
        the suite table (sorted by module name), and the two counters."""
        output = os.path.join(self.path, 'clar.suite')
        with open(output, 'w') as data:
            for module in self.modules.values():
                t = Module.DeclarationTemplate(module)
                data.write(t.render())
            for module in self.modules.values():
                t = Module.CallbacksTemplate(module)
                data.write(t.render())
            suites = "static struct clar_suite _clar_suites[] = {" + ','.join(
                Module.InfoTemplate(module).render() for module in sorted(self.modules.values(), key=lambda module: module.name)
            ) + "\n};\n"
            data.write(suites)
            data.write("static const size_t _clar_suite_count = %d;\n" % self.suite_count())
            data.write("static const size_t _clar_callback_count = %d;\n" % self.callback_count())
if __name__ == '__main__':
    from optparse import OptionParser
    # -x/--exclude may be given multiple times; each value is a module-name
    # prefix (in "a::b" form) whose tests are disabled in the output.
    parser = OptionParser()
    parser.add_option('-x', '--exclude', dest='excluded', action='append', default=[])
    options, args = parser.parse_args()
    # Each positional argument is a suite root; default to the current dir.
    for path in args or ['.']:
        suite = TestSuite(path)
        suite.load()
        suite.disable(options.excluded)
        suite.write()
        print("Written `clar.suite` (%d suites)" % len(suite.modules))
|
duralog/node-sencillo
|
deps/libgit2/tests-clar/generate.py
|
Python
|
mit
| 6,148
|
"""Support for ANEL PwrCtrl switches."""
from datetime import timedelta
import logging
from anel_pwrctrl import DeviceMaster
import voluptuous as vol
from homeassistant.components.switch import PLATFORM_SCHEMA, SwitchEntity
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME
import homeassistant.helpers.config_validation as cv
from homeassistant.util import Throttle
_LOGGER = logging.getLogger(__name__)
CONF_PORT_RECV = "port_recv"
CONF_PORT_SEND = "port_send"
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=5)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_PORT_RECV): cv.port,
vol.Required(CONF_PORT_SEND): cv.port,
vol.Required(CONF_USERNAME): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Optional(CONF_HOST): cv.string,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up PwrCtrl devices/switches."""
    host = config.get(CONF_HOST)
    try:
        # Note the send/recv swap: our send port is the device's read port.
        master = DeviceMaster(
            username=config[CONF_USERNAME],
            password=config[CONF_PASSWORD],
            read_port=config[CONF_PORT_SEND],
            write_port=config[CONF_PORT_RECV],
        )
        master.query(ip_addr=host)
    except OSError as ex:
        _LOGGER.error("Unable to discover PwrCtrl device: %s", str(ex))
        return False
    # One throttled wrapper per physical device, one entity per outlet.
    entities = []
    for anel_device in master.devices.values():
        throttled_device = PwrCtrlDevice(anel_device)
        for outlet in anel_device.switches.values():
            entities.append(PwrCtrlSwitch(outlet, throttled_device))
    add_entities(entities)
class PwrCtrlSwitch(SwitchEntity):
    """Representation of a PwrCtrl switch."""
    def __init__(self, port, parent_device):
        """Initialize the PwrCtrl switch."""
        # `port` is a single outlet; `parent_device` throttles updates for
        # the whole physical device (see PwrCtrlDevice below).
        self._port = port
        self._parent_device = parent_device
    @property
    def unique_id(self):
        """Return the unique ID of the device."""
        # Host plus outlet index: stable across renames of the outlet label.
        return f"{self._port.device.host}-{self._port.get_index()}"
    @property
    def name(self):
        """Return the name of the device."""
        return self._port.label
    @property
    def is_on(self):
        """Return true if the device is on."""
        return self._port.get_state()
    def update(self):
        """Trigger update for all switches on the parent device."""
        # Delegating keeps polling to one request per device, not per outlet.
        self._parent_device.update()
    def turn_on(self, **kwargs):
        """Turn the switch on."""
        self._port.on()
    def turn_off(self, **kwargs):
        """Turn the switch off."""
        self._port.off()
class PwrCtrlDevice:
    """Device representation for per device throttling."""
    def __init__(self, device):
        """Initialize the PwrCtrl device."""
        self._device = device
    # Throttle collapses the per-outlet update() calls into at most one
    # device refresh per MIN_TIME_BETWEEN_UPDATES window.
    @Throttle(MIN_TIME_BETWEEN_UPDATES)
    def update(self):
        """Update the device and all its switches."""
        self._device.update()
|
tboyce021/home-assistant
|
homeassistant/components/anel_pwrctrl/switch.py
|
Python
|
apache-2.0
| 3,005
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2017-05-18 00:01
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated by Django 1.11: relaxes CloudProjectFaculty fields
    # (blank/null) and refreshes their help_text / max_length settings.
    # Generated migrations should not be edited by hand once applied.
    dependencies = [
        ('reports', '0025_auto_20170518_0955'),
    ]
    operations = [
        migrations.AlterField(
            model_name='cloudprojectfaculty',
            name='allocated_faculty',
            field=models.CharField(blank=True, help_text='The faculty we have decided the project belongs to', max_length=7, null=True),
        ),
        migrations.AlterField(
            model_name='cloudprojectfaculty',
            name='chief_investigator',
            field=models.CharField(blank=True, help_text="The nominated chief investigator's email", max_length=75, null=True),
        ),
        migrations.AlterField(
            model_name='cloudprojectfaculty',
            name='contact_email',
            field=models.CharField(blank=True, help_text="The applicant's email", max_length=75, null=True),
        ),
        migrations.AlterField(
            model_name='cloudprojectfaculty',
            name='for_code',
            field=models.CharField(blank=True, help_text='The highest weighted FOR code', max_length=6, null=True),
        ),
        migrations.AlterField(
            model_name='cloudprojectfaculty',
            name='name',
            field=models.CharField(blank=True, help_text='The project name', max_length=64, null=True),
        ),
        migrations.AlterField(
            model_name='cloudprojectfaculty',
            name='project_id',
            field=models.TextField(default='0', help_text='The projects ID', max_length=32, primary_key=True, serialize=False, unique=True),
        ),
    ]
|
MartinPaulo/ReportsAlpha
|
reports/migrations/0026_auto_20170518_1001.py
|
Python
|
gpl-3.0
| 1,755
|
#coding:utf-8
bind = 'unix:/var/run/gunicorn.sock'
workers = 4
# Change this for production: run workers as a dedicated unprivileged user, not root.
user = 'root'
# 'debug' is the most verbose level; consider 'error' for production.
loglevel = 'debug'
errorlog = '-'
logfile = '/var/log/gunicorn/debug.log'
timeout = 300
secure_scheme_headers = {
'X-SCHEME': 'https',
}
x_forwarded_for_header = 'X-FORWARDED-FOR'
|
chenke91/ckPermission
|
settings.py
|
Python
|
mit
| 313
|
from sys import stderr
from urlparse import urlparse, urljoin
from urllib import urlencode
from itertools import count
from hashlib import sha1
import json
from socket import getaddrinfo, AF_INET, SOCK_STREAM
from xml.etree.ElementTree import fromstring as parse_xml
from requests import get, post
api_defaults = dict(
UserName='codeforamerica',
ApiUser='codeforamerica',
SLD='codeforamerica',
TLD='org'
)
def format_csv_row(row):
    ''' Format row from input CSV so it is ready for hash_host_records().

        Return a sorted list of two-element tuples; an empty MXPref is
        normalized to '0'.
    '''
    pairs = [
        ('type', row['Type']),
        ('name', row['Host']),
        ('value', row['Value']),
        ('ttl', row['TTL']),
        ('mxpref', row['MXPref'] or '0'),
    ]
    return sorted(pairs)
def format_xml_element(el):
    ''' Format element from API response so it is ready for hash_host_records().

        Return a sorted list of two-element tuples, keyed the same way as
        format_csv_row() so CSV and API records hash identically.
    '''
    attrs = el.attrib
    pairs = [
        ('type', attrs['Type']),
        ('name', attrs['Name']),
        ('value', attrs['Address']),
        ('ttl', attrs['TTL']),
        ('mxpref', attrs['MXPref']),
    ]
    return sorted(pairs)
def hash_host_records(formatted_records):
    '''Return the SHA-1 hex digest of the canonical serialization of records.

    `formatted_records` is an iterable of records as produced by
    format_csv_row()/format_xml_element(); sorting plus compact JSON makes
    the digest independent of record order and whitespace.
    '''
    kwargs = dict(ensure_ascii=True, separators=(',', ':'))
    serialized = json.dumps(sorted(formatted_records), **kwargs)
    # encode('ascii') is a no-op on Python 2 (str -> str) and required on
    # Python 3, where sha1() rejects text input.
    return sha1(serialized.encode('ascii')).hexdigest()
def get_proxy_ipaddr(api_proxy_base):
    ''' Resolve the API proxy URL to its single IPv4 address.

        NameCheap uses IP address white-listing to secure their DNS records
        API, while this app is designed to be hosted on Heroku with its
        unstable IP addresses. So, we use an HTTP proxy to route requests
        from a stable location.
    '''
    hostname = urlparse(api_proxy_base)[1]
    # The single-element unpack deliberately fails if DNS returns more than
    # one address -- the proxy must have exactly one stable IP.
    (record,) = getaddrinfo(hostname, 443, AF_INET, SOCK_STREAM)
    sockaddr = record[4]
    return sockaddr[0]
def check_upstream(api_proxy_base, api_key):
    ''' Check connectivity and consistency of NameCheap-hosted records.

        Fetches all host records, recomputes their hash, and compares it to
        the special 'hosts-hash' TXT record written by push_upstream().
        Throw exceptions if a problem is found, otherwise return nothing.
    '''
    query = dict(
        ApiKey=api_key,
        Command='namecheap.domains.dns.getHosts',
        ClientIp=get_proxy_ipaddr(api_proxy_base)
        )
    query.update(api_defaults)
    got = get(api_proxy_base + '?' + urlencode(query))
    tree = parse_xml(got.content)
    # Surface any API-level errors embedded in the XML body.
    for el in tree.iter('{http://api.namecheap.com/xml.response}Error'):
        raise ValueError('Upstream API error: {}'.format(el.text))
    hosts, expected_hash = [], None
    for el in tree.iter('{http://api.namecheap.com/xml.response}host'):
        # The hosts-hash TXT record stores the expected digest; it is
        # excluded from the hash computation itself.
        if (el.attrib['Type'], el.attrib['Name']) == ('TXT', 'hosts-hash'):
            expected_hash = el.attrib['Address']
        else:
            hosts.append(format_xml_element(el))
    found_hash = hash_host_records(hosts)
    if expected_hash != found_hash:
        raise ValueError('Calculated hash {} but expected {}'.format(found_hash, expected_hash))
    # Python 2 print-to-stderr syntax; this module is Python 2 (urlparse).
    print >> stderr, 'Remote host checks out with hash "{}"'.format(found_hash)
def push_upstream(api_proxy_base, api_key, host_records):
    ''' Post replacement host records to NameCheap.

        Writes the records' hash as a leading 'hosts-hash' TXT record so a
        later check_upstream() can verify consistency.
        Throw exceptions if a problem is found, otherwise return nothing.
    '''
    hash = hash_host_records(map(format_csv_row, host_records))
    form = dict(
        ApiKey=api_key,
        Command='namecheap.domains.dns.setHosts',
        ClientIp=get_proxy_ipaddr(api_proxy_base),

        # Hash record is the first record.
        HostName1='hosts-hash',
        RecordType1='TXT',
        Address1=hash,
        MXPref1=0,
        TTL1=300
        )
    form.update(api_defaults)
    # Real records are numbered from 2; slot 1 holds the hash above.
    for (record, number) in zip(host_records, count(2)):
        form.update({
            'HostName{:d}'.format(number): record['Host'],
            'RecordType{:d}'.format(number): record['Type'],
            'Address{:d}'.format(number): record['Value'],
            'MXPref{:d}'.format(number): record['MXPref'] or '0',
            'TTL{:d}'.format(number): record['TTL']
            })
    posted = post(api_proxy_base, data=form)
    tree = parse_xml(posted.content)
    for el in tree.iter('{http://api.namecheap.com/xml.response}Error'):
        raise ValueError('Upstream API error: {}'.format(el.text))
    # range(200, 300): the previous range(200, 299) wrongly rejected
    # status 299, which is a valid 2xx response.
    if posted.status_code not in range(200, 300):
        raise Exception('Bad response status {}'.format(posted.status_code))
|
codeforamerica/DNS-Service
|
cfa_dns/api.py
|
Python
|
isc
| 4,496
|
# -*- coding: utf-8 -*-
# Copyright (C) 2007-2018, Raffaele Salmaso <raffaele@salmaso.org>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import absolute_import, division, print_function, unicode_literals
def get_version(version=None):
    """Return a PEP 386-compliant version string built from VERSION.

    ``version`` is a 5-tuple ``(major, minor, micro, stage, serial)`` where
    stage is 'alpha', 'beta', 'rc' or 'final'. Defaults to the package's
    own VERSION.
    """
    if version is None:
        from . import VERSION as version
    assert len(version) == 5
    assert version[3] in ('alpha', 'beta', 'rc', 'final')
    # Drop the micro component when it is zero: (1, 2, 0, ...) -> "1.2".
    numeric_count = 3 if version[2] else 2
    numbers = '.'.join(str(piece) for piece in version[:numeric_count])
    suffix = ''
    if version[3] != 'final':
        abbrev = {'alpha': 'a', 'beta': 'b', 'rc': 'c'}[version[3]]
        suffix = abbrev + str(version[4])
    return str(numbers + suffix)
|
rsalmaso/django-babeljs
|
babeljs/version.py
|
Python
|
mit
| 1,758
|
'''
Copyright (C) 2013 Rasmus Eneman <rasmus@eneman.eu>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
from backend.plugin_helpers import PluginMount
class InputBlockFunction:
    """
    Mount point for plugins which need to register an input block function

    Plugins implementing this reference should provide the following attributes:
    label   The label of the button

    Plugins implementing this reference may provide the following attributes:
    settings_view  A class based generic_view which provides a settings form; an id to a LogicBlock
                   model will be provided as a GET variable if applicable

    Plugins implementing this reference should provide the following methods:
    get_label(model)
        model    LogicBlock  the model the block represents
        returns  string with up to three rows separated by \n
    """
    # NOTE(review): __metaclass__ is Python 2 syntax; under Python 3 this is
    # ignored and the class would need `metaclass=PluginMount` instead.
    __metaclass__ = PluginMount
class LogicBlockFunction:
    """
    Mount point for plugins which need to register a logic block function

    Plugins implementing this reference should provide the following attributes:
    label   The label of the button

    Plugins implementing this reference may provide the following attributes:
    settings_view  A class based generic_view which provides a settings form; an id to a LogicBlock
                   model will be provided as a GET variable if applicable

    Plugins implementing this reference should provide the following methods:
    get_label(model)
        model    LogicBlock  the model the block represents
        returns  string with up to two rows separated by \n
    check_logic(inputs, model)
        attribute inputs  list<boolean>  a list of the active state of all inputs
        model    LogicBlock  the model the block represents
        returns  boolean  the active state computed from the state of the inputs
    """
    # NOTE(review): Python 2 metaclass syntax; ignored under Python 3.
    __metaclass__ = PluginMount
class OutputBlockFunction:
    """
    Mount point for plugins which need to register an output block function.

    Plugins implementing this reference should provide the following attributes:

    label           The label of the button

    Plugins implementing this reference may provide the following attributes:

    settings_view   A class based generic_view which provides a settings form; an id of a
                    LogicBlock model will be provided as a GET variable if applicable

    Plugins implementing this reference should provide the following methods:

    get_label(model)
        model       LogicBlock  the model the block represents

        returns a string with up to three rows separated by \n

    do_action(model)
        model       LogicBlock  the model the block represents
    """
    # Python 2 style plugin registration via the shared PluginMount metaclass.
    __metaclass__ = PluginMount
|
Pajn/RAXA-Django
|
automation/plugin_mounts.py
|
Python
|
agpl-3.0
| 3,296
|
# -*- coding: utf-8 -*-
import gnupg
import shutil
import yaml
from humanfriendly import parse_size
from jinja2 import Environment, PackageLoader
from json import load, dumps
from os import makedirs, mkdir, chmod, environ, listdir, remove, stat
from os.path import exists, isdir, join, splitext, getmtime, split
from datetime import datetime
from random import SystemRandom
from zipfile import ZipFile, ZIP_STORED
from subprocess import call
from .notifications import (
checkRecipient,
sendMultiPart,
setup_smtp_factory
)
# Template environment for editor notification emails (see _notification_text).
jinja_env = Environment(loader=PackageLoader('briefkasten', 'templates'))
# Alphabet for random drop ids / editor tokens. It appears to omit easily
# confused characters (0, 1, O, l) -- TODO confirm that this is intentional.
allchars = '23456qwertasdfgzxcvbQWERTASDFGZXCVB789yuiophjknmYUIPHJKLNM'
def generate_drop_id(length=8):
    """Return a random identifier of `length` characters drawn from `allchars`.

    Uses `random.SystemRandom` (OS entropy source), so the result is suitable
    for unguessable drop ids and editor tokens.
    """
    rng = SystemRandom()
    return ''.join(rng.choice(allchars) for _ in range(length))
def sanitize_filename(filename):
    """preserve the file ending, but replace the name with a random token """
    # TODO: fix broken splitext (it reveals everything of the filename after the first `.` - doh!)
    token = generate_drop_id()
    name, extension = splitext(filename)
    # Keep only the extension from the original name; the base name is discarded.
    return '%s%s' % (token, extension) if extension else token
class DropboxContainer(object):
    # Root object managing all drops on disk. Owns the directory layout
    # (drops/, submissions/, scratch/, archive_*), the merged settings dict,
    # the SMTP factory and the shared GPG context used by every Dropbox.

    def __init__(self, root=None, settings=None):
        self.fs_root = root
        self.fs_path = join(root, 'drops')
        self.fs_submission_queue = join(root, 'submissions')
        self.fs_scratch = join(root, 'scratch')
        # initialise settings from disk and parameters
        # settings provided as init parameter take precedence over values on-disk
        # which in turn take precedence over default values
        self.settings = dict(
            attachment_size_threshold=u'2Mb',
        )
        self.settings.update(**self.parse_settings())
        if settings is not None:
            self.settings.update(**settings)
        # set archive paths (configurable; default to subdirs of the root)
        self.fs_archive_cleansed = self.settings.get('dropbox_cleansed_archive_path', join(root, 'archive_cleansed'))
        self.fs_archive_dirty = self.settings.get('dropbox_dirty_archive_path', join(root, 'archive_dirty'))
        self.fs_archive = dict(
            clean=self.fs_archive_cleansed,
            dirty=self.fs_archive_dirty,
        )
        # set smtp instance defensively, to not overwrite mocked version from test settings:
        if 'smtp' not in self.settings:
            self.settings['smtp'] = setup_smtp_factory(**self.settings)
        # setup GPG (shared by all Dropbox instances of this container)
        self.gpg_context = gnupg.GPG(
            gnupghome=self.settings['fs_pgp_pubkeys'],
            gpgbinary=self.settings.get('fs_gpg_path', 'gpg'),
        )
        # convert human readable size (e.g. u'2Mb') to bytes
        self.settings['attachment_size_threshold'] = parse_size(self.settings['attachment_size_threshold'])
        # ensure directories exist
        for directory in [
                self.fs_root,
                self.fs_path,
                self.fs_submission_queue,
                self.fs_archive_cleansed,
                self.fs_archive_dirty,
                self.fs_scratch]:
            if not exists(directory):
                makedirs(directory)

    def parse_settings(self):
        # Reads optional on-disk overrides from <root>/settings.yaml.
        # NOTE(review): uses yaml.load without an explicit Loader -- fine for a
        # trusted local config file, but consider yaml.safe_load.
        fs_settings = join(self.fs_root, 'settings.yaml')
        if exists(fs_settings):
            with open(fs_settings, 'r') as settings:
                return yaml.load(settings)
        else:
            return dict()

    def add_dropbox(self, drop_id, message=None, attachments=None, from_watchdog=False):
        # Creates (or re-opens) a drop with the given content.
        return Dropbox(self, drop_id, message=message, attachments=attachments, from_watchdog=from_watchdog)

    def get_dropbox(self, drop_id):
        """ returns the dropbox with the given id, if it does not exist an empty dropbox
            will be created and returned"""
        return Dropbox(self, drop_id=drop_id)

    def destroy(self):
        # Irreversibly removes the whole container tree from disk.
        shutil.rmtree(self.fs_root)

    def __contains__(self, drop_id):
        return exists(join(self.fs_path, drop_id))

    def __iter__(self):
        # Yields a Dropbox for every directory found under drops/.
        for candidate in listdir(self.fs_path):
            if isdir(join(self.fs_path, candidate)):
                yield self.get_dropbox(candidate)
class Dropbox(object):
    # One submission ("drop"): a message plus optional attachments stored under
    # the container's drops/ directory. Status is tracked as a numeric string
    # ('010 created' ... '900 success') persisted in a `status` file.

    def __init__(self, container, drop_id, message=None, attachments=None, from_watchdog=False):
        """
        the attachments are expected to conform to what the webob library uses for file uploads,
        namely an instance of `cgi.FieldStorage` with the following attributes:
        - a file handle under the key `file`
        - the name of the file under `filename`
        """
        self.drop_id = drop_id
        self.container = container
        self.paths_created = []
        self.send_attachments = False
        self.fs_path = fs_dropbox_path = join(container.fs_path, drop_id)
        self.fs_attachment_container = join(self.fs_path, 'attach')
        self.fs_cleansed_attachment_container = join(self.fs_path, 'clean')
        self.fs_replies_path = join(self.fs_path, 'replies')
        self.gpg_context = self.container.gpg_context
        self.admins = self.settings['admins']
        if not exists(fs_dropbox_path):
            # first access: create the drop directory and an editor token
            mkdir(fs_dropbox_path)
            chmod(fs_dropbox_path, 0770)
            self.paths_created.append(fs_dropbox_path)
            self.status = u'010 created'
            # create an editor token
            self.editor_token = editor_token = generate_drop_id()
            self._write_message(fs_dropbox_path, 'editor_token', editor_token)
            self.from_watchdog = from_watchdog
        else:
            # re-opening an existing drop: its token was persisted on creation
            self.editor_token = open(join(self.fs_path, 'editor_token')).readline()
        # set recipients of email depending on watchdog status
        if self.from_watchdog:
            self.editors = [self.settings['watchdog_imap_recipient']]
        else:
            self.editors = self.settings['editors']
        if message is not None:
            # write the message into a file
            self._write_message(fs_dropbox_path, 'message', message)
        # write the attachment into a file
        if attachments is not None:
            for attachment in attachments:
                if attachment is None:
                    continue
                self.add_attachment(attachment)

    #
    # top level methods that govern the life cycle of a dropbox:
    def add_attachment(self, attachment):
        # Stores `attachment` under a random filename (only the original
        # extension is preserved, see sanitize_filename) and returns that name.
        fs_attachment_container = self.fs_attachment_container
        if not exists(fs_attachment_container):
            mkdir(fs_attachment_container)
            chmod(fs_attachment_container, 0770)
            self.paths_created.append(fs_attachment_container)
        sanitized_filename = sanitize_filename(attachment.filename)
        fs_attachment_path = join(fs_attachment_container, sanitized_filename)
        with open(fs_attachment_path, 'w') as fs_attachment:
            shutil.copyfileobj(attachment.file, fs_attachment)
            # NOTE(review): explicit close is redundant under `with`
            fs_attachment.close()
        chmod(fs_attachment_path, 0660)
        self.paths_created.append(fs_attachment_path)
        return sanitized_filename

    def submit(self):
        # Queues this drop for processing by touching a marker file in the
        # container's submissions/ directory.
        with open(join(self.container.fs_submission_queue, self.drop_id), 'w'):
            pass
        self.status = u'020 submitted'

    def process(self):
        """ Calls the external cleanser scripts to (optionally) purge the meta data and then
            send the contents of the dropbox via email.
        """
        if self.num_attachments > 0:
            self.status = u'100 processor running'
            fs_dirty_archive = self._create_backup()
            # calling _process_attachments has the side-effect of updating `send_attachments`
            self._process_attachments()
            if self.status_int < 500 and not self.send_attachments:
                self._create_archive()
            if self.status_int >= 500 and self.status_int < 600:
                # cleansing failed
                # if configured, we need to move the uncleansed archive to
                # the appropriate folder and notify the editors
                if 'dropbox_dirty_archive_url_format' in self.settings:
                    # create_archive
                    shutil.move(
                        fs_dirty_archive,
                        '%s/%s.zip.pgp' % (self.container.fs_archive_dirty, self.drop_id))
                    # update status
                    # it's now considered 'successful-ish' again
                    self.status = '490 cleanser failure but notify success'
            if self.status_int == 800:
                # at least one attachment was not supported
                # if configured, we need to move the uncleansed archive to
                # the appropriate folder and notify the editors
                if 'dropbox_dirty_archive_url_format' in self.settings:
                    # create_archive
                    shutil.move(
                        fs_dirty_archive,
                        '%s/%s.zip.pgp' % (self.container.fs_archive_dirty, self.drop_id))
        if self.status_int < 500 or self.status_int == 800:
            try:
                if self._notify_editors() > 0:
                    if self.status_int < 500:
                        self.status = '900 success'
                else:
                    self.status = '605 smtp failure'
            except Exception:
                import traceback
                tb = traceback.format_exc()
                self.status = '610 smtp error (%s)' % tb
        self.cleanup()
        return self.status

    def cleanup(self):
        """ ensures that no data leaks from drop after processing by
            removing all data except the status file"""
        # NOTE(review): if removing `message` raises OSError, the removal of
        # dirty.zip.pgp below is skipped -- presumably acceptable best-effort.
        try:
            remove(join(self.fs_path, u'message'))
            remove(join(self.fs_path, 'dirty.zip.pgp'))
        except OSError:
            pass
        shutil.rmtree(join(self.fs_path, u'clean'), ignore_errors=True)
        shutil.rmtree(join(self.fs_path, u'attach'), ignore_errors=True)

    def add_reply(self, reply):
        """ Add an editorial reply to the drop box.

            :param reply: the message, must conform to :class:`views.DropboxReplySchema`
        """
        self._write_message(self.fs_replies_path, 'message_001.txt', dumps(reply))

    #
    # "private" helper methods for processing a drop
    def _create_encrypted_zip(self, source='dirty', fs_target_dir=None):
        """ creates a zip file from the drop and encrypts it to the editors.
            the encrypted archive is created inside fs_target_dir"""
        backup_recipients = [r for r in self.editors if checkRecipient(self.gpg_context, r)]
        # this will be handled by watchdog, no need to send for each drop
        if not backup_recipients:
            self.status = u'500 no valid keys at all'
            return self.status
        # calculate paths
        fs_backup = join(self.fs_path, '%s.zip' % source)
        if fs_target_dir is None:
            fs_backup_pgp = join(self.fs_path, '%s.zip.pgp' % source)
        else:
            fs_backup_pgp = join(fs_target_dir, '%s.zip.pgp' % self.drop_id)
        fs_source = dict(
            dirty=self.fs_dirty_attachments,
            clean=self.fs_cleansed_attachments
        )
        # create archive (uncompressed; contents are encrypted right after)
        with ZipFile(fs_backup, 'w', ZIP_STORED) as backup:
            if exists(join(self.fs_path, 'message')):
                backup.write(join(self.fs_path, 'message'), arcname='message')
            for fs_attachment in fs_source[source]:
                backup.write(fs_attachment, arcname=split(fs_attachment)[-1])
        # encrypt archive to every editor with a usable key
        with open(fs_backup, "rb") as backup:
            self.gpg_context.encrypt_file(
                backup,
                backup_recipients,
                always_trust=True,
                output=fs_backup_pgp
            )
        # cleanup: only the encrypted copy is kept
        remove(fs_backup)
        return fs_backup_pgp

    def _create_backup(self):
        # Snapshot of the drop before cleansing, encrypted, inside the drop dir.
        self.status = u'101 creating initial encrypted backup'
        return self._create_encrypted_zip(source='dirty')

    def _process_attachments(self):
        # Runs the external cleanser shell script; the script is expected to
        # update the on-disk status file (read back via self.status_int).
        self.status = u'105 processing attachments'
        fs_process = join(self.settings['fs_bin_path'], 'process-attachments.sh')
        fs_config = join(self.settings['fs_bin_path'], 'briefkasten.conf')
        shellenv = environ.copy()
        shellenv['PATH'] = '%s:%s:/usr/local/bin/:/usr/local/sbin/' % (shellenv['PATH'], self.settings['fs_bin_path'])
        # NOTE(review): shell=True with interpolated paths; paths come from
        # trusted config and a server-generated drop_id, but worth confirming.
        call(
            "%s -d %s -c %s" % (fs_process, self.fs_path, fs_config),
            shell=True,
            env=shellenv)
        # status is now < 500 if cleansing was successful or >= 500 && < 600 if cleansing failed
        # or 800 if cleansing was not supported
        # update the decision whether to include attachments in email or not based on size of cleansed attachments:
        # and whether we have an archive for uncleansed attachemts (if we do, don't send them via email, if we
        # don't do send them via email, because otherwise editors would never receive those at all.)
        if self.status_int < 500:
            self.send_attachments = self.size_attachments < self.settings.get('attachment_size_threshold', 0)
        elif self.status_int == 800 and 'dropbox_dirty_archive_url_format' not in self.settings:
            self.send_attachments = True
        else:
            self.send_attachments = False

    def _create_archive(self):
        """ creates an encrypted archive of the dropbox outside of the drop directory.
        """
        self.status = u'270 creating final encrypted backup of cleansed attachments'
        return self._create_encrypted_zip(source='clean', fs_target_dir=self.container.fs_archive_cleansed)

    def _notify_editors(self):
        # Sends the notification email; returns the sendMultiPart result
        # (number of messages sent, judging by the caller's `> 0` check).
        if self.send_attachments:
            attachments = self.fs_cleansed_attachments
        else:
            attachments = []
        return sendMultiPart(
            self.settings['smtp'],
            self.gpg_context,
            self.settings['mail.default_sender'],
            self.editors,
            u'Drop %s' % self.drop_id,
            self._notification_text,
            attachments
        )

    #
    # helper properties:
    @property
    def num_attachments(self):
        """returns the current number of uploaded attachments in the filesystem"""
        if exists(self.fs_attachment_container):
            return len(listdir(self.fs_attachment_container))
        else:
            return 0

    @property
    def size_attachments(self):
        """returns the number of bytes that the cleansed attachments take up on disk"""
        total_size = 0
        for attachment in self.fs_cleansed_attachments:
            total_size += stat(attachment).st_size
        return total_size

    @property
    def replies(self):
        """ returns a list of strings """
        fs_reply_path = join(self.fs_replies_path, 'message_001.txt')
        if exists(fs_reply_path):
            return [load(open(fs_reply_path, 'r'))]
        else:
            return []

    @property
    def message(self):
        """ returns the user submitted text
        """
        try:
            with open(join(self.fs_path, u'message')) as message_file:
                return u''.join([line.decode('utf-8') for line in message_file.readlines()])
        except IOError:
            return u''

    @message.setter
    def message(self, newtext):
        """ overwrite the message text. this also updates the corresponding file. """
        self._write_message(self.fs_path, 'message', newtext)

    @property
    def from_watchdog(self):
        # The watchdog flag is persisted as the presence of a marker file.
        try:
            with open(join(self.fs_path, u'from_watchdog')):
                return True
        except IOError:
            return False

    @from_watchdog.setter
    def from_watchdog(self, value):
        fs_path = join(self.fs_path, u'from_watchdog')
        if value:
            with open(fs_path, 'w') as status_file:
                status_file.write('True')
        else:
            if exists(fs_path):
                remove(fs_path)

    @property
    def status(self):
        # Status lives on disk so the external cleanser script can update it.
        try:
            with open(join(self.fs_path, u'status')) as status_file:
                return status_file.readline()
        except IOError:
            return u'000 no status file'

    @property
    def status_int(self):
        """ returns the status as integer, so it can be used in comparisons"""
        return int(self.status.split()[0])

    @status.setter
    def status(self, state):
        with open(join(self.fs_path, u'status'), 'w') as status_file:
            status_file.write(state)

    def _write_message(self, fs_container, fs_name, message):
        # Writes `message` (unicode) UTF-8-encoded into fs_container/fs_name,
        # creating the container directory with restrictive permissions.
        if message is None:
            return
        if not exists(fs_container):
            mkdir(fs_container)
            chmod(fs_container, 0770)
        fs_reply_path = join(fs_container, fs_name)
        with open(fs_reply_path, 'w') as fs_reply:
            fs_reply.write(message.encode('utf-8'))
        chmod(fs_reply_path, 0660)
        self.paths_created.append(fs_reply_path)

    @property
    def _notification_text(self):
        # Renders the editor notification email body from a Jinja2 template.
        return jinja_env.get_template('editor_email.j2').render(
            num_attachments=self.num_attachments,
            dropbox=self)

    @property
    def settings(self):
        # Settings are owned by the container; drops only read them.
        return self.container.settings

    @property
    def fs_dirty_attachments(self):
        """ returns a list of absolute paths to the attachements"""
        if exists(self.fs_attachment_container):
            return [join(self.fs_attachment_container, attachment)
                    for attachment in listdir(self.fs_attachment_container)]
        else:
            return []

    @property
    def fs_cleansed_attachments(self):
        """ returns a list of absolute paths to the cleansed attachements"""
        if exists(self.fs_cleansed_attachment_container):
            return [join(self.fs_cleansed_attachment_container, attachment)
                    for attachment in listdir(self.fs_cleansed_attachment_container)]
        else:
            return []

    @property
    def cleansed_archive_url(self):
        # None when no archive URL format is configured.
        if 'dropbox_cleansed_archive_url_format' in self.settings:
            return self.settings['dropbox_cleansed_archive_url_format'] % self.drop_id

    @property
    def dirty_archive_url(self):
        # None when no archive URL format is configured.
        if 'dropbox_dirty_archive_url_format' in self.settings:
            return self.settings['dropbox_dirty_archive_url_format'] % self.drop_id

    @property
    def drop_url(self):
        return self.settings['dropbox_view_url_format'] % self.drop_id

    @property
    def editor_url(self):
        return self.settings['dropbox_editor_url_format'] % (
            self.drop_id,
            self.editor_token)

    def last_changed(self):
        # TODO: maybe use last reply from editor
        if exists(join(self.fs_path, u'status')):
            mtime = getmtime(join(self.fs_path, u'status'))
            return datetime.utcfromtimestamp(mtime)
        return datetime.utcfromtimestamp(0)

    def destroy(self):
        # Irreversibly removes this drop from disk.
        shutil.rmtree(self.fs_path)

    def __repr__(self):
        return u'Dropbox %s (%s) at %s' % (
            self.drop_id,
            self.status,
            self.fs_path,
        )
|
tomster/briefkasten
|
application/briefkasten/dropbox.py
|
Python
|
bsd-3-clause
| 19,173
|
"""
Django settings for demo project.
Generated by 'django-admin startproject' using Django 3.0.5.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.0/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'df4bk+cyjf8nn$_7m*iesi@31_#v4#ail1w%cslle!ax%)y7+%'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'demo.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'demo.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.0/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.0/howto/static-files/
STATIC_URL = '/static/'
|
williamlagos/django-shipping
|
demo/demo/settings.py
|
Python
|
lgpl-3.0
| 3,082
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Model Analyzer.
Analyze model, including shape, params, time, memory, structure, etc.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import six
from tensorflow.core.profiler import tfprof_options_pb2
from tensorflow.core.profiler import tfprof_output_pb2
from tensorflow.python import pywrap_tensorflow as print_mdl
from tensorflow.python.framework import errors
from tensorflow.python.profiler import option_builder
from tensorflow.python.profiler import tfprof_logger
# Sentinel defaults: callers pass these to request the built-in option sets
# (see `profile()` and `advise()` below).
_DEFAULT_PROFILE_OPTIONS = 0
_DEFAULT_ADVISE_OPTIONS = 0

# The following options are for 'advise' cmd.
# Show all advice.
ALL_ADVICE = {
    'ExpensiveOperationChecker': {},
    'AcceleratorUtilizationChecker': {},
    'JobChecker': {},  # Only available internally.
    'OperationChecker': {},
}
def _build_options(options):
  """Build tfprof.OptionsProto.

  Args:
    options: A dictionary of options.
  Returns:
    tfprof.OptionsProto.
  """
  opts = tfprof_options_pb2.OptionsProto()
  opts.max_depth = options.get('max_depth', 10)
  # All byte/time/param/op-count thresholds share the same default of 0
  # (meaning: no filtering on that dimension).
  for int_field in ('min_bytes', 'min_peak_bytes', 'min_residual_bytes',
                    'min_output_bytes', 'min_micros',
                    'min_accelerator_micros', 'min_cpu_micros', 'min_params',
                    'min_float_ops', 'min_occurrence'):
    setattr(opts, int_field, options.get(int_field, 0))
  opts.step = options.get('step', -1)
  opts.order_by = options.get('order_by', 'name')
  # Repeated regex fields: copy whatever list the caller supplied (if any).
  for regex_field in ('account_type_regexes', 'start_name_regexes',
                      'trim_name_regexes', 'show_name_regexes',
                      'hide_name_regexes'):
    getattr(opts, regex_field).extend(options.get(regex_field, []))
  opts.account_displayed_op_only = options.get('account_displayed_op_only',
                                               False)
  opts.select.extend(options.get('select', []))
  opts.output = options.get('output', 'stdout')
  opts.dump_to_file = options.get('dump_to_file', '')
  return opts
def _build_advisor_options(options):
  """Build tfprof.AdvisorOptionsProto.

  Args:
    options: A dictionary of options. See ALL_ADVICE example.
  Returns:
    tfprof.AdvisorOptionsProto.
  """
  advisor_opts = tfprof_options_pb2.AdvisorOptionsProto()
  if options is None:
    return advisor_opts
  # One CheckerOption message per checker; each is a string->string map.
  for checker_name, per_checker_opts in six.iteritems(options):
    checker_option_pb = tfprof_options_pb2.AdvisorOptionsProto.CheckerOption()
    for opt_key, opt_value in six.iteritems(per_checker_opts):
      checker_option_pb[opt_key] = opt_value
    advisor_opts.checkers[checker_name].MergeFrom(checker_option_pb)
  return advisor_opts
class Profiler(object):
  """TensorFlow multi-step profiler.

  https://github.com/tensorflow/tensorflow/tree/master/tensorflow/core/profiler/README.md

  ```python
  Typical use case:
    # Currently we are only allowed to create 1 profiler per process.
    profiler = Profiler(sess.graph)

    for i in xrange(total_steps):
      if i % 10000 == 0:
        run_meta = tf.RunMetadata()
        _ = sess.run(...,
                     options=tf.RunOptions(
                         trace_level=tf.RunOptions.FULL_TRACE),
                     run_metadata=run_meta)
        profiler.add_step(i, run_meta)

        # Profile the parameters of your model.
        profiler.profile_name_scope(options=(option_builder.ProfileOptionBuilder
            .trainable_variables_parameter()))

        # Or profile the timing of your model operations.
        opts = option_builder.ProfileOptionBuilder.time_and_memory()
        profiler.profile_operations(options=opts)

        # Or you can generate a timeline:
        opts = (option_builder.ProfileOptionBuilder(
                option_builder.ProfileOptionBuilder.time_and_memory())
                .with_step(i)
                .with_timeline_output(filename).build())
        profiler.profile_graph(options=opts)
      else:
        _ = sess.run(...)
    # Auto detect problems and generate advice.
    profiler.advise()
  ```
  """

  def __init__(self, graph, op_log=None):
    """Constructor.

    Args:
      graph: tf.Graph.
      op_log: optional. tensorflow::tfprof::OpLogProto proto. Used to define
          extra op types.
    """
    self._graph = graph
    # pylint: disable=protected-access
    op_log = tfprof_logger._merge_default_with_oplog(
        self._graph, op_log=op_log)
    # pylint: enable=protected-access
    # NewProfiler installs a process-global native profiler; per the class
    # docstring only one Profiler may exist per process at a time.
    print_mdl.NewProfiler(
        self._graph.as_graph_def(add_shapes=True).SerializeToString(),
        op_log.SerializeToString())

  def __del__(self):
    # Releases the process-global native profiler created in __init__.
    print_mdl.DeleteProfiler()

  def add_step(self, step, run_meta):
    """Add statistics of a step.

    Args:
      step: A step uint64 used to identify the RunMetadata. Must be different
         across different AddStep() calls.
      run_meta: RunMetadata proto that contains statistics of a session run.
    """
    # pylint: disable=protected-access
    op_log = tfprof_logger._merge_default_with_oplog(
        self._graph, run_meta=run_meta, add_trace=False,
        add_trainable_var=False)
    # pylint: enable=protected-access
    print_mdl.AddStep(
        step, run_meta.SerializeToString(), op_log.SerializeToString())

  def profile_python(self, options):
    """Profile the statistics of the Python codes.

      By default, it shows the call stack from root. To avoid
      redundant output, you may use options to filter as below
        options['show_name_regexes'] = ['.*my_code.py.*']

    Args:
      options: A dict of options. See core/profiler/g3doc/options.md.
    Returns:
      a MultiGraphNodeProto that records the results.
    """
    opts = _build_options(options)
    tfprof_node = tfprof_output_pb2.MultiGraphNodeProto()
    tfprof_node.ParseFromString(
        print_mdl.Profile('code'.encode('utf-8'), opts.SerializeToString()))
    return tfprof_node

  def profile_operations(self, options):
    """Profile the statistics of the Operation types (e.g. MatMul, Conv2D).

    Args:
      options: A dict of options. See core/profiler/g3doc/options.md.
    Returns:
      a MultiGraphNodeProto that records the results.
    """
    opts = _build_options(options)
    tfprof_node = tfprof_output_pb2.MultiGraphNodeProto()
    tfprof_node.ParseFromString(
        print_mdl.Profile('op'.encode('utf-8'), opts.SerializeToString()))
    return tfprof_node

  def profile_name_scope(self, options):
    """Profile the statistics of graph nodes, organized by name scope.

    Args:
      options: A dict of options. See core/profiler/g3doc/options.md.
    Returns:
      a GraphNodeProto that records the results.
    """
    opts = _build_options(options)
    tfprof_node = tfprof_output_pb2.GraphNodeProto()
    tfprof_node.ParseFromString(
        print_mdl.Profile('scope'.encode('utf-8'), opts.SerializeToString()))
    return tfprof_node

  def profile_graph(self, options):
    """Profile the statistics of graph nodes, organized by dataflow graph.

    Args:
      options: A dict of options. See core/profiler/g3doc/options.md.
    Returns:
      a GraphNodeProto that records the results.
    """
    opts = _build_options(options)
    tfprof_node = tfprof_output_pb2.GraphNodeProto()
    tfprof_node.ParseFromString(
        print_mdl.Profile('graph'.encode('utf-8'), opts.SerializeToString()))
    return tfprof_node

  def advise(self, options):
    """Automatically detect problems and generate reports.

    Args:
      options: A dict of options. See ALL_ADVICE example above.
    Returns:
      An Advise proto that contains the reports from all checkers.
    """
    advise_pb = tfprof_output_pb2.AdviceProto()
    opts = _build_advisor_options(options)
    advise_pb.ParseFromString(
        print_mdl.Profile('advise'.encode('utf-8'), opts.SerializeToString()))
    return advise_pb
def profile(graph,
            run_meta=None,
            op_log=None,
            cmd='scope',
            options=_DEFAULT_PROFILE_OPTIONS):
  """Profile model.

    Tutorials and examples can be found in:
    https://github.com/tensorflow/tensorflow/tree/master/tensorflow/core/profiler/README.md

  Args:
    graph: required tf.Graph.
    run_meta: optional tensorflow.RunMetadata proto. It is necessary to
        to support run time information profiling, such as time and memory.
    op_log: tensorflow.tfprof.OpLogProto proto. User can assign "types" to
        graph nodes with op_log. "types" allow user to flexibly group and
        account profiles using options['accounted_type_regexes'].
    cmd: string. Either 'op', 'scope', 'graph' or 'code'.
        'op' view organizes profile using operation type. (e.g. MatMul)
        'scope' view organizes profile using graph node name scope.
        'graph' view organizes profile using graph node inputs/outputs.
        'code' view organizes profile using Python call stack.
    options: A dict of options. See core/profiler/g3doc/options.md.
  Returns:
    If cmd is 'scope' or 'graph', returns GraphNodeProto proto.
    If cmd is 'op' or 'code', returns MultiGraphNodeProto proto.
    Side effect: stdout/file/timeline.json depending on options['output']
  """
  if options == _DEFAULT_PROFILE_OPTIONS:
    options = (option_builder.ProfileOptionBuilder
               .trainable_variables_parameter())

  # pylint: disable=protected-access
  op_log = tfprof_logger._merge_default_with_oplog(
      graph, op_log, run_meta, add_trace=cmd == 'code')
  # pylint: enable=protected-access

  opts = _build_options(options)
  run_meta_str = run_meta.SerializeToString() if run_meta else b''

  # The result proto type depends only on the view; the native call is the
  # same for all four commands.
  if cmd == 'code' or cmd == 'op':
    tfprof_node = tfprof_output_pb2.MultiGraphNodeProto()
  elif cmd == 'graph' or cmd == 'scope':
    tfprof_node = tfprof_output_pb2.GraphNodeProto()
  else:
    raise errors.InvalidArgumentError(
        None, None, 'unknown cmd: %s\n' % cmd)

  tfprof_node.ParseFromString(
      print_mdl.PrintModelAnalysis(
          graph.as_graph_def(add_shapes=True).SerializeToString(),
          run_meta_str,
          op_log.SerializeToString(),
          cmd.encode('utf-8'),
          opts.SerializeToString()))
  return tfprof_node
def advise(graph, run_meta=None, options=_DEFAULT_ADVISE_OPTIONS):
  """Auto profile and advise.

    Builds profiles and automatically check anomalies of various
    aspects. For more details:
    https://github.com/tensorflow/tensorflow/tree/master/tensorflow/core/profiler/README.md

  Args:
    graph: required tf.Graph.
    run_meta: optional tensorflow.RunMetadata proto. It is necessary to
        to support run time information profiling, such as time and memory.
    options: see ALL_ADVICE example above. Default checks everything.
  Returns:
    Returns AdviceProto proto
  """
  if options == _DEFAULT_ADVISE_OPTIONS:
    # Copy so callers never see ALL_ADVICE itself mutated.
    options = ALL_ADVICE.copy()

  # pylint: disable=protected-access
  op_log = tfprof_logger._merge_default_with_oplog(
      graph, None, run_meta, add_trace=True)
  # pylint: enable=protected-access

  run_meta_str = run_meta.SerializeToString() if run_meta else b''
  advisor_opts = _build_advisor_options(options)

  advice_pb = tfprof_output_pb2.AdviceProto()
  advice_pb.ParseFromString(
      print_mdl.PrintModelAnalysis(
          graph.as_graph_def(add_shapes=True).SerializeToString(),
          run_meta_str,
          op_log.SerializeToString(),
          'advise'.encode('utf-8'),
          advisor_opts.SerializeToString()))
  return advice_pb
|
xuleiboy1234/autoTitle
|
tensorflow/tensorflow/python/profiler/model_analyzer.py
|
Python
|
mit
| 12,780
|
import os
import pathlib
import time
import traceback
import Ice
import IceStorm
from rich.console import Console, Text
# Module-wide Rich console shared by the Ice helper classes below for logging.
console = Console()
class Publishes:
    """Creates IceStorm publisher proxies for the topics this component publishes."""

    def __init__(self, ice_connector, topic_manager):
        self.ice_connector = ice_connector
        self.mprx = {}
        self.topic_manager = topic_manager

    def create_topic(self, topic_name, ice_proxy):
        # Create a proxy to publish a AprilBasedLocalization topic
        topic = False
        try:
            topic = self.topic_manager.retrieve(topic_name)
        except:
            pass
        # Retry until the topic exists: retrieve it, or create it ourselves.
        # Another client may win the creation race, hence the inner except.
        while not topic:
            try:
                topic = self.topic_manager.retrieve(topic_name)
            except IceStorm.NoSuchTopic:
                try:
                    topic = self.topic_manager.create(topic_name)
                except:
                    print(f'Another client created the {topic_name} topic? ...')
        one_way_pub = topic.getPublisher().ice_oneway()
        publisher_proxy = ice_proxy.uncheckedCast(one_way_pub)
        self.mprx[topic_name] = publisher_proxy
        return publisher_proxy

    def get_proxies_map(self):
        """Return the mapping of topic name -> publisher proxy."""
        return self.mprx
class Requires:
    """Holds proxies to the remote objects this component requires."""

    def __init__(self, ice_connector):
        self.ice_connector = ice_connector
        self.mprx = {}

    def get_proxies_map(self):
        """Return the mapping of property name -> connected proxy."""
        return self.mprx

    def create_proxy(self, property_name, ice_proxy):
        # Remote object connection for
        # Returns (True, proxy) on success, (False, None) on any failure.
        try:
            proxy_string = self.ice_connector.getProperties().getProperty(property_name)
            try:
                base_prx = self.ice_connector.stringToProxy(proxy_string)
                remote = ice_proxy.uncheckedCast(base_prx)
                self.mprx[property_name] = remote
                return True, remote
            except Ice.Exception:
                print('Cannot connect to the remote object (CameraSimple)', proxy_string)
                # traceback.print_exc()
                return False, None
        except Ice.Exception as e:
            console.print_exception(e)
            console.log(f'Cannot get {property_name} property.')
            return False, None
class Subscribes:
    """Subscribes servant handlers to the IceStorm topics this component listens on."""
    def __init__(self, ice_connector, topic_manager, default_handler):
        # NOTE(review): default_handler is accepted but never stored or used here;
        # presumably kept for signature parity with the other helper classes.
        self.ice_connector = ice_connector
        self.topic_manager = topic_manager
    def create_adapter(self, property_name, interface_handler):
        # Create an object adapter named *property_name*, register
        # *interface_handler* as the servant, subscribe its one-way proxy to
        # the topic derived from the property name, and return the activated
        # adapter.
        adapter = self.ice_connector.createObjectAdapter(property_name)
        handler = interface_handler
        # One-way proxy to the servant; IceStorm pushes events through it.
        proxy = adapter.addWithUUID(handler).ice_oneway()
        # Naming convention: the adapter property is '<TopicName>Topic'.
        topic_name = property_name.replace('Topic','')
        subscribe_done = False
        while not subscribe_done:
            try:
                topic = self.topic_manager.retrieve(topic_name)
                subscribe_done = True
            except Ice.Exception as e:
                # Topic missing (or manager unreachable): wait a bit, then try
                # to create it ourselves.
                console.log("Error. Topic does not exist (creating)", style="blue")
                time.sleep(1)
                try:
                    topic = self.topic_manager.create(topic_name)
                    subscribe_done = True
                except:
                    # NOTE(review): the message says "Exiting" but nothing
                    # exits -- the while loop retries; `status` is assigned but
                    # never returned or checked. Confirm intended behavior.
                    console.log(f"Error. Topic {Text(topic_name, style='red')} could not be created. Exiting")
                    status = 0
        qos = {}
        topic.subscribeAndGetPublisher(qos, proxy)
        adapter.activate()
        return adapter
class Implements:
    """Registers servants for the interfaces this component implements."""

    def __init__(self, ice_connector, default_handler):
        # default_handler is accepted for signature parity with the other
        # interface helpers; only the connector is kept.
        self.ice_connector = ice_connector

    def create_adapter(self, property_name, interface_handler):
        """Create the object adapter named *property_name* and register
        *interface_handler* under the lower-cased property name."""
        connector = self.ice_connector
        object_adapter = connector.createObjectAdapter(property_name)
        identity = connector.stringToIdentity(property_name.lower())
        object_adapter.add(interface_handler, identity)
        object_adapter.activate()
class InterfaceManager:
    """Owns the Ice communicator and the Publishes/Requires/Implements/Subscribes helpers."""

    def __init__(self, ice_config_file):
        # TODO: Make ice connector singleton
        self.ice_config_file = ice_config_file
        self.ice_connector = Ice.initialize(self.ice_config_file)
        # This generated component declares no pub/sub endpoints, so the
        # rcnode topic manager is never contacted.
        needs_rcnode = False
        self.topic_manager = self.init_topic_manager() if needs_rcnode else None
        self.status = 0
        # Snapshot of every Ice configuration property as {name: value} strings.
        self.parameters = {}
        for i in self.ice_connector.getProperties():
            self.parameters[str(i)] = str(self.ice_connector.getProperties().getProperty(i))
        self.requires = Requires(self.ice_connector)
        self.publishes = Publishes(self.ice_connector, self.topic_manager)
        # Filled in later by set_default_handler() once the worker exists.
        self.implements = None
        self.subscribes = None

    def init_topic_manager(self):
        """Resolve the IceStorm topic manager from the 'TopicManager.Proxy'
        property. Exits the process if rcnode is unreachable."""
        proxy = self.ice_connector.getProperties().getProperty("TopicManager.Proxy")
        obj = self.ice_connector.stringToProxy(proxy)
        try:
            return IceStorm.TopicManagerPrx.checkedCast(obj)
        except Ice.ConnectionRefusedException as e:
            console.log(Text('Cannot connect to rcnode! This must be running to use pub/sub.', 'red'))
            exit(-1)

    def set_default_hanlder(self, handler):
        """Wire *handler* into the Implements/Subscribes helpers.

        NOTE: this (typo'd) name is kept for backward compatibility; prefer
        the correctly spelled alias ``set_default_handler``.
        """
        self.implements = Implements(self.ice_connector, handler)
        self.subscribes = Subscribes(self.ice_connector, self.topic_manager, handler)

    # Correctly spelled, backward-compatible alias for the typo'd method above.
    set_default_handler = set_default_hanlder

    def get_proxies_map(self):
        """Merge the proxies created by Requires and Publishes into one map."""
        result = {}
        result.update(self.requires.get_proxies_map())
        result.update(self.publishes.get_proxies_map())
        return result

    def destroy(self):
        """Tear down the Ice communicator, if one was created."""
        if self.ice_connector:
            self.ice_connector.destroy()
|
Kmayankkr/robocomp
|
tools/robocompdsl/test/resources/reference_components/test_subStatesTestPython/src/interfaces.py
|
Python
|
gpl-3.0
| 5,349
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for conversion module."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import gast
from tensorflow.python.autograph import utils
from tensorflow.python.autograph.core import converter
from tensorflow.python.autograph.impl import api
from tensorflow.python.autograph.impl import conversion
from tensorflow.python.autograph.pyct import compiler
from tensorflow.python.framework import constant_op
from tensorflow.python.keras.engine import training
from tensorflow.python.platform import test
class ConversionTest(test.TestCase):
  """Tests for conversion.convert_entity_to_ast and graph-whitelisting logic.

  NOTE: the nested functions, classes and lambdas defined inside each test
  are the fixtures being converted -- their exact source form matters.
  """

  def _simple_program_ctx(self):
    # Minimal ProgramContext shared by all tests: recursive conversion, with
    # `api` acting as the autograph module.
    return converter.ProgramContext(
        options=converter.ConversionOptions(recursive=True),
        autograph_module=api)

  def test_is_whitelisted_for_graph(self):
    # A plain user function is convertible; TF-internal modules/functions
    # (utils, constant_op.constant) are whitelisted and left unconverted.
    def test_fn():
      return constant_op.constant(1)
    self.assertFalse(conversion.is_whitelisted_for_graph(test_fn))
    self.assertTrue(conversion.is_whitelisted_for_graph(utils))
    self.assertTrue(conversion.is_whitelisted_for_graph(constant_op.constant))

  def test_convert_entity_to_ast_unsupported_types(self):
    # Converting a non-callable (a plain string) must raise.
    with self.assertRaises(NotImplementedError):
      program_ctx = self._simple_program_ctx()
      conversion.convert_entity_to_ast('dummy', program_ctx)

  def test_convert_entity_to_ast_callable(self):
    # A closure converts to a single renamed FunctionDef; captured names are
    # preserved in the entity namespace.
    b = 2
    def f(a):
      return a + b
    program_ctx = self._simple_program_ctx()
    nodes, name, info = conversion.convert_entity_to_ast(f, program_ctx)
    fn_node, = nodes
    self.assertIsInstance(fn_node, gast.FunctionDef)
    self.assertEqual('tf__f', name)
    self.assertIs(info.namespace['b'], b)

  def test_convert_entity_to_ast_function_with_defaults(self):
    # Default argument expressions are replaced by None in the converted AST.
    b = 2
    c = 1
    def f(a, d=c + 1):
      return a + b + d
    program_ctx = self._simple_program_ctx()
    nodes, name, _ = conversion.convert_entity_to_ast(f, program_ctx)
    fn_node, = nodes
    self.assertIsInstance(fn_node, gast.FunctionDef)
    self.assertEqual('tf__f', name)
    self.assertEqual(
        compiler.ast_to_source(fn_node.args.defaults[0]).strip(), 'None')

  def test_convert_entity_to_ast_call_tree(self):
    # Recursive conversion: converting f (which calls g) yields the converted f.
    def g(a):
      return a
    def f(a):
      return g(a)
    program_ctx = self._simple_program_ctx()
    nodes, _, _ = conversion.convert_entity_to_ast(f, program_ctx)
    f_node, = nodes
    self.assertEqual('tf__f', f_node.name)

  def test_convert_entity_to_ast_class_hierarchy(self):
    # Converting a user-defined class hierarchy is unsupported and must raise.
    class TestBase(object):
      def __init__(self, x='base'):
        self.x = x
      def foo(self):
        return self.x
      def bar(self):
        return self.x
    class TestSubclass(TestBase):
      def __init__(self, y):
        super(TestSubclass, self).__init__('sub')
        self.y = y
      def foo(self):
        return self.y
      def baz(self):
        return self.y
    program_ctx = self._simple_program_ctx()
    with self.assertRaisesRegex(NotImplementedError, 'classes.*whitelisted'):
      conversion.convert_entity_to_ast(TestSubclass, program_ctx)

  def test_convert_entity_to_ast_class_hierarchy_whitelisted(self):
    # Subclassing a whitelisted base (keras Model) IS supported: the base is
    # imported by name and the subclass is renamed with a 'Tf' prefix.
    class TestSubclass(training.Model):
      def __init__(self, y):
        super(TestSubclass, self).__init__()
        self.built = False
      def call(self, x):
        return 3 * x
    program_ctx = self._simple_program_ctx()
    (import_node, class_node), name, _ = conversion.convert_entity_to_ast(
        TestSubclass, program_ctx)
    self.assertEqual(import_node.names[0].name, 'Model')
    self.assertEqual(name, 'TfTestSubclass')
    self.assertEqual(class_node.name, 'TfTestSubclass')

  def test_convert_entity_to_ast_lambda(self):
    # A lambda converts to an Assign of a Lambda, named 'tf__lambda'.
    b = 2
    f = lambda x: b * x if x > 0 else -x
    program_ctx = self._simple_program_ctx()
    (fn_node,), name, entity_info = conversion.convert_entity_to_ast(
        f, program_ctx)
    self.assertIsInstance(fn_node, gast.Assign)
    self.assertIsInstance(fn_node.value, gast.Lambda)
    self.assertEqual('tf__lambda', name)
    self.assertIs(entity_info.namespace['b'], b)

  def test_convert_entity_to_ast_multiple_lambdas(self):
    # Two lambdas on one source line are disambiguated by the names they
    # capture (a vs b).
    a, b = 1, 2
    f, _ = (lambda x: a * x, lambda y: b * y)
    program_ctx = self._simple_program_ctx()
    (fn_node,), name, entity_info = conversion.convert_entity_to_ast(
        f, program_ctx)
    self.assertIsInstance(fn_node, gast.Assign)
    self.assertIsInstance(fn_node.value, gast.Lambda)
    self.assertEqual('tf__lambda', name)
    self.assertIs(entity_info.namespace['a'], a)

  def test_convert_entity_to_ast_multiple_lambdas_ambiguous_definitions(self):
    # When two same-line lambdas are textually indistinguishable, conversion
    # cannot pick one and must raise.
    a, b = 1, 2
    f, _ = (lambda x: a * x, lambda x: b * x)
    program_ctx = self._simple_program_ctx()
    with self.assertRaises(ValueError):
      conversion.convert_entity_to_ast(f, program_ctx)

  def test_convert_entity_to_ast_lambda_code_with_garbage(self):
    # Lambda source extraction must cope with odd wrapping/comment noise.
    # pylint:disable=g-long-lambda
    f = (  # intentional wrap
        lambda x: (
            x  # intentional wrap
            + 1),)[0]
    # pylint:enable=g-long-lambda
    program_ctx = self._simple_program_ctx()
    (fn_node,), name, _ = conversion.convert_entity_to_ast(f, program_ctx)
    self.assertIsInstance(fn_node, gast.Assign)
    self.assertIsInstance(fn_node.value, gast.Lambda)
    self.assertEqual('tf__lambda', name)

  def test_convert_entity_to_ast_nested_functions(self):
    # Nested defs stay inside the converted outer function; only the outer
    # function is renamed.
    b = 2
    def f(x):
      def g(x):
        return b * x
      return g(x)
    program_ctx = self._simple_program_ctx()
    (fn_node,), name, entity_info = conversion.convert_entity_to_ast(
        f, program_ctx)
    self.assertIsInstance(fn_node, gast.FunctionDef)
    self.assertEqual(fn_node.name, 'tf__f')
    self.assertEqual('tf__f', name)
    self.assertIs(entity_info.namespace['b'], b)
if __name__ == '__main__':
  # Run the test suite when this file is executed directly.
  test.main()
|
kevin-coder/tensorflow-fork
|
tensorflow/python/autograph/impl/conversion_test.py
|
Python
|
apache-2.0
| 6,484
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.