hexsha string | size int64 | ext string | lang string | max_stars_repo_path string | max_stars_repo_name string | max_stars_repo_head_hexsha string | max_stars_repo_licenses list | max_stars_count int64 | max_stars_repo_stars_event_min_datetime string | max_stars_repo_stars_event_max_datetime string | max_issues_repo_path string | max_issues_repo_name string | max_issues_repo_head_hexsha string | max_issues_repo_licenses list | max_issues_count int64 | max_issues_repo_issues_event_min_datetime string | max_issues_repo_issues_event_max_datetime string | max_forks_repo_path string | max_forks_repo_name string | max_forks_repo_head_hexsha string | max_forks_repo_licenses list | max_forks_count int64 | max_forks_repo_forks_event_min_datetime string | max_forks_repo_forks_event_max_datetime string | content string | avg_line_length float64 | max_line_length int64 | alphanum_fraction float64 | qsc_code_num_words_quality_signal int64 | qsc_code_num_chars_quality_signal float64 | qsc_code_mean_word_length_quality_signal float64 | qsc_code_frac_words_unique_quality_signal float64 | qsc_code_frac_chars_top_2grams_quality_signal float64 | qsc_code_frac_chars_top_3grams_quality_signal float64 | qsc_code_frac_chars_top_4grams_quality_signal float64 | qsc_code_frac_chars_dupe_5grams_quality_signal float64 | qsc_code_frac_chars_dupe_6grams_quality_signal float64 | qsc_code_frac_chars_dupe_7grams_quality_signal float64 | qsc_code_frac_chars_dupe_8grams_quality_signal float64 | qsc_code_frac_chars_dupe_9grams_quality_signal float64 | qsc_code_frac_chars_dupe_10grams_quality_signal float64 | qsc_code_frac_chars_replacement_symbols_quality_signal float64 | qsc_code_frac_chars_digital_quality_signal float64 | qsc_code_frac_chars_whitespace_quality_signal float64 | qsc_code_size_file_byte_quality_signal float64 | qsc_code_num_lines_quality_signal float64 | qsc_code_num_chars_line_max_quality_signal float64 | qsc_code_num_chars_line_mean_quality_signal float64 | 
qsc_code_frac_chars_alphabet_quality_signal float64 | qsc_code_frac_chars_comments_quality_signal float64 | qsc_code_cate_xml_start_quality_signal float64 | qsc_code_frac_lines_dupe_lines_quality_signal float64 | qsc_code_cate_autogen_quality_signal float64 | qsc_code_frac_lines_long_string_quality_signal float64 | qsc_code_frac_chars_string_length_quality_signal float64 | qsc_code_frac_chars_long_word_length_quality_signal float64 | qsc_code_frac_lines_string_concat_quality_signal float64 | qsc_code_cate_encoded_data_quality_signal float64 | qsc_code_frac_chars_hex_words_quality_signal float64 | qsc_code_frac_lines_prompt_comments_quality_signal float64 | qsc_code_frac_lines_assert_quality_signal float64 | qsc_codepython_cate_ast_quality_signal float64 | qsc_codepython_frac_lines_func_ratio_quality_signal float64 | qsc_codepython_cate_var_zero_quality_signal bool | qsc_codepython_frac_lines_pass_quality_signal float64 | qsc_codepython_frac_lines_import_quality_signal float64 | qsc_codepython_frac_lines_simplefunc_quality_signal float64 | qsc_codepython_score_lines_no_logic_quality_signal float64 | qsc_codepython_frac_lines_print_quality_signal float64 | qsc_code_num_words int64 | qsc_code_num_chars int64 | qsc_code_mean_word_length int64 | qsc_code_frac_words_unique null | qsc_code_frac_chars_top_2grams int64 | qsc_code_frac_chars_top_3grams int64 | qsc_code_frac_chars_top_4grams int64 | qsc_code_frac_chars_dupe_5grams int64 | qsc_code_frac_chars_dupe_6grams int64 | qsc_code_frac_chars_dupe_7grams int64 | qsc_code_frac_chars_dupe_8grams int64 | qsc_code_frac_chars_dupe_9grams int64 | qsc_code_frac_chars_dupe_10grams int64 | qsc_code_frac_chars_replacement_symbols int64 | qsc_code_frac_chars_digital int64 | qsc_code_frac_chars_whitespace int64 | qsc_code_size_file_byte int64 | qsc_code_num_lines int64 | qsc_code_num_chars_line_max int64 | qsc_code_num_chars_line_mean int64 | qsc_code_frac_chars_alphabet int64 | qsc_code_frac_chars_comments int64 | 
qsc_code_cate_xml_start int64 | qsc_code_frac_lines_dupe_lines int64 | qsc_code_cate_autogen int64 | qsc_code_frac_lines_long_string int64 | qsc_code_frac_chars_string_length int64 | qsc_code_frac_chars_long_word_length int64 | qsc_code_frac_lines_string_concat null | qsc_code_cate_encoded_data int64 | qsc_code_frac_chars_hex_words int64 | qsc_code_frac_lines_prompt_comments int64 | qsc_code_frac_lines_assert int64 | qsc_codepython_cate_ast int64 | qsc_codepython_frac_lines_func_ratio int64 | qsc_codepython_cate_var_zero int64 | qsc_codepython_frac_lines_pass int64 | qsc_codepython_frac_lines_import int64 | qsc_codepython_frac_lines_simplefunc int64 | qsc_codepython_score_lines_no_logic int64 | qsc_codepython_frac_lines_print int64 | effective string | hits int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
e0315471bd1a35e31c6a9cdd93a2a2a27365d479 | 2,702 | py | Python | TWLight/emails/views.py | jajodiaraghav/TWLight | 22359ab0b95ee3653e8ffa0eb698acd7bb8ebf70 | [
"MIT"
] | 1 | 2019-10-24T04:49:52.000Z | 2019-10-24T04:49:52.000Z | TWLight/emails/views.py | jajodiaraghav/TWLight | 22359ab0b95ee3653e8ffa0eb698acd7bb8ebf70 | [
"MIT"
] | 1 | 2019-03-29T15:29:45.000Z | 2019-03-29T15:57:20.000Z | TWLight/emails/views.py | jajodiaraghav/TWLight | 22359ab0b95ee3653e8ffa0eb698acd7bb8ebf70 | [
"MIT"
] | 1 | 2019-09-26T14:40:27.000Z | 2019-09-26T14:40:27.000Z | from django.contrib import messages
from django.contrib.auth.decorators import login_required
from django.core.exceptions import PermissionDenied
from django.core.urlresolvers import reverse, reverse_lazy
from django.core.mail import BadHeaderError, send_mail
from django.http import HttpResponse, HttpResponseRedirect
from django.utils.decorators import method_decorator
from django.utils.translation import ugettext_lazy as _
from django.views.generic.edit import FormView
from TWLight.emails.forms import ContactUsForm
from TWLight.emails.signals import ContactUs
@method_decorator(login_required, name='post')
class ContactUsView(FormView):
template_name = 'emails/contact.html'
form_class = ContactUsForm
success_url = reverse_lazy('contact')
def get_initial(self):
initial = super(ContactUsView, self).get_initial()
# @TODO: This sort of gets repeated in ContactUsForm.
# We could probably be factored out to a common place for DRYness.
if self.request.user.is_authenticated():
if self.request.user.email:
initial.update({
'email': self.request.user.email,
})
if ('message' in self.request.GET):
initial.update({
'message': self.request.GET['message'],
})
initial.update({
'next': reverse_lazy('contact'),
})
return initial
def form_valid(self, form):
# Adding an extra check to ensure the user is a wikipedia editor.
try:
assert self.request.user.editor
email = form.cleaned_data['email']
message = form.cleaned_data['message']
carbon_copy = form.cleaned_data['cc']
ContactUs.new_email.send(
sender=self.__class__,
user_email=email,
cc=carbon_copy,
editor_wp_username=self.request.user.editor.wp_username,
body=message
)
messages.add_message(self.request, messages.SUCCESS,
# Translators: Shown to users when they successfully submit a new message using the contact us form.
_('Your message has been sent. We\'ll get back to you soon!'))
return HttpResponseRedirect(reverse('contact'))
except (AssertionError, AttributeError) as e:
messages.add_message (self.request, messages.WARNING,
# Translators: This message is shown to non-wikipedia editors who attempt to post data to the contact us form.
_('You must be a Wikipedia editor to do that.'))
raise PermissionDenied
return self.request.user.editor | 43.580645 | 126 | 0.657661 | 315 | 2,702 | 5.530159 | 0.422222 | 0.063146 | 0.051665 | 0.036165 | 0.04248 | 0.04248 | 0 | 0 | 0 | 0 | 0 | 0 | 0.263879 | 2,702 | 62 | 127 | 43.580645 | 0.875817 | 0.143597 | 0 | 0.115385 | 0 | 0 | 0.069727 | 0 | 0 | 0 | 0 | 0.016129 | 0.038462 | 1 | 0.038462 | false | 0 | 0.211538 | 0 | 0.384615 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
e0322ebc94878f3dc7b69955feb764a97d3db29b | 1,997 | py | Python | frontend/config.py | lcbm/cs-data-ingestion | 314525285bfefe726d86c232937b05d273e44e7f | [
"0BSD"
] | null | null | null | frontend/config.py | lcbm/cs-data-ingestion | 314525285bfefe726d86c232937b05d273e44e7f | [
"0BSD"
] | null | null | null | frontend/config.py | lcbm/cs-data-ingestion | 314525285bfefe726d86c232937b05d273e44e7f | [
"0BSD"
] | null | null | null | """Flask App configuration file."""
import logging
import os
import dotenv
import frontend.constants as constants
dotenv.load_dotenv(os.path.join(constants.BASEDIR, "frontend.env"))
class Base:
"""Configuration class used as base for all environments."""
DEBUG = False
TESTING = False
LOGGING_FORMAT = "[%(asctime)s] %(levelname)s in %(message)s"
LOGGING_LOCATION = "frontend.log"
LOGGING_LEVEL = os.environ.get("LOGGING_LEVEL", logging.DEBUG)
class Development(Base):
"""Configuration class for development environment.
Parameters
----------
Base: base configuration object.
"""
DEBUG = True
TESTING = False
ENV = "dev"
class Staging(Base):
"""Configuration class for development staging environment.
Parameters
----------
Base: base configuration object.
"""
DEBUG = False
TESTING = True
ENV = "staging"
class Production(Base):
"""Configuration class for development production environment.
Parameters
----------
Base: base configuration object.
"""
DEBUG = False
TESTING = False
ENV = "prod"
config = {
"development": "frontend.config.Development",
"staging": "frontend.config.Staging",
"production": "frontend.config.Production",
"default": "frontend.config.Development",
}
def configure_app(app):
"""Configures the Flask app according to the FLASK_ENV
envar. In case FLASK_ENV is not defined, then use the
'default' configuration.
Parameters
----------
app: flask.Flask
Flask app Module.
"""
# Configure app
config_name = os.environ.get("FLASK_ENV", "default")
app.config.from_object(config[config_name])
# Configure logging
handler = logging.FileHandler(app.config["LOGGING_LOCATION"])
handler.setLevel(app.config["LOGGING_LEVEL"])
formatter = logging.Formatter(app.config["LOGGING_FORMAT"])
handler.setFormatter(formatter)
app.logger.addHandler(handler)
| 21.473118 | 67 | 0.667001 | 216 | 1,997 | 6.097222 | 0.324074 | 0.090357 | 0.066819 | 0.056948 | 0.220957 | 0.138952 | 0.138952 | 0.098709 | 0.098709 | 0 | 0 | 0 | 0.207311 | 1,997 | 92 | 68 | 21.706522 | 0.831965 | 0.322484 | 0 | 0.162162 | 0 | 0 | 0.233683 | 0.082998 | 0 | 0 | 0 | 0 | 0 | 1 | 0.027027 | false | 0 | 0.108108 | 0 | 0.621622 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
e03555c89ef682c9881524e84b3f99fb40c60411 | 3,916 | py | Python | script/dummy/arm_control.py | amazon-picking-challenge/team_pfn | 2f76524b067d816d8407f6c4fae4e6d33939c024 | [
"Apache-2.0"
] | 7 | 2016-09-04T02:07:04.000Z | 2017-05-25T02:31:07.000Z | script/dummy/arm_control.py | amazon-picking-challenge/team_pfn | 2f76524b067d816d8407f6c4fae4e6d33939c024 | [
"Apache-2.0"
] | null | null | null | script/dummy/arm_control.py | amazon-picking-challenge/team_pfn | 2f76524b067d816d8407f6c4fae4e6d33939c024 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/python
# Copyright 2016 Preferred Networks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy
import rospy
import actionlib
from geometry_msgs.msg import Twist, Vector3
from apc2016.msg import *
class DummyArmControl(object):
def __init__(self):
self.srv_lowlevel_left = \
actionlib.SimpleActionServer('move_to_left',
RobotArmMoveAction,
execute_cb=self.cb_move_to_left,
auto_start=False)
self.srv_highlevel_left = \
actionlib.SimpleActionServer('move_to_bin_left',
BinToteMoveAction,
execute_cb=self.cb_move_to_bin_left,
auto_start=False)
self.srv_lowlevel_right = \
actionlib.SimpleActionServer('move_to_right',
RobotArmMoveAction,
execute_cb=self.cb_move_to_right,
auto_start=False)
self.srv_highlevel_right = \
actionlib.SimpleActionServer('move_to_bin_right',
BinToteMoveAction,
execute_cb=self.cb_move_to_bin_right,
auto_start=False)
self.srv_lowlevel_left.start()
self.srv_highlevel_left.start()
self.srv_lowlevel_right.start()
self.srv_highlevel_right.start()
def cb_move_to_left(self, goal):
print "moving away right arm, then moving left arm:"
print goal.target_position
result = RobotArmMoveResult(success=True,
position=goal.target_position)
self.srv_lowlevel_left.set_succeeded(result)
def cb_move_to_bin_left(self, goal):
if goal.position:
pos = goal.position
else:
pos = "photo"
print "looking up position for %s/%s" % (goal.bin, pos)
pos = numpy.asarray([550, -146, 752, 181, 0, 180])
p = Vector3(pos[0], pos[1], pos[2])
r = Vector3(pos[3], pos[4], pos[5])
print "moving away right arm, then moving left arm"
result = BinToteMoveResult(success=True, position=Twist(p, r))
self.srv_highlevel_left.set_succeeded(result)
def cb_move_to_right(self, goal):
print "moving away left arm, then moving right arm:"
print goal.target_position
result = RobotArmMoveResult(success=True,
position=goal.target_position)
self.srv_lowlevel_right.set_succeeded(result)
def cb_move_to_bin_right(self, goal):
if goal.position:
pos = goal.position
else:
pos = "photo"
print "looking up position for %s/%s" % (goal.bin, pos)
pos = numpy.asarray([550, -146, 752, 184, 0, 180])
p = Vector3(pos[0], pos[1], pos[2])
r = Vector3(pos[3], pos[4], pos[5])
print "moving away left arm, then moving right arm"
result = BinToteMoveResult(success=True, position=Twist(p, r))
self.srv_highlevel_right.set_succeeded(result)
if __name__ == '__main__':
rospy.init_node("arm_control_dummy", anonymous=True)
DummyArmControl()
rospy.spin()
| 39.959184 | 78 | 0.589122 | 457 | 3,916 | 4.842451 | 0.306346 | 0.037958 | 0.02892 | 0.059648 | 0.620877 | 0.539087 | 0.477632 | 0.442386 | 0.359693 | 0.306371 | 0 | 0.023159 | 0.327375 | 3,916 | 97 | 79 | 40.371134 | 0.817008 | 0.1476 | 0 | 0.422535 | 0 | 0 | 0.097774 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.070423 | null | null | 0.112676 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
e035deed8737a8c4ccc24d990b915152d4728210 | 3,115 | py | Python | cogs/events.py | rompdodger/RompDodger | 9c8b481d9f69e05c15f01271f6c18e09ab2723e6 | [
"MIT"
] | null | null | null | cogs/events.py | rompdodger/RompDodger | 9c8b481d9f69e05c15f01271f6c18e09ab2723e6 | [
"MIT"
] | null | null | null | cogs/events.py | rompdodger/RompDodger | 9c8b481d9f69e05c15f01271f6c18e09ab2723e6 | [
"MIT"
] | null | null | null | import json
import discord
from utils.time import format_time
from utils import utilities
from discord.ext import commands
from discord import Embed
class Events(commands.Cog):
"""Event Handler for RompDodger"""
def __init__(self, bot):
self.bot = bot
@commands.Cog.listener()
async def on_command_error(self, ctx, error):
if hasattr(ctx.command, 'on_error'):
return
if isinstance(error, (commands.CommandNotFound, commands.NoPrivateMessage)):
return
elif isinstance(error, commands.MissingRequiredArgument):
await ctx.send(embed=await utilities.generate_embed(f"Command {ctx.prefix} {ctx.command} requires **{error.param.name}** argument, but you missed giving that"))
elif isinstance(error, commands.BotMissingPermissions):
perms = "".join(error.missing_perms)
await ctx.send(embed=await utilities.generate_embed(f"To finish the command bot must have {perms} permission, give the bot appropriate permissions and re-try"))
self.bot.logger.critical(f"Ignoring Exception in {ctx.command}\nError: {error}")
@commands.Cog.listener()
async def on_guild_join(self, guild):
#TODO: implement blacklist sytem
self.bot.logger.info(f"Joined on {guild} > Total Guilds: {len(self.bot.guilds)}")
@commands.Cog.listener()
async def on_guild_remove(self, guild):
self.bot.logger.info(f"Removed on {guild} > Total Guilds: {len(self.bot.guilds)}")
@commands.Cog.listener()
async def on_member_join(self, member):
cursor = await self.bot.db.execute(f"SELECT channel FROM welcomer WHERE guild_id = {member.guild.id}")
chrow = await cursor.fetchone()
if chrow is None:
return
else:
msgrow = await self.bot.db.execute(f"SELECT message FROM welcomer WHERE guild_id = {member.guild.id}")
msg = await msgrow.fetchone()
name = member.name
mention = member.mention
members = member.guild.member_count
server = member.guild
embed = discord.Embed(color=discord.Color.dark_green(), description=msg[0].format(name=name, mention=mention, members=members, server=server))
embed.set_thumbnail(url=f"{member.avatar_url_as(format='png', size=2048)}")
created = format_time(member.created_at)
embed.set_footer(text=f"{member.name} Created on {created}")
ch = self.bot.get_channel(int(chrow[0]))
await ch.send(embed=embed)
await cursor.close()
@commands.Cog.listener()
async def on_member_remove(self, member):
cursor = await self.bot.db.execute(f"SELECT channel FROM leaver WHERE guild_id = {ctx.guild.id}")
chrow = await cursor.fetchone()
if chrow is None:
return
else:
msg = await self.bot.db.execute(f"SELECT msg FROM leaver WHERE guild_id = {member.guild.id}")
name = member.name
mention = member.mention
server = member.server
members = member.guild.member_count
embed.set_thumbnail(url=f"{member.avatar_url_as(format='png', size=2048)}")
created = format_time(member.joined_at)
embed.set_footer(text=f"{member.name} Created joined on {joined}")
ch = self.bot.get_channel(int(chrow[0]))
await ch.send(embed=embed)
await cursor.close()
def setup(bot):
bot.add_cog(Events(bot)) | 39.43038 | 163 | 0.733547 | 452 | 3,115 | 4.964602 | 0.285398 | 0.040553 | 0.042335 | 0.053476 | 0.552139 | 0.497772 | 0.443405 | 0.382799 | 0.354724 | 0.280749 | 0 | 0.004104 | 0.139647 | 3,115 | 79 | 164 | 39.43038 | 0.833209 | 0.019262 | 0 | 0.432836 | 0 | 0.029851 | 0.258033 | 0.051475 | 0 | 0 | 0 | 0.012658 | 0 | 1 | 0.029851 | false | 0 | 0.089552 | 0 | 0.19403 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
e036f44b7fa0f2862267ed2ae2bb354dffc8bc0b | 260 | py | Python | setup.py | clin366/airpollutionnowcast | f9152583eebc4ad747c8d0510460334a5fb23ff9 | [
"MIT"
] | null | null | null | setup.py | clin366/airpollutionnowcast | f9152583eebc4ad747c8d0510460334a5fb23ff9 | [
"MIT"
] | 9 | 2020-03-24T18:12:45.000Z | 2022-02-10T00:36:57.000Z | setup.py | clin366/airpollutionnowcast | f9152583eebc4ad747c8d0510460334a5fb23ff9 | [
"MIT"
] | null | null | null | from setuptools import find_packages, setup
setup(
name='src',
packages=find_packages(),
version='0.1.0',
description='Project: Nowcasting the air pollution using online search log',
author='Emory University(IR Lab)',
license='MIT',
)
| 23.636364 | 80 | 0.692308 | 33 | 260 | 5.393939 | 0.848485 | 0.134831 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.014151 | 0.184615 | 260 | 10 | 81 | 26 | 0.825472 | 0 | 0 | 0 | 0 | 0 | 0.369231 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.111111 | 0 | 0.111111 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
e037cc498ab758b47d57f427145d459d775fb063 | 339 | py | Python | problems/p0048/s48.py | ahrarmonsur/euler | 4174790637806521a4ea2973abeb76c96c64a782 | [
"MIT"
] | 1 | 2017-12-19T21:18:48.000Z | 2017-12-19T21:18:48.000Z | problems/p0048/s48.py | ahrarmonsur/euler | 4174790637806521a4ea2973abeb76c96c64a782 | [
"MIT"
] | null | null | null | problems/p0048/s48.py | ahrarmonsur/euler | 4174790637806521a4ea2973abeb76c96c64a782 | [
"MIT"
] | null | null | null | """
Project Euler Problem 48
Self powers
Solved by Ahrar Monsur
The series, 1^1 + 2^2 + 3^3 + ... + 10^10 = 10405071317.
Find the last ten digits of the series, 1^1 + 2^2 + 3^3 + ... + 1000^1000.
"""
def main():
max_digits = 1000
sum = 0
for i in range(1, max_digits+1):
sum += i**i
print str(sum)[-10:]
main() | 17.842105 | 74 | 0.575221 | 60 | 339 | 3.216667 | 0.566667 | 0.093264 | 0.103627 | 0.11399 | 0.15544 | 0.15544 | 0.15544 | 0.15544 | 0 | 0 | 0 | 0.184 | 0.262537 | 339 | 19 | 75 | 17.842105 | 0.588 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0 | null | null | 0.142857 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
e03860989956f152e97aacd3a94938522a675b8e | 1,042 | py | Python | esercizi/areaSottesaCompareNumPy.py | gdv/python-alfabetizzazione | d87561222de8a230db11d8529c49cf1702aec326 | [
"MIT"
] | null | null | null | esercizi/areaSottesaCompareNumPy.py | gdv/python-alfabetizzazione | d87561222de8a230db11d8529c49cf1702aec326 | [
"MIT"
] | null | null | null | esercizi/areaSottesaCompareNumPy.py | gdv/python-alfabetizzazione | d87561222de8a230db11d8529c49cf1702aec326 | [
"MIT"
] | 1 | 2019-03-26T11:14:33.000Z | 2019-03-26T11:14:33.000Z | import numpy as np
import timeit
def effe(x):
y = -x * (x - 1.0)
return y
numIntervalli = input('inserire il numero di intervalli in [0.0, 1.0] ')
deltaIntervallo = 1.0 / float(numIntervalli)
print "larghezza intervallo", deltaIntervallo
start = timeit.default_timer()
xIntervalli = []
yIntervalli = []
i = 0
while i < numIntervalli:
xIntervallo = i*deltaIntervallo
xIntervalli.append(xIntervallo)
yIntervalli.append(effe(xIntervallo))
i += 1
areaSottesa = 0.0
for altezza in yIntervalli:
areaSottesa += altezza * deltaIntervallo
endOld = timeit.default_timer()
print "l'area sottesa dalla curva vale ", areaSottesa
xNPIntervalli = np.linspace(0.0, 1.0, numIntervalli, endpoint=False)
yNPIntervalli = -xNPIntervalli * (xNPIntervalli - 1.0)
npArea = np.sum(yNPIntervalli*deltaIntervallo)
endNP = timeit.default_timer()
# print xNPIntervalli
# print xIntervalli
# print yNPIntervalli
# print yIntervalli
print "area numpy = ", npArea
print "old timing = ", endOld - start, "numPy timing = ", endNP - endOld
| 24.809524 | 72 | 0.726488 | 127 | 1,042 | 5.937008 | 0.425197 | 0.013263 | 0.071618 | 0.01061 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.020713 | 0.166027 | 1,042 | 41 | 73 | 25.414634 | 0.846951 | 0.071977 | 0 | 0 | 0 | 0 | 0.14553 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.071429 | null | null | 0.142857 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
e039c81acd8d1fcb88f92f04b6556a716666da98 | 12,736 | py | Python | testing/regrid/testEsmfGridToMeshRegridCsrv.py | xylar/cdat | 8a5080cb18febfde365efc96147e25f51494a2bf | [
"BSD-3-Clause"
] | 62 | 2018-03-30T15:46:56.000Z | 2021-12-08T23:30:24.000Z | testing/regrid/testEsmfGridToMeshRegridCsrv.py | xylar/cdat | 8a5080cb18febfde365efc96147e25f51494a2bf | [
"BSD-3-Clause"
] | 114 | 2018-03-21T01:12:43.000Z | 2021-07-05T12:29:54.000Z | testing/regrid/testEsmfGridToMeshRegridCsrv.py | CDAT/uvcdat | 5133560c0c049b5c93ee321ba0af494253b44f91 | [
"BSD-3-Clause"
] | 14 | 2018-06-06T02:42:47.000Z | 2021-11-26T03:27:00.000Z | #!/usr/bin/env python
#
# $Id: ESMP_GridToMeshRegridCsrv.py,v 1.5 2012/04/23 23:00:14 rokuingh Exp $
#===============================================================================
# ESMP/examples/ESMP_GridToMeshRegrid.py
#===============================================================================
"""
ESMP_GridToMeshRegridCsrv.py
Two ESMP_Field objects are created, one on a Grid and the other on a Mesh. The
source Field is set to an analytic function, and a conservative regridding
operation is performed from the source to the destination Field. After
the regridding is completed, the destination Field is compared to the
exact solution over that domain.
"""
import cdms2
import ESMP
import numpy as _NP
import unittest
def grid_create():
'''
PRECONDITIONS: ESMP has been initialized.
POSTCONDITIONS: A ESMP_Grid has been created.
'''
ub_x = float(4)
ub_y = float(4)
lb_x = float(0)
lb_y = float(0)
max_x = float(4)
max_y = float(4)
min_x = float(0)
min_y = float(0)
cellwidth_x = (max_x-min_x)/(ub_x-lb_x)
cellwidth_y = (max_y-min_y)/(ub_y-lb_y)
cellcenter_x = cellwidth_x/2
cellcenter_y = cellwidth_y/2
maxIndex = _NP.array([ub_x,ub_y], dtype=_NP.int32)
grid = ESMP.ESMP_GridCreateNoPeriDim(maxIndex,
coordSys=ESMP.ESMP_COORDSYS_CART)
## CORNERS
ESMP.ESMP_GridAddCoord(grid, staggerloc=ESMP.ESMP_STAGGERLOC_CORNER)
exLB_corner, exUB_corner = ESMP.ESMP_GridGetCoord(grid, \
ESMP.ESMP_STAGGERLOC_CORNER)
# get the coordinate pointers and set the coordinates
[x,y] = [0, 1]
gridXCorner = ESMP.ESMP_GridGetCoordPtr(grid, x, ESMP.ESMP_STAGGERLOC_CORNER)
gridYCorner = ESMP.ESMP_GridGetCoordPtr(grid, y, ESMP.ESMP_STAGGERLOC_CORNER)
#print 'lower corner bounds = [{0},{1}]'.format(exLB_corner[0],exLB_corner[1])
#print 'upper corner bounds = [{0},{1}]'.format(exUB_corner[0],exUB_corner[1])
p = 0
for i1 in range(exLB_corner[1], exUB_corner[1]):
for i0 in range(exLB_corner[0], exUB_corner[0]):
gridXCorner[p] = float(i0)*cellwidth_x
gridYCorner[p] = float(i1)*cellwidth_y
p = p + 1
#print 'Grid corner coordinates:'
p = 0
for i1 in range(exLB_corner[1], exUB_corner[1]):
for i0 in range(exLB_corner[0], exUB_corner[0]):
#print '[{0},{1}]'.format(gridXCorner[p], gridYCorner[p])
p = p + 1
#print '\n'
## CENTERS
ESMP.ESMP_GridAddCoord(grid, staggerloc=ESMP.ESMP_STAGGERLOC_CENTER)
exLB_center, exUB_center = ESMP.ESMP_GridGetCoord(grid, \
ESMP.ESMP_STAGGERLOC_CENTER)
# get the coordinate pointers and set the coordinates
[x,y] = [0, 1]
gridXCenter = ESMP.ESMP_GridGetCoordPtr(grid, x, ESMP.ESMP_STAGGERLOC_CENTER)
gridYCenter = ESMP.ESMP_GridGetCoordPtr(grid, y, ESMP.ESMP_STAGGERLOC_CENTER)
#print 'lower corner bounds = [{0},{1}]'.format(exLB_center[0],exLB_center[1])
#print 'upper corner bounds = [{0},{1}]'.format(exUB_center[0],exUB_center[1])
p = 0
for i1 in range(exLB_center[1], exUB_center[1]):
for i0 in range(exLB_center[0], exUB_center[0]):
gridXCenter[p] = float(i0)*cellwidth_x + cellwidth_x/2.0
gridYCenter[p] = float(i1)*cellwidth_y + cellwidth_y/2.0
p = p + 1
#print 'Grid center coordinates:'
p = 0
for i1 in range(exLB_center[1], exUB_center[1]):
for i0 in range(exLB_center[0], exUB_center[0]):
#print '[{0},{1}]'.format(gridXCenter[p], gridYCenter[p])
p = p + 1
#print '\n'
return grid
def mesh_create_3x3(mesh):
'''
PRECONDITIONS: An ESMP_Mesh has been declared.
POSTCONDITIONS: A 3x3 ESMP_Mesh has been created.
3x3 Mesh
3.0 2.0 13 -------14 --------15--------16
| | | |
| 7 | 8 | 9 |
| | | |
2.5 1.5 9 ------- 10 --------11--------12
| | | |
| 4 | 5 | 6 |
| | | |
1.5 0.5 5 ------- 6 -------- 7-------- 8
| | | |
| 1 | 2 | 3 |
| | | |
1.0 0.0 1 ------- 2 -------- 3-------- 4
0.0 0.5 1.5 2.0
1.0 1.5 2.5 3.0
Node Ids at corners
Element Ids in centers
(Everything owned by PET 0)
'''
# set up a simple mesh
num_node = 16
num_elem = 9
nodeId = _NP.array([1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16])
'''
# this is for grid to mesh
nodeCoord = _NP.array([1.0,1.0, 1.5,1.0, 2.5,1.0, 3.0,1.0,
1.0,1.5, 1.5,1.5, 2.5,1.5, 3.0,1.5,
1.0,2.5, 1.5,2.5, 2.5,2.5, 3.0,2.5,
1.0,3.0, 1.5,3.0, 2.5,3.0, 3.0,3.0])
'''
# this is for mesh to grid
nodeCoord = _NP.array([0.0,0.0, 1.5,0.0, 2.5,0.0, 4.0,0.0,
0.0,1.5, 1.5,1.5, 2.5,1.5, 4.0,1.5,
0.0,2.5, 1.5,2.5, 2.5,2.5, 4.0,2.5,
0.0,4.0, 1.5,4.0, 2.5,4.0, 4.0,4.0])
nodeOwner = _NP.zeros(num_node, dtype=_NP.int32)
elemId = _NP.array([1,2,3,4,5,6,7,8,9], dtype=_NP.int32)
elemType = _NP.ones(num_elem, dtype=_NP.int32)
elemType*=ESMP.ESMP_MESHELEMTYPE_QUAD
elemConn = _NP.array([0,1,5,4,
1,2,6,5,
2,3,7,6,
4,5,9,8,
5,6,10,9,
6,7,11,10,
8,9,13,12,
9,10,14,13,
10,11,15,14], dtype=_NP.int32)
ESMP.ESMP_MeshAddNodes(mesh,num_node,nodeId,nodeCoord,nodeOwner)
ESMP.ESMP_MeshAddElements(mesh,num_elem,elemId,elemType,elemConn)
#print 'Mesh coordinates:'
for i in range(num_node):
x = nodeCoord[2*i]
y = nodeCoord[2*i+1]
#print '[{0},{1}]'.format(x, y)
#print '\n'
return mesh, nodeCoord, elemType, elemConn
def create_ESMPmesh_3x3():
'''
PRECONDITIONS: ESMP is initialized.
POSTCONDITIONS: An ESMP_Mesh (3x3) has been created and returned as 'mesh'.
'''
# Two parametric dimensions, and three spatial dimensions
mesh = ESMP.ESMP_MeshCreate(2,2)
mesh, nodeCoord, elemType, elemConn = mesh_create_3x3(mesh)
return mesh, nodeCoord, elemType, elemConn
def create_ESMPfieldgrid(grid, name):
'''
PRECONDITIONS: An ESMP_Grid has been created, and 'name' is a string that
will be used to initialize the name of a new ESMP_Field.
POSTCONDITIONS: An ESMP_Field has been created.
'''
# defaults to center staggerloc
field = ESMP.ESMP_FieldCreateGrid(grid, name)
return field
def build_analyticfieldgrid(field, grid):
'''
PRECONDITIONS: An ESMP_Field has been created.
POSTCONDITIONS: The 'field' has been initialized to an analytic field.
'''
# get the field pointer first
fieldPtr = ESMP.ESMP_FieldGetPtr(field)
# get the grid bounds and coordinate pointers
exLB, exUB = ESMP.ESMP_GridGetCoord(grid, ESMP.ESMP_STAGGERLOC_CENTER)
# get the coordinate pointers and set the coordinates
[x,y] = [0, 1]
gridXCoord = ESMP.ESMP_GridGetCoordPtr(grid, x, ESMP.ESMP_STAGGERLOC_CENTER)
gridYCoord = ESMP.ESMP_GridGetCoordPtr(grid, y, ESMP.ESMP_STAGGERLOC_CENTER)
#print "Grid center coordinates"
p = 0
for i1 in range(exLB[1], exUB[1]):
for i0 in range(exLB[0], exUB[0]):
xc = gridXCoord[p]
yc = gridYCoord[p]
fieldPtr[p] = 20.0+xc+yc
#fieldPtr[p] = 20.0+xc*yc+yc**2
#print '[{0},{1}] = {2}'.format(xc,yc,fieldPtr[p])
p = p + 1
#print "\n"
return field
def create_ESMPfield(mesh, name):
'''
PRECONDITIONS: An ESMP_Mesh has been created, and 'name' is a string that
will be used to initialize the name of a new ESMP_Field.
POSTCONDITIONS: An ESMP_Field has been created.
'''
field = ESMP.ESMP_FieldCreate(mesh, name, meshloc=ESMP.ESMP_MESHLOC_ELEMENT)
return field
def build_analyticfield(field, nodeCoord, elemType, elemConn):
'''
PRECONDITIONS: An ESMP_Field has been created.
POSTCONDITIONS: The 'field' has been initialized to an analytic field.
'''
# get the field pointer first
fieldPtr = ESMP.ESMP_FieldGetPtr(field, 0)
# set the field to a vanilla initial field for now
#print "Mesh center coordinates"
offset = 0
for i in range(field.size): # this routine assumes this field is on elements
if (elemType[i] == ESMP.ESMP_MESHELEMTYPE_TRI):
raise NameError("Cannot compute a non-constant analytic field for a mesh\
with triangular elements!")
x1 = nodeCoord[(elemConn[offset])*2]
x2 = nodeCoord[(elemConn[offset+1])*2]
y1 = nodeCoord[(elemConn[offset+1])*2+1]
y2 = nodeCoord[(elemConn[offset+3])*2+1]
x = (x1+x2)/2.0
y = (y1+y2)/2.0
fieldPtr[i] = 20.0+x+y
#fieldPtr[i] = 20.0+x*y+y**2
#print '[{0},{1}] = {2}'.format(x,y,fieldPtr[i])
offset = offset + 4
#print "\n"
return field
def run_regridding(srcfield, dstfield):
    '''
    PRECONDITIONS: Two ESMP_Fields have been created and a regridding operation
                   is desired from 'srcfield' to 'dstfield'.
    POSTCONDITIONS: An ESMP regridding operation has set the data on 'dstfield'.
    '''
    # call the regridding functions: conservative remapping, and treat any
    # unmapped destination point as a hard error rather than skipping it
    routehandle = ESMP.ESMP_FieldRegridStore(srcfield, dstfield,
                                             regridmethod=ESMP.ESMP_REGRIDMETHOD_CONSERVE,
                                             unmappedaction=ESMP.ESMP_UNMAPPEDACTION_ERROR)
    ESMP.ESMP_FieldRegrid(srcfield, dstfield, routehandle)
    # Free the precomputed route handle once the regrid has been applied.
    ESMP.ESMP_FieldRegridRelease(routehandle)
    return dstfield
def compare_fields(field1, field2):
    '''
    PRECONDITIONS: Two ESMP_Fields have been created and a comparison of the
                   values is desired between 'field1' and 'field2'.
    POSTCONDITIONS: The values on the two fields are compared point by point.
                    Returns True if the fields are comparable (success),
                    i.e. every point's relative error is within 6%.
    '''
    # get the data pointers for the fields
    field1ptr = ESMP.ESMP_FieldGetPtr(field1)
    field2ptr = ESMP.ESMP_FieldGetPtr(field2)

    # compare point values of field1 to field2
    # first verify they are the same size
    if (field1.size != field2.size):
        raise NameError('compare_fields: Fields must be the same size!')

    # initialize to True, and check for False point values
    correct = True
    totalErr = 0.0
    for i in range(field1.size):
        # Relative error against field2 (the "exact" field).
        # NOTE(review): divides by abs(field2ptr[i]) — assumes the exact
        # field is nowhere zero (true for 20+x+y on this domain); confirm.
        err = abs(field1ptr[i] - field2ptr[i])/abs(field2ptr[i])
        if err > .06:
            correct = False
            print "ACCURACY ERROR - "+str(err)
            print "field1 = {0} : field2 = {1}\n".format(field1ptr[i], field2ptr[i])
        totalErr += err

    if correct:
        print " - PASS - Total Error = "+str(totalErr)
        return True
    else:
        print " - FAIL - Total Error = "+str(totalErr)
        return False
class TestESMP_GridToMeshRegridCsrv(unittest.TestCase):
    """Conservatively regrid an analytic field from a 3x3 mesh onto a grid
    and compare the result against the exact analytic field on the grid."""

    def setUp(self):
        pass

    def test_test1(self):
        # create two unique ESMP_Mesh objects
        grid = grid_create()
        mesh, nodeCoord, elemType, elemConn = create_ESMPmesh_3x3()

        '''
        # this is for grid to mesh
        # create ESMP_Field objects on the Meshes
        srcfield = create_ESMPfieldgrid(grid, 'srcfield')
        dstfield = create_ESMPfield(mesh, 'dstfield')
        dstfield2 = create_ESMPfield(mesh, 'dstfield_exact')

        # initialize the Fields to an analytic function
        srcfield = build_analyticfieldgrid(srcfield, grid)
        dstfield2 = build_analyticfield(dstfield2, nodeCoord, elemType, elemConn)
        '''

        # this is for mesh to grid
        # create ESMP_Field objects on the Meshes
        srcfield = create_ESMPfield(mesh, 'srcfield')
        dstfield = create_ESMPfieldgrid(grid, 'dstfield')
        dstfield2 = create_ESMPfieldgrid(grid, 'dstfield_exact')

        # initialize the Fields to an analytic function
        srcfield = build_analyticfield(srcfield, nodeCoord, elemType, elemConn)
        dstfield2 = build_analyticfieldgrid(dstfield2, grid)

        # run the ESMF regridding
        dstfield = run_regridding(srcfield, dstfield)

        # compare results and output PASS or FAIL
        ok = compare_fields(dstfield, dstfield2)

        # clean up
        ESMP.ESMP_FieldDestroy(srcfield)
        ESMP.ESMP_FieldDestroy(dstfield)
        ESMP.ESMP_FieldDestroy(dstfield2)
        ESMP.ESMP_GridDestroy(grid)
        ESMP.ESMP_MeshDestroy(mesh)

        self.assertEqual(ok, True)
if __name__ == '__main__':
    # Enable ESMF logging before running the regrid test suite.
    ESMP.ESMP_LogSet(True)
    print ""  # Spacer
    suite = unittest.TestLoader().loadTestsFromTestCase(TestESMP_GridToMeshRegridCsrv)
    unittest.TextTestRunner(verbosity = 1).run(suite)
| 33.515789 | 86 | 0.613693 | 1,760 | 12,736 | 4.323864 | 0.163068 | 0.049409 | 0.026018 | 0.022076 | 0.377924 | 0.332983 | 0.299869 | 0.275033 | 0.236137 | 0.189093 | 0 | 0.051196 | 0.254633 | 12,736 | 379 | 87 | 33.604222 | 0.750448 | 0.160019 | 0 | 0.157895 | 0 | 0 | 0.024728 | 0 | 0 | 0 | 0 | 0 | 0.005848 | 0 | null | null | 0.011696 | 0.023392 | null | null | 0.02924 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
e03bee15bfc41f500be41ba4168c4029ea4dba20 | 3,770 | py | Python | scripts/beautify.py | lukaschoebel/POTUSgen | 7b88ba63f0ddab199937df909c5af3271a833cf3 | [
"MIT"
] | null | null | null | scripts/beautify.py | lukaschoebel/POTUSgen | 7b88ba63f0ddab199937df909c5af3271a833cf3 | [
"MIT"
] | 5 | 2020-03-25T08:02:45.000Z | 2020-04-08T20:07:42.000Z | scripts/beautify.py | lukaschoebel/POTUSgen | 7b88ba63f0ddab199937df909c5af3271a833cf3 | [
"MIT"
] | null | null | null | import json
import re
import sys
def beautify(name):
    '''Load the tweet dump ``<name>.json``, clean every tweet, and write the
    survivors (one JSON-encoded string per line) to ``<name>_filtered.txt``.

    :type name: String -- base filename without the ".json" extension
    :rtype: None (writes ``<name>_filtered.txt`` as a side effect)
    '''
    filename = name + '.json'
    output_name = name + "_filtered.txt"

    # Renamed from input/output to avoid shadowing the builtins.
    with open(filename, "r", encoding="utf-8") as infile:
        with open(output_name, "w", encoding="utf-8") as outfile:
            document = json.load(infile)

            # Filter only the messages that are not retweeted
            # >> Version i): for tweets from archive "master_XXXX.json"
            # document = [x['full_text'] for x in document if x['user']['screen_name'] == 'realDonaldTrump' and 'full_text' in x]
            # >> Version ii): for self-scraped tweets via https://github.com/bpb27/twitter_scraping
            # document = [x['text'] for x in document if x['user']['screen_name'] == 'realDonaldTrump' and 'text' in x]
            # >> Version iii): Data set from https://github.com/MatthewWolff/MarkovTweets/
            document = [x['text'] for x in document]

            # Clean each tweet ONCE and drop retweets/empties (the original
            # called deep_clean twice per tweet: once in the filter and once
            # for the kept value).
            cleaned = (deep_clean(x) for x in document)
            document = [x for x in cleaned if x is not None]

            # ensure_ascii=False keeps raw unicode characters instead of
            # \uXXXX escapes in the output lines.
            for value in document:
                outfile.write(json.dumps(value, ensure_ascii=False) + "\n")
            # json.dump(document, output, ensure_ascii=False, indent=4)

    # Fixed message: corrected the "Sucessfully" typo and the "(unknown)"
    # placeholder left in the original.
    print(f">> Successfully cleaned {filename} and saved it to {output_name}")
# Compiled once at import time instead of on every call (the original
# recompiled this large character-class pattern per tweet).
_EMOJI_PATTERN = re.compile("["
                            u"\U0001F600-\U0001F64F"  # emoticons
                            u"\U0001F300-\U0001F5FF"  # symbols & pictographs
                            u"\U0001F680-\U0001F6FF"  # transport & map symbols
                            u"\U0001F1E0-\U0001F1FF"  # flags (iOS)
                            u"\U0001F1F2-\U0001F1F4"  # Macau flag
                            u"\U0001F1E6-\U0001F1FF"  # flags
                            u"\U0001F600-\U0001F64F"
                            u"\U00002702-\U000027B0"
                            u"\U000024C2-\U0001F251"
                            u"\U0001f926-\U0001f937"
                            u"\U0001F1F2"
                            u"\U0001F1F4"
                            u"\U0001F620"
                            u"\u200d"
                            u"\u2640-\u2642"
                            "]+", flags=re.UNICODE)


def deep_clean(s):
    ''' Deep cleaning of filtered tweets. Replaces common symbols and kills
    quotation marks/apostrophes.

    :type s: String
    :rtype: String, or None for retweets and tweets that clean to nothing
    '''
    # Return None if given tweet is a retweet
    if s[:2] == 'RT':
        return None

    # Delete all URLs because they don't make for interesting tweets.
    s = re.sub(r'http[\S]*', '', s)

    # Replace some common unicode symbols with raw character variants
    s = re.sub(r'\\u2026', '...', s)
    s = re.sub(r'…', '', s)
    s = re.sub(r'\\u2019', "'", s)
    s = re.sub(r'\\u2018', "'", s)
    # BUG FIX: unescape the HTML ampersand entity (the original line had
    # degraded to the no-op re.sub(r"&", r"&", s)).
    s = re.sub(r"&amp;", r"&", s)
    s = re.sub(r'\\n', r"", s)

    # Delete emoji modifying characters (skin-tone modifier, variation selector)
    s = re.sub(chr(127996), '', s)
    s = re.sub(chr(65039), '', s)

    # Kill apostrophes & punctuation because they confuse things.
    s = re.sub(r"'", r"", s)
    s = re.sub(r"“", r"", s)
    s = re.sub(r"”", r"", s)
    s = re.sub('[()]', r'', s)
    s = re.sub(r'"', r"", s)

    # Collapse multiples of certain chars ("..." -> ".", "--" -> "-")
    s = re.sub('([.-])+', r'\1', s)

    # Pad sentence punctuation chars with whitespace (but not inside numbers)
    s = re.sub('([^0-9])([.,!?])([^0-9])', r'\1 \2 \3', s)

    # Remove extra whitespace (incl. newlines) and lowercase everything
    s = ' '.join(s.split()).lower()

    # Strip emoji in a single pass with the precompiled pattern
    s = _EMOJI_PATTERN.sub(r'', s)

    # Care for a special case where the first char is a "."
    if len(s):
        return s[1:] if s[0] == "." else s
    return None
if __name__ == "__main__":
    # Expect the dataset base name (without ".json") as the first CLI argument.
    if len(sys.argv) > 1:
        beautify(sys.argv[1])
e0404632a7378b088279de3e94aac11c26a9e183 | 1,540 | py | Python | monasca_persister/conf/influxdb.py | zhangjianweibj/monasca-persister | 0c5d8a7c5553001f2d38227347f482201f92c8e1 | [
"Apache-2.0"
] | null | null | null | monasca_persister/conf/influxdb.py | zhangjianweibj/monasca-persister | 0c5d8a7c5553001f2d38227347f482201f92c8e1 | [
"Apache-2.0"
] | 1 | 2020-03-13T12:30:29.000Z | 2020-03-13T12:38:16.000Z | monasca_persister/conf/influxdb.py | zhangjianweibj/monasca-persister | 0c5d8a7c5553001f2d38227347f482201f92c8e1 | [
"Apache-2.0"
] | null | null | null | # (C) Copyright 2016-2017 Hewlett Packard Enterprise Development LP
# Copyright 2017 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_config import cfg
# oslo.config option definitions for the InfluxDB metrics backend.
influxdb_opts = [
    cfg.StrOpt('database_name',
               help='database name where metrics are stored',
               default='mon'),
    cfg.HostAddressOpt('ip_address',
                       help='Valid IP address or hostname '
                            'to InfluxDB instance'),
    cfg.PortOpt('port',
                help='port to influxdb',
                default=8086),
    cfg.StrOpt('user',
               help='influxdb user ',
               default='mon_persister'),
    # secret=True keeps the password out of config dumps/logs.
    cfg.StrOpt('password',
               secret=True,
               help='influxdb password')]

# All options above live under the [influxdb] section of the config file.
influxdb_group = cfg.OptGroup(name='influxdb',
                              title='influxdb')
def register_opts(conf):
    # Register the [influxdb] group and its options on the given ConfigOpts
    # instance so they can be read at runtime.
    conf.register_group(influxdb_group)
    conf.register_opts(influxdb_opts, influxdb_group)
def list_opts():
    # Expose this module's option group and options (the conventional hook
    # for config discovery, e.g. sample-config generation).
    return influxdb_group, influxdb_opts
| 32.765957 | 69 | 0.653896 | 190 | 1,540 | 5.221053 | 0.573684 | 0.060484 | 0.02621 | 0.032258 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.017544 | 0.25974 | 1,540 | 46 | 70 | 33.478261 | 0.852632 | 0.398701 | 0 | 0 | 0 | 0 | 0.225275 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.083333 | false | 0.083333 | 0.041667 | 0.041667 | 0.166667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
e044775152d95d5fb032af9b89fee05b4ac263fe | 2,630 | py | Python | src/server.py | FlakM/fastai_text_serving | 8262c2c1192c5e11df2e06b494ab9cf88c1dcd2a | [
"Apache-2.0"
] | null | null | null | src/server.py | FlakM/fastai_text_serving | 8262c2c1192c5e11df2e06b494ab9cf88c1dcd2a | [
"Apache-2.0"
] | null | null | null | src/server.py | FlakM/fastai_text_serving | 8262c2c1192c5e11df2e06b494ab9cf88c1dcd2a | [
"Apache-2.0"
] | null | null | null | import asyncio
import logging
import aiohttp
import uvicorn
from fastai.vision import *
from starlette.applications import Starlette
from starlette.middleware.cors import CORSMiddleware
from starlette.responses import JSONResponse
# put your url here (a direct-download link, e.g. Dropbox with ?raw=1)
model_file_url = 'https://www.dropbox.com/s/...?raw=1'
model_file_name = 'model'
# Directory containing this file; the model is expected at <parent>/models/.
path = Path(__file__).parent

logging.basicConfig(format="%(levelname)s: %(message)s", level=logging.INFO)
logger = logging.getLogger()
logger.setLevel(logging.INFO)

# Starlette app with permissive CORS so browser clients can POST /predict.
app = Starlette()
app.add_middleware(CORSMiddleware, allow_origins=['*'], allow_headers=['X-Requested-With', 'Content-Type'])
def hashsum(path, hex=True, hash_type=hashlib.md5):
    """Digest the file at *path* in streaming fashion.

    ``hash_type`` is any hashlib constructor (default md5); when ``hex`` is
    true the hex string is returned, otherwise the raw digest bytes.
    """
    digest = hash_type()
    chunk_size = digest.block_size * 128
    with open(path, 'rb') as handle:
        for chunk in iter(lambda: handle.read(chunk_size), b''):
            digest.update(chunk)
    return digest.hexdigest() if hex else digest.digest()
async def download_file(url, dest):
    """Fetch *url* into the Path *dest*; no-op when the file already exists."""
    if dest.exists(): return
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as response:
            # Whole body is buffered in memory before writing to disk.
            data = await response.read()
            with open(dest, 'wb') as f: f.write(data)
async def setup_learner():
    """Ensure the exported fastai model exists locally, then load it.

    Returns the loaded model and its class labels.
    """
    model_file = path.parent / 'models' / f'{model_file_name}.pkl'
    if not model_file.exists():
        logging.info("Will download file %s from %s", model_file, model_file_url)
        await download_file(model_file_url, model_file)
        # md5 is logged so a corrupt/partial download can be spotted.
        logging.info("Downloaded file md5sum: %s", hashsum(model_file))
    else:
        logging.info("File %s already exists will reuse md5sum: %s", model_file, hashsum(model_file))

    # Loading the saved model using fastai's load_learner method
    model = load_learner(model_file.parent, f'{model_file_name}.pkl')
    classes = model.data.classes
    return model, classes
# Load (downloading if necessary) the model once at import time, so the
# ASGI app is fully ready before it starts serving requests.
loop = asyncio.get_event_loop()
tasks = [asyncio.ensure_future(setup_learner())]
model, classes = loop.run_until_complete(asyncio.gather(*tasks))[0]
loop.close()
def sortByProb(val):
    """Sort key: the 'prob' entry of a prediction record."""
    prob = val["prob"]
    return prob
@app.route('/predict', methods=['POST'])
async def analyze(request):
    """Classify the POSTed 'text' form field.

    Returns at most the top five labels whose probability exceeds 0.01,
    sorted by descending probability.
    """
    data = await request.form()
    text = data['text']

    predict_class, predict_idx, predict_values = model.predict(text)

    # Keep only labels with non-negligible probability.
    results = []
    for idx, val in enumerate(predict_values):
        prob = val.item()
        if prob > 0.01:
            record = {"value": classes[idx], "prob": prob}
            results.append(record)

    results.sort(key=sortByProb, reverse=True)
    return JSONResponse(results[:5])
if __name__ == '__main__':
    # Run a local development server when invoked as "python server.py serve".
    # BUG FIX: the original read "host='0.0.0.0' port=4000" — a missing comma
    # between the keyword arguments, which is a SyntaxError.
    if 'serve' in sys.argv: uvicorn.run(app, host='0.0.0.0', port=4000)
e044ab975c816db8531273f338dcef5b52d8c7ce | 1,061 | py | Python | src/geneflow/extend/local_workflow.py | jhphan/geneflow2 | a39ab97e6425ee45584cfc15b5740e94a5bf7512 | [
"Apache-2.0"
] | 7 | 2019-04-11T03:50:51.000Z | 2020-03-27T15:59:04.000Z | src/geneflow/extend/local_workflow.py | jhphan/geneflow2 | a39ab97e6425ee45584cfc15b5740e94a5bf7512 | [
"Apache-2.0"
] | 1 | 2019-05-06T14:18:42.000Z | 2019-05-08T22:06:12.000Z | src/geneflow/extend/local_workflow.py | jhphan/geneflow2 | a39ab97e6425ee45584cfc15b5740e94a5bf7512 | [
"Apache-2.0"
] | 6 | 2019-04-10T20:25:27.000Z | 2021-12-16T15:59:59.000Z | """This module contains the GeneFlow LocalWorkflow class."""
class LocalWorkflow:
    """Represents a GeneFlow workflow running in the local context.

    The local context needs no extra setup, so the lifecycle hooks are
    trivial successes and there are no context-specific options.
    """

    def __init__(
            self,
            job,
            config,
            parsed_job_work_uri
    ):
        """Store the job definition, configuration, and parsed work URI."""
        self._job = job
        self._config = config
        self._parsed_job_work_uri = parsed_job_work_uri

    def initialize(self):
        """Initialize the workflow; nothing to do locally.

        Returns:
            True.
        """
        return True

    def init_data(self):
        """Initialize context-specific data; nothing to do locally.

        Returns:
            True.
        """
        return True

    def get_context_options(self):
        """Return options specific to this context.

        Returns:
            {} - the local context defines no options.
        """
        return {}
| 18.293103 | 60 | 0.524034 | 100 | 1,061 | 5.37 | 0.42 | 0.100559 | 0.072626 | 0.089385 | 0.108007 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.402451 | 1,061 | 57 | 61 | 18.614035 | 0.847003 | 0.410933 | 0 | 0.125 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | false | 0 | 0 | 0 | 0.5 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
e0453a8ff093c7c5f6bb2239656a47c98c50cec7 | 2,849 | py | Python | S12/tensornet/engine/ops/lr_scheduler.py | abishek-raju/EVA4B2 | 189f4062c85d91f43c1381087a9c89ff794e5428 | [
"Apache-2.0"
] | 4 | 2020-06-18T13:07:19.000Z | 2022-01-07T10:51:10.000Z | S12/tensornet/engine/ops/lr_scheduler.py | abishek-raju/EVA4B2 | 189f4062c85d91f43c1381087a9c89ff794e5428 | [
"Apache-2.0"
] | 1 | 2021-07-31T04:34:46.000Z | 2021-08-11T05:55:57.000Z | S12/tensornet/engine/ops/lr_scheduler.py | abishek-raju/EVA4B2 | 189f4062c85d91f43c1381087a9c89ff794e5428 | [
"Apache-2.0"
] | 4 | 2020-08-09T07:10:46.000Z | 2021-01-16T14:57:23.000Z | from torch.optim.lr_scheduler import StepLR, ReduceLROnPlateau, OneCycleLR
def step_lr(optimizer, step_size, gamma=0.1, last_epoch=-1):
    """Create a step-decay learning rate scheduler.

    Every ``step_size`` epochs the learning rate of each parameter group
    is multiplied by ``gamma``.

    Args:
        optimizer (torch.optim): Model optimizer.
        step_size (int): Frequency (in epochs) for changing the learning rate.
        gamma (float): Multiplicative decay factor. (default: 0.1)
        last_epoch (int): The index of the last epoch. (default: -1)

    Returns:
        StepLR: Learning rate scheduler.
    """
    scheduler = StepLR(
        optimizer, step_size=step_size, gamma=gamma, last_epoch=last_epoch
    )
    return scheduler
def reduce_lr_on_plateau(optimizer, factor=0.1, patience=10, verbose=False, min_lr=0):
    """Create a scheduler that lowers the LR when a metric stops improving.

    Args:
        optimizer (torch.optim): Model optimizer.
        factor (float, optional): Factor by which the learning rate will be
            reduced. (default: 0.1)
        patience (int, optional): Number of epochs with no improvement after
            which the learning rate will be reduced. (default: 10)
        verbose (bool, optional): If True, prints a message to stdout for
            each update. (default: False)
        min_lr (float, optional): A scalar or a list of scalars giving a
            lower bound on the learning rate of all param groups or each
            group respectively. (default: 0)

    Returns:
        ReduceLROnPlateau instance.
    """
    scheduler = ReduceLROnPlateau(
        optimizer,
        factor=factor,
        patience=patience,
        verbose=verbose,
        min_lr=min_lr,
    )
    return scheduler
def one_cycle_lr(
    optimizer, max_lr, epochs, steps_per_epoch, pct_start=0.5, div_factor=10.0, final_div_factor=10000
):
    """Create a One Cycle Policy learning rate scheduler.

    Args:
        optimizer (torch.optim): Model optimizer.
        max_lr (float): Upper learning rate boundary in the cycle.
        epochs (int): Number of epochs to train for; together with
            steps_per_epoch this fixes the total number of steps in the cycle.
        steps_per_epoch (int): Number of optimizer steps per epoch.
        pct_start (float, optional): Fraction of the cycle (in steps) spent
            increasing the learning rate. (default: 0.5)
        div_factor (float, optional): Sets the initial learning rate as
            initial_lr = max_lr / div_factor. (default: 10.0)
        final_div_factor (float, optional): Sets the minimum learning rate as
            min_lr = initial_lr / final_div_factor. (default: 1e4)

    Returns:
        OneCycleLR instance.
    """
    scheduler = OneCycleLR(
        optimizer,
        max_lr,
        epochs=epochs,
        steps_per_epoch=steps_per_epoch,
        pct_start=pct_start,
        div_factor=div_factor,
        final_div_factor=final_div_factor,
    )
    return scheduler
| 40.7 | 102 | 0.679537 | 394 | 2,849 | 4.774112 | 0.27665 | 0.070175 | 0.041467 | 0.036683 | 0.239766 | 0.184476 | 0.127592 | 0.078682 | 0.047847 | 0.047847 | 0 | 0.01532 | 0.243945 | 2,849 | 69 | 103 | 41.289855 | 0.857939 | 0.667954 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.214286 | false | 0 | 0.071429 | 0 | 0.5 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
e04601e1749bc51e7e5f74ca383f947dc25e7da9 | 562 | py | Python | islam_fitz/survey/migrations/0005_auto_20210712_2132.py | OmarEhab177/Islam_fitz | 6ad0eb21549895a6fe537e8413022b82bc530c57 | [
"MIT"
] | null | null | null | islam_fitz/survey/migrations/0005_auto_20210712_2132.py | OmarEhab177/Islam_fitz | 6ad0eb21549895a6fe537e8413022b82bc530c57 | [
"MIT"
] | 2 | 2022-03-01T12:17:05.000Z | 2022-03-30T12:19:55.000Z | islam_fitz/survey/migrations/0005_auto_20210712_2132.py | OmarEhab177/Islam_fitz | 6ad0eb21549895a6fe537e8413022b82bc530c57 | [
"MIT"
] | null | null | null | # Generated by Django 3.1.12 on 2021-07-12 19:32
from django.db import migrations, models
class Migration(migrations.Migration):
    """Replace LastPage.whatsapp_button with a whatsapp_number char field."""

    dependencies = [
        ('survey', '0004_lastpage_whatsapp_button'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='lastpage',
            name='whatsapp_button',
        ),
        migrations.AddField(
            model_name='lastpage',
            name='whatsapp_number',
            # default=1 only backfills existing rows during this migration;
            # preserve_default=False drops it from the model afterwards.
            # NOTE(review): integer default on a CharField looks odd —
            # likely meant '1'; confirm intended value.
            field=models.CharField(default=1, max_length=50),
            preserve_default=False,
        ),
    ]
| 23.416667 | 61 | 0.592527 | 56 | 562 | 5.785714 | 0.678571 | 0.08642 | 0.104938 | 0.12963 | 0.179012 | 0 | 0 | 0 | 0 | 0 | 0 | 0.058524 | 0.300712 | 562 | 23 | 62 | 24.434783 | 0.765903 | 0.081851 | 0 | 0.235294 | 1 | 0 | 0.157588 | 0.05642 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.058824 | 0 | 0.235294 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
e048929c57d8279d48bbfdb7b6430abd2459ceab | 243 | py | Python | Others/code_festival/code-festival-2015-final-open/a.py | KATO-Hiro/AtCoder | cbbdb18e95110b604728a54aed83a6ed6b993fde | [
"CC0-1.0"
] | 2 | 2020-06-12T09:54:23.000Z | 2021-05-04T01:34:07.000Z | Others/code_festival/code-festival-2015-final-open/a.py | KATO-Hiro/AtCoder | cbbdb18e95110b604728a54aed83a6ed6b993fde | [
"CC0-1.0"
] | 961 | 2020-06-23T07:26:22.000Z | 2022-03-31T21:34:52.000Z | Others/code_festival/code-festival-2015-final-open/a.py | KATO-Hiro/AtCoder | cbbdb18e95110b604728a54aed83a6ed6b993fde | [
"CC0-1.0"
] | null | null | null | # -*- coding: utf-8 -*-
def main():
    """Read three whitespace-separated tokens from stdin and report whether
    their lengths form the 5-7-5 (haiku) pattern."""
    tokens = input().split()
    s, t, u = map(str, tokens)
    if (len(s), len(t), len(u)) == (5, 7, 5):
        print('valid')
    else:
        print('invalid')
if __name__ == '__main__':
    # Entry point: read one line from stdin and validate the 5-7-5 pattern.
    main()
| 16.2 | 52 | 0.440329 | 33 | 243 | 3 | 0.666667 | 0.121212 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.025157 | 0.345679 | 243 | 14 | 53 | 17.357143 | 0.597484 | 0.08642 | 0 | 0 | 0 | 0 | 0.097087 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.125 | true | 0 | 0 | 0 | 0.125 | 0.25 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
e04d583757322341dcf56eb5852389f9fd5b2748 | 1,634 | py | Python | mistral/tests/unit/utils/test_utils.py | shubhamdang/mistral | 3c83837f6ce1e4ab74fb519a63e82eaae70f9d2d | [
"Apache-2.0"
] | 205 | 2015-06-21T11:51:47.000Z | 2022-03-05T04:00:04.000Z | mistral/tests/unit/utils/test_utils.py | shubhamdang/mistral | 3c83837f6ce1e4ab74fb519a63e82eaae70f9d2d | [
"Apache-2.0"
] | 8 | 2015-06-23T14:47:58.000Z | 2021-01-28T06:06:44.000Z | mistral/tests/unit/utils/test_utils.py | shubhamdang/mistral | 3c83837f6ce1e4ab74fb519a63e82eaae70f9d2d | [
"Apache-2.0"
] | 110 | 2015-06-14T03:34:38.000Z | 2021-11-11T12:12:56.000Z | # Copyright 2013 - Mirantis, Inc.
# Copyright 2015 - StackStorm, Inc.
# Copyright 2015 - Huawei Technologies Co. Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from mistral import exceptions as exc
from mistral.tests.unit import base
from mistral.utils import ssh_utils
from mistral_lib import utils
class UtilsTest(base.BaseTest):
    """Tests for generic utility helpers."""

    def test_itersubclasses(self):
        # iter_subclasses must yield direct AND indirect subclasses (D is a
        # grandchild of A via C).
        class A(object):
            pass

        class B(A):
            pass

        class C(A):
            pass

        class D(C):
            pass

        self.assertEqual([B, C, D], list(utils.iter_subclasses(A)))

    def test_paramiko_to_private_key(self):
        # Relative path components are rejected (path traversal guard),
        # for both POSIX and Windows separators.
        self.assertRaises(
            exc.DataAccessException,
            ssh_utils._to_paramiko_private_key,
            "../dir"
        )

        self.assertRaises(
            exc.DataAccessException,
            ssh_utils._to_paramiko_private_key,
            "..\\dir"
        )

        # With no key file at all, None is returned rather than an error.
        self.assertIsNone(
            ssh_utils._to_paramiko_private_key(private_key_filename=None,
                                               password='pass')
        )
| 29.178571 | 77 | 0.632191 | 200 | 1,634 | 5.04 | 0.53 | 0.059524 | 0.029762 | 0.053571 | 0.168651 | 0.168651 | 0.140873 | 0.140873 | 0.140873 | 0.140873 | 0 | 0.013865 | 0.293758 | 1,634 | 55 | 78 | 29.709091 | 0.859619 | 0.403305 | 0 | 0.333333 | 0 | 0 | 0.017727 | 0 | 0 | 0 | 0 | 0 | 0.133333 | 1 | 0.066667 | false | 0.166667 | 0.133333 | 0 | 0.366667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
e04ec585b764ff6cb1ec40221ed614d384e735f8 | 581 | py | Python | django_app_permissions/management/commands/resolve_app_groups.py | amp89/django-app-permissions | 11f576d2118f5b73fdbefa0675acc3374a5a9749 | [
"MIT"
] | 2 | 2020-09-04T04:12:30.000Z | 2020-10-20T00:12:01.000Z | django_app_permissions/management/commands/resolve_app_groups.py | amp89/django-app-permissions | 11f576d2118f5b73fdbefa0675acc3374a5a9749 | [
"MIT"
] | 4 | 2020-09-06T22:29:18.000Z | 2020-09-11T01:19:50.000Z | django_app_permissions/management/commands/resolve_app_groups.py | amp89/django-app-permissions | 11f576d2118f5b73fdbefa0675acc3374a5a9749 | [
"MIT"
] | null | null | null | from django.core.management.base import BaseCommand, no_translations
from django.contrib.auth.models import Group
from django.conf import settings
import sys
class Command(BaseCommand):
def handle(self, *args, **options):
sys.stdout.write("\nResolving app groups")
app_list = [app_name.lower() for app_name in settings.ACCESS_CONTROLLED_INSTALLED_APPS]
for app_name in app_list:
created = Group.objects.get_or_create(name=app_name)
sys.stdout.write(f"\n{app_name}, new={created}")
sys.stdout.write("\n") | 32.277778 | 95 | 0.693632 | 79 | 581 | 4.936709 | 0.56962 | 0.089744 | 0.107692 | 0.061538 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.203098 | 581 | 18 | 96 | 32.277778 | 0.842333 | 0 | 0 | 0 | 0 | 0 | 0.087629 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.083333 | false | 0 | 0.333333 | 0 | 0.5 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
e04f5b24d6bd2e775a7ec943b8b4d08de4e402bf | 34,343 | py | Python | swift/common/db.py | sunzz679/swift-2.4.0--source-read | 64355268da5265440f5f7e8d280dd8cd4c2cf2a2 | [
"Apache-2.0"
] | null | null | null | swift/common/db.py | sunzz679/swift-2.4.0--source-read | 64355268da5265440f5f7e8d280dd8cd4c2cf2a2 | [
"Apache-2.0"
] | null | null | null | swift/common/db.py | sunzz679/swift-2.4.0--source-read | 64355268da5265440f5f7e8d280dd8cd4c2cf2a2 | [
"Apache-2.0"
] | null | null | null | # Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Database code for Swift """
from contextlib import contextmanager, closing
import hashlib
import logging
import os
from uuid import uuid4
import sys
import time
import errno
import six.moves.cPickle as pickle
from swift import gettext_ as _
from tempfile import mkstemp
from eventlet import sleep, Timeout
import sqlite3
from swift.common.constraints import MAX_META_COUNT, MAX_META_OVERALL_SIZE
from swift.common.utils import json, Timestamp, renamer, \
mkdirs, lock_parent_directory, fallocate
from swift.common.exceptions import LockTimeout
from swift.common.swob import HTTPBadRequest
#: Whether calls will be made to preallocate disk space for database files.
DB_PREALLOCATION = False
#: Timeout (seconds) for trying to connect to a DB.
BROKER_TIMEOUT = 25
#: Pickle protocol to use when serializing pending records.
PICKLE_PROTOCOL = 2
#: Max number of pending entries accumulated before a merge is forced.
PENDING_CAP = 131072
def utf8encode(*args):
    # Encode any unicode arguments to UTF-8 byte strings; non-unicode args
    # pass through unchanged.  NOTE: relies on the Python 2 ``unicode``
    # builtin (this module targets Python 2).
    return [(s.encode('utf8') if isinstance(s, unicode) else s) for s in args]
def utf8encodekeys(metadata):
    # In-place: replace every unicode key of the dict with its UTF-8 encoded
    # byte-string equivalent, keeping the associated value.  Python 2 only
    # (uses the ``unicode`` builtin).
    uni_keys = [k for k in metadata if isinstance(k, unicode)]
    for k in uni_keys:
        sv = metadata[k]
        del metadata[k]
        metadata[k.encode('utf-8')] = sv
def _db_timeout(timeout, db_file, call):
    """Invoke *call* inside a LockTimeout window, retrying with capped
    exponential backoff while SQLite reports the database as locked."""
    with LockTimeout(timeout, db_file):
        wait = 0.001
        while True:
            try:
                return call()
            except sqlite3.OperationalError as err:
                # Only lock contention is retried; any other operational
                # error propagates immediately.
                if 'locked' not in str(err):
                    raise
                sleep(wait)
                wait = min(wait * 2, 0.05)
class DatabaseConnectionError(sqlite3.DatabaseError):
    """More friendly error messages for DB Errors."""

    def __init__(self, path, msg, timeout=0):
        self.msg = msg
        self.timeout = timeout
        self.path = path

    def __str__(self):
        details = (self.path, self.timeout, self.msg)
        return 'DB connection error (%s, %s):\n%s' % details
class DatabaseAlreadyExists(sqlite3.DatabaseError):
    """More friendly error messages for DB Errors."""

    def __init__(self, path):
        self.path = path

    def __str__(self):
        return 'DB %s already exists' % (self.path,)
class GreenDBConnection(sqlite3.Connection):
    """SQLite DB Connection handler that plays well with eventlet."""

    def __init__(self, database, timeout=None, *args, **kwargs):
        # Default to the module-wide broker timeout; sqlite's own busy
        # timeout is set to 0 because _db_timeout handles the waiting.
        if timeout is None:
            timeout = BROKER_TIMEOUT
        self.timeout = timeout
        self.db_file = database
        super(GreenDBConnection, self).__init__(database, 0, *args, **kwargs)

    def cursor(self, cls=None):
        """Return a cursor, defaulting to the eventlet-friendly class."""
        if cls is None:
            cls = GreenDBCursor
        return sqlite3.Connection.cursor(self, cls)

    def commit(self):
        """Commit under the eventlet-friendly retry/timeout wrapper."""
        do_commit = lambda: sqlite3.Connection.commit(self)
        return _db_timeout(self.timeout, self.db_file, do_commit)
class GreenDBCursor(sqlite3.Cursor):
    """SQLite Cursor handler that plays well with eventlet."""

    def __init__(self, *args, **kwargs):
        # args[0] is the owning GreenDBConnection; adopt its settings so
        # execute() can apply the same timeout/retry policy.
        self.timeout = args[0].timeout
        self.db_file = args[0].db_file
        super(GreenDBCursor, self).__init__(*args, **kwargs)

    def execute(self, *args, **kwargs):
        """Execute under the eventlet-friendly retry/timeout wrapper."""
        run = lambda: sqlite3.Cursor.execute(self, *args, **kwargs)
        return _db_timeout(self.timeout, self.db_file, run)
def dict_factory(crs, row):
    """
    This should only be used when you need a real dict,
    i.e. when you're going to serialize the results.
    """
    column_names = [col[0] for col in crs.description]
    return dict(zip(column_names, row))
def chexor(old, name, timestamp):
    """
    Each entry in the account and container databases is XORed by the 128-bit
    hash on insert or delete. This serves as a rolling, order-independent hash
    of the contents. (check + XOR)

    :param old: hex representation of the current DB hash
    :param name: name of the object or container being inserted
    :param timestamp: internalized timestamp of the new record
    :returns: a hex representation of the new hash value
    """
    if name is None:
        raise Exception('name is None!')
    record = '%s-%s' % (name, timestamp)
    new = hashlib.md5(record.encode('utf8')).hexdigest()
    return '%032x' % (int(old, 16) ^ int(new, 16))
def get_db_connection(path, timeout=30, okay_to_create=False):
    """
    Returns a properly configured SQLite database connection.

    :param path: path to DB
    :param timeout: timeout for connection
    :param okay_to_create: if True, create the DB if it doesn't exist
    :returns: DB connection object
    :raises DatabaseConnectionError: on any sqlite3 error, or when connecting
        implicitly created the DB file while okay_to_create is False
    """
    try:
        connect_time = time.time()
        conn = sqlite3.connect(path, check_same_thread=False,
                               factory=GreenDBConnection, timeout=timeout)
        if path != ':memory:' and not okay_to_create:
            # attempt to detect and fail when connect creates the db file:
            # a zero-size file whose ctime is at/after our connect call was
            # created by sqlite3.connect itself, so remove it and fail.
            stat = os.stat(path)
            if stat.st_size == 0 and stat.st_ctime >= connect_time:
                os.unlink(path)
                raise DatabaseConnectionError(path,
                                              'DB file created by connect?')
        conn.row_factory = sqlite3.Row
        conn.text_factory = str
        with closing(conn.cursor()) as cur:
            # PRAGMAs tuned for speed over strict durability.
            cur.execute('PRAGMA synchronous = NORMAL')
            cur.execute('PRAGMA count_changes = OFF')
            cur.execute('PRAGMA temp_store = MEMORY')
            cur.execute('PRAGMA journal_mode = DELETE')
        # Make the rolling-hash helper callable from SQL statements.
        conn.create_function('chexor', 3, chexor)
    except sqlite3.DatabaseError:
        import traceback
        raise DatabaseConnectionError(path, traceback.format_exc(),
                                      timeout=timeout)
    return conn
class DatabaseBroker(object):
"""Encapsulates working with a database."""
    def __init__(self, db_file, timeout=BROKER_TIMEOUT, logger=None,
                 account=None, container=None, pending_timeout=None,
                 stale_reads_ok=False):
        """Encapsulates working with a database."""
        # Live sqlite connection; None while checked out or not yet opened.
        self.conn = None
        self.db_file = db_file
        # Records awaiting merge are appended to this side file first.
        self.pending_file = self.db_file + '.pending'
        self.pending_timeout = pending_timeout or 10
        # When True, readers tolerate failing to flush pending records.
        self.stale_reads_ok = stale_reads_ok
        self.db_dir = os.path.dirname(db_file)
        self.timeout = timeout
        self.logger = logger or logging.getLogger()
        self.account = account
        self.container = container
        self._db_version = -1
def __str__(self):
"""
Returns a string identifying the entity under broker to a human.
The baseline implementation returns a full pathname to a database.
This is vital for useful diagnostics.
"""
return self.db_file
    def initialize(self, put_timestamp=None, storage_policy_index=None):
        """
        Create the DB

        The storage_policy_index is passed through to the subclass's
        ``_initialize`` method.  It is ignored by ``AccountBroker``.

        :param put_timestamp: internalized timestamp of initial PUT request
        :param storage_policy_index: only required for containers
        """
        if self.db_file == ':memory:':
            tmp_db_file = None
            conn = get_db_connection(self.db_file, self.timeout)
        else:
            # Build the DB in a temp file and rename it into place at the
            # end, so a half-created DB is never visible under db_file.
            mkdirs(self.db_dir)
            fd, tmp_db_file = mkstemp(suffix='.tmp', dir=self.db_dir)
            os.close(fd)
            conn = sqlite3.connect(tmp_db_file, check_same_thread=False,
                                   factory=GreenDBConnection, timeout=0)
        # creating dbs implicitly does a lot of transactions, so we
        # pick fast, unsafe options here and do a big fsync at the end.
        with closing(conn.cursor()) as cur:
            cur.execute('PRAGMA synchronous = OFF')
            cur.execute('PRAGMA temp_store = MEMORY')
            cur.execute('PRAGMA journal_mode = MEMORY')
        conn.create_function('chexor', 3, chexor)
        conn.row_factory = sqlite3.Row
        conn.text_factory = str
        # Schema shared by all broker types: the replication sync tables,
        # whose updated_at columns are maintained by the triggers below.
        conn.executescript("""
            CREATE TABLE outgoing_sync (
                remote_id TEXT UNIQUE,
                sync_point INTEGER,
                updated_at TEXT DEFAULT 0
            );
            CREATE TABLE incoming_sync (
                remote_id TEXT UNIQUE,
                sync_point INTEGER,
                updated_at TEXT DEFAULT 0
            );
            CREATE TRIGGER outgoing_sync_insert AFTER INSERT ON outgoing_sync
            BEGIN
                UPDATE outgoing_sync
                SET updated_at = STRFTIME('%s', 'NOW')
                WHERE ROWID = new.ROWID;
            END;
            CREATE TRIGGER outgoing_sync_update AFTER UPDATE ON outgoing_sync
            BEGIN
                UPDATE outgoing_sync
                SET updated_at = STRFTIME('%s', 'NOW')
                WHERE ROWID = new.ROWID;
            END;
            CREATE TRIGGER incoming_sync_insert AFTER INSERT ON incoming_sync
            BEGIN
                UPDATE incoming_sync
                SET updated_at = STRFTIME('%s', 'NOW')
                WHERE ROWID = new.ROWID;
            END;
            CREATE TRIGGER incoming_sync_update AFTER UPDATE ON incoming_sync
            BEGIN
                UPDATE incoming_sync
                SET updated_at = STRFTIME('%s', 'NOW')
                WHERE ROWID = new.ROWID;
            END;
        """)
        if not put_timestamp:
            put_timestamp = Timestamp(0).internal
        # Subclass hook builds the type-specific schema and stat row.
        self._initialize(conn, put_timestamp,
                         storage_policy_index=storage_policy_index)
        conn.commit()
        if tmp_db_file:
            conn.close()
            # One big fsync instead of per-transaction syncing (see the
            # unsafe pragmas above).
            with open(tmp_db_file, 'r+b') as fp:
                os.fsync(fp.fileno())
            with lock_parent_directory(self.db_file, self.pending_timeout):
                if os.path.exists(self.db_file):
                    # It's as if there was a "condition" where different parts
                    # of the system were "racing" each other.
                    raise DatabaseAlreadyExists(self.db_file)
                renamer(tmp_db_file, self.db_file)
            self.conn = get_db_connection(self.db_file, self.timeout)
        else:
            self.conn = conn
def delete_db(self, timestamp):
"""
Mark the DB as deleted
:param timestamp: internalized delete timestamp
"""
# first, clear the metadata
cleared_meta = {}
for k in self.metadata:
cleared_meta[k] = ('', timestamp)
self.update_metadata(cleared_meta)
# then mark the db as deleted
with self.get() as conn:
self._delete_db(conn, timestamp)
conn.commit()
def possibly_quarantine(self, exc_type, exc_value, exc_traceback):
"""
Checks the exception info to see if it indicates a quarantine situation
(malformed or corrupted database). If not, the original exception will
be reraised. If so, the database will be quarantined and a new
sqlite3.DatabaseError will be raised indicating the action taken.
"""
if 'database disk image is malformed' in str(exc_value):
exc_hint = 'malformed'
elif 'file is encrypted or is not a database' in str(exc_value):
exc_hint = 'corrupted'
elif 'disk I/O error' in str(exc_value):
exc_hint = 'disk error while accessing'
else:
raise exc_type, exc_value, exc_traceback
prefix_path = os.path.dirname(self.db_dir)
partition_path = os.path.dirname(prefix_path)
dbs_path = os.path.dirname(partition_path)
device_path = os.path.dirname(dbs_path)
quar_path = os.path.join(device_path, 'quarantined',
self.db_type + 's',
os.path.basename(self.db_dir))
try:
renamer(self.db_dir, quar_path, fsync=False)
except OSError as e:
if e.errno not in (errno.EEXIST, errno.ENOTEMPTY):
raise
quar_path = "%s-%s" % (quar_path, uuid4().hex)
renamer(self.db_dir, quar_path, fsync=False)
detail = _('Quarantined %s to %s due to %s database') % \
(self.db_dir, quar_path, exc_hint)
self.logger.error(detail)
raise sqlite3.DatabaseError(detail)
    @contextmanager
    def get(self):
        """Use with the "with" statement; returns a database connection."""
        # Lazily (re)open the connection; a corrupt DB detected here is
        # handed to possibly_quarantine(), which either quarantines it or
        # re-raises.
        if not self.conn:
            if self.db_file != ':memory:' and os.path.exists(self.db_file):
                try:
                    self.conn = get_db_connection(self.db_file, self.timeout)
                except (sqlite3.DatabaseError, DatabaseConnectionError):
                    self.possibly_quarantine(*sys.exc_info())
            else:
                raise DatabaseConnectionError(self.db_file, "DB doesn't exist")
        # Check the connection out for the duration of the with-block so
        # concurrent users of this broker cannot share it.
        conn = self.conn
        self.conn = None
        try:
            yield conn
            # Clean exit: drop any uncommitted work and return the
            # connection to the broker for reuse.
            conn.rollback()
            self.conn = conn
        except sqlite3.DatabaseError:
            # DB-level failure: close (best effort) and maybe quarantine;
            # the connection is deliberately not reused.
            try:
                conn.close()
            except Exception:
                pass
            self.possibly_quarantine(*sys.exc_info())
        except (Exception, Timeout):
            # Any other failure: discard the connection and propagate.
            conn.close()
            raise
@contextmanager
def lock(self):
"""Use with the "with" statement; locks a database."""
if not self.conn:
if self.db_file != ':memory:' and os.path.exists(self.db_file):
self.conn = get_db_connection(self.db_file, self.timeout)
else:
raise DatabaseConnectionError(self.db_file, "DB doesn't exist")
conn = self.conn
self.conn = None
orig_isolation_level = conn.isolation_level
conn.isolation_level = None
conn.execute('BEGIN IMMEDIATE')
try:
yield True
except (Exception, Timeout):
pass
try:
conn.execute('ROLLBACK')
conn.isolation_level = orig_isolation_level
self.conn = conn
except (Exception, Timeout):
logging.exception(
_('Broker error trying to rollback locked connection'))
conn.close()
def newid(self, remote_id):
"""
Re-id the database. This should be called after an rsync.
:param remote_id: the ID of the remote database being rsynced in
"""
with self.get() as conn:
row = conn.execute('''
UPDATE %s_stat SET id=?
''' % self.db_type, (str(uuid4()),))
row = conn.execute('''
SELECT ROWID FROM %s ORDER BY ROWID DESC LIMIT 1
''' % self.db_contains_type).fetchone()
sync_point = row['ROWID'] if row else -1
conn.execute('''
INSERT OR REPLACE INTO incoming_sync (sync_point, remote_id)
VALUES (?, ?)
''', (sync_point, remote_id))
self._newid(conn)
conn.commit()
    def _newid(self, conn):
        # Subclass hook: extra work to do when receiving an rsynced db.
        pass
def _is_deleted(self, conn):
"""
Check if the database is considered deleted
:param conn: database conn
:returns: True if the DB is considered to be deleted, False otherwise
"""
raise NotImplementedError()
def is_deleted(self):
"""
Check if the DB is considered to be deleted.
:returns: True if the DB is considered to be deleted, False otherwise
"""
if self.db_file != ':memory:' and not os.path.exists(self.db_file):
return True
self._commit_puts_stale_ok()
with self.get() as conn:
return self._is_deleted(conn)
def merge_timestamps(self, created_at, put_timestamp, delete_timestamp):
"""
Used in replication to handle updating timestamps.
:param created_at: create timestamp
:param put_timestamp: put timestamp
:param delete_timestamp: delete timestamp
"""
with self.get() as conn:
old_status = self._is_deleted(conn)
conn.execute('''
UPDATE %s_stat SET created_at=MIN(?, created_at),
put_timestamp=MAX(?, put_timestamp),
delete_timestamp=MAX(?, delete_timestamp)
''' % self.db_type, (created_at, put_timestamp, delete_timestamp))
if old_status != self._is_deleted(conn):
timestamp = Timestamp(time.time())
self._update_status_changed_at(conn, timestamp.internal)
conn.commit()
def get_items_since(self, start, count):
"""
Get a list of objects in the database between start and end.
:param start: start ROWID
:param count: number to get
:returns: list of objects between start and end
"""
self._commit_puts_stale_ok()
with self.get() as conn:
curs = conn.execute('''
SELECT * FROM %s WHERE ROWID > ? ORDER BY ROWID ASC LIMIT ?
''' % self.db_contains_type, (start, count))
curs.row_factory = dict_factory
return [r for r in curs]
def get_sync(self, id, incoming=True):
"""
Gets the most recent sync point for a server from the sync table.
:param id: remote ID to get the sync_point for
:param incoming: if True, get the last incoming sync, otherwise get
the last outgoing sync
:returns: the sync point, or -1 if the id doesn't exist.
"""
with self.get() as conn:
row = conn.execute(
"SELECT sync_point FROM %s_sync WHERE remote_id=?"
% ('incoming' if incoming else 'outgoing'), (id,)).fetchone()
if not row:
return -1
return row['sync_point']
def get_syncs(self, incoming=True):
"""
Get a serialized copy of the sync table.
:param incoming: if True, get the last incoming sync, otherwise get
the last outgoing sync
:returns: list of {'remote_id', 'sync_point'}
"""
with self.get() as conn:
curs = conn.execute('''
SELECT remote_id, sync_point FROM %s_sync
''' % ('incoming' if incoming else 'outgoing'))
result = []
for row in curs:
result.append({'remote_id': row[0], 'sync_point': row[1]})
return result
def get_max_row(self):
query = '''
SELECT SQLITE_SEQUENCE.seq
FROM SQLITE_SEQUENCE
WHERE SQLITE_SEQUENCE.name == '%s'
LIMIT 1
''' % (self.db_contains_type)
with self.get() as conn:
row = conn.execute(query).fetchone()
return row[0] if row else -1
def get_replication_info(self):
"""
Get information about the DB required for replication.
:returns: dict containing keys from get_info plus max_row and metadata
Note:: get_info's <db_contains_type>_count is translated to just
"count" and metadata is the raw string.
"""
info = self.get_info()
info['count'] = info.pop('%s_count' % self.db_contains_type)
info['metadata'] = self.get_raw_metadata()
info['max_row'] = self.get_max_row()
return info
def get_info(self):
self._commit_puts_stale_ok()
with self.get() as conn:
curs = conn.execute('SELECT * from %s_stat' % self.db_type)
curs.row_factory = dict_factory
return curs.fetchone()
    # Add a single record to the database (buffered through the .pending
    # file whenever possible to batch up writes).
    def put_record(self, record):
        """
        Queue one record for this database.

        :param record: broker-specific record dict
        :raises DatabaseConnectionError: if the on-disk DB doesn't exist
        """
        if self.db_file == ':memory:':
            self.merge_items([record])
            return
        if not os.path.exists(self.db_file):
            raise DatabaseConnectionError(self.db_file, "DB doesn't exist")
        # Serialize writers by locking the parent directory.
        with lock_parent_directory(self.pending_file, self.pending_timeout):
            pending_size = 0
            try:
                pending_size = os.path.getsize(self.pending_file)
            except OSError as err:
                if err.errno != errno.ENOENT:
                    raise
            if pending_size > PENDING_CAP:
                # The pending file is full: merge everything (plus this
                # record) into the DB proper instead of appending.
                self._commit_puts([record])
            else:
                # Append the base64-pickled record to the pending file.
                with open(self.pending_file, 'a+b') as fp:
                    # Colons aren't used in base64 encoding; so they are our
                    # delimiter
                    fp.write(':')
                    # NOTE(review): str.encode('base64') is Python 2 only;
                    # porting to Python 3 would need the base64 module here
                    # and in the matching _commit_puts_load decoder.
                    fp.write(pickle.dumps(
                        self.make_tuple_for_pickle(record),
                        protocol=PICKLE_PROTOCOL).encode('base64'))
                    fp.flush()
    def _commit_puts(self, item_list=None):
        """
        Scan for .pending files and commit the found records by feeding them
        to merge_items(). Assume that lock_parent_directory has already been
        called.

        :param item_list: A list of items to commit in addition to .pending
        """
        if self.db_file == ':memory:' or not os.path.exists(self.pending_file):
            return
        if item_list is None:
            item_list = []
        # Grow the DB file ahead of the merge to limit fragmentation.
        self._preallocate()
        if not os.path.getsize(self.pending_file):
            if item_list:
                self.merge_items(item_list)
            return
        with open(self.pending_file, 'r+b') as fp:
            # Entries are colon-delimited base64 pickles (see put_record).
            for entry in fp.read().split(':'):
                if entry:
                    try:
                        self._commit_puts_load(item_list, entry)
                    except Exception:
                        # A single bad entry is logged and skipped rather
                        # than blocking all other pending records.
                        self.logger.exception(
                            _('Invalid pending entry %(file)s: %(entry)s'),
                            {'file': self.pending_file, 'entry': entry})
            if item_list:
                self.merge_items(item_list)
            try:
                # Everything merged; empty the pending file in place.
                os.ftruncate(fp.fileno(), 0)
            except OSError as err:
                if err.errno != errno.ENOENT:
                    raise
def _commit_puts_stale_ok(self):
"""
Catch failures of _commit_puts() if broker is intended for
reading of stats, and thus does not care for pending updates.
"""
if self.db_file == ':memory:' or not os.path.exists(self.pending_file):
return
try:
with lock_parent_directory(self.pending_file,
self.pending_timeout):
self._commit_puts()
except LockTimeout:
if not self.stale_reads_ok:
raise
    def _commit_puts_load(self, item_list, entry):
        """
        Unmarshall the :param:entry and append it to :param:item_list.
        This is implemented by a particular broker to be compatible
        with its :func:`merge_items`.

        :param item_list: list the decoded record is appended to
        :param entry: one base64-pickled entry from the .pending file
        """
        raise NotImplementedError
    def make_tuple_for_pickle(self, record):
        """
        Turn this db record dict into the format this service uses for
        pending pickles.

        :param record: broker-specific record dict
        :returns: a tuple suitable for pickling into the .pending file
        """
        raise NotImplementedError
def merge_syncs(self, sync_points, incoming=True):
"""
Merge a list of sync points with the incoming sync table.
:param sync_points: list of sync points where a sync point is a dict of
{'sync_point', 'remote_id'}
:param incoming: if True, get the last incoming sync, otherwise get
the last outgoing sync
"""
with self.get() as conn:
for rec in sync_points:
try:
conn.execute('''
INSERT INTO %s_sync (sync_point, remote_id)
VALUES (?, ?)
''' % ('incoming' if incoming else 'outgoing'),
(rec['sync_point'], rec['remote_id']))
except sqlite3.IntegrityError:
conn.execute('''
UPDATE %s_sync SET sync_point=max(?, sync_point)
WHERE remote_id=?
''' % ('incoming' if incoming else 'outgoing'),
(rec['sync_point'], rec['remote_id']))
conn.commit()
    def _preallocate(self):
        """
        The idea is to allocate space in front of an expanding db. If it gets
        within 512k of a boundary, it allocates to the next boundary.
        Boundaries are 2m, 5m, 10m, 25m, 50m, then every 50m after.
        """
        if not DB_PREALLOCATION or self.db_file == ':memory:':
            return
        MB = (1024 * 1024)

        def prealloc_points():
            # Yields 1m, 2m, 5m, 10m, 25m, 50m, then 100m, 150m, ...
            # forever (pm is still 50 when the for-loop ends).
            for pm in (1, 2, 5, 10, 25, 50):
                yield pm * MB
            while True:
                pm += 50
                yield pm * MB

        stat = os.stat(self.db_file)
        file_size = stat.st_size
        # Space actually reserved on disk (st_blocks is in 512-byte units).
        allocated_size = stat.st_blocks * 512
        # First boundary leaving at least 512k of headroom past file_size.
        for point in prealloc_points():
            if file_size <= point - MB / 2:
                prealloc_size = point
                break
        if allocated_size < prealloc_size:
            with open(self.db_file, 'rb+') as fp:
                fallocate(fp.fileno(), int(prealloc_size))
def get_raw_metadata(self):
with self.get() as conn:
try:
metadata = conn.execute('SELECT metadata FROM %s_stat' %
self.db_type).fetchone()[0]
except sqlite3.OperationalError as err:
if 'no such column: metadata' not in str(err):
raise
metadata = ''
return metadata
@property
def metadata(self):
"""
Returns the metadata dict for the database. The metadata dict values
are tuples of (value, timestamp) where the timestamp indicates when
that key was set to that value.
"""
metadata = self.get_raw_metadata()
if metadata:
metadata = json.loads(metadata)
utf8encodekeys(metadata)
else:
metadata = {}
return metadata
@staticmethod
def validate_metadata(metadata):
"""
Validates that metadata_falls within acceptable limits.
:param metadata: to be validated
:raises: HTTPBadRequest if MAX_META_COUNT or MAX_META_OVERALL_SIZE
is exceeded
"""
meta_count = 0
meta_size = 0
for key, (value, timestamp) in metadata.items():
key = key.lower()
if value != '' and (key.startswith('x-account-meta') or
key.startswith('x-container-meta')):
prefix = 'x-account-meta-'
if key.startswith('x-container-meta-'):
prefix = 'x-container-meta-'
key = key[len(prefix):]
meta_count = meta_count + 1
meta_size = meta_size + len(key) + len(value)
if meta_count > MAX_META_COUNT:
raise HTTPBadRequest('Too many metadata items; max %d'
% MAX_META_COUNT)
if meta_size > MAX_META_OVERALL_SIZE:
raise HTTPBadRequest('Total metadata too large; max %d'
% MAX_META_OVERALL_SIZE)
    def update_metadata(self, metadata_updates, validate_metadata=False):
        """
        Updates the metadata dict for the database. The metadata dict values
        are tuples of (value, timestamp) where the timestamp indicates when
        that key was set to that value. Key/values will only be overwritten
        if the timestamp is newer. To delete a key, set its value to ('',
        timestamp). These empty keys will eventually be removed by
        :func:`reclaim`
        """
        # Load the currently stored metadata dict.
        old_metadata = self.metadata
        # If every key being updated already exists...
        if set(metadata_updates).issubset(set(old_metadata)):
            # ...and no update carries a newer timestamp (requests can
            # arrive out of order), the whole batch is stale.
            for key, (value, timestamp) in metadata_updates.items():
                if timestamp > old_metadata[key][1]:
                    break
            else:
                # Nothing newer than what is stored; do nothing.
                return
        # At least one update must be applied.
        with self.get() as conn:
            try:
                md = conn.execute('SELECT metadata FROM %s_stat' %
                                  self.db_type).fetchone()[0]
                md = json.loads(md) if md else {}
                utf8encodekeys(md)
            except sqlite3.OperationalError as err:
                if 'no such column: metadata' not in str(err):
                    raise
                # Older schema: add the metadata column on first use.
                conn.execute("""
                    ALTER TABLE %s_stat
                    ADD COLUMN metadata TEXT DEFAULT '' """ % self.db_type)
                md = {}
            # Apply each update that is new, or newer than the stored value.
            for key, value_timestamp in metadata_updates.items():
                value, timestamp = value_timestamp
                if key not in md or timestamp > md[key][1]:
                    md[key] = value_timestamp
            if validate_metadata:
                DatabaseBroker.validate_metadata(md)
            conn.execute('UPDATE %s_stat SET metadata = ?' % self.db_type,
                         (json.dumps(md),))
            conn.commit()
    def reclaim(self, age_timestamp, sync_timestamp):
        """
        Delete rows from the db_contains_type table that are marked deleted
        and whose created_at timestamp is < age_timestamp. Also deletes rows
        from incoming_sync and outgoing_sync where the updated_at timestamp
        is < sync_timestamp.

        In addition, this calls the DatabaseBroker's :func:`_reclaim` method.

        :param age_timestamp: max created_at timestamp of object rows to
                              delete
        :param sync_timestamp: max update_at timestamp of sync rows to delete
        """
        # Flush pending records first so they are subject to reclamation.
        if self.db_file != ':memory:' and os.path.exists(self.pending_file):
            with lock_parent_directory(self.pending_file,
                                       self.pending_timeout):
                self._commit_puts()
        with self.get() as conn:
            conn.execute('''
                DELETE FROM %s WHERE deleted = 1 AND %s < ?
            ''' % (self.db_contains_type, self.db_reclaim_timestamp),
                (age_timestamp,))
            try:
                conn.execute('''
                    DELETE FROM outgoing_sync WHERE updated_at < ?
                ''', (sync_timestamp,))
                conn.execute('''
                    DELETE FROM incoming_sync WHERE updated_at < ?
                ''', (sync_timestamp,))
            except sqlite3.OperationalError as err:
                # Old dbs didn't have updated_at in the _sync tables.
                if 'no such column: updated_at' not in str(err):
                    raise
            # Reclaim empty metadata values; note the explicit base-class
            # call rather than a virtual self._reclaim().
            DatabaseBroker._reclaim(self, conn, age_timestamp)
            conn.commit()
def _reclaim(self, conn, timestamp):
"""
Removes any empty metadata values older than the timestamp using the
given database connection. This function will not call commit on the
conn, but will instead return True if the database needs committing.
This function was created as a worker to limit transactions and commits
from other related functions.
:param conn: Database connection to reclaim metadata within.
:param timestamp: Empty metadata items last updated before this
timestamp will be removed.
:returns: True if conn.commit() should be called
"""
try:
md = conn.execute('SELECT metadata FROM %s_stat' %
self.db_type).fetchone()[0]
if md:
md = json.loads(md)
keys_to_delete = []
for key, (value, value_timestamp) in md.items():
if value == '' and value_timestamp < timestamp:
keys_to_delete.append(key)
if keys_to_delete:
for key in keys_to_delete:
del md[key]
conn.execute('UPDATE %s_stat SET metadata = ?' %
self.db_type, (json.dumps(md),))
return True
except sqlite3.OperationalError as err:
if 'no such column: metadata' not in str(err):
raise
return False
def update_put_timestamp(self, timestamp):
"""
Update the put_timestamp. Only modifies it if it is greater than
the current timestamp.
:param timestamp: internalized put timestamp
"""
with self.get() as conn:
conn.execute(
'UPDATE %s_stat SET put_timestamp = ?'
' WHERE put_timestamp < ?' % self.db_type,
(timestamp, timestamp))
conn.commit()
def update_status_changed_at(self, timestamp):
"""
Update the status_changed_at field in the stat table. Only
modifies status_changed_at if the timestamp is greater than the
current status_changed_at timestamp.
:param timestamp: internalized timestamp
"""
with self.get() as conn:
self._update_status_changed_at(conn, timestamp)
conn.commit()
def _update_status_changed_at(self, conn, timestamp):
conn.execute(
'UPDATE %s_stat SET status_changed_at = ?'
' WHERE status_changed_at < ?' % self.db_type,
(timestamp, timestamp))
| 38.032115 | 79 | 0.573945 | 4,027 | 34,343 | 4.742737 | 0.162155 | 0.019163 | 0.017802 | 0.01021 | 0.320593 | 0.279125 | 0.229279 | 0.209069 | 0.188387 | 0.169904 | 0 | 0.006636 | 0.341787 | 34,343 | 902 | 80 | 38.074279 | 0.838266 | 0.041406 | 0 | 0.375212 | 0 | 0 | 0.169021 | 0.000913 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0.005093 | 0.03056 | null | null | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
e053d13d8a4cd7c86d2670f87f97133354905c98 | 36,370 | py | Python | tests/python/gaia-ui-tests/gaiatest/gaia_test.py | AmyYLee/gaia | a5dbae8235163d7f985bdeb7d649268f02749a8b | [
"Apache-2.0"
] | 1 | 2020-04-06T13:02:09.000Z | 2020-04-06T13:02:09.000Z | tests/python/gaia-ui-tests/gaiatest/gaia_test.py | AmyYLee/gaia | a5dbae8235163d7f985bdeb7d649268f02749a8b | [
"Apache-2.0"
] | null | null | null | tests/python/gaia-ui-tests/gaiatest/gaia_test.py | AmyYLee/gaia | a5dbae8235163d7f985bdeb7d649268f02749a8b | [
"Apache-2.0"
] | null | null | null | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import json
import os
import sys
import time
from marionette import MarionetteTestCase
from marionette.by import By
from marionette.errors import NoSuchElementException
from marionette.errors import ElementNotVisibleException
from marionette.errors import TimeoutException
from marionette.errors import StaleElementException
from marionette.errors import InvalidResponseException
import mozdevice
class LockScreen(object):
    """Drives the Gaia lock screen through a Marionette session."""

    def __init__(self, marionette):
        self.marionette = marionette
        # Load the lock-screen atom so GaiaLockScreen.* is callable remotely.
        atom = os.path.abspath(os.path.join(
            __file__, os.path.pardir, 'atoms', "gaia_lock_screen.js"))
        self.marionette.import_script(atom)

    @property
    def is_locked(self):
        """True when the device lock screen is currently engaged."""
        self.marionette.switch_to_frame()
        return self.marionette.execute_script(
            'window.wrappedJSObject.LockScreen.locked')

    def lock(self):
        """Engage the lock screen, failing loudly if Gaia refuses."""
        self.marionette.switch_to_frame()
        result = self.marionette.execute_async_script('GaiaLockScreen.lock()')
        assert result, 'Unable to lock screen'

    def unlock(self):
        """Dismiss the lock screen, failing loudly if Gaia refuses."""
        self.marionette.switch_to_frame()
        result = self.marionette.execute_async_script(
            'GaiaLockScreen.unlock()')
        assert result, 'Unable to unlock screen'
class GaiaApp(object):
    """Value object describing a (possibly running) Gaia application."""

    def __init__(self, origin=None, name=None, frame=None, src=None):
        self.frame = frame
        self.frame_id = frame
        self.src = src
        self.name = name
        self.origin = origin

    def __eq__(self, other):
        # Two apps are equal when every recorded attribute matches.
        return self.__dict__ == other.__dict__
class GaiaApps(object):
    """Launches, inspects and kills Gaia apps via Marionette atoms."""

    def __init__(self, marionette):
        self.marionette = marionette
        # Load the gaia_apps.js atom so GaiaApps.* is callable remotely.
        js = os.path.abspath(os.path.join(__file__, os.path.pardir, 'atoms', "gaia_apps.js"))
        self.marionette.import_script(js)

    def get_permission(self, app_name, permission_name):
        """Return the current value of an app's permission."""
        return self.marionette.execute_async_script("return GaiaApps.getPermission('%s', '%s')" % (app_name, permission_name))

    def set_permission(self, app_name, permission_name, value):
        """Set an app's permission to the given value."""
        return self.marionette.execute_async_script("return GaiaApps.setPermission('%s', '%s', '%s')" %
                                                    (app_name, permission_name, value))

    def launch(self, name, switch_to_frame=True, url=None, launch_timeout=None):
        """
        Launch the app with the given display name.

        :param name: display name of the app
        :param switch_to_frame: if True, switch Marionette into the app frame
        :param url: optional URL fragment the frame must reach before the
                    switch is considered complete
        :param launch_timeout: script timeout for the remote launch call
        :returns: a :class:`GaiaApp` describing the launched app
        :raises Exception: when no app frame appears
        """
        self.marionette.switch_to_frame()
        result = self.marionette.execute_async_script("GaiaApps.launchWithName('%s')" % name, script_timeout=launch_timeout)
        assert result, "Failed to launch app with name '%s'" % name
        app = GaiaApp(frame=result.get('frame'),
                      src=result.get('src'),
                      name=result.get('name'),
                      origin=result.get('origin'))
        if app.frame_id is None:
            raise Exception("App failed to launch; there is no app frame")
        if switch_to_frame:
            self.switch_to_frame(app.frame_id, url)
        return app

    @property
    def displayed_app(self):
        """The foreground app, as a :class:`GaiaApp`."""
        self.marionette.switch_to_frame()
        result = self.marionette.execute_async_script('return GaiaApps.displayedApp();')
        return GaiaApp(frame=result.get('frame'),
                       src=result.get('src'),
                       name=result.get('name'),
                       origin=result.get('origin'))

    def switch_to_displayed_app(self):
        """Point Marionette at the frame of the foreground app."""
        self.marionette.switch_to_default_content()
        self.marionette.switch_to_frame(self.displayed_app.frame)

    def is_app_installed(self, app_name):
        """Return the locate result (truthy when the app is installed)."""
        self.marionette.switch_to_frame()
        return self.marionette.execute_async_script("GaiaApps.locateWithName('%s')" % app_name)

    def uninstall(self, name):
        """Uninstall the app with the given name."""
        self.marionette.switch_to_frame()
        self.marionette.execute_async_script("GaiaApps.uninstallWithName('%s')" % name)

    def kill(self, app):
        """Kill the given running app (by its origin)."""
        self.marionette.switch_to_frame()
        # NOTE(review): the atom is re-imported here even though __init__
        # already did — presumably to survive a reloaded system frame;
        # confirm before removing.
        js = os.path.abspath(os.path.join(__file__, os.path.pardir, 'atoms', "gaia_apps.js"))
        self.marionette.import_script(js)
        result = self.marionette.execute_async_script("GaiaApps.kill('%s');" % app.origin)
        assert result, "Failed to kill app with name '%s'" % app.name

    def kill_all(self):
        """Kill every running app."""
        self.marionette.switch_to_frame()
        # Same defensive atom re-import as in kill() above.
        js = os.path.abspath(os.path.join(__file__, os.path.pardir, 'atoms', "gaia_apps.js"))
        self.marionette.import_script(js)
        self.marionette.execute_async_script("GaiaApps.killAll()")

    def runningApps(self):
        """Return the remote GaiaApps.getRunningApps() result."""
        return self.marionette.execute_script("return GaiaApps.getRunningApps()")

    def switch_to_frame(self, app_frame, url=None, timeout=30):
        """
        Switch into an app frame and poll until it has loaded.

        :param app_frame: frame (id) to switch into
        :param url: URL fragment to wait for; by default any non-blank URL
        :param timeout: seconds to wait before giving up
        :raises TimeoutException: if the frame never reaches the wanted URL
        """
        self.marionette.switch_to_frame(app_frame)
        start = time.time()
        if not url:
            def check(now):
                return "about:blank" not in now
        else:
            def check(now):
                return url in now
        # Poll the frame's URL every two seconds until it passes the check.
        while (time.time() - start < timeout):
            if check(self.marionette.get_url()):
                return
            time.sleep(2)
        raise TimeoutException('Could not switch to app frame %s in time' % app_frame)
class GaiaData(object):
    def __init__(self, marionette, testvars=None):
        self.marionette = marionette
        # Test variables (e.g. the 'wifi' network definition) are optional.
        self.testvars = testvars or {}
        # Load the data-layer atom so GaiaDataLayer.* is callable remotely.
        js = os.path.abspath(os.path.join(__file__, os.path.pardir, 'atoms', "gaia_data_layer.js"))
        self.marionette.import_script(js)
        self.marionette.set_search_timeout(10000)
def set_time(self, date_number):
self.marionette.set_context(self.marionette.CONTEXT_CHROME)
self.marionette.execute_script("window.navigator.mozTime.set(%s);" % date_number)
self.marionette.set_context(self.marionette.CONTEXT_CONTENT)
@property
def all_contacts(self):
self.marionette.switch_to_frame()
return self.marionette.execute_async_script('return GaiaDataLayer.getAllContacts();', special_powers=True)
@property
def sim_contacts(self):
self.marionette.switch_to_frame()
return self.marionette.execute_async_script('return GaiaDataLayer.getSIMContacts();', special_powers=True)
def insert_contact(self, contact):
self.marionette.switch_to_frame()
result = self.marionette.execute_async_script('return GaiaDataLayer.insertContact(%s);' % json.dumps(contact), special_powers=True)
assert result, 'Unable to insert contact %s' % contact
    def remove_all_contacts(self, default_script_timeout=60000):
        """
        Delete every contact on the device.

        :param default_script_timeout: baseline Marionette script timeout
            in milliseconds, restored once the operation finishes
        """
        self.marionette.switch_to_frame()
        # Budget roughly a second per contact, but never less than the
        # default timeout, so large address books don't time out.
        self.marionette.set_script_timeout(max(default_script_timeout, 1000 * len(self.all_contacts)))
        result = self.marionette.execute_async_script('return GaiaDataLayer.removeAllContacts();', special_powers=True)
        assert result, 'Unable to remove all contacts'
        self.marionette.set_script_timeout(default_script_timeout)
def get_setting(self, name):
return self.marionette.execute_async_script('return GaiaDataLayer.getSetting("%s")' % name, special_powers=True)
@property
def all_settings(self):
return self.get_setting('*')
def set_setting(self, name, value):
import json
value = json.dumps(value)
result = self.marionette.execute_async_script('return GaiaDataLayer.setSetting("%s", %s)' % (name, value), special_powers=True)
assert result, "Unable to change setting with name '%s' to '%s'" % (name, value)
def _get_pref(self, datatype, name):
return self.marionette.execute_script("return SpecialPowers.get%sPref('%s');" % (datatype, name), special_powers=True)
def _set_pref(self, datatype, name, value):
value = json.dumps(value)
self.marionette.execute_script("SpecialPowers.set%sPref('%s', %s);" % (datatype, name, value), special_powers=True)
def get_bool_pref(self, name):
"""Returns the value of a Gecko boolean pref, which is different from a Gaia setting."""
return self._get_pref('Bool', name)
def set_bool_pref(self, name, value):
"""Sets the value of a Gecko boolean pref, which is different from a Gaia setting."""
return self._set_pref('Bool', name, value)
def get_int_pref(self, name):
"""Returns the value of a Gecko integer pref, which is different from a Gaia setting."""
return self._get_pref('Int', name)
def set_int_pref(self, name, value):
"""Sets the value of a Gecko integer pref, which is different from a Gaia setting."""
return self._set_pref('Int', name, value)
def get_char_pref(self, name):
"""Returns the value of a Gecko string pref, which is different from a Gaia setting."""
return self._get_pref('Char', name)
def set_char_pref(self, name, value):
"""Sets the value of a Gecko string pref, which is different from a Gaia setting."""
return self._set_pref('Char', name, value)
def set_volume(self, value):
channels = ['alarm', 'content', 'notification']
for channel in channels:
self.set_setting('audio.volume.%s' % channel, value)
def bluetooth_enable(self):
self.marionette.switch_to_frame()
return self.marionette.execute_async_script("return GaiaDataLayer.enableBluetooth()")
def bluetooth_disable(self):
self.marionette.switch_to_frame()
return self.marionette.execute_async_script("return GaiaDataLayer.disableBluetooth()")
def bluetooth_pair_device(self, device_name):
return self.marionette.execute_async_script('return GaiaDataLayer.pairBluetoothDevice("%s")' % device_name)
def bluetooth_unpair_all_devices(self):
self.marionette.switch_to_frame()
self.marionette.execute_async_script('return GaiaDataLayer.unpairAllBluetoothDevices()')
def bluetooth_set_device_name(self, device_name):
result = self.marionette.execute_async_script('return GaiaDataLayer.bluetoothSetDeviceName(%s);' % device_name)
assert result, "Unable to set device's bluetooth name to %s" % device_name
def bluetooth_set_device_discoverable_mode(self, discoverable):
if (discoverable):
result = self.marionette.execute_async_script('return GaiaDataLayer.bluetoothSetDeviceDiscoverableMode(true);')
else:
result = self.marionette.execute_async_script('return GaiaDataLayer.bluetoothSetDeviceDiscoverableMode(false);')
assert result, 'Able to set the device bluetooth discoverable mode'
@property
def bluetooth_is_enabled(self):
return self.marionette.execute_script("return window.navigator.mozBluetooth.enabled")
@property
def is_cell_data_enabled(self):
return self.get_setting('ril.data.enabled')
def connect_to_cell_data(self):
self.marionette.switch_to_frame()
result = self.marionette.execute_async_script("return GaiaDataLayer.connectToCellData()", special_powers=True)
assert result, 'Unable to connect to cell data'
def disable_cell_data(self):
self.marionette.switch_to_frame()
result = self.marionette.execute_async_script("return GaiaDataLayer.disableCellData()", special_powers=True)
assert result, 'Unable to disable cell data'
    @property
    def is_cell_data_connected(self):
        # XXX: check bug-926169
        # this is used to keep all tests passing while introducing multi-sim
        # APIs: fall back from the single-SIM mozMobileConnection to the
        # first entry of the multi-SIM mozMobileConnections array.
        return self.marionette.execute_script('var mobileConnection = window.navigator.mozMobileConnection || ' +
                                              'window.navigator.mozMobileConnections && ' +
                                              'window.navigator.mozMobileConnections[0]; ' +
                                              'return mobileConnection.data.connected;')
def enable_cell_roaming(self):
self.set_setting('ril.data.roaming_enabled', True)
def disable_cell_roaming(self):
self.set_setting('ril.data.roaming_enabled', False)
@property
def is_wifi_enabled(self):
    """Whether mozWifiManager reports wifi as enabled."""
    return self.marionette.execute_script("return window.navigator.mozWifiManager.enabled;")

def enable_wifi(self):
    """Switch wifi on via GaiaDataLayer."""
    self.marionette.switch_to_frame()
    enabled = self.marionette.execute_async_script(
        "return GaiaDataLayer.enableWiFi()", special_powers=True)
    assert enabled, 'Unable to enable WiFi'

def disable_wifi(self):
    """Switch wifi off via GaiaDataLayer."""
    self.marionette.switch_to_frame()
    disabled = self.marionette.execute_async_script(
        "return GaiaDataLayer.disableWiFi()", special_powers=True)
    assert disabled, 'Unable to disable WiFi'

def connect_to_wifi(self, network=None):
    """Connect to *network*, defaulting to the 'wifi' entry in testvars."""
    network = network or self.testvars.get('wifi')
    assert network, 'No WiFi network provided'
    self.enable_wifi()
    self.marionette.switch_to_frame()
    connected = self.marionette.execute_async_script(
        "return GaiaDataLayer.connectToWiFi(%s)" % json.dumps(network))
    assert connected, 'Unable to connect to WiFi network'

def forget_all_networks(self):
    """Drop every remembered wifi network."""
    self.marionette.switch_to_frame()
    self.marionette.execute_async_script('return GaiaDataLayer.forgetAllNetworks()')

def is_wifi_connected(self, network=None):
    """Whether the device is connected to *network* (or testvars' wifi)."""
    network = network or self.testvars.get('wifi')
    assert network, 'No WiFi network provided'
    self.marionette.switch_to_frame()
    return self.marionette.execute_script("return GaiaDataLayer.isWiFiConnected(%s)" % json.dumps(network))
@property
def known_networks(self):
    """Wifi networks the device has remembered."""
    return self.marionette.execute_async_script('return GaiaDataLayer.getKnownNetworks()')

@property
def active_telephony_state(self):
    # Returns the state of only the currently active call or None if no active call
    return self.marionette.execute_script("return GaiaDataLayer.getMozTelephonyState()")

@property
def is_antenna_available(self):
    """Whether an antenna (headset) is available for the FM radio."""
    return self.marionette.execute_script('return window.navigator.mozFMRadio.antennaAvailable')

@property
def is_fm_radio_enabled(self):
    """Whether the FM radio is switched on."""
    return self.marionette.execute_script('return window.navigator.mozFMRadio.enabled')

@property
def fm_radio_frequency(self):
    """The FM radio's currently tuned frequency."""
    return self.marionette.execute_script('return window.navigator.mozFMRadio.frequency')

@property
def media_files(self):
    """All music, picture and video files known to the device."""
    files = []
    for collection in (self.music_files, self.picture_files, self.video_files):
        files.extend(collection)
    return files
def delete_all_sms(self):
    """Remove every SMS from the device; returns the Gaia call's result."""
    self.marionette.switch_to_frame()
    return self.marionette.execute_async_script("return GaiaDataLayer.deleteAllSms();", special_powers=True)

def delete_all_call_log_entries(self):
    """The call log needs to be open and focused in order for this to work."""
    self.marionette.execute_script('window.wrappedJSObject.RecentsDBManager.deleteAll();')

def kill_active_call(self):
    """Hang up the currently active telephony call, if any."""
    script = ("var telephony = window.navigator.mozTelephony; "
              "if(telephony.active) telephony.active.hangUp();")
    self.marionette.execute_script(script)

@property
def music_files(self):
    """All music files indexed on the device."""
    return self.marionette.execute_async_script('return GaiaDataLayer.getAllMusic();')

@property
def picture_files(self):
    """All pictures indexed on the device."""
    return self.marionette.execute_async_script('return GaiaDataLayer.getAllPictures();')

@property
def video_files(self):
    """All videos indexed on the device."""
    return self.marionette.execute_async_script('return GaiaDataLayer.getAllVideos();')

def sdcard_files(self, extension=''):
    """Files on the SD card, optionally filtered by *extension*."""
    files = self.marionette.execute_async_script('return GaiaDataLayer.getAllSDCardFiles();')
    if len(extension):
        return [name for name in files if name.endswith(extension)]
    return files

def send_sms(self, number, message):
    """Send *message* to *number* and assert the platform accepted it."""
    import json
    number = json.dumps(number)
    message = json.dumps(message)
    script = 'return GaiaDataLayer.sendSMS(%s, %s)' % (number, message)
    sent = self.marionette.execute_async_script(script, special_powers=True)
    assert sent, 'Unable to send SMS to recipient %s with text %s' % (number, message)
class GaiaDevice(object):
    """Wrapper for the device under test: device-manager access and
    B2G process lifecycle helpers (start/stop/restart, file push)."""

    def __init__(self, marionette, testvars=None):
        self.marionette = marionette
        self.testvars = testvars or {}

    @property
    def manager(self):
        """Lazily create and cache a mozdevice manager (devices only)."""
        if hasattr(self, '_manager') and self._manager:
            return self._manager
        if not self.is_android_build:
            raise Exception('Device manager is only available for devices.')
        # DM_TRANS selects the transport: 'adb' (default) or 'sut'.
        dm_type = os.environ.get('DM_TRANS', 'adb')
        if dm_type == 'adb':
            self._manager = mozdevice.DeviceManagerADB()
        elif dm_type == 'sut':
            host = os.environ.get('TEST_DEVICE')
            if not host:
                raise Exception('Must specify host with SUT!')
            self._manager = mozdevice.DeviceManagerSUT(host=host)
        else:
            raise Exception('Unknown device manager type: %s' % dm_type)
        return self._manager

    @property
    def is_android_build(self):
        # Cached in testvars so repeated calls skip the capability lookup.
        if self.testvars.get('is_android_build') is None:
            self.testvars['is_android_build'] = 'Android' in self.marionette.session_capabilities['platform']
        return self.testvars['is_android_build']

    @property
    def is_online(self):
        # Returns true if the device has a network connection established (cell data, wifi, etc)
        return self.marionette.execute_script('return window.navigator.onLine;')

    @property
    def has_mobile_connection(self):
        # XXX: check bug-926169
        # this is used to keep all tests passing while introducing multi-sim APIs
        return self.marionette.execute_script('var mobileConnection = window.navigator.mozMobileConnection || ' +
                                              'window.navigator.mozMobileConnections && ' +
                                              'window.navigator.mozMobileConnections[0]; ' +
                                              'return mobileConnection !== undefined')

    @property
    def has_wifi(self):
        # Presence of mozWifiManager implies wifi support; cached on first use.
        if not hasattr(self, '_has_wifi'):
            self._has_wifi = self.marionette.execute_script('return window.navigator.mozWifiManager !== undefined')
        return self._has_wifi

    def push_file(self, source, count=1, destination='', progress=None):
        """Push a local file to the device; with count > 1, make `count`
        on-device copies with `dd` and remove the original.

        progress -- optional object with an update(i) method.
        NOTE(review): the '_%s.' join only behaves for destinations with a
        single dot (name.ext) — confirm callers never pass multi-dot paths.
        """
        if not destination.count('.') > 0:
            # destination has no file name: append the source's base name
            destination = '/'.join([destination, source.rpartition(os.path.sep)[-1]])
        self.manager.mkDirs(destination)
        self.manager.pushFile(source, destination)
        if count > 1:
            for i in range(1, count + 1):
                # e.g. 'pic.jpg' -> 'pic_1.jpg'
                remote_copy = '_%s.'.join(iter(destination.split('.'))) % i
                self.manager._checkCmd(['shell', 'dd', 'if=%s' % destination, 'of=%s' % remote_copy])
                if progress:
                    progress.update(i)
            self.manager.removeFile(destination)

    def restart_b2g(self):
        """Stop then start the b2g process."""
        self.stop_b2g()
        time.sleep(2)
        self.start_b2g()

    def start_b2g(self):
        """Start b2g, open a Marionette session and, on device builds,
        block until the FTU or homescreen frame has finished loading."""
        if self.marionette.instance:
            # launch the gecko instance attached to marionette
            self.marionette.instance.start()
        elif self.is_android_build:
            self.manager.shellCheckOutput(['start', 'b2g'])
        else:
            raise Exception('Unable to start B2G')
        self.marionette.wait_for_port()
        self.marionette.start_session()
        if self.is_android_build:
            self.marionette.execute_async_script("""
window.addEventListener('mozbrowserloadend', function loaded(aEvent) {
  if (aEvent.target.src.indexOf('ftu') != -1 || aEvent.target.src.indexOf('homescreen') != -1) {
    window.removeEventListener('mozbrowserloadend', loaded);
    marionetteScriptFinished();
  }
});""", script_timeout=60000)
        # TODO: Remove this sleep when Bug 924912 is addressed
        time.sleep(5)

    def stop_b2g(self):
        """Stop b2g and clear the now-stale Marionette session state."""
        if self.marionette.instance:
            # close the gecko instance attached to marionette
            self.marionette.instance.close()
        elif self.is_android_build:
            self.manager.shellCheckOutput(['stop', 'b2g'])
        else:
            raise Exception('Unable to stop B2G')
        self.marionette.client.close()
        self.marionette.session = None
        self.marionette.window = None
class GaiaTestCase(MarionetteTestCase):
    """Base test case for Gaia UI tests.

    Resets the device to a known state before each test (settings, media,
    networks, contacts, running apps) and provides explicit-wait helpers.
    """

    _script_timeout = 60000  # ms, Marionette script timeout
    _search_timeout = 10000  # ms, Marionette implicit search timeout
    # deafult timeout in seconds for the wait_for methods
    _default_timeout = 30

    def __init__(self, *args, **kwargs):
        # 'restart' asks setUp to restart B2G; the endurance-only kwargs
        # are stripped so MarionetteTestCase does not see them.
        self.restart = kwargs.pop('restart', False)
        kwargs.pop('iterations', None)
        kwargs.pop('checkpoint_interval', None)
        MarionetteTestCase.__init__(self, *args, **kwargs)

    def setUp(self):
        try:
            MarionetteTestCase.setUp(self)
        except InvalidResponseException:
            # NOTE(review): the exception is swallowed whether or not
            # self.restart is set — this 'if' has no effect. Confirm intent.
            if self.restart:
                pass
        self.device = GaiaDevice(self.marionette, self.testvars)
        if self.restart and (self.device.is_android_build or self.marionette.instance):
            self.device.stop_b2g()
            if self.device.is_android_build:
                # revert device to a clean state
                self.device.manager.removeDir('/data/local/storage/persistent')
                self.device.manager.removeDir('/data/b2g/mozilla')
            self.device.start_b2g()
        # the emulator can be really slow!
        self.marionette.set_script_timeout(self._script_timeout)
        self.marionette.set_search_timeout(self._search_timeout)
        self.lockscreen = LockScreen(self.marionette)
        self.apps = GaiaApps(self.marionette)
        self.data_layer = GaiaData(self.marionette, self.testvars)
        # local import; presumably avoids a circular import — confirm
        from gaiatest.apps.keyboard.app import Keyboard
        self.keyboard = Keyboard(self.marionette)
        self.cleanUp()

    def cleanUp(self):
        """Restore the device to a pristine state before the test runs."""
        # remove media
        if self.device.is_android_build:
            for filename in self.data_layer.media_files:
                # filename is a fully qualified path
                self.device.manager.removeFile(filename)
        # Switch off keyboard FTU screen
        self.data_layer.set_setting("keyboard.ftu.enabled", False)
        # restore settings from testvars
        [self.data_layer.set_setting(name, value) for name, value in self.testvars.get('settings', {}).items()]
        # unlock
        self.lockscreen.unlock()
        # If we are restarting all of these values are reset to default earlier in the setUp
        if not self.restart:
            # disable passcode before restore settings from testvars
            self.data_layer.set_setting('lockscreen.passcode-lock.code', '1111')
            self.data_layer.set_setting('lockscreen.passcode-lock.enabled', False)
            # Change language back to English
            self.data_layer.set_setting("language.current", "en-US")
            # Switch off spanish keyboard before test
            self.data_layer.set_setting("keyboard.layouts.spanish", False)
            # Set do not track pref back to the default
            self.data_layer.set_setting('privacy.donottrackheader.value', '-1')
            if self.data_layer.get_setting('ril.radio.disabled'):
                # enable the device radio, disable Airplane mode
                self.data_layer.set_setting('ril.radio.disabled', False)
            # Re-set edge gestures pref to False
            self.data_layer.set_setting('edgesgesture.enabled', False)
            # disable carrier data connection
            if self.device.has_mobile_connection:
                self.data_layer.disable_cell_data()
            self.data_layer.disable_cell_roaming()
            if self.device.has_wifi:
                self.data_layer.enable_wifi()
                self.data_layer.forget_all_networks()
                self.data_layer.disable_wifi()
            # remove data
            self.data_layer.remove_all_contacts(self._script_timeout)
            # reset to home screen
            self.marionette.execute_script("window.wrappedJSObject.dispatchEvent(new Event('home'));")
        # kill any open apps
        self.apps.kill_all()
        # disable sound completely
        self.data_layer.set_volume(0)

    def install_marketplace(self):
        """Install the Marketplace Dev app if it is not already installed."""
        _yes_button_locator = (By.ID, 'app-install-install-button')
        # NOTE(review): the manifest URL ends with a space — looks
        # accidental; confirm before removing.
        mk = {"name": "Marketplace Dev",
              "manifest": "https://marketplace-dev.allizom.org/manifest.webapp ",
              }
        if not self.apps.is_app_installed(mk['name']):
            # install the marketplace dev app
            self.marionette.execute_script('navigator.mozApps.install("%s")' % mk['manifest'])
            # TODO add this to the system app object when we have one
            self.wait_for_element_displayed(*_yes_button_locator)
            self.marionette.find_element(*_yes_button_locator).tap()
            self.wait_for_element_not_displayed(*_yes_button_locator)

    def connect_to_network(self):
        """Ensure connectivity: try the LAN (wifi) first, then cell data."""
        if not self.device.is_online:
            try:
                self.connect_to_local_area_network()
            except:
                # NOTE(review): bare except also hides KeyboardInterrupt etc.
                if self.device.has_mobile_connection:
                    self.data_layer.connect_to_cell_data()
                else:
                    raise Exception('Unable to connect to network')
        assert self.device.is_online

    def connect_to_local_area_network(self):
        """Connect via wifi using testvars' network; raise if unavailable."""
        if not self.device.is_online:
            if self.testvars.get('wifi') and self.device.has_wifi:
                self.data_layer.connect_to_wifi()
                assert self.device.is_online
            else:
                raise Exception('Unable to connect to local area network')

    def push_resource(self, filename, count=1, destination=''):
        # push a bundled resource file onto the device's sdcard
        self.device.push_file(self.resource(filename), count, '/'.join(['sdcard', destination]))

    def resource(self, filename):
        """Absolute path of a file in the package's resources directory."""
        return os.path.abspath(os.path.join(os.path.dirname(__file__), 'resources', filename))

    def change_orientation(self, orientation):
        """ There are 4 orientation states which the phone can be passed in:
        portrait-primary(which is the default orientation), landscape-primary, portrait-secondary and landscape-secondary
        """
        # No-op when already in the requested orientation; otherwise wait
        # for the mozorientationchange event to confirm the switch.
        self.marionette.execute_async_script("""
            if (arguments[0] === arguments[1]) {
              marionetteScriptFinished();
            }
            else {
              var expected = arguments[1];
              window.screen.onmozorientationchange = function(e) {
                console.log("Received 'onmozorientationchange' event.");
                waitFor(
                  function() {
                    window.screen.onmozorientationchange = null;
                    marionetteScriptFinished();
                  },
                  function() {
                    return window.screen.mozOrientation === expected;
                  }
                );
              };
              console.log("Changing orientation to '" + arguments[1] + "'.");
              window.screen.mozLockOrientation(arguments[1]);
            };""", script_args=[self.screen_orientation, orientation])

    @property
    def screen_width(self):
        return self.marionette.execute_script('return window.screen.width')

    @property
    def screen_orientation(self):
        return self.marionette.execute_script('return window.screen.mozOrientation')

    def wait_for_element_present(self, by, locator, timeout=_default_timeout):
        """Poll every 0.5s until the element exists; return it, or raise
        TimeoutException after *timeout* seconds."""
        timeout = float(timeout) + time.time()
        while time.time() < timeout:
            time.sleep(0.5)
            try:
                return self.marionette.find_element(by, locator)
            except NoSuchElementException:
                pass
        else:
            # while/else: only reached when the loop times out
            raise TimeoutException(
                'Element %s not present before timeout' % locator)

    def wait_for_element_not_present(self, by, locator, timeout=_default_timeout):
        """Poll until the element can no longer be found."""
        timeout = float(timeout) + time.time()
        while time.time() < timeout:
            time.sleep(0.5)
            try:
                self.marionette.find_element(by, locator)
            except NoSuchElementException:
                break
        else:
            raise TimeoutException(
                'Element %s still present after timeout' % locator)

    def wait_for_element_displayed(self, by, locator, timeout=_default_timeout):
        """Poll until the element is displayed; the timeout message
        distinguishes 'never found' from 'found but hidden'."""
        timeout = float(timeout) + time.time()
        e = None
        while time.time() < timeout:
            time.sleep(0.5)
            try:
                if self.marionette.find_element(by, locator).is_displayed():
                    break
            except (NoSuchElementException, StaleElementException) as e:
                # NOTE(review): reading 'e' after the except block works in
                # Python 2 only; Python 3 unbinds the name.
                pass
        else:
            # This is an effortless way to give extra debugging information
            if isinstance(e, NoSuchElementException):
                raise TimeoutException('Element %s not present before timeout' % locator)
            else:
                raise TimeoutException('Element %s present but not displayed before timeout' % locator)

    def wait_for_element_not_displayed(self, by, locator, timeout=_default_timeout):
        """Poll until the element is hidden or gone."""
        timeout = float(timeout) + time.time()
        while time.time() < timeout:
            time.sleep(0.5)
            try:
                if not self.marionette.find_element(by, locator).is_displayed():
                    break
            except StaleElementException:
                # element was replaced; try again on the next iteration
                pass
            except NoSuchElementException:
                # gone entirely counts as not displayed
                break
        else:
            raise TimeoutException(
                'Element %s still visible after timeout' % locator)

    def wait_for_condition(self, method, timeout=_default_timeout,
                           message="Condition timed out"):
        """Calls the method provided with the driver as an argument until the \
        return value is not False."""
        end_time = time.time() + timeout
        while time.time() < end_time:
            try:
                value = method(self.marionette)
                if value:
                    return value
            except (NoSuchElementException, StaleElementException):
                pass
            time.sleep(0.5)
        else:
            raise TimeoutException(message)

    def is_element_present(self, by, locator):
        """True if the element can currently be found."""
        try:
            self.marionette.find_element(by, locator)
            return True
        except:
            # NOTE(review): bare except; narrowing to NoSuchElementException
            # would be safer — confirm no other exception is expected here.
            return False

    def is_element_displayed(self, by, locator):
        """True if the element exists and is displayed."""
        try:
            return self.marionette.find_element(by, locator).is_displayed()
        except (NoSuchElementException, ElementNotVisibleException):
            return False

    def tearDown(self):
        # drop helper references before the Marionette teardown
        self.lockscreen = None
        self.apps = None
        self.data_layer = None
        MarionetteTestCase.tearDown(self)
class GaiaEnduranceTestCase(GaiaTestCase):
    """Endurance variant: repeats a test method for N iterations and
    periodically checkpoints b2g memory (RSS) usage into log files.

    NOTE: this file uses Python 2 syntax (print statements, 0755 octal).
    """

    def __init__(self, *args, **kwargs):
        # iterations / checkpoint_interval are supplied by the endurance runner
        self.iterations = kwargs.pop('iterations') or 1
        self.checkpoint_interval = kwargs.pop('checkpoint_interval') or self.iterations
        GaiaTestCase.__init__(self, *args, **kwargs)

    def drive(self, test, app):
        """Run *test* repeatedly, checkpointing every checkpoint_interval
        iterations and always after the final one."""
        self.test_method = test
        self.app_under_test = app
        # Now drive the actual test case iterations
        for count in range(1, self.iterations + 1):
            self.iteration = count
            self.marionette.log("%s iteration %d of %d" % (self.test_method.__name__, count, self.iterations))
            # Print to console so can see what iteration we're on while test is running
            if self.iteration == 1:
                print "\n"
            print "Iteration %d of %d..." % (count, self.iterations)
            sys.stdout.flush()
            self.test_method()
            # Checkpoint time?
            if ((count % self.checkpoint_interval) == 0) or count == self.iterations:
                self.checkpoint()
        # Finished, now process checkpoint data into .json output
        self.process_checkpoint_data()

    def checkpoint(self):
        """Append a b2g-ps snapshot to the per-test checkpoint log."""
        # Console output so know what's happening if watching console
        print "Checkpoint..."
        sys.stdout.flush()
        # Sleep to give device idle time (for gc)
        idle_time = 30
        self.marionette.log("sleeping %d seconds to give the device some idle time" % idle_time)
        time.sleep(idle_time)
        # Dump out some memory status info
        self.marionette.log("checkpoint")
        self.cur_time = time.strftime("%Y%m%d%H%M%S", time.localtime())
        # If first checkpoint, create the file if it doesn't exist already
        if self.iteration in (0, self.checkpoint_interval):
            self.checkpoint_path = "checkpoints"
            if not os.path.exists(self.checkpoint_path):
                os.makedirs(self.checkpoint_path, 0755)
            self.log_name = "%s/checkpoint_%s_%s.log" % (self.checkpoint_path, self.test_method.__name__, self.cur_time)
            with open(self.log_name, 'a') as log_file:
                log_file.write('%s Gaia Endurance Test: %s\n' % (self.cur_time, self.test_method.__name__))
        output_str = self.device.manager.shellCheckOutput(["b2g-ps"])
        with open(self.log_name, 'a') as log_file:
            log_file.write('%s Checkpoint after iteration %d of %d:\n' % (self.cur_time, self.iteration, self.iterations))
            log_file.write('%s\n' % output_str)

    def close_app(self):
        """Close the app under test via the home button and cards view."""
        # Close the current app (self.app) by using the home button
        self.marionette.switch_to_frame()
        self.marionette.execute_script("window.wrappedJSObject.dispatchEvent(new Event('home'));")
        # Bring up the cards view
        _cards_view_locator = ('id', 'cards-view')
        self.marionette.execute_script("window.wrappedJSObject.dispatchEvent(new Event('holdhome'));")
        self.wait_for_element_displayed(*_cards_view_locator)
        # Sleep a bit
        time.sleep(5)
        # Tap the close icon for the current app
        locator_part_two = '#cards-view li.card[data-origin*="%s"] .close-card' % self.app_under_test.lower()
        _close_button_locator = ('css selector', locator_part_two)
        close_card_app_button = self.marionette.find_element(*_close_button_locator)
        close_card_app_button.tap()

    def process_checkpoint_data(self):
        """Summarize the checkpoint log into per-test and suite summaries."""
        # Process checkpoint data into .json
        self.marionette.log("processing checkpoint data from %s" % self.log_name)
        # Open the checkpoint file
        checkpoint_file = open(self.log_name, 'r')
        # Grab every b2g rss reading for each checkpoint
        b2g_rss_list = []
        for next_line in checkpoint_file:
            if next_line.startswith("b2g"):
                # column 6 of b2g-ps output is the RSS value
                b2g_rss_list.append(next_line.split()[5])
        # Close the checkpoint file
        checkpoint_file.close()
        # Calculate the average b2g_rss
        # NOTE(review): ZeroDivisionError if no 'b2g' lines were logged;
        # integer division under Python 2 truncates the average.
        total = 0
        for b2g_mem_value in b2g_rss_list:
            total += int(b2g_mem_value)
        avg_rss = total / len(b2g_rss_list)
        # Create a summary text file
        summary_name = self.log_name.replace('.log', '_summary.log')
        summary_file = open(summary_name, 'w')
        # Write the summarized checkpoint data
        summary_file.write('test_name: %s\n' % self.test_method.__name__)
        summary_file.write('completed: %s\n' % self.cur_time)
        summary_file.write('app_under_test: %s\n' % self.app_under_test.lower())
        summary_file.write('total_iterations: %d\n' % self.iterations)
        summary_file.write('checkpoint_interval: %d\n' % self.checkpoint_interval)
        summary_file.write('b2g_rss: ')
        summary_file.write(', '.join(b2g_rss_list))
        summary_file.write('\navg_rss: %d\n\n' % avg_rss)
        # Close the summary file
        summary_file.close()
        # Write to suite summary file
        suite_summary_file_name = '%s/avg_b2g_rss_suite_summary.log' % self.checkpoint_path
        suite_summary_file = open(suite_summary_file_name, 'a')
        suite_summary_file.write('%s: %s\n' % (self.test_method.__name__, avg_rss))
        suite_summary_file.close()
| 41.376564 | 139 | 0.648474 | 4,227 | 36,370 | 5.388219 | 0.139815 | 0.087285 | 0.057165 | 0.043379 | 0.456401 | 0.395724 | 0.361257 | 0.330216 | 0.282446 | 0.226466 | 0 | 0.004811 | 0.251389 | 36,370 | 878 | 140 | 41.42369 | 0.831711 | 0.063761 | 0 | 0.283721 | 0 | 0.00155 | 0.189417 | 0.086549 | 0 | 0 | 0 | 0.001139 | 0.029457 | 0 | null | null | 0.010853 | 0.031008 | null | null | 0.004651 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
e053d242f75ab9ddd50217184c0c2cd558a9aad9 | 5,591 | py | Python | library/__mozilla__/pyjamas/DOM.py | certik/pyjamas | 5bb72e63e50f09743ac986f4c9690ba50c499ba9 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | library/__mozilla__/pyjamas/DOM.py | certik/pyjamas | 5bb72e63e50f09743ac986f4c9690ba50c499ba9 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | library/__mozilla__/pyjamas/DOM.py | certik/pyjamas | 5bb72e63e50f09743ac986f4c9690ba50c499ba9 | [
"ECL-2.0",
"Apache-2.0"
def buttonClick(button):
    """Synthesize and dispatch a native 'click' MouseEvent on *button*
    (Mozilla-specific pyjamas DOM override)."""
    JS("""
    var doc = button.ownerDocument;
    if (doc != null) {
        var evt = doc.createEvent('MouseEvents');
        evt.initMouseEvent('click', true, true, null, 0, 0,
            0, 0, 0, false, false, false, false, 0, null);
        button.dispatchEvent(evt);
    }
    """)
def compare(elem1, elem2):
    """Return true when elem1 and elem2 are the same DOM node, falling back
    to == where isSameNode is unavailable; two nulls compare equal."""
    JS("""
    if (!elem1 && !elem2) {
        return true;
    } else if (!elem1 || !elem2) {
        return false;
    }
    if (!elem1.isSameNode) {
        return (elem1 == elem2);
    }
    return (elem1.isSameNode(elem2));
    """)
def eventGetButton(evt):
    """Map Mozilla's event.which button codes to the harness codes:
    middle (2) -> 4, right (3) -> 2, anything else passed through (0 default)."""
    JS("""
    var button = evt.which;
    if(button == 2) {
        return 4;
    } else if (button == 3) {
        return 2;
    } else {
        return button || 0;
    }
    """)
# This is what is in GWT 1.5 for getAbsoluteLeft. err...
#"""
# // We cannot use DOMImpl here because offsetLeft/Top return erroneous
# // values when overflow is not visible. We have to difference screenX
# // here due to a change in getBoxObjectFor which causes inconsistencies
# // on whether the calculations are inside or outside of the element's
# // border.
# try {
# return $doc.getBoxObjectFor(elem).screenX
# - $doc.getBoxObjectFor($doc.documentElement).screenX;
# } catch (e) {
# // This works around a bug in the FF3 betas. The bug
# // should be fixed before they release, so this can
# // be removed at a later date.
# // https://bugzilla.mozilla.org/show_bug.cgi?id=409111
# // DOMException.WRONG_DOCUMENT_ERR == 4
# if (e.code == 4) {
# return 0;
# }
# throw e;
# }
#"""
def getAbsoluteLeft(elem):
    """Return the element's absolute left coordinate in document space,
    using getBoundingClientRect on Firefox 3+ or getBoxObjectFor on older
    Firefox (compensating for scrolled ancestors)."""
    JS("""
    // Firefox 3 expects getBoundingClientRect
    // getBoundingClientRect can be float: 73.1 instead of 74, see
    // gwt's workaround at user/src/com/google/gwt/dom/client/DOMImplMozilla.java:47
    // Please note, their implementation has 1px offset.
    if ( typeof elem.getBoundingClientRect == 'function' ) {
        var left = Math.ceil(elem.getBoundingClientRect().left);
        return left + $doc.body.scrollLeft + $doc.documentElement.scrollLeft;
    }
    // Older Firefox can use getBoxObjectFor
    else {
        var left = $doc.getBoxObjectFor(elem).x;
        var parent = elem.parentNode;
        while (parent) {
            if (parent.scrollLeft > 0) {
                left = left - parent.scrollLeft;
            }
            parent = parent.parentNode;
        }
        return left + $doc.body.scrollLeft + $doc.documentElement.scrollLeft;
    }
    """)
# This is what is in GWT 1.5 for getAbsoluteTop. err...
#"""
# // We cannot use DOMImpl here because offsetLeft/Top return erroneous
# // values when overflow is not visible. We have to difference screenY
# // here due to a change in getBoxObjectFor which causes inconsistencies
# // on whether the calculations are inside or outside of the element's
# // border.
# try {
# return $doc.getBoxObjectFor(elem).screenY
# - $doc.getBoxObjectFor($doc.documentElement).screenY;
# } catch (e) {
# // This works around a bug in the FF3 betas. The bug
# // should be fixed before they release, so this can
# // be removed at a later date.
# // https://bugzilla.mozilla.org/show_bug.cgi?id=409111
# // DOMException.WRONG_DOCUMENT_ERR == 4
# if (e.code == 4) {
# return 0;
# }
# throw e;
# }
#"""
def getAbsoluteTop(elem):
    """Return the element's absolute top coordinate in document space;
    same strategy as getAbsoluteLeft but for the vertical axis."""
    JS("""
    // Firefox 3 expects getBoundingClientRect
    if ( typeof elem.getBoundingClientRect == 'function' ) {
        var top = Math.ceil(elem.getBoundingClientRect().top);
        return top + $doc.body.scrollTop + $doc.documentElement.scrollTop;
    }
    // Older Firefox can use getBoxObjectFor
    else {
        var top = $doc.getBoxObjectFor(elem).y;
        var parent = elem.parentNode;
        while (parent) {
            if (parent.scrollTop > 0) {
                top -= parent.scrollTop;
            }
            parent = parent.parentNode;
        }
        return top + $doc.body.scrollTop + $doc.documentElement.scrollTop;
    }
    """)
def getChildIndex(parent, child):
    """Return the index of *child* among parent's element (nodeType 1)
    children, or -1 when not found."""
    JS("""
    var count = 0, current = parent.firstChild;
    while (current) {
        if (! current.isSameNode) {
            if (current == child) {
                return count;
            }
        }
        else if (current.isSameNode(child)) {
            return count;
        }
        if (current.nodeType == 1) {
            ++count;
        }
        current = current.nextSibling;
    }
    return -1;
    """)
def isOrHasChild(parent, child):
    """Return true when *parent* is *child* or an ancestor of it; walks up
    the parentNode chain, tolerating cross-document permission errors."""
    JS("""
    while (child) {
        if ((!parent.isSameNode)) {
            if (parent == child) {
                return true;
            }
        }
        else if (parent.isSameNode(child)) {
            return true;
        }
        try {
            child = child.parentNode;
        } catch(e) {
            // Give up on 'Permission denied to get property
            // HTMLDivElement.parentNode'
            // See https://bugzilla.mozilla.org/show_bug.cgi?id=208427
            return false;
        }
        if (child && (child.nodeType != 1)) {
            child = null;
        }
    }
    return false;
    """)
def releaseCapture(elem):
    """Release mouse capture when *elem* currently holds it, clearing both
    DOM.sCaptureElem and the window-level __captureElem reference."""
    JS("""
    if ((DOM.sCaptureElem != null) && DOM.compare(elem, DOM.sCaptureElem))
        DOM.sCaptureElem = null;

    if (!elem.isSameNode) {
        if (elem == $wnd.__captureElem) {
            $wnd.__captureElem = null;
        }
    }
    else if (elem.isSameNode($wnd.__captureElem)) {
        $wnd.__captureElem = null;
    }
    """)
| 29.119792 | 84 | 0.571275 | 609 | 5,591 | 5.220033 | 0.280788 | 0.033973 | 0.027682 | 0.021705 | 0.505505 | 0.485373 | 0.431268 | 0.406103 | 0.301353 | 0.287512 | 0 | 0.017414 | 0.301556 | 5,591 | 191 | 85 | 29.272251 | 0.796671 | 0.296548 | 0 | 0.330827 | 0 | 0.007519 | 0.898584 | 0.183784 | 0 | 0 | 0 | 0 | 0 | 1 | 0.06015 | false | 0 | 0 | 0 | 0.195489 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
e05432743bd72af1411301793f19ae278f8a6b5a | 485 | py | Python | apps/vendors/migrations/0090_auto_20160610_2125.py | ExpoAshique/ProveBanking__s | f0b45fffea74d00d14014be27aa50fe5f42f6903 | [
"MIT"
] | null | null | null | apps/vendors/migrations/0090_auto_20160610_2125.py | ExpoAshique/ProveBanking__s | f0b45fffea74d00d14014be27aa50fe5f42f6903 | [
"MIT"
] | null | null | null | apps/vendors/migrations/0090_auto_20160610_2125.py | ExpoAshique/ProveBanking__s | f0b45fffea74d00d14014be27aa50fe5f42f6903 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2016-06-10 21:25
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: redefines Vendor.email.

    dependencies = [
        ('vendors', '0089_auto_20160602_2123'),
    ]

    operations = [
        # Re-declare 'email' as an optional EmailField with Django's default
        # max_length of 254 and an explicit verbose name.
        migrations.AlterField(
            model_name='vendor',
            name='email',
            field=models.EmailField(blank=True, max_length=254, verbose_name='Email'),
        ),
    ]
| 23.095238 | 86 | 0.626804 | 55 | 485 | 5.327273 | 0.818182 | 0.061433 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.096419 | 0.251546 | 485 | 20 | 87 | 24.25 | 0.710744 | 0.138144 | 0 | 0 | 1 | 0 | 0.110843 | 0.055422 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.153846 | 0 | 0.384615 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
e0554c3395746111d418fbf380163f0e080e4265 | 1,260 | py | Python | pytorch_gleam/search/rerank_format.py | Supermaxman/pytorch-gleam | 8b0d8dddc812e8ae120c9760fd44fe93da3f902d | [
"Apache-2.0"
] | null | null | null | pytorch_gleam/search/rerank_format.py | Supermaxman/pytorch-gleam | 8b0d8dddc812e8ae120c9760fd44fe93da3f902d | [
"Apache-2.0"
] | null | null | null | pytorch_gleam/search/rerank_format.py | Supermaxman/pytorch-gleam | 8b0d8dddc812e8ae120c9760fd44fe93da3f902d | [
"Apache-2.0"
] | null | null | null |
import torch
import argparse
from collections import defaultdict
import os
import json
def load_predictions(input_path):
pred_list = []
for file_name in os.listdir(input_path):
if file_name.endswith('.pt'):
preds = torch.load(os.path.join(input_path, file_name))
pred_list.extend(preds)
question_scores = defaultdict(lambda: defaultdict(dict))
p_count = 0
u_count = 0
for prediction in pred_list:
doc_pass_id = prediction['id']
q_p_id = prediction['question_id']
# score = prediction['pos_score']
score = prediction['pos_score'] - prediction['neg_score']
if doc_pass_id not in question_scores or q_p_id not in question_scores[doc_pass_id]:
p_count += 1
u_count += 1
question_scores[doc_pass_id][q_p_id] = score
print(f'{p_count} unique predictions')
print(f'{u_count} total predictions')
return question_scores
def main():
    """CLI entry point: convert .pt prediction files into a JSON score map."""
    parser = argparse.ArgumentParser()
    parser.add_argument('-i', '--input_path', required=True)
    parser.add_argument('-o', '--output_path', required=True)
    args = parser.parse_args()

    scores = load_predictions(args.input_path)
    with open(args.output_path, 'w') as out_file:
        json.dump(scores, out_file)


if __name__ == '__main__':
    main()
| 25.714286 | 86 | 0.743651 | 191 | 1,260 | 4.586387 | 0.350785 | 0.071918 | 0.041096 | 0.054795 | 0.084475 | 0 | 0 | 0 | 0 | 0 | 0 | 0.003683 | 0.138095 | 1,260 | 48 | 87 | 26.25 | 0.802947 | 0.024603 | 0 | 0 | 0 | 0 | 0.103589 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.054054 | false | 0.081081 | 0.135135 | 0 | 0.216216 | 0.054054 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
e055f89145eb203a0a63bfdad54931948d02ec37 | 388 | py | Python | des036.py | LeonardoPereirajr/Curso_em_video_Python | 9d8a97ba3389c8e86b37dfd089fab5d04adc146d | [
"MIT"
] | null | null | null | des036.py | LeonardoPereirajr/Curso_em_video_Python | 9d8a97ba3389c8e86b37dfd089fab5d04adc146d | [
"MIT"
] | null | null | null | des036.py | LeonardoPereirajr/Curso_em_video_Python | 9d8a97ba3389c8e86b37dfd089fab5d04adc146d | [
"MIT"
# Loan-approval exercise: the monthly installment must not exceed 30% of
# the buyer's salary.
# Generalization: read the currency amounts as float so fractional values
# (e.g. "1500.50") are accepted; the original int() calls crashed on them.
casa = float(input('Qual o valor da casa? '))
sal = float(input('Qual seu salario? '))
prazo = int(input('Quantos meses deseja pagar ? '))
parcela = casa / prazo
margem = sal * (30 / 100)  # at most 30% of the salary
if parcela > margem:
    print('Este negocio não foi aprovado, aumente o prazo .')
else:
    print("Negocio aprovado pois a parcela é de R$ {} e voce pode pagar R$ {} mensais".format(parcela, margem))
| 38.8 | 111 | 0.664948 | 59 | 388 | 4.372881 | 0.644068 | 0.093023 | 0.093023 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.016026 | 0.195876 | 388 | 9 | 112 | 43.111111 | 0.810897 | 0 | 0 | 0 | 0 | 0 | 0.503958 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.222222 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
e05606e62a7f260ca58d2f3413562fa3ee898b64 | 1,000 | py | Python | HackBitApp/migrations/0003_roadmap.py | SukhadaM/HackBit-Interview-Preparation-Portal | f4c6b0d7168a4ea4ffcf1569183b1614752d9946 | [
"MIT"
] | null | null | null | HackBitApp/migrations/0003_roadmap.py | SukhadaM/HackBit-Interview-Preparation-Portal | f4c6b0d7168a4ea4ffcf1569183b1614752d9946 | [
"MIT"
] | null | null | null | HackBitApp/migrations/0003_roadmap.py | SukhadaM/HackBit-Interview-Preparation-Portal | f4c6b0d7168a4ea4ffcf1569183b1614752d9946 | [
"MIT"
] | null | null | null | # Generated by Django 3.1.7 on 2021-03-27 18:22
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: creates the Roadmap model.

    dependencies = [
        ('HackBitApp', '0002_company_photo'),
    ]

    operations = [
        migrations.CreateModel(
            name='Roadmap',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # company_name is unique and indexed for fast lookups
                ('company_name', models.CharField(db_index=True, max_length=200, unique=True)),
                # photo1 is required; photo2/photo3 are optional
                ('photo1', models.ImageField(upload_to='photos/company/roadmap')),
                ('photo2', models.ImageField(blank=True, upload_to='photos/company/roadmap')),
                ('photo3', models.ImageField(blank=True, upload_to='photos/company/roadmap')),
            ],
            options={
                'verbose_name': 'roadmap',
                'verbose_name_plural': 'roadmaps',
                'ordering': ('company_name',),
            },
        ),
    ]
| 34.482759 | 114 | 0.571 | 98 | 1,000 | 5.673469 | 0.581633 | 0.059353 | 0.07554 | 0.113309 | 0.241007 | 0.190647 | 0.190647 | 0.190647 | 0.190647 | 0 | 0 | 0.035112 | 0.288 | 1,000 | 28 | 115 | 35.714286 | 0.745787 | 0.045 | 0 | 0 | 1 | 0 | 0.210913 | 0.069255 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.045455 | 0 | 0.181818 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
e0581bc2242266c4f411267aa587a7bfd0afc840 | 965 | py | Python | main/models.py | StevenSume/EasyCMDB | c2c44c9efe2de2729659d81ef886abff242ac1c5 | [
"Apache-2.0"
] | 2 | 2019-08-23T06:04:12.000Z | 2019-09-16T07:27:16.000Z | main/models.py | StevenSume/EasyCMDB | c2c44c9efe2de2729659d81ef886abff242ac1c5 | [
"Apache-2.0"
] | null | null | null | main/models.py | StevenSume/EasyCMDB | c2c44c9efe2de2729659d81ef886abff242ac1c5 | [
"Apache-2.0"
] | null | null | null | from .app import db
class Project(db.Model):
    """A named project; Item rows reference it via Item.project_id."""

    __tablename__ = 'projects'

    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    # Unique + indexed: projects are looked up by name.
    project_name = db.Column(db.String(64), unique=True, index=True)

    def to_dict(self):
        """Return a JSON-serializable dict of this row."""
        mydict = {
            'id': self.id,
            'project_name': self.project_name
        }
        return mydict

    def __repr__(self):
        # BUG FIX: instances have no __name__ attribute, so the previous
        # 'self.__name__' raised AttributeError whenever repr() was taken.
        # Identify the row by its project name instead.
        return '<Project %r>' % self.project_name
class Item(db.Model):
    """A key/value configuration item belonging to a project."""

    __tablename__ = 'Items'

    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    # Plain integer reference to Project.id (no FK constraint declared).
    project_id = db.Column(db.Integer)
    key = db.Column(db.String(64), nullable=False)
    value = db.Column(db.String(64), nullable=False)

    def to_dict(self):
        """Return a JSON-serializable dict of this row."""
        mydict = {
            'id': self.id,
            'project_id': self.project_id,
            'key': self.key,
            'value': self.value
        }
        return mydict

    def __repr__(self):
        # BUG FIX: 'self.__name__' does not exist on instances and raised
        # AttributeError; identify the row by its key instead.
        return '<Item %r>' % self.key
| 26.805556 | 67 | 0.592746 | 121 | 965 | 4.446281 | 0.280992 | 0.089219 | 0.111524 | 0.066915 | 0.630112 | 0.561338 | 0.453532 | 0.33829 | 0.33829 | 0.211896 | 0 | 0.008584 | 0.275648 | 965 | 35 | 68 | 27.571429 | 0.761087 | 0 | 0 | 0.413793 | 0 | 0 | 0.070539 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.137931 | false | 0 | 0.034483 | 0.068966 | 0.655172 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
e059b01690fb071d4b03811c7664f63e0007961b | 3,914 | py | Python | lib_exec/StereoPipeline/libexec/asp_image_utils.py | sebasmurphy/iarpa | aca39cc5390a153a9779a636ab2523e65cb6d3b0 | [
"MIT"
] | 20 | 2017-02-01T14:54:57.000Z | 2022-01-25T06:34:35.000Z | lib_exec/StereoPipeline/libexec/asp_image_utils.py | sebasmurphy/iarpa | aca39cc5390a153a9779a636ab2523e65cb6d3b0 | [
"MIT"
] | 3 | 2020-04-21T12:11:26.000Z | 2021-01-10T07:00:51.000Z | lib_exec/StereoPipeline/libexec/asp_image_utils.py | sebasmurphy/iarpa | aca39cc5390a153a9779a636ab2523e65cb6d3b0 | [
"MIT"
] | 10 | 2017-12-18T18:45:25.000Z | 2021-11-22T02:43:03.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# __BEGIN_LICENSE__
# Copyright (c) 2009-2013, United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration. All
# rights reserved.
#
# The NGT platform is licensed under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance with the
# License. You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# __END_LICENSE__
"""
Basic functions for working with images on disk.
"""
import sys, os, re, subprocess, string, time, errno
import asp_string_utils
def stripRgbImageAlphaChannel(inputPath, outputPath):
    """Make an RGB copy of an RGBA image using gdal_translate.

    Writes a LZW-compressed, 256x256-tiled copy of inputPath containing
    only the first three bands (R, G, B).
    """
    cmd = ('gdal_translate ' + inputPath + ' ' + outputPath +
           ' -b 1 -b 2 -b 3 -co "COMPRESS=LZW" -co "TILED=YES"'
           ' -co "BLOCKXSIZE=256" -co "BLOCKYSIZE=256"')
    # BUG FIX: 'print cmd' was a Python-2-only statement that breaks the
    # whole module under Python 3; the call form works in both.
    print(cmd)
    os.system(cmd)
def getImageSize(imagePath):
    """Returns the size [samples, lines] in an image.

    Shells out to 'gdalinfo' and parses its "Size is <w>, <h>" line.
    Raises Exception if the file does not exist.
    """
    # Make sure the input file exists
    if not os.path.exists(imagePath):
        raise Exception('Image file ' + imagePath + ' not found!')

    # Use subprocess to suppress the command output
    cmd = ['gdalinfo', imagePath]
    p = subprocess.Popen(cmd, stdout=subprocess.PIPE)
    textOutput, err = p.communicate()
    # NOTE(review): under Python 3, communicate() returns bytes and the
    # str searches below would fail; this assumes Python 2 -- confirm.

    # Extract the size from the text: everything between "Size is" and
    # the end of that line, e.g. "Size is 1024, 768".
    sizePos = textOutput.find('Size is')
    endPos = textOutput.find('\n', sizePos+7)
    sizeStr = textOutput[sizePos+7:endPos]
    sizeStrs = sizeStr.strip().split(',')
    numSamples = int(sizeStrs[0])
    numLines = int(sizeStrs[1])

    size = [numSamples, numLines]
    return size
def isIsisFile(filePath):
    """Return True when the path looks like an ISIS file.

    Any path whose extension is exactly '.cub' is treated as ISIS.
    """
    # splitext -> (root, ext); compare the extension directly.
    _, ext = os.path.splitext(filePath)
    return ext == '.cub'
def getImageStats(imagePath):
    """Obtains some image statistics from gdalinfo.

    Returns a list with one (min, max, mean, stddev) tuple per band,
    parsed from 'gdalinfo -stats' output. Raises Exception if the file
    does not exist.
    """
    if not os.path.exists(imagePath):
        raise Exception('Image file ' + imagePath + ' not found!')

    # Call command line tool silently (stdout captured, not echoed).
    cmd = ['gdalinfo', imagePath, '-stats']
    p = subprocess.Popen(cmd, stdout=subprocess.PIPE)
    textOutput, err = p.communicate()
    # NOTE(review): under Python 3 communicate() returns bytes; the str
    # searches below assume Python 2 -- confirm before porting.

    # Statistics are computed seperately for each band
    bandStats = []
    band = 0
    while (True): # Loop until we run out of bands
        # Look for the stats line for this band; gdalinfo numbers bands
        # from 1, e.g. "Band 1 Block=...".
        bandString = 'Band ' + str(band+1) + ' Block='
        bandLoc = textOutput.find(bandString)
        if bandLoc < 0:
            return bandStats # Quit if we did not find it

        # Now parse out the statistics for this band; each find starts at
        # bandLoc so we pick up this band's values, not an earlier band's.
        bandMaxStart = textOutput.find('STATISTICS_MAXIMUM=', bandLoc)
        bandMeanStart = textOutput.find('STATISTICS_MEAN=', bandLoc)
        bandMinStart = textOutput.find('STATISTICS_MINIMUM=', bandLoc)
        bandStdStart = textOutput.find('STATISTICS_STDDEV=', bandLoc)

        bandMax = asp_string_utils.getNumberAfterEqualSign(textOutput, bandMaxStart)
        bandMean = asp_string_utils.getNumberAfterEqualSign(textOutput, bandMeanStart)
        bandMin = asp_string_utils.getNumberAfterEqualSign(textOutput, bandMinStart)
        bandStd = asp_string_utils.getNumberAfterEqualSign(textOutput, bandStdStart)

        # Add results to the output list
        bandStats.append( (bandMin, bandMax, bandMean, bandStd) )

        band = band + 1 # Move to the next band
| 34.946429 | 155 | 0.67348 | 478 | 3,914 | 5.462343 | 0.481172 | 0.037534 | 0.02681 | 0.056683 | 0.171582 | 0.099579 | 0.099579 | 0.099579 | 0.099579 | 0.099579 | 0 | 0.010302 | 0.231221 | 3,914 | 111 | 156 | 35.261261 | 0.857428 | 0.314768 | 0 | 0.173913 | 0 | 0.021739 | 0.113951 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.043478 | null | null | 0.021739 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
e05d022e20ec708234ba466419ce63a57d30ac77 | 2,716 | py | Python | PythonScripting/NumbersInPython.py | Neo-sunny/pythonProgs | a9d2359d8a09d005d0ba6f94d7d256bf91499793 | [
"MIT"
] | null | null | null | PythonScripting/NumbersInPython.py | Neo-sunny/pythonProgs | a9d2359d8a09d005d0ba6f94d7d256bf91499793 | [
"MIT"
] | null | null | null | PythonScripting/NumbersInPython.py | Neo-sunny/pythonProgs | a9d2359d8a09d005d0ba6f94d7d256bf91499793 | [
"MIT"
] | null | null | null | """
Demonstration of numbers in Python
"""
# Python has an integer type called int (arbitrary precision: no overflow)
print("int")
print("---")
print(0)
print(1)
print(-3)
print(70383028364830)
print("")

# Python has a real number type called float (IEEE-754 double precision)
print("float")
print("-----")
print(0.0)
print(7.35)
print(-43.2)
print("")

# Limited precision: doubles hold only ~15-17 significant decimal digits,
# so these literals are rounded when printed.
print("Precision")
print("---------")
print(4.56372883832331773)
print(1.23456789012345678)
print("")

# Scientific/exponential notation
print("Scientific notation")
print("-------------------")
print(5e32)
print(999999999999999999999999999999999999999.9)
print("")

# Infinity: a float literal beyond the double range overflows to inf
print("Infinity")
print("--------")
print(1e500)
print(-1e500)
print("")

# Conversions: int() truncates toward zero, it does not round
print("Conversions between numeric types")
print("---------------------------------")
print(float(3))
print(float(99999999999999999999999999999999999999))
print(int(3.0))
print(int(3.7))
print(int(-3.7))

"""
Demonstration of simple arithmetic expressions in Python
"""

# Unary + and -
print("Unary operators")
print(+3)
print(-5)
print(+7.86)
print(-3348.63)
print("")

# Simple arithmetic: mixing int and float yields float
print("Addition and Subtraction")
print(1 + 2)
print(48 - 89)
print(3.45 + 2.7)
print(87.3384 - 12.35)
print(3 + 6.7)
print(9.8 - 4)
print("")

print("Multiplication")
print(3 * 2)
print(7.8 * 27.54)
print(7 * 8.2)
print("")

# / is true division (float result); // is floor division
print("Division")
print(8 / 2)
print(3 / 2)
print(7.538 / 14.3)
print(8 // 2)
print(3 // 2)
print(7.538 // 14.3)
print("")

print("Exponentiation")
print(3 ** 2)
print(5 ** 4)
print(32.6 ** 7)
print(9 ** 0.5)

"""
Demonstration of compound arithmetic expressions in Python
"""

# Expressions can include multiple operations
print("Compound expressions")
print(3 + 5 + 7 + 27)

# Operators with the same precedence are evaluated from left to right
print(18 - 6 + 4)
print("")

# Operator precedence defines how expressions are evaluated
# (note: ** binds tighter than unary -, so -3 ** 2 is -(3 ** 2) == -9)
print("Operator precedence")
print(7 + 3 * 5)
print(5.5 * 6 // 2 + 8)
print(-3 ** 2)
print("")

# Use parentheses to change evaluation order
print("Grouping with parentheses")
print((7 + 3) * 5)
print(5.5 * ((6 // 2) + 8))
print((-3) ** 2)

"""
Demonstration of the use of variables and how to assign values to
them.
"""

# The = operator can be used to assign values to variables
bakers_dozen = 12 + 1
temperature = 93

# Variables can be used as values and in expressions
print(temperature, bakers_dozen)
print("celsius:", (temperature - 32) * 5 / 9)
print("fahrenheit:", float(temperature))

# You can assign a different value to an existing variable
temperature = 26
print("new value:", temperature)

# Multiple variables can be used in arbitrary expressions
offset = 32
multiplier = 5.0 / 9.0
celsius = (temperature - offset) * multiplier
print("celsius value:", celsius)
| 17.522581 | 65 | 0.674521 | 387 | 2,716 | 4.728682 | 0.30491 | 0.036066 | 0.022951 | 0.032787 | 0.065574 | 0.05847 | 0.05847 | 0.05847 | 0.05847 | 0.05847 | 0 | 0.122475 | 0.143225 | 2,716 | 154 | 66 | 17.636364 | 0.663945 | 0.238218 | 0 | 0.120879 | 0 | 0 | 0.183106 | 0.017984 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.934066 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 |
e05e6c4440c357c867a4c38e37f726c4d615e768 | 1,676 | py | Python | 3DBeam/source/solving_strategies/strategies/linear_solver.py | JoZimmer/Beam-Models | e701c0bae6e3035e7a07cc590da4a132b133dcff | [
"BSD-3-Clause"
] | null | null | null | 3DBeam/source/solving_strategies/strategies/linear_solver.py | JoZimmer/Beam-Models | e701c0bae6e3035e7a07cc590da4a132b133dcff | [
"BSD-3-Clause"
] | null | null | null | 3DBeam/source/solving_strategies/strategies/linear_solver.py | JoZimmer/Beam-Models | e701c0bae6e3035e7a07cc590da4a132b133dcff | [
"BSD-3-Clause"
] | 1 | 2022-01-05T17:32:32.000Z | 2022-01-05T17:32:32.000Z | from source.solving_strategies.strategies.solver import Solver
class LinearSolver(Solver):
    """Time-stepping solver for a linear system.

    Each time step is a single call into the integration scheme with no
    iteration or residual check (contrast with nonlinear strategies).
    All arrays and the scheme itself are set up by the Solver base class.
    """

    def __init__(self,
                 array_time, time_integration_scheme, dt,
                 comp_model,
                 initial_conditions,
                 force,
                 structure_model):
        # No extra state here; everything is handled by the base class.
        super().__init__(array_time, time_integration_scheme, dt,
                         comp_model, initial_conditions, force, structure_model)

    def _print_solver_info(self):
        print("Linear Solver")

    def solve(self):
        # time loop: one scheme solve per entry in array_time
        for i in range(0, len(self.array_time)):
            self.step = i
            current_time = self.array_time[i]
            #print("time: {0:.2f}".format(current_time))
            # force[:, i] is the load vector at this time step
            self.scheme.solve_single_step(self.force[:, i])

            # appending results to the result arrays (one column per step)
            self.displacement[:, i] = self.scheme.get_displacement()
            self.velocity[:, i] = self.scheme.get_velocity()
            self.acceleration[:, i] = self.scheme.get_acceleration()

            # TODO: only calculate reaction when user wants it
            # if self.structure_model is not None:
            #     self.dynamic_reaction[:, i] = self._compute_reaction()
            # reaction computed in dynamic analysis

            # TODO: only calculate reaction when user wants it
            # moved reaction computation to dynamic analysis level
            # AK . this doesnt considers the support reaction check
            #if self.structure_model is not None:
            #    self.dynamic_reaction[:, i] = self._compute_reaction()

            # advance the scheme's internal state to the next step
            self.scheme.update()
| 38.976744 | 80 | 0.590095 | 183 | 1,676 | 5.180328 | 0.404372 | 0.052743 | 0.041139 | 0.044304 | 0.390295 | 0.390295 | 0.390295 | 0.390295 | 0.305907 | 0.305907 | 0 | 0.00265 | 0.324582 | 1,676 | 42 | 81 | 39.904762 | 0.834806 | 0.318616 | 0 | 0 | 0 | 0 | 0.011525 | 0 | 0 | 0 | 0 | 0.02381 | 0 | 1 | 0.142857 | false | 0 | 0.047619 | 0 | 0.238095 | 0.095238 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
e05ea195ece947573587efca60ad05b204af43f6 | 1,095 | py | Python | payment/migrations/0002_auto_20171125_0022.py | Littledelma/mofadog | 5a7c6672da248e400a8a5746506a6e7b273c9510 | [
"MIT"
] | null | null | null | payment/migrations/0002_auto_20171125_0022.py | Littledelma/mofadog | 5a7c6672da248e400a8a5746506a6e7b273c9510 | [
"MIT"
] | 1 | 2021-06-08T03:28:08.000Z | 2021-06-08T03:28:08.000Z | payment/migrations/0002_auto_20171125_0022.py | Littledelma/mofadog | 5a7c6672da248e400a8a5746506a6e7b273c9510 | [
"MIT"
] | 1 | 2021-06-08T03:23:34.000Z | 2021-06-08T03:23:34.000Z | # -*- coding: utf-8 -*-
# Generated by Django 1.11.7 on 2017-11-24 16:22
from __future__ import unicode_literals
import datetime
from django.db import migrations, models
from django.utils.timezone import utc
class Migration(migrations.Migration):
    """Auto-generated: refresh the datetime defaults on history_order.

    The datetime values below were frozen at generation time
    (2017-11-24 16:22 UTC), as is normal for generated migrations.
    NOTE: the 'daed_date' verbose_name typo is preserved deliberately --
    migrations record historical state and should not be hand-edited.
    """

    dependencies = [
        ('payment', '0001_initial'),
    ]

    operations = [
        migrations.AlterField(
            model_name='history_order',
            name='dead_date',
            field=models.DateTimeField(default=datetime.datetime(2017, 11, 24, 16, 22, 1, 719840, tzinfo=utc), verbose_name='daed_date'),
        ),
        migrations.AlterField(
            model_name='history_order',
            name='order_date',
            field=models.DateTimeField(default=datetime.datetime(2017, 11, 24, 16, 22, 1, 719662, tzinfo=utc), verbose_name='order date'),
        ),
        migrations.AlterField(
            model_name='history_order',
            name='valid_date',
            field=models.DateTimeField(default=datetime.datetime(2017, 11, 24, 16, 22, 1, 719758, tzinfo=utc), verbose_name='valid_date'),
        ),
    ]
| 33.181818 | 138 | 0.63379 | 127 | 1,095 | 5.307087 | 0.393701 | 0.035608 | 0.047478 | 0.059347 | 0.514837 | 0.497033 | 0.497033 | 0.430267 | 0.284866 | 0.284866 | 0 | 0.093976 | 0.242009 | 1,095 | 32 | 139 | 34.21875 | 0.718072 | 0.0621 | 0 | 0.36 | 1 | 0 | 0.113281 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.16 | 0 | 0.28 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
e05fe1dabdb8d88cb6b7077a77b9ecb4a63a39fd | 841 | py | Python | src/sqlfluff/rules/L024.py | NathanHowell/sqlfluff | 9eb30226d77727cd613947e144a0abe483151f18 | [
"MIT"
] | 3,024 | 2020-10-01T11:03:51.000Z | 2022-03-31T16:42:00.000Z | src/sqlfluff/rules/L024.py | NathanHowell/sqlfluff | 9eb30226d77727cd613947e144a0abe483151f18 | [
"MIT"
] | 2,395 | 2020-09-30T12:59:21.000Z | 2022-03-31T22:05:29.000Z | src/sqlfluff/rules/L024.py | NathanHowell/sqlfluff | 9eb30226d77727cd613947e144a0abe483151f18 | [
"MIT"
] | 246 | 2020-10-02T17:08:03.000Z | 2022-03-30T17:43:51.000Z | """Implementation of Rule L024."""
from sqlfluff.core.rules.doc_decorators import document_fix_compatible
from sqlfluff.rules.L023 import Rule_L023
@document_fix_compatible
class Rule_L024(Rule_L023):
    """Single whitespace expected after USING in JOIN clause.

    | **Anti-pattern**

    .. code-block:: sql

        SELECT b
        FROM foo
        LEFT JOIN zoo USING(a)

    | **Best practice**

    | The • character represents a space.
    | Add a space after USING, to avoid confusing it
    | for a function.

    .. code-block:: sql
        :force:

        SELECT b
        FROM foo
        LEFT JOIN zoo USING•(a)

    """

    # Reuses Rule_L023's whitespace-insertion logic; the attributes below
    # retarget it to the segment right after USING in a join clause.
    expected_mother_segment_type = "join_clause"
    pre_segment_identifier = ("name", "using")
    post_segment_identifier = ("type", "bracketed")
    expand_children = None
    # allow_newline=True: a line break after USING does not trigger the rule.
    allow_newline = True
e06275178027bd16b4be36faab1b32af531b42cb | 1,047 | py | Python | flask-graphene-sqlalchemy/models.py | JovaniPink/flask-apps | de887f15261c286986cf38d234d49f7e4eb79c1a | [
"MIT"
] | null | null | null | flask-graphene-sqlalchemy/models.py | JovaniPink/flask-apps | de887f15261c286986cf38d234d49f7e4eb79c1a | [
"MIT"
] | null | null | null | flask-graphene-sqlalchemy/models.py | JovaniPink/flask-apps | de887f15261c286986cf38d234d49f7e4eb79c1a | [
"MIT"
] | null | null | null | import os
from graphene_sqlalchemy import SQLAlchemyObjectType
from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy.ext.declarative import declarative_base
# Connection string comes from the environment, with a local-dev fallback.
POSTGRES_CONNECTION_STRING = (
    os.environ.get("POSTGRES_CONNECTION_STRING")
    or "postgres://postgres:password@localhost:6432/postgres"
)

engine = create_engine(POSTGRES_CONNECTION_STRING, convert_unicode=True)
# Thread-local session registry; explicit commits, no autoflush.
db_session = scoped_session(
    sessionmaker(autocommit=False, autoflush=False, bind=engine)
)
Base = declarative_base()
# Give every model a .query attribute bound to the scoped session,
# used by the SQLAlchemyObjectType subclasses in this module.
Base.query = db_session.query_property()
class UserModel(Base):
    """SQLAlchemy model for the "users" table."""

    __tablename__ = "users"
    id = Column(Integer, primary_key=True)
    name = Column(String)
    # Integer account balance; units/currency not specified here.
    balance = Column(Integer)
class MinAmountModel(Base):
    """SQLAlchemy model for the single-column "min_amount" table."""

    __tablename__ = "min_amount"
    # The amount value itself doubles as the primary key.
    amount = Column(Integer, primary_key=True)
class User(SQLAlchemyObjectType):
    """GraphQL object type auto-derived from UserModel."""

    class Meta:
        model = UserModel
class MinAmount(SQLAlchemyObjectType):
    """GraphQL object type auto-derived from MinAmountModel."""

    class Meta:
        model = MinAmountModel
| 26.175 | 72 | 0.770774 | 117 | 1,047 | 6.65812 | 0.444444 | 0.066752 | 0.092426 | 0.05905 | 0.06932 | 0 | 0 | 0 | 0 | 0 | 0 | 0.004479 | 0.147087 | 1,047 | 39 | 73 | 26.846154 | 0.867861 | 0 | 0 | 0.068966 | 0 | 0 | 0.088825 | 0.074499 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.034483 | 0.172414 | 0 | 0.586207 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
e068d2bbe0be95225acd32e5324a05a51bc85276 | 5,641 | py | Python | pandas 9 - Statistics Information on data sets.py | PythonProgramming/Pandas-Basics-with-2.7 | a6ecd5ac7c25dba83e934549903f229de89290d3 | [
"MIT"
] | 10 | 2015-07-16T05:46:10.000Z | 2020-10-28T10:35:50.000Z | pandas 9 - Statistics Information on data sets.py | PythonProgramming/Pandas-Basics-with-2.7 | a6ecd5ac7c25dba83e934549903f229de89290d3 | [
"MIT"
] | null | null | null | pandas 9 - Statistics Information on data sets.py | PythonProgramming/Pandas-Basics-with-2.7 | a6ecd5ac7c25dba83e934549903f229de89290d3 | [
"MIT"
] | 9 | 2017-01-31T18:57:25.000Z | 2019-09-10T08:52:57.000Z | import pandas as pd
"""Demonstration of pandas statistics (describe/corr/cov) and of building
cross-ticker correlation tables from Yahoo Finance data.

NOTE(review): ``pandas.io.data`` was removed from pandas years ago; this
script only runs on very old pandas. The modern replacement is the
separate ``pandas-datareader`` package (and Yahoo's old API is gone).
"""
import pandas as pd
from pandas import DataFrame  # kept for backward compatibility; unused below

df = pd.read_csv('sp500_ohlc.csv', index_col='Date', parse_dates=True)

# High-minus-low daily range: a crude measure of intraday volatility.
df['H-L'] = df.High - df.Low

# count (rows), mean, std, min/max and quartiles for every column.
print(df.describe())
x = input('enter to cont')

# Pairwise correlation: how each column moves in relation to the others,
# e.g. whether the H-L range is correlated with Volume.
print(df.corr())
x = input('enter to cont')

# Covariance measures how two variables change together; correlation is
# covariance normalized to [-1, 1], so covariance is the "raw strength".
print(df.cov())
x = input('enter to cont')

print(df[['Volume', 'H-L']].corr())
x = input('enter to cont')

import datetime
import pandas.io.data

# Shared date window for every Yahoo Finance download below.
START = datetime.datetime(2011, 10, 1)
END = datetime.datetime(2014, 1, 1)


def fetch_adj_close(symbol):
    """Download daily data for *symbol* and return its 'Adj Close' series."""
    data = pd.io.data.get_data_yahoo(symbol, start=START, end=END)
    return data['Adj Close']


def correlation_table(symbols):
    """Build a DataFrame of adjusted closes, one column per symbol.

    Replaces the original copy-pasted download-and-delete-columns blocks;
    column order follows the order of *symbols*.
    """
    return pd.DataFrame({symbol: fetch_adj_close(symbol) for symbol in symbols})


# Small comparison: Citigroup against a few large caps. A popular form of
# analysis (especially in forex) is comparing correlations between pairs.
corComp = correlation_table(['C', 'AAPL', 'MSFT', 'TSLA'])
print(corComp.head())
x = input('enter to cont')

print(corComp.corr())
x = input('enter to cont')

# Larger comparison across sectors.
corComp = correlation_table([
    'C', 'BAC', 'MSFT', 'TSLA', 'AAPL', 'BBRY',
    'CMG', 'EBAY', 'JPM', 'SBUX', 'TGT', 'WFC',
])
print(corComp.head())
x = input('enter to cont')

print(corComp.corr())
x = input('enter to cont')

fancy = corComp.corr()
fancy.to_csv('bigmoney.csv')
| 32.606936 | 96 | 0.565148 | 762 | 5,641 | 4.135171 | 0.217848 | 0.162488 | 0.040622 | 0.055855 | 0.619486 | 0.603301 | 0.506506 | 0.488099 | 0.488099 | 0.488099 | 0 | 0.053617 | 0.29906 | 5,641 | 172 | 97 | 32.796512 | 0.743298 | 0.204928 | 0 | 0.730769 | 0 | 0 | 0.104508 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.038462 | 0 | 0.038462 | 0.096154 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
e0712713ade0b6560e6616c234015a83c6ef39c9 | 696 | py | Python | models/database_models/comment_model.py | RuiCoreSci/Flask-Restful | 03f98a17487d407b69b853a9bf0ed20d2c5b003b | [
"MIT"
] | 7 | 2020-05-24T02:15:46.000Z | 2020-11-26T07:14:44.000Z | models/database_models/comment_model.py | RuiCoreSci/Flask-Restful | 03f98a17487d407b69b853a9bf0ed20d2c5b003b | [
"MIT"
] | 12 | 2020-05-17T10:46:29.000Z | 2021-05-06T20:08:37.000Z | models/database_models/comment_model.py | RuiCoreSci/Flask-Restful | 03f98a17487d407b69b853a9bf0ed20d2c5b003b | [
"MIT"
] | 4 | 2020-05-09T07:26:09.000Z | 2021-10-31T07:09:10.000Z | from sqlalchemy import Integer, Text, DateTime, func, Boolean, text
from models.database_models import Base, Column
class Comment(Base):
    """A user's comment on a post.

    The ``comment=`` keyword arguments below are database-level column
    comments (stored in the schema) and are left as written.
    """

    __tablename__ = "comment"

    id = Column(Integer, primary_key=True, )
    # ID of the commenting user (no FK constraint declared here).
    user_id = Column(Integer, nullable=False, comment="评论用户的 ID")
    # ID of the post being commented on.
    post_id = Column(Integer, nullable=False, comment="Post 文章的 ID")
    content = Column(Text, nullable=False, comment="用户的评论")
    create_time = Column(DateTime, server_default=func.now(), comment="创建时间")
    # Refreshed automatically on every UPDATE via onupdate=func.now().
    update_time = Column(DateTime, server_default=func.now(), onupdate=func.now(), comment="更新时间")
    # Soft-delete flag: rows are marked deleted rather than removed.
    deleted = Column(Boolean, default=False, server_default=text('0'), nullable=False, comment="该项目是否被删除")
| 40.941176 | 106 | 0.728448 | 89 | 696 | 5.550562 | 0.438202 | 0.105263 | 0.161943 | 0.093117 | 0.295547 | 0.295547 | 0.153846 | 0 | 0 | 0 | 0 | 0.001669 | 0.139368 | 696 | 16 | 107 | 43.5 | 0.823038 | 0 | 0 | 0 | 0 | 0 | 0.068966 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.181818 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
e0776cc9711477b5d215a8a600b08e98b5af4d8a | 857 | py | Python | deal/linter/_extractors/returns.py | m4ta1l/deal | 2a8e9bf412b8635b00a2b798dd8802375814a1c8 | [
"MIT"
] | 1 | 2020-09-05T13:54:16.000Z | 2020-09-05T13:54:16.000Z | deal/linter/_extractors/returns.py | m4ta1l/deal | 2a8e9bf412b8635b00a2b798dd8802375814a1c8 | [
"MIT"
] | 7 | 2020-09-05T13:54:28.000Z | 2020-11-27T05:59:19.000Z | deal/linter/_extractors/returns.py | Smirenost/deal | 2a8e9bf412b8635b00a2b798dd8802375814a1c8 | [
"MIT"
] | null | null | null | # built-in
from typing import Optional
# app
from .common import TOKENS, Extractor, Token, traverse
from .value import UNKNOWN, get_value
get_returns = Extractor()
inner_extractor = Extractor()
def has_returns(body: list) -> bool:
    """Tell whether the body contains any return or yield node."""
    return_like = TOKENS.RETURN + TOKENS.YIELD
    return any(
        isinstance(node, return_like)
        for node in traverse(body=body)
    )
@get_returns.register(*TOKENS.RETURN)
def handle_return(expr) -> Optional[Token]:
    """Extract a Token for a return statement's value.

    Returns None when the value cannot be statically evaluated.
    """
    value = get_value(expr=expr.value)
    if value is UNKNOWN:
        # Not statically known; nothing useful to report.
        return None
    return Token(value=value, line=expr.lineno, col=expr.value.col_offset)
@get_returns.register(*TOKENS.YIELD)
def handle_yield(expr) -> Optional[Token]:
    """Extract a Token for a yield expression's value.

    Mirrors handle_return; returns None when the value is not static.
    """
    value = get_value(expr=expr.value)
    if value is UNKNOWN:
        # Not statically known; nothing useful to report.
        return None
    return Token(value=value, line=expr.lineno, col=expr.value.col_offset)
| 25.205882 | 74 | 0.711785 | 119 | 857 | 5.02521 | 0.319328 | 0.06689 | 0.060201 | 0.080268 | 0.41806 | 0.41806 | 0.41806 | 0.41806 | 0.41806 | 0.41806 | 0 | 0 | 0.183197 | 857 | 33 | 75 | 25.969697 | 0.854286 | 0.014002 | 0 | 0.363636 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.136364 | false | 0 | 0.136364 | 0 | 0.545455 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
e078ffec67d1b2046e248c3ee5d65b353731cbf4 | 1,479 | py | Python | examples/basic/wire_feedthrough.py | souviksaha97/spydrnet-physical | b07bcc152737158ea7cbebf0ef844abe49d29c5e | [
"BSD-3-Clause"
] | null | null | null | examples/basic/wire_feedthrough.py | souviksaha97/spydrnet-physical | b07bcc152737158ea7cbebf0ef844abe49d29c5e | [
"BSD-3-Clause"
] | null | null | null | examples/basic/wire_feedthrough.py | souviksaha97/spydrnet-physical | b07bcc152737158ea7cbebf0ef844abe49d29c5e | [
"BSD-3-Clause"
] | null | null | null | """
==========================================
Genrating feedthrough from single instance
==========================================
This example demonstrates how to generate a feedthrough wire connection for
a given scalar or vector wire.
**Initial Design**
.. hdl-diagram:: ../../../examples/basic/_initial_design.v
:type: netlistsvg
:align: center
:module: top
**Output1** ``wire0`` feedthrough from ``inst_2_0``
.. hdl-diagram:: ../../../examples/basic/_output_wire.v
:type: netlistsvg
:align: center
:module: top
**Output2** ``bus_in`` feedthrough from ``inst_1_0``
.. hdl-diagram:: ../../../examples/basic/_output_bus.v
:type: netlistsvg
:align: center
:module: top
"""
from os import path
import spydrnet as sdn
import spydrnet_physical as sdnphy

# --- Example 1: feed the scalar wire "wire0" through instance inst_2_0 ---
netlist = sdnphy.load_netlist_by_name('basic_hierarchy')

top = netlist.top_instance.reference
cable0 = next(top.get_cables("wire0"))
inst2 = next(top.get_instances("inst_2_0"))

# Write out the unmodified design first for comparison.
sdn.compose(netlist, '_initial_design.v', skip_constraints=True)

top.create_feedthrough(inst2, cable0)
top.create_unconn_wires()
sdn.compose(netlist, '_output_wire.v', skip_constraints=True)

# --- Example 2: feed the "bus_in" vector through instance inst_1_0 ---
# Reload the pristine netlist so example 1's edits do not carry over.
netlist = sdnphy.load_netlist_by_name('basic_hierarchy')
top = netlist.top_instance.reference

bus_in = next(top.get_cables("bus_in"))
inst1 = next(top.get_instances("inst_1_0"))

cables = top.create_feedthrough(inst1, bus_in)
top.create_unconn_wires()
sdn.compose(netlist, '_output_bus.v', skip_constraints=True)
| 24.65 | 74 | 0.699797 | 194 | 1,479 | 5.087629 | 0.365979 | 0.020263 | 0.040527 | 0.069909 | 0.448835 | 0.343465 | 0.343465 | 0.237082 | 0.149949 | 0.149949 | 0 | 0.013688 | 0.110886 | 1,479 | 59 | 75 | 25.067797 | 0.736882 | 0.473969 | 0 | 0.333333 | 1 | 0 | 0.131339 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.166667 | 0 | 0.166667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
e07a13e1121d2676a50044d556f0800f60bfd2f7 | 2,849 | py | Python | team_fundraising/text.py | namtel-hp/fundraising-website | 30cb0cd2bd4505454295d11715e70712525234a3 | [
"MIT"
] | 5 | 2019-10-26T12:41:31.000Z | 2022-03-13T08:30:29.000Z | team_fundraising/text.py | Maalik1/fundraising-website | a5fcd7e8a5966f299f57c22af8c739a3d6cd501a | [
"MIT"
] | 9 | 2021-03-18T21:27:36.000Z | 2022-03-11T23:42:46.000Z | team_fundraising/text.py | Maalik1/fundraising-website | a5fcd7e8a5966f299f57c22af8c739a3d6cd501a | [
"MIT"
] | 2 | 2021-01-11T14:19:01.000Z | 2022-02-18T19:18:38.000Z |
class Donation_text:
    """User-facing strings shown/emailed around the donation flow.

    These are runtime strings; edit with care since donors see them.
    """

    # Shown as a message across the top of the page on return from a donation
    # used in views.py:new_donation()
    thank_you = (
        "Thank you for your donation. "
        "You may need to refresh this page to see the donation."
    )

    confirmation_email_subject = (
        'Thank you for donating to the Triple Crown for Heart! '
    )

    # Start of the email sent confirming the paypal payment has gone through
    # used in paypal.py:process_paypal()
    confirmation_email_opening = (
        'Thank you for your donation of '
    )

    # Closing of the email sent confirming the paypal payment has gone through
    # used in paypal.py:process_paypal()
    # NOTE(review): the tax year is hard-coded to 2019 -- needs updating
    # per campaign.
    confirmation_email_closing = (
        '.\n\nFor all donations over $20, you will receive a tax receipt for '
        'the 2019 tax year.'
        '\nYour PayPal receipt should arrive in a separate email.\n'
    )

    notification_email_subject = (
        "You got a donation!"
    )

    notification_email_opening = (
        "Great news! You've just received a donation of "
    )

    notification_email_closing = (
        "\n\nAwesome work! They would probably appreciate "
        "a quick thank you email.\n\n"
        "-- Triple Crown for Heart\n"
    )
class Fundraiser_text:
    """User-facing strings shown/emailed around fundraiser signup.

    These are runtime strings; edit with care since fundraisers see them.
    """

    # Subject of the email sent on signup
    signup_email_subject = (
        "Welcome to fundraising for the Triple Crown for Heart!"
    )

    # Start of the email sent when someone signs up
    # used in views.py:signup()
    signup_email_opening = (
        "Thanks for signing up to fundraise with us!\n"
        "Your fundraising page can be found at:\n"
    )

    # Closing of the email sent when someone signs up
    # used in views.py:signup()
    signup_email_closing = (
        '\n\nYou can change your information by using the "Login" link at the '
        'top of that page.'
        '\n\nThe easiest way to start fundraising is to post the above link '
        'on social media or write a short email to your friends telling them '
        'about your ride.'
        '\nDon\'t forget to include the link to your page!\n'
    )

    # Message shown at the top of the fundraiser page after signing up
    # used in views.py:signup()
    signup_return_message = (
        "Thank you for signing up. Sharing your fundraiser page on social "
        "media or over email is the best way to get donations."
    )

    # Shown when signup hits an existing username with a wrong password;
    # contains inline HTML rendered on the signup page.
    signup_wrong_password_existing_user = (
        "The username already exists, but the password entered is incorrect. "
        "If you were already a fundraiser for a previous campaign, please "
        "enter your previous password or use "
        "<a href='/team_fundraising/accounts/password_reset/'>"
        "Forgot your password</a>. If this is your first campaign, "
        "please choose a different username."
    )
| 33.916667 | 79 | 0.65251 | 397 | 2,849 | 4.602015 | 0.38539 | 0.019157 | 0.027367 | 0.038314 | 0.252326 | 0.219486 | 0.219486 | 0.204707 | 0.204707 | 0.204707 | 0 | 0.002921 | 0.279045 | 2,849 | 83 | 80 | 34.325301 | 0.886563 | 0.206739 | 0 | 0 | 0 | 0 | 0.577728 | 0.022272 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.090909 | 0 | 0 | 0.254545 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
0eb2577f85f04e68e802521ef8915750223e0174 | 624 | py | Python | tests/wagtail_live/test_apps.py | wagtail/wagtail-live | dd769be089d457cf36db2506520028bc5f506ac3 | [
"BSD-3-Clause"
] | 22 | 2021-06-07T20:36:18.000Z | 2022-03-29T01:48:58.000Z | tests/wagtail_live/test_apps.py | wagtail/wagtail-live | dd769be089d457cf36db2506520028bc5f506ac3 | [
"BSD-3-Clause"
] | 73 | 2021-05-21T16:08:44.000Z | 2022-03-20T23:59:59.000Z | tests/wagtail_live/test_apps.py | wagtail/wagtail-live | dd769be089d457cf36db2506520028bc5f506ac3 | [
"BSD-3-Clause"
] | 11 | 2021-06-10T10:05:13.000Z | 2022-02-12T13:31:34.000Z | from django.apps import apps
from django.test import override_settings
from wagtail_live.signals import live_page_update
def test_live_page_update_signal_receivers():
    """With no publisher configured, nothing is connected to the signal."""
    connected = live_page_update.receivers
    assert len(connected) == 0
@override_settings(
    WAGTAIL_LIVE_PUBLISHER="tests.testapp.publishers.DummyWebsocketPublisher"
)
def test_live_page_update_signal_receivers_websocket():
    """ready() with a websocket publisher connects a live_page_update receiver."""
    app_config = apps.get_app_config("wagtail_live")
    app_config.ready()
    receiver = None
    try:
        # Receiver should be connected, no IndexError
        receiver = live_page_update.receivers[0]
    finally:
        # Fix: only disconnect when the lookup succeeded. Previously a failed
        # lookup left `receiver` unbound, so the finally block raised
        # NameError and masked the real IndexError.
        if receiver is not None:
            live_page_update.disconnect(receiver)
| 27.130435 | 77 | 0.780449 | 80 | 624 | 5.725 | 0.4625 | 0.104803 | 0.183406 | 0.065502 | 0.262009 | 0.157205 | 0.157205 | 0 | 0 | 0 | 0 | 0.003774 | 0.150641 | 624 | 22 | 78 | 28.363636 | 0.860377 | 0.06891 | 0 | 0 | 0 | 0 | 0.103627 | 0.082902 | 0 | 0 | 0 | 0 | 0.066667 | 1 | 0.133333 | false | 0 | 0.2 | 0 | 0.333333 | 0 | 0 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
0eb6190157c1946b37b5fd1be18f551d0e559832 | 612 | py | Python | python/Patterns/inheritance/main.py | zinderud/ysa | e34d3f4c7afab3976d86f5d27edfcd273414e496 | [
"Apache-2.0"
] | null | null | null | python/Patterns/inheritance/main.py | zinderud/ysa | e34d3f4c7afab3976d86f5d27edfcd273414e496 | [
"Apache-2.0"
] | 1 | 2017-12-27T10:09:22.000Z | 2017-12-27T10:22:47.000Z | python/Patterns/inheritance/main.py | zinderud/ysa | e34d3f4c7afab3976d86f5d27edfcd273414e496 | [
"Apache-2.0"
class Yaratik(object):
    """Base creature ('Yaratik' is Turkish for 'creature') with simple movement."""

    def move_left(self):
        print('Moving left...')

    def move_right(self):
        # Bug fix: previously printed 'Moving left...' (copy-paste error).
        print('Moving right...')
class Ejderha(Yaratik):
    """Dragon: inherits movement from Yaratik and adds fire breathing."""

    def Ates_puskurtme(self):
        # 'breathe fire' -- prints a Turkish message ('I breathed fire!').
        print('ates puskurtum!')
class Zombie(Yaratik):
    """Zombie: inherits movement from Yaratik and adds a bite attack."""

    def Isirmak(self):
        # 'bite' -- prints a Turkish message ('I just bit!').
        print('Isirdim simdi!')
# Instantiate the parent class directly; it only has the movement methods.
enemy = Yaratik()
enemy.move_left()
# ejderha also includes all functions from parent class (yaratik)
ejderha = Ejderha()
ejderha.move_left()
ejderha.Ates_puskurtme()
# Zombie is called the (child class), inherits from Yaratik (parent class)
zombie = Zombie()
zombie.move_right()
zombie.Isirmak()
| 18 | 74 | 0.679739 | 76 | 612 | 5.381579 | 0.394737 | 0.08802 | 0.07335 | 0.09291 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.19281 | 612 | 33 | 75 | 18.545455 | 0.827935 | 0.222222 | 0 | 0.105263 | 0 | 0 | 0.120507 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.210526 | false | 0 | 0 | 0 | 0.368421 | 0.210526 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
0eb71b68b065b14b8eebff52fa3bbffc15201b7a | 1,527 | py | Python | clustering/graph_utils.py | perathambkk/ml-techniques | 5d6fd122322342c0b47dc65d09c4425fd73f2ea9 | [
"MIT"
] | null | null | null | clustering/graph_utils.py | perathambkk/ml-techniques | 5d6fd122322342c0b47dc65d09c4425fd73f2ea9 | [
"MIT"
] | null | null | null | clustering/graph_utils.py | perathambkk/ml-techniques | 5d6fd122322342c0b47dc65d09c4425fd73f2ea9 | [
"MIT"
] | null | null | null | """
Author: Peratham Wiriyathammabhum
"""
import numpy as np
import pandas as pd
from sklearn.neighbors import NearestNeighbors
def affinity_graph(X):
	'''
	Return the dense pairwise affinity matrix of the rows of X as a numpy
	array, where entry (i, j) is the squared L2 distance between rows i and j.
	'''
	num_points = X.shape[0]
	A = np.zeros((num_points, num_points))
	for i in range(num_points):
		for j in range(i + 1, num_points):
			d2 = np.sum((X[i] - X[j]) ** 2)  # squared L2 distance
			A[i, j] = A[j, i] = d2  # matrix is symmetric
	return A
def knn_graph(X, knn=4):
	'''
	Build a symmetric k-nearest-neighbor distance graph.

	Returns an (n, n) numpy array where A[i, j] holds the L2 distance from
	point i to point j whenever j is among i's `knn` nearest neighbors.
	'''
	ni, nd = X.shape
	# Ask for knn+1 neighbors because each point is its own nearest neighbor.
	nbrs = NearestNeighbors(n_neighbors=(knn+1), algorithm='ball_tree').fit(X)
	distances, indices = nbrs.kneighbors(X)
	A = np.zeros((ni, ni))
	for dist, ind in zip(distances, indices):
		i0 = ind[0]  # first entry is the point itself (distance 0)
		for k in range(1, knn+1):
			# Bug fix: the original wrote A[i0, k] using the loop counter as a
			# column index; the correct column is the neighbor's index ind[k].
			j = ind[k]
			d = dist[k]
			A[i0, j] = d
			A[j, i0] = d  # by symmetry
	return A
def sparse_affinity_graph(X):
	'''
	TODO: make this return a numpy/scipy sparse matrix; for now the result is
	a dense numpy array of pairwise squared L2 distances (same as
	affinity_graph).
	'''
	num_points = X.shape[0]
	A = np.zeros((num_points, num_points))
	for row in range(num_points):
		for col in range(row + 1, num_points):
			diff = X[row] - X[col]
			d2 = (diff ** 2).sum()  # squared L2 distance
			A[row, col] = d2
			A[col, row] = d2  # symmetric
	return A
def laplacian_graph(X, mode='affinity', knn=3, eta=0.01, sigma=2.5):
	'''
	The unnormalized graph Laplacian, L = D - W.

	`mode` selects how the weight matrix W is built:
	  'affinity'        : squared-distance graph with entries above eta zeroed
	  'nearestneighbor' : symmetric knn distance graph
	  'gaussian'        : Gaussian (RBF) kernel exp(-d^2 / (2*sigma^2))
	'''
	if mode == 'affinity':
		W = affinity_graph(X)
		W[abs(W) > eta] = 0
	elif mode == 'nearestneighbor':
		W = knn_graph(X, knn=knn)
	elif mode == 'gaussian':
		W = affinity_graph(X)  # entries are squared L2 distances d^2
		bandwidth = 2.0*(sigma**2)
		# Bug fix: the Gaussian similarity is exp(-d^2 / bandwidth); the
		# previous exp(W) / bandwidth grew with distance instead of decaying.
		W = np.exp(-W / bandwidth)
	else:
		# Bug fix: previously fell through with W undefined (NameError below).
		raise ValueError('Unknown mode: {}'.format(mode))
	D = np.diag(W.sum(axis=1))
	L = D - W
	return L
| 21.814286 | 75 | 0.614276 | 268 | 1,527 | 3.466418 | 0.309701 | 0.04521 | 0.06028 | 0.064586 | 0.383208 | 0.334769 | 0.318622 | 0.318622 | 0.318622 | 0.318622 | 0 | 0.020781 | 0.212181 | 1,527 | 70 | 76 | 21.814286 | 0.750623 | 0.183366 | 0 | 0.4375 | 0 | 0 | 0.040067 | 0 | 0 | 0 | 0 | 0.014286 | 0 | 1 | 0.083333 | false | 0.020833 | 0.0625 | 0 | 0.229167 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
0ec1afd2facbda8f3febe8ca1dc7c71fb6558f04 | 1,993 | py | Python | packages/watchmen-data-kernel/src/watchmen_data_kernel/meta/external_writer_service.py | Indexical-Metrics-Measure-Advisory/watchmen | c54ec54d9f91034a38e51fd339ba66453d2c7a6d | [
"MIT"
] | null | null | null | packages/watchmen-data-kernel/src/watchmen_data_kernel/meta/external_writer_service.py | Indexical-Metrics-Measure-Advisory/watchmen | c54ec54d9f91034a38e51fd339ba66453d2c7a6d | [
"MIT"
] | null | null | null | packages/watchmen-data-kernel/src/watchmen_data_kernel/meta/external_writer_service.py | Indexical-Metrics-Measure-Advisory/watchmen | c54ec54d9f91034a38e51fd339ba66453d2c7a6d | [
"MIT"
] | null | null | null | from typing import Optional
from watchmen_auth import PrincipalService
from watchmen_data_kernel.cache import CacheService
from watchmen_data_kernel.common import DataKernelException
from watchmen_data_kernel.external_writer import find_external_writer_create, register_external_writer_creator
from watchmen_meta.common import ask_meta_storage, ask_snowflake_generator
from watchmen_meta.system import ExternalWriterService as ExternalWriterStorageService
from watchmen_model.common import ExternalWriterId
from watchmen_model.system import ExternalWriter
def register_external_writer(external_writer: ExternalWriter) -> None:
	"""Look up the creator for the given writer type and register it under the writer's code."""
	creator = find_external_writer_create(external_writer.type)
	if creator is None:
		raise DataKernelException(f'Creator not found for external writer[{external_writer.dict()}].')
	register_external_writer_creator(external_writer.writerCode, creator())
class ExternalWriterService:
	"""Resolves external writers by id, preferring the cache over meta storage."""

	def __init__(self, principal_service: PrincipalService):
		self.principalService = principal_service

	def find_by_id(self, writer_id: ExternalWriterId) -> Optional[ExternalWriter]:
		"""Find an external writer by id, register its creator, and return it.

		Returns None when the writer does not exist in storage. Raises
		DataKernelException when the cached writer belongs to a tenant other
		than the current principal's.
		"""
		cached = CacheService.external_writer().get(writer_id)
		if cached is not None:
			if cached.tenantId != self.principalService.get_tenant_id():
				raise DataKernelException(
					f'External writer[id={writer_id}] not belongs to '
					f'current tenant[id={self.principalService.get_tenant_id()}].')
			register_external_writer(cached)
			return cached
		# Cache miss: fall back to meta storage inside a transaction.
		storage_service = ExternalWriterStorageService(
			ask_meta_storage(), ask_snowflake_generator(), self.principalService)
		storage_service.begin_transaction()
		try:
			# noinspection PyTypeChecker
			loaded: ExternalWriter = storage_service.find_by_id(writer_id)
			if loaded is None:
				return None
			CacheService.external_writer().put(loaded)
			register_external_writer(loaded)
			return loaded
		finally:
			storage_service.close_transaction()
| 41.520833 | 110 | 0.831912 | 236 | 1,993 | 6.694915 | 0.275424 | 0.23038 | 0.06962 | 0.070886 | 0.210127 | 0.148101 | 0.070886 | 0.070886 | 0 | 0 | 0 | 0 | 0.104365 | 1,993 | 47 | 111 | 42.404255 | 0.885154 | 0.013046 | 0 | 0.105263 | 0 | 0 | 0.086514 | 0.053944 | 0 | 0 | 0 | 0 | 0 | 1 | 0.078947 | false | 0 | 0.236842 | 0 | 0.421053 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
0ec2983c9be55e068e1ac3a8da9a2e78b097ece9 | 882 | py | Python | scrywarden/module.py | chasebrewsky/scrywarden | c6a5a81d14016ca58625df68594ef52dd328a0dd | [
"MIT"
] | 1 | 2020-12-13T00:49:51.000Z | 2020-12-13T00:49:51.000Z | scrywarden/module.py | chasebrewsky/scrywarden | c6a5a81d14016ca58625df68594ef52dd328a0dd | [
"MIT"
] | null | null | null | scrywarden/module.py | chasebrewsky/scrywarden | c6a5a81d14016ca58625df68594ef52dd328a0dd | [
"MIT"
] | null | null | null | from importlib import import_module
from typing import Any
def import_string(path: str) -> Any:
    """Import and return the object named by a dotted *path*.

    Parameters
    ----------
    path: str
        Dotted module path, e.g. ``"package.module.attribute"``.

    Returns
    -------
    The class or attribute at the given import path.

    Raises
    ------
    ImportError
        If *path* contains no dot, or the module lacks the named attribute.
    """
    try:
        module_path, attr_name = path.rsplit('.', 1)
    except ValueError as error:
        # No dot at all: this cannot be "module.attribute".
        raise ImportError(
            f"{path} does not look like a module path",
        ) from error
    module = import_module(module_path)
    try:
        return getattr(module, attr_name)
    except AttributeError as error:
        raise ImportError(
            f"Module '{module_path}' does not define a '{attr_name}' "
            "attribute/class",
        ) from error
| 24.5 | 71 | 0.603175 | 105 | 882 | 4.980952 | 0.428571 | 0.095602 | 0.063098 | 0.087954 | 0.091778 | 0 | 0 | 0 | 0 | 0 | 0 | 0.001623 | 0.301587 | 882 | 35 | 72 | 25.2 | 0.847403 | 0.278912 | 0 | 0.352941 | 0 | 0 | 0.19105 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.058824 | false | 0 | 0.352941 | 0 | 0.470588 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
0ec3f460313d8f825c0daad58ff5e76ef71c5401 | 1,704 | py | Python | Win/reg.py | QGB/QPSU | 7bc214676d797f42d2d7189dc67c9377bccdf25d | [
"MIT"
] | 6 | 2018-03-25T20:05:21.000Z | 2022-03-13T17:23:05.000Z | Win/reg.py | pen9un/QPSU | 76e1a3f6f6f6f78452e02f407870a5a32177b667 | [
"MIT"
] | 15 | 2018-05-14T03:30:21.000Z | 2022-03-03T15:33:25.000Z | Win/reg.py | pen9un/QPSU | 76e1a3f6f6f6f78452e02f407870a5a32177b667 | [
"MIT"
] | 1 | 2021-07-15T06:23:45.000Z | 2021-07-15T06:23:45.000Z | #coding=utf-8
try:
if __name__.startswith('qgb.Win'):
from .. import py
else:
import py
except Exception as ei:
raise ei
raise EnvironmentError(__name__)
if py.is2():
import _winreg as winreg
from _winreg import *
else:
import winreg
from winreg import *
def get(skey,name,root=HKEY_CURRENT_USER,returnType=True):
	''' from qgb.Win import reg
	reg.get(r'Software\Microsoft\Windows\CurrentVersion\Internet Settings','ProxyEnable')
	reg.get(r'HKLM\SYSTEM\CurrentControlSet\Services\LanmanServer\Parameters\Size' )
	There are seven predefined root keys, traditionally named according to their constant handles defined in the Win32 API

	skey must NOT include the value name itself, otherwise:
	FileNotFoundError: [WinError 2] The system cannot find the file specified.

	:param returnType: if True, return (value, 'REG_xxx : n'); otherwise just the value.
	'''
	# Use the key as a context manager so the handle is closed even on error
	# (the original leaked the PyHKEY handle).
	with OpenKey(root, skey) as key:
		value, vtype = QueryValueEx(key, name)
	if returnType:
		return value, '{} : {}'.format(REG_TYPE[vtype], vtype)
	return value
def set(skey,name,value,root=HKEY_CURRENT_USER,type='auto,or REG_TYPE int',returnType=True):
	'''Write `value` under `name` below `skey`, then read it back to verify.

	If `type` is not an int it is inferred from `value`:
	int -> REG_DWORD(4), str -> REG_SZ(1), bytes -> REG_BINARY(3).
	Returns a human-readable success/failure message (note: the 'sucess!'
	spelling is kept as-is because callers may match on it).
	'''
	# Context manager closes the key handle (the original leaked it).
	with OpenKey(root, skey, 0, KEY_SET_VALUE) as key:
		if not py.isint(type):
			if py.isint(value):type=4
			if py.istr(value):type=1
			if py.isbyte(value):type=3 #TODO test,and add more rule
		# Bug fix: write under the requested `name`; the original hard-coded
		# 'ProxyEnable' and ignored the parameter entirely.
		SetValueEx(key, name, 0, type, value)
	if get(skey,name,root=root,returnType=False)==value:
		return 'reg.set [{}] {}={} sucess!'.format(skey[-55:],name,value)
	else:
		return 'reg.set [{}] {}={} Failed !'.format(skey,name,value)
# Maps winreg value-type integer codes to their symbolic constant names.
REG_TYPE = {
	0: 'REG_NONE',
	1: 'REG_SZ',
	2: 'REG_EXPAND_SZ',
	3: 'REG_BINARY',
	4: 'REG_DWORD',
	5: 'REG_DWORD_BIG_ENDIAN',
	6: 'REG_LINK',
	7: 'REG_MULTI_SZ',
	8: 'REG_RESOURCE_LIST',
	9: 'REG_FULL_RESOURCE_DESCRIPTOR',
	10: 'REG_RESOURCE_REQUIREMENTS_LIST',
	11: 'REG_QWORD',
}
| 29.894737 | 119 | 0.693662 | 258 | 1,704 | 4.426357 | 0.465116 | 0.014011 | 0.028021 | 0.038529 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.02095 | 0.159624 | 1,704 | 56 | 120 | 30.428571 | 0.776536 | 0.245892 | 0 | 0.071429 | 0 | 0 | 0.209703 | 0.045383 | 0 | 0 | 0 | 0.017857 | 0 | 1 | 0.047619 | false | 0 | 0.142857 | 0 | 0.238095 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
0ec65d0e2393fe675648f46032adc3e480a8ef52 | 1,032 | py | Python | examples/resources.py | willvousden/clint | 6dc7ab1a6a162750e968463b43994447bca32544 | [
"0BSD"
] | 1,230 | 2015-01-03T05:39:25.000Z | 2020-02-18T12:36:03.000Z | examples/resources.py | willvousden/clint | 6dc7ab1a6a162750e968463b43994447bca32544 | [
"0BSD"
] | 50 | 2015-01-06T17:58:20.000Z | 2018-03-19T13:25:22.000Z | examples/resources.py | willvousden/clint | 6dc7ab1a6a162750e968463b43994447bca32544 | [
"0BSD"
] | 153 | 2015-01-03T03:56:25.000Z | 2020-02-13T20:59:03.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
import sys
import os
sys.path.insert(0, os.path.abspath('..'))
from clint import resources
resources.init('kennethreitz', 'clint')
lorem = 'Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.'
print('%s created.' % resources.user.path)
resources.user.write('lorem.txt', lorem)
print('lorem.txt created')
assert resources.user.read('lorem.txt') == lorem
print('lorem.txt has correct contents')
resources.user.delete('lorem.txt')
print('lorem.txt deleted')
assert resources.user.read('lorem.txt') == None
print('lorem.txt deletion confirmed')
| 33.290323 | 456 | 0.767442 | 151 | 1,032 | 5.211921 | 0.662252 | 0.081321 | 0.066074 | 0.045743 | 0.134689 | 0.134689 | 0 | 0 | 0 | 0 | 0 | 0.002222 | 0.127907 | 1,032 | 30 | 457 | 34.4 | 0.872222 | 0.040698 | 0 | 0 | 0 | 0.0625 | 0.611336 | 0 | 0 | 0 | 0 | 0 | 0.125 | 1 | 0 | false | 0 | 0.25 | 0 | 0.25 | 0.375 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
0ec932467a0e10a4a3b540d34642f573915937be | 7,076 | py | Python | fedora_college/modules/content/views.py | fedora-infra/fedora-college | cf310dab2e4fea02b9ac5e7f57dc53aafb4834d8 | [
"BSD-3-Clause"
] | 2 | 2015-05-16T09:54:17.000Z | 2017-01-11T17:58:31.000Z | fedora_college/modules/content/views.py | fedora-infra/fedora-college | cf310dab2e4fea02b9ac5e7f57dc53aafb4834d8 | [
"BSD-3-Clause"
] | null | null | null | fedora_college/modules/content/views.py | fedora-infra/fedora-college | cf310dab2e4fea02b9ac5e7f57dc53aafb4834d8 | [
"BSD-3-Clause"
] | 1 | 2020-12-07T22:14:01.000Z | 2020-12-07T22:14:01.000Z | # -*- coding: utf-8 -*-
import re
from unicodedata import normalize
from flask import Blueprint, render_template, current_app
from flask import redirect, url_for, g, abort
from sqlalchemy import desc
from fedora_college.core.database import db
from fedora_college.modules.content.forms import * # noqa
from fedora_college.core.models import * # noqa
from fedora_college.fedmsgshim import publish
from flask_fas_openid import fas_login_required
bundle = Blueprint('content', __name__, template_folder='templates')
from fedora_college.modules.content.media import * # noqa
_punct_re = re.compile(r'[\t !"#$%&\'()*\-/<=>?@\[\\\]^_`{|},.]+')
# Verify if user is authenticated
def authenticated():
return hasattr(g, 'fas_user') and g.fas_user
# generate url slug
def slugify(text, delim=u'-'):
"""Generates an slightly worse ASCII-only slug."""
result = []
for word in _punct_re.split(text.lower()):
word = normalize('NFKD', word).encode('ascii', 'ignore')
if word:
result.append(word)
return unicode(delim.join(result))
# attach tags to a content entry
def attach_tags(tags, content):
rem = TagsMap.query.filter_by(content_id=content.content_id).all()
for r in rem:
db.session.delete(r)
db.session.commit()
for tag in tags:
tag_db = Tags.query.filter_by(tag_text=tag).first()
if tag_db is None:
tag_db = Tags(tag)
db.session.add(tag_db)
db.session.commit()
Map = TagsMap(tag_db.tag_id, content.content_id)
db.session.add(Map)
db.session.commit()
# delete content
@bundle.route('/content/delete/<posturl>', methods=['GET', 'POST'])
@bundle.route('/content/delete/<posturl>/', methods=['GET', 'POST'])
@fas_login_required
def delete_content(posturl=None):
if posturl is not None:
db.session.rollback()
content = Content.query.filter_by(slug=posturl).first_or_404()
rem = TagsMap.query.filter_by(
content_id=content.content_id).all()
'''delete mapped tags'''
for r in rem:
db.session.delete(r)
comments = Comments.query.filter_by(
content_id=content.content_id).all()
'''delete comments with foriegn keys'''
for r in comments:
db.session.delete(r)
db.session.delete(content)
db.session.commit()
return redirect(url_for('profile.user',
nickname=g.fas_user['username']))
abort(404)
# add / edit more content
@bundle.route('/content/add/', methods=['GET', 'POST'])
@bundle.route('/content/add', methods=['GET', 'POST'])
@bundle.route('/content/edit/<posturl>/', methods=['GET', 'POST'])
@bundle.route('/content/edit/<posturl>', methods=['GET', 'POST'])
@fas_login_required
def addcontent(posturl=None):
if authenticated():
form = CreateContent()
form_action = url_for('content.addcontent')
media = Media.query.order_by(desc(Media.timestamp)).limit(10).all()
if posturl is not None:
content = Content.query.filter_by(slug=posturl).first_or_404()
form = CreateContent(obj=content)
if form.validate_on_submit():
form.populate_obj(content)
tags = str(form.tags.data).split(',')
attach_tags(tags, content)
content.rehtml()
db.session.commit()
'''Publish the message'''
msg = content.getdata()
msg['title'] = content.title
msg['link'] = current_app.config[
'EXTERNAL_URL'] + content.slug
publish(
topic=current_app.config['CONTENT_EDIT_TOPIC'],
msg=msg
)
if content.type_content == "blog":
print url_for('content.blog', slug=posturl)
return redirect(url_for('content.blog', slug=posturl))
return redirect(url_for('home.content', slug=posturl))
else:
if form.validate_on_submit():
url_name = slugify(form.title.data)
content = Content(form.title.data,
url_name,
form.description.data,
form.active.data,
form.tags.data,
g.fas_user['username'],
form.type_content.data
)
tags = str(form.tags.data).split(',')
try:
db.session.add(content)
db.session.commit()
attach_tags(tags, content)
'''Publish the message'''
msg = content.getdata()
msg['title'] = content.title
msg['link'] = current_app.config[
'EXTERNAL_URL'] + url_name
publish(
topic=current_app.config['CONTENT_CREATE_TOPIC'],
msg=msg
)
if content.type_content == "blog":
return redirect(url_for('content.blog', slug=posturl))
return redirect(url_for('home.content', slug=url_name))
# Duplicate entry
except Exception as e:
return str(e)
db.session.rollback()
pass
tags = Tags.query.all()
return render_template('content/edit_content.html', form=form,
form_action=form_action, title="Create Content",
media=media[0:5], tags=tags)
abort(404)
# View Blog post
@bundle.route('/blog', methods=['GET', 'POST'])
@bundle.route('/blog/', methods=['GET', 'POST'])
@bundle.route('/blog/<slug>/', methods=['GET', 'POST'])
@bundle.route('/blog/<slug>', methods=['GET', 'POST'])
@bundle.route('/blog/page/<id>', methods=['GET', 'POST'])
@bundle.route('/blog/page/<id>', methods=['GET', 'POST'])
def blog(slug=None, id=0):
id = int(id)
screen = Content.query. \
filter_by(
type_content="lecture",
active=True
).limit(10).all()
if slug is not None:
try:
posts = Content.query. \
filter_by(slug=slug).all()
except:
posts = "No such posts in database."
else:
try:
posts = Content.query. \
filter_by(type_content="blog").all()
if id > 0:
posts = posts[id - 1:id + 5]
else:
posts = posts[0:5]
except:
posts = []
return render_template('blog/index.html',
title='Blog',
content=posts,
screen=screen,
id=id,
slug=slug
)
| 34.517073 | 79 | 0.53307 | 772 | 7,076 | 4.757772 | 0.221503 | 0.036755 | 0.045739 | 0.049006 | 0.452219 | 0.397767 | 0.329703 | 0.329703 | 0.262184 | 0.262184 | 0 | 0.005341 | 0.338468 | 7,076 | 204 | 80 | 34.686275 | 0.779321 | 0.026427 | 0 | 0.345912 | 0 | 0 | 0.0916 | 0.01832 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0.006289 | 0.069182 | null | null | 0.018868 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
0ecb9ff079e3fe67fcf620b3218ea8892b9b9c1c | 1,726 | py | Python | utils/utils.py | scomup/StereoNet-ActiveStereoNet | 05994cf1eec4a109e095732fe01ecb5558880ba5 | [
"MIT"
] | null | null | null | utils/utils.py | scomup/StereoNet-ActiveStereoNet | 05994cf1eec4a109e095732fe01ecb5558880ba5 | [
"MIT"
] | null | null | null | utils/utils.py | scomup/StereoNet-ActiveStereoNet | 05994cf1eec4a109e095732fe01ecb5558880ba5 | [
"MIT"
] | null | null | null | # ------------------------------------------------------------------------------
# Copyright (c) NKU
# Licensed under the MIT License.
# Written by Xuanyi Li (xuanyili.edu@gmail.com)
# ------------------------------------------------------------------------------
import os
import torch
import torch.nn.functional as F
#import cv2 as cv
import numpy as np
def GERF_loss(GT, pred, args):
# mask = (GT < args.maxdisp) & (GT >= 0)
mask = GT > 0
mask.detach_()
# print(mask.size(), GT.size(), pred.size())
count = len(torch.nonzero(mask))
# print(count)
if count == 0:
count = 1
return torch.sum(torch.sqrt(torch.pow(GT[mask] - pred[mask], 2) + 4) /2 - 1) / count
def smooth_L1_loss(GT, pred, args):
mask = GT < args.maxdisp
mask.detach_()
# loss = F.smooth_l1_loss(pred[mask], GT[mask], size_average=True)
loss = (pred[mask] - GT[mask]).abs().mean()
return loss
if __name__ == '__main__':
pass
# import matplotlib.pyplot as plt
# image = cv.imread('/media/lxy/sdd1/ActiveStereoNet/StereoNet_pytorch/results/forvideo/iter-122.jpg')
#im_gray = cv.imread('/media/lxy/sdd1/ActiveStereoNet/StereoNet_pytorch/results/forvideo/iter-133.jpg', cv.IMREAD_GRAYSCALE)
# print(im_gray.shape)
#im_color = cv.applyColorMap(im_gray*2, cv.COLORMAP_JET)
# cv.imshow('test', im_color)
# cv.waitKey(0)
#cv.imwrite('test.png',im_color)
# print(image.shape)
# plt.figure('Image')
# sc =plt.imshow(image)
# sc.set_cmap('hsv')
# plt.colorbar()
# plt.axis('off')
# plt.show()
# print('end')
# image[:,:,0].save('/media/lxy/sdd1/ActiveStereoNet/StereoNet_pytorch/results/pretrained_StereoNet_single/it1er-151.jpg')
| 32.566038 | 128 | 0.589803 | 228 | 1,726 | 4.333333 | 0.45614 | 0.030364 | 0.036437 | 0.081984 | 0.291498 | 0.255061 | 0.255061 | 0.204453 | 0.1417 | 0.1417 | 0 | 0.018881 | 0.171495 | 1,726 | 52 | 129 | 33.192308 | 0.672028 | 0.631518 | 0 | 0.111111 | 0 | 0 | 0.013158 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.111111 | false | 0.055556 | 0.222222 | 0 | 0.444444 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
0ecd026a7b7cddee19fb7d65983aadf807f4917d | 657 | py | Python | rblod/setup.py | TiKeil/Two-scale-RBLOD | 23f17a3e4edf63ea5f208eca50ca90c19bf511a9 | [
"BSD-2-Clause"
] | null | null | null | rblod/setup.py | TiKeil/Two-scale-RBLOD | 23f17a3e4edf63ea5f208eca50ca90c19bf511a9 | [
"BSD-2-Clause"
] | null | null | null | rblod/setup.py | TiKeil/Two-scale-RBLOD | 23f17a3e4edf63ea5f208eca50ca90c19bf511a9 | [
"BSD-2-Clause"
] | null | null | null | # ~~~
# This file is part of the paper:
#
# " An Online Efficient Two-Scale Reduced Basis Approach
# for the Localized Orthogonal Decomposition "
#
# https://github.com/TiKeil/Two-scale-RBLOD.git
#
# Copyright 2019-2021 all developers. All rights reserved.
# License: Licensed as BSD 2-Clause License (http://opensource.org/licenses/BSD-2-Clause)
# Authors:
# Stephan Rave
# Tim Keil
# ~~~
from setuptools import setup
setup(name='rblod',
version='2021.1',
description='Pymor support for RBLOD',
author='Tim Keil',
author_email='tim.keil@wwu.de',
license='MIT',
packages=['rblod'])
| 26.28 | 89 | 0.648402 | 83 | 657 | 5.120482 | 0.759036 | 0.049412 | 0.047059 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.029412 | 0.223744 | 657 | 24 | 90 | 27.375 | 0.803922 | 0.605784 | 0 | 0 | 0 | 0 | 0.266393 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.125 | 0 | 0.125 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
0ecdf401d5b3926e749aa892bfa6a87de7f72b30 | 8,060 | py | Python | bin/euclid_fine_plot_job_array.py | ndeporzio/cosmicfish | f68f779d73f039512a958d110bb44194d0daceec | [
"MIT"
] | null | null | null | bin/euclid_fine_plot_job_array.py | ndeporzio/cosmicfish | f68f779d73f039512a958d110bb44194d0daceec | [
"MIT"
] | null | null | null | bin/euclid_fine_plot_job_array.py | ndeporzio/cosmicfish | f68f779d73f039512a958d110bb44194d0daceec | [
"MIT"
] | null | null | null | import os
import shutil
import numpy as np
import pandas as pd
import seaborn as sns
import cosmicfish as cf
import matplotlib.pyplot as plt
import dill
# Instruct pyplot to use seaborn
sns.set()
# Set project, data, CLASS directories
projectdir = os.environ['STORAGE_DIR']
datastore = os.environ['DATASTORE_DIR']
classpath = os.environ['CLASS_DIR']
# FORECAST_INDEX is the job-array index; it selects one grid point below.
fidx = int(os.environ['FORECAST_INDEX'])
# Generate output paths
fp_resultsdir = projectdir
cf.makedirectory(fp_resultsdir)
# Specify resolution of numerical integrals
derivative_step = 0.008 # How much to vary parameter to calculate numerical derivative
g_derivative_step = 0.1
mu_integral_step = 0.05 # For calculating numerical integral wrt mu between -1 and 1
# Linda Fiducial Cosmology
# (CLASS-style parameter names; this is the fiducial point for the forecasts)
fp_fid = {
        "A_s" : 2.2321e-9,
        "n_s" : 0.967,
        "omega_b" : 0.02226,
        "omega_cdm" : 0.1127,
        "tau_reio" : 0.0598,
        "h" : 0.701,
        "T_cmb" : 2.726, # Units [K]
        "N_ncdm" : 4.,
        "deg_ncdm" : 1.0,
        "T_ncdm" : (0.79/2.726), # Units [T_cmb].
        "m_ncdm" : 0.01, # Units [eV]
        "b0" : 1.0,
        "beta0" : 1.7,
        "beta1" : 1.0,
        "alphak2" : 1.0,
        "sigma_fog_0" : 250000, #Units [m s^-2]
        "N_eff" : 0.0064, #We allow relativistic neutrinos in addition to our DM relic
        "relic_vary" : "N_ncdm", # Fix T_ncdm or m_ncdm
        "m_nu" : 0.02
        }
# EUCLID values
# Redshift bin centers, galaxy counts per bin, and the survey sky fraction.
z_table = np.array([0.65, 0.75, 0.85, 0.95, 1.05, 1.15, 1.25, 1.35, 1.45, 1.55, 1.65, 1.75, 1.85, 1.95])
dNdz = np.array([2434.280, 4364.812, 4728.559, 4825.798, 4728.797, 4507.625, 4269.851, 3720.657, 3104.309,
        2308.975, 1514.831, 1474.707, 893.716, 497.613])
skycover = 0.3636
# Run Fisher Forecast
# The job-array index decomposes into a (temperature, mass) grid point:
# 21 masses per relic temperature, 4 temperatures.
full_masses = np.geomspace(0.01, 10., 21)
full_temps = np.array([0.79, 0.91, 0.94, 1.08])
mass_index=(fidx % 21)
temp_index=(fidx // 21)
masses = np.array([full_masses[mass_index]])
temps = np.array([full_temps[temp_index]])
# Reduce omega_cdm so the total matter density stays fixed as the relic
# mass/temperature varies (relic contributes (m/scale)*(T/1.95)^3).
omegacdm_set = np.array([
        fp_fid['omega_cdm']
        - ((masses/cf.NEUTRINO_SCALE_FACTOR)* np.power(tval / 1.95, 3.))
        for tidx, tval in enumerate(temps)])
# One fiducial cosmology dict per (temperature, mass) combination.
fp_fiducialset = [[
        dict(fp_fid, **{
                'm_ncdm' : masses[midx],
                'omega_cdm' : omegacdm_set[tidx, midx],
                'T_ncdm' : temps[tidx]/2.726})
        for midx, mval in enumerate(masses)]
        for tidx, tval in enumerate(temps)]
# One cosmicfish forecast object per fiducial cosmology.
fp_forecastset = [[cf.forecast(
        classpath,
        datastore,
        '2relic',
        fidval,
        z_table,
        "EUCLID",
        dNdz,
        fsky=skycover,
        dstep=derivative_step,
        gstep=g_derivative_step,
        RSD=True,
        FOG=True,
        AP=True,
        COV=True)
        for fididx, fidval in enumerate(fidrowvals)]
        for fidrowidx, fidrowvals in enumerate(fp_fiducialset)]
#dill.load_session('')
# Generate the Fisher matrix for each forecast that does not already have one,
# then checkpoint the whole session with dill so the job can be resumed.
for frowidx, frowval in enumerate(fp_forecastset):
    for fidx, fcst in enumerate(frowval):
        if type(fcst.fisher)==type(None):
            # Power-spectrum derivatives must exist before the Fisher matrix.
            fcst.gen_pm()
            fcst.gen_fisher(
                fisher_order=[
                    'omega_b',
                    'omega_cdm',
                    'n_s',
                    'A_s',
                    'tau_reio',
                    'h',
                    'N_ncdm',
                    'M_ncdm',
                    'sigma_fog',
                    'beta0',
                    'beta1',
                    'alpha_k2'],
                mu_step=mu_integral_step,
                skipgen=False)
            print("Relic Forecast ", fidx, " complete...")
            # Dump the session per grid point so partial progress survives.
            dill.dump_session(os.path.join(fp_resultsdir, 'fp_'+str(temp_index)+'_'+str(mass_index)+'.db'))
        else:
            print('Fisher matrix already generated!')
| 65.528455 | 116 | 0.262655 | 511 | 8,060 | 3.986301 | 0.448141 | 0.037801 | 0.014728 | 0.012764 | 0.028473 | 0.028473 | 0.028473 | 0 | 0 | 0 | 0 | 0.100917 | 0.675434 | 8,060 | 122 | 117 | 66.065574 | 0.677752 | 0.121216 | 0 | 0 | 0 | 0 | 0.048801 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.079208 | 0 | 0.079208 | 0.019802 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
0ecedb23d891d612188b09f34a36b454a3d85a93 | 6,674 | py | Python | src/oci/apm_traces/models/query_result_row_type_summary.py | Manny27nyc/oci-python-sdk | de60b04e07a99826254f7255e992f41772902df7 | [
"Apache-2.0",
"BSD-3-Clause"
] | 249 | 2017-09-11T22:06:05.000Z | 2022-03-04T17:09:29.000Z | src/oci/apm_traces/models/query_result_row_type_summary.py | Manny27nyc/oci-python-sdk | de60b04e07a99826254f7255e992f41772902df7 | [
"Apache-2.0",
"BSD-3-Clause"
] | 228 | 2017-09-11T23:07:26.000Z | 2022-03-23T10:58:50.000Z | src/oci/apm_traces/models/query_result_row_type_summary.py | Manny27nyc/oci-python-sdk | de60b04e07a99826254f7255e992f41772902df7 | [
"Apache-2.0",
"BSD-3-Clause"
] | 224 | 2017-09-27T07:32:43.000Z | 2022-03-25T16:55:42.000Z | # coding: utf-8
# Copyright (c) 2016, 2021, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
from oci.util import formatted_flat_dict, NONE_SENTINEL, value_allowed_none_or_none_sentinel # noqa: F401
from oci.decorators import init_model_state_from_kwargs
@init_model_state_from_kwargs
class QueryResultRowTypeSummary(object):
"""
A summary of the datatype, unit and related metadata of an individual row element of a query result row that is returned.
"""
    def __init__(self, **kwargs) -> None:
        """
        Initializes a new QueryResultRowTypeSummary object with values from keyword arguments.
        The following keyword arguments are supported (corresponding to the getters/setters of this class):
        :param data_type:
            The value to assign to the data_type property of this QueryResultRowTypeSummary.
        :type data_type: str
        :param unit:
            The value to assign to the unit property of this QueryResultRowTypeSummary.
        :type unit: str
        :param display_name:
            The value to assign to the display_name property of this QueryResultRowTypeSummary.
        :type display_name: str
        :param expression:
            The value to assign to the expression property of this QueryResultRowTypeSummary.
        :type expression: str
        :param query_result_row_type_summaries:
            The value to assign to the query_result_row_type_summaries property of this QueryResultRowTypeSummary.
        :type query_result_row_type_summaries: list[oci.apm_traces.models.QueryResultRowTypeSummary]
        """
        # Swagger/OpenAPI type of each model attribute, used by the SDK
        # (de)serialization machinery.
        self.swagger_types = {
            'data_type': 'str',
            'unit': 'str',
            'display_name': 'str',
            'expression': 'str',
            'query_result_row_type_summaries': 'list[QueryResultRowTypeSummary]'
        }
        # Maps python attribute names to their JSON (camelCase) field names.
        self.attribute_map = {
            'data_type': 'dataType',
            'unit': 'unit',
            'display_name': 'displayName',
            'expression': 'expression',
            'query_result_row_type_summaries': 'queryResultRowTypeSummaries'
        }
        self._data_type = None
        self._unit = None
        self._display_name = None
        self._expression = None
        self._query_result_row_type_summaries = None
    @property
    def data_type(self) -> str:
        """
        Gets the data_type of this QueryResultRowTypeSummary.
        Datatype of the query result row element.
        :return: The data_type of this QueryResultRowTypeSummary.
        :rtype: str
        """
        return self._data_type
    @data_type.setter
    def data_type(self, data_type: str) -> None:
        """
        Sets the data_type of this QueryResultRowTypeSummary.
        Datatype of the query result row element.
        :param data_type: The data_type of this QueryResultRowTypeSummary.
        :type: str
        """
        self._data_type = data_type
@property
def unit(self):
"""
Gets the unit of this QueryResultRowTypeSummary.
Granular unit in which the query result row element's data is represented.
:return: The unit of this QueryResultRowTypeSummary.
:rtype: str
"""
return self._unit
@unit.setter
def unit(self, unit):
"""
Sets the unit of this QueryResultRowTypeSummary.
Granular unit in which the query result row element's data is represented.
:param unit: The unit of this QueryResultRowTypeSummary.
:type: str
"""
self._unit = unit
@property
def display_name(self):
"""
Gets the display_name of this QueryResultRowTypeSummary.
Alias name if an alias is used for the query result row element or an assigned display name from the query language
in some default cases.
:return: The display_name of this QueryResultRowTypeSummary.
:rtype: str
"""
return self._display_name
@display_name.setter
def display_name(self, display_name):
"""
Sets the display_name of this QueryResultRowTypeSummary.
Alias name if an alias is used for the query result row element or an assigned display name from the query language
in some default cases.
:param display_name: The display_name of this QueryResultRowTypeSummary.
:type: str
"""
self._display_name = display_name
@property
def expression(self):
"""
Gets the expression of this QueryResultRowTypeSummary.
Actual show expression in the user typed query that produced this column.
:return: The expression of this QueryResultRowTypeSummary.
:rtype: str
"""
return self._expression
@expression.setter
def expression(self, expression):
"""
Sets the expression of this QueryResultRowTypeSummary.
Actual show expression in the user typed query that produced this column.
:param expression: The expression of this QueryResultRowTypeSummary.
:type: str
"""
self._expression = expression
@property
def query_result_row_type_summaries(self):
"""
Gets the query_result_row_type_summaries of this QueryResultRowTypeSummary.
A query result row type summary object that represents a nested table structure.
:return: The query_result_row_type_summaries of this QueryResultRowTypeSummary.
:rtype: list[oci.apm_traces.models.QueryResultRowTypeSummary]
"""
return self._query_result_row_type_summaries
@query_result_row_type_summaries.setter
def query_result_row_type_summaries(self, query_result_row_type_summaries):
"""
Sets the query_result_row_type_summaries of this QueryResultRowTypeSummary.
A query result row type summary object that represents a nested table structure.
:param query_result_row_type_summaries: The query_result_row_type_summaries of this QueryResultRowTypeSummary.
:type: list[oci.apm_traces.models.QueryResultRowTypeSummary]
"""
self._query_result_row_type_summaries = query_result_row_type_summaries
def __repr__(self):
return formatted_flat_dict(self)
def __eq__(self, other):
if other is None:
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not self == other
| 33.878173 | 245 | 0.674408 | 787 | 6,674 | 5.503177 | 0.190597 | 0.068575 | 0.087278 | 0.083122 | 0.626414 | 0.532671 | 0.414916 | 0.315862 | 0.315862 | 0.287693 | 0 | 0.003669 | 0.264909 | 6,674 | 196 | 246 | 34.05102 | 0.879128 | 0.557537 | 0 | 0.080645 | 0 | 0 | 0.102352 | 0.052265 | 0 | 0 | 0 | 0 | 0 | 1 | 0.225806 | false | 0 | 0.032258 | 0.032258 | 0.419355 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
0ed3370d325b05dcd0ff4ac3d8d74980237e624c | 1,004 | py | Python | anti_cpdaily/command.py | hyx0329/nonebot_plugin_anti_cpdaily | 5868626fb95876f9638aaa1edd9a2f914ea7bed1 | [
"MIT"
] | 2 | 2021-11-07T10:33:16.000Z | 2021-12-20T08:25:19.000Z | anti_cpdaily/command.py | hyx0329/nonebot_plugin_anti_cpdaily | 5868626fb95876f9638aaa1edd9a2f914ea7bed1 | [
"MIT"
] | null | null | null | anti_cpdaily/command.py | hyx0329/nonebot_plugin_anti_cpdaily | 5868626fb95876f9638aaa1edd9a2f914ea7bed1 | [
"MIT"
] | null | null | null | import nonebot
from nonebot import on_command
from nonebot.rule import to_me
from nonebot.typing import T_State
from nonebot.adapters import Bot, Event
from nonebot.log import logger
from .config import global_config
from .schedule import anti_cpdaily_check_routine
# Matcher for the "cpdaily" chat command; the handler below registers on it.
cpdaily = on_command('cpdaily')
# Shared APScheduler instance exposed by the nonebot_plugin_apscheduler plugin.
scheduler = nonebot.require("nonebot_plugin_apscheduler").scheduler
async def one_shot_routine():
    """Run the cpdaily check routine once, then never again.

    The job deregisters itself from the scheduler before running, so the
    interval trigger that scheduled it cannot fire a second time.
    """
    job_id = 'anti_cpdaily_oneshot'
    scheduler.remove_job(job_id)
    await anti_cpdaily_check_routine()
@cpdaily.handle()
async def handle_command(bot: Bot, event: Event, state: T_State):
    """Manually schedule one run of the check routine, one minute from now.

    Only superusers actually schedule the one-shot job; every caller still
    receives the confirmation reply.
    """
    is_superuser = event.get_user_id() in bot.config.superusers
    if is_superuser:
        logger.debug('manually activate the cpdaily routine')
        scheduler.add_job(
            one_shot_routine,
            trigger='interval',
            minutes=1,
            id='anti_cpdaily_oneshot',
            replace_existing=True,
        )
        logger.debug('manual process end')
    await cpdaily.finish('启动今日校园打卡程序ing')
| 32.387097 | 124 | 0.76494 | 136 | 1,004 | 5.419118 | 0.441176 | 0.074627 | 0.065129 | 0.093623 | 0.126187 | 0 | 0 | 0 | 0 | 0 | 0 | 0.002342 | 0.149402 | 1,004 | 30 | 125 | 33.466667 | 0.860656 | 0.033865 | 0 | 0 | 0 | 0 | 0.162309 | 0.028322 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.4 | 0 | 0.4 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
0ed495b3d64a671dbd7202470a06b2b18d6c7be4 | 155 | py | Python | tests/inputs/loops/51-arrays-in-loop.py | helq/pytropos | 497ed5902e6e4912249ca0a46b477f9bfa6ae80a | [
"MIT"
] | 4 | 2019-10-06T18:01:24.000Z | 2020-07-03T05:27:35.000Z | tests/inputs/loops/51-arrays-in-loop.py | helq/pytropos | 497ed5902e6e4912249ca0a46b477f9bfa6ae80a | [
"MIT"
] | 5 | 2021-06-07T15:50:04.000Z | 2021-06-07T15:50:06.000Z | tests/inputs/loops/51-arrays-in-loop.py | helq/pytropos | 497ed5902e6e4912249ca0a46b477f9bfa6ae80a | [
"MIT"
] | null | null | null | import numpy as np
from something import Top
i = 0
while i < 10:
a = np.ndarray((10,4))
b = np.ones((10, Top))
i += 1
del Top
# show_store()
| 12.916667 | 26 | 0.580645 | 29 | 155 | 3.068966 | 0.689655 | 0.089888 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.080357 | 0.277419 | 155 | 11 | 27 | 14.090909 | 0.714286 | 0.077419 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.25 | 0 | 0.25 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
0ed9b178770e9775a60fa8ee66730cd786425565 | 448 | py | Python | test/test_delete_group.py | ruslankl9/ironpython_training | 51eaad4da24fdce60fbafee556160a9e847c08cf | [
"Apache-2.0"
] | null | null | null | test/test_delete_group.py | ruslankl9/ironpython_training | 51eaad4da24fdce60fbafee556160a9e847c08cf | [
"Apache-2.0"
] | null | null | null | test/test_delete_group.py | ruslankl9/ironpython_training | 51eaad4da24fdce60fbafee556160a9e847c08cf | [
"Apache-2.0"
] | null | null | null | from model.group import Group
import random
def test_delete_some_group(app):
if len(app.group.get_group_list()) <= 1:
app.group.add_new_group(Group(name='test'))
old_list = app.group.get_group_list()
index = random.randrange(len(old_list))
app.group.delete_group_by_index(index)
new_list = app.group.get_group_list()
assert len(old_list) - 1 == len(new_list)
del old_list[index]
assert old_list == new_list | 32 | 51 | 0.712054 | 73 | 448 | 4.068493 | 0.328767 | 0.13468 | 0.111111 | 0.161616 | 0.228956 | 0.161616 | 0 | 0 | 0 | 0 | 0 | 0.005376 | 0.169643 | 448 | 14 | 52 | 32 | 0.793011 | 0 | 0 | 0 | 0 | 0 | 0.008909 | 0 | 0 | 0 | 0 | 0 | 0.166667 | 1 | 0.083333 | false | 0 | 0.166667 | 0 | 0.25 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
0ed9d9ea2d863109661ee50e679a897b97a003a9 | 3,173 | py | Python | Evaluation/batch_detection.py | gurkirt/actNet-inAct | 1930bcb41553e50ddd83985a497a9d5ce4f1fcf2 | [
"MIT"
] | 27 | 2016-05-04T07:13:05.000Z | 2021-12-05T04:45:45.000Z | Evaluation/batch_detection.py | gurkirt/actNet-inAct | 1930bcb41553e50ddd83985a497a9d5ce4f1fcf2 | [
"MIT"
] | 1 | 2017-12-28T08:29:00.000Z | 2017-12-28T08:29:00.000Z | Evaluation/batch_detection.py | gurkirt/actNet-inAct | 1930bcb41553e50ddd83985a497a9d5ce4f1fcf2 | [
"MIT"
] | 12 | 2016-05-15T21:40:06.000Z | 2019-11-27T09:43:55.000Z | '''
Author: Gurkirt Singh
Start date: 15th May 2016
Purpose: this file reads frame-level predictions and processes them to produce a label per video
'''
from sklearn.svm import LinearSVC
from sklearn.ensemble import RandomForestClassifier
import numpy as np
import pickle
import os
import time,json
import pylab as plt
from eval_detection import ANETdetection
import scipy.io as sio
# Root directory for datasets/results on this machine; alternate mount
# points for other hosts are kept commented for reference.
#######baseDir = "/mnt/sun-alpha/actnet/";
baseDir = "/data/shared/solar-machines/actnet/";
#baseDir = "/mnt/solar-machines/actnet/";
########imgDir = "/mnt/sun-alpha/actnet/rgb-images/";
######## imgDir = "/mnt/DATADISK2/ss-workspace/actnet/rgb-images/";
# Pickled ActivityNet v1.3 annotations used by the evaluation scripts.
annotPklFile = "../Evaluation/data/actNet200-V1-3.pkl"
def getscore(ground_truth_filename, prediction_filename,
             tiou_thr=0.5, subset='validation', verbose=True, check_status=True):
    """Evaluate detection predictions against ground truth.

    :param ground_truth_filename: path to the ground-truth JSON file
    :param prediction_filename: path to the predictions JSON file
    :param tiou_thr: temporal IoU threshold used for matching
    :param subset: dataset subset to evaluate ('validation'/'testing')
    :param verbose: forwarded to ANETdetection
    :param check_status: forwarded to ANETdetection
    :return: per-class average precision as computed by ANETdetection.evaluate()
    """
    # BUG FIX: the check_status argument used to be ignored (the call
    # hardcoded check_status=True); it is now passed through.
    anet_detection = ANETdetection(ground_truth_filename, prediction_filename,
                                   subset=subset, tiou_thr=tiou_thr,
                                   verbose=verbose, check_status=check_status)
    ap = anet_detection.evaluate()
    return ap
def saveAPs():
K = 5;
subset = 'validation';#,'testing']:
featType = 'IMS-MBH'
# savename = '{}data/predictions-{}-{}.pkl'.format(baseDir,subset,featType)
# with open(savename,'r') as f:
# data = pickle.load(f)
outfilename = '{}results/classification/{}-{}-{}.json'.format(baseDir,subset,featType,str(K).zfill(3))
gtfiile = 'data/activity_net.v1-3.min.json'
ap = getscore(gtfiile,outfilename,top_k=1)
print ap
print np.mean(ap)
savename = '{}data/weightAP-{}.pkl'.format(baseDir,featType)
print 'Results saved in ',savename
with open(savename,'w') as f:
pickle.dump(ap,f)
def plotAPs():
K = 1;
subset = 'validation';#,'testing']:
aps = [];
count = 0;
colors = ['red','green','blue']
for featType in ['IMS-MBH','IMS','MBH']:
savename = '{}data/weightAP-{}.pkl'.format(baseDir,featType)
print 'Results saved in ',savename
with open(savename,'r') as f:
ap = pickle.load(f)
ind = np.arange(count,600+count,3)
plt.bar(ind,ap,width=0.5,color=colors[count])
count += 1
plt.show()
def evalAll():
K = 10;
subset = 'validation';#,'testing']:
gtfiile = 'data/activity_net.v1-3.min.json'
result = []; count = 0;
featType = 'C3D-BIN-BOOST-LONG'
# outfilename = '{}results/detection/{}-{}-K-{}-{}.json'.format(baseDir,subset,featType,str(K).zfill(3),'alpha-001')
for alpha in [1,3,5,]:
outfilename = '{}results/detection/{}-{}-K-{}-{}.json'.format(baseDir,subset,featType,str(K).zfill(3),'alpha-{}'.format(str(int(alpha*10)).zfill(3)))
print 'Evaluating results from ',outfilename
for tioth in [0.5,0.4,0.3,0.2,0.1]:
ap = getscore(gtfiile,outfilename,tiou_thr=tioth)
result.append([alpha,tioth,np.mean(ap)])
result = np.aaarray(result)
sio.savemat('result-{}.mat'.format(featType),mdict={'ap':ap})
if __name__ == "__main__":
    # processOnePredictions()
    # saveAPs()
    # plotAPs()
    # BUG FIX: the function defined above is evalAll; the previous call
    # evalALL() raised NameError at runtime.
    evalAll()
| 36.056818 | 157 | 0.627167 | 406 | 3,173 | 4.837438 | 0.3867 | 0.039715 | 0.038697 | 0.05499 | 0.267312 | 0.229633 | 0.217413 | 0.217413 | 0.184827 | 0.163951 | 0 | 0.021739 | 0.202647 | 3,173 | 87 | 158 | 36.471264 | 0.754545 | 0.15884 | 0 | 0.145161 | 0 | 0 | 0.175403 | 0.102419 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.145161 | null | null | 0.080645 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
0eda1b4f399b44a556364cedf6c955fb55a3872c | 2,355 | py | Python | src/decisionengine/framework/modules/tests/test_module_decorators.py | moibenko/decisionengine | 4c458e0c225ec2ce1e82d56e752724983331b7d1 | [
"Apache-2.0"
] | 9 | 2018-06-11T20:06:50.000Z | 2020-10-01T17:02:02.000Z | src/decisionengine/framework/modules/tests/test_module_decorators.py | moibenko/decisionengine | 4c458e0c225ec2ce1e82d56e752724983331b7d1 | [
"Apache-2.0"
] | 551 | 2018-06-25T21:06:37.000Z | 2022-03-31T13:47:32.000Z | src/decisionengine/framework/modules/tests/test_module_decorators.py | goodenou/decisionengine | b203e2c493cf501562accf1013c6257c348711b7 | [
"Apache-2.0"
] | 70 | 2018-06-11T20:07:01.000Z | 2022-02-10T16:18:24.000Z | # SPDX-FileCopyrightText: 2017 Fermi Research Alliance, LLC
# SPDX-License-Identifier: Apache-2.0
import pytest
from decisionengine.framework.modules import Publisher, Source
from decisionengine.framework.modules.Module import verify_products
from decisionengine.framework.modules.Source import Parameter
def test_multiple_consumes_declarations():
    # Stacking @consumes twice on one Publisher class must be rejected.
    with pytest.raises(Exception, match="@consumes has already been called"):

        @Publisher.consumes(a=int)
        @Publisher.consumes(b=float)
        class _(Publisher.Publisher):
            pass
def test_multiple_produces_declarations():
    # Stacking @produces twice on one Source class must be rejected.
    with pytest.raises(Exception, match="@produces has already been called"):

        @Source.produces(c=str)
        @Source.produces(d=bool)
        class _(Source.Source):
            pass
def test_wrong_product_names():
    """verify_products flags both declared-but-missing and undeclared products."""

    @Source.produces(a=str)
    class MislabeledSource(Source.Source):
        def __init__(self, config):
            super().__init__(config)

        def acquire(self):
            # Declares product 'a' but actually produces 'b'.
            return {"b": ""}

    source = MislabeledSource({"channel_name": "test"})
    expected_err_msg = (
        "The following products were not produced:\n"
        " - 'a' of type 'str'\n\n"
        "The following products were not declared:\n"
        " - 'b' of type 'str'"
    )
    with pytest.raises(Exception, match=expected_err_msg):
        verify_products(source, source.acquire())
def test_wrong_product_types():
    """verify_products flags a product whose runtime type mismatches its declaration."""

    @Source.produces(a=str, b=int)
    class BadTypesSource(Source.Source):
        def __init__(self, config):
            super().__init__(config)

        def acquire(self):
            # 'a' is declared str but produced as int; 'b' is correct.
            return {"a": 42, "b": 17}

    source = BadTypesSource({"channel_name": "test"})
    expected_err_msg = (
        "The following products have the wrong types:\n"
        r" - 'a' \(expected 'str', got 'int'\)"
    )
    with pytest.raises(Exception, match=expected_err_msg):
        verify_products(source, source.acquire())
def test_supports_config():
    # A Parameter whose declared type conflicts with the type of its
    # default value must be rejected at class-decoration time.
    expected_err_msg = (
        "An error occurred while processing the parameter 'conflicting_types':\n"
        + "The specified type 'int' conflicts with the type of the default value "
        + r"'hello' \(type 'str'\)"
    )
    with pytest.raises(Exception, match=expected_err_msg):

        @Source.supports_config(Parameter("conflicting_types", type=int, default="hello"))
        class _(Source.Source):
            pass
| 31.4 | 113 | 0.656476 | 281 | 2,355 | 5.313167 | 0.327402 | 0.044206 | 0.056263 | 0.083724 | 0.379102 | 0.35633 | 0.300067 | 0.300067 | 0.300067 | 0.234427 | 0 | 0.005467 | 0.223355 | 2,355 | 74 | 114 | 31.824324 | 0.810826 | 0.03949 | 0 | 0.339623 | 0 | 0 | 0.220452 | 0.009739 | 0 | 0 | 0 | 0 | 0 | 1 | 0.169811 | false | 0.056604 | 0.075472 | 0.037736 | 0.377358 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
0edc64834d9ac7d861217e389cda5a4bf52a203f | 1,129 | py | Python | musicscore/musicxml/types/complextypes/backup.py | alexgorji/music_score | b4176da52295361f3436826903485c5cb8054c5e | [
"MIT"
] | 2 | 2020-06-22T13:33:28.000Z | 2020-12-30T15:09:00.000Z | musicscore/musicxml/types/complextypes/backup.py | alexgorji/music_score | b4176da52295361f3436826903485c5cb8054c5e | [
"MIT"
] | 37 | 2020-02-18T12:15:00.000Z | 2021-12-13T20:01:14.000Z | musicscore/musicxml/types/complextypes/backup.py | alexgorji/music_score | b4176da52295361f3436826903485c5cb8054c5e | [
"MIT"
] | null | null | null | '''
<xs:complexType name="backup">
<xs:annotation>
<xs:documentation></xs:documentation>
</xs:annotation>
<xs:sequence>
<xs:group ref="duration"/>
<xs:group ref="editorial"/>
</xs:sequence>
</xs:complexType>
'''
from musicscore.dtd.dtd import Sequence, GroupReference, Element
from musicscore.musicxml.groups.common import Editorial
from musicscore.musicxml.elements.note import Duration
from musicscore.musicxml.types.complextypes.complextype import ComplexType
class ComplexTypeBackup(ComplexType):
    """
    The backup and forward elements are required to coordinate multiple voices in one part, including music on multiple
    staves. The backup type is generally used to move between voices and staves. Thus the backup element does not
    include voice or staff elements. Duration values should always be positive, and should not cross measure boundaries
    or mid-measure changes in the divisions value.
    """
    # Content model: a duration followed by the editorial group, mirroring
    # the <xs:sequence> of the "backup" complexType in the MusicXML schema.
    _DTD = Sequence(
        Element(Duration),
        GroupReference(Editorial)
    )

    def __init__(self, tag, *args, **kwargs):
        """Create a backup element with the given XML tag name."""
        super().__init__(tag=tag, *args, **kwargs)
| 35.28125 | 119 | 0.732507 | 139 | 1,129 | 5.884892 | 0.532374 | 0.06846 | 0.080685 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.170948 | 1,129 | 31 | 120 | 36.419355 | 0.873932 | 0.595217 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.090909 | false | 0 | 0.363636 | 0 | 0.636364 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
0edda9355db51eae6f5202748937966f72f31878 | 1,362 | py | Python | personal_ad/advice/converter.py | Sailer43/CSE5914Project | ebb47bff9a6101fac5173b5520e6002563da67d5 | [
"MIT"
] | null | null | null | personal_ad/advice/converter.py | Sailer43/CSE5914Project | ebb47bff9a6101fac5173b5520e6002563da67d5 | [
"MIT"
] | 1 | 2019-10-15T21:48:27.000Z | 2019-10-15T21:48:27.000Z | personal_ad/advice/converter.py | Sailer43/CSE5914Project | ebb47bff9a6101fac5173b5520e6002563da67d5 | [
"MIT"
] | null | null | null | from ibm_watson import TextToSpeechV1, SpeechToTextV1, DetailedResponse
from os import system
from json import loads
class Converter:
    """Thin wrapper around IBM Watson speech-to-text and text-to-speech."""

    # NOTE(review): service API keys are hard-coded in source — they should
    # live in configuration/environment variables, and these ones rotated.
    k_s2t_api_key = "0pxCnJQ_r5Yy3SZDRhYS4XshrTMJyZEsuc45SbBcfGgf"
    k_t2s_api_key = "euoR7ZdLMOBd29wP1fNaZFJsqwKt45TUmwcVwpzbQBcA"
    k_s2t_url = "https://stream.watsonplatform.net/speech-to-text/api"
    k_t2s_url = "https://gateway-wdc.watsonplatform.net/text-to-speech/api"
    k_t2s_voice = "en-US_AllisonVoice"
    k_t2s_format = "audio/webm"
    k_st2_model = "en-US_NarrowbandModel"

    def __init__(self):
        # One Watson client per direction.
        self.s2t = SpeechToTextV1(iam_apikey=self.k_s2t_api_key, url=self.k_s2t_url)
        self.t2s = TextToSpeechV1(iam_apikey=self.k_t2s_api_key, url=self.k_t2s_url)

    def read(self, string: str):
        """Synthesize `string` to speech; returns raw audio bytes (webm)."""
        return self.t2s.synthesize(
            string,
            voice=self.k_t2s_voice,
            accept=self.k_t2s_format
        ).get_result().content

    def listen(self, audio_input):
        """Transcribe `audio_input`.

        Returns (True, transcript) on success, or (False, apology message)
        if recognition fails or yields no usable result.
        """
        try:
            result = self.s2t.recognize(audio_input, model=self.k_st2_model)
            # str() of the DetailedResponse is its JSON payload; parse it
            # and take the top alternative of the first result.
            result = loads(str(result))
            result = result["result"]["results"][0]["alternatives"][0]['transcript']
        except Exception:
            return False, "I don't understand what you are saying."
        return True, str(result)
def main():
    """Placeholder entry point — no standalone behaviour implemented yet."""
    pass
# Script entry point.
if __name__ == '__main__':
    main()
| 30.954545 | 84 | 0.679883 | 176 | 1,362 | 4.960227 | 0.448864 | 0.036655 | 0.036655 | 0.02291 | 0.032073 | 0 | 0 | 0 | 0 | 0 | 0 | 0.033645 | 0.214391 | 1,362 | 43 | 85 | 31.674419 | 0.782243 | 0 | 0 | 0 | 0 | 0 | 0.240822 | 0.080029 | 0 | 0 | 0 | 0 | 0 | 1 | 0.125 | false | 0.03125 | 0.09375 | 0.03125 | 0.5625 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
0edddd954e6572bd2613d0926da19b7e62f01353 | 346 | py | Python | torrents/migrations/0011_auto_20190223_2345.py | 2600box/harvest | 57264c15a3fba693b4b58d0b6d4fbf4bd5453bbd | [
"Apache-2.0"
] | 9 | 2019-03-26T14:50:00.000Z | 2020-11-10T16:44:08.000Z | torrents/migrations/0011_auto_20190223_2345.py | 2600box/harvest | 57264c15a3fba693b4b58d0b6d4fbf4bd5453bbd | [
"Apache-2.0"
] | 22 | 2019-03-02T23:16:13.000Z | 2022-02-27T10:36:36.000Z | torrents/migrations/0011_auto_20190223_2345.py | 2600box/harvest | 57264c15a3fba693b4b58d0b6d4fbf4bd5453bbd | [
"Apache-2.0"
] | 5 | 2019-04-24T00:51:30.000Z | 2020-11-06T18:31:49.000Z | # Generated by Django 2.1.7 on 2019-02-23 23:45
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('torrents', '0010_auto_20190223_0326'),
]
operations = [
migrations.AlterModelOptions(
name='realm',
options={'ordering': ('name',)},
),
]
| 19.222222 | 48 | 0.586705 | 35 | 346 | 5.714286 | 0.828571 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.125 | 0.283237 | 346 | 17 | 49 | 20.352941 | 0.681452 | 0.130058 | 0 | 0 | 1 | 0 | 0.160535 | 0.076923 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.090909 | 0 | 0.363636 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
0ee18e4216ec08fa76991908f8a448c6f9b7427c | 2,147 | py | Python | widgets/ui_ShowResultDialog.py | JaySon-Huang/SecertPhotos | e741cc26c19a5b249d45cc70959ac6817196cb8a | [
"MIT"
] | null | null | null | widgets/ui_ShowResultDialog.py | JaySon-Huang/SecertPhotos | e741cc26c19a5b249d45cc70959ac6817196cb8a | [
"MIT"
] | 3 | 2015-05-19T08:43:46.000Z | 2015-06-10T17:55:28.000Z | widgets/ui_ShowResultDialog.py | JaySon-Huang/SecertPhotos | e741cc26c19a5b249d45cc70959ac6817196cb8a | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'src/ui_ShowResultDialog.ui'
#
# Created: Sat May 16 17:05:43 2015
# by: PyQt5 UI code generator 5.4
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_Dialog(object):
    """pyuic5-generated layout: an ImageLabel above a horizontally centred
    Save button. Regenerate from the .ui file rather than editing by hand.
    """

    def setupUi(self, Dialog):
        Dialog.setObjectName("Dialog")
        Dialog.resize(400, 300)
        self.verticalLayout = QtWidgets.QVBoxLayout(Dialog)
        self.verticalLayout.setObjectName("verticalLayout")
        # Custom (promoted) widget; its class is imported at module end.
        self.lb_image = ImageLabel(Dialog)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Expanding)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.lb_image.sizePolicy().hasHeightForWidth())
        self.lb_image.setSizePolicy(sizePolicy)
        self.lb_image.setMinimumSize(QtCore.QSize(100, 100))
        self.lb_image.setAlignment(QtCore.Qt.AlignCenter)
        self.lb_image.setObjectName("lb_image")
        self.verticalLayout.addWidget(self.lb_image)
        # Button row: stretch | Save | stretch, to centre the button.
        self.hLayout = QtWidgets.QHBoxLayout()
        self.hLayout.setObjectName("hLayout")
        spacerItem = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
        self.hLayout.addItem(spacerItem)
        self.btn_save = QtWidgets.QPushButton(Dialog)
        self.btn_save.setObjectName("btn_save")
        self.hLayout.addWidget(self.btn_save)
        spacerItem1 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
        self.hLayout.addItem(spacerItem1)
        self.verticalLayout.addLayout(self.hLayout)

        self.retranslateUi(Dialog)
        QtCore.QMetaObject.connectSlotsByName(Dialog)

    def retranslateUi(self, Dialog):
        # Installs the translatable UI strings.
        _translate = QtCore.QCoreApplication.translate
        Dialog.setWindowTitle(_translate("Dialog", "Dialog"))
        self.lb_image.setText(_translate("Dialog", "Image Label"))
        self.btn_save.setText(_translate("Dialog", "Save it"))
from widgets.ImageLabel import ImageLabel
| 43.816327 | 115 | 0.72054 | 230 | 2,147 | 6.63913 | 0.404348 | 0.041257 | 0.057629 | 0.074656 | 0.160445 | 0.128356 | 0.128356 | 0.128356 | 0.128356 | 0.128356 | 0 | 0.023177 | 0.17606 | 2,147 | 48 | 116 | 44.729167 | 0.840023 | 0.104797 | 0 | 0 | 1 | 0 | 0.04441 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.057143 | false | 0 | 0.057143 | 0 | 0.142857 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
0ee1c3866e5f2d77866339896a7b340616b1337d | 414 | py | Python | Python tests/dictionaries.py | Johnny-QA/Python_training | a15de68195eb155c99731db3e4ff1d9d75681752 | [
"Apache-2.0"
] | null | null | null | Python tests/dictionaries.py | Johnny-QA/Python_training | a15de68195eb155c99731db3e4ff1d9d75681752 | [
"Apache-2.0"
] | null | null | null | Python tests/dictionaries.py | Johnny-QA/Python_training | a15de68195eb155c99731db3e4ff1d9d75681752 | [
"Apache-2.0"
] | null | null | null | my_set = {1, 3, 5}
my_dict = {'name': 'Jose', 'age': 90}
another_dict = {1: 15, 2: 75, 3: 150}
lottery_players = [
{
'name': 'Rolf',
'numbers': (13, 45, 66, 23, 22)
},
{
'name': 'John',
'numbers': (14, 56, 80, 23, 22)
}
]
universities = [
{
'name': 'Oxford',
'location': 'UK'
},
{
'name': 'MIT',
'location': 'US'
}
] | 16.56 | 39 | 0.398551 | 46 | 414 | 3.5 | 0.73913 | 0.049689 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.135659 | 0.376812 | 414 | 25 | 40 | 16.56 | 0.488372 | 0 | 0 | 0 | 0 | 0 | 0.187952 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
0ee4cfc2dd5204b72c6c610aac6abe376e79a7c9 | 3,765 | py | Python | 3-functions/pytest-exercises/test_functions.py | BaseCampCoding/python-fundamentals | 3804c07841d6604b1e5a1c15126b3301aa8ae306 | [
"MIT"
] | null | null | null | 3-functions/pytest-exercises/test_functions.py | BaseCampCoding/python-fundamentals | 3804c07841d6604b1e5a1c15126b3301aa8ae306 | [
"MIT"
] | 1 | 2018-07-18T18:01:22.000Z | 2019-06-14T15:06:28.000Z | 3-functions/pytest-exercises/test_functions.py | BaseCampCoding/python-fundamentals | 3804c07841d6604b1e5a1c15126b3301aa8ae306 | [
"MIT"
] | null | null | null | import functions
from pytest import approx
from bcca.test import should_print
def test_add_em_up():
    # Sums its three arguments.
    assert functions.add_em_up(1, 2, 3) == 6
    assert functions.add_em_up(4, 5, 6) == 15
def test_sub_sub_hubbub():
    # Left-to-right subtraction: 1 - 2 - 3 == -4.
    assert functions.sub_sub_hubbub(1, 2, 3) == -4
def test_square_area():
    # Despite the name, takes independent width and height (rectangle area).
    assert functions.square_area(5, 5) == 25
    assert functions.square_area(3, 5) == 15
    assert functions.square_area(2, 2) == 4
def test_circle_area():
    # Expected values use pi ~= 3.14, hence approx().
    assert functions.circle_area(1) == approx(3.14)
    assert functions.circle_area(5) == approx(78.5)
def test_kilometers_to_miles():
    # Conversion factor 0.6214 miles per kilometre.
    assert functions.kilometers_to_miles(1) == approx(0.6214)
    assert functions.kilometers_to_miles(.5) == approx(0.3107)
    assert functions.kilometers_to_miles(0) == approx(0.0)
    assert functions.kilometers_to_miles(40) == approx(24.855999999999998)
@should_print
def test_sales_tax_1(output):
    # should_print captures stdout into `output` for comparison.
    # State tax 4%, county tax 2% of the purchase amount.
    functions.sales_tax(1)
    assert output == """
Purchase Amount: 1
State Sales Tax: 0.04
County Sales Tax: 0.02
Total Sales Tax: 0.06
Total Cost: 1.06
"""
@should_print
def test_sales_tax_99_99(output):
    # Float arithmetic artefacts (105.98939999999999) are expected verbatim.
    functions.sales_tax(99.99)
    assert output == """
Purchase Amount: 99.99
State Sales Tax: 3.9996
County Sales Tax: 1.9998
Total Sales Tax: 5.9994
Total Cost: 105.98939999999999
"""
@should_print
def test_sales_tax_5_95(output):
    # Float arithmetic artefacts in the tax lines are expected verbatim.
    functions.sales_tax(5.95)
    assert output == """
Purchase Amount: 5.95
State Sales Tax: 0.23800000000000002
County Sales Tax: 0.11900000000000001
Total Sales Tax: 0.35700000000000004
Total Cost: 6.307
"""
def test_min_insurance():
    # Minimum insurance is 80% of the replacement cost (also for 0/negative).
    assert functions.min_insurance(100000) == approx(80000.0)
    assert functions.min_insurance(123456789) == approx(98765431.2)
    assert functions.min_insurance(0) == approx(0.0)
    assert functions.min_insurance(-54317890) == approx(-43454312.0)
@should_print
def test_property_tax_10000(output):
    # Assessment is 60% of value; tax is 0.64% of the assessment.
    functions.property_tax(10000)
    assert output == '''
Assessment Value: 6000.0
Property Tax: 38.4
'''
@should_print
def test_property_tax_99999_95(output):
    # Float arithmetic artefacts are expected verbatim.
    functions.property_tax(99999.95)
    assert output == '''
Assessment Value: 59999.969999999994
Property Tax: 383.999808
'''
def test_bmi():
    # BMI = weight * 703 / height**2 (pounds and inches).
    assert functions.bmi(160, 67) == approx(25.05680552)
    assert functions.bmi(200, 72) == approx(27.12191358)
    assert functions.bmi(120, 60) == approx(23.43333333)
def test_calories():
    # calories = fat * 9 + carbs * 4.
    assert functions.calories(5, 20) == 125
    assert functions.calories(1, 1) == 13
def test_earnings():
    # The expected values imply per-unit rates of 15, 12 and 9 for the three
    # arguments respectively — TODO confirm against functions.earnings.
    assert functions.earnings(100, 100, 100) == 3600
    assert functions.earnings(50, 75, 100) == 2550
    assert functions.earnings(0, 1000, 79) == 12711
@should_print
def test_paint_job_estimator(output):
    # Expected values imply 115 sq ft per gallon, 8 labour hours per gallon,
    # and $20/hour labour; the second argument is the paint price per gallon.
    functions.paint_job_estimator(50, 10)
    assert output == '''
Gallons of paint required: 0.43478260869565216
Hours of labor required: 3.4782608695652173
Cost of paint: 4.3478260869565215
Cost of labor: 69.56521739130434
Total Cost: 73.91304347826086
'''
@should_print
def test_paint_job_estimator_2(output):
    # Larger job; float arithmetic artefacts are expected verbatim.
    functions.paint_job_estimator(750, 15.95)
    assert output == '''
Gallons of paint required: 6.521739130434782
Hours of labor required: 52.17391304347826
Cost of paint: 104.02173913043477
Cost of labor: 1043.4782608695652
Total Cost: 1147.5
'''
@should_print
def test_monthly_sales_tax(output):
    # State tax 4%, county tax 2% of monthly sales.
    functions.monthly_sales_tax(123456.79)
    assert output == '''
Monthly sales: 123456.79
State sales tax: 4938.2716
County sales tax: 2469.1358
Total sales tax: 7407.4074
'''
@should_print
def test_monthly_sales_tax_2(output):
    # Float arithmetic artefacts (259294.03260000004) are expected verbatim.
    functions.monthly_sales_tax(4321567.21)
    assert output == '''
Monthly sales: 4321567.21
State sales tax: 172862.6884
County sales tax: 86431.3442
Total sales tax: 259294.03260000004
'''
| 22.957317 | 74 | 0.733068 | 554 | 3,765 | 4.808664 | 0.263538 | 0.075075 | 0.052553 | 0.067568 | 0.266141 | 0.145646 | 0.051051 | 0 | 0 | 0 | 0 | 0.204895 | 0.153519 | 3,765 | 163 | 75 | 23.09816 | 0.631001 | 0 | 0 | 0.228814 | 0 | 0 | 0.288712 | 0 | 0 | 0 | 0 | 0 | 0.279661 | 1 | 0.152542 | false | 0 | 0.025424 | 0 | 0.177966 | 0.084746 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
0ee6ceb6e274923689476909061cf2ae7181004e | 1,555 | py | Python | Thesis/load/runRiakLoads.py | arnaudsjs/YCSB-1 | dc557d209244df72d68c9cb0a048d54e7bd72637 | [
"Apache-2.0"
] | null | null | null | Thesis/load/runRiakLoads.py | arnaudsjs/YCSB-1 | dc557d209244df72d68c9cb0a048d54e7bd72637 | [
"Apache-2.0"
] | null | null | null | Thesis/load/runRiakLoads.py | arnaudsjs/YCSB-1 | dc557d209244df72d68c9cb0a048d54e7bd72637 | [
"Apache-2.0"
] | null | null | null | import sys;
from Thesis.load.loadBenchmark import runLoadBenchmarkAsBatch;
from Thesis.cluster.RiakCluster import RiakCluster;
# YCSB driver binding names for the two Riak modes.
NORMAL_BINDING = 'riak';
CONSISTENCY_BINDING = 'riak_consistency';
# Nodes that make up the Riak cluster under test.
IPS_IN_CLUSTER = ['172.16.33.14', '172.16.33.15', '172.16.33.16', '172.16.33.17', '172.16.33.18'];
def main():
    """Parse the command line and launch the Riak load benchmark batch.

    Positional arguments (see printUsageAndExit): workload file, result
    directory, runtime in minutes, then comma-separated lists of ops/sec,
    thread counts and machine counts; an optional comma-separated list of
    remote YCSB nodes may follow.
    """
    if len(sys.argv) < 7:
        printUsageAndExit()
    workload_file = sys.argv[1]
    result_dir = sys.argv[2]
    runtime_minutes = int(sys.argv[3])
    ops_per_sec_list = sys.argv[4].split(',')
    thread_counts = sys.argv[5].split(',')
    machine_counts = sys.argv[6].split(',')
    remote_nodes = sys.argv[7].split(',') if len(sys.argv) >= 8 else []
    cluster = RiakCluster(NORMAL_BINDING, CONSISTENCY_BINDING, IPS_IN_CLUSTER)
    runLoadBenchmarkAsBatch(cluster, remote_nodes, workload_file,
                            runtime_minutes, result_dir,
                            ops_per_sec_list, thread_counts, machine_counts)
def printUsageAndExit():
print 'usage: binary <path workload file> <result dir> <runtime benchmark> <list of #ops> <list of #threads> <list of #machines> [<list remote ycsb nodes>]';
exit();
# NOTE(review): these module-level statements run a hard-coded benchmark
# (one remote YCSB node, fixed workload and output paths) at import time
# instead of calling main() below -- presumably debug leftovers; confirm
# before reusing this module.
cluster = RiakCluster(NORMAL_BINDING, CONSISTENCY_BINDING, IPS_IN_CLUSTER);
runLoadBenchmarkAsBatch(cluster, ['172.16.33.10'], '/root/YCSB/workloads/workload_load',
                        3, '/root/YCSB/loads/riak',
                        ['1000000000'], ['1'], ['1']);
# main(); | 42.027027 | 161 | 0.664309 | 165 | 1,555 | 6.175758 | 0.424242 | 0.061825 | 0.041217 | 0.027478 | 0.178606 | 0.178606 | 0.178606 | 0.178606 | 0.178606 | 0.178606 | 0 | 0.060317 | 0.189711 | 1,555 | 37 | 162 | 42.027027 | 0.748413 | 0.004502 | 0 | 0.066667 | 0 | 0.033333 | 0.201034 | 0.035553 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.1 | null | null | 0.1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
0eeba77c6034df540d6e02d1c1935e84c28bdcd9 | 10,427 | py | Python | tools/archive/create_loadable_configs.py | madelinemccombe/iron-skillet | f7bb805ac5ed0f2b44e4b438f8c021eaf2f5c66b | [
"MIT"
] | null | null | null | tools/archive/create_loadable_configs.py | madelinemccombe/iron-skillet | f7bb805ac5ed0f2b44e4b438f8c021eaf2f5c66b | [
"MIT"
] | null | null | null | tools/archive/create_loadable_configs.py | madelinemccombe/iron-skillet | f7bb805ac5ed0f2b44e4b438f8c021eaf2f5c66b | [
"MIT"
] | null | null | null | # Copyright (c) 2018, Palo Alto Networks
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
# Author: Scott Shoaf <sshoaf@paloaltonetworks.com>
'''
Palo Alto Networks create_loadable_configs.py
Provides rendering of configuration templates with user defined values
Output is a set of loadable full configurations and set commands for Panos and Panorama
Edit the config_variables.yaml values and then run the script
This software is provided without support, warranty, or guarantee.
Use at your own risk.
'''
import datetime
import os
import shutil
import sys
import time
import getpass
import oyaml
from jinja2 import Environment, FileSystemLoader
from passlib.hash import des_crypt
from passlib.hash import md5_crypt
from passlib.hash import sha256_crypt
from passlib.hash import sha512_crypt
defined_filters = ['md5_hash', 'des_hash', 'sha512_hash']
def myconfig_newdir(myconfigdir_name, foldertime):
    '''
    create a new main loadable_configs folder if required then new subdirectories for configs

    :param myconfigdir_name: prefix folder name from the my_variables.py file
    :param foldertime: datetime when script run; to be used as suffix of folder name
    :return: the myconfigdir full path name

    NOTE(review): also reads the module-level global ``config_type`` (set by
    the loop under __main__) to decide which subdirectory to create --
    confirm before calling this from any other context.
    '''
    # get the full path to the config directory we want (panos / panorama)
    myconfigpath = os.path.abspath(os.path.join('..', 'loadable_configs'))
    if os.path.isdir(myconfigpath) is False:
        os.mkdir(myconfigpath, mode=0o755)
        print('created new loadable config directory')
    # check that configs folder exists and if not create a new one
    # then create snippets and full sub-directories
    myconfigdir = '{0}/{1}-{2}'.format(myconfigpath, myconfigdir_name, foldertime)
    if os.path.isdir(myconfigdir) is False:
        os.mkdir(myconfigdir, mode=0o755)
        print('\ncreated new archive folder {0}-{1}'.format(myconfigdir_name, foldertime))
    # per-run subdirectory for the current config type (global, see note above)
    if os.path.isdir('{0}/{1}'.format(myconfigdir, config_type)) is False:
        os.mkdir('{0}/{1}'.format(myconfigdir, config_type))
        print('created new subdirectories for {0}'.format(config_type))
    return myconfigdir
def create_context(config_var_file):
    """Load the YAML variables file and flatten it into a name->value dict.

    :param config_var_file: path to the config_variables.yaml file
    :return: dict mapping each variable's 'name' to its 'value', used as
        the jinja2 render context
    """
    try:
        with open(config_var_file, 'r') as var_metadata:
            raw = oyaml.safe_load(var_metadata.read())
    except IOError as ioe:
        # Nothing can be rendered without the variables file: abort the run.
        print(f'Could not open metadata file {config_var_file}')
        print(ioe)
        sys.exit()
    # Each entry under 'variables' is a mapping with 'name' and 'value' keys.
    return {entry['name']: entry['value'] for entry in raw['variables']}
def template_render(filename, template_path, render_type, context):
    """Render one jinja2 template using values from config_variables.yaml.

    :param filename: name of the template file
    :param template_path: directory holding the per-render-type subfolders
    :param render_type: subfolder name ('full' or set-command output)
    :param context: dict of variable names to values for the render
    :return: the rendered template text
    """
    print('..creating template for {0}'.format(filename))
    template_dir = '{0}/{1}'.format(template_path, render_type)
    env = Environment(loader=FileSystemLoader(template_dir))
    # Register the custom password-hash filters (defined later in this
    # module) so templates can emit phash values.
    for filter_name, filter_func in (('md5_hash', md5_hash),
                                     ('des_hash', des_hash),
                                     ('sha512_hash', sha512_hash)):
        env.filters[filter_name] = filter_func
    return env.get_template(filename).render(context)
def template_save(snippet_name, myconfigdir, config_type, element):
    """Write a rendered config into the output directory and archive the
    variables file used for the render.

    :param snippet_name: name of the output file
    :param myconfigdir: path to the per-run output directory
    :param config_type: e.g. panos or panorama; selects the subdirectory
    :param element: rendered configuration text to write
    :return: None
    """
    print('..saving template for {0}'.format(snippet_name))
    out_path = '{0}/{1}/{2}'.format(myconfigdir, config_type, snippet_name)
    with open(out_path, 'w') as configfile:
        configfile.write(element)
    # Keep a copy of the variables file next to the rendered configs so each
    # run's inputs are archived with its outputs (copied only once per run).
    var_file = 'loadable_config_vars/config_variables.yaml'
    destination = '{0}/{1}'.format(myconfigdir, var_file)
    if not os.path.isfile(destination):
        shutil.copy(var_file, destination)
# define functions for custom jinja filters
def md5_hash(txt):
    """Jinja filter: salted MD5-crypt hash of *txt*.

    :param txt: plaintext secret to hash
    :return: crypt-format hash string, suitable for a phash field in the
        rendered configurations
    """
    return md5_crypt.hash(txt)
def des_hash(txt):
    """Jinja filter: salted DES-crypt hash of *txt*.

    :param txt: plaintext secret to hash
    :return: crypt-format hash string, suitable for a phash field in the
        rendered configurations
    """
    return des_crypt.hash(txt)
def sha256_hash(txt):
    """Salted SHA256-crypt hash of *txt*.

    Defined alongside the registered filters for completeness; see
    defined_filters for which filters template_render installs.

    :param txt: plaintext secret to hash
    :return: crypt-format hash string for a phash field
    """
    return sha256_crypt.hash(txt)
def sha512_hash(txt):
    """Jinja filter: salted SHA512-crypt hash of *txt*.

    :param txt: plaintext secret to hash
    :return: crypt-format hash string, suitable for a phash field in the
        rendered configurations
    """
    return sha512_crypt.hash(txt)
def replace_variables(config_type, render_type, input_var):
    '''
    get the input variables and render the output configs with jinja2
    inputs are read from the template directory and output to my_config

    :param config_type: panos or panorama to read/write to the respective directories
    :param render_type: 'full' (xml config) or 'set_commands' (conf file)
    :param input_var: dict of user-supplied values (output_dir, archive_time,
        admin credentials); merged over the yaml-file values
    :raises ValueError: if render_type is not a recognized value
    '''
    config_variables = 'config_variables.yaml'
    # create dict of values for the jinja template render
    context = create_context(config_variables)
    # user-supplied values take precedence over the yaml-file values
    for snippet_var in input_var:
        context[snippet_var] = input_var[snippet_var]
    # get the full path to the output directory we want (panos / panorama)
    template_path = os.path.abspath(os.path.join('..',
                                                 'templates', config_type))
    # append to the sys path for module lookup
    sys.path.append(template_path)
    # output subdir located in loadable_configs dir
    myconfig_path = myconfig_newdir(input_var['output_dir'], input_var['archive_time'])
    # render full and set conf files
    print('\nworking with {0} config template'.format(render_type))
    if render_type == 'full':
        filename = 'iron_skillet_{0}_full.xml'.format(config_type)
    elif render_type == 'set_commands':
        filename = 'iron_skillet_{0}_full.conf'.format(config_type)
    else:
        # Fail fast with a clear error instead of a NameError on filename.
        raise ValueError('unknown render_type: {0}'.format(render_type))
    element = template_render(filename, template_path, render_type, context)
    template_save(filename, myconfig_path, config_type, element)
    print('\nconfigs have been created and can be found in {0}'.format(myconfig_path))
    print('along with the metadata values used to render the configs\n')
    return
if __name__ == '__main__':
    # Interactive entry point: gather run-specific values (folder name,
    # superuser credentials), then render every config/render-type combo.
    print('=' * 80)
    print(' ')
    print('Welcome to Iron-Skillet'.center(80))
    print(' ')
    print('=' * 80)
    input_var = {}
    # archive_time used as part of the my_config directory name
    input_var['archive_time'] = datetime.datetime.fromtimestamp(time.time()).strftime('%Y%m%d_%H%M%S')
    print('\ndatetime used for folder creation: {0}\n'.format(input_var['archive_time']))
    # this prompts for the prefix name of the output directory
    input_var['output_dir'] = input('Enter the name of the output directory: ')
    # this prompts for the superuser username to be added into the configuration; no default admin/admin used
    input_var['ADMINISTRATOR_USERNAME'] = input('Enter the superuser administrator account username: ')
    print('\na phash will be created for superuser {0} and added to the config file\n'.format(
        input_var['ADMINISTRATOR_USERNAME']))
    passwordmatch = False
    # prompt for the superuser password to create a phash and store in the my_config files; no default admin/admin
    while passwordmatch is False:
        password1 = getpass.getpass("Enter the superuser administrator account password: ")
        password2 = getpass.getpass("Enter password again to verify: ")
        if password1 == password2:
            input_var['ADMINISTRATOR_PASSWORD'] = password1
            passwordmatch = True
        else:
            print('\nPasswords do not match. Please try again.\n')
    # loop through all config types that have their respective template folders.
    # NOTE: myconfig_newdir reads the loop variable config_type as a
    # module-level global rather than taking it as a parameter.
    for config_type in ['panos', 'panorama']:
        for render_type in ['full', 'set_commands']:
replace_variables(config_type, render_type, input_var) | 38.762082 | 118 | 0.720629 | 1,456 | 10,427 | 5.052198 | 0.225962 | 0.019032 | 0.006525 | 0.012915 | 0.216966 | 0.169657 | 0.143284 | 0.132953 | 0.107123 | 0.107123 | 0 | 0.011043 | 0.201017 | 10,427 | 269 | 119 | 38.762082 | 0.871924 | 0.458905 | 0 | 0.054545 | 0 | 0 | 0.219135 | 0.03357 | 0 | 0 | 0 | 0 | 0 | 1 | 0.081818 | false | 0.118182 | 0.109091 | 0 | 0.272727 | 0.163636 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
0eed163a13b8bf28c8e3cc3018df9acf80f8ef9a | 199 | py | Python | app/apis/__init__.py | FabienArcellier/blueprint-webapp-flask-restx | 84bc9dbe697c4b0f6667d2a2d8144a3f934a307a | [
"MIT"
] | null | null | null | app/apis/__init__.py | FabienArcellier/blueprint-webapp-flask-restx | 84bc9dbe697c4b0f6667d2a2d8144a3f934a307a | [
"MIT"
] | null | null | null | app/apis/__init__.py | FabienArcellier/blueprint-webapp-flask-restx | 84bc9dbe697c4b0f6667d2a2d8144a3f934a307a | [
"MIT"
] | null | null | null | from flask_restx import Api
from app.apis.hello import api as hello
# Top-level flask-restx Api: endpoints are served under /api, and the
# Swagger UI is exposed at /api as well.
api = Api(
    title='api',
    version='1.0',
    description='',
    prefix='/api',
    doc='/api'
)
# Register the hello namespace (imported above) on the Api.
api.add_namespace(hello)
| 14.214286 | 39 | 0.633166 | 29 | 199 | 4.275862 | 0.62069 | 0.145161 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.012821 | 0.21608 | 199 | 13 | 40 | 15.307692 | 0.782051 | 0 | 0 | 0 | 0 | 0 | 0.070352 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.2 | 0 | 0.2 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
0eed507c0a7d908570e5345420f87553a7bbdb5d | 788 | py | Python | main.py | poltavski/social-network-frontend | ccc3410e23e42cfc65efd811aba262ec88163481 | [
"MIT"
] | null | null | null | main.py | poltavski/social-network-frontend | ccc3410e23e42cfc65efd811aba262ec88163481 | [
"MIT"
] | null | null | null | main.py | poltavski/social-network-frontend | ccc3410e23e42cfc65efd811aba262ec88163481 | [
"MIT"
] | null | null | null | from fastapi import FastAPI, Request, Response
from fastapi.responses import HTMLResponse
from fastapi.staticfiles import StaticFiles
from fastapi.templating import Jinja2Templates
from utils import get_page_data, process_initial
import uvicorn
# Application object and Jinja2 template engine; static assets are served
# from ./static under the /static mount.
app = FastAPI()
templates = Jinja2Templates(directory="templates")
app.mount("/static", StaticFiles(directory="static"), name="static")
@app.get("/", response_class=HTMLResponse)
async def home(request: Request):
    """Serve GET /: pass the request (expected to carry cookies) to
    process_initial and return its result as the HTML response."""
    # Expect requests with cookies
    return process_initial(request)
@app.get("/page", response_class=HTMLResponse)
async def page(request: Request):
    """Serve GET /page: pass the request (expected to carry cookies) to
    get_page_data and return its result as the HTML response.

    Renamed from the duplicate ``home`` -- both routes stayed registered
    (the decorator runs before the name is rebound), but the second
    definition silently shadowed the first handler's module-level name.
    """
    # Expect requests with cookies
    return get_page_data(request)
if __name__ == "__main__":
    # Dev entry point: serve on localhost:8050 ("main:app" re-imports this module).
    uvicorn.run("main:app", host="127.0.0.1", port=8050, log_level="info")
| 29.185185 | 74 | 0.769036 | 100 | 788 | 5.89 | 0.44 | 0.074703 | 0.037351 | 0.101868 | 0.278438 | 0.278438 | 0.278438 | 0.278438 | 0.278438 | 0.278438 | 0 | 0.017316 | 0.120558 | 788 | 26 | 75 | 30.307692 | 0.832612 | 0.072335 | 0 | 0.117647 | 0 | 0 | 0.086538 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.352941 | 0 | 0.470588 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
0eed571d3bbd262c5cff9905eccfdacc18b2c6bf | 4,515 | py | Python | Core/Python/create_static_group.py | Ku-Al/OpenManage-Enterprise | 5cc67435d7cedb091edb07311ed9dceeda43277f | [
"Apache-2.0"
] | null | null | null | Core/Python/create_static_group.py | Ku-Al/OpenManage-Enterprise | 5cc67435d7cedb091edb07311ed9dceeda43277f | [
"Apache-2.0"
] | null | null | null | Core/Python/create_static_group.py | Ku-Al/OpenManage-Enterprise | 5cc67435d7cedb091edb07311ed9dceeda43277f | [
"Apache-2.0"
] | null | null | null | #
# Python script using OME API to create a new static group
#
# _author_ = Raajeev Kalyanaraman <Raajeev.Kalyanaraman@Dell.com>
# _version_ = 0.1
#
# Copyright (c) 2020 Dell EMC Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
SYNOPSIS:
Script to create a new static group
DESCRIPTION:
This script exercises the OME REST API to create a new static
group. The user is responsible for adding devices to the
group once the group has been successfully created.
For authentication X-Auth is used over Basic Authentication
Note that the credentials entered are not stored to disk.
EXAMPLE:
python create_static_group.py --ip <xx> --user <username>
--password <pwd> --groupname "Random Test Group"
"""
import json
import argparse
from argparse import RawTextHelpFormatter
import urllib3
import requests
def create_static_group(ip_address, user_name, password, group_name):
    """Authenticate with OME, look up the 'Static Groups' parent group and
    create a new static (MembershipTypeId 12) child group under it.

    :param ip_address: OME appliance IP
    :param user_name: OME account username
    :param password: OME account password
    :param group_name: name for the new group
    All outcomes are reported via print(); nothing is returned.
    NOTE(review): the X-Auth session is never deleted, and certificate
    verification is disabled (verify=False) on all requests.
    """
    try:
        session_url = 'https://%s/api/SessionService/Sessions' % ip_address
        # OData filter: fetch only the built-in 'Static Groups' parent.
        group_url = "https://%s/api/GroupService/Groups?$filter=Name eq 'Static Groups'" % ip_address
        headers = {'content-type': 'application/json'}
        user_details = {'UserName': user_name,
                        'Password': password,
                        'SessionType': 'API'}
        session_info = requests.post(session_url, verify=False,
                                     data=json.dumps(user_details),
                                     headers=headers)
        if session_info.status_code == 201:
            # Reuse the token for all subsequent calls in this session.
            headers['X-Auth-Token'] = session_info.headers['X-Auth-Token']
            response = requests.get(group_url, headers=headers, verify=False)
            if response.status_code == 200:
                json_data = response.json()
                if json_data['@odata.count'] > 0:
                    # Technically there should be only one result in the filter
                    group_id = json_data['value'][0]['Id']
                    # MembershipTypeId 12 marks the new group as static.
                    group_payload = {"GroupModel": {
                        "Name": group_name,
                        "Description": "",
                        "MembershipTypeId": 12,
                        "ParentId": int(group_id)}
                    }
                    create_url = 'https://%s/api/GroupService/Actions/GroupService.CreateGroup' % ip_address
                    create_resp = requests.post(create_url, headers=headers,
                                                verify=False,
                                                data=json.dumps(group_payload))
                    if create_resp.status_code == 200:
                        print("New group created : ID =", create_resp.text)
                    elif create_resp.status_code == 400:
                        print("Failed group creation ...See error info below")
                        print(json.dumps(create_resp.json(), indent=4,
                                         sort_keys=False))
            else:
                print("Unable to retrieve group list from %s" % ip_address)
        else:
            print("Unable to create a session with appliance %s" % ip_address)
    except Exception as error:
        print("Unexpected error:", str(error))
if __name__ == '__main__':
    # The API calls in this script use verify=False, so silence the
    # resulting InsecureRequestWarning noise.
    urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
    parser = argparse.ArgumentParser(description=__doc__, formatter_class=RawTextHelpFormatter)
    parser.add_argument("--ip", "-i", required=True, help="OME Appliance IP")
    parser.add_argument("--user", "-u", required=False,
                        help="Username for OME Appliance", default="admin")
    parser.add_argument("--password", "-p", required=True,
                        help="Password for OME Appliance")
    parser.add_argument("--groupname", "-g", required=True,
                        help="A valid name for the group")
    args = parser.parse_args()
    create_static_group(args.ip, args.user, args.password, args.groupname)
| 44.70297 | 108 | 0.61041 | 519 | 4,515 | 5.181118 | 0.425819 | 0.024544 | 0.013388 | 0.013388 | 0.080327 | 0.027891 | 0.019338 | 0 | 0 | 0 | 0 | 0.009416 | 0.294352 | 4,515 | 100 | 109 | 45.15 | 0.834589 | 0.287265 | 0 | 0.035088 | 0 | 0 | 0.196922 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.017544 | false | 0.087719 | 0.087719 | 0 | 0.105263 | 0.105263 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
0ef391d627e7c29662611237b93dc0cbb0bb55b3 | 1,600 | py | Python | tests/nls_smoother_test.py | sisl/CEEM | 6154587fe3cdb92e8b7f70eedb1262caa1553cc8 | [
"MIT"
] | 5 | 2020-06-21T16:50:42.000Z | 2021-03-14T04:02:01.000Z | tests/nls_smoother_test.py | sisl/CEEM | 6154587fe3cdb92e8b7f70eedb1262caa1553cc8 | [
"MIT"
] | 1 | 2021-03-13T07:46:36.000Z | 2021-03-16T05:14:47.000Z | tests/nls_smoother_test.py | sisl/CEEM | 6154587fe3cdb92e8b7f70eedb1262caa1553cc8 | [
"MIT"
] | 1 | 2021-03-30T12:08:20.000Z | 2021-03-30T12:08:20.000Z | import torch
from ceem.opt_criteria import *
from ceem.systems import LorenzAttractor
from ceem.dynamics import *
from ceem.smoother import *
from ceem import utils
def test_smoother():
    """Regression test: NLSsmoother recovers a noiseless Lorenz trajectory.

    Simulates T steps of a Lorenz attractor, observes it through a random
    2x3 linear map with no added noise, then checks that smoothing from a
    zero initial guess reproduces the true states to ~1e-8.
    """
    utils.set_rng_seed(1)
    torch.set_default_dtype(torch.float64)
    # Classic Lorenz parameters: sigma=10, rho=28, beta=8/3.
    sigma = torch.tensor([10.])
    rho = torch.tensor([28.])
    beta = torch.tensor([8. / 3.])
    C = torch.randn(2, 3)
    dt = 0.04
    sys = LorenzAttractor(sigma, rho, beta, C, dt, method='midpoint')
    # Roll out a single (B=1) trajectory of T states from a random start.
    B = 1
    T = 200
    xs = [torch.randn(B, 1, 3)]
    for t in range(T - 1):
        xs.append(sys.step(torch.tensor([0.] * B), xs[-1]))
    x = torch.cat(xs, dim=1).detach()
    x.requires_grad = True
    y = sys.observe(0., x).detach()
    # y += torch.rand_like(y) * 0.01
    t = torch.stack([torch.arange(T), torch.arange(T)]).to(torch.get_default_dtype())
    # Smooth from an all-zeros initial state estimate.
    x0 = torch.zeros_like(x)
    obscrit = GaussianObservationCriterion(torch.ones(2), t, y)
    dyncrit = GaussianDynamicsCriterion(torch.ones(3), t)
    # Test GroupSOSCriterion
    crit = GroupSOSCriterion([obscrit, dyncrit])
    xsm, metrics = NLSsmoother(x0, crit, sys, solver_kwargs={'verbose': 2, 'tr_rho': 0.})
    err = float((xsm - x).norm())
    assert err < 1e-8, 'Smoothing Error: %.3e' % err
    print('Passed.')
    # Test BlockSparseGroupSOSCriterion
    crit = BlockSparseGroupSOSCriterion([obscrit, dyncrit])
    xsm, metrics = NLSsmoother(torch.zeros_like(x), crit, sys)
    err = float((xsm - x).norm())
    assert err < 1e-8, 'Smoothing Error: %.3e' % err
    print('Passed.')
if __name__ == '__main__':
    # Allow running this test file directly, without pytest.
    test_smoother()
| 23.880597 | 89 | 0.6325 | 219 | 1,600 | 4.515982 | 0.420091 | 0.040445 | 0.042467 | 0.030334 | 0.188069 | 0.11729 | 0.11729 | 0.11729 | 0.11729 | 0.11729 | 0 | 0.031721 | 0.211875 | 1,600 | 66 | 90 | 24.242424 | 0.752577 | 0.054375 | 0 | 0.153846 | 0 | 0 | 0.056329 | 0 | 0 | 0 | 0 | 0 | 0.051282 | 1 | 0.025641 | false | 0.051282 | 0.153846 | 0 | 0.179487 | 0.051282 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
0ef760e6a3a5620b5876eba10c68bc7b0bb1b6c8 | 474 | py | Python | buzzbox/restaurants/migrations/0002_restaurant_description.py | Danielvalev/kutiika | 661b850163de942a137157a97d98d90553861044 | [
"MIT"
] | null | null | null | buzzbox/restaurants/migrations/0002_restaurant_description.py | Danielvalev/kutiika | 661b850163de942a137157a97d98d90553861044 | [
"MIT"
] | null | null | null | buzzbox/restaurants/migrations/0002_restaurant_description.py | Danielvalev/kutiika | 661b850163de942a137157a97d98d90553861044 | [
"MIT"
] | null | null | null | # Generated by Django 3.2.9 on 2021-12-06 10:02
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('restaurants', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='restaurant',
name='description',
field=models.CharField(default='Description', max_length=255, verbose_name='Description'),
preserve_default=False,
),
]
| 23.7 | 102 | 0.620253 | 48 | 474 | 6.020833 | 0.791667 | 0.103806 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.063218 | 0.265823 | 474 | 19 | 103 | 24.947368 | 0.767241 | 0.094937 | 0 | 0 | 1 | 0 | 0.154567 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.076923 | 0 | 0.307692 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
0ef9b198b443266a7fb573c35726d29675e45f68 | 2,561 | py | Python | lib/py/src/Thrift.py | ahfeel/thrift | 3ac3fa6fede4b2446209cfeb6fcae5900da543cc | [
"BSL-1.0"
] | 3 | 2016-02-03T07:28:51.000Z | 2017-02-28T06:20:21.000Z | lib/py/src/Thrift.py | shigin/thrift | 4ca9547ffa73082fc4c3ff349dc23a1fda8dcc48 | [
"BSL-1.0"
] | null | null | null | lib/py/src/Thrift.py | shigin/thrift | 4ca9547ffa73082fc4c3ff349dc23a1fda8dcc48 | [
"BSL-1.0"
] | 8 | 2020-03-12T13:42:59.000Z | 2021-05-27T06:34:33.000Z | # Copyright (c) 2006- Facebook
# Distributed under the Thrift Software License
#
# See accompanying file LICENSE or visit the Thrift site at:
# http://developers.facebook.com/thrift/
class TType:
  """Thrift wire-type codes used when (de)serializing fields."""
  STOP = 0
  VOID = 1
  BOOL = 2
  BYTE = 3
  # I08 is an alias for BYTE (same wire code, 3).
  I08 = 3
  DOUBLE = 4
  I16 = 6
  I32 = 8
  I64 = 10
  STRING = 11
  # UTF7 shares wire code 11 with STRING.
  UTF7 = 11
  STRUCT = 12
  MAP = 13
  SET = 14
  LIST = 15
  UTF8 = 16
  UTF16 = 17
class TMessageType:
  """Codes identifying the kind of a Thrift message on the wire."""
  CALL = 1
  REPLY = 2
  EXCEPTION = 3
class TProcessor:
  """Base class for processors, which work on two protocol streams."""

  def process(self, iprot, oprot):
    """Handle one message read from iprot, writing any reply to oprot.

    Fix: the original signature omitted ``self``, making the method
    uncallable on an instance. The base implementation is a no-op;
    subclasses override it.
    """
    pass
class TException(Exception):
  """Base class for all thrift exceptions.

  Stores the human-readable message on a ``message`` attribute in addition
  to passing it to the standard Exception machinery.
  """

  def __init__(self, message=None):
    Exception.__init__(self, message)
    self.message = message

class TApplicationException(TException):
  """Application level thrift exceptions.

  Raised/serialized when a server hits an error outside the service's
  declared exceptions (unknown method, bad sequence id, ...).
  """

  # Error-type codes carried on the wire in the 'type' (I32) field.
  UNKNOWN = 0
  UNKNOWN_METHOD = 1
  INVALID_MESSAGE_TYPE = 2
  WRONG_METHOD_NAME = 3
  BAD_SEQUENCE_ID = 4
  MISSING_RESULT = 5

  def __init__(self, type=UNKNOWN, message=None):
    TException.__init__(self, message)
    self.type = type

  def __str__(self):
    # An explicit message always wins; otherwise map the type code to a
    # default description.
    # Fix: the constants were referenced as bare names (e.g. UNKNOWN_METHOD),
    # which raises NameError at runtime -- class attributes are not in
    # method scope and must be qualified.
    if self.message:
      return self.message
    elif self.type == TApplicationException.UNKNOWN_METHOD:
      return 'Unknown method'
    elif self.type == TApplicationException.INVALID_MESSAGE_TYPE:
      return 'Invalid message type'
    elif self.type == TApplicationException.WRONG_METHOD_NAME:
      return 'Wrong method name'
    elif self.type == TApplicationException.BAD_SEQUENCE_ID:
      return 'Bad sequence ID'
    elif self.type == TApplicationException.MISSING_RESULT:
      return 'Missing result'
    else:
      return 'Default (unknown) TApplicationException'

  def read(self, iprot):
    """Deserialize this exception's fields from *iprot*: field 1 (STRING)
    is the message, field 2 (I32) the type code; others are skipped."""
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.message = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.I32:
          self.type = iprot.readI32()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize message (field 1, STRING) and type (field 2, I32) to
    *oprot*, omitting fields that are None."""
    oprot.writeStructBegin('TApplicationException')
    if self.message is not None:
      oprot.writeFieldBegin('message', TType.STRING, 1)
      oprot.writeString(self.message)
      oprot.writeFieldEnd()
    if self.type is not None:
      oprot.writeFieldBegin('type', TType.I32, 2)
      oprot.writeI32(self.type)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
| 23.281818 | 62 | 0.635689 | 306 | 2,561 | 5.202614 | 0.398693 | 0.050251 | 0.037688 | 0.03392 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.033386 | 0.263178 | 2,561 | 109 | 63 | 23.495413 | 0.810281 | 0.118313 | 0 | 0.104651 | 0 | 0 | 0.067471 | 0.018767 | 0 | 0 | 0 | 0 | 0 | 1 | 0.069767 | false | 0.011628 | 0 | 0 | 0.511628 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
0efc0f40ba7d7a4e242df39e71061af9c4be7d55 | 4,224 | py | Python | satt/trace/logger/panic.py | jnippula/satt | aff4562b7e94f095d2e13eb10b9ac872484bb5cd | [
"Apache-2.0"
] | 54 | 2016-11-09T13:26:40.000Z | 2019-04-30T16:29:45.000Z | satt/trace/logger/panic.py | jnippula/satt | aff4562b7e94f095d2e13eb10b9ac872484bb5cd | [
"Apache-2.0"
] | 2 | 2016-11-09T13:25:19.000Z | 2017-03-27T04:09:35.000Z | satt/trace/logger/panic.py | jnippula/satt | aff4562b7e94f095d2e13eb10b9ac872484bb5cd | [
"Apache-2.0"
] | 10 | 2016-11-28T07:55:40.000Z | 2019-03-23T12:40:36.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
// Copyright (c) 2015 Intel Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
'''
""" PanicLogger RAM-tracing
"""
import sys
import time
from logger import Logger
class PanicLogger(Logger):
""" Panic logger
"""
def __init__(self, control):
# Base class init call
Logger.__init__(self, control)
# Add default kernel module parameter for RAM-tracing
self._kernel_module_parameters += " trace_method=1 sideband_log_method=1"
# Add more option to command line input
self._parser.add_argument('-p', '--panic', action='store', help='Panic tracing mode: 1=Normal, 2=Hooked(default)',
required=False, default=2)
self._parser.add_argument('-s', '--sideband', action='store', help='Panic tracing mode: 0=Off, 1=On(default)',
required=False, default=1)
self._parser.add_argument('-g', '--gbuffer', action='store', help='Dump trace data to gbuffer: 0=Off, 1=On(default)',
required=False, default=1)
self._parser.add_argument('-u', '--userspace', action='store', help='Exclude user space: 0=Off, 1=On(default)',
required=False, default=1)
self._parser.add_argument('-k', '--kernel', action='store', help='Exclude kernel: 0=Off(default), 1=On',
required=False, default=0)
self._parser.add_argument('-d', '--dump', action='store',
help='Dump kernel and kernel modules for processing: 0=Off, 1=On(default)',
required=False, default=0)
self.args = self._parser.parse_args()
self._kernel_module_parameters += " panic_tracer=" + str(self.args.panic)
self._kernel_module_parameters += " panic_sideband=" + str(self.args.sideband)
self._kernel_module_parameters += " panic_gbuffer=" + str(self.args.gbuffer)
self._kernel_module_parameters += " exclude_userspace=" + str(self.args.userspace)
self._kernel_module_parameters += " exclude_kernel=" + str(self.args.kernel)
def initialize(self):
self._debug_print("PanicLogger::initialize")
# Initialize Logger base class
Logger.initialize(self)
# Call start_tracing earlier to stop execution earlier
self.start_tracing()
def start_tracing(self):
self._debug_print("start_tracing")
trace_name, trace_path = self.get_trace_name("Enter <<trace name>> to start panic tracing? :")
if trace_name:
self.set_trace_path(trace_path, trace_name)
self.get_build_info()
# TODO Problem, there is no Sideband.bin info yet
# Quick Fix
# Start tracing, wait 100ms, Stop tracing, fetch sideband info
Logger.start_tracing(self)
time.sleep(0.2)
Logger.stop_tracing(self)
time.sleep(0.2)
Logger.get_sideband_data(self)
self.dump_kernel()
self.dump_linux_gate()
self.dump_kernel_modules()
Logger.start_tracing(self)
print ""
print "Panic tracing activated"
print "If panic happens, wait 10s and reboot device."
print ""
print "When device boot up run following command:"
print "sat-panic-fetch " + self.trace_name
sys.exit(0)
else:
print "Panic Tracer did not get started"
def stop_tracing(self):
return
def get_data(self):
return
def get_trace_data(self):
return
| 38.054054 | 125 | 0.615057 | 517 | 4,224 | 4.866538 | 0.340426 | 0.033386 | 0.038156 | 0.062003 | 0.202305 | 0.127186 | 0.102544 | 0.080286 | 0.066773 | 0.066773 | 0 | 0.012728 | 0.274621 | 4,224 | 110 | 126 | 38.4 | 0.80842 | 0.083807 | 0 | 0.233333 | 0 | 0 | 0.227642 | 0.013759 | 0 | 0 | 0 | 0.009091 | 0 | 0 | null | null | 0 | 0.05 | null | null | 0.15 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
160368ea260cbc50567d2f17656bb9f30dc2af47 | 3,494 | py | Python | pydbhub/httphub.py | sum3105/pydbhub | 501ea2c0ec7785bc06a38961a1366c3c04d7fabd | [
"MIT"
] | 18 | 2021-06-03T14:27:55.000Z | 2022-02-25T17:55:33.000Z | pydbhub/httphub.py | sum3105/pydbhub | 501ea2c0ec7785bc06a38961a1366c3c04d7fabd | [
"MIT"
] | 3 | 2021-06-20T07:17:51.000Z | 2021-12-10T15:24:19.000Z | pydbhub/httphub.py | sum3105/pydbhub | 501ea2c0ec7785bc06a38961a1366c3c04d7fabd | [
"MIT"
] | 5 | 2021-06-29T09:50:40.000Z | 2021-12-31T12:10:57.000Z | import pydbhub
from typing import Any, Dict, List, Tuple
from json.decoder import JSONDecodeError
import requests
import io
def send_request_json(query_url: str, data: Dict[str, Any]) -> Tuple[List[Any], str]:
    """
    send_request_json sends a request to DBHub.io, formatting the returned result as JSON

    Parameters
    ----------
    query_url : str
        url of the API endpoint
    data : Dict[str, Any]
        data to be processed to the server.

    Returns
    -------
    Tuple[List[Any], str]
        The returned data is
        - a list of JSON object.
        - a string describe error if occurs
    """
    try:
        headers = {'User-Agent': f'pydbhub v{pydbhub.__version__}'}
        response = requests.post(query_url, data=data, headers=headers)
        response.raise_for_status()
        return response.json(), None
    except JSONDecodeError as e:
        return None, e.args[0]
    except TypeError as e:
        return None, e.args[0]
    except requests.exceptions.HTTPError as e:
        try:
            return response.json(), e.args[0]
        except JSONDecodeError:
            return None, e.args[0]
    except requests.exceptions.RequestException as e:
        # Fix: e.args is a tuple and must be indexed, not called -- the
        # original `e.args(0)` raised TypeError and masked the real error.
        cause = e.args[0] if e.args else e
        return None, str(cause)
def send_request(query_url: str, data: Dict[str, Any]) -> Tuple[List[bytes], str]:
    """
    send_request sends a request to DBHub.io and returns the raw response body.

    Parameters
    ----------
    query_url : str
        url of the API endpoint
    data : Dict[str, Any]
        data to be sent to the server.

    Returns
    -------
    Tuple[List[bytes], str]
        The returned data is
        - the raw response body (e.g. a database file) as bytes, None on failure.
        - a string describing the error if one occurred (None on success).
    """
    try:
        headers = {'User-Agent': f'pydbhub v{pydbhub.__version__}'}
        response = requests.post(query_url, data=data, headers=headers)
        response.raise_for_status()
        return response.content, None
    except requests.exceptions.HTTPError as e:
        return None, e.args[0]
    except requests.exceptions.RequestException as e:
        # BUG FIX: e.args is a tuple, not a callable. The original `e.args(0)`
        # raised "TypeError: 'tuple' object is not callable" and masked the
        # real network error.
        return None, str(e.args[0]) if e.args else str(e)
def send_upload(query_url: str, data: Dict[str, Any], db_bytes: io.BufferedReader) -> Tuple[List[Any], str]:
    """
    send_upload uploads a database to DBHub.io.

    Parameters
    ----------
    query_url : str
        url of the API endpoint.
    data : Dict[str, Any]
        data to be sent to the server.
    db_bytes : io.BufferedReader
        A buffered binary stream of the database file.

    Returns
    -------
    Tuple[List[Any], str]
        The returned data is
        - a list of JSON objects (None on failure).
        - a string describing the error if one occurred (None on success).
    """
    try:
        headers = {'User-Agent': f'pydbhub v{pydbhub.__version__}'}
        files = {"file": db_bytes}
        response = requests.post(query_url, data=data, headers=headers, files=files)
        response.raise_for_status()
        if response.status_code != 201:
            # raise_for_status() already handled 4xx/5xx, so this branch only
            # catches unexpected non-201 success codes, which still indicate
            # the upload did not complete as expected.
            try:
                return response.json(), str(response.status_code)
            except JSONDecodeError:
                return None, str(response.status_code)
        return response.json(), None
    except requests.exceptions.HTTPError as e:
        # The server may still have sent a JSON body describing the failure.
        try:
            return response.json(), e.args[0]
        except JSONDecodeError:
            return None, e.args[0]
    except requests.exceptions.RequestException as e:
        # BUG FIX: e.args is a tuple, not a callable. The original `e.args(0)`
        # raised "TypeError: 'tuple' object is not callable" and masked the
        # real network error.
        return None, str(e.args[0]) if e.args else str(e)
| 30.649123 | 108 | 0.61763 | 450 | 3,494 | 4.704444 | 0.188889 | 0.030704 | 0.028342 | 0.039679 | 0.720359 | 0.691545 | 0.675012 | 0.643836 | 0.632026 | 0.576287 | 0 | 0.006339 | 0.277619 | 3,494 | 113 | 109 | 30.920354 | 0.832409 | 0.296508 | 0 | 0.703704 | 0 | 0 | 0.054577 | 0.029049 | 0 | 0 | 0 | 0 | 0 | 1 | 0.055556 | false | 0 | 0.092593 | 0 | 0.425926 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
1608a15c941a14be0a253388b661310efd0d4787 | 2,834 | py | Python | MultirangerTest.py | StuartLiam/DroneNavigationOnboard | 11ac6a301dfc72b15e337ddf09f5ddc79265a03f | [
"MIT"
] | null | null | null | MultirangerTest.py | StuartLiam/DroneNavigationOnboard | 11ac6a301dfc72b15e337ddf09f5ddc79265a03f | [
"MIT"
] | null | null | null | MultirangerTest.py | StuartLiam/DroneNavigationOnboard | 11ac6a301dfc72b15e337ddf09f5ddc79265a03f | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# || ____ _ __
# +------+ / __ )(_) /_______________ _____ ___
# | 0xBC | / __ / / __/ ___/ ___/ __ `/_ / / _ \
# +------+ / /_/ / / /_/ /__/ / / /_/ / / /_/ __/
# || || /_____/_/\__/\___/_/ \__,_/ /___/\___/
#
# Copyright (C) 2017 Bitcraze AB
#
# Crazyflie Python Library
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
"""
Example scipts that allows a user to "push" the Crazyflie 2.0 around
using your hands while it's hovering.
This examples uses the Flow and Multi-ranger decks to measure distances
in all directions and tries to keep away from anything that comes closer
than 0.2m by setting a velocity in the opposite direction.
The demo is ended by either pressing Ctrl-C or by holding your hand above the
Crazyflie.
For the example to run the following hardware is needed:
* Crazyflie 2.0
* Crazyradio PA
* Flow deck
* Multiranger deck
"""
import logging
import sys
import time
import cflib.crtp
from cflib.crazyflie import Crazyflie
from cflib.crazyflie.syncCrazyflie import SyncCrazyflie
from cflib.positioning.motion_commander import MotionCommander
from cflib.utils.multiranger import Multiranger
import matplotlib.pyplot as plt
from matplotlib.pyplot import figure
import matplotlib.patches as patches
# Default Crazyradio URI; the first command line argument overrides it.
URI = 'radio://0/80/2M'
if len(sys.argv) > 1:
    URI = sys.argv[1]
# Only output errors from the logging framework
logging.basicConfig(level=logging.ERROR)
def is_close(range):
    """Return True when `range` is a valid reading closer than MIN_DISTANCE.

    A reading of None (sensor gave no value) is never considered close.
    """
    MIN_DISTANCE = 0.2  # m

    return range is not None and range < MIN_DISTANCE
if __name__ == '__main__':
    # Initialize the low-level drivers (don't list the debug drivers)
    cflib.crtp.init_drivers(enable_debug_driver=False)
    # Front-ranger readings collected during the flight, plotted at the end.
    rangeArray = []
    cf = Crazyflie(rw_cache='./cache')
    with SyncCrazyflie(URI, cf=cf) as scf:
        with MotionCommander(scf) as motion_commander:
            with Multiranger(scf) as multiranger:
                # Start a continuous left turn and sample the front ranger.
                # NOTE(review): only a single sample is taken before the
                # context managers land the drone -- if a scan over the whole
                # turn was intended, this needs a loop. Confirm.
                motion_commander.start_turn_left(90)
                rangeArray.append(multiranger.front)
                time.sleep(0.05)
    plt.plot(rangeArray)
160c8a87b1d001ed3cb1d85873c9a8a8f238d3b2 | 6,537 | py | Python | lessons/sqlite_example/database.py | eliranM98/python_course | d9431dd6c0f27fca8ca052cc2a821ed0b883136c | [
"MIT"
] | 6 | 2019-03-29T06:14:53.000Z | 2021-10-15T23:42:36.000Z | lessons/sqlite_example/database.py | eliranM98/python_course | d9431dd6c0f27fca8ca052cc2a821ed0b883136c | [
"MIT"
] | 4 | 2019-09-06T10:03:40.000Z | 2022-03-11T23:30:55.000Z | lessons/sqlite_example/database.py | eliranM98/python_course | d9431dd6c0f27fca8ca052cc2a821ed0b883136c | [
"MIT"
] | 12 | 2019-06-20T19:34:52.000Z | 2021-10-15T23:42:39.000Z | """
in this example we want to create a user credentials database with:
user_id & password
logger showing connection logs, DB version, errors during fetching & executing
"""
import sqlite3
from lessons.sqlite_example.log import create as create_logger
class Commands:
    """SQL statements used by DataBase; '{}' placeholders are filled via str.format().

    NOTE(review): values are interpolated directly into the SQL text rather
    than bound as parameters, so these commands are unsafe for untrusted
    input (SQL injection).
    """
    create_users_table = '''
    CREATE TABLE IF NOT EXISTS users (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        user_id text,
        password text
    );
    '''
    add_user = 'INSERT INTO users (user_id, password) VALUES (\'{}\', \'{}\');'
    get_users = 'SELECT user_id, password FROM users;'
    get_user_by_user_id = 'SELECT user_id, password FROM users WHERE user_id = \'{}\';'
    get_user_by_id = 'SELECT user_id, password FROM users WHERE id = \'{}\';'''
    get_last_user = 'SELECT user_id, password FROM users ORDER BY ID DESC LIMIT 1'
    drop_table = 'DROP TABLE IF EXISTS {};'
class DataBase:
    """SQLite connection wrapper.

    Opens a connection to the database file and exposes small helpers for
    executing statements, fetching rows and exporting a table to a CSV-style
    file. Errors during execute/fetch are logged and swallowed (the helpers
    then return None).

    :param db_file: database file
    """
    def __init__(self, db_file, log, commands=None):
        """Open the connection and keep a cursor; errors are logged and re-raised."""
        try:
            self.log = log
            self.log.info('connecting to database')
            self.connection = sqlite3.connect(db_file)
            self.cursor = self.connection.cursor()
            self.log.info('connection success')
            self.log.info('sqlite3 version {}'.format(sqlite3.version))
            # Fall back to the module's default command set.
            self.command = Commands if commands is None else commands
        except Exception as e:
            self.log.exception(e)
            raise Exception(e)

    def execute(self, command, *args, **kwargs):
        """Run a single SQL statement; return the cursor, or None on error."""
        try:
            return self.cursor.execute(command)
        except Exception as e:
            self.log.exception(e)

    def fetch(self, command=None, *args, **kw):
        """Optionally execute `command`, then return all rows from the cursor."""
        if command is not None:
            self.execute(command)
        try:
            return self.cursor.fetchall()
        except Exception as e:
            self.log.exception(e)

    def export_from_table_to_file(self, table, file_name, titles, permission='w'):
        """Dump every row of `table` into `file_name` as comma-separated text,
        writing the `titles` header line first."""
        try:
            self.cursor.execute("select * from {}".format(table))
            rows = self.cursor.fetchall()
            with open(file_name, permission) as out:
                out.write(','.join(titles) + '\n')
                for row in rows:
                    out.write(','.join(str(cell) for cell in row) + '\n')
        except Exception as e:
            self.log.exception(e)

    def fetch_log(self, *args, **kw):
        """Like fetch(), but also log every returned row at INFO level."""
        rows = self.fetch(*args, **kw)
        if rows is not None:
            for row in rows:
                self.log.info(row)
        return rows
class DataBaseExtention(DataBase):
    """DataBase with user-credential convenience lookups."""

    def get_user_credentials(self, user=None, id=None):
        """Return the (user_id, password) row for `user`, for `id`, or --
        when neither is given -- for the most recently added user.

        Returns None when the query yields no rows.
        """
        if user is not None:
            query = self.command.get_user_by_user_id.format(user)
        elif id is not None:
            query = self.command.get_user_by_id.format(id)
        else:
            query = self.command.get_last_user
        rows = self.fetch(query)
        if len(rows) > 0:
            return rows[0]
if "__main__" == __name__:
    import os
    # NOTE(review): Windows-style '\\' path separators; this demo script is
    # not portable to POSIX paths as written.
    log_file = os.path.dirname(os.path.abspath(__file__)) + '\\log.txt'
    db_file = os.path.dirname(os.path.abspath(__file__)) + '\\db.db'
    log = create_logger(log_file=log_file)
    database = DataBaseExtention(db_file, log)
    # @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@
    # One-off setup / experiments kept for reference:
    # database.execute(database.command.drop_table.format('users'))
    # database.execute(database.command.create_users_table)
    # database.execute(database.command.add_user.format('cs0008', '123123a'))
    # database.execute(database.command.add_user.format('af0006', '123123a'))
    # database.execute(database.command.add_user.format('jh0003', '123123a'))
    # database.execute(database.command.add_user.format('kb0004', '123123a'))
    # database.execute(database.command.add_user.format('op0001', '123123a'))
    # database.execute(database.command.add_user.format('gv0001', '123123a'))
    # database.execute(database.command.add_user.format('pm0001', '123123a'))
    # database.execute(database.command.add_user.format('ps0001', '123123a'))
    # database.execute(database.command.add_user.format('qa0000', '123123a'))
    # user_credentials = database.get_user_credentials(id='14')
    # database.connection.commit()
    # database.connection.close()
    # print(user_credentials)
    # create a simple database with websites table that includes (
    #     url: varchar(1024),
    #     popularity_score: integer,
    #     monthly_visitations: integer
    # )
    # database.command.create_websites_table = '''
    #     CREATE TABLE IF NOT EXISTS websites (
    #         id INTEGER PRIMARY KEY AUTOINCREMENT,
    #         url TEXT,
    #         popularity_score INTEGER,
    #         monthly_visitations INTEGER
    #     )
    # '''
    # database.command.add_website = 'INSERT INTO websites (url, popularity_score, monthly_visitations) VALUES (\'{}\', \'{}\', \'{}\');'
    # database.execute(database.command.create_websites_table)
    # database.execute(database.command.add_website.format('https://www.google.com', 5, 4000000000))
    # database.execute(database.command.add_website.format('https://www.ynet.com', 3, 5000000))
    # database.execute(database.command.add_website.format('https://www.youtube.com', 6, 1300000000))
    # database.execute(database.command.add_website.format('https://www.python.org', 5, 1000000))
    # database.command.get_site = 'SELECT url, popularity_score, monthly_visitations FROM websites WHERE url = \'{}\';'
    # url, popularity, visitations = database.fetch(database.command.get_site.format('https://www.python.org'))[0]
    #
    # print(url, popularity, visitations)
    # NOTE(review): assumes the 'websites' table already exists in db.db
    # (created by the commented-out block above) -- otherwise the export
    # only logs the error and writes just the header line.
    database.export_from_table_to_file(
        table='websites',
        file_name='exported.csv',
        titles=('id', 'url', 'popularity_score', 'monthly_visitations')
    )
    # database.connection.commit()
    database.connection.close()
| 39.379518 | 137 | 0.621539 | 778 | 6,537 | 5.062982 | 0.203085 | 0.076161 | 0.093425 | 0.121858 | 0.488195 | 0.385885 | 0.331048 | 0.310485 | 0.133029 | 0.048743 | 0 | 0.02949 | 0.24782 | 6,537 | 165 | 138 | 39.618182 | 0.771609 | 0.420223 | 0 | 0.142857 | 0 | 0 | 0.162038 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.071429 | false | 0.071429 | 0.035714 | 0 | 0.27381 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
1612dd8d2c7befa9fffd9b219b0f1e9b1d9948d5 | 508 | py | Python | Dataset/Leetcode/train/7/93.py | kkcookies99/UAST | fff81885aa07901786141a71e5600a08d7cb4868 | [
"MIT"
] | null | null | null | Dataset/Leetcode/train/7/93.py | kkcookies99/UAST | fff81885aa07901786141a71e5600a08d7cb4868 | [
"MIT"
] | null | null | null | Dataset/Leetcode/train/7/93.py | kkcookies99/UAST | fff81885aa07901786141a71e5600a08d7cb4868 | [
"MIT"
] | null | null | null | class Solution:
def XXX(self, x: int) -> int:
def solve(x):
a = list(map(int,str(x)))
p = {}
d=0
for ind, val in enumerate(a):
p[ind] = val
for i, v in p.items():
d += v*(10**i)
if (2**31 - 1>= d >= -(2**31)):
return d
else:
return 0
if x>=0:
return (solve(x))
if x<0:
x = -x
return (-solve(x))
| 24.190476 | 43 | 0.324803 | 65 | 508 | 2.538462 | 0.461538 | 0.109091 | 0.048485 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.054393 | 0.529528 | 508 | 20 | 44 | 25.4 | 0.635983 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.105263 | false | 0 | 0 | 0 | 0.368421 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
1614bfb3f4849c9afe583c49f1da9a5698654285 | 2,648 | py | Python | dist/weewx-4.0.0b3/bin/weewx/junk2.py | v0rts/docker-weewx | 70b2f252051dfead4fcb74e74662b297831e6342 | [
"Apache-2.0"
] | 10 | 2017-01-05T17:30:48.000Z | 2021-09-18T15:04:20.000Z | dist/weewx-4.0.0b3/bin/weewx/junk2.py | v0rts/docker-weewx | 70b2f252051dfead4fcb74e74662b297831e6342 | [
"Apache-2.0"
] | 2 | 2019-07-21T10:48:42.000Z | 2022-02-16T20:36:45.000Z | dist/weewx-4.0.0b3/bin/weewx/junk2.py | v0rts/docker-weewx | 70b2f252051dfead4fcb74e74662b297831e6342 | [
"Apache-2.0"
] | 12 | 2017-01-05T18:50:30.000Z | 2021-10-05T07:35:45.000Z | from __future__ import print_function
import time
import weeutil.weeutil
import weewx.manager
import weewx.xtypes
# Scratch/experiment script: drafts SQL for computing the rate of change of a
# cumulative energy counter (ch8_a_energy2) over a fixed timespan.
archive_sqlite = {'database_name': '/home/weewx/archive/weepwr.sdb', 'driver': 'weedb.sqlite'}
archive_mysql = {'database_name': 'weewx', 'user': 'weewx', 'password': 'weewx', 'driver': 'weedb.mysql'}
# Example aggregate query; built here but never executed below.
sql_str = "SELECT %s(%s), MIN(usUnits), MAX(usUnits) FROM %s " \
          "WHERE dateTime > ? AND dateTime <= ?" % ('avg', 'outTemp', 'archive')
# The second assignment supersedes the first: a 10-minute timespan is used.
timespan = weeutil.weeutil.TimeSpan(1573245000, 1573246800)
timespan = weeutil.weeutil.TimeSpan(1573245000, 1573245000 + 600)
print('timespan=', timespan)
with weewx.manager.Manager.open(archive_sqlite) as db_manager:
    interpolate_dict = {
        'aggregate_type': 'diff',
        'obs_type': 'ch8_a_energy2',
        'table_name': db_manager.table_name,
        'start': timespan.start,
        'stop': timespan.stop,
    }
    # Successive SQL_TEMPLATE assignments below are alternative drafts; only
    # the LAST one is interpolated and executed.
    SQL_TEMPLATE = "SELECT (ch8_a_energy2 - (SELECT ch8_a_energy2 FROM archive WHERE dateTime=%(start)s)) / (%(stop)s - %(start)s) FROM archive WHERE dateTime=%(stop)s;"
    SQL_TEMPLATE = """Select a.dateTime as StartTime
    , b.dateTime as EndTime
    , b.dateTime-a.dateTime as TimeChange
    , b.ch8_a_energy2-a.ch8_a_energy2 as ValueChange
    FROM archive a
    Inner Join archive b ON b.dateTime>=1573245000 AND b.dateTime<=(1573245000 + 600)"""
    SQL_TEMPLATE = """Select a.dateTime as StartTime, b.datetime as EndTime, b.dateTime-a.dateTime as TimeChange, b.ch8_a_energy2-a.ch8_a_energy2 as ValueChange
    FROM archive a, archive b WHERE b.dateTime = (Select MAX(c.dateTime) FROM archive c WHERE c.dateTime<=(1573245000+600)) AND a.dateTime = (SELECT MIN(dateTime) FROM archive WHERE dateTime>=1573245000);"""
    SQL_TEMPLATE = """Select a.dateTime as StartTime, b.datetime as EndTime, b.dateTime-a.dateTime as TimeChange, b.ch8_a_energy2-a.ch8_a_energy2 as ValueChange
    FROM archive a, archive b WHERE b.dateTime = (Select MAX(dateTime) FROM archive WHERE dateTime<=(1573245000+600)) AND a.dateTime = (SELECT MIN(dateTime) FROM archive WHERE dateTime>=1573245000);"""
    SQL_TEMPLATE = "SELECT (b.%(obs_type)s - a.%(obs_type)s) / (b.dateTime-a.dateTime) "\
                   "FROM archive a, archive b "\
                   "WHERE b.dateTime = (SELECT MAX(dateTime) FROM archive WHERE dateTime <= %(stop)s) "\
                   "AND a.dateTime = (SELECT MIN(dateTime) FROM archive WHERE dateTime >= %(start)s);"
    sql_stmt = SQL_TEMPLATE % interpolate_dict
    print(sql_stmt)
    # Execute the final statement and print every returned row.
    with db_manager.connection.cursor() as cursor:
        for row in cursor.execute(sql_stmt):
            print(row)
| 50.923077 | 203 | 0.692976 | 362 | 2,648 | 4.933702 | 0.209945 | 0.073908 | 0.055431 | 0.094065 | 0.532475 | 0.487682 | 0.448488 | 0.448488 | 0.448488 | 0.448488 | 0 | 0.059743 | 0.178248 | 2,648 | 51 | 204 | 51.921569 | 0.761029 | 0.009441 | 0 | 0.05 | 0 | 0.2 | 0.6219 | 0.135063 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.025 | 0.125 | 0 | 0.125 | 0.1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
161a0260062e641dc32fc774ac4b854148c5381e | 3,310 | py | Python | src/requester/py/ElevatorTestCaseList.py | akzare/Elevator_Sys_Design | 2f7d7381d68699515a43ec4cf7a8a8afade726f3 | [
"MIT"
] | 1 | 2020-09-03T06:36:22.000Z | 2020-09-03T06:36:22.000Z | src/requester/py/ElevatorTestCaseList.py | akzare/Elevator_Sys_Design | 2f7d7381d68699515a43ec4cf7a8a8afade726f3 | [
"MIT"
] | null | null | null | src/requester/py/ElevatorTestCaseList.py | akzare/Elevator_Sys_Design | 2f7d7381d68699515a43ec4cf7a8a8afade726f3 | [
"MIT"
] | null | null | null | '''
* @file ElevatorTestCaseList.py
* @author Armin Zare Zadeh
* @date 30 July 2020
* @version 0.1
* @brief Implements a class to hold all the test cases during the program life cycle.
'''
#!/usr/bin/env python3
import sys
import ctypes
import ElevatorConfig as cfg
import ElevatorMsgProtocol as msgProto
class ElevatorTestCaseList:
  '''
  This class builds a test case list out of the configuration
  and holds it during the runtime.

  Each entry of config.test_case['call'] / config.test_case['go'] is an
  indexable record whose fields are consumed positionally:
    [0] tx_node_addr, [1] rx_node_addr, [2] msg_id, [3] msg_class,
    [4] time_tag, [5] floor_num, [6] direction, [7] go_msg_id
  '''
  def __init__(self, config):
    # Configuration object; only its test_case / network / usr_request
    # sections are read here.
    self.config = config
    # Combined list of encoded 'call' and 'go' request packets.
    self.CallGoTCList = []
  def create_testcase_list(self):
    '''
    Creates a test case list out of the configuration
    '''
    # ############################################################
    # Construct 'call' test cases
    for k in self.config.test_case['call'].keys():
      msgHdr = msgProto.MsgHeader(tx_node_addr = self.config.test_case['call'][k][0],
                                  rx_node_addr = self.config.test_case['call'][k][1],
                                  msg_id = self.config.test_case['call'][k][2],
                                  msg_class = self.config.test_case['call'][k][3],
                                  hdr_len = self.config.network['packet_header_len'],
                                  payload_len = self.config.network['packet_payload_req_len'])
      self.CallGoTCList.append(msgProto.EncodeReqPacket(msg_header = msgHdr,
                                                        time_tag = self.config.test_case['call'][k][4],
                                                        req_typ = self.config.usr_request['call'],
                                                        floor_num = self.config.test_case['call'][k][5],
                                                        direction = self.config.test_case['call'][k][6],
                                                        go_msg_id = self.config.test_case['call'][k][7],
                                                        state = msgProto.CallGoState.READY2GO))
    # ############################################################
    # Construct 'go' test cases
    # Unlike 'call' requests, 'go' requests carry no direction or linked
    # go-message (both fixed to 0) and start in the RESET state.
    for k in self.config.test_case['go'].keys():
      msgHdr = msgProto.MsgHeader(tx_node_addr = self.config.test_case['go'][k][0],
                                  rx_node_addr = self.config.test_case['go'][k][1],
                                  msg_id = self.config.test_case['go'][k][2],
                                  msg_class = self.config.test_case['go'][k][3],
                                  hdr_len = self.config.network['packet_header_len'],
                                  payload_len = self.config.network['packet_payload_req_len'])
      self.CallGoTCList.append(msgProto.EncodeReqPacket(msg_header = msgHdr,
                                                        time_tag = self.config.test_case['go'][k][4],
                                                        req_typ = self.config.usr_request['go'],
                                                        floor_num = self.config.test_case['go'][k][5],
                                                        direction = 0,
                                                        go_msg_id = 0,
                                                        state = msgProto.CallGoState.RESET))
| 50.151515 | 105 | 0.459517 | 327 | 3,310 | 4.461774 | 0.30581 | 0.164496 | 0.15353 | 0.197395 | 0.654558 | 0.650446 | 0.583962 | 0.583962 | 0.396162 | 0.309801 | 0 | 0.013205 | 0.405136 | 3,310 | 65 | 106 | 50.923077 | 0.727781 | 0.124773 | 0 | 0.162162 | 0 | 0 | 0.049156 | 0.016141 | 0 | 0 | 0 | 0 | 0 | 1 | 0.054054 | false | 0 | 0.108108 | 0 | 0.189189 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
161dd7d6b32c517702822fdd2b972e9c34a403fe | 8,759 | py | Python | appengine/chromium_build_logs/handler.py | mithro/chromium-infra | d27ac0b230bedae4bc968515b02927cf9e17c2b7 | [
"BSD-3-Clause"
] | 1 | 2018-01-02T05:47:07.000Z | 2018-01-02T05:47:07.000Z | appengine/chromium_build_logs/handler.py | mithro/chromium-infra | d27ac0b230bedae4bc968515b02927cf9e17c2b7 | [
"BSD-3-Clause"
] | null | null | null | appengine/chromium_build_logs/handler.py | mithro/chromium-infra | d27ac0b230bedae4bc968515b02927cf9e17c2b7 | [
"BSD-3-Clause"
] | null | null | null | # Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import appengine_config
import datetime
import json
import logging
import os.path
import pickle
import sys
import urllib
sys.path.append(
os.path.join(os.path.abspath(os.path.dirname(__file__)), 'third_party'))
from google.appengine.ext import blobstore
from google.appengine.ext import db
from google.appengine.ext import deferred
from google.appengine.ext import webapp
from google.appengine.ext.webapp import blobstore_handlers
from google.appengine.ext.webapp import template
from google.appengine.ext.webapp.util import run_wsgi_app
import cloudstorage
import app
import gtest_parser
# pylint: disable=pointless-string-statement
"""When displaying a list of results, how many to display on one page."""
PAGE_SIZE = 100
def _clean_int(value, default):
"""Convert a value to an int, or the default value if conversion fails."""
try:
return int(value)
except (TypeError, ValueError), _:
return default
class MyRequestHandler(webapp.RequestHandler):
  """Base request handler with this application specific helpers."""
  def _render_template(self, name, values):
    """
    Wrapper for template.render that updates response
    and knows where to look for templates.

    Args:
      name: template file name under the templates/ directory.
      values: dict of variables passed to the template.
    """
    self.response.out.write(template.render(
        os.path.join(os.path.dirname(__file__), 'templates', name),
        values))
class StatusReceiverAction(MyRequestHandler):
  """Receives buildbot status pushes and queues them for background work."""
  def post(self):
    # This handler should be extremely fast so that buildbot doesn't fail
    # the push and doesn't get stuck on us. Defer all processing to the
    # background.
    try:
      deferred.defer(app.process_status_push, self.request.body, _queue='fast')
    except Exception:
      # For large requests we have to do it now. We can't return HTTP 500
      # because buildbot will try again.
      app.process_status_push(self.request.body)
class FetchBuildersAction(MyRequestHandler):
  """Task endpoint: queues a background fetch of the builder list."""
  def get(self):
    deferred.defer(app.fetch_builders)
class FetchStepsAction(MyRequestHandler):
  """Task endpoint: queues a background fetch of build steps."""
  def get(self):
    deferred.defer(app.fetch_steps)
class UpdateParsedDataAction(MyRequestHandler):
  """Task endpoint: re-parses every fetched, normally-sized build step."""
  def get(self):
    query = app.BuildStep.all(keys_only=True)
    # Only steps whose logs have been fetched and are small enough to parse.
    query.filter('is_fetched =', True)
    query.filter('is_too_large =', False)
    deferred.defer(app.for_all_entities,
                   query,
                   app.update_parsed_data,
                   None)
class MainAction(MyRequestHandler):
  """Serves the landing page."""
  def get(self):
    self._render_template('main.html', {})
class GTestQueryAction(MyRequestHandler):
  """Shows gtest results whose full name matches the requested query."""
  def get(self):
    gtest_query = self.request.get('gtest_query')
    results = []
    next_cursor = None
    # Only hit the datastore when a query string was actually supplied.
    if gtest_query:
      query = app.GTestResult.all()
      query.filter('fullname =', gtest_query)
      query.order('-time_finished')
      start_cursor = self.request.get('cursor')
      if start_cursor:
        query.with_cursor(start_cursor=start_cursor)
      results = query.fetch(PAGE_SIZE)
      next_cursor = query.cursor()
    self._render_template('query.html', {
      'gtest_query': gtest_query,
      'cursor': next_cursor,
      'gtest_results': results,
    })
class SuppressionQueryAction(MyRequestHandler):
  """Shows memory-suppression results matching the requested name."""
  def get(self):
    query = app.MemorySuppressionResult.all()
    query.filter('name =', self.request.get('suppression_query'))
    query.order('-time_finished')
    # Resume a previous page when the client passed a datastore cursor.
    if self.request.get('cursor'):
      query.with_cursor(start_cursor=self.request.get('cursor'))
    suppression_results = query.fetch(PAGE_SIZE)
    self._render_template('suppression_query.html', {
      'suppression_query': self.request.get('suppression_query'),
      'cursor': query.cursor(),
      'suppression_results': suppression_results,
    })
class UnusedSuppressionsAction(MyRequestHandler):
  """Echoes back posted suppression names whose latest result is >30 days old."""
  def post(self):
    now = datetime.datetime.now()
    cutoff = datetime.timedelta(days=30)
    # Start one query per posted name first, then consume the results.
    pending = []
    for name in self.request.body.splitlines():
      query = app.MemorySuppressionResult.all()
      query.filter('name =', name)
      query.order('-time_finished')
      pending.append(query.run(limit=1))
    for result_iter in pending:
      for result in result_iter:
        if now - result.time_finished > cutoff:
          self.response.out.write(result.name + '\n')
class ListAction(MyRequestHandler):
  """Lists stored build results, newest first, with optional filters."""
  def get(self):
    all_steps = app.BuildStep.all().order('-time_finished')
    # Each query parameter, when present, narrows the result set.
    if self.request.get('buildbot_root'):
      all_steps.filter('buildbot_root =',
                       urllib.unquote(self.request.get('buildbot_root')))
    if self.request.get('builder'):
      all_steps.filter('builder =',
                       urllib.unquote(self.request.get('builder')))
    if self.request.get('step_name'):
      all_steps.filter('step_name =',
                       urllib.unquote(self.request.get('step_name')))
    if self.request.get('status'):
      # A non-numeric status yields None, i.e. no matching steps.
      all_steps.filter('status =', _clean_int(urllib.unquote(
          self.request.get('status')), None))
    # Resume a previous page when the client passed a datastore cursor.
    if self.request.get('cursor'):
      all_steps.with_cursor(start_cursor=self.request.get('cursor'))
    steps = all_steps.fetch(limit=PAGE_SIZE)
    step_names = app.iterate_large_result(app.StepName.all().order('name'))
    self._render_template('list.html', {
      'buildbot_roots': app.BUILDBOT_ROOTS,
      'step_names': step_names,
      'steps': steps,
      'cursor': all_steps.cursor(),
      'filter_buildbot_root': self.request.get('buildbot_root', ''),
      'filter_builder': self.request.get('builder', ''),
      'filter_step_name': self.request.get('step_name', ''),
      'filter_status': self.request.get('status', ''),
    })
class BuildStepJSONAction(MyRequestHandler):
  """Returns build steps as JSON, newest first, in pages of 1000."""
  def get(self):
    all_steps = app.BuildStep.all().order('-time_finished')
    # Resume a previous page when the client passed a datastore cursor.
    if self.request.get('cursor'):
      all_steps.with_cursor(start_cursor=self.request.get('cursor'))
    build_steps = all_steps.fetch(limit=1000)
    json_data = {
      'build_steps': [
        {
          'build_number': bs.build_number,
          'buildbot_root': bs.buildbot_root,
          'builder': bs.builder,
          'status': bs.status,
          'step_number': bs.step_number,
          'step_name': bs.step_name,
          # BigQuery doesn't recognize the T separator, but space works.
          'time_started': bs.time_started.isoformat(sep=' '),
          'time_finished': bs.time_finished.isoformat(sep=' '),
        } for bs in build_steps
      ],
      'cursor': all_steps.cursor(),
    }
    self.response.out.write(json.dumps(json_data))
class SuppressionSummaryAction(MyRequestHandler):
  """Displays summary information about memory suppressions."""
  def get(self):
    # Only 'count' is a recognized sort key; anything else falls back to it.
    sort = 'count'
    if self.request.get('sort') in ('count',):
      sort = self.request.get('sort')
    query = app.MemorySuppressionSummary.all()
    # Summaries are bucketed by month; show the current month only.
    monthly_timestamp = datetime.date.today().replace(day=1)
    query.filter('monthly_timestamp =', monthly_timestamp)
    query.order('monthly_timestamp')
    query.order('-%s' % sort)
    # Resume a previous page when the client passed a datastore cursor.
    if self.request.get('cursor'):
      query.with_cursor(start_cursor=self.request.get('cursor'))
    suppression_summaries = query.fetch(PAGE_SIZE)
    self._render_template('suppression_summary.html', {
      'suppression_summary_query':
          self.request.get('suppression_summary_query'),
      'suppression_summaries': suppression_summaries,
      'cursor': query.cursor(),
      'sort': sort,
    })
class ViewRawLogAction(blobstore_handlers.BlobstoreDownloadHandler):
  """Sends selected log file to the user."""
  def get(self, blobkey): # pylint: disable=arguments-differ
    blob_info = blobstore.BlobInfo.get(urllib.unquote(blobkey))
    # 404 when the blob key does not resolve to a stored log.
    if not blob_info:
      self.error(404)
      return
    self.send_blob(blob_info)
# URL routing table: maps each request path to its handler class.
application = webapp.WSGIApplication(
    [('/', MainAction),
     ('/gtest_query', GTestQueryAction),
     ('/suppression_query', SuppressionQueryAction),
     ('/suppression_summary', SuppressionSummaryAction),
     ('/unused_suppressions', UnusedSuppressionsAction),
     ('/list', ListAction),
     ('/build_step_json', BuildStepJSONAction),
     ('/status_receiver', StatusReceiverAction),
     ('/tasks/fetch_builders', FetchBuildersAction),
     ('/tasks/fetch_steps', FetchStepsAction),
     ('/tasks/update_parsed_data', UpdateParsedDataAction),
     ('/viewlog/raw/(.*)', ViewRawLogAction)])
def main():
  """Configures cloudstorage retry behaviour and starts the WSGI app."""
  my_default_retry_params = cloudstorage.RetryParams(
      initial_delay=0.5,
      max_delay=30.0,
      backoff_factor=2,
      urlfetch_timeout=60)
  cloudstorage.set_default_retry_params(my_default_retry_params)
  run_wsgi_app(application)
if __name__ == '__main__':
  main()
| 31.170819 | 79 | 0.685352 | 1,046 | 8,759 | 5.562141 | 0.269598 | 0.062393 | 0.07219 | 0.030251 | 0.292884 | 0.183912 | 0.153661 | 0.124785 | 0.092472 | 0.092472 | 0 | 0.004077 | 0.187921 | 8,759 | 280 | 80 | 31.282143 | 0.813862 | 0.061765 | 0 | 0.190955 | 0 | 0 | 0.14247 | 0.021035 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.090452 | null | null | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
16214a743fb88fbf7d2c7ed97c9778c2fbeb46d1 | 4,764 | py | Python | tools/pod-xml-to-geojson.py | 24-timmarsseglingarna/app | 0c028bd2eb284c6893cb16dd91bd093b2222338f | [
"Apache-2.0"
] | null | null | null | tools/pod-xml-to-geojson.py | 24-timmarsseglingarna/app | 0c028bd2eb284c6893cb16dd91bd093b2222338f | [
"Apache-2.0"
] | 14 | 2017-08-24T12:46:58.000Z | 2021-04-21T07:56:58.000Z | tools/pod-xml-to-geojson.py | 24-timmarsseglingarna/app | 0c028bd2eb284c6893cb16dd91bd093b2222338f | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# Converts a PoD XML file to a GeoJSON file.
#
# With the --javascript parameter, the generated file is a javascript
# file defining a variable 'basePodSpec'.
#
# Get the PoD XML file from http://dev.24-timmars.nu/PoD/xmlapi_app.php.
import xml.etree.ElementTree as etree
import argparse
import re
import json
import io
import sys
import os.path
import datetime
if sys.version < '3':
import codecs
# points number 9000 and above are not real points; they are used to mark
# area borders
MAXPOINT=8999
def run():
    """Parses the command line, reads the PoD XML and writes the GeoJSON."""
    parser = argparse.ArgumentParser()
    parser.add_argument("-i", "--infile", help="input file")
    parser.add_argument("-o", "--outfile", help="output file")
    parser.add_argument("--id", help="id of terrain")
    parser.add_argument("--javascript", action="store_true")
    args = parser.parse_args()
    tree = etree.parse(args.infile)
    all_points, start_points, turning_points = get_points(tree)
    inshore_legs, offshore_legs = get_legs(tree, all_points)
    # The four feature collections are written in this fixed order.
    output_pod(args.outfile, args.javascript, args.id,
               [('startPoints', start_points),
                ('turningPoints', turning_points),
                ('inshoreLegs', inshore_legs),
                ('offshoreLegs', offshore_legs)])
def output_pod(fname, javascript, id, features):
    """Write the feature collections to `fname`.

    With javascript=True the output is a JS module assigning the spec to
    `basePodSpec`; otherwise it is a plain JSON document.
    """
    # codecs.open is only needed on Python 2; io.open handles Python 3.
    if sys.version < '3':
        out = codecs.open(fname, "w", encoding="utf-8")
    else:
        out = io.open(fname, "w", encoding="utf-8")
    if javascript:
        out.write(u'/* eslint-disable */\n')
        out.write(u'export var basePodSpec = ')
    out.write(u'{"id": %s, ' % id)
    last = len(features) - 1
    for idx, (name, obj) in enumerate(features):
        out.write(u'"%s": {"type": "FeatureCollection",'
                  '"crs": { "type": "name",'
                  '"properties": { "name": "urn:ogc:def:crs:OGC:1.3:CRS84" } },'
                  '"features":' % name)
        out.write(json.dumps(obj, ensure_ascii=False))
        # Close this collection; all but the last are followed by a comma.
        if idx == last:
            out.write(u'}')
        else:
            out.write(u'},\n')
    if javascript:
        out.write(u'};\n')
    else:
        out.write(u'}\n')
def get_points(tree):
    """Extract the points from the parsed PoD XML tree.

    :param tree: ElementTree of the PoD XML document
    :return: (all_points, start_points, turning_points) where all_points
        maps point number -> [lng, lat] and the other two are lists of
        GeoJSON Feature dicts.
    """
    doc = tree.getroot()
    startnumbers = {}
    all_points = {}
    start_points = []
    turning_points = []
    # Collect the numbers of the points that act as start points.
    for n in doc.findall("kretsar/krets/startpoints/number"):
        startnumbers[n.text] = True
    for p in doc.findall("points/point"):
        number = p.find("number").text
        if int(number) > MAXPOINT:
            # points above MAXPOINT mark area borders, not real points
            continue
        name = p.find("name").text
        descr = p.find("descr").text
        lat = p.find("lat").text
        lng = p.find("long").text
        footnote = None
        footnoteelem = p.find("footnote")
        if footnoteelem is not None:
            footnote = footnoteelem.text
        properties = {"number": number,
                      "name": name,
                      "descr": descr}
        if footnote is not None:
            properties["footnote"] = footnote
        coordinates = [float(lng), float(lat)]
        geometry = {"type": "Point",
                    "coordinates": coordinates}
        # The original bound this dict into a one-element tuple via a stray
        # trailing comma and used list.extend(); append the dict directly.
        point = {"type": "Feature",
                 "properties": properties,
                 "geometry": geometry}
        if number in startnumbers:
            start_points.append(point)
        else:
            turning_points.append(point)
        all_points[number] = coordinates
    return all_points, start_points, turning_points
def get_legs(tree, all_points):
    """Extract the legs from the parsed PoD XML tree.

    :param tree: ElementTree of the PoD XML document
    :param all_points: dict mapping point number -> [lng, lat]
    :return: (coast, offshore) lists of GeoJSON LineString Features,
        split on the leg's 'sea' flag.
    """
    doc = tree.getroot()
    coast = []
    offshore = []
    for p in doc.findall("legs/leg"):
        src = p.find("from").text
        dst = p.find("to").text
        if int(src) > MAXPOINT or int(dst) > MAXPOINT:
            # legs touching pseudo points (area borders) are ignored
            continue
        if int(src) < int(dst):
            # since all legs are present twice (in both directions),
            # skip one direction
            continue
        dist = p.find("dist").text
        sea = p.find("sea").text
        addtime = p.find("addtime").text
        if dist is None:
            # The original printed this error and then crashed on
            # float(None); report the broken leg and skip it instead.
            print("** error: no distance: src: %s dst: %s" % (src, dst))
            continue
        properties = {"src": src,
                      "dst": dst,
                      "dist": float(dist)}
        if properties["dist"] == 0 and addtime == "1":
            properties["addtime"] = True
        geometry = {"type": "LineString",
                    "coordinates": [all_points[src], all_points[dst]]}
        # Append the feature dict directly (the original used a stray
        # trailing comma plus list.extend()).
        leg = {"type": "Feature",
               "properties": properties,
               "geometry": geometry}
        if sea == "0":
            coast.append(leg)
        else:
            offshore.append(leg)
    return coast, offshore
# Run the converter only when invoked as a script, not on import.
if __name__ == '__main__':
    run()
| 29.407407 | 79 | 0.553736 | 559 | 4,764 | 4.633274 | 0.316637 | 0.021236 | 0.02471 | 0.023166 | 0.136293 | 0.09305 | 0.037838 | 0 | 0 | 0 | 0 | 0.006959 | 0.306255 | 4,764 | 161 | 80 | 29.590062 | 0.776702 | 0.084173 | 0 | 0.145161 | 1 | 0.008065 | 0.146829 | 0.014476 | 0 | 0 | 0 | 0 | 0 | 1 | 0.032258 | false | 0 | 0.072581 | 0 | 0.120968 | 0.008065 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
16262857a0ab051d70328d47ffe56eedbe48f8d3 | 1,259 | py | Python | tpp/controller/ConversionController.py | pennyarcade/TPPP | 9bb6db774d77f74c54ed2fa004e97c1aa114fff9 | [
"MIT"
] | null | null | null | tpp/controller/ConversionController.py | pennyarcade/TPPP | 9bb6db774d77f74c54ed2fa004e97c1aa114fff9 | [
"MIT"
] | null | null | null | tpp/controller/ConversionController.py | pennyarcade/TPPP | 9bb6db774d77f74c54ed2fa004e97c1aa114fff9 | [
"MIT"
] | null | null | null | """
Implements a non interactive controller to controt non-interactive visualizers.
(i.e. those that are used for converting TPP souce code into another format)
"""
from tpp.FileParser import FileParser
from tpp.controller.TPPController import TPPController
class ConversionController(TPPController):
    """
    Implements a non interactive controller to run non-interactive visualizers.
    (i.e. those that are used for converting TPP source code into another format)
    """

    def __init__(self, input_file, output, visualizer_class):
        """Parse *input_file* and prepare the visualizer.

        :param input_file: path of the TPP source file
        :param output: destination handed to the visualizer
        :param visualizer_class: class of the non-interactive visualizer
        """
        super(ConversionController, self).__init__()
        source_parser = FileParser(input_file)
        self.pages = source_parser.get_pages()
        self.vis = visualizer_class(output)

    def run(self):
        """Feed every line of every page to the visualizer."""
        for page in self.pages:
            end_of_page = False
            while not end_of_page:
                end_of_page = page.is_eop()
                self.vis.visualize(page.next_line(), end_of_page)

    def close(self):
        """Shut down the visualizer."""
        self.vis.close()
| 24.686275 | 81 | 0.590151 | 134 | 1,259 | 5.425373 | 0.440299 | 0.077029 | 0.038514 | 0.068776 | 0.264099 | 0.264099 | 0.162311 | 0.162311 | 0.162311 | 0.162311 | 0 | 0 | 0.317712 | 1,259 | 50 | 82 | 25.18 | 0.846333 | 0.347101 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.06 | 0 | 1 | 0.176471 | false | 0 | 0.117647 | 0 | 0.352941 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
16278cfaea317b80559af8d9f8ed6e412d50c446 | 776 | py | Python | img/autoeditimg.py | schorsche/css3-imageslider | 6d15b2e77f141b8e871bdce2049ee7b2567981fe | [
"MIT"
] | null | null | null | img/autoeditimg.py | schorsche/css3-imageslider | 6d15b2e77f141b8e871bdce2049ee7b2567981fe | [
"MIT"
] | null | null | null | img/autoeditimg.py | schorsche/css3-imageslider | 6d15b2e77f141b8e871bdce2049ee7b2567981fe | [
"MIT"
] | 1 | 2019-02-23T22:54:22.000Z | 2019-02-23T22:54:22.000Z | #!/usr/bin/python2.7
import os
from PIL import Image
DATEI_WEB_GROSSE = 700
def isimg(isitimg):
    """Return True if *isitimg* has a known image file extension
    (case-insensitive .jpg/.png/.gif)."""
    ext = os.path.splitext(isitimg)[1].lower()
    # Tuple membership instead of a chain of '==' comparisons.
    return ext in (".jpg", ".png", ".gif")
def bearbeiten(datei):
    """Resize *datei* to DATEI_WEB_GROSSE pixels wide (keeping the aspect
    ratio), re-save it with maximum quality and keep the original name."""
    img = Image.open(datei)
    # Scale factor that brings the width to DATEI_WEB_GROSSE pixels.
    wrel = DATEI_WEB_GROSSE / float(img.size[0])
    habs = int( float(img.size[1]) * float(wrel) )
    splt = os.path.splitext(datei)
    # Lower-case the extension (e.g. ".JPG" -> ".jpg") for the save call.
    newfilename = splt[0] + splt[1].lower()
    img = img.resize((DATEI_WEB_GROSSE, habs), Image.ANTIALIAS)
    img.save(newfilename, quality=100, optimize=True, progressive=True)
    if newfilename != datei:
        # Rename the freshly written lower-cased file back to the original
        # (mixed-case) name so no extra copy is left behind.
        os.rename(newfilename, datei)
def main():
    # Resize every image file found in the current working directory.
    files = os.listdir('.')
    # Keep only files with a known image extension.
    files = filter(isimg, files)
    for f in files:
        print f
        bearbeiten(f)
if __name__ == '__main__':
main() | 22.171429 | 68 | 0.68299 | 117 | 776 | 4.410256 | 0.487179 | 0.046512 | 0.081395 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.019878 | 0.157216 | 776 | 35 | 69 | 22.171429 | 0.769113 | 0.024485 | 0 | 0 | 0 | 0 | 0.027741 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.076923 | null | null | 0.038462 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
1627fcf089cd43ce83004fbce276962343e2f2c7 | 785 | py | Python | wow/wow.py | brisberg/Kiri-Cogs | 9a5307ff8fbaa5e0560ec518cf26df52347da98d | [
"MIT"
] | null | null | null | wow/wow.py | brisberg/Kiri-Cogs | 9a5307ff8fbaa5e0560ec518cf26df52347da98d | [
"MIT"
] | null | null | null | wow/wow.py | brisberg/Kiri-Cogs | 9a5307ff8fbaa5e0560ec518cf26df52347da98d | [
"MIT"
] | null | null | null | import discord
from discord.ext import commands
class WowCog:
    """Custom Cog that had commands for WoW Memes"""

    def __init__(self, bot):
        self.bot = bot

    async def _play(self, url, ctx):
        """Helper for aliasing Play in the Audio module"""
        audio_cog = self.bot.get_cog('Audio')
        if audio_cog:
            await ctx.invoke(audio_cog.play, url_or_search_terms=url)
        else:
            # Audio cog not loaded: tell the user how to load it.
            await self.bot.say("Audio module required. Load with: {}load audio".format(ctx.prefix))

    @commands.command(pass_context=True, no_pm=True)
    async def flamewreath(self, ctx):
        """I will not move when Flame Wreath is cast!"""
        await self._play("https://www.youtube.com/watch?v=gcA6y7sxKcA", ctx)
def setup(bot):
    # discord.py extension entry point: register the cog with the bot.
    bot.add_cog(WowCog(bot))
| 29.074074 | 99 | 0.64586 | 113 | 785 | 4.371681 | 0.59292 | 0.05668 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.003333 | 0.235669 | 785 | 26 | 100 | 30.192308 | 0.82 | 0.053503 | 0 | 0 | 0 | 0 | 0.147105 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.125 | false | 0.0625 | 0.125 | 0 | 0.375 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
162c59bea2ea2599ffb8f94490a631231802e6ea | 2,272 | py | Python | video_encoding/fields.py | fossabot/django-video-encoding | 16a88c2d61d28e6f5ec2b49956ce356f8c458c67 | [
"BSD-3-Clause"
] | 164 | 2019-07-29T17:59:06.000Z | 2022-03-19T21:36:01.000Z | video_encoding/fields.py | fossabot/django-video-encoding | 16a88c2d61d28e6f5ec2b49956ce356f8c458c67 | [
"BSD-3-Clause"
] | 188 | 2019-03-16T09:53:25.000Z | 2019-07-25T14:57:24.000Z | video_encoding/fields.py | fossabot/django-video-encoding | 16a88c2d61d28e6f5ec2b49956ce356f8c458c67 | [
"BSD-3-Clause"
] | 80 | 2019-08-03T17:49:08.000Z | 2022-02-28T16:56:33.000Z | from django.db.models.fields.files import (FieldFile, ImageField,
ImageFileDescriptor)
from django.utils.translation import ugettext as _
from .backends import get_backend_class
from .files import VideoFile
class VideoFileDescriptor(ImageFileDescriptor):
    # Same behaviour as ImageField's descriptor; kept as a separate class
    # so VideoField can customise it later without touching callers.
    pass
class VideoFieldFile(VideoFile, FieldFile):
    """Field file that drops the cached video info when deleted."""

    def delete(self, save=True):
        # Clear the video info cache before removing the file itself.
        try:
            del self._info_cache
        except AttributeError:
            pass
        super(VideoFieldFile, self).delete(save=save)
class VideoField(ImageField):
    # Model field for video files: reuses ImageField's file handling and
    # width/height bookkeeping, adds an optional duration field plus
    # backend-based system checks.
    attr_class = VideoFieldFile
    descriptor_class = VideoFileDescriptor
    description = _("Video")

    def __init__(self, verbose_name=None, name=None, duration_field=None,
                 **kwargs):
        # duration_field: name of a sibling model field that is kept in
        # sync with the video's duration (units defined by the backend —
        # presumably seconds; confirm against the backend implementation).
        self.duration_field = duration_field
        super(VideoField, self).__init__(verbose_name, name, **kwargs)

    def check(self, **kwargs):
        # NOTE: super(ImageField, self) deliberately skips ImageField's own
        # checks and runs FileField's instead, then adds the backend checks.
        errors = super(ImageField, self).check(**kwargs)
        errors.extend(self._check_backend())
        return errors

    def _check_backend(self):
        # Delegate system checks to the configured encoding backend.
        backend = get_backend_class()
        return backend.check()

    def to_python(self, data):
        # use FileField method (skip ImageField's image validation)
        return super(ImageField, self).to_python(data)

    def update_dimension_fields(self, instance, force=False, *args, **kwargs):
        # Populate width/height via ImageField, then the optional duration
        # field, mirroring Django's dimension-caching behaviour.
        _file = getattr(instance, self.attname)

        # we need a real file
        if not _file._committed:
            return

        # write `width` and `height`
        super(VideoField, self).update_dimension_fields(instance, force,
                                                        *args, **kwargs)
        if not self.duration_field:
            return

        # Nothing to update if we have no file and not being forced to update.
        if not _file and not force:
            return
        # An already-set duration is left alone unless forced.
        if getattr(instance, self.duration_field) and not force:
            return

        # get duration if file is defined
        duration = _file.duration if _file else None

        # update duration
        setattr(instance, self.duration_field, duration)

    def formfield(self, **kwargs):
        # use normal FileFieldWidget for now (skip ImageField's form field)
        return super(ImageField, self).formfield(**kwargs)
| 31.555556 | 78 | 0.636444 | 251 | 2,272 | 5.585657 | 0.358566 | 0.055635 | 0.048502 | 0.035663 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.28257 | 2,272 | 71 | 79 | 32 | 0.860123 | 0.108275 | 0 | 0.088889 | 0 | 0 | 0.007933 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.155556 | false | 0.022222 | 0.088889 | 0.044444 | 0.555556 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
1631aec82f9bb8a63392680178fdfa614b25b1c9 | 10,654 | py | Python | shardDesigner/shardTemplateDir/shardStemDir/log/elast.py | vinci-project/rootShard | 2f6633c7fb1c1b690c0a38ffbb16af0b50d532bb | [
"MIT"
] | null | null | null | shardDesigner/shardTemplateDir/shardStemDir/log/elast.py | vinci-project/rootShard | 2f6633c7fb1c1b690c0a38ffbb16af0b50d532bb | [
"MIT"
] | 7 | 2020-03-02T11:23:41.000Z | 2022-03-11T23:52:51.000Z | shardDesigner/shardTemplateDir/shardStemDir/log/elast.py | vinci-project/rootShard | 2f6633c7fb1c1b690c0a38ffbb16af0b50d532bb | [
"MIT"
] | null | null | null | import elasticsearch
from elasticsearch import Elasticsearch
from elasticsearch import helpers
import time, json, datetime, os
class elalog:
    """Thin wrapper around an Elasticsearch cluster: (re)creates the daily
    indices on construction and offers helpers for saving blocks,
    transactions, balances and client/validator documents."""

    def __init__(self, date):
        """Connect to Elasticsearch and recreate all indices for *date*.

        :param date: date string used to name the daily indices
        """
        # Host/port come from docker-style link variables, with template
        # placeholders as fall-backs.
        es_host = os.getenv("ES_PORT_9200_TCP_ADDR") or '<%ELASTICIP%>'
        es_port = os.getenv("ES_PORT_9200_TCP_PORT") or '9200'
        self.lastDate = date
        self.es = Elasticsearch([{'host': es_host, 'port': es_port}])

        # BLOCKS INDEX (one per day)
        self.blocks_index_name = "blocks-" + date
        self.block_mapping = self._index_body("blocks-" + date, {
            "@dtime": {"type": "date", "format": "epoch_second"},
            "hash": {"type": "text"},
            "signatures": {"type": "text"},
            "tcount": {"type": "long"},
            "validator": {"type": "text", "fielddata": True},
            "bheight": {"type": "long"},
        })
        self._recreate_index(self.blocks_index_name, self.block_mapping)

        # TRANSACTIONS INDEX (one per day)
        self.transactions_index_name = "transactions-" + date
        self.transactions_mapping = self._index_body("transactions-" + date, {
            "@dtime": {"type": "date", "format": "epoch_second"},
            "sender": {"type": "text", "fielddata": True},
            "receiver": {"type": "text", "fielddata": True},
            "token_count": {"type": "float"},
            "token_type": {"type": "text", "fielddata": True},
            "hash": {"type": "text"},
            "block": {"type": "long"},
        })
        self._recreate_index(self.transactions_index_name,
                             self.transactions_mapping)

        # BALANCE HISTORY
        self.balance_index_name = "balance"
        self.balance_mapping = self._index_body("balance", {
            "@dtime": {"type": "date", "format": "epoch_second"},
            "user": {"type": "text", "fielddata": True},
            "balance": {"type": "float"},
        })
        self._recreate_index(self.balance_index_name, self.balance_mapping)

        # VALIDATOR STATISTIC
        self.clients_index_name = "clients"
        self.clients_mapping = self._index_body("clients", {
            "@dtime": {"type": "date", "format": "epoch_second"},
            "ip": {"type": "ip"},
            "geoip": {
                "properties": {
                    "city_name": {"type": "text"},
                    "continent_name": {"type": "text"},
                    "country_iso_code": {"type": "text"},
                    "location": {"type": "geo_point"},
                    "region_name": {"type": "text"},
                }
            },
            "public_key": {"type": "text", "fielddata": True},
            "client_type": {"type": "text", "fielddata": True},
        })
        self._recreate_index(self.clients_index_name, self.clients_mapping)

    @staticmethod
    def _index_body(type_name, properties):
        # All indices share the same settings; only the (single) mapping
        # type name and its properties differ.
        return {
            "settings": {
                "number_of_shards": 5,
                "number_of_replicas": 0
            },
            "mappings": {
                type_name: {
                    "properties": properties
                }
            }
        }

    def _recreate_index(self, name, body):
        # Drop a pre-existing index and create it afresh; errors are
        # logged, not raised (same behaviour as the original boilerplate).
        if self.es.indices.exists(name):
            try:
                self.es.indices.delete(index=name)
                self.es.indices.create(index=name, body=body)
            except elasticsearch.ElasticsearchException as es1:
                print("Elastic exception on create Indicies:", es1)
        else:
            self.es.indices.create(index=name, body=body)

    def elasticClients(self, jsons: list):
        """Bulk-save prepared client/validator documents."""
        try:
            helpers.bulk(self.es, jsons)
        except elasticsearch.ElasticsearchException as es1:
            print("Elastic exception on save Validators:", es1)
        print("Save Validators in elastic!")

    def elasticBlock(self, timestamp: float, validator: str, tcount: int,
                     signatures: list, hash: str, bheight: int):
        """Index a single block document into the daily blocks index."""
        index = 'blocks-' + self.lastDate
        estype = 'blocks-' + self.lastDate
        eljson = json.dumps({"@dtime": int(timestamp), "validator": validator,
                             "tcount": tcount, "signatures": list(signatures),
                             "hash": hash, "bheight": bheight},
                            separators=(',', ':'))
        try:
            self.es.index(index=str(index).lower(), doc_type=estype.lower(),
                          body=eljson)
        except elasticsearch.ElasticsearchException as es1:
            print("Elastic exception on send Block:", es1)

    def elasticTransaction(self, jsons: list):
        """Bulk-save prepared transaction documents."""
        try:
            helpers.bulk(self.es, jsons)
        except elasticsearch.ElasticsearchException as es1:
            print("Elastic exception on save bulk Transactions:", es1)

    def elasticBalanceHistory(self, balance: dict):
        """Snapshot every user's balance into the 'balance' index."""
        users = balance.keys()
        jsonMas = []
        print("USER LEN:", len(users))
        for user in users:
            eljson = {"_index": "balance", "_type": "balance", "_id": user,
                      "_source": {"@dtime": int(time.time()), "user": user,
                                  "balance": balance.get(user)}}
            jsonMas.append(eljson)
        try:
            helpers.bulk(self.es, jsonMas)
        except elasticsearch.ElasticsearchException as es1:
            print("Elastic exception on save balance:", es1)

    def getLastEBlock(self):
        """Return the highest block number stored today, 0 if the index is
        empty, or None if the query failed."""
        query = {"aggs": {
            "max_blnum": {"max": {"field": "bheight"}}
        }, "size": 0}
        try:
            answer = self.es.search(index="blocks-" + self.lastDate,
                                    doc_type="blocks-" + self.lastDate,
                                    body=query)
            value = answer["aggregations"]["max_blnum"]["value"]
            # 'is not None' instead of the original '== None' comparison.
            return int(value) if value is not None else 0
        except elasticsearch.ElasticsearchException as es1:
            print("Elastic exception on search last block index:", es1)
            return None
| 41.455253 | 186 | 0.382016 | 728 | 10,654 | 5.443681 | 0.190934 | 0.033308 | 0.052485 | 0.097653 | 0.598789 | 0.524098 | 0.500883 | 0.480696 | 0.432753 | 0.357557 | 0 | 0.007849 | 0.521682 | 10,654 | 256 | 187 | 41.617188 | 0.769819 | 0.006289 | 0 | 0.436681 | 0 | 0 | 0.142695 | 0.003969 | 0 | 0 | 0 | 0 | 0 | 1 | 0.026201 | false | 0 | 0.017467 | 0 | 0.056769 | 0.048035 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
163248c24fc9b2b48d8f714d22251c83d3496af1 | 2,694 | py | Python | dialogue-engine/test/programytest/config/brain/test_oob.py | cotobadesign/cotoba-agent-oss | 3833d56e79dcd7529c3e8b3a3a8a782d513d9b12 | [
"MIT"
] | 104 | 2020-03-30T09:40:00.000Z | 2022-03-06T22:34:25.000Z | dialogue-engine/test/programytest/config/brain/test_oob.py | cotobadesign/cotoba-agent-oss | 3833d56e79dcd7529c3e8b3a3a8a782d513d9b12 | [
"MIT"
] | 25 | 2020-06-12T01:36:35.000Z | 2022-02-19T07:30:44.000Z | dialogue-engine/test/programytest/config/brain/test_oob.py | cotobadesign/cotoba-agent-oss | 3833d56e79dcd7529c3e8b3a3a8a782d513d9b12 | [
"MIT"
] | 10 | 2020-04-02T23:43:56.000Z | 2021-05-14T13:47:01.000Z | """
Copyright (c) 2020 COTOBA DESIGN, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
import unittest
from programy.config.file.yaml_file import YamlConfigurationFile
from programy.config.brain.oob import BrainOOBConfiguration
from programy.clients.events.console.config import ConsoleConfiguration
class BrainOOBConfigurationTests(TestCase):

    def _load_oobs_config(self, text):
        """Parse *text* as console configuration and return the yaml file
        object together with the brain's 'oobs' section."""
        yaml_file = YamlConfigurationFile()
        self.assertIsNotNone(yaml_file)
        yaml_file.load_from_text(text, ConsoleConfiguration(), ".")

        brain_section = yaml_file.get_section("brain")
        self.assertIsNotNone(brain_section)
        oobs_section = yaml_file.get_section("oobs", brain_section)
        self.assertIsNotNone(oobs_section)
        return yaml_file, oobs_section

    def test_oob_with_data(self):
        yaml_file, oobs_section = self._load_oobs_config("""
        brain:
            oobs:
                default:
                    classname: programy.oob.defaults.default.DefaultOutOfBandProcessor
        """)

        oob_config = BrainOOBConfiguration("default")
        oob_config.load_config_section(yaml_file, oobs_section, ".")

        self.assertEqual("programy.oob.defaults.default.DefaultOutOfBandProcessor", oob_config.classname)

    def test_default_without_data(self):
        yaml_file, oobs_section = self._load_oobs_config("""
        brain:
            oobs:
                default:
        """)

        oob_config = BrainOOBConfiguration("default")
        oob_config.load_config_section(yaml_file, oobs_section, ".")

        self.assertIsNone(oob_config.classname)
| 42.761905 | 126 | 0.72977 | 325 | 2,694 | 5.935385 | 0.396923 | 0.045619 | 0.026957 | 0.041472 | 0.381545 | 0.328668 | 0.328668 | 0.328668 | 0.328668 | 0.328668 | 0 | 0.001849 | 0.197105 | 2,694 | 62 | 127 | 43.451613 | 0.889968 | 0.394209 | 0 | 0.722222 | 0 | 0 | 0.188424 | 0.067734 | 0 | 0 | 0 | 0 | 0.222222 | 1 | 0.055556 | false | 0 | 0.111111 | 0 | 0.194444 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
163cbfb7a11f70465bec9d58e23cdc35d6fe4e2c | 5,976 | py | Python | v1/hsvfilter.py | gavinIRL/RHBot | 1e22ae5ca7b67ebd6a72c23d9f46d5a8eb6e99cf | [
"MIT"
] | null | null | null | v1/hsvfilter.py | gavinIRL/RHBot | 1e22ae5ca7b67ebd6a72c23d9f46d5a8eb6e99cf | [
"MIT"
] | 60 | 2021-03-29T14:29:49.000Z | 2021-05-03T06:06:19.000Z | v1/hsvfilter.py | gavinIRL/RHBot | 1e22ae5ca7b67ebd6a72c23d9f46d5a8eb6e99cf | [
"MIT"
] | null | null | null | import typing
# custom data structure to hold the state of an HSV filter
class HsvFilter:
    """Value object bundling the HSV min/max thresholds and the
    saturation/value add/subtract adjustments of one filter."""

    def __init__(self, hMin=None, sMin=None, vMin=None, hMax=None, sMax=None, vMax=None,
                 sAdd=None, sSub=None, vAdd=None, vSub=None):
        # Store all ten channel parameters verbatim (default None).
        self.hMin, self.sMin, self.vMin = hMin, sMin, vMin
        self.hMax, self.sMax, self.vMax = hMax, sMax, vMax
        self.sAdd, self.sSub = sAdd, sSub
        self.vAdd, self.vSub = vAdd, vSub
# Putting this here out of the way as it's a chonk
# For a given item string case it will return the optimal filter and the correct position to look
def grab_object_preset(object_name=None, **kwargs) -> typing.Tuple[HsvFilter, list]:
    """Return the tuned HSV filter and screen region for *object_name*.

    NOTE(review): despite the annotation, some branches return a bare
    HsvFilter (no region), ``None, [x, y]`` or ``None, []`` -- callers
    must handle all of these shapes.  Passing ``big_map=True`` in
    **kwargs switches several map presets to the enlarged map region.
    """
    if object_name is None:
        #print("Using default filter")
        return HsvFilter(0, 0, 0, 255, 255, 255, 0, 0, 0, 0), [3, 32, 1280, 794]
    if object_name == "dungeon_check":
        return HsvFilter(0, 73, 94, 106, 255, 255, 0, 0, 0, 0), [1083, 295, 1188, 368]
    if object_name == "enemy_map_loc":
        #print("Using enemy location filter")
        if kwargs.get("big_map"):
            return HsvFilter(0, 128, 82, 8, 255, 255, 0, 66, 30, 34), [485, 280, 900, 734]
        return HsvFilter(0, 128, 82, 8, 255, 255, 0, 66, 30, 34), [1100, 50, 1260, 210]
    if object_name == "player_map_loc":
        if kwargs.get("big_map"):
            return HsvFilter(31, 94, 86, 73, 255, 255, 0, 0, 0, 0), [485, 280, 900, 734]
        return HsvFilter(31, 94, 86, 73, 255, 255, 0, 0, 0, 0), [1100, 50, 1260, 210]
    if object_name == "other_player_map_loc":
        if kwargs.get("big_map"):
            return HsvFilter(16, 172, 194, 32, 255, 255, 0, 0, 70, 37), [485, 280, 900, 734]
        return HsvFilter(16, 172, 194, 32, 255, 255, 0, 0, 70, 37), [1100, 50, 1260, 210]
    if object_name == "loot_distant":
        return HsvFilter(14, 116, 33, 32, 210, 59, 16, 0, 3, 0), [10, 145, 1084, 684]
    if object_name == "loot_near":
        return HsvFilter(0, 155, 135, 31, 240, 217, 0, 0, 0, 0), [460, 420, 855, 710]
    if object_name == "prompt_press_x_pickup":
        return HsvFilter(78, 110, 110, 97, 189, 255, 0, 0, 0, 0), [1080, 660, 1255, 725]
    if object_name == "message_section_cleared":
        return HsvFilter(0, 0, 214, 179, 65, 255, 0, 0, 0, 17), [464, 600, 855, 680]
    if object_name == "message_go":
        return HsvFilter(32, 114, 89, 58, 255, 255, 0, 12, 0, 0), [600, 222, 700, 275]
    if object_name == "enemy_nametag":
        return HsvFilter(49, 0, 139, 91, 30, 197, 0, 0, 40, 38), [10, 145, 1084, 684]
    if object_name == "message_boss_encounter":
        return HsvFilter(0, 92, 128, 13, 255, 255, 0, 0, 0, 0), [630, 520, 1120, 680]
    if object_name == "display_boss_name_and_healthbar":
        return HsvFilter(0, 92, 123, 29, 255, 255, 0, 0, 0, 20), [415, 533, 888, 700]
    if object_name == "loot_chest_normal":
        # This is a difficult one to separate
        return HsvFilter(0, 34, 38, 28, 152, 124, 0, 0, 5, 12), [10, 145, 1084, 684]
    if object_name == "map_outline":
        if kwargs.get("big_map"):
            return HsvFilter(0, 128, 82, 8, 255, 255, 0, 66, 30, 34), [485, 280, 900, 734]
        return HsvFilter(0, 128, 82, 8, 255, 255, 0, 66, 30, 34), [1100, 50, 1260, 210]
    if object_name == "gate_map_pos":
        # This is a very difficult one to separate
        if kwargs.get("big_map"):
            return HsvFilter(0, 128, 82, 8, 255, 255, 0, 66, 30, 34), [485, 280, 900, 734]
        return HsvFilter(0, 128, 82, 8, 255, 255, 0, 66, 30, 34), [1100, 50, 1260, 210]
    # The presets below return only a filter, no screen region.
    if object_name == "prompt_move_reward_screen":
        return HsvFilter(72, 98, 92, 105, 255, 225, 0, 54, 24, 38)
    if object_name == "prompt_select_card":
        return HsvFilter(79, 149, 140, 255, 255, 255, 0, 0, 0, 0)
    if object_name == "event_chest_special_appear":
        return HsvFilter(0, 124, 62, 88, 217, 246, 0, 0, 0, 0)
    if object_name == "inventory_green_item":
        return HsvFilter(37, 147, 0, 61, 255, 255, 0, 0, 0, 0)
    if object_name == "inventory_blue_item":
        return HsvFilter(79, 169, 0, 109, 246, 188, 0, 0, 0, 0)
    if object_name == "inventory_yellow_item":
        # This is a dangerous one as it can barely
        # distinguish against green items and vice versa
        return HsvFilter(19, 91, 107, 31, 168, 181, 0, 11, 32, 21)
    if object_name == "inventory_purple_item":
        return HsvFilter(126, 153, 0, 255, 255, 255, 0, 0, 0, 0)
    if object_name == "button_repair":
        # No filter needed, just the known button position.
        return None, [208, 600]
    # These are all To be done later
    if object_name == "event_card_trade":
        return HsvFilter(0, 0, 0, 255, 255, 255, 0, 0, 0, 0)
    if object_name == "event_otherworld":
        return HsvFilter(0, 0, 0, 255, 255, 255, 0, 0, 0, 0)
    if object_name == "loot_chest_special":
        if kwargs.get("big_map"):
            return HsvFilter(0, 0, 0, 255, 255, 255, 0, 0, 0, 0), [10, 145, 1084, 684]
        return HsvFilter(0, 0, 0, 255, 255, 255, 0, 0, 0, 0), [10, 145, 1084, 684]
    if object_name == "cards":
        return HsvFilter(0, 0, 0, 255, 255, 255, 0, 0, 0, 0), [735, 32, 1085, 100]
    if object_name == "enemy_arrow":
        return HsvFilter(0, 0, 0, 255, 255, 255, 0, 0, 0, 0), [10, 145, 1084, 684]
    # Buttons for clicking, known positions
    if object_name == "button_explore_again":
        return None, []
    if object_name == "button_choose_map":
        return None, []
    if object_name == "button_open_store":
        return None, []
    if object_name == "button_go_town":
        return None, []
    if object_name == "button_inv_equipment":
        return None, []
    if object_name == "button_inv_consume":
        return None, []
    if object_name == "button_inv_other":
        return None, []
    if object_name == "button_repair_confirm":
        return None, []
    if object_name == "inv_grid_location":
        # Inventory cells are 44x44 px starting at (533, 277); 'col'/'row'
        # in **kwargs select the cell.
        return None, [533+44*kwargs.get("col"), 277+44*kwargs.get("row")]
| 49.38843 | 97 | 0.593373 | 954 | 5,976 | 3.591195 | 0.285115 | 0.045534 | 0.039405 | 0.021016 | 0.430823 | 0.405721 | 0.360187 | 0.304437 | 0.271454 | 0.265032 | 0 | 0.218297 | 0.261044 | 5,976 | 120 | 98 | 49.8 | 0.557518 | 0.083668 | 0 | 0.242718 | 0 | 0 | 0.124085 | 0.038616 | 0 | 0 | 0 | 0 | 0 | 1 | 0.019417 | false | 0 | 0.009709 | 0 | 0.466019 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
163d903313e3ca0e241b2c27dfd7fddcb15bbfdb | 287 | py | Python | ecommerce_api/core/cart/exceptions.py | victormartinez/ecommerceapi | a887d9e938050c15ebf52001f63d7aa7f33fa5ee | [
"MIT"
] | null | null | null | ecommerce_api/core/cart/exceptions.py | victormartinez/ecommerceapi | a887d9e938050c15ebf52001f63d7aa7f33fa5ee | [
"MIT"
] | null | null | null | ecommerce_api/core/cart/exceptions.py | victormartinez/ecommerceapi | a887d9e938050c15ebf52001f63d7aa7f33fa5ee | [
"MIT"
] | null | null | null | from typing import Iterable, Optional
class ProductsNotFound(Exception):
    """Raised when one or more requested product ids are invalid."""

    def __init__(self, product_ids: Optional[Iterable[int]] = None):
        ids = product_ids or []
        self.product_ids = ids
        self.message = "One or more products are invalid."
        super().__init__(self.message)
| 31.888889 | 68 | 0.700348 | 35 | 287 | 5.428571 | 0.657143 | 0.157895 | 0.147368 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.198606 | 287 | 8 | 69 | 35.875 | 0.826087 | 0 | 0 | 0 | 0 | 0 | 0.114983 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.166667 | false | 0 | 0.166667 | 0 | 0.5 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
163dc7048c89ab3ce7a0707b33435bed5fbe6660 | 6,742 | py | Python | test/unit/test_record.py | jsoref/neo4j-python-driver | 32c130c9a975dbf8c0d345b362d096b5e1dd3e5b | [
"Apache-2.0"
] | null | null | null | test/unit/test_record.py | jsoref/neo4j-python-driver | 32c130c9a975dbf8c0d345b362d096b5e1dd3e5b | [
"Apache-2.0"
] | null | null | null | test/unit/test_record.py | jsoref/neo4j-python-driver | 32c130c9a975dbf8c0d345b362d096b5e1dd3e5b | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
# Copyright (c) 2002-2018 "Neo Technology,"
# Network Engine for Objects in Lund AB [http://neotechnology.com]
#
# This file is part of Neo4j.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from unittest import TestCase
from neo4j.v1 import Record
class RecordTestCase(TestCase):
def test_record_equality(self):
record1 = Record(["name", "empire"], ["Nigel", "The British Empire"])
record2 = Record(["name", "empire"], ["Nigel", "The British Empire"])
record3 = Record(["name", "empire"], ["Stefan", "Das Deutschland"])
assert record1 == record2
assert record1 != record3
assert record2 != record3
def test_record_hashing(self):
record1 = Record(["name", "empire"], ["Nigel", "The British Empire"])
record2 = Record(["name", "empire"], ["Nigel", "The British Empire"])
record3 = Record(["name", "empire"], ["Stefan", "Das Deutschland"])
assert hash(record1) == hash(record2)
assert hash(record1) != hash(record3)
assert hash(record2) != hash(record3)
def test_record_iter(self):
a_record = Record(["name", "empire"], ["Nigel", "The British Empire"])
assert list(a_record.__iter__()) == ["name", "empire"]
def test_record_copy(self):
original = Record(["name", "empire"], ["Nigel", "The British Empire"])
duplicate = original.copy()
assert dict(original) == dict(duplicate)
assert original.keys() == duplicate.keys()
assert original is not duplicate
def test_record_as_dict(self):
a_record = Record(["name", "empire"], ["Nigel", "The British Empire"])
assert dict(a_record) == {"name": "Nigel", "empire": "The British Empire"}
def test_record_as_list(self):
a_record = Record(["name", "empire"], ["Nigel", "The British Empire"])
assert list(a_record) == ["name", "empire"]
def test_record_len(self):
a_record = Record(["name", "empire"], ["Nigel", "The British Empire"])
assert len(a_record) == 2
def test_record_repr(self):
a_record = Record(["name", "empire"], ["Nigel", "The British Empire"])
assert repr(a_record) == "<Record name='Nigel' empire='The British Empire'>"
def test_record_data(self):
r = Record(["name", "age", "married"], ["Alice", 33, True])
self.assertEqual(r.data(), {"name": "Alice", "age": 33, "married": True})
self.assertEqual(r.data("name"), {"name": "Alice"})
self.assertEqual(r.data("age", "name"), {"age": 33, "name": "Alice"})
self.assertEqual(r.data("age", "name", "shoe size"), {"age": 33, "name": "Alice", "shoe size": None})
self.assertEqual(r.data(0, "name"), {"name": "Alice"})
self.assertEqual(r.data(0), {"name": "Alice"})
self.assertEqual(r.data(1, 0), {"age": 33, "name": "Alice"})
with self.assertRaises(IndexError):
_ = r.data(1, 0, 999)
def test_record_keys(self):
    """keys() returns the field names as a tuple, in order."""
    record = Record(["name", "age", "married"], ["Alice", 33, True])
    self.assertEqual(record.keys(), ("name", "age", "married"))
def test_record_values(self):
    """values() selects by key or index; unknown keys default to None."""
    record = Record(["name", "age", "married"], ["Alice", 33, True])
    self.assertEqual(record.values(), ("Alice", 33, True))
    self.assertEqual(record.values("name"), ("Alice",))
    self.assertEqual(record.values("age", "name"), (33, "Alice"))
    # An unknown key yields None rather than raising.
    self.assertEqual(record.values("age", "name", "shoe size"), (33, "Alice", None))
    # Keys and indexes may be mixed; duplicate selections are preserved.
    self.assertEqual(record.values(0, "name"), ("Alice", "Alice"))
    self.assertEqual(record.values(0), ("Alice",))
    self.assertEqual(record.values(1, 0), (33, "Alice"))
    # An out-of-range index raises, unlike an unknown key.
    with self.assertRaises(IndexError):
        _ = record.values(1, 0, 999)
def test_record_items(self):
    """items() returns (key, value) pairs for the selected fields."""
    record = Record(["name", "age", "married"], ["Alice", 33, True])
    self.assertEqual(record.items(), [("name", "Alice"), ("age", 33), ("married", True)])
    self.assertEqual(record.items("name"), [("name", "Alice")])
    self.assertEqual(record.items("age", "name"), [("age", 33), ("name", "Alice")])
    # Unknown keys appear with a None value rather than raising.
    self.assertEqual(
        record.items("age", "name", "shoe size"),
        [("age", 33), ("name", "Alice"), ("shoe size", None)],
    )
    # Keys and indexes may be mixed; duplicate selections are preserved.
    self.assertEqual(record.items(0, "name"), [("name", "Alice"), ("name", "Alice")])
    self.assertEqual(record.items(0), [("name", "Alice")])
    self.assertEqual(record.items(1, 0), [("age", 33), ("name", "Alice")])
    # An out-of-range index raises, unlike an unknown key.
    with self.assertRaises(IndexError):
        _ = record.items(1, 0, 999)
def test_record_index(self):
    """index() maps keys and indexes to positions, rejecting bad selectors."""
    record = Record(["name", "age", "married"], ["Alice", 33, True])
    for position, key in enumerate(("name", "age", "married")):
        self.assertEqual(record.index(key), position)
        self.assertEqual(record.index(position), position)
    # Unknown keys raise KeyError, out-of-range indexes IndexError, and
    # any other selector type TypeError.
    with self.assertRaises(KeyError):
        _ = record.index("shoe size")
    with self.assertRaises(IndexError):
        _ = record.index(3)
    with self.assertRaises(TypeError):
        _ = record.index(None)
def test_record_value(self):
    """value() returns one field, honouring an optional default."""
    record = Record(["name", "age", "married"], ["Alice", 33, True])
    # With no selector the first field is returned.
    self.assertEqual(record.value(), "Alice")
    for key, expected in (("name", "Alice"), ("age", 33), ("married", True)):
        self.assertEqual(record.value(key), expected)
    # Unknown keys fall back to the default (None unless given).
    self.assertEqual(record.value("shoe size"), None)
    self.assertEqual(record.value("shoe size", 6), 6)
    for position, expected in enumerate(("Alice", 33, True)):
        self.assertEqual(record.value(position), expected)
    # Past-the-end indexes also fall back to the default instead of raising.
    self.assertEqual(record.value(3), None)
    self.assertEqual(record.value(3, 6), 6)
    with self.assertRaises(TypeError):
        _ = record.value(None)
def test_record_contains(self):
    # Membership tests accept both field names and positional indexes.
    r = Record(["name", "age", "married"], ["Alice", 33, True])
    self.assertTrue("name" in r)
    self.assertTrue("age" in r)
    self.assertTrue("married" in r)
    self.assertFalse("shoe size" in r)
    self.assertTrue(0 in r)
    self.assertTrue(1 in r)
    self.assertTrue(2 in r)
    self.assertFalse(3 in r)
    # NOTE(review): the final assertion exercises r.index(None), duplicating
    # test_record_index — presumably it was meant to test `None in r`;
    # confirm against Record.__contains__ before changing.
    with self.assertRaises(TypeError):
        _ = r.index(None)
| 43.496774 | 116 | 0.590923 | 846 | 6,742 | 4.64539 | 0.165485 | 0.148855 | 0.158779 | 0.080153 | 0.628753 | 0.508142 | 0.436896 | 0.370992 | 0.35369 | 0.313995 | 0 | 0.024214 | 0.222041 | 6,742 | 154 | 117 | 43.779221 | 0.725072 | 0.103975 | 0 | 0.234783 | 0 | 0 | 0.17566 | 0 | 0 | 0 | 0 | 0 | 0.6 | 1 | 0.130435 | false | 0 | 0.017391 | 0 | 0.156522 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
1648b2044844b3d9b645771b179a716a797264e9 | 599 | py | Python | src/spaceone/inventory/connector/snapshot.py | jean1042/plugin-azure-cloud-services | 3a75a516c9a4d1e8a4962988934ead3fd40e8494 | [
"Apache-2.0"
] | 1 | 2020-12-08T11:59:54.000Z | 2020-12-08T11:59:54.000Z | src/spaceone/inventory/connector/snapshot.py | jean1042/plugin-azure-cloud-services | 3a75a516c9a4d1e8a4962988934ead3fd40e8494 | [
"Apache-2.0"
] | 4 | 2021-01-26T10:43:37.000Z | 2021-12-17T10:13:33.000Z | src/spaceone/inventory/connector/snapshot.py | jean1042/plugin-azure-cloud-services | 3a75a516c9a4d1e8a4962988934ead3fd40e8494 | [
"Apache-2.0"
] | 2 | 2021-01-13T03:24:05.000Z | 2021-01-19T07:25:45.000Z | import logging
from spaceone.inventory.libs.connector import AzureConnector
from spaceone.inventory.error import *
from spaceone.inventory.error.custom import *
__all__ = ['SnapshotConnector']
_LOGGER = logging.getLogger(__name__)
class SnapshotConnector(AzureConnector):
    """Azure connector that lists disk snapshots via the compute client."""

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Establish the Azure session from the provided credentials.
        self.set_connect(kwargs.get('secret_data'))

    def list_snapshots(self):
        """Return an iterator over all snapshots, or None on connection failure.

        Connection errors are logged (best effort) rather than raised.
        """
        try:
            return self.compute_client.snapshots.list()
        except ConnectionError:
            # BUG FIX: the logged field previously said 'Public IP Address',
            # a copy-paste leftover from another connector.
            _LOGGER.error(ERROR_CONNECTOR(field='Snapshot'))
| 28.52381 | 69 | 0.721202 | 64 | 599 | 6.390625 | 0.59375 | 0.08802 | 0.154034 | 0.127139 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.176962 | 599 | 20 | 70 | 29.95 | 0.829615 | 0 | 0 | 0 | 0 | 0 | 0.075125 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.133333 | false | 0 | 0.266667 | 0 | 0.533333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
164ff194ddd6475fcc83a8af8f5b4d32701c55ea | 886 | py | Python | pymterm/colour/tango.py | stonewell/pymterm | af36656d5f7fb008533178d14b00d83d72ba00cf | [
"MIT"
] | 102 | 2016-07-21T06:39:02.000Z | 2022-03-09T19:34:03.000Z | pymterm/colour/tango.py | stonewell/pymterm | af36656d5f7fb008533178d14b00d83d72ba00cf | [
"MIT"
] | 2 | 2017-01-11T13:43:34.000Z | 2020-01-19T12:06:47.000Z | pymterm/colour/tango.py | stonewell/pymterm | af36656d5f7fb008533178d14b00d83d72ba00cf | [
"MIT"
] | 4 | 2020-03-22T04:08:35.000Z | 2021-06-27T23:38:02.000Z | TANGO_PALLETE = [
'2e2e34343636',
'cccc00000000',
'4e4e9a9a0606',
'c4c4a0a00000',
'34346565a4a4',
'757550507b7b',
'060698989a9a',
'd3d3d7d7cfcf',
'555557575353',
'efef29292929',
'8a8ae2e23434',
'fcfce9e94f4f',
'72729f9fcfcf',
'adad7f7fa8a8',
'3434e2e2e2e2',
'eeeeeeeeecec',
]
def parse_tango_color(c):
    """Parse a 12-hex-digit Tango colour string into an RGBA list.

    The palette stores each channel as four hex digits ('rrrrggggbbbb');
    only the high byte (first two digits) of each channel is significant.
    Returns [r, g, b, 0xFF] with each component in 0..255.
    """
    # Simplified from the original `c[:4][:2]` style double slicing,
    # which is equivalent to a single direct slice.
    r = int(c[0:2], 16)
    g = int(c[4:6], 16)
    b = int(c[8:10], 16)
    return [r, g, b, 0xFF]
def apply_color(cfg, color_table):
    """Install the Tango palette into *cfg* and fill *color_table* in place."""
    cfg.default_foreground_color = parse_tango_color('eeeeeeeeecec')
    cfg.default_background_color = parse_tango_color('323232323232')
    cfg.default_cursor_color = cfg.default_foreground_color
    # Copy as many palette entries as the table can hold.
    limit = min(len(TANGO_PALLETE), len(color_table))
    for index in range(limit):
        color_table[index] = parse_tango_color(TANGO_PALLETE[index])
| 24.611111 | 69 | 0.613995 | 98 | 886 | 5.316327 | 0.469388 | 0.076775 | 0.115163 | 0.095969 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.215569 | 0.24605 | 886 | 35 | 70 | 25.314286 | 0.564371 | 0 | 0 | 0 | 0 | 0 | 0.254118 | 0 | 0 | 0 | 0.004706 | 0 | 0 | 1 | 0.066667 | false | 0 | 0 | 0 | 0.1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
16506683fe35155169d6fbcd3b4087bff7394386 | 22,681 | py | Python | user_manager/oauth/oauth2.py | voegtlel/auth-manager-backend | 20d40de0abc9deeb3fcddd892ffe2e635301917a | [
"MIT"
] | null | null | null | user_manager/oauth/oauth2.py | voegtlel/auth-manager-backend | 20d40de0abc9deeb3fcddd892ffe2e635301917a | [
"MIT"
] | null | null | null | user_manager/oauth/oauth2.py | voegtlel/auth-manager-backend | 20d40de0abc9deeb3fcddd892ffe2e635301917a | [
"MIT"
] | null | null | null | from datetime import datetime, timedelta
from enum import Enum
from typing import List, Optional, Tuple, Dict, Any, Union
import time
from authlib.common.security import generate_token
from authlib.consts import default_json_headers
from authlib.oauth2 import (
OAuth2Request,
AuthorizationServer as _AuthorizationServer,
ResourceProtector as _ResourceProtector,
OAuth2Error,
HttpRequest,
)
from authlib.oauth2.rfc6749 import InvalidClientError
from authlib.oauth2.rfc6749.grants import (
AuthorizationCodeGrant as _AuthorizationCodeGrant,
RefreshTokenGrant as _RefreshTokenGrant,
BaseGrant,
)
from authlib.oauth2.rfc6749.grants import (
ResourceOwnerPasswordCredentialsGrant as _ResourceOwnerPasswordCredentialsGrant,
)
from authlib.oauth2.rfc6749.util import scope_to_list
from authlib.oauth2.rfc6750 import BearerTokenValidator as _BearerTokenValidator, BearerToken as _BearerToken, \
InsufficientScopeError
from authlib.oauth2.rfc8414 import AuthorizationServerMetadata
from authlib.oidc.core import UserInfo
from authlib.oidc.core.grants import (
OpenIDCode as _OpenIDCode,
OpenIDImplicitGrant as _OpenIDImplicitGrant,
OpenIDHybridGrant as _OpenIDHybridGrant,
)
from authlib.oidc.core.grants.util import is_openid_scope, generate_id_token
from fastapi import HTTPException
from starlette.concurrency import run_in_threadpool
from starlette.responses import Response, JSONResponse
from user_manager.common.config import config
from user_manager.common.models import DbAuthorizationCode, DbToken, DbClient, DbUser, DbManagerSchema, DbUserProperty, \
UserPropertyType
from user_manager.common.mongo import authorization_code_collection, token_collection, \
client_collection, client_user_cache_collection, user_group_collection, async_token_collection, \
async_user_group_collection, async_client_collection, user_collection, read_schema, async_read_schema
from . import oauth2_key
from .user_helper import UserWithRoles
USERS_SCOPE = '*users'
class TypedRequest(OAuth2Request):
    # Narrowed type annotations for the attributes this module attaches to
    # authlib's OAuth2Request at runtime; no behaviour is added.
    user: UserWithRoles
    credential: Union[DbAuthorizationCode, DbToken]
    client: DbClient
class RedirectResponse(Response):
    """A redirect response whose target can also be rendered as JSON."""

    def to_json_response(self) -> JSONResponse:
        """Return a 200 JSON payload carrying the redirect target URI."""
        target = self.headers['Location']
        return JSONResponse(
            content={'redirect_uri': target},
            status_code=200,
            headers=dict(default_json_headers),
        )
class ErrorJSONResponse(JSONResponse):
    # Marker subclass so callers can distinguish error payloads from
    # successful JSON responses; adds no behaviour.
    pass
class ErrorRedirectResponse(RedirectResponse):
    """A redirect signalling an error; renders as a 401 JSON payload."""

    def to_json_response(self) -> JSONResponse:
        """Return a 401 error JSON payload carrying the redirect target URI."""
        target = self.headers['Location']
        return ErrorJSONResponse(
            content={'redirect_uri': target},
            status_code=401,
            headers=dict(default_json_headers),
        )
class AuthorizationServer(_AuthorizationServer):
    # Authlib authorization server adapted to this app's request/response
    # types: requests arrive pre-parsed as OAuth2Request, and responses are
    # rendered as starlette JSONResponse/RedirectResponse objects.
    metadata_class = AuthorizationServerMetadata

    def create_oauth2_request(self, request: TypedRequest):
        # Requests are already parsed OAuth2Request instances; pass through.
        assert isinstance(request, OAuth2Request)
        return request

    def create_json_request(self, request):
        # Not implemented for this deployment.
        assert isinstance(request, HttpRequest)
        raise NotImplementedError()
        # TODO: Create HttpRequest with json in body.

    def handle_response(self, status_code: int, payload: Optional[dict], headers: List[Tuple[str, str]]):
        # A dict payload becomes JSON; a Location header (and no payload)
        # becomes a redirect. Anything else is a programming error.
        headers = dict(headers)
        if isinstance(payload, dict):
            return JSONResponse(payload, status_code=status_code, headers=headers)
        elif headers.get('Location'):
            assert not payload
            return RedirectResponse(status_code=status_code, headers=headers)
        assert False

    def handle_error_response(self, request: TypedRequest, error: OAuth2Error):
        # Render an OAuth2Error either as an error JSON body or as an error
        # redirect, mirroring handle_response's dispatch.
        status_code, body, headers = error(
            translations=self.get_translations(request),
            error_uris=self.get_error_uris(request)
        )
        headers = dict(headers)
        if isinstance(body, dict):
            return ErrorJSONResponse(
                content=body,
                status_code=status_code,
                headers=headers,
            )
        elif headers.get('Location'):
            assert not body
            return ErrorRedirectResponse(
                status_code=status_code,
                headers=headers,
            )
        assert False
def save_authorization_code(code: str, request: TypedRequest):
    """Persist a newly issued authorization code and return its model."""
    expires_at = datetime.utcnow() + timedelta(
        seconds=config.oauth2.token_expiration.authorization_code
    )
    auth_code = DbAuthorizationCode(
        code=code,
        client_id=request.client.id,
        redirect_uri=request.redirect_uri,
        scope=request.scope,
        user_id=request.user.user.id,
        nonce=request.data.get('nonce'),
        auth_time=int(time.time()),
        expiration_time=expires_at,
    )
    authorization_code_collection.insert_one(auth_code.document())
    return auth_code
class ExistsNonceMixin(object):
    """Implements authlib's nonce-existence check via an atomic Mongo update."""

    def exists_nonce(self, nonce: str, request: TypedRequest):
        # Atomically claim the nonce by clearing it on the stored
        # authorization code, so the same nonce cannot match twice.
        result = authorization_code_collection.update_one(
            {'client_id': request.client_id, 'nonce': nonce},
            {'$set': {'nonce': None}},
        )
        return result.modified_count == 1
class JwtConfigMixin(object):
    """Provides the shared JWT signing configuration for OpenID grants."""

    # Lifetime (seconds) for tokens signed with this configuration;
    # subclasses must set it.
    jwt_token_expiration: int

    def get_jwt_config(self, *args, **kwargs):
        """Return the signing key, algorithm, issuer and expiration."""
        jwk = oauth2_key.key
        return dict(
            key=jwk.key,
            alg=jwk.jwk.alg.value,
            iss=config.oauth2.issuer,
            exp=self.jwt_token_expiration,
        )
class UserInfoMixin(object):
    # Builds OIDC userinfo claim sets from the configured manager schema.
    # generate_user_info and async_generate_user_info are deliberate
    # sync/async twins; keep their bodies in lockstep when editing.

    def _translate_properties(
        self,
        scope: str,
        schema: DbManagerSchema,
    ) -> List[Tuple[str, DbUserProperty, Optional[str], Optional[bool]]]:
        # Resolve the granted scopes into the user properties they expose.
        # Returns (claim key, property definition, group-type filter,
        # group-by-name flag) tuples; the '*' pseudo-scope is always applied.
        scope_list = ['*'] + scope_to_list(scope)
        return [
            (prop.valid_key, schema.properties_by_key[prop.user_property], prop.group_type, prop.group_by_name)
            for scope_name in scope_list
            if scope_name not in ('openid', 'offline_access') and scope_name in schema.scopes_by_key
            for prop in schema.scopes_by_key[scope_name].properties
            if prop.user_property in schema.properties_by_key
        ]

    def generate_user_info(self, user: UserWithRoles, scope: str):
        # Synchronous variant; see async_generate_user_info for the async twin.
        user_data = {
            'roles': user.roles,
        }
        for key, prop, group_type, group_by_name in self._translate_properties(scope, read_schema()):
            if not hasattr(user.user, prop.key):
                continue
            value = getattr(user.user, prop.key, None)
            if prop.type == UserPropertyType.picture:
                # Pictures are exposed as absolute URLs.
                if value is not None:
                    value = f"{config.oauth2.base_url}/picture/{value}"
            elif prop.type == UserPropertyType.groups:
                # Resolve group ids to visible groups, optionally filtered by
                # type and rendered by name instead of id.
                group_filter = {} if group_type is None else {'group_type': group_type}
                value = [
                    group['group_name'] if group_by_name else group['_id']
                    for group in user_group_collection.find(
                        {'_id': {'$in': value}, 'visible': True, **group_filter},
                        projection={'group_name' if group_by_name else '_id': 1}
                    )
                ]
            elif prop.type in (
                UserPropertyType.access_token, UserPropertyType.password, UserPropertyType.token
            ):
                # Never leak secret-bearing properties into userinfo.
                continue
            user_data[key] = value
        return UserInfo(**user_data)

    async def async_generate_user_info(self, user: UserWithRoles, scope: str):
        # Async twin of generate_user_info using the async Mongo collections.
        user_data = {
            'roles': user.roles,
        }
        for key, prop, group_type, group_by_name in self._translate_properties(scope, await async_read_schema()):
            if not hasattr(user.user, prop.key):
                continue
            value = getattr(user.user, prop.key, None)
            if prop.type == UserPropertyType.picture:
                # Pictures are exposed as absolute URLs.
                if value is not None:
                    value = f"{config.oauth2.base_url}/picture/{value}"
            elif prop.type == UserPropertyType.groups:
                # Resolve group ids to visible groups, optionally filtered by
                # type and rendered by name instead of id.
                group_filter = {} if group_type is None else {'group_type': group_type}
                value = [
                    group['group_name'] if group_by_name else group['_id']
                    async for group in async_user_group_collection.find(
                        {'_id': {'$in': value}, 'visible': True, **group_filter},
                        projection={'group_name' if group_by_name else '_id': 1}
                    )
                ]
            elif prop.type in (
                UserPropertyType.access_token, UserPropertyType.password, UserPropertyType.token
            ):
                # Never leak secret-bearing properties into userinfo.
                continue
            user_data[key] = value
        return UserInfo(**user_data)
class AuthorizationCodeGrant(_AuthorizationCodeGrant):
    """Authorization-code grant persisted in MongoDB."""

    TOKEN_ENDPOINT_AUTH_METHODS = ['none', 'client_secret_basic', 'client_secret_post']
    AUTHORIZATION_CODE_LENGTH = config.oauth2.authorization_code_length

    def save_authorization_code(self, code: str, request: TypedRequest):
        """Delegate to the module-level helper shared with the hybrid grant."""
        return save_authorization_code(code, request)

    def query_authorization_code(self, code: str, client: DbClient):
        """Load a stored, unexpired authorization code or return None."""
        document = authorization_code_collection.find_one({'_id': code, 'client_id': client.id})
        if document is None:
            return None
        authorization_code = DbAuthorizationCode.validate_document(document)
        return None if authorization_code.is_expired() else authorization_code

    def delete_authorization_code(self, authorization_code: DbAuthorizationCode):
        """Remove a consumed authorization code."""
        authorization_code_collection.delete_one({'_id': authorization_code.code})

    def authenticate_user(self, authorization_code: DbAuthorizationCode):
        """Resolve the user (with roles) the code was issued for."""
        return UserWithRoles.load(authorization_code.user_id, authorization_code.client_id)
class ResourceOwnerPasswordCredentialsGrant(_ResourceOwnerPasswordCredentialsGrant):
    """Password grant authenticating users by e-mail + personal access token."""

    def authenticate_token_endpoint_client(self):
        # Must override this to set the client in the request, to make it
        # available to authenticate_user.
        # BUG FIX: `super(self)` is invalid — super() requires a type as its
        # first argument, so the original raised TypeError on every call.
        client = super().authenticate_token_endpoint_client()
        self.request.client = client
        return client

    def authenticate_user(self, username: str, password: str):
        """Look up an active user whose e-mail and access token match.

        Returns the user with roles for the current client, or None if no
        matching active user exists.
        """
        user_data = user_collection.find_one({'email': username, 'access_tokens.token': password, 'active': True})
        if user_data is None:
            return None
        return UserWithRoles.load_groups(DbUser.validate_document(user_data), self.client.id)
class OpenIDCode(UserInfoMixin, ExistsNonceMixin, JwtConfigMixin, _OpenIDCode):
    # OIDC extension for the authorization-code grant; id tokens use the
    # authorization-code expiration.
    jwt_token_expiration = config.oauth2.token_expiration.authorization_code
class OpenIDImplicitGrant(UserInfoMixin, ExistsNonceMixin, JwtConfigMixin, _OpenIDImplicitGrant):
    # OIDC implicit flow; id tokens use the implicit-grant expiration.
    jwt_token_expiration = config.oauth2.token_expiration.implicit
class OpenIDHybridGrant(UserInfoMixin, ExistsNonceMixin, JwtConfigMixin, _OpenIDHybridGrant):
    # OIDC hybrid flow; id tokens use the implicit-grant expiration.
    jwt_token_expiration = config.oauth2.token_expiration.implicit

    def generate_authorization_code(self) -> str:
        # Authorization codes have the configured fixed length.
        return generate_token(config.oauth2.authorization_code_length)

    def save_authorization_code(self, code: str, request: TypedRequest):
        # Shared persistence helper, same as AuthorizationCodeGrant.
        return save_authorization_code(code, request)
class RefreshTokenGrant(_RefreshTokenGrant):
    """Refresh-token grant backed by the Mongo token collection."""

    TOKEN_ENDPOINT_AUTH_METHODS = ['none', 'client_secret_basic']
    # Rotate: every refresh issues a new refresh token.
    INCLUDE_NEW_REFRESH_TOKEN = True

    def authenticate_refresh_token(self, refresh_token: str):
        """Load the unexpired token that owns *refresh_token*, or None."""
        document = token_collection.find_one({'refresh_token': refresh_token})
        if document is None:
            return None
        token = DbToken.validate_document(document)
        return None if token.is_expired() else token

    def authenticate_user(self, credential: DbToken):
        """Resolve the user (with roles) the token was issued for."""
        return UserWithRoles.load(credential.user_id, credential.client_id)

    def revoke_old_credential(self, credential: DbToken):
        """Delete the superseded access token outright (no revoked flag)."""
        token_collection.delete_one({'_id': credential.access_token})
def save_token(token: Dict[str, Any], request: TypedRequest):
    """Persist an issued token document and return its model."""
    # Client-credential style requests have no associated user.
    user_id = request.user.user.id if request.user else None
    expires_at = datetime.utcnow() + timedelta(seconds=token.get('expires_in', 0))
    document = {
        'client_id': request.client.id,
        'user_id': user_id,
        'issued_at': int(time.time()),
        'expiration_time': expires_at,
        'scope': request.scope,
        'auth_time': request.credential.get_auth_time(),
        **token,
    }
    token_data = DbToken.validate_document(document)
    token_collection.insert_one(token_data.document())
    return token_data
def query_client(client_id: str):
    """Fetch an OAuth2 client by id, returning None when unknown."""
    document = client_collection.find_one({'_id': client_id})
    return None if document is None else DbClient.validate_document(document)
async def async_query_client(client_id: str):
    """Async twin of query_client: fetch a client by id or return None."""
    document = await async_client_collection.find_one({'_id': client_id})
    return None if document is None else DbClient.validate_document(document)
def token_generator(*_):
    # Random refresh-token generator; the positional arguments authlib
    # passes (client, grant_type, ...) are ignored.
    return generate_token(config.oauth2.token_length)
class AccessTokenGenerator(UserInfoMixin, JwtConfigMixin):
    # Generates JWT access tokens carrying the user id, roles and — when the
    # 'groups' scope was granted — the user's group memberships.
    jwt_token_expiration = config.oauth2.token_expiration.authorization_code

    def __call__(self, client: DbClient, grant_type: str, user: UserWithRoles, scope: str):
        jwt_config = self.get_jwt_config()
        # Audience is the requesting client; auth_time is "now".
        jwt_config['aud'] = [client.get_client_id()]
        jwt_config['auth_time'] = int(time.time())
        user_info = {'sub': user.user.id, 'roles': user.roles}
        if 'groups' in scope_to_list(scope):
            user_info['groups'] = user.user.groups
        # The 'code' argument just adds entropy to the generated token.
        return generate_id_token({}, user_info, code=generate_token(config.oauth2.access_token_length), **jwt_config)
def token_expires_in(_, grant_type: str):
    # Look up the configured expiration (seconds) for the given grant type;
    # the first (client) argument from authlib is unused.
    return getattr(config.oauth2.token_expiration, grant_type)
class BearerToken(_BearerToken):
    """Bearer-token issuer that only adds refresh tokens for offline access."""

    def __call__(self, client, grant_type, user=None, scope=None,
                 expires_in=None, include_refresh_token=True):
        # A refresh token is only meaningful when 'offline_access' was granted.
        offline = 'offline_access' in scope_to_list(scope)
        return super(BearerToken, self).__call__(
            client, grant_type, user, scope, expires_in,
            include_refresh_token and offline,
        )
# Module-level authorization server: access tokens are signed JWTs, refresh
# tokens are random strings, and expirations come from configuration.
authorization = AuthorizationServer(
    query_client,
    save_token,
    BearerToken(AccessTokenGenerator(), expires_generator=token_expires_in, refresh_token_generator=token_generator),
)
class OpenIDSessionState:
    """Grant extension adding a 'session_state' claim to OpenID tokens."""

    def __call__(self, grant: BaseGrant):
        grant.register_hook('process_token', self.process_token)

    def process_token(self, grant: BaseGrant, token: dict):
        """Attach session state to the token when it carries an openid scope."""
        scope = token.get('scope')
        if scope and is_openid_scope(scope):
            # Tie the session state to the user's last-modified marker so
            # clients can detect stale sessions.
            token['session_state'] = str(grant.request.user.last_modified)
        return token
# Register all supported grants; code and refresh flows additionally get the
# OIDC id-token extension and the session-state hook.
authorization.register_grant(AuthorizationCodeGrant, [OpenIDCode(), OpenIDSessionState()])
authorization.register_grant(OpenIDImplicitGrant)
authorization.register_grant(OpenIDHybridGrant)
authorization.register_grant(RefreshTokenGrant, [OpenIDCode(), OpenIDSessionState()])
authorization.register_grant(ResourceOwnerPasswordCredentialsGrant)
class BearerTokenValidator(_BearerTokenValidator):
    """Validates bearer tokens against the Mongo token collection."""

    def authenticate_token(self, token_string: str):
        """Return the stored token if it exists and its client/user pairing
        is still present in the cache collection; otherwise None."""
        document = token_collection.find_one({'_id': token_string})
        if document is None:
            return None
        token = DbToken.validate_document(document)
        pairing = {
            'client_id': token.client_id,
            'user_id': token.user_id,
        }
        if client_user_cache_collection.count_documents(pairing) == 1:
            return token
        return None

    def request_invalid(self, request: TypedRequest):
        """No request-level rejection beyond token validation."""
        return False

    def token_revoked(self, token: DbToken):
        return token.revoked
class ResourceProtector(_ResourceProtector):
    # Works directly on already-parsed OAuth2Request objects instead of
    # raw HTTP requests.
    def validate(self, request: OAuth2Request, scope: str = None, scope_operator='AND') -> DbToken:
        assert isinstance(request, OAuth2Request)
        return self.validate_request(scope, request, scope_operator)
class UserIntrospection(UserInfoMixin):
    # Userinfo endpoint: validates the bearer token and returns the claims
    # permitted by the token's scope.
    async def create_response(self, request: TypedRequest) -> Response:
        try:
            assert isinstance(request, OAuth2Request)
            # Token validation is synchronous; run it off the event loop.
            request.token = await run_in_threadpool(resource_protector.validate_request, None, request)
            if request.token is None:
                raise HTTPException(403, "Invalid token")
            request.user = await UserWithRoles.async_load(request.token.user_id, request.token.client_id)
            user_info = await self.async_generate_user_info(request.user, request.token.scope)
            return JSONResponse(user_info)
        except OAuth2Error as error:
            return authorization.handle_error_response(request, error)
class RequestOriginVerifier:
    # Checks that a request's origin is an allowed redirect URI of the
    # client that owns the presented bearer token. Returns None on success
    # or an error response on failure.
    async def create_response(self, request: TypedRequest, origin: str) -> Optional[Response]:
        try:
            assert isinstance(request, OAuth2Request)
            # Token validation is synchronous; run it off the event loop.
            request.token = await run_in_threadpool(resource_protector.validate_request, None, request)
            if request.token is None:
                raise HTTPException(403, "Invalid token")
            request.client = await async_query_client(request.token.client_id)
            if request.client is None:
                raise HTTPException(403, "Invalid client in token")
            if not request.client.check_redirect_uri(origin):
                raise HTTPException(403, "Allowed redirect uri does not match request")
            return None
        except OAuth2Error as error:
            return authorization.handle_error_response(request, error)
class OtherUserInspection(UserInfoMixin):
    # Returns the userinfo of an arbitrary user. Requires the '*users'
    # scope, granted either to a bearer token or to the authenticated
    # client itself.
    async def create_response(self, request: TypedRequest, user_id: str, client_auth: dict = None) -> Response:
        try:
            assert isinstance(request, OAuth2Request)
            if request.client is None:
                # Bearer-token path: validate the token and use its scope.
                request.token = await run_in_threadpool(resource_protector.validate_request, None, request)
                if request.token is None:
                    raise HTTPException(403, "Invalid token")
                client_id = request.token.client_id
                scopes = request.token.scope
                scope = USERS_SCOPE
            else:
                # Client-credentials path: use the client's allowed scopes.
                client_id = request.client_id
                scopes = request.client.allowed_scope
                scope = scopes
            if USERS_SCOPE not in scope_to_list(scopes):
                raise InsufficientScopeError('Missing "*users" scope', request.uri)
            user = await UserWithRoles.async_load(user_id, client_id)
            if user is None:
                raise HTTPException(404, "User not found")
            user_info = await self.async_generate_user_info(user, scope)
            return JSONResponse(user_info)
        except OAuth2Error as error:
            return authorization.handle_error_response(request, error)
class OtherUsersInspection(UserInfoMixin):
    # Returns userinfo for all users visible to a client. Requires the
    # '*users' scope; roles are only loaded (and exposed) when the caller
    # authenticated as a client rather than with a bearer token.
    async def create_response(self, request: TypedRequest) -> Response:
        try:
            assert isinstance(request, OAuth2Request)
            if request.client is None:
                # Bearer-token path: validate the token and use its scope.
                request.token = await run_in_threadpool(resource_protector.validate_request, None, request)
                if request.token is None:
                    raise HTTPException(403, "Invalid token")
                client_id = request.token.client_id
                scopes = request.token.scope
                scope = USERS_SCOPE
                load_roles = False
            else:
                # Client-credentials path: use the client's allowed scopes.
                client_id = request.client_id
                scopes = request.client.allowed_scope
                scope = scopes
                load_roles = True
            if USERS_SCOPE not in scope_to_list(scopes):
                raise InsufficientScopeError('Missing "*users" scope', request.uri)
            user_infos = []
            for user in await UserWithRoles.async_load_all(client_id, load_roles=load_roles):
                user_info = await self.async_generate_user_info(user, scope)
                if not load_roles:
                    # Roles were not loaded; drop the placeholder claim.
                    del user_info['roles']
                user_infos.append(user_info)
            return JSONResponse(user_infos)
        except OAuth2Error as error:
            return authorization.handle_error_response(request, error)
class TypeHint(str, Enum):
    # RFC 7009 'token_type_hint' values accepted by the revocation endpoint.
    AccessToken = "access_token"
    RefreshToken = "refresh_token"
class RevocationEndpoint:
    # RFC 7009 token revocation: accepts an access or refresh token and
    # deletes the matching token document after authenticating the client.
    async def create_response(
        self, raw_token: str, token_type_hint: Optional[TypeHint], request: TypedRequest
    ) -> Response:
        token_data = None
        # Honour the hint, but fall back to trying both token kinds.
        if token_type_hint is None or token_type_hint == TypeHint.AccessToken:
            token_data = await async_token_collection.find_one({'_id': raw_token})
        if token_data is None and (token_type_hint is None or token_type_hint == TypeHint.RefreshToken):
            token_data = await async_token_collection.find_one({'refresh_token': raw_token})
        if token_data is None:
            # Per RFC 7009, revoking an unknown token is not an error.
            return Response()
        token = DbToken.validate_document(token_data)
        try:
            if request.client_id is None:
                # Allow public clients: take the client id from the token.
                request.data['client_id'] = token.client_id
            elif token.client_id != request.client_id:
                raise InvalidClientError(state=request.state, status_code=401)
            # Client authentication is synchronous; run it off the event loop.
            await run_in_threadpool(
                authorization.authenticate_client, request, ["none", "client_secret_basic", "client_secret_post"]
            )
            # await async_token_collection.update_one({'_id': token.access_token}, {'$set': {'revoked': True}})
            # token_collection.update_one({'_id': credential.access_token}, {'revoked': True})
            await async_token_collection.delete_one({'_id': token.access_token})
            return Response()
        except OAuth2Error as error:
            return authorization.handle_error_response(request, error)
# Module-level singletons wired at import time: a resource protector with
# bearer-token validation, plus the endpoint helper objects used by the
# API routes.
resource_protector = ResourceProtector()
resource_protector.register_token_validator(BearerTokenValidator())
user_introspection = UserIntrospection()
token_revocation = RevocationEndpoint()
request_origin_verifier = RequestOriginVerifier()
other_user_inspection = OtherUserInspection()
other_users_inspection = OtherUsersInspection()
| 40.501786 | 121 | 0.680217 | 2,471 | 22,681 | 5.986645 | 0.114933 | 0.020009 | 0.009126 | 0.009937 | 0.458392 | 0.388968 | 0.354762 | 0.319273 | 0.287636 | 0.272764 | 0 | 0.005781 | 0.237379 | 22,681 | 559 | 122 | 40.57424 | 0.849454 | 0.025925 | 0 | 0.35412 | 1 | 0 | 0.039853 | 0.003623 | 0 | 0 | 0 | 0.001789 | 0.024499 | 1 | 0.075724 | false | 0.017817 | 0.053452 | 0.026726 | 0.342984 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
1652c769892c847b99d4a49f23694f814ea670c4 | 2,803 | py | Python | src/robusta/core/model/events.py | kandahk/robusta | 61a2001cb1c4e90e8a74b810463ec99e6cb80787 | [
"MIT"
] | null | null | null | src/robusta/core/model/events.py | kandahk/robusta | 61a2001cb1c4e90e8a74b810463ec99e6cb80787 | [
"MIT"
] | null | null | null | src/robusta/core/model/events.py | kandahk/robusta | 61a2001cb1c4e90e8a74b810463ec99e6cb80787 | [
"MIT"
] | null | null | null | import logging
import uuid
from enum import Enum
from typing import List, Optional, Dict, Any
from dataclasses import dataclass, field
from pydantic import BaseModel
from ...integrations.scheduled.playbook_scheduler import PlaybooksScheduler
from ..reporting.base import Finding, BaseBlock
class EventType(Enum):
KUBERNETES_TOPOLOGY_CHANGE = 1
PROMETHEUS = 2
MANUAL_TRIGGER = 3
SCHEDULED_TRIGGER = 4
class ExecutionEventBaseParams(BaseModel):
named_sinks: Optional[List[str]] = None
# Right now:
# 1. this is a dataclass but we need to make all fields optional in subclasses because of https://stackoverflow.com/questions/51575931/
# 2. this can't be a pydantic BaseModel because of various pydantic bugs (see https://github.com/samuelcolvin/pydantic/pull/2557)
# once the pydantic PR that addresses those issues is merged, this should be a pydantic class
# (note that we need to integrate with dataclasses because of hikaru)
@dataclass
class ExecutionBaseEvent:
findings: Dict[str, Finding] = field(default_factory=lambda: {})
named_sinks: Optional[List[str]] = None
response: Dict[
str, Any
] = None # Response returned to caller. For admission or manual triggers for example
stop_processing: bool = False
_scheduler: Optional[PlaybooksScheduler] = None
def set_scheduler(self, scheduler: PlaybooksScheduler):
self._scheduler = scheduler
def get_scheduler(self) -> PlaybooksScheduler:
return self._scheduler
def create_default_finding(self) -> Finding:
"""Create finding default fields according to the event type"""
return Finding(title="Generic Finding", aggregation_key="Generic finding key")
def add_enrichment(
self,
enrichment_blocks: List[BaseBlock],
annotations=None,
finding_key: str = "DEFAULT",
):
finding = self.findings.get(finding_key)
if not finding:
finding = self.create_default_finding()
self.findings[finding_key] = finding
finding.add_enrichment(enrichment_blocks, annotations)
def add_finding(self, finding: Finding, finding_key: str = None):
if (
not finding_key
): # user didn't specify a key, so this finding shouldn't be accessed by key. Randomise it
finding_key = str(uuid.uuid4())
existing_finding = self.findings.get(finding_key)
if existing_finding:
logging.warning(
f"Overriding existing finding. finding_key: {finding_key} new finding: {finding}"
)
self.findings[finding_key] = finding
@staticmethod
def from_params(params: ExecutionEventBaseParams) -> Optional["ExecutionBaseEvent"]:
return ExecutionBaseEvent(named_sinks=params.named_sinks)
| 35.481013 | 135 | 0.708883 | 334 | 2,803 | 5.832335 | 0.413174 | 0.056468 | 0.039014 | 0.022587 | 0.101643 | 0.101643 | 0.034908 | 0 | 0 | 0 | 0 | 0.008629 | 0.214413 | 2,803 | 78 | 136 | 35.935897 | 0.876022 | 0.232251 | 0 | 0.109091 | 0 | 0 | 0.064019 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.109091 | false | 0 | 0.145455 | 0.036364 | 0.545455 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
1653cd2fffd32e2ad6ea59e14f67f33d48afc170 | 560 | py | Python | examples/django_mongoengine/bike/models.py | pfrantz/graphene-mongo | f7d4f3e194ec41793e6da547934c34e11fd9ef51 | [
"MIT"
] | 260 | 2018-02-03T01:00:42.000Z | 2022-02-18T12:42:01.000Z | examples/django_mongoengine/bike/models.py | pfrantz/graphene-mongo | f7d4f3e194ec41793e6da547934c34e11fd9ef51 | [
"MIT"
] | 159 | 2018-02-09T07:35:03.000Z | 2022-03-20T03:43:23.000Z | examples/django_mongoengine/bike/models.py | pfrantz/graphene-mongo | f7d4f3e194ec41793e6da547934c34e11fd9ef51 | [
"MIT"
] | 124 | 2018-02-04T20:19:01.000Z | 2022-03-25T21:40:41.000Z | from mongoengine import Document
from mongoengine.fields import (
FloatField,
StringField,
ListField,
URLField,
ObjectIdField,
)
class Shop(Document):
meta = {"collection": "shop"}
ID = ObjectIdField()
name = StringField()
address = StringField()
website = URLField()
class Bike(Document):
    """MongoEngine document mapped to the "bike" collection."""

    meta = {"collection": "bike"}
    # Explicit ObjectId field alongside MongoEngine's implicit `id` key.
    ID = ObjectIdField()
    name = StringField()
    brand = StringField()
    # Stored as a string, not an int — presumably to match the source data; verify.
    year = StringField()
    size = ListField(StringField())
    wheel_size = FloatField()
    # Field named `type` shadows the builtin inside class scope; kept as-is
    # because renaming it would change the stored document schema.
    type = StringField()
| 20 | 35 | 0.642857 | 49 | 560 | 7.326531 | 0.489796 | 0.083565 | 0.122563 | 0.167131 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.241071 | 560 | 27 | 36 | 20.740741 | 0.844706 | 0 | 0 | 0.173913 | 0 | 0 | 0.05 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.086957 | 0 | 0.73913 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
1654fce2866f6b2ef021c29092efa26419e5ba83 | 4,918 | py | Python | uhd_restpy/testplatform/sessions/ixnetwork/impairment/profile/fixedclassifier/fixedclassifier.py | OpenIxia/ixnetwork_restpy | f628db450573a104f327cf3c737ca25586e067ae | [
"MIT"
] | 20 | 2019-05-07T01:59:14.000Z | 2022-02-11T05:24:47.000Z | uhd_restpy/testplatform/sessions/ixnetwork/impairment/profile/fixedclassifier/fixedclassifier.py | OpenIxia/ixnetwork_restpy | f628db450573a104f327cf3c737ca25586e067ae | [
"MIT"
] | 60 | 2019-04-03T18:59:35.000Z | 2022-02-22T12:05:05.000Z | uhd_restpy/testplatform/sessions/ixnetwork/impairment/profile/fixedclassifier/fixedclassifier.py | OpenIxia/ixnetwork_restpy | f628db450573a104f327cf3c737ca25586e067ae | [
"MIT"
] | 13 | 2019-05-20T10:48:31.000Z | 2021-10-06T07:45:44.000Z | # MIT LICENSE
#
# Copyright 1997 - 2020 by IXIA Keysight
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from uhd_restpy.base import Base
from uhd_restpy.files import Files
from typing import List, Any, Union
class FixedClassifier(Base):
    """Specifies the packets to apply this profile to. If there are multiple patterns enabled, they are ANDed: each packet must match all packets in order to be impaired by this profile.
    The FixedClassifier class encapsulates a list of fixedClassifier resources that are managed by the user.
    A list of resources can be retrieved from the server using the FixedClassifier.find() method.
    The list can be managed by using the FixedClassifier.add() and FixedClassifier.remove() methods.
    """

    __slots__ = ()
    _SDM_NAME = 'fixedClassifier'
    _SDM_ATT_MAP = {}
    _SDM_ENUM_MAP = {}

    def __init__(self, parent, list_op=False):
        super(FixedClassifier, self).__init__(parent, list_op)

    @property
    def Pattern(self):
        """
        Returns
        -------
        - obj(uhd_restpy.testplatform.sessions.ixnetwork.impairment.profile.fixedclassifier.pattern.pattern.Pattern): An instance of the Pattern class

        Raises
        ------
        - ServerError: The server has encountered an uncategorized error condition
        """
        from uhd_restpy.testplatform.sessions.ixnetwork.impairment.profile.fixedclassifier.pattern.pattern import Pattern
        # Single lookup instead of the original's double `get` (test + fetch).
        pattern = self._properties.get('Pattern')
        if pattern is not None:
            return pattern
        return Pattern(self)

    def add(self):
        """Adds a new fixedClassifier resource on the server and adds it to the container.

        Returns
        -------
        - self: This instance with all currently retrieved fixedClassifier resources using find and the newly added fixedClassifier resources available through an iterator or index

        Raises
        ------
        - ServerError: The server has encountered an uncategorized error condition
        """
        return self._create(self._map_locals(self._SDM_ATT_MAP, locals()))

    def remove(self):
        """Deletes all the contained fixedClassifier resources in this instance from the server.

        Raises
        ------
        - NotFoundError: The requested resource does not exist on the server
        - ServerError: The server has encountered an uncategorized error condition
        """
        self._delete()

    def find(self):
        """Finds and retrieves fixedClassifier resources from the server.

        All named parameters are evaluated on the server using regex. The named parameters can be used to selectively retrieve fixedClassifier resources from the server.
        To retrieve an exact match ensure the parameter value starts with ^ and ends with $
        By default the find method takes no parameters and will retrieve all fixedClassifier resources from the server.

        Returns
        -------
        - self: This instance with matching fixedClassifier resources retrieved from the server available through an iterator or index

        Raises
        ------
        - ServerError: The server has encountered an uncategorized error condition
        """
        return self._select(self._map_locals(self._SDM_ATT_MAP, locals()))

    def read(self, href):
        """Retrieves a single instance of fixedClassifier data from the server.

        Args
        ----
        - href (str): An href to the instance to be retrieved

        Returns
        -------
        - self: This instance with the fixedClassifier resources from the server available through an iterator or index

        Raises
        ------
        - NotFoundError: The requested resource does not exist on the server
        - ServerError: The server has encountered an uncategorized error condition
        """
        return self._read(href)
| 41.677966 | 187 | 0.700895 | 625 | 4,918 | 5.448 | 0.3536 | 0.044934 | 0.030543 | 0.033774 | 0.313069 | 0.249633 | 0.249633 | 0.249633 | 0.249633 | 0.229075 | 0 | 0.002127 | 0.235055 | 4,918 | 117 | 188 | 42.034188 | 0.902977 | 0.707808 | 0 | 0 | 0 | 0 | 0.027831 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.222222 | false | 0 | 0.148148 | 0 | 0.740741 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
165cb63df5c2c12565813006cb857ecc7266b584 | 9,952 | py | Python | Lib/test/test_runpy.py | arvindm95/unladen-swallow | 8175e37eaea7ca66ed03283b46bc1d2db0d3f9c3 | [
"PSF-2.0"
] | 2,293 | 2015-01-02T12:46:10.000Z | 2022-03-29T09:45:43.000Z | python/src/Lib/test/test_runpy.py | weiqiangzheng/sl4a | d3c17dca978cbeee545e12ea240a9dbf2a6999e9 | [
"Apache-2.0"
] | 315 | 2015-05-31T11:55:46.000Z | 2022-01-12T08:36:37.000Z | python/src/Lib/test/test_runpy.py | weiqiangzheng/sl4a | d3c17dca978cbeee545e12ea240a9dbf2a6999e9 | [
"Apache-2.0"
] | 1,033 | 2015-01-04T07:48:40.000Z | 2022-03-24T09:34:37.000Z | # Test the runpy module
import unittest
import os
import os.path
import sys
import tempfile
from test.test_support import verbose, run_unittest, forget
from runpy import _run_code, _run_module_code, run_module
# Note: This module can't safely test _run_module_as_main as it
# runs its tests in the current process, which would mess with the
# real __main__ module (usually test.regrtest)
# See test_cmd_line_script for a test that executes that code path
# Set up the test code and expected results
class RunModuleCodeTest(unittest.TestCase):
    """Unit tests for runpy's internal _run_code and _run_module_code helpers."""

    # Globals the embedded test source is expected to produce.
    expected_result = ["Top level assignment", "Lower level reference"]
    # Source executed by the tests below; it probes basic execution,
    # the sys-module bindings, and a nested _run_module_code call.
    test_source = (
        "# Check basic code execution\n"
        "result = ['Top level assignment']\n"
        "def f():\n"
        "    result.append('Lower level reference')\n"
        "f()\n"
        "# Check the sys module\n"
        "import sys\n"
        "run_argv0 = sys.argv[0]\n"
        "run_name_in_sys_modules = __name__ in sys.modules\n"
        "if run_name_in_sys_modules:\n"
        "    module_in_sys_modules = globals() is sys.modules[__name__].__dict__\n"
        "# Check nested operation\n"
        "import runpy\n"
        "nested = runpy._run_module_code('x=1\\n', mod_name='<run>')\n"
    )

    def test_run_code(self):
        """_run_code with an empty globals dict: dunders stay None, argv untouched."""
        saved_argv0 = sys.argv[0]
        d = _run_code(self.test_source, {})
        self.failUnless(d["result"] == self.expected_result)
        self.failUnless(d["__name__"] is None)
        self.failUnless(d["__file__"] is None)
        self.failUnless(d["__loader__"] is None)
        self.failUnless(d["__package__"] is None)
        self.failUnless(d["run_argv0"] is saved_argv0)
        self.failUnless("run_name" not in d)
        # _run_code must not leave sys.argv[0] rebound.
        self.failUnless(sys.argv[0] is saved_argv0)

    def test_run_module_code(self):
        """_run_module_code sets dunders/sys state for the run, then restores them."""
        initial = object()
        name = "<Nonsense>"
        file = "Some other nonsense"
        loader = "Now you're just being silly"
        package = ''  # Treat as a top level module
        d1 = dict(initial=initial)
        saved_argv0 = sys.argv[0]
        d2 = _run_module_code(self.test_source,
                              d1,
                              name,
                              file,
                              loader,
                              package)
        # The input dict is copied, not mutated, and the copy carries results.
        self.failUnless("result" not in d1)
        self.failUnless(d2["initial"] is initial)
        self.failUnless(d2["result"] == self.expected_result)
        self.failUnless(d2["nested"]["x"] == 1)
        self.failUnless(d2["__name__"] is name)
        # While running, the module must be visible in sys.modules.
        self.failUnless(d2["run_name_in_sys_modules"])
        self.failUnless(d2["module_in_sys_modules"])
        self.failUnless(d2["__file__"] is file)
        self.failUnless(d2["run_argv0"] is file)
        self.failUnless(d2["__loader__"] is loader)
        self.failUnless(d2["__package__"] is package)
        # Afterwards both sys.argv[0] and sys.modules are restored.
        self.failUnless(sys.argv[0] is saved_argv0)
        self.failUnless(name not in sys.modules)
class RunModuleTest(unittest.TestCase):
    """Tests for the public runpy.run_module API.

    NOTE: this is Python 2 code (print statements, `except OSError, ex`).
    Helpers build a temporary nested package tree on disk, run a module
    from it (from source and again from bytecode), then tear it all down.
    """

    def expect_import_error(self, mod_name):
        # Assert that run_module raises ImportError for mod_name.
        try:
            run_module(mod_name)
        except ImportError:
            pass
        else:
            self.fail("Expected import error for " + mod_name)

    def test_invalid_names(self):
        # Builtin module
        self.expect_import_error("sys")
        # Non-existent modules
        self.expect_import_error("sys.imp.eric")
        self.expect_import_error("os.path.half")
        self.expect_import_error("a.bee")
        self.expect_import_error(".howard")
        self.expect_import_error("..eaten")
        # Package
        self.expect_import_error("logging")

    def test_library_module(self):
        # runpy itself should be runnable as a module.
        run_module("runpy")

    def _add_pkg_dir(self, pkg_dir):
        # Create pkg_dir plus an empty __init__.py; returns the __init__ path.
        os.mkdir(pkg_dir)
        pkg_fname = os.path.join(pkg_dir, "__init__"+os.extsep+"py")
        pkg_file = open(pkg_fname, "w")
        pkg_file.close()
        return pkg_fname

    def _make_pkg(self, source, depth):
        # Build a package tree `depth` levels deep under a temp dir, with
        # `source` written to runpy_test.py at the bottom. The temp dir is
        # prepended to sys.path; _del_pkg undoes all of this.
        pkg_name = "__runpy_pkg__"
        test_fname = "runpy_test"+os.extsep+"py"
        pkg_dir = sub_dir = tempfile.mkdtemp()
        if verbose: print "  Package tree in:", sub_dir
        sys.path.insert(0, pkg_dir)
        if verbose: print "  Updated sys.path:", sys.path[0]
        for i in range(depth):
            sub_dir = os.path.join(sub_dir, pkg_name)
            pkg_fname = self._add_pkg_dir(sub_dir)
            if verbose: print "  Next level in:", sub_dir
            if verbose: print "  Created:", pkg_fname
        mod_fname = os.path.join(sub_dir, test_fname)
        mod_file = open(mod_fname, "w")
        mod_file.write(source)
        mod_file.close()
        if verbose: print "  Created:", mod_fname
        mod_name = (pkg_name+".")*depth + "runpy_test"
        return pkg_dir, mod_fname, mod_name

    def _del_pkg(self, top, depth, mod_name):
        # Remove the cached modules, the sys.path entry added by _make_pkg,
        # and the on-disk tree (bottom-up so dirs are empty when removed).
        for entry in list(sys.modules):
            if entry.startswith("__runpy_pkg__"):
                del sys.modules[entry]
        if verbose: print "  Removed sys.modules entries"
        del sys.path[0]
        if verbose: print "  Removed sys.path entry"
        for root, dirs, files in os.walk(top, topdown=False):
            for name in files:
                try:
                    os.remove(os.path.join(root, name))
                except OSError, ex:
                    if verbose: print ex # Persist with cleaning up
            for name in dirs:
                fullname = os.path.join(root, name)
                try:
                    os.rmdir(fullname)
                except OSError, ex:
                    if verbose: print ex # Persist with cleaning up
        try:
            os.rmdir(top)
            if verbose: print "  Removed package tree"
        except OSError, ex:
            if verbose: print ex # Persist with cleaning up

    def _check_module(self, depth):
        # Run a generated module first from source, then (after deleting the
        # .py so only bytecode remains) from its compiled form.
        pkg_dir, mod_fname, mod_name = (
               self._make_pkg("x=1\n", depth))
        forget(mod_name)
        try:
            if verbose: print "Running from source:", mod_name
            d1 = run_module(mod_name) # Read from source
            self.failUnless("x" in d1)
            self.failUnless(d1["x"] == 1)
            del d1 # Ensure __loader__ entry doesn't keep file open
            __import__(mod_name)
            os.remove(mod_fname)
            if verbose: print "Running from compiled:", mod_name
            d2 = run_module(mod_name) # Read from bytecode
            self.failUnless("x" in d2)
            self.failUnless(d2["x"] == 1)
            del d2 # Ensure __loader__ entry doesn't keep file open
        finally:
            self._del_pkg(pkg_dir, depth, mod_name)
        if verbose: print "Module executed successfully"

    def _add_relative_modules(self, base_dir, source, depth):
        # Create a sibling module next to runpy_test plus an uncle/cousin
        # package branch one level up, to exercise relative imports.
        if depth <= 1:
            raise ValueError("Relative module test needs depth > 1")
        pkg_name = "__runpy_pkg__"
        module_dir = base_dir
        for i in range(depth):
            parent_dir = module_dir
            module_dir = os.path.join(module_dir, pkg_name)
        # Add sibling module
        sibling_fname = os.path.join(module_dir, "sibling"+os.extsep+"py")
        sibling_file = open(sibling_fname, "w")
        sibling_file.close()
        if verbose: print "  Added sibling module:", sibling_fname
        # Add nephew module
        uncle_dir = os.path.join(parent_dir, "uncle")
        self._add_pkg_dir(uncle_dir)
        if verbose: print "  Added uncle package:", uncle_dir
        cousin_dir = os.path.join(uncle_dir, "cousin")
        self._add_pkg_dir(cousin_dir)
        if verbose: print "  Added cousin package:", cousin_dir
        nephew_fname = os.path.join(cousin_dir, "nephew"+os.extsep+"py")
        nephew_file = open(nephew_fname, "w")
        nephew_file.close()
        if verbose: print "  Added nephew module:", nephew_fname

    def _check_relative_imports(self, depth, run_name=None):
        # Verify that explicit relative imports resolve correctly when the
        # module is executed via run_module (source and bytecode paths).
        contents = r"""\
from __future__ import absolute_import
from . import sibling
from ..uncle.cousin import nephew
"""
        pkg_dir, mod_fname, mod_name = (
               self._make_pkg(contents, depth))
        try:
            self._add_relative_modules(pkg_dir, contents, depth)
            pkg_name = mod_name.rpartition('.')[0]
            if verbose: print "Running from source:", mod_name
            d1 = run_module(mod_name, run_name=run_name) # Read from source
            self.failUnless("__package__" in d1)
            self.failUnless(d1["__package__"] == pkg_name)
            self.failUnless("sibling" in d1)
            self.failUnless("nephew" in d1)
            del d1 # Ensure __loader__ entry doesn't keep file open
            __import__(mod_name)
            os.remove(mod_fname)
            if verbose: print "Running from compiled:", mod_name
            d2 = run_module(mod_name, run_name=run_name) # Read from bytecode
            self.failUnless("__package__" in d2)
            self.failUnless(d2["__package__"] == pkg_name)
            self.failUnless("sibling" in d2)
            self.failUnless("nephew" in d2)
            del d2 # Ensure __loader__ entry doesn't keep file open
        finally:
            self._del_pkg(pkg_dir, depth, mod_name)
        if verbose: print "Module executed successfully"

    def test_run_module(self):
        for depth in range(4):
            if verbose: print "Testing package depth:", depth
            self._check_module(depth)

    def test_explicit_relative_import(self):
        for depth in range(2, 5):
            if verbose: print "Testing relative imports at depth:", depth
            self._check_relative_imports(depth)

    def test_main_relative_import(self):
        # Same as above but the module runs under the name __main__.
        for depth in range(2, 5):
            if verbose: print "Testing main relative imports at depth:", depth
            self._check_relative_imports(depth, "__main__")
def test_main():
    """Run both runpy test suites."""
    for test_case in (RunModuleCodeTest, RunModuleTest):
        run_unittest(test_case)


if __name__ == "__main__":
    test_main()
| 39.181102 | 82 | 0.60621 | 1,283 | 9,952 | 4.414653 | 0.159002 | 0.081568 | 0.059322 | 0.025953 | 0.396893 | 0.272069 | 0.217514 | 0.204449 | 0.192444 | 0.181144 | 0 | 0.00896 | 0.293509 | 9,952 | 253 | 83 | 39.335968 | 0.796615 | 0.074759 | 0 | 0.191781 | 0 | 0 | 0.174325 | 0.019382 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0.004566 | 0.141553 | null | null | 0.109589 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
165d5b352de2106b373e88fa207e7c0361117e91 | 4,795 | py | Python | experiments/_pytorch/_grpc_server/protofiles/imagedata_pb2.py | RedisAI/benchmarks | 65b8509b81795da73f25f51941c61fbd9765914c | [
"MIT"
] | 6 | 2019-04-18T10:17:52.000Z | 2021-07-02T19:57:08.000Z | experiments/_pytorch/_grpc_server/protofiles/imagedata_pb2.py | hhsecond/benchmarks | 65b8509b81795da73f25f51941c61fbd9765914c | [
"MIT"
] | 1 | 2021-07-21T12:17:08.000Z | 2021-07-21T12:17:08.000Z | experiments/_pytorch/_grpc_server/protofiles/imagedata_pb2.py | hhsecond/benchmarks | 65b8509b81795da73f25f51941c61fbd9765914c | [
"MIT"
] | 2 | 2020-03-15T00:37:57.000Z | 2022-02-26T04:36:00.000Z | # -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: imagedata.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='imagedata.proto',
package='',
syntax='proto3',
serialized_options=None,
serialized_pb=_b('\n\x0fimagedata.proto\"H\n\tImageData\x12\r\n\x05image\x18\x01 \x01(\x0c\x12\x0e\n\x06height\x18\x02 \x01(\x05\x12\r\n\x05width\x18\x03 \x01(\x05\x12\r\n\x05\x64type\x18\x04 \x01(\t\"!\n\x0fPredictionClass\x12\x0e\n\x06output\x18\x01 \x03(\x02\x32<\n\tPredictor\x12/\n\rGetPrediction\x12\n.ImageData\x1a\x10.PredictionClass\"\x00\x62\x06proto3')
)
_IMAGEDATA = _descriptor.Descriptor(
name='ImageData',
full_name='ImageData',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='image', full_name='ImageData.image', index=0,
number=1, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='height', full_name='ImageData.height', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='width', full_name='ImageData.width', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='dtype', full_name='ImageData.dtype', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=19,
serialized_end=91,
)
_PREDICTIONCLASS = _descriptor.Descriptor(
name='PredictionClass',
full_name='PredictionClass',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='output', full_name='PredictionClass.output', index=0,
number=1, type=2, cpp_type=6, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=93,
serialized_end=126,
)
DESCRIPTOR.message_types_by_name['ImageData'] = _IMAGEDATA
DESCRIPTOR.message_types_by_name['PredictionClass'] = _PREDICTIONCLASS
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
ImageData = _reflection.GeneratedProtocolMessageType('ImageData', (_message.Message,), dict(
DESCRIPTOR = _IMAGEDATA,
__module__ = 'imagedata_pb2'
# @@protoc_insertion_point(class_scope:ImageData)
))
_sym_db.RegisterMessage(ImageData)
PredictionClass = _reflection.GeneratedProtocolMessageType('PredictionClass', (_message.Message,), dict(
DESCRIPTOR = _PREDICTIONCLASS,
__module__ = 'imagedata_pb2'
# @@protoc_insertion_point(class_scope:PredictionClass)
))
_sym_db.RegisterMessage(PredictionClass)
_PREDICTOR = _descriptor.ServiceDescriptor(
name='Predictor',
full_name='Predictor',
file=DESCRIPTOR,
index=0,
serialized_options=None,
serialized_start=128,
serialized_end=188,
methods=[
_descriptor.MethodDescriptor(
name='GetPrediction',
full_name='Predictor.GetPrediction',
index=0,
containing_service=None,
input_type=_IMAGEDATA,
output_type=_PREDICTIONCLASS,
serialized_options=None,
),
])
_sym_db.RegisterServiceDescriptor(_PREDICTOR)
DESCRIPTOR.services_by_name['Predictor'] = _PREDICTOR
# @@protoc_insertion_point(module_scope)
| 30.935484 | 365 | 0.740563 | 580 | 4,795 | 5.844828 | 0.248276 | 0.040118 | 0.061947 | 0.029499 | 0.438053 | 0.405015 | 0.395575 | 0.395575 | 0.367257 | 0.367257 | 0 | 0.032266 | 0.133889 | 4,795 | 154 | 366 | 31.136364 | 0.784012 | 0.057769 | 0 | 0.511811 | 1 | 0.007874 | 0.15122 | 0.084922 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.03937 | 0 | 0.03937 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
165e63725354de429a448d866f665cccca991916 | 656 | py | Python | mmdet/ops/dcn/__init__.py | TJUsym/TJU_Advanced_CV_Homework | 2d85943390e9ba53b80988e0ab8d50aef0cd17da | [
"Apache-2.0"
] | 1,158 | 2019-04-26T01:08:32.000Z | 2022-03-30T06:46:24.000Z | mmdet/ops/dcn/__init__.py | TJUsym/TJU_Advanced_CV_Homework | 2d85943390e9ba53b80988e0ab8d50aef0cd17da | [
"Apache-2.0"
] | 148 | 2021-03-18T09:44:02.000Z | 2022-03-31T06:01:39.000Z | mmdet/ops/dcn/__init__.py | TJUsym/TJU_Advanced_CV_Homework | 2d85943390e9ba53b80988e0ab8d50aef0cd17da | [
"Apache-2.0"
] | 197 | 2020-01-29T09:58:27.000Z | 2022-03-25T12:08:56.000Z | from .functions.deform_conv import deform_conv, modulated_deform_conv
from .functions.deform_pool import deform_roi_pooling
from .modules.deform_conv import (DeformConv, ModulatedDeformConv,
DeformConvPack, ModulatedDeformConvPack)
from .modules.deform_pool import (DeformRoIPooling, DeformRoIPoolingPack,
ModulatedDeformRoIPoolingPack)
__all__ = [
'DeformConv', 'DeformConvPack', 'ModulatedDeformConv',
'ModulatedDeformConvPack', 'DeformRoIPooling', 'DeformRoIPoolingPack',
'ModulatedDeformRoIPoolingPack', 'deform_conv', 'modulated_deform_conv',
'deform_roi_pooling'
]
| 46.857143 | 76 | 0.739329 | 51 | 656 | 9.156863 | 0.352941 | 0.12848 | 0.08137 | 0.107066 | 0.124197 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.184451 | 656 | 13 | 77 | 50.461538 | 0.872897 | 0 | 0 | 0 | 0 | 0 | 0.275915 | 0.11128 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.333333 | 0 | 0.333333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
1662a331dbe1e237d08e9e21a3e8d596bcbce6c4 | 2,477 | py | Python | pyxrd/mixture/models/insitu_behaviours/insitu_behaviour.py | PyXRD/pyxrd | 26bacdf64f3153fa74b8caa62e219b76d91a55c1 | [
"BSD-2-Clause"
] | 27 | 2018-06-15T15:28:18.000Z | 2022-03-10T12:23:50.000Z | pyxrd/mixture/models/insitu_behaviours/insitu_behaviour.py | PyXRD/pyxrd | 26bacdf64f3153fa74b8caa62e219b76d91a55c1 | [
"BSD-2-Clause"
] | 22 | 2018-06-14T08:29:16.000Z | 2021-07-05T13:33:44.000Z | pyxrd/mixture/models/insitu_behaviours/insitu_behaviour.py | PyXRD/pyxrd | 26bacdf64f3153fa74b8caa62e219b76d91a55c1 | [
"BSD-2-Clause"
] | 8 | 2019-04-13T13:03:51.000Z | 2021-06-19T09:29:11.000Z | # coding=UTF-8
# ex:ts=4:sw=4:et=on
#
# Copyright (c) 2013, Mathijs Dumon
# All rights reserved.
# Complete license can be found in the LICENSE file.
from mvc.models.properties import StringProperty
from pyxrd.generic.io.custom_io import storables, Storable
from pyxrd.generic.models.base import DataModel
from pyxrd.refinement.refinables.mixins import RefinementGroup
@storables.register()
class InSituBehaviour(DataModel, RefinementGroup, Storable):
"""
Interface class for coding in-situ behaviour scripts.
Sub-classes should override or implement the methods below.
"""
# MODEL INTEL:
class Meta(DataModel.Meta):
store_id = "InSituBehaviour" # Override this so it is a unique string
concrete = False # Indicates this cannot be instantiated and added in the UI
mixture = property(DataModel.parent.fget, DataModel.parent.fset)
# REFINEMENT GROUP IMPLEMENTATION:
@property
def refine_title(self):
return "In-situ behaviour"
@property
def refine_descriptor_data(self):
return dict(
phase_name=self.phase.refine_title,
component_name="*"
)
#: The name of this Behaviour
name = StringProperty(
default="New Behaviour", text="Name",
visible=True, persistent=True, tabular=True
)
# ------------------------------------------------------------
# Initialization and other internals
# ------------------------------------------------------------
def __init__(self, *args, **kwargs):
my_kwargs = self.pop_kwargs(kwargs,
*[prop.label for prop in InSituBehaviour.Meta.get_local_persistent_properties()]
)
super(InSituBehaviour, self).__init__(*args, **kwargs)
kwargs = my_kwargs
with self.data_changed.hold():
self.name = self.get_kwarg(kwargs, self.name, "name")
pass #end of constructor
# ------------------------------------------------------------
# Methods & Functions
# ------------------------------------------------------------
def apply(self, phase):
assert phase is not None, "Cannot apply on None"
assert self.is_compatible_with(phase), "`%r` is not compatible with phase `%r`" % (self, phase)
def is_compatible_with(self, phase):
return False # sub classes need to override this
pass #end of class | 34.402778 | 103 | 0.583771 | 264 | 2,477 | 5.371212 | 0.507576 | 0.025388 | 0.022567 | 0.028209 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.003721 | 0.240614 | 2,477 | 72 | 104 | 34.402778 | 0.750133 | 0.32176 | 0 | 0.105263 | 0 | 0 | 0.068543 | 0 | 0 | 0 | 0 | 0 | 0.052632 | 1 | 0.131579 | false | 0.052632 | 0.105263 | 0.078947 | 0.421053 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
166407e573ed13b6f495ddb118b6bb572fdf1148 | 423 | py | Python | ba5a-min-coins/money_change.py | kjco/bioinformatics-algorithms | 3c466157b89c1cbd54749563e39d86a307d7a3f3 | [
"MIT"
] | null | null | null | ba5a-min-coins/money_change.py | kjco/bioinformatics-algorithms | 3c466157b89c1cbd54749563e39d86a307d7a3f3 | [
"MIT"
] | null | null | null | ba5a-min-coins/money_change.py | kjco/bioinformatics-algorithms | 3c466157b89c1cbd54749563e39d86a307d7a3f3 | [
"MIT"
] | null | null | null |
def min_coins(money, coins):
    """Return the minimum number of coins from *coins* (repetition allowed)
    that sum exactly to *money*, via bottom-up dynamic programming.

    Amounts that cannot be formed keep the sentinel value 1000000, matching
    the original script's behaviour.
    """
    best = {0: 0}
    for amount in range(1, money + 1):
        fewest = 1000000  # sentinel standing in for "unreachable"
        for coin in coins:
            # Spending `coin` reduces to the already-solved amount - coin case.
            if coin <= amount and best[amount - coin] + 1 < fewest:
                fewest = best[amount - coin] + 1
        best[amount] = fewest
    return best[money]


money = 8074
#money = 18705
#coin_list = [24,23,21,5,3,1]
coin_list = [24,13,12,7,5,3,1]
#coin_list = map(int, open('dataset_71_8.txt').read().split(','))

# Single-argument print(...) is valid in both Python 2 and 3.
print(min_coins(money, coin_list))
| 18.391304 | 66 | 0.51773 | 75 | 423 | 2.786667 | 0.44 | 0.15311 | 0.095694 | 0.066986 | 0.105263 | 0 | 0 | 0 | 0 | 0 | 0 | 0.151724 | 0.314421 | 423 | 22 | 67 | 19.227273 | 0.568966 | 0.264775 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0 | null | null | 0.090909 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
1665579643c424a545b6a8b3af94a1a9e0f4f184 | 357 | py | Python | examples/remove_comments.py | igordejanovic/textx-bibtex | b1374a39b96da9c1bc979c367b9ed3feb04f4f01 | [
"MIT"
] | 1 | 2020-06-17T21:51:33.000Z | 2020-06-17T21:51:33.000Z | examples/remove_comments.py | igordejanovic/textx-bibtex | b1374a39b96da9c1bc979c367b9ed3feb04f4f01 | [
"MIT"
] | null | null | null | examples/remove_comments.py | igordejanovic/textx-bibtex | b1374a39b96da9c1bc979c367b9ed3feb04f4f01 | [
"MIT"
] | null | null | null | """
Remove comments from bib file.
"""
from textx import metamodel_for_language
from txbibtex import bibentry_str
BIB_FILE = 'references.bib'
bibfile = metamodel_for_language('bibtex').model_from_file(BIB_FILE)
# Drop line comments.
print('\n'.join([bibentry_str(e) for e in bibfile.entries
if e.__class__.__name__ != 'BibLineComment']))
| 27.461538 | 68 | 0.739496 | 49 | 357 | 5.020408 | 0.591837 | 0.085366 | 0.162602 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.148459 | 357 | 12 | 69 | 29.75 | 0.809211 | 0.142857 | 0 | 0 | 0 | 0 | 0.120805 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.333333 | 0 | 0.333333 | 0.166667 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
16677a6fe2ff1b1e4b01bda4446f100594d88c8e | 390 | py | Python | wishes/migrations/0005_auto_20201029_0904.py | e-elson/bd | e35c59686e5ec81925c22353e269601f286634db | [
"MIT"
] | null | null | null | wishes/migrations/0005_auto_20201029_0904.py | e-elson/bd | e35c59686e5ec81925c22353e269601f286634db | [
"MIT"
] | null | null | null | wishes/migrations/0005_auto_20201029_0904.py | e-elson/bd | e35c59686e5ec81925c22353e269601f286634db | [
"MIT"
] | null | null | null | # Generated by Django 3.1.2 on 2020-10-29 09:04
from django.db import migrations, models
class Migration(migrations.Migration):
    """Alter Gallery.image to a FilePathField rooted at /images."""

    dependencies = [
        ('wishes', '0004_auto_20201029_0857'),
    ]

    operations = [
        migrations.AlterField(
            model_name='gallery',
            name='image',
            field=models.FilePathField(path='/images'),
        ),
    ]
| 20.526316 | 55 | 0.594872 | 41 | 390 | 5.560976 | 0.853659 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.111111 | 0.284615 | 390 | 18 | 56 | 21.666667 | 0.706093 | 0.115385 | 0 | 0 | 1 | 0 | 0.139942 | 0.067055 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.083333 | 0 | 0.333333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
166802c5b61892041a13896dbed6ef514fd83df2 | 7,115 | py | Python | undeployed/legacy/Landsat/DNtoReflectance.py | NASA-DEVELOP/dnppy | 8f7ef6f0653f5a4ea730ee557c72a2c89c06ce0b | [
"NASA-1.3"
] | 65 | 2015-09-10T12:59:56.000Z | 2022-02-27T22:09:03.000Z | undeployed/legacy/Landsat/DNtoReflectance.py | snowzm/dnppy | 8f7ef6f0653f5a4ea730ee557c72a2c89c06ce0b | [
"NASA-1.3"
] | 40 | 2015-04-08T19:23:30.000Z | 2015-08-04T15:53:11.000Z | undeployed/legacy/Landsat/DNtoReflectance.py | snowzm/dnppy | 8f7ef6f0653f5a4ea730ee557c72a2c89c06ce0b | [
"NASA-1.3"
] | 45 | 2015-08-14T19:09:38.000Z | 2022-02-15T18:53:16.000Z | #-------------------------------------------------------------------------------
# Name: Landsat Digital Numbers to Radiance/Reflectance
# Purpose: To convert landsat 4,5, or 7 pixel values from digital numbers
# to Radiance, Reflectance, or Temperature
# Author: Quinten Geddes Quinten.A.Geddes@nasa.gov
# NASA DEVELOP Program
# Created: 19/10/2012
#-------------------------------------------------------------------------------
import arcpy
import math
arcpy.CheckOutExtension("Spatial")
def DNtoReflectance(Lbands,MetaData,OutputType="Reflectance/Temperature",Save=False,OutputFolder=""):
    """Convert Landsat 4, 5, 7, or 8 pixel values from digital numbers to
    Radiance, Reflectance, or Temperature (for thermal bands).

    -----Inputs------
    Lbands: GeoTIFF files containing individual bands of Landsat imagery. These
       must have the original names as downloaded and must be from a single scene.
    MetaData: The metadata text file that is downloaded with the Landsat bands
       themselves. This may be either the old or new MTL.txt format.
    OutputType: Choose whether the output should be:
       "Radiance"
       "Reflectance/Temperature" - calculates Reflectance for spectral bands
           and Temperature in Kelvin for thermal bands
    Save: Boolean indicating whether the output rasters will be saved permanently.
       Each band is saved as an individual GeoTIFF named according to the
       original filename and the output pixel unit.
       *If this is True, the OutputFolder variable must also be set.
    OutputFolder: Folder in which to save the output rasters.

    -----Outputs-----
    A list of arcpy Raster objects in a sequence that mirrors the input Lbands.
    """
    OutList = []

    # These lists are used to parse the metadata text file and locate relevant
    # information. The metadata format changed August 29, 2012; this tool can
    # process either the new or the old format.
    newMeta = ['LANDSAT_SCENE_ID = "', 'DATE_ACQUIRED = ', "SUN_ELEVATION = ",
               "RADIANCE_MAXIMUM_BAND_{0} = ", "RADIANCE_MINIMUM_BAND_{0} = ",
               "QUANTIZE_CAL_MAX_BAND_{0} = ", "QUANTIZE_CAL_MIN_BAND_{0} = "]
    oldMeta = ['BAND1_FILE_NAME = "', "ACQUISITION_DATE = ", "SUN_ELEVATION = ",
               "LMAX_BAND{0} = ", "LMIN_BAND{0} = ",
               "QCALMAX_BAND{0} = ", "QCALMIN_BAND{0} = "]

    # Read the whole metadata file once and close the handle immediately so it
    # is not leaked if a later step raises (the original closed it only on the
    # success path, at the very end of the function).
    f = open(MetaData)
    try:
        MText = f.read()
    finally:
        f.close()

    # The presence of a PRODUCT_CREATION_TIME category identifies old metadata;
    # if it is absent, the metadata is considered new.
    # Band6length is the length of the Band 6 name string, which is longer in
    # the new metadata format.
    if "PRODUCT_CREATION_TIME" in MText:
        Meta = oldMeta
        Band6length = 2
    else:
        Meta = newMeta
        Band6length = 8

    # The tile name is located using the newMeta/oldMeta indexes and the date
    # of capture is recorded. The scene-ID layout differs between formats, so
    # the year/julian-day character offsets differ as well.
    if Meta == newMeta:
        TileName = MText.split(Meta[0])[1].split('"')[0]
        year = TileName[9:13]
        jday = TileName[13:16]
    elif Meta == oldMeta:
        TileName = MText.split(Meta[0])[1].split('"')[0]
        year = TileName[13:17]
        jday = TileName[17:20]
    date = MText.split(Meta[1])[1].split('\n')[0]

    # The spacecraft from which the imagery was captured is identified; this
    # determines the solar exoatmospheric irradiance (ESun) for each band and
    # which band numbers are thermal.
    spacecraft = MText.split('SPACECRAFT_ID = "')[1].split('"')[0]
    ThermBands = ["6"]
    if "7" in spacecraft:
        ESun = (1969.0, 1840.0, 1551.0, 1044.0, 255.700, 0., 82.07, 1368.00)
        ThermBands = ["B6_VCID_1", "B6_VCID_2"]
    elif "5" in spacecraft:
        ESun = (1957.0, 1826.0, 1554.0, 1036.0, 215.0, 0., 80.67)
    elif "4" in spacecraft:
        ESun = (1957.0, 1825.0, 1557.0, 1033.0, 214.9, 0., 80.72)
    elif "8" in spacecraft:
        ESun = (1857.0, 1996.0, 1812.0, 1516.0, 983.3, 251.8, 85.24, 0.0, 389.3, 0., 0.)
        ThermBands = ["10", "11"]
    else:
        arcpy.AddError("This tool only works for Landsat 4, 5, 7 or 8 ")
        raise arcpy.ExecuteError()

    # Determine whether the year is a leap year and set the days-in-year accordingly.
    if float(year) % 4 == 0:
        DIY = 366.
    else:
        DIY = 365.

    # Use the julian day to determine the Earth-Sun distance (squared) via the
    # standard eccentricity-correction series.
    theta = 2 * math.pi * float(jday) / DIY
    dSun2 = (1.00011 + 0.034221 * math.cos(theta) + 0.001280 * math.sin(theta) +
             0.000719 * math.cos(2 * theta) + 0.000077 * math.sin(2 * theta))
    # Solar zenith angle = 90 degrees minus the sun elevation from the metadata.
    SZA = 90. - float(MText.split(Meta[2])[1].split("\n")[0])

    # Calculate values for each band.
    for pathname in Lbands:
        try:
            # Band number is parsed from the original filename ("...B10.tif" etc.).
            BandNum = pathname.split("\\")[-1].split("B")[1][0:2]
            try:
                int(BandNum)
            except ValueError:
                # Two-character slice was not numeric (e.g. "1_"); fall back to one character.
                BandNum = pathname.split("\\")[-1].split("B")[1][0]
        except (IndexError, ValueError):
            # BUGFIX: the original formatted this message with an undefined
            # variable (inputbandnum), raising NameError instead of reporting.
            msg = "Error reading Band {0}. Bands must have original names as downloaded.".format(str(pathname))
            arcpy.AddError(msg)
            print(msg)
            raise arcpy.ExecuteError

        # Change the Band 6 name to match the Landsat 7 metadata naming
        # (B6_VCID_1 / B6_VCID_2). Uses the same '"7" in spacecraft' test as
        # above instead of the fragile positional index spacecraft[8].
        if BandNum == "6" and "7" in spacecraft:
            BandNum = pathname.split("\\")[-1].split("B")[1][0:Band6length]

        print("Processing Band {0}".format(BandNum))
        Oraster = arcpy.Raster(pathname)

        # Use the oldMeta/newMeta indexes to pull the min/max for radiance and
        # digital numbers for this band.
        LMax = float(MText.split(Meta[3].format(BandNum))[1].split("\n")[0])
        LMin = float(MText.split(Meta[4].format(BandNum))[1].split("\n")[0])
        QCalMax = float(MText.split(Meta[5].format(BandNum))[1].split("\n")[0])
        QCalMin = float(MText.split(Meta[6].format(BandNum))[1].split("\n")[0])

        # Linear DN -> radiance rescaling.
        Radraster = (((LMax - LMin) / (QCalMax - QCalMin)) * (Oraster - QCalMin)) + LMin
        Oraster = 0  # release the source raster reference

        if OutputType == "Radiance":
            Radraster.save("{0}\\{1}_B{2}_Radiance.tif".format(OutputFolder, TileName, BandNum))
            Radraster = 0
        elif OutputType == "Reflectance/Temperature":
            # Calculate brightness temperature for thermal bands if present
            # (inverse Planck with Landsat K1/K2 constants 666.09 / 1282.71).
            if BandNum in ThermBands:
                Refraster = 1282.71 / (arcpy.sa.Ln((666.09 / Radraster) + 1.0))
                BandPath = "{0}\\{1}_B{2}_Temperature.tif".format(OutputFolder, TileName, BandNum)
                arcpy.AddMessage("Proceeded through if")
            # Otherwise calculate top-of-atmosphere reflectance.
            else:
                Refraster = (math.pi * Radraster * dSun2) / (ESun[int(BandNum[0]) - 1] * math.cos(SZA * math.pi / 180))
                BandPath = "{0}\\{1}_B{2}_TOA_Reflectance.tif".format(OutputFolder, TileName, BandNum)
                arcpy.AddMessage("Proceeded through else")

            if Save == True:
                Refraster.save(BandPath)
                OutList.append(arcpy.Raster(BandPath))
            else:
                OutList.append(Refraster)
            del Refraster, Radraster

        arcpy.AddMessage("Reflectance Calculated for Band {0}".format(BandNum))
        print("Reflectance Calculated for Band {0}".format(BandNum))
    return OutList
| 42.100592 | 113 | 0.619115 | 945 | 7,115 | 4.616931 | 0.319577 | 0.013752 | 0.02567 | 0.011002 | 0.178318 | 0.146917 | 0.127664 | 0.108412 | 0.088471 | 0.057758 | 0 | 0.060388 | 0.238932 | 7,115 | 168 | 114 | 42.35119 | 0.745337 | 0.207309 | 0 | 0.067416 | 0 | 0 | 0.175879 | 0.058246 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.022472 | null | null | 0.033708 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
1668b92419e5394d4eb735fba074c84b5eb16b19 | 1,396 | py | Python | .modules/.theHarvester/discovery/twittersearch.py | termux-one/EasY_HaCk | 0a8d09ca4b126b027b6842e02fa0c29d8250e090 | [
"Apache-2.0"
] | 1,103 | 2018-04-20T14:08:11.000Z | 2022-03-29T06:22:43.000Z | .modules/.theHarvester/discovery/twittersearch.py | sshourya948/EasY_HaCk | 0a8d09ca4b126b027b6842e02fa0c29d8250e090 | [
"Apache-2.0"
] | 29 | 2019-04-03T14:52:38.000Z | 2022-03-24T12:33:05.000Z | .modules/.theHarvester/discovery/twittersearch.py | sshourya948/EasY_HaCk | 0a8d09ca4b126b027b6842e02fa0c29d8250e090 | [
"Apache-2.0"
] | 262 | 2017-09-16T22:15:50.000Z | 2022-03-31T00:38:42.000Z | import string
import requests
import sys
import myparser
import re
class search_twitter:
    """Find Twitter profiles for a name by Google-dorking site:twitter.com.

    Pages through Google results 100 at a time, accumulating the raw HTML in
    ``totalresults``, then hands it to ``myparser`` to extract the people found.
    """

    def __init__(self, word, limit):
        # Spaces must be URL-encoded before being embedded in the query string.
        self.word = word.replace(' ', '%20')
        self.results = ""
        self.totalresults = ""
        self.server = "www.google.com"
        self.hostname = "www.google.com"
        self.userAgent = "(Mozilla/5.0 (Windows; U; Windows NT 6.0;en-US; rv:1.9.2) Gecko/20100116 Firefox/3.7"
        self.quantity = "100"
        self.limit = int(limit)
        self.counter = 0

    def do_search(self):
        """Fetch one page of Google results and append the HTML to totalresults."""
        # Plain string concatenation cannot raise; the original wrapped this in
        # a pointless try/except, which has been removed.
        urly = ("https://" + self.server + "/search?num=100&start=" + str(self.counter) +
                "&hl=en&meta=&q=site%3Atwitter.com%20intitle%3A%22on+Twitter%22%20" + self.word)
        headers = {'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:34.0) Gecko/20100101 Firefox/34.0'}
        try:
            r = requests.get(urly, headers=headers)
        except Exception as e:
            # BUGFIX: the original printed the error and then fell through to
            # use the undefined response object, raising NameError. Skip this
            # page instead and leave previously accumulated results intact.
            print(e)
            return
        self.results = r.content
        self.totalresults += self.results

    def get_people(self):
        """Parse everything fetched so far and return the Twitter people found."""
        rawres = myparser.parser(self.totalresults, self.word)
        return rawres.people_twitter()

    def process(self):
        """Page through Google results, 100 at a time, until limit is reached."""
        while self.counter < self.limit:
            self.do_search()
            self.counter += 100
            print("\tSearching " + str(self.counter) + " results..")
| 32.465116 | 169 | 0.592407 | 181 | 1,396 | 4.519337 | 0.480663 | 0.067237 | 0.07335 | 0.03912 | 0.05379 | 0 | 0 | 0 | 0 | 0 | 0 | 0.058014 | 0.27149 | 1,396 | 42 | 170 | 33.238095 | 0.746313 | 0 | 0 | 0.166667 | 0 | 0.083333 | 0.234957 | 0.062321 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.138889 | null | null | 0.083333 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.