blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 2 616 | content_id stringlengths 40 40 | detected_licenses listlengths 0 69 | license_type stringclasses 2 values | repo_name stringlengths 5 118 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringlengths 4 63 | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 2.91k 686M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 23 values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 220 values | src_encoding stringclasses 30 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 2 10.3M | extension stringclasses 257 values | content stringlengths 2 10.3M | authors listlengths 1 1 | author_id stringlengths 0 212 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
40484b276eb071a2f28ec506dbd524e024bc27ec | 2587aad5d43426acd65f6809ef12530398604feb | /Codes_for_cnc/Codes_for_cnc/expose_check.py | 19d6a7b1895b39a665547463c1e8ba4523df1af4 | [] | no_license | eYSIP-2018/CNC-for-GrowBox | 2d48542bc1f0b1fdf0a242a371f83a5b087c574f | 6f41f8919048d3ec5eb53837ef05a53c31930b8d | refs/heads/master | 2020-03-18T08:40:30.873281 | 2018-07-03T07:41:21 | 2018-07-03T07:41:21 | 134,522,286 | 0 | 1 | null | 2018-07-03T07:41:22 | 2018-05-23T06:14:59 | null | UTF-8 | Python | false | false | 213 | py | import cv2
import numpy as np
# Open the default camera (device index 0).
cap = cv2.VideoCapture(0)
# Numeric property ids: 3 = CAP_PROP_FRAME_WIDTH, 4 = CAP_PROP_FRAME_HEIGHT,
# 15 = CAP_PROP_EXPOSURE (exposure units are driver-dependent).
cap.set(3,1280)
cap.set(4,1024)
cap.set(15,1.9)
# Two consecutive reads; presumably the first lets the camera settle after
# the property changes -- TODO confirm.
ret, frame=cap.read()
ret, frame = cap.read()
# NOTE(review): `ret` is never checked, so a failed capture (frame=None)
# would crash in cv2.resize below.
roi = cv2.resize(frame,(800,800))
cv2.imwrite("frame.jpg",roi)
"harshlunia7@gmail.com"
] | harshlunia7@gmail.com |
9058507aee8063fda48865ed37cb06f5eeafa18a | 85a3727eaeb3f41b01a6db08b2bb7568374ebd5e | /test/test_key_value_item.py | 513f714534624fa00729bdb9be49ae0b6bf0bda7 | [] | no_license | donthor/python-client-generated | a737284764e38f8b0b17e2683e820e23b2e0556b | d521e83cad91f293098b6d006f783142cde2ec92 | refs/heads/master | 2023-08-05T08:34:25.118564 | 2021-09-24T19:40:39 | 2021-09-24T19:40:39 | 410,082,817 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,028 | py | # coding: utf-8
"""
IMDb-API
The IMDb-API Documentation. You need a <a href='/Identity/Account/Manage' target='_blank'><code>API Key</code></a> for testing APIs.<br/><a class='link' href='/API'>Back to API Tester</a> # noqa: E501
OpenAPI spec version: 1.5
Contact: support@imdb-api.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import swagger_client
from swagger_client.models.key_value_item import KeyValueItem # noqa: E501
from swagger_client.rest import ApiException
class TestKeyValueItem(unittest.TestCase):
    """KeyValueItem unit test stubs"""
    def setUp(self):
        # No fixtures needed yet; these are swagger-codegen generated stubs.
        pass
    def tearDown(self):
        pass
    def testKeyValueItem(self):
        """Test KeyValueItem"""
        # FIXME: construct object with mandatory attributes with example values
        # model = swagger_client.models.key_value_item.KeyValueItem() # noqa: E501
        pass
if __name__ == '__main__':
unittest.main()
| [
"parockho@cisco.com"
] | parockho@cisco.com |
4e204107afaea8f043c56aca25f88a45e024f16b | 839ffe8aa2f9b53152a87ae7a6069f5e361e5a6a | /bimmer_connected/remote_services.py | cdfb09ed701df1a275b5d08b995c2ed693752583 | [
"Apache-2.0"
] | permissive | lawtancool/bimmer_connected | 5fae09f27d8a30dd5ca003825e9c3420d8623a11 | fd05aca6ada19260bc77bb55b0258178e3604aac | refs/heads/master | 2021-04-26T23:17:15.726492 | 2018-03-03T10:18:19 | 2018-03-03T10:18:19 | 123,963,370 | 1 | 0 | Apache-2.0 | 2018-03-13T13:56:35 | 2018-03-05T18:49:10 | Python | UTF-8 | Python | false | false | 6,914 | py | """Trigger remote services on a vehicle."""
from enum import Enum
import datetime
import logging
import time
import requests
from bimmer_connected.const import REMOTE_SERVICE_URL
TIME_FORMAT = '%Y-%m-%dT%H:%M:%S.%f'
_LOGGER = logging.getLogger(__name__)
#: time in seconds between polling updates on the status of a remote service
_POLLING_CYCLE = 1
#: maximum number of seconds to wait for the server to return a positive answer
_POLLING_TIMEOUT = 60
#: time in seconds to wait before updating the vehicle state from the server
_UPDATE_AFTER_REMOTE_SERVICE_DELAY = 10
class ExecutionState(Enum):
    """Enumeration of possible states of the execution of a remote service."""
    # Values are the literal status strings found in the server's
    # 'remoteServiceStatus' field (see RemoteServiceStatus.__init__).
    PENDING = 'PENDING'
    DELIVERED = 'DELIVERED_TO_VEHICLE'
    EXECUTED = 'EXECUTED'
class _Services(Enum):
    """Enumeration of possible services to be executed."""
    # Values are the service path fragments substituted into
    # REMOTE_SERVICE_URL (service=service_id.value).
    REMOTE_LIGHT_FLASH = 'RLF'
    REMOTE_DOOR_LOCK = 'RDL'
    REMOTE_DOOR_UNLOCK = 'RDU'
    REMOTE_SERVICE_STATUS = 'state/execution'
    REMOTE_HORN = 'RHB'
    REMOTE_AIR_CONDITIONING = 'RCN'
class RemoteServiceStatus(object):  # pylint: disable=too-few-public-methods
    """Wraps the status of the execution of a remote service."""

    def __init__(self, response: dict):
        """Construct a new object from a dict.

        :param response: JSON dict from the server; either the status query
            itself or an event wrapping it under 'remoteServiceEvent'.
        """
        self._response = response
        # the result from the service call is different from the status request
        # we need to go one level down in the response if possible
        if 'remoteServiceEvent' in response:
            response = response['remoteServiceEvent']
        self.state = ExecutionState(response['remoteServiceStatus'])
        self.timestamp = self._parse_timestamp(response['lastUpdate'])

    @staticmethod
    def _parse_timestamp(timestamp: str) -> datetime.datetime:
        """Parse the timestamp format from the response.

        The last three characters are the UTC offset in hours (e.g. '+01');
        the remainder matches TIME_FORMAT.
        """
        offset = int(timestamp[-3:])
        time_zone = datetime.timezone(datetime.timedelta(hours=offset))
        result = datetime.datetime.strptime(timestamp[:-3], TIME_FORMAT)
        # datetime objects are immutable: replace() returns a NEW instance.
        # The original code called result.replace(...) and discarded the
        # return value, so the parsed timestamp stayed naive. Return the
        # tz-aware copy instead.
        return result.replace(tzinfo=time_zone)
class RemoteServices(object):
    """Trigger remote services on a vehicle."""

    def __init__(self, account, vehicle):
        """Constructor.

        :param account: account used to send requests to the server.
        :param vehicle: vehicle the services are executed on.
        """
        self._account = account
        self._vehicle = vehicle

    def trigger_remote_light_flash(self) -> RemoteServiceStatus:
        """Trigger the vehicle to flash its headlights.

        A state update is NOT triggered after this, as the vehicle state is unchanged.
        """
        _LOGGER.debug('Triggering remote light flash')
        # needs to be called via POST, GET is not working
        self._trigger_remote_service(_Services.REMOTE_LIGHT_FLASH, post=True)
        return self._block_until_done()

    def trigger_remote_door_lock(self) -> RemoteServiceStatus:
        """Trigger the vehicle to lock its doors.

        A state update is triggered after this, as the lock state of the vehicle changes.
        """
        _LOGGER.debug('Triggering remote door lock')
        # needs to be called via POST, GET is not working
        self._trigger_remote_service(_Services.REMOTE_DOOR_LOCK, post=True)
        result = self._block_until_done()
        self._trigger_state_update()
        return result

    def trigger_remote_door_unlock(self) -> RemoteServiceStatus:
        """Trigger the vehicle to unlock its doors.

        A state update is triggered after this, as the lock state of the vehicle changes.
        """
        # log message corrected: previously said 'door lock' (copy/paste)
        _LOGGER.debug('Triggering remote door unlock')
        # needs to be called via POST, GET is not working
        self._trigger_remote_service(_Services.REMOTE_DOOR_UNLOCK, post=True)
        result = self._block_until_done()
        self._trigger_state_update()
        return result

    def trigger_remote_horn(self) -> RemoteServiceStatus:
        """Trigger the vehicle to sound its horn.

        A state update is NOT triggered after this, as the vehicle state is unchanged.
        """
        # log message corrected: previously said 'light flash' (copy/paste)
        _LOGGER.debug('Triggering remote horn')
        # needs to be called via POST, GET is not working
        self._trigger_remote_service(_Services.REMOTE_HORN, post=True)
        return self._block_until_done()

    def trigger_remote_air_conditioning(self) -> RemoteServiceStatus:
        """Trigger the air conditioning of the vehicle.

        A state update IS triggered after this, matching the implementation
        below (the old docstring incorrectly said 'sound its horn' and
        claimed no state update happens).
        """
        # log message corrected: previously said 'light flash' (copy/paste)
        _LOGGER.debug('Triggering remote air conditioning')
        # needs to be called via POST, GET is not working
        self._trigger_remote_service(_Services.REMOTE_AIR_CONDITIONING, post=True)
        result = self._block_until_done()
        self._trigger_state_update()
        return result

    def _trigger_remote_service(self, service_id: _Services, post=False) -> requests.Response:
        """Trigger a generic remote service.

        You can choose if you want a POST or a GET operation.

        :param service_id: which service to trigger (URL fragment).
        :param post: use POST when True, GET otherwise.
        """
        url = REMOTE_SERVICE_URL.format(vin=self._vehicle.vin, service=service_id.value,
                                        server=self._account.server_url)
        return self._account.send_request(url, post=post)

    def _block_until_done(self) -> RemoteServiceStatus:
        """Keep polling the server until we get a final answer.

        :raises IOError: if there is no final answer before _POLLING_TIMEOUT
        """
        fail_after = datetime.datetime.now() + datetime.timedelta(seconds=_POLLING_TIMEOUT)
        while True:
            status = self._get_remote_service_status()
            # typo fixed: 'state if' -> 'state of'
            _LOGGER.debug('current state of remote service is: %s', status.state.value)
            if status.state not in [ExecutionState.PENDING, ExecutionState.DELIVERED]:
                return status
            if datetime.datetime.now() > fail_after:
                raise IOError(
                    'Timeout on getting final answer from server. Current state: {}'.format(status.state.value))
            time.sleep(_POLLING_CYCLE)

    def _get_remote_service_status(self) -> RemoteServiceStatus:
        """Get the execution status of the last remote service that was triggered.

        As the status changes over time, you probably need to poll this.
        Recommended polling time is AT LEAST one second as the reaction is sometimes quite slow.

        :raises ValueError: if the server response is not valid JSON.
        """
        _LOGGER.debug('getting remote service status')
        response = self._trigger_remote_service(_Services.REMOTE_SERVICE_STATUS)
        try:
            json_result = response.json()
            return RemoteServiceStatus(json_result)
        except ValueError:
            _LOGGER.error('Error decoding json response from the server.')
            _LOGGER.debug(response.headers)
            _LOGGER.debug(response.text)
            raise

    def _trigger_state_update(self) -> None:
        """Wait for the service to be processed by the vehicle, then refresh state."""
        time.sleep(_UPDATE_AFTER_REMOTE_SERVICE_DELAY)
        self._account.update_vehicle_states()
| [
"christian.kuehnel@gmail.com"
] | christian.kuehnel@gmail.com |
852c620fb2dad2d857281fc606a568ecd50ddcb0 | ba541e03d028984d8021d44e5a4c412ba1840654 | /tests/testLWWGraph.py | 2c5e7df77d9e701e1a7462291eb763a30ce17d51 | [] | no_license | ra312/replicated-data-structures | 7d81d34ca217840e6acd4c21faf6e983e467e4d2 | b651931f3a03ef4f5f48a4b6de5d3ab8675b6cbe | refs/heads/main | 2023-04-21T00:22:22.810281 | 2021-05-10T04:52:43 | 2021-05-10T04:52:43 | 364,539,429 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,525 | py | import unittest
from LWW.Graph import Graph as lww_graph
class Test_LWW_Graph(unittest.TestCase):
    """Unit tests for the LWW (last-writer-wins) graph CRDT."""
    def test_graph_init(self):
        '''
        test lww-graph init
        '''
        exception_received = False
        try:
            t = lww_graph()
        except:
            exception_received = True
        self.assertFalse(exception_received)
    def test_adding_vertex(self):
        '''
        test lww-graph addVertex
        '''
        # NOTE(review): only checks that addVertex does not raise;
        # the vertex's presence is not asserted here (see test_vertex_exists).
        t = lww_graph()
        t.addVertex(1)
    def test_removing_vertex(self):
        '''
        test lww-graph removeVertex
        '''
        # NOTE(review): removeVertex is never actually called here --
        # this duplicates test_adding_vertex. Probably an unfinished test.
        t = lww_graph()
        t.addVertex(1)
    def test_vertex_exists(self):
        '''
        test lww-graph vertex exists
        '''
        t = lww_graph()
        vertex = 1
        t.addVertex(vertex)
        self.assertTrue(t.exists(vertex))
        t = lww_graph()
        self.assertFalse(t.exists(vertex))
    def test_adding_edge(self):
        '''
        test lww-graph addEdge
        '''
        t = lww_graph()
        t.addEdge(1,2)
    def test_removing_edge(self):
        '''
        test lww-graph removeEdge
        '''
        g = lww_graph()
        try:
            g.removeEdge(1,2)
        except Exception as e:
            # NOTE(review): assertRaises is misused -- its signature is
            # assertRaises(exception_class, callable, ...). Inside an except
            # block the intended check was probably
            # `self.assertIsInstance(e, AssertionError)`.
            self.assertRaises(e, AssertionError)
    def test_merge(self):
        '''
        test graph merge
        '''
        # Merging an empty graph into an empty graph must not raise.
        g = lww_graph()
        z = lww_graph()
        g.merge(z)
    # def test_is_reachable()
if __name__ == '__main__':
unittest.main() | [
"rauan.akylzhanov@kcell.kz"
] | rauan.akylzhanov@kcell.kz |
f43afaa4f0016dfe69be9baf4880dfa31bc9e26b | 85e50dc8487701f0c15c72141681c849021d9826 | /news/scripts/indiatoday.py | 0dc1c52a98c0c909ed68bcbfa7521e59391e3c62 | [] | no_license | sadakchap/news-aggregator | 4c46a9b3ab18cf4c693dff2c71c9c7fc58f1ee1c | dbd7e031d783f9cc9cf9e652be8b70d73d53d9cb | refs/heads/master | 2022-12-09T12:34:43.929043 | 2021-06-02T01:46:30 | 2021-06-02T01:46:30 | 199,251,857 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 880 | py | from bs4 import BeautifulSoup
import requests
from news.models import NewsBox
requests.packages.urllib3.disable_warnings()
url = 'https://www.indiatoday.in/'
source = requests.get(url).text
soup = BeautifulSoup(source, "lxml")
news_box = soup.find('ul', class_='itg-listing')
# print(news_box.prettify())
def indiatoday():
    """Scrape the first 7 headlines from the India Today listing and store
    any not-yet-seen ones as NewsBox rows.

    Relies on the module-level `url` and `news_box` (BeautifulSoup node)
    that are fetched at import time.
    """
    for news_story in news_box.find_all('li')[:7]:
        # hrefs in the listing are site-relative, so prefix the base url.
        news_link = url + news_story.find('a').get('href')
        # The listing has no thumbnails; img is stored as None.
        img_src = None
        news_title = news_story.find('a').text
        # Deduplicate on the article link so re-runs don't create copies.
        if not NewsBox.objects.filter(news_link=news_link).exists():
            news = NewsBox()
            news.src_name = 'India Today'
            news.src_link = url
            news.title = news_title
            news.news_link = news_link
            news.img = img_src
            news.save()
            # print(news_link)
            # print(news_title)
            # print('*'*80)
| [
"aliceprerna@gmail.com"
] | aliceprerna@gmail.com |
41a6800d3bb1fc9d9ac3be25bf186988e0f5b7f6 | b3ef0abaaeb063b42b32fb207ee8f2b170dfdb1d | /PythonNote/43_Inheritance/InheritanceDemo.py | 49cec8e248817b78e424e6f0d4182f92e6d68c53 | [] | no_license | VisualAcademy/PythonNote | 417a2b8e17be14b3530daca0d529b5660cfc677a | 3f340da501048d0c3a30b056abcd303f5a770f51 | refs/heads/master | 2023-05-28T14:21:23.271306 | 2023-05-15T14:00:12 | 2023-05-15T14:00:12 | 185,853,356 | 7 | 4 | null | 2019-05-09T19:30:17 | 2019-05-09T18:40:42 | Python | UTF-8 | Python | false | false | 412 | py | # [?] 상속(Inheritance): 부모 클래스의 기능을 자식 클래스에서 물려받아 사용
# Parent: base class
class Parent():
    def Say(self):
        # Prints "부모 말하다." ("the parent speaks").
        print("부모 말하다.")
# Child: derived class
class Child(Parent):
    #pass
    def Say(self):
        # Extend rather than replace the parent's behaviour: call the base
        # implementation explicitly, then add the child's own output.
        Parent.Say(self)
        print("자식 말하다.")
# 자식 클래스의 인스턴스 생성
child = Child()
child.Say() # 부모로부터 상속
| [
"redplus@redplus.net"
] | redplus@redplus.net |
53f066b9b58f4908fa80f7b05ad0314541a78b2f | d83120a187421256f4a59e7dec582657b8b8bb88 | /starter/.history/backend/flaskr/__init___20210711040044.py | 96f30636e696400be6d39026aec94776b5b72129 | [] | no_license | saraalmuraytib/Trivia | 2dc382cf0c124d673cad699cb01563ca87389d38 | f88e21ac04681f4131c737f1674dcde32731071c | refs/heads/main | 2023-06-30T04:52:08.612654 | 2021-08-03T15:33:50 | 2021-08-03T15:33:50 | 384,743,716 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,468 | py | import os
from flask import Flask, request, abort, jsonify
from flask_sqlalchemy import SQLAlchemy
from flask_cors import CORS
import random
from models import setup_db, Question, Category
QUESTIONS_PER_PAGE = 10
def paginate_questions(request, selection):
    """Return the formatted questions for the page requested in the query string.

    :param request: Flask request; reads the integer `page` arg (default 1).
    :param selection: iterable of Question model instances.
    :return: list of `question.format()` dicts for that page
        (QUESTIONS_PER_PAGE entries at most).
    """
    page_number = request.args.get('page', 1, type=int)
    first_index = (page_number - 1) * QUESTIONS_PER_PAGE
    formatted = [item.format() for item in selection]
    return formatted[first_index:first_index + QUESTIONS_PER_PAGE]
def create_app(test_config=None):
    """Application factory for the trivia API.

    :param test_config: optional configuration mapping used by tests.
    :return: a configured Flask application.
    """
    app = Flask(__name__)
    setup_db(app)

    # Allow cross-origin requests (the frontend is served separately).
    CORS(app)

    @app.after_request
    def after_request(response):
        """Attach CORS headers to every response."""
        response.headers.add('Access-Control-Allow-Headers', 'Content-Type,Authorization,true')
        response.headers.add('Access-Control-Allow-Methods', 'GET,PUT,POST,DELETE,OPTIONS')
        return response

    @app.route('/categories')
    def get_categories():
        """Return all available categories keyed by id; 404 if none exist."""
        categories = Category.query.order_by(Category.type).all()
        if len(categories) == 0:
            abort(404)
        return jsonify({
            'success': True,
            'categories': {category.id: category.type for category in categories}
        })

    @app.route('/questions')
    def get_questions():
        """Return one page of questions (QUESTIONS_PER_PAGE per page) plus
        category metadata; 404 when the page is out of range."""
        selection = Question.query.order_by(Question.id).all()
        current_questions = paginate_questions(request, selection)
        categories = Category.query.order_by(Category.type).all()
        if len(current_questions) == 0:
            abort(404)
        return jsonify({
            'success': True,
            'questions': current_questions,
            'total_questions': len(selection),
            'categories': {category.id: category.type for category in categories},
            'current_category': None
        })

    @app.route('/questions/<int:question_id>', methods=['DELETE'])
    def delete_question(question_id):
        """Delete a question by id; 404 for unknown ids, 422 on DB failure."""
        question = Question.query.filter(
            Question.id == question_id).one_or_none()
        # The 404 must be raised OUTSIDE the try block: previously abort(404)
        # was raised inside `try` and swallowed by the bare `except`, turning
        # every missing id into a 422.
        if question is None:
            abort(404)
        try:
            question.delete()
            return jsonify({
                'success': True,
                'deleted': question_id
            })
        except Exception:
            abort(422)

    @app.route('/questions', methods=['POST'])
    def create_question():
        """Create a new question from a JSON payload; 422 on bad input."""
        body = request.get_json()
        if body is None:
            abort(422)
        new_question = body.get('question')
        new_answer = body.get('answer')
        new_category = body.get('category')
        new_difficulty = body.get('difficulty')
        # Reject incomplete payloads instead of inserting empty rows.
        if not all([new_question, new_answer, new_category, new_difficulty]):
            abort(422)
        try:
            question = Question(question=new_question, answer=new_answer,
                                category=new_category, difficulty=new_difficulty)
            question.insert()
            return jsonify({
                'success': True,
                'created': question.id,
            })
        except Exception:
            abort(422)

    # TODO (project TODOs 7-9, still unimplemented):
    #  - POST /questions/search: questions whose text contains a search term
    #  - GET /categories/<id>/questions: questions filtered by category
    #  - POST /quizzes: random not-yet-asked question for the quiz game

    @app.errorhandler(404)
    def not_found(error):
        """JSON body for 404 responses (project TODO 10)."""
        return jsonify({
            'success': False,
            'error': 404,
            'message': 'resource not found'
        }), 404

    @app.errorhandler(422)
    def unprocessable(error):
        """JSON body for 422 responses (project TODO 10)."""
        return jsonify({
            'success': False,
            'error': 422,
            'message': 'unprocessable'
        }), 422

    return app
| [
"sara.almuraytib@gmail.com"
] | sara.almuraytib@gmail.com |
3c63c3dbe3f2c3a76a0186bc5ef7d42890ea6fb6 | eafced651cb83c6d325b146dfc4e7ecde236cb24 | /實用代碼/反轉串列.py | ec57837c14652f74f59483dab63862f065382e0a | [] | no_license | zxc83147759/pythonObject | 7bb1bc3888c730dd4c84e338a085be037511e3db | 4e8bbcceb41d3d17168ef8efa8c789fdbabe4d1d | refs/heads/master | 2020-11-29T21:58:04.139765 | 2020-04-19T14:25:15 | 2020-04-19T14:25:15 | 230,224,915 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 86 | py | a = [5, 4, 3, 2, 1]
print(a[::-1])
for ele in reversed(a):
print(ele, end='')
| [
"32771813+zxc83147759@users.noreply.github.com"
] | 32771813+zxc83147759@users.noreply.github.com |
b8b719bfa0c3cad3415aa801e99f1b92b64a229e | 9f463398b10e7182be90d0ff0feda1a8c5c1a0b6 | /timing_inv_caso_3_longdouble.py | 96028bc5db5847da3eb742bf6559c45359ff2519 | [] | no_license | Felobrat/MCOC2020-P0 | e96b154532b0a7969d7fd851d364742d6ad5db19 | d47a393dc4c6b05f539e9191ddbf6b25aa35ea5d | refs/heads/master | 2022-12-06T22:59:20.799265 | 2020-08-24T11:18:18 | 2020-08-24T11:18:18 | 284,769,082 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,390 | py | # -*- coding: utf-8 -*-
"""
Created on Wed Aug 12 15:20:02 2020
@author: Felipe Bravo
"""
from time import perf_counter
from scipy.linalg import inv
import numpy as np
NP = [2, 5, 10,
12, 15, 20,
30, 40, 45,
50, 55, 60,
75, 100, 125,
160, 200, 250,
350, 500, 600,
800, 1000, 2000,
5000, 10000] #indice de dimension de matrices cuadradas que vamos a evaluar
dts = [] #indice que almacena el tiempo que se demora en cada iteracion de matriz de dimension N en NP
mem = [] #indice que almacena la memoria que se usa en cada iteracion de matriz de dimension N en NP
name = (f"timing_inv_caso_3_np.longdouble.txt")
fid = open(name, "w") #genera el archivo Matmul.txt
for N in NP: #recorremos el indice NP para matrices aleatorias A y B de dimension N en NP
print(f"N = {N}") #imprime en consola lo que está pasando en la operacion, indicando en que elemento N en NP esta iterando.
    # Build the N x N tridiagonal second-difference matrix:
    # 2 on the main diagonal, -1 on the first sub- and super-diagonals.
    A = 2*(np.eye(N, dtype=np.longdouble)) #NxN diagonal matrix with 2s on the diagonal
    for m in range(N):
        for n in range(N):
            if m==n+1:
                A[m,n]+=-1
            if m==n-1:
                A[m,n]+=-1
print(A.dtype)
t1 = perf_counter() #empieza a contar cuando llega a esta parte de la iteracion
Ainv = inv(A, overwrite_a=False) #hace la operacion de multiplicar matrices A y B de dimension NxN*NxN
t2 = perf_counter() #deja de contar
dt = t2 - t1 #mide cuando se demoro en multiplicar las matrices.
print(Ainv.dtype)
size=(Ainv.itemsize+A.itemsize)*(N**2) #calculo de cuanta memoria usa en alamcenar los datos de las matrices.
print(Ainv.itemsize) #calculo de cuanta memoria usa en alamcenar los datos de las matrices.
dts.append(dt) #guarda en el indice dts creado el timepo de iteracion que se demoró para esa matriz e NxN
mem.append(size) #guarda en el indice mem creado lo que uso de memoria en la iteracion para esa matriz de NxN
fid.write(f"{N} {dt} {size}\n") #escribe en el archivo los datos almacenados
print(f"Tiempo transcurrido = {dt} s") #imprime en la consola para llevar un registro visual de lo que esta haciendo
print(f"memoria usada = {size} bytes")
fid.flush() #limpia lo almacenado que imprimio en el archivo de texto
fid.close() #deja de escribir | [
"noreply@github.com"
] | Felobrat.noreply@github.com |
ef3d8382444a8442f31cd305561d3477ba1a01b4 | 4ff8676136167cdd81d7a983272102fff86360e8 | /python/258. 各位相加.py | 138d139307262136c271371b5e43d5a1c038538f | [] | no_license | geniuscynic/leetcode | 0ec256af2377d19fee22ce736462a7e95e3f4e67 | 379a8f27f8213951ee8be41bd56598036995d267 | refs/heads/master | 2023-07-19T07:22:20.001770 | 2021-09-07T14:50:40 | 2021-09-07T14:50:40 | 297,277,833 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,290 | py | import sys
from collections import defaultdict
from collections import Counter
from collections import deque
# Definition for a binary tree node.
class TreeNode:
    """Binary tree node holding a value and optional left/right children."""
    def __init__(self, val=0, left=None, right=None):
        self.val = val
        self.left = left
        self.right = right
class Solution:
    """LeetCode 258: repeatedly sum decimal digits until one digit remains."""

    def addDigits(self, num: int) -> int:
        """Collapse *num* to a single digit by repeated digit summing.

        E.g. 38 -> 3 + 8 = 11 -> 1 + 1 = 2.
        """
        total = num
        while total >= 10:
            remaining, total = total, 0
            # Peel off digits with divmod instead of separate % and //.
            while remaining != 0:
                remaining, digit = divmod(remaining, 10)
                total += digit
        return total
def coverttoTree():
    """Build a fixed sample binary tree from a level-order encoding and
    return its root.

    Encodes [6,2,8,0,4,7,9,None,None,3,5]; None marks a missing child.
    (Name presumably intended to be "convertToTree".)
    """
    ls =deque([6,2,8,0,4,7,9,None,None,3,5])
    temp = TreeNode(ls.popleft())
    res = deque()
    res.append(temp)
    # BFS: `res` holds nodes still waiting for their children.
    while ls:
        # Pop two values per parent: left child, then right child.
        # NOTE(review): assumes an even number of values remain after the
        # root -- an odd remainder would raise IndexError on popleft().
        left = ls.popleft()
        right = ls.popleft()
        node = res.popleft()
        #print(node.val, left, right)
        if left != None:
            node.left = TreeNode(left)
            res.append(node.left)
        if right != None:
            node.right = TreeNode(right)
            res.append(node.right)
    return temp
if __name__ == "__main__":
solution = Solution()
nums1 = 38
m = TreeNode(2)
nums2 = TreeNode(4)
n = 3
result = solution.addDigits(nums1)
#print(solution.ls)
print(result) | [
"350810375@qq.com"
] | 350810375@qq.com |
83568e7bbee45bdec6b45b63ccf9be19ef66b4b9 | 541b446ed1c81ef9594d28c08265e5af1aca08f8 | /app/app/settings.py | c9f1bd4ceff38ff49eae1281f9acfd235326d8bf | [
"MIT"
] | permissive | kushalbhalla/recipe-app-api | 2041a987f8eda42027807921d1d780a369015f27 | 1c6a4cec2cd225ef2ab62af24d4749f5a85bf25d | refs/heads/master | 2022-12-18T18:21:14.904357 | 2020-09-11T13:10:56 | 2020-09-11T13:10:56 | 289,422,304 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,421 | py | """
Django settings for app project.
Generated by 'django-admin startproject' using Django 3.0.8.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.0/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): the key is committed in source control and should be rotated
# and loaded from the environment, e.g. os.environ['SECRET_KEY'].
SECRET_KEY = 'fbu!=78*%+(3oc_6tqe4v)%s5!zf%50m@%8g92j*aidoi!%4%e'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
# Empty list is only valid while DEBUG is True; production needs real hosts.
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
'rest_framework.authtoken',
'core',
'user',
'recipe',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'app.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'app.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.0/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE' : 'django.db.backends.postgresql',
'HOST' : os.environ.get('DB_HOST'),
'NAME' : os.environ.get('DB_NAME'),
'USER' : os.environ.get('DB_USER'),
'PASSWORD' : os.environ.get('DB_PASSWORD'),
}
}
# Password validation
# https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.0/howto/static-files/
STATIC_URL = '/static/'
MEDIA_URL = '/media/'
MEDIA_ROOT = '/vol/web/media'
STATIC_ROOT = '/vol/web/static'
AUTH_USER_MODEL = 'core.User'
| [
"kushalbhalla14@gmail.com"
] | kushalbhalla14@gmail.com |
6dd9210d2c0622b486978089a2a5c0b072f88f00 | 9a107e91cd01d015eb48040560875bf7dc173a62 | /blog/migrations/0002_post_email.py | 16110fb165d34b65d5a95c9fda5a0c1dd30464e5 | [] | no_license | katariny/my-first-blog | 7b5fff23a671f35bc5e01e962f80e5401a81fead | b2bdeb905a5521cd53e7cd7046827892b238d33b | refs/heads/master | 2023-02-27T07:37:13.399975 | 2021-02-01T20:11:00 | 2021-02-01T20:11:00 | 333,527,211 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 406 | py | # Generated by Django 3.1.5 on 2021-02-01 16:56
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: adds a nullable, optional `email` field to blog.Post."""

    dependencies = [
        ('blog', '0001_initial'),
    ]
    operations = [
        migrations.AddField(
            model_name='post',
            name='email',
            # blank+null with default=None so existing rows need no backfill.
            field=models.EmailField(blank=True, default=None, max_length=250, null=True),
        ),
    ]
| [
"kat@exemplo.com"
] | kat@exemplo.com |
e4191ee8faa9cae6b500b0cb05622c3f63276b3e | 31efa445ae49cf6830270bb6ccf15425a97ad08d | /self-crossing.py | 579f47a8ef7e80e2a4a42f231ae98d84f0356805 | [] | no_license | odremaer/some-exercises | 812a0d4ea592e00509bde3aa93c03aa3a96bbad6 | 0bc33d8b968a431e3023d0253e8760d215c14833 | refs/heads/master | 2022-12-28T17:44:17.058699 | 2020-10-16T01:44:17 | 2020-10-16T01:44:17 | 297,093,430 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 131 | py | class Solution:
def isSelfCrossing(self, x):
cordx, cordy = 0, 0
everymove = {}
Solution().isSelfCrossing([]) | [
"mr.jeka0104@gmail.com"
] | mr.jeka0104@gmail.com |
dbdca5bf52f7cb3e29adca6edbc4f9148cb0981b | ce67887d8b5abf79bcfe6d79d107802823e233fa | /ZabbixTool/lib_zabbix/w_lib/terminaltables/terminal_io.py | a1b7b545d0869def13f205bc63c4d7d33c8eca67 | [
"Apache-2.0"
] | permissive | cathywife/zabbix_manager | b933efd6a7b49620a9ce60f73937270b2898e5b4 | 7cab94e7efbaac0e13b604d18038150ddc95b2cb | refs/heads/master | 2020-06-19T04:29:04.151388 | 2017-06-10T12:56:57 | 2017-06-10T12:56:57 | 94,177,465 | 0 | 2 | null | 2017-06-13T06:28:02 | 2017-06-13T06:28:02 | null | UTF-8 | Python | false | false | 1,809 | py | """Get info about the current terminal window/screen buffer."""
import ctypes
import struct
import sys
try:
import fcntl
import termios
except ImportError:
fcntl = None
termios = None
DEFAULT_HEIGHT = 24
DEFAULT_WIDTH = 80
def terminal_size():
    """Get the width and height of the terminal.
    http://code.activestate.com/recipes/440694-determine-size-of-console-window-on-windows/
    http://stackoverflow.com/questions/17993814/why-the-irrelevant-code-made-a-difference
    :return: Width (number of characters) and height (number of lines) of the terminal.
    :rtype: tuple
    """
    if hasattr(ctypes, 'windll'):
        # Only works on Microsoft Windows platforms.
        string_buffer = ctypes.create_string_buffer(22)  # To be written to by GetConsoleScreenBufferInfo.
        # -11 is the Win32 STD_OUTPUT_HANDLE constant.
        ctypes.windll.kernel32.GetConsoleScreenBufferInfo(ctypes.windll.kernel32.GetStdHandle(-11), string_buffer)
        # Unpack CONSOLE_SCREEN_BUFFER_INFO; fields [5:-2] are the visible
        # window rectangle (srWindow left/top/right/bottom).
        left, top, right, bottom = struct.unpack('hhhhHhhhhhh', string_buffer.raw)[5:-2]
        width, height = right - left, bottom - top
        if width < 1 or height < 1:
            return DEFAULT_WIDTH, DEFAULT_HEIGHT
        return width, height
    # POSIX: query the window size of stdin (fd 0); fails with IOError when
    # not attached to a tty (e.g. piped input), in which case fall back.
    try:
        device = fcntl.ioctl(0, termios.TIOCGWINSZ, '\0' * 8)
    except IOError:
        return DEFAULT_WIDTH, DEFAULT_HEIGHT
    # struct winsize starts with (ws_row, ws_col) -- note rows come first.
    height, width = struct.unpack('hhhh', device)[:2]
    return width, height
def set_terminal_title(title):
    """Set the terminal title.

    On Windows this goes through the kernel32 console API; on other
    platforms it emits the xterm OSC escape sequence on stdout.

    :param str title: The title to set.
    """
    if not hasattr(ctypes, 'windll'):
        # ESC ] 0 ; <title> BEL sets both the icon name and the window title.
        sys.stdout.write('\033]0;{0}\007'.format(title))
        return None
    if sys.version_info[0] == 3:
        set_title = ctypes.windll.kernel32.SetConsoleTitleW  # Unicode.
    else:
        set_title = ctypes.windll.kernel32.SetConsoleTitleA  # Ascii.
    return set_title(title)
"wangbin139967@163.com"
] | wangbin139967@163.com |
c4eed199172881acd25a5d986044c3aded598837 | 52b5773617a1b972a905de4d692540d26ff74926 | /.history/2D_20200722174654.py | d47af0911f58fcf03a29cbe21f4c5d543a6f60ef | [] | no_license | MaryanneNjeri/pythonModules | 56f54bf098ae58ea069bf33f11ae94fa8eedcabc | f4e56b1e4dda2349267af634a46f6b9df6686020 | refs/heads/master | 2022-12-16T02:59:19.896129 | 2020-09-11T12:05:22 | 2020-09-11T12:05:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,036 | py | def array(n,m):
# where n is row size and m is column size
array = [[0 for x in range(n)] for x in range(m)]
print(array)
a = [[2, 4, 6, 8, 10], [3, 6, 9, 12, 15], [4, 8, 12, 16, 20]]
# where the first arguement reps the row and second arguement reps the column
print(a[0][3])
def hourGlass(arr):
    """Return the maximum "hourglass" sum found in the 2-D grid *arr*.

    An hourglass is the 7-cell pattern covering a 3x3 window:
        a b c
          d
        e f g

    The original implementation only printed the grid elements and
    returned None; this version actually computes the maximum sum.

    Returns 0 when the grid is empty or too small to hold a 3x3 window.
    """
    if not arr:
        return 0
    rows, cols = len(arr), len(arr[0])
    if rows < 3 or cols < 3:
        return 0
    best = None
    # Slide the 3x3 window over every valid top-left corner (i, j).
    for i in range(rows - 2):
        for j in range(cols - 2):
            total = (arr[i][j] + arr[i][j + 1] + arr[i][j + 2]
                     + arr[i + 1][j + 1]
                     + arr[i + 2][j] + arr[i + 2][j + 1] + arr[i + 2][j + 2])
            if best is None or total > best:
                best = total
    return best
print(hourGlass([[1,1,1,0,0,0],[0,1,0,0,0,0],[1,1,1,0,0,0],[0,0,2,4,4,0],[0,0,0,2,0,0],[0,0,1,2,4,0]])) | [
"mary.jereh@gmail.com"
] | mary.jereh@gmail.com |
c41b0444df94178605dabcb7a228a1b78f8f2c39 | 459ddc777a037827cec3576c1b28cf4334b554b5 | /external-traffic-signs/willdelete.py | a5e954bcc7e633ad5ce86449629988109c3304e0 | [] | no_license | eoguzinci/traffic_sign_classification | 2f45721a9a7928fd48ad5c8742a7b10b5cba3bdc | 639f0aaa7238ace98fb358d583fd4e4563abeb95 | refs/heads/master | 2020-04-05T12:11:57.177337 | 2017-08-05T01:00:20 | 2017-08-05T01:00:20 | 81,028,380 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,685 | py | ### Load the images and plot them here.
### Feel free to use as many code cells as needed.
import cv2
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
import numpy as np
import os
import glob
# size_images = 10
# images = np.zeros((size_images,32,32,3), dtype = np.int32)
# for i in range(size_images):
# filename = "./external-traffic-signs/p%d_resized.jpg" % i
# images[i] = cv2.imread(filename)
image_files = ['external-traffic-signs/' + image_file for image_file in os.listdir('external-traffic-signs')]
images = []
for i, img in enumerate(image_files):
image = cv2.imread(img)
image_gray = cv2.cvtColor(image,cv2.COLOR_BGR2RGB)
images.append(image_gray)
images = np.asarray(images)
images = (images - 128)/128
# for image_file in image_files:
# image = Image.open(image_file)
# image = image.convert('RGB')
# image = image.resize((32, 32), Image.ANTIALIAS)
# image = np.array(list(image.getdata()), dtype='uint8')
# image = np.reshape(image, (32, 32, 3))
# images.append(image)
images = np.array(images, dtype='uint8')
print(images[0].shape)
text_file = open("./external-traffic-signs/extResults.txt","r")
ylabel = np.zeros(size_images)
ylabel = text_file.readlines()
print(ylabel[0])
# Grayscaling
# imagesGray = np.zeros(shape = [images.shape[0],images.shape[1],images.shape[2]], dtype = np.int32)
# for i in range(size_images):
# imagesGray[i] = cv2.cvtColor(images[i], cv2.COLOR_RGB2GRAY)
## I do not know why but cv2.cvtColor does not work for this example so I write it as
imagesGray = np.sum(images/3, axis=3, keepdims=True)
print(imagesGray.shape)
# Normalize
imgNormal = (imagesGray - 128)/128 | [
"eoguzinci@gmail.com"
] | eoguzinci@gmail.com |
1d6f4c562826712511423d8ada03854d2b476c4f | 60aad3810be3ddac41c13f63c4f060f6ae0285b5 | /dd_app/django_codec.py | 9ae0d2bc7df750b28b17ece24ecb9e59d1c5b8e4 | [
"Artistic-2.0",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | AAB-Manifesto/dd_app | 8d7bb697f28ca56095c622541601ba56eae1bb67 | 3806b9b9df165a49f0fca8a249170b4ccd4d0177 | refs/heads/master | 2021-12-03T20:36:01.948101 | 2014-11-12T16:12:31 | 2014-11-12T16:12:31 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,442 | py | """
Helpers to decode/encode session data
"""
import base64
import cPickle as pickle
from django.utils.crypto import salted_hmac, constant_time_compare
class InvalidHash(Exception):
"""Exception raised when hash signature in session cookie fails to verify"""
pass
class DjangoSessionCodec(object):
"""Encodes/decodes django 1.4 session data
"""
uid_key = '_auth_user_id'
def __init__(self, settings, **kwargs):
self.django_key_salt = kwargs.get('django_key_salt', settings['django.key_salt'])
self.django_secret = kwargs.get('django_secret', settings['django.secret'])
def _hash(self, value):
return salted_hmac(self.django_key_salt, value, secret=self.django_secret).hexdigest()
def decode(self, session_data):
encoded_data = base64.decodestring(session_data)
new_hash, pickled = encoded_data.split(':', 1)
expected_hash = self._hash(pickled)
if not constant_time_compare(new_hash, expected_hash):
raise InvalidHash('Invalid hash. Got %s, expected %s.' % (new_hash, expected_hash))
decoded = {}
else:
decoded = pickle.loads(pickled)
return (decoded, decoded.get(self.uid_key, None))
def encode(self, session_dict):
pickled = pickle.dumps(session_dict, pickle.HIGHEST_PROTOCOL)
new_hash = self._hash(pickled)
return base64.encodestring("%s:%s" % (new_hash, pickled))
| [
"dev@datadealer.net"
] | dev@datadealer.net |
937856c9a6ece0da8d5de01cfd2683abc9d3e14b | 62822dca093ae6b1487aa3d2ad3b3875b62f8682 | /LetsMakeAGroup/forms.py | db1b3a61b4a4aae96b6236d707b8073d57814c8b | [] | no_license | muyangy/LetsMakeAGroup | de2800be733da1d20f3bf5299b964d803f0f2a92 | a5aff5d6e5b98d97069bd39fd2134b5c52a5aa5d | refs/heads/master | 2021-01-01T16:39:11.315224 | 2014-01-17T20:41:02 | 2014-01-17T20:41:02 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,805 | py | from django import forms
from django.contrib.auth.models import User
from LetsMakeAGroup.models import *
class RegistrationForm(forms.Form):
username = forms.EmailField(max_length = 200, label =' Email')
password1 = forms.CharField(max_length = 200, label =' Password',widget = forms.PasswordInput())
password2 = forms.CharField(max_length = 200, label = 'Confirm password', widget = forms.PasswordInput())
firstname = forms.CharField(max_length = 200, label='First name')
lastname = forms.CharField(max_length = 200, label='Last name')
picture = forms.ImageField(required=False)
# Customizes form validation for properties that apply to more
# than one field. Overrides the forms.Form.clean function.
def clean(self):
# Calls our parent (forms.Form) .clean function, gets a dictionary
# of cleaned data as a result
cleaned_data = super(RegistrationForm, self).clean()
# Confirms that the two password fields match
password1 = cleaned_data.get('password1')
password2 = cleaned_data.get('password2')
if password1 and password2 and password1 != password2:
raise forms.ValidationError("Passwords did not match.")
# We must return the cleaned data we got from our parent.
return cleaned_data
# Customizes form validation for the username field.
def clean_username(self):
# Confirms that the username is not already present in the
# User model database.
username = self.cleaned_data.get('username')
if User.objects.filter(username__exact=username):
raise forms.ValidationError("Username is already taken.")
# We must return the cleaned data we got from the cleaned_data
# dictionary
return username
| [
"muyangy@andrew.cmu.edu"
] | muyangy@andrew.cmu.edu |
74f652c82ed09864bfc29d35dfe58397eefec789 | 613d8e4af67407c8e95effb1759b9ffca5246cd3 | /oc_stats/common.py | c9028eedcbcc4e889573a69a6a85ae4c9a630def | [] | no_license | old-castle-fansubs/stats | c0286af1c66d82165a526b4a307c79235da6807a | dd814252918d1d5f08af90ec0d39b008249639a1 | refs/heads/master | 2021-07-21T15:14:22.622202 | 2021-07-18T15:02:35 | 2021-07-18T15:02:35 | 189,878,638 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,049 | py | import dataclasses
import typing as T
from datetime import date, datetime, timedelta
from pathlib import Path
PROJ_DIR = Path(__file__).parent
ROOT_DIR = PROJ_DIR.parent
DATA_DIR = ROOT_DIR / "data"
CACHE_DIR = DATA_DIR / "cache"
STATIC_DIR = PROJ_DIR / "static"
def json_default(obj: T.Any) -> T.Any:
if dataclasses.is_dataclass(obj):
return dataclasses.asdict(obj)
if isinstance(obj, (datetime, date)):
return obj.isoformat()
if isinstance(obj, timedelta):
return obj.total_seconds()
return None
def convert_to_diffs(
items: dict[date, T.Union[int, float]]
) -> dict[date, T.Union[int, float]]:
ret: dict[date, T.Union[int, float]] = {}
if not items:
return ret
prev_key = list(items.keys())[0]
prev_value = None
for key, value in sorted(items.items(), key=lambda kv: kv[0]):
if prev_value is not None:
if abs((key - prev_key).days) <= 1:
ret[key] = value - prev_value
prev_key = key
prev_value = value
return ret
| [
"rr-@sakuya.pl"
] | rr-@sakuya.pl |
bc9829b87cfdd895c786f11b24e6aff615ddf15b | 4a869982cc4cc99d83df18465f545e51c97aeb37 | /.history/Baseline/ma-course-subjectivity-mining/pynlp/ml_pipeline/experiment_20201020181444.py | 272c380903f12fbe5e7c0bc22ccf713c02668ee2 | [] | no_license | SorenKF/emotional_sm | 09d367421782d8c83987fb99be258b1b30c4ce8d | 63d51103f7511b19a83dec668327fcc7ea4a7f39 | refs/heads/main | 2023-02-03T14:12:14.572581 | 2023-01-24T18:06:52 | 2023-01-24T18:06:52 | 301,679,032 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,291 | py | import logging
import sys
from tasks import vua_format as vf
from ml_pipeline import utils, cnn, preprocessing, pipeline_with_lexicon
from ml_pipeline import pipelines
from ml_pipeline.cnn import CNN, evaluate
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
handler = logging.StreamHandler(sys.stdout)
handler = logging.FileHandler('experiment.log')
formatter = logging.Formatter('%(asctime)s %(message)s')
handler.setFormatter(formatter)
logger.addHandler(handler)
def run(task_name, data_dir, pipeline_name, print_predictions):
logger.info('>> Running {} experiment'.format(task_name))
tsk = task(task_name)
logger.info('>> Loading data...')
tsk.load(data_dir)
logger.info('>> retrieving train/data instances...' + data_dir)
train_X, train_y, test_X, test_y = utils.get_instances(tsk, split_train_dev=False)
test_X_ref = test_X
if pipeline_name.startswith('cnn'):
pipe = cnn(pipeline_name)
train_X, train_y, test_X, test_y = pipe.encode(train_X, train_y, test_X, test_y)
logger.info('>> testing...')
else:
pipe = pipeline(pipeline_name)
logger.info('>> training pipeline ' + pipeline_name)
pipe.fit(train_X, train_y)
if pipeline_name == 'naive_bayes_counts_lex':
logger.info(" -- Found {} tokens in lexicon".format(pipe.tokens_from_lexicon))
logger.info('>> testing...')
sys_y = pipe.predict(test_X)
logger.info('>> evaluation...')
logger.info(utils.eval(test_y, sys_y))
# Write predictions to file
with open(f'results_{pipeline_name}.tsv', 'w+', encoding='utf-8') as outfile:
outfile.write("text\tgold_label\tpredicted_label\n")
for i in range(len(test_X_ref)):
outfile.write(f"{test_X_ref[i]}\t{test_y[i]}\t{sys_y[i]}\n")
if print_predictions:
logger.info('>> predictions')
utils.print_all_predictions(test_X_ref, test_y, sys_y, logger)
def task(name):
if name == 'vua_format':
return vf.VuaFormat()
else:
raise ValueError("task name is unknown. You can add a custom task in 'tasks'")
def cnn(name):
if name == 'cnn_raw':
return CNN()
elif name == 'cnn_prep':
return CNN(preprocessing.std_prep())
else:
raise ValueError("pipeline name is unknown.")
def pipeline(name):
# Standard pipelines
if name == 'naive_bayes_counts':
return pipelines.naive_bayes_counts()
elif name == 'naive_bayes_tfidf':
return pipelines.naive_bayes_tfidf()
elif name == 'naive_bayes_counts_lex':
return pipeline_with_lexicon.naive_bayes_counts_lex()
elif name == 'svm_libsvc_counts':
return pipelines.svm_libsvc_counts()
elif name == 'svm_libsvc_tfidf':
return pipelines.svm_libsvc_tfidf()
elif name == 'svm_libsvc_embed':
return pipelines.svm_libsvc_embed()
elif name == 'svm_sigmoid_embed':
return pipelines.svm_sigmoid_embed()
# Custom pipelines
elif name == 'svm_libsvc_char_4gram':
return pipelines.svm_libsvc_char_4gram()
elif name == 'deepmoji_embed':
return pipelines.deepmoji_embed()
else:
raise ValueError("pipeline name is unknown. You can add a custom pipeline in 'pipelines'")
| [
"60227515+SorenKF@users.noreply.github.com"
] | 60227515+SorenKF@users.noreply.github.com |
f71d0c192d1285d54474ea904ea26261a6914e66 | 6e12b7bb4fd2001c6327da0e16161d730809249d | /conditionals.py | c99ae12eb8428120ce208ecb6d1bb836b766d358 | [] | no_license | shruthi-jay/python-microdegree | bb7e4f53f54e53ac486ef496a743906bb52a66f0 | bc8264ac893196de26db5e2784ba073ce028d6a2 | refs/heads/main | 2023-08-07T04:27:58.648861 | 2021-09-08T03:47:39 | 2021-09-08T03:47:39 | 371,587,709 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,063 | py | mylist = [12, 32, 34, 2, 4]
mydictinry = {"one": 32, "two": 322, "three": 32, "four": 4}
def avg(n):
    """Print and return the arithmetic mean of *n*.

    For a dict the mean of its values is used; otherwise *n* is treated
    as a sequence of numbers.

    Bug fix: the sequence branch previously averaged the module-level
    ``mylist`` instead of the argument ``n``, so ``avg`` ignored its input
    for every non-dict argument.
    """
    # NOTE: type(n) == dict is kept on purpose — avgs() below demonstrates
    # the isinstance() alternative, as explained in the module's docstring.
    if type(n) == dict:
        print("its a dictionary")
        mean = sum(n.values()) / len(n)
        print("average is ", mean)
    else:
        print("its a list")
        mean = sum(n) / len(n)  # was: sum(mylist) / len(mylist)
        print("average is ", mean)
    return mean
avg(mylist)
avg(mydictinry)
'''
The isinstance() function returns True if the specified object is of the specified type, otherwise False.
If the type parameter is a tuple, this function will return True if the object is one of the types in the tuple.
isinstance(object, type)
so the code can be changed as
'''
def avgs(n):
    """Print and return the arithmetic mean of *n*, using isinstance().

    Unlike ``avg`` this also accepts dict subclasses (e.g. defaultdict),
    because ``isinstance`` honours inheritance where ``type(n) == dict``
    does not.

    Bug fix: the sequence branch previously averaged the module-level
    ``mylist`` instead of the argument ``n``.
    """
    if isinstance(n, dict):
        print("its a dictionary")
        mean = sum(n.values()) / len(n)
        print("average is ", mean)
    else:
        print("its a list")
        mean = sum(n) / len(n)  # was: sum(mylist) / len(mylist)
        print("average is ", mean)
    return mean
print("average is ", avg)
avgs(mylist)
avgs(mydictinry)
'''
if else
'''
if 3 > 1:
print("3 greater")
elif 3 == 1:
print("equal")
else:
print("3 smaller")
| [
"noreply@github.com"
] | shruthi-jay.noreply@github.com |
472f728bfdf96cd0449f7321354f095d713f0a98 | 08e9445864626b063093b68f3b2350e3bdc41f2b | /smt.py | 711e43b6da517989cda83bacb9c50b5ace5a2652 | [
"MIT"
] | permissive | L4ventures/plasma-cash | b66b6c55b366a8bf1b1b92be552dbfabe836cfe5 | 1a6cc76e048f4ee7a85f0188cb3d4a4e9d8f9359 | refs/heads/master | 2020-03-31T02:10:34.002164 | 2018-10-26T12:09:13 | 2018-10-26T12:09:13 | 151,811,789 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 257 | py | import sha3
def short_keccak256(x):
    """Return the first byte of the Keccak-256 digest of *x* (bytes)."""
    hasher = sha3.keccak_256()
    hasher.update(x)
    return hasher.digest()[:1]
def bl(x):
    """Return the short Keccak digest of *x* as a binary-digit string."""
    value = int.from_bytes(short_keccak256(x), 'big')
    return "{0:b}".format(value)
print(bl(b''))
print(bl(b'a'))
# def smt(depth, elems) | [
"xuanji@gmail.com"
] | xuanji@gmail.com |
1afc24adfe7a79c184a08009fa6d8f3031d965d5 | b02a759c59a9d469f5fee00b3775703c5405e1b2 | /16.RecommenderSystems/test_evaluation_model.py | 0697d622f761ff08d2ef829abbb01ea9d92d7d8e | [] | no_license | asdlei99/MachineLearningCombat | a18672e11306d26cc59b9bb16ee90db06af24df5 | 57e8dafcef849eb407bc79a0b0724abc9931bd27 | refs/heads/master | 2021-09-15T12:50:24.531567 | 2018-06-02T01:30:27 | 2018-06-02T01:30:27 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,754 | py | import random
import math
def SplitData(data, M, k, seed):
    """Split *data* (iterable of (user, item) pairs) into train/test lists.

    Each pair lands in the test set with probability 1/M (the fold whose
    random draw equals *k*); everything else goes to train. Seeding makes
    the split reproducible.

    Bug fix: ``random.randint(0, M)`` is inclusive on BOTH ends, so it drew
    from M+1 values and the test fraction was 1/(M+1), not the intended
    1/M. Changed to ``randint(0, M - 1)``.
    """
    test = []
    train = []
    random.seed(seed)
    for user, item in data:
        if random.randint(0, M - 1) == k:
            test.append([user, item])
        else:
            train.append([user, item])
    return train, test
# 准确率
def Precision(train, test, N):
    """Fraction of the N recommendations per user that hit the test set."""
    hits = 0
    recommended = 0
    for user in train:
        ground_truth = test[user]
        recs = GetRecommendation(user, N)
        hits += sum(1 for item, _score in recs if item in ground_truth)
        recommended += N
    return hits / (recommended * 1.0)
# 召回率
def Recall(train, test, N):
    """Fraction of each user's test items recovered by the top-N recommendations."""
    hits = 0
    relevant = 0
    for user in train:
        ground_truth = test[user]
        recs = GetRecommendation(user, N)
        hits += sum(1 for item, _score in recs if item in ground_truth)
        relevant += len(ground_truth)
    return hits / (relevant * 1.0)
# 覆盖率
def Coverage(train, test, N):
    """Share of all catalogue items that appear in at least one user's top-N."""
    recommended_items = set()
    all_items = set()
    for user in train:
        all_items.update(train[user])
        recommended_items.update(
            item for item, _score in GetRecommendation(user, N))
    return len(recommended_items) / (len(all_items) * 1.0)
# 新颖度
def Popularity(train, test, N):
    """Mean log(1 + popularity) of recommended items (novelty measure)."""
    # Count how many training users interacted with each item.
    item_popularity = {}
    for _user, items in train.items():
        for item in items:
            item_popularity[item] = item_popularity.get(item, 0) + 1
    log_total = 0
    recommended = 0
    for user in train:
        for item, _score in GetRecommendation(user, N):
            log_total += math.log(1 + item_popularity[item])
            recommended += 1
    return log_total / (recommended * 1.0)
| [
"wnma3mz@163.com"
] | wnma3mz@163.com |
924614ca77686fbed9c16d7c46a7bc9e6c37e0a0 | 6471dcdf07fc0bdcde9797914857d154d85e3c07 | /pie_app/bSerial.py | 8f5c20566aaf7756b96301008a6c2c9f609a675b | [] | no_license | cudmore/pie | e43ec3c4c95acb5a051a25a8d5549071908ed5c8 | b74b105bc101a8504453d20a066fcd764864731f | refs/heads/master | 2023-05-15T01:51:54.326274 | 2023-05-09T02:17:11 | 2023-05-09T02:17:11 | 139,335,434 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,915 | py | """
Author: Robert H Cudmore
Date: 20180808
"""
import time, threading, serial, queue
import logging
logger = logging.getLogger('flask.app')
#########################################################################
class mySerialThread(threading.Thread):
    """
    background thread which monitors inSerialQueue and sends commands out serial.

    Requests arrive on inSerialQueue as dicts of the form
    {'type': 'command' | 'dump', 'str': <command or file path>}.
    One-line responses to 'command' requests are published on
    outSerialQueue; failures to open the port are reported on
    errorSerialQueue. Call stop() then join() to shut the thread down.
    """
    def __init__(self, inSerialQueue, outSerialQueue, errorSerialQueue, port, baud):
        """
        inSerialQueue: queue of request dicts consumed by run()
        outSerialQueue: queue receiving one-line responses to 'command' requests
        errorSerialQueue: queue receiving stringified serial-open errors
        port: serial device path (e.g. '/dev/ttyACM0')
        baud: baud rate (e.g. 115200)
        """
        threading.Thread.__init__(self)
        # Event polled by run(); set() via stop() ends the loop.
        self._stop_event = threading.Event()
        self.inSerialQueue = inSerialQueue
        self.outSerialQueue = outSerialQueue
        self.errorSerialQueue = errorSerialQueue
        self.port = port #'/dev/ttyACM0'
        self.baud = baud #115200
        logger.debug('mySerialThread initializing, port:' + str(port) + ' baud:' + str(baud))
        self.mySerial = None
        try:
            # there is no corresponding self.mySerial.close() ???
            self.mySerial = serial.Serial(port, baud, timeout=0.25)
        except (serial.SerialException) as e:
            # Port could not be opened; report it and leave mySerial as None
            # so run() simply ignores incoming requests.
            logger.error(str(e))
            errorSerialQueue.put(str(e))
        except:
            # Unknown failure: log and re-raise so it is not silently lost.
            logger.error('other exception in mySerialThread init')
            raise
        #else:
        #	errorSerialQueue.put('None')
    def stop(self):
        """
        call stop() then join() to ensure thread is done
        """
        self._stop_event.set()
    def run(self):
        """Poll inSerialQueue and forward requests to the serial port."""
        logger.debug('starting mySerialThread')
        while not self._stop_event.is_set():
            try:
                # serialDict is {'type': command/dump, 'str': command/filePath}
                # Non-blocking get: fall through to the sleep when empty.
                serialDict = self.inSerialQueue.get(block=False, timeout=0)
            except (queue.Empty) as e:
                # there was nothing in the queue
                pass
            else:
                # there was something in the queue
                #logger.info('serialThread inSerialQueue: "' + str(serialCommand) + '"')
                serialType = serialDict['type']
                serialCommand = serialDict['str']
                try:
                    if self.mySerial is not None:
                        if serialType == 'dump':
                            # dump a teensy/arduino trial to a file
                            fullSavePath = serialCommand
                            self.mySerial.write('d\n'.encode()) # write 'd\n'
                            #time.sleep(0.01)
                            # Read lines until an empty line marks end of dump.
                            resp = self.mySerial.readline().decode().strip()
                            with open(fullSavePath, 'w') as file:
                                while resp:
                                    file.write(resp + '\n')
                                    resp = self.mySerial.readline().decode().strip()
                        elif serialType == 'command':
                            # send a command to teensy and get one line response
                            if not serialCommand.endswith('\n'):
                                serialCommand += '\n'
                            self.mySerial.write(serialCommand.encode())
                            #time.sleep(0.01)
                            resp = self.mySerial.readline().decode().strip()
                            self.outSerialQueue.put(resp)
                            logger.info('serialThread outSerialQueue: "' + str(resp) + '"')
                        else:
                            logger.error('bad serial command type' + str(serialDict))
                except (serial.SerialException) as e:
                    logger.error(str(e))
                except:
                    # Unknown failure: log and re-raise (kills the thread).
                    logger.error('other exception in mySerialThread run')
                    raise
            # make sure not to remove this
            time.sleep(0.1)
| [
"robert.cudmore@gmail.com"
] | robert.cudmore@gmail.com |
94a07ff0b6b63ad755a82045a5463b43c343005b | 30fef5018756f8036b6c754a056f0a27450b215c | /desafios/DESAFIO 4.py | 575c8f4749beff4750ac3190835d33587b90ad3a | [] | no_license | JMass1/curso_python | b1dd2b2b01019898bdf58486c8c6d38a71cb6e83 | 7c2a64401bb34b445e71ee28a3903587de669c05 | refs/heads/master | 2022-11-21T19:10:53.810525 | 2020-07-19T19:54:33 | 2020-07-19T19:54:33 | 272,292,122 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 328 | py | print('========== DESAFIO #4 ==========')
var: str = input('Digite algo')
print('Você digitou: {}'.format(var))
print('O tipo primitivo do que você digitou é {}'.format(type(var)))
if var.isnumeric():
print('O que você digitou é numérico')
if var.isalpha():
print('O que você digitou é alfabético')
| [
"juliano.massanetto@gmail.com"
] | juliano.massanetto@gmail.com |
3802507496894d4653a42e21e7b67071fb3f637a | d5e8610ad12b6733e5b014abba5cf356cb658a44 | /network/client.py | cbcce48537f16c421516b99c718c8210d6b6b600 | [
"WTFPL"
] | permissive | EliasFarhan/GBJam | 494c4bfcff1e2246001d489c3e60a6e9ddb6ae62 | 156a003378b8db994d4bd1f9c3a12415ceac7c01 | refs/heads/master | 2016-09-05T11:41:50.329113 | 2014-08-10T20:14:31 | 2014-08-10T20:14:31 | 22,541,009 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,869 | py | import copy
import socket
from threading import Lock, Thread
from engine.const import log, CONST
from engine.vector import Vector2
players = {}
players_lock = Lock()
player_pos = Vector2()
player_anim_state = ""
player_anim_counter = 0
client_player_lock = Lock()
PORT = CONST.port
HOST = CONST.host
update_thread = None
self_id = 0
sock = None
def get_players():
    """Return a deep-copied snapshot of the shared ``players`` table.

    Fix: the lock is now acquired with a ``with`` statement so it is
    released even if ``copy.deepcopy`` raises; the old acquire()/release()
    pair leaked the lock on error, deadlocking every later caller. The
    read-only ``global`` declaration was also dropped (reads never need it).
    """
    with players_lock:
        return copy.deepcopy(players)
def get_player():
    """Return ``(pos, anim_state, anim_frame)`` of the local player.

    Values are deep-copied under ``client_player_lock`` so callers get a
    consistent snapshot that cannot be mutated by the network threads.

    Fix: the lock is taken with ``with`` so it is released even when a
    deepcopy raises (the old acquire()/release() pair leaked it on error).
    """
    with client_player_lock:
        pos = copy.deepcopy(player_pos)
        state = copy.deepcopy(player_anim_state)
        frame = copy.deepcopy(player_anim_counter)
    return pos, state, frame
def set_player(new_player):
    """Publish *new_player*'s absolute position and animation state.

    Writes the module-level snapshot (``player_pos``/``player_anim_state``/
    ``player_anim_counter``) that client_set() streams to the server. The
    absolute position is the entity position plus its screen-relative
    offset scaled by the current screen size.

    Fix: the lock is taken with ``with`` so it is released even when the
    position arithmetic raises (the old acquire()/release() pair leaked it).
    """
    global player_pos, player_anim_counter, player_anim_state
    from engine.init import engine
    with client_player_lock:
        player_pos = new_player.pos + new_player.screen_relative_pos * engine.get_screen_size()
        player_anim_state = new_player.anim.state
        player_anim_counter = new_player.anim.anim_counter
def get_self_id():
    """Return the client id assigned by the server during init()."""
    # Reading a module-level name needs no ``global`` declaration.
    return self_id
def init():
    """Request a client id from the server over TCP, then start the two
    daemon threads that continuously push/pull player state over UDP.

    On socket failure the global ``sock`` is reset to None and the
    function returns without starting the threads.
    NOTE(review): sends a str over the socket — Python 2-era code; on
    Python 3 ``sendall`` requires bytes. Confirm target interpreter.
    """
    global update_thread, self_id,sock
    data = "ID_REQUEST;"
    new_id_request = None
    try:
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        sock.connect((HOST, PORT))
        sock.sendall(data)
        new_id_request = sock.recv(1024)
    except socket.error as e:
        sock.close()
        sock = None
        log("Network init: "+str(e),1)
        return
    # Server reply format: "ID_REQUEST;<id>" — keep the second field.
    self_id = new_id_request.split(";")[1]
    # Daemon threads die with the main process; no explicit join needed.
    get_thread = Thread(target=client_get)
    get_thread.daemon = True
    get_thread.start()
    set_thread = Thread(target=client_set)
    set_thread.daemon = True
    set_thread.start()
def client_get():
    """Background loop: poll the server over UDP for other players' state
    and merge each reply into the shared ``players`` table.

    Reply format (semicolon-separated): id;x,y;anim_state;frame;...
    Runs until ``engine.finish`` becomes true.
    """
    log("START UPDATE SERVER")
    from engine.init import engine
    udp_sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    while not engine.finish:
        udp_sock.sendto("GET_REQUEST;%s"%self_id, (HOST, PORT+1))
        get_player_data = udp_sock.recv(1024)
        """Position"""
        parsed_data = get_player_data.split(';')
        # Field 1 is "x,y" — convert to a Vector2 of ints.
        parsed_data[1] = parsed_data[1].split(',')
        parsed_data[1] = Vector2(int(float(parsed_data[1][0])), int(float(parsed_data[1][1])))
        """Frame"""
        parsed_data[3] = int(parsed_data[3])
        """update players position"""
        players_lock.acquire()
        players[parsed_data[0]] = parsed_data
        players_lock.release()
def client_set():
    """Background loop: stream the local player's position/animation to the
    server over UDP until ``engine.finish`` becomes true.

    Nothing is sent while the animation state is still empty (i.e. before
    the local player has been published via set_player()).
    """
    log("START UPDATE SERVER")
    from engine.init import engine
    udp_sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    while not engine.finish:
        (pos, state, frame) = get_player()
        if state != "":
            # Datagram format: SET_REQUEST;<id>;<x,y>;<anim_state>;<frame>;
            udp_sock.sendto("SET_REQUEST;"+str(self_id)+";"
                            +pos.get_string() +";"
                            +state+";"
                            +str(frame)+";"
                            , (HOST, PORT+2))
def set_request(pos, state, frame):
    """Send the local player's state to the server over the shared TCP
    socket, reconnecting lazily if the socket was closed by an earlier
    error. The one-line acknowledgement from the server is discarded.
    """
    global sock
    """Change the position of the player on the server"""
    """Set correct pos, state, frame"""
    try:
        if not sock:
            # Lazy (re)connect: sock is None after any previous socket error.
            sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            sock.connect((HOST, PORT))
        sock.sendall("SET_REQUEST;"+str(self_id)+";"+pos.get_string() +";"+state+";"+str(frame)+";")
        sock.recv(1024)
    except socket.error as e:
        sock.close()
        sock = None
        log("Network set: "+str(e),1)
        return
def get_players_request():
    """Fetch every player's state from the server over the shared TCP
    socket and merge the replies into the ``players`` table.

    Protocol: send GET_REQUEST; the server answers with the player count,
    then one semicolon-separated record per player, each acknowledged
    with "NEXT". On socket failure the socket is reset to None so the
    next call reconnects.
    NOTE(review): unlike client_get(), ``players`` is updated here
    without taking players_lock — confirm this is only called from the
    owning thread.
    """
    global sock
    try:
        if not sock:
            # Lazy (re)connect after a previous socket error.
            sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            sock.connect((HOST, PORT))
        sock.sendall("GET_REQUEST;")
        get_request_nmb = sock.recv(1024)
        #log(get_request_nmb)
        try:
            # Reply format: "<tag>;<count>;..." — field 1 is the player count.
            nmb = int(get_request_nmb.split(';')[1])
            sock.sendall("%i;"%nmb)
            length = 5
            for i in range(nmb):
                get_request_data = sock.recv(1024)
                #log(get_request_data)
                """Position"""
                parsed_data = get_request_data.split(';')
                parsed_data[1] = parsed_data[1].split(',')
                parsed_data[1] = Vector2(int(float(parsed_data[1][0])), int(float(parsed_data[1][1])))
                """Frame"""
                parsed_data[3] = int(parsed_data[3])
                """update players position"""
                players[parsed_data[0]] = parsed_data
                sock.sendall("NEXT")
        except IndexError:
            # Malformed reply: silently skip this poll cycle.
            pass
    except socket.error as e:
        sock.close()
        sock = None
        log("Network get: "+str(e),1)
        return
| [
"elias.farhan@gmail.com"
] | elias.farhan@gmail.com |
432ca0ebc4743de7f870d71fc907a5319f21044b | ced40a3c765266da4c8567baa57b302d7f18708c | /ug_data.py | 43d7ca7863e72bae8bd0b13a385e50cbd1018ea6 | [] | no_license | vojtapolasek/united_guards | ed661e819a89376566920124f06520e644ebf2c1 | 629909f8fcd2f92fb3e40c3ddc21d499c40dff47 | refs/heads/master | 2016-09-06T11:57:35.542211 | 2012-12-30T22:12:00 | 2012-12-30T22:12:00 | 5,668,337 | 1 | 0 | null | 2013-01-03T16:04:49 | 2012-09-04T06:21:38 | Python | UTF-8 | Python | false | false | 1,298 | py | #!/usr/bin/env python
#data for the game united guards
import pygame, os.path, menu, game
pygame.mixer.init()
#initialisation of sounds
plane = []
planecount = 8
for i in range(1, planecount + 1):
plane.append (pygame.mixer.Sound(os.path.normpath("sounds/plane"+str(i)+".ogg")))
mg = []
mgcount = 9
for i in range(1,mgcount + 1):
mg.append (pygame.mixer.Sound(os.path.normpath("sounds/mg"+str(i)+".ogg")))
aim = pygame.mixer.Sound(os.path.normpath("sounds/aim.ogg"))
missile = []
for i in range (0, 3):
missile.append (pygame.mixer.Sound(os.path.normpath("sounds/missile"+str(i)+".ogg")))
planehit = pygame.mixer.Sound (os.path.normpath("sounds/planehit.ogg"))
dead = []
deadcount = 3
for i in range (1, deadcount +1):
dead.append (pygame.mixer.Sound(os.path.normpath("sounds/dead"+str(i)+".ogg")))
bhit = []
bhitcount = 4
for i in range (1, bhitcount +1):
bhit.append (pygame.mixer.Sound(os.path.normpath("sounds/bhit"+str(i)+".ogg")))
ricochet = []
ricochetcount =8
for i in range (1, ricochetcount +1):
ricochet.append (pygame.mixer.Sound(os.path.normpath("sounds/ricoch"+str(i)+".ogg")))
stereotest = pygame.mixer.Sound(os.path.normpath("sounds/stereotest.ogg"))
#channel initialisation
pygame.mixer.set_reserved(2)
chan=pygame.mixer.Channel(0)
mgchan = pygame.mixer.Channel(1)
| [
"krecoun@gmail.com"
] | krecoun@gmail.com |
48aa0744a9a5193d71a379dbdc5c37fb9c8decf9 | 61e6a917253d67c8d26442cd32c913eee3817af4 | /app/src/main/cpp/NNPACK/src/x86_64-fma/blas/s4c6gemm.py | 0b3792c924be50f27cd1c52ba59c66c66ac18106 | [
"BSD-2-Clause",
"BSD-3-Clause"
] | permissive | hjimce/android-image | c1e0a90a28ee2e85a8b493ffb5e332d98c37116c | 4f3b570253ed11ddca58d263771820589443848e | refs/heads/master | 2021-01-24T11:39:25.820256 | 2017-04-21T03:33:35 | 2017-04-21T03:33:35 | 70,216,265 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 8,142 | py | mr, nr = 2, 2
for conjugate_b, transpose_c in [(False, False), (True, False), (True, True)]:
arg_k = Argument(size_t, "k")
arg_update = Argument(size_t, "update")
arg_a = Argument(ptr(const_float_), "a")
arg_b = Argument(ptr(const_float_), "b")
arg_c = Argument(ptr(float_), "c")
arg_row_stride = Argument(size_t, "row_stride_c")
with Function("nnp_s4c6gemm{conjb}{transc}_only_{mr}x{nr}__fma3".format(mr=mr, nr=nr,
conjb="_conjb" if conjugate_b else "",
transc="_transc" if transpose_c else ""),
(arg_k, arg_update, arg_a, arg_b, arg_c, arg_row_stride),
target=uarch.default + isa.fma3):
reg_k = GeneralPurposeRegister64()
LOAD.ARGUMENT(reg_k, arg_k)
reg_update = GeneralPurposeRegister64()
LOAD.ARGUMENT(reg_update, arg_update)
reg_a = GeneralPurposeRegister64()
LOAD.ARGUMENT(reg_a, arg_a)
reg_b = GeneralPurposeRegister64()
LOAD.ARGUMENT(reg_b, arg_b)
reg_c = GeneralPurposeRegister64()
LOAD.ARGUMENT(reg_c, arg_c)
reg_row_stride = GeneralPurposeRegister64()
LOAD.ARGUMENT(reg_row_stride, arg_row_stride)
SHL(reg_row_stride, 2)
with Block() as prefetch_c:
if not transpose_c:
for m in range(mr):
PREFETCHT0([reg_c])
if m + 1 != mr:
ADD(reg_c, reg_row_stride)
else:
for n in range(nr):
PREFETCHT0([reg_c])
if n + 1 != nr:
ADD(reg_c, reg_row_stride)
ymm_c_re, ymm_c_im = tuple([[YMMRegister() for n in range(nr)] for m in range(mr)] for c in range(2))
VZEROALL()
ymm_a_re, ymm_a_im = tuple([YMMRegister() for m in range(2*mr)] for c in range(2))
ymm_b_re, ymm_b_im = tuple([YMMRegister() for n in range(2*nr)] for c in range(2))
with Loop() as loop:
for m in range(mr):
VMOVAPS(ymm_a_re[m], [reg_a + (2*m+0) * YMMRegister.size])
VMOVAPS(ymm_a_im[m], [reg_a + (2*m+1) * YMMRegister.size])
SUB(reg_a, -YMMRegister.size * 2 * mr)
for n in range(nr):
VMOVAPS(ymm_b_re[n], [reg_b + (2*n+0) * YMMRegister.size])
for m in range(mr):
VFMADD231PS(ymm_c_re[m][n], ymm_a_re[m], ymm_b_re[n])
VMOVAPS(ymm_b_im[n], [reg_b + (2*n+1) * YMMRegister.size])
VBLENDPS(ymm_b_re[n], ymm_b_re[n], ymm_b_im[n], 0b00000011)
for m in range(mr):
VFMADD231PS(ymm_c_im[m][n], ymm_a_im[m], ymm_b_re[n])
SUB(reg_b, -YMMRegister.size * 2 * nr)
ymm_zero_columns01_mask = YMMRegister()
VMOVAPS(ymm_zero_columns01_mask, Constant.uint32x8(0x00000000, 0x00000000, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF))
for n in range(nr):
VANDPS(ymm_b_im[n], ymm_b_im[n], ymm_zero_columns01_mask)
for m in range(mr):
if conjugate_b:
VFMADD231PS(ymm_c_re[m][n], ymm_a_im[m], ymm_b_im[n])
VFNMADD231PS(ymm_c_im[m][n], ymm_a_re[m], ymm_b_im[n])
else:
VFNMADD231PS(ymm_c_re[m][n], ymm_a_im[m], ymm_b_im[n])
VFMADD231PS(ymm_c_im[m][n], ymm_a_re[m], ymm_b_im[n])
DEC(reg_k)
JNZ(loop.begin)
store_c = Block()
# Check if we need to update C or overwrite it
TEST(reg_update, reg_update)
JZ(store_c.begin)
if transpose_c:
mr, nr = nr, mr
ymm_c_re = [list(ymm_column) for ymm_column in zip(*ymm_c_re)]
ymm_c_im = [list(ymm_column) for ymm_column in zip(*ymm_c_im)]
with Block() as update_c:
for m in reversed(range(mr)):
for n in range(nr):
VADDPS(ymm_c_re[m][n], ymm_c_re[m][n], [reg_c + (2*n+0) * YMMRegister.size])
VADDPS(ymm_c_im[m][n], ymm_c_im[m][n], [reg_c + (2*n+1) * YMMRegister.size])
VMOVAPS([reg_c + (2*n+0) * YMMRegister.size], ymm_c_re[m][n])
VMOVAPS([reg_c + (2*n+1) * YMMRegister.size], ymm_c_im[m][n])
if m != 0:
SUB(reg_c, reg_row_stride)
RETURN()
with store_c:
for m in reversed(range(mr)):
for n in range(nr):
VMOVAPS([reg_c + (2*n+0) * YMMRegister.size], ymm_c_re[m][n])
VMOVAPS([reg_c + (2*n+1) * YMMRegister.size], ymm_c_im[m][n])
if m != 0:
SUB(reg_c, reg_row_stride)
RETURN()
arg_mr = Argument(uint32_t, "mr")
arg_nr = Argument(uint32_t, "nr")
arg_k = Argument(size_t, "k")
arg_update = Argument(size_t, "update")
arg_a = Argument(ptr(const_float_), "a")
arg_b = Argument(ptr(const_float_), "b")
arg_c = Argument(ptr(float_), "c")
arg_row_stride = Argument(size_t, "row_stride_c")
with Function("nnp_s4c6gemm{conjb}{transc}_upto_{mr}x{nr}__fma3".format(mr=mr, nr=nr,
conjb="_conjb" if conjugate_b else "",
transc="_transc" if transpose_c else ""),
(arg_mr, arg_nr, arg_k, arg_update, arg_a, arg_b, arg_c, arg_row_stride),
target=uarch.default + isa.fma3):
reg_mr = GeneralPurposeRegister32()
LOAD.ARGUMENT(reg_mr, arg_mr)
reg_nr = GeneralPurposeRegister32()
LOAD.ARGUMENT(reg_nr, arg_nr)
reg_k = GeneralPurposeRegister64()
LOAD.ARGUMENT(reg_k, arg_k)
reg_update = GeneralPurposeRegister64()
LOAD.ARGUMENT(reg_update, arg_update)
reg_a = GeneralPurposeRegister64()
LOAD.ARGUMENT(reg_a, arg_a)
reg_b = GeneralPurposeRegister64()
LOAD.ARGUMENT(reg_b, arg_b)
reg_c = GeneralPurposeRegister64()
LOAD.ARGUMENT(reg_c, arg_c)
reg_row_stride = GeneralPurposeRegister64()
LOAD.ARGUMENT(reg_row_stride, arg_row_stride)
SHL(reg_row_stride, 2)
ymm_c_re, ymm_c_im = tuple([[YMMRegister() for n in range(nr)] for m in range(mr)] for c in range(2))
VZEROALL()
ymm_a_re, ymm_a_im = tuple([YMMRegister() for m in range(mr)] for c in range(2))
ymm_b_re, ymm_b_im = tuple([YMMRegister() for n in range(nr)] for c in range(2))
with Loop() as loop:
with Block() as load_a:
for m, (ymm_re, ymm_im) in enumerate(zip(ymm_a_re, ymm_a_im)):
VMOVAPS(ymm_re, [reg_a])
VMOVAPS(ymm_im, [reg_a + YMMRegister.size])
ADD(reg_a, 2 * YMMRegister.size)
if m + 1 != mr:
CMP(reg_mr, m + 1)
JE(load_a.end)
with Block() as load_b:
for n in range(nr):
VMOVAPS(ymm_b_re[n], [reg_b])
for m in range(mr):
VFMADD231PS(ymm_c_re[m][n], ymm_a_re[m], ymm_b_re[n])
VMOVAPS(ymm_b_im[n], [reg_b + YMMRegister.size])
VBLENDPS(ymm_b_re[n], ymm_b_re[n], ymm_b_im[n], 0b00000011)
for m in range(mr):
VFMADD231PS(ymm_c_im[m][n], ymm_a_im[m], ymm_b_re[n])
ADD(reg_b, YMMRegister.size * 2)
if n + 1 != nr:
CMP(reg_nr, n + 1)
JE(load_b.end)
ymm_zero_columns01_mask = YMMRegister()
VMOVAPS(ymm_zero_columns01_mask, Constant.uint32x8(0x00000000, 0x00000000, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF))
for n in range(nr):
VANDPS(ymm_b_im[n], ymm_b_im[n], ymm_zero_columns01_mask)
for m in range(mr):
if conjugate_b:
VFMADD231PS(ymm_c_re[m][n], ymm_a_im[m], ymm_b_im[n])
VFNMADD231PS(ymm_c_im[m][n], ymm_a_re[m], ymm_b_im[n])
else:
VFNMADD231PS(ymm_c_re[m][n], ymm_a_im[m], ymm_b_im[n])
VFMADD231PS(ymm_c_im[m][n], ymm_a_re[m], ymm_b_im[n])
DEC(reg_k)
JNZ(loop.begin)
store_c = Block()
# Check if we need to update C or overwrite it
TEST(reg_update, reg_update)
JZ(store_c.begin)
if transpose_c:
mr, nr = nr, mr
reg_mr, reg_nr = reg_nr, reg_mr
ymm_c_re = [list(ymm_column) for ymm_column in zip(*ymm_c_re)]
ymm_c_im = [list(ymm_column) for ymm_column in zip(*ymm_c_im)]
with Block() as update_c:
for m in range(mr):
with Block() as update_c_row:
for n in range(nr):
VADDPS(ymm_c_re[m][n], ymm_c_re[m][n], [reg_c + (2*n+0) * YMMRegister.size])
VADDPS(ymm_c_im[m][n], ymm_c_im[m][n], [reg_c + (2*n+1) * YMMRegister.size])
VMOVAPS([reg_c + (2*n+0) * YMMRegister.size], ymm_c_re[m][n])
VMOVAPS([reg_c + (2*n+1) * YMMRegister.size], ymm_c_im[m][n])
if n + 1 != nr:
CMP(reg_nr, n + 1)
JE(update_c_row.end)
if m + 1 != mr:
CMP(reg_mr, m + 1)
JE(update_c.end)
ADD(reg_c, reg_row_stride)
RETURN()
with store_c:
for m in range(mr):
with Block() as store_c_row:
for n in range(nr):
VMOVAPS([reg_c + (2*n+0) * YMMRegister.size], ymm_c_re[m][n])
VMOVAPS([reg_c + (2*n+1) * YMMRegister.size], ymm_c_im[m][n])
if n + 1 != nr:
CMP(reg_nr, n + 1)
JE(store_c_row.end)
if m + 1 != mr:
CMP(reg_mr, m + 1)
JE(store_c.end)
ADD(reg_c, reg_row_stride)
RETURN()
| [
"hjimce.huang@avatarworks.com"
] | hjimce.huang@avatarworks.com |
25468c8b5d80c06fdabcbc7e5fb4b79db9ae0c10 | 3eee6120ded9defbc9c9582d8cbac364b3359586 | /__init__.py | fd1b664b7cae2a94005333e7c16ecf58c317e58d | [] | no_license | noahbraunf/ysopy | 18aa10636d90ecb58e2a253f18476a1541804f68 | 6a7ec2ad20a9abe513a9b87dd14982a2925793ac | refs/heads/main | 2023-03-30T07:30:39.704791 | 2021-04-05T13:03:38 | 2021-04-05T13:03:38 | 322,464,988 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11 | py | alksdjlakj
| [
"Braunfeld"
] | Braunfeld |
670e3279efdafed42ae531d9129bbbdd829e0b5a | b472c692ac9efc39e508d2709fe14e2b4e844fd7 | /Python/test.py | 818eb63debb2c34cbca6a32dc628f5e91c3de1f1 | [] | no_license | enningxie/Interview | 75ac734873282dc67503815019718a6e1b27c512 | becdd40463c01551f2c8a53abc9d2281979f2bc1 | refs/heads/master | 2020-03-18T08:13:00.049201 | 2018-12-25T01:02:45 | 2018-12-25T01:02:45 | 134,496,872 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 216 | py | from collections import defaultdict
if __name__ == '__main__':
d_dict = defaultdict()
nums1 = [1, 2, 2, 1]
nums2 = [2, 2]
for i, value in enumerate(nums1):
d_dict[i] = value
d_dict.pop()
| [
"enningxie@163.com"
] | enningxie@163.com |
c4435eea970f7de45ee8535fe10e0a6cd287b9b2 | 55df73cfcf0e898196038b35584bde9ea0635bd7 | /src/shops/migrations/0002_auto_20180707_1205.py | d0823f353b2310a7551701eeb33999fe21fe4453 | [] | no_license | PatrykJanMatlak/ype-app | deeb85100e59268450b226f94aef3052ef23faa4 | 429d1144495e10c70c9d95613004496850b752a3 | refs/heads/master | 2020-03-22T01:56:56.248901 | 2018-07-07T10:55:29 | 2018-07-07T10:55:29 | 139,332,418 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 749 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.13 on 2018-07-07 10:05
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('shops', '0001_initial'),
]
operations = [
migrations.RemoveField(
model_name='shopmodel',
name='locations',
),
migrations.AddField(
model_name='shopmodel',
name='city',
field=models.CharField(blank=True, max_length=120, null=True),
),
migrations.AddField(
model_name='shopmodel',
name='street',
field=models.CharField(blank=True, max_length=120, null=True),
),
]
| [
"patrykjan.matlak@gmail.com"
] | patrykjan.matlak@gmail.com |
68e645b36a8b348a8e63c0b8a5812c9a920e8704 | bda385488a7e20655c06a02b81abedfa131464d2 | /cvpods/cvpods/data/datasets/imagenetlt.py | 828264c668a9f5a6b92e80a7a229d1a8dec9987c | [
"Apache-2.0"
] | permissive | attilab97/DeFCN | a52c40a5c0ed39777ef3948d888699922e0ce42b | 811335665f62624d63fa28c6f13edd53ce5cc395 | refs/heads/main | 2023-07-31T12:32:56.927705 | 2021-10-07T08:03:12 | 2021-10-07T08:03:12 | 397,968,657 | 0 | 0 | Apache-2.0 | 2021-08-19T14:18:48 | 2021-08-19T14:18:48 | null | UTF-8 | Python | false | false | 4,369 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (c) BaseDetection, Inc. and its affiliates. All Rights Reserved
import logging
import os
import os.path as osp
from copy import deepcopy
import numpy as np
import torch
from cvpods.utils import Timer
from ..base_dataset import BaseDataset
from ..registry import DATASETS
from .paths_route import _PREDEFINED_SPLITS_IMAGENETLT
from .imagenet_categories import IMAGENET_CATEGORIES
"""
This file contains functions to parse ImageNet-format annotations into dicts in "cvpods format".
"""
logger = logging.getLogger(__name__)
@DATASETS.register()
class ImageNetLTDataset(BaseDataset):
def __init__(self, cfg, dataset_name, transforms=[], is_train=True):
super(ImageNetLTDataset, self).__init__(cfg, dataset_name, transforms, is_train)
image_root, label_file = _PREDEFINED_SPLITS_IMAGENETLT["imagenetlt"][self.name]
self.label_file = osp.join(self.data_root, label_file)
self.image_root = osp.join(self.data_root, image_root)
self.meta = self._get_metadata()
self.dataset_dicts = self._load_annotations()
self._set_group_flag()
self.eval_with_gt = cfg.TEST.get("WITH_GT", False)
def __getitem__(self, index):
"""Load data, apply transforms, converto to Instances.
"""
dataset_dict = deepcopy(self.dataset_dicts[index])
# read image
image = self._read_data(dataset_dict["file_name"])
annotations = dataset_dict.get("annotations", None)
# apply transfrom
images, annotations = self._apply_transforms(
image, annotations)
def process(dd, img, annos):
if isinstance(annos, list):
annos = [a for a in annos if a is not None]
# image shape: CHW / NCHW
# TODO: fix hack
if img.shape[0] == 3: # CHW
dd["image"] = torch.as_tensor(np.ascontiguousarray(img))
elif len(img.shape) == 3 and img.shape[-1] == 3:
dd["image"] = torch.as_tensor(
np.ascontiguousarray(img.transpose(2, 0, 1)))
elif len(img.shape) == 4 and img.shape[-1] == 3:
# NHWC -> NCHW
dd["image"] = torch.as_tensor(
np.ascontiguousarray(img.transpose(0, 3, 1, 2)))
return dd
if isinstance(images, dict):
ret = {}
# multiple input pipelines
for desc, item in images.items():
img, anno = item
ret[desc] = process(deepcopy(dataset_dict), img, anno)
return ret
else:
return process(dataset_dict, images, annotations)
def __len__(self):
return len(self.dataset_dicts)
def _get_metadata(self):
assert len(IMAGENET_CATEGORIES.keys()) == 1000
cat_ids = [v[0] for v in IMAGENET_CATEGORIES.values()]
assert min(cat_ids) == 1 and max(cat_ids) == len(cat_ids), \
"Category ids are not in [1, #categories], as expected"
# Ensure that the category list is sroted by id
imagenet_categories = sorted(IMAGENET_CATEGORIES.items(), key=lambda x: x[1][0])
thing_classes = [v[1][1] for v in imagenet_categories]
meta = {
"thing_classes": thing_classes,
"evaluator_type": _PREDEFINED_SPLITS_IMAGENETLT["evaluator_type"]["imagenetlt"],
}
return meta
def _load_annotations(self):
timer = Timer()
"""Constructs the imdb."""
# Compile the split data path
logger.info('{} data path: {}'.format(self.name, self.label_file))
# Construct the image db
imdb = []
f = open(self.label_file, "r")
for line in f.readlines():
img_path, label = line.strip().split(" ")
imdb.append({
"im_path": os.path.join(self.image_root, img_path),
"class": int(label),
})
f.close()
logging.info("Loading {} takes {:.2f} seconds.".format(self.label_file, timer.seconds()))
dataset_dicts = []
for i, item in enumerate(imdb):
dataset_dicts.append({
"image_id": i,
"category_id": item["class"],
"file_name": item["im_path"],
})
return dataset_dicts
| [
"attilab97@gmail.com"
] | attilab97@gmail.com |
ef5cedf36af3d5382bcecb579ae28b374f22bd7d | 7bd5ca970fbbe4a3ed0c7dadcf43ba8681a737f3 | /atcoder/arc/arc006/c.py | cb6d240266da7927722159dc9328cf026bb5874f | [] | no_license | roiti46/Contest | c0c35478cd80f675965d10b1a371e44084f9b6ee | c4b850d76796c5388d2e0d2234f90dc8acfaadfa | refs/heads/master | 2021-01-17T13:23:30.551754 | 2017-12-10T13:06:42 | 2017-12-10T13:06:42 | 27,001,893 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 332 | py | # -*- coding: utf-8 -*-
import sys,copy,math,heapq,itertools as it,fractions,re,bisect,collections as coll
N = int(raw_input())
a = []
for i in xrange(N):
w = int(raw_input())
for j in xrange(len(a)):
if a[j] >= w:
a[j] = w
break
else:
a.append(w)
a = sorted(a)
print len(a)
| [
"roiti46@gmail.com"
] | roiti46@gmail.com |
9f917f45e6c9b1456878ff5582c67a2029ec932a | 44bce4a2b7c7b2ace313ba7b18a697d0da135192 | /news/migrations/0004_article_tags.py | 8e971cef4865dc648a2bd648b6b489ca589bf93c | [] | no_license | Tuitoek/The-Moringa-Tribune | b5637f71cea7bfb9fd7c4d3db469587d64ba2baf | 6d69abef6048f9d4205fa37f4999c7fcfdcdf17d | refs/heads/master | 2020-04-27T09:33:10.990718 | 2019-03-07T05:18:37 | 2019-03-07T05:18:37 | 174,219,430 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 439 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2019-03-06 11:33
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('news', '0003_article'),
]
operations = [
migrations.AddField(
model_name='article',
name='tags',
field=models.ManyToManyField(to='news.tags'),
),
]
| [
"sjtoek@gmail.com"
] | sjtoek@gmail.com |
784fa6abdaea42679c74b37c9104130a6c8ab49a | ee7e42417d9d1e76b0e84e44dc6eb037adc3ebad | /.history/manage_20190703133205.py | 97195c48fcefe2c35d0dc6fce4c318d0b570fa33 | [] | no_license | web3-qa/pets-api | 4632127ee84a299f207d95754f409fc1e4c0013d | ee4a04e7291740ac8eb6147c305b41d27d5be29c | refs/heads/master | 2023-05-12T09:09:47.509063 | 2019-07-18T15:07:13 | 2019-07-18T15:07:13 | 197,611,701 | 0 | 0 | null | 2023-05-01T19:42:17 | 2019-07-18T15:19:59 | Python | UTF-8 | Python | false | false | 429 | py | import os
import sys
from os import path
from xmlrpc.client import Server
from virtualenv import create_bootstrap_script
from application import create_bootstrap_script
from flask_script import Manager, Server
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__),'..')))
app = create_app()
manager = Manager(app)
manager.add_command("runserver", Server
use_debugger = True,
use_reloader = True,
) | [
"dcolmer@statestreet.com"
] | dcolmer@statestreet.com |
287cf4242619df964ea93c253d77dd595ecfb795 | eea1d64c544de63733a1953d8378fccfa7f65ce0 | /docs/conf.py | c35099da27fc93fb4aaa2180a2e59033b18ef9c8 | [] | no_license | lxsmd1/docs | 588cbd935d22843d282e268680cdd8da9d0d5496 | 21d1530d661322cdfc7e723c5326e3d2fc45fa88 | refs/heads/master | 2020-03-30T07:02:31.384737 | 2018-09-29T23:20:50 | 2018-09-29T23:20:50 | 150,910,513 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,049 | py | # -*- coding: utf-8 -*-
#
# Configuration file for the Sphinx documentation builder.
#
# This file does only contain a selection of the most common options. For a
# full list see the documentation:
# http://www.sphinx-doc.org/en/master/config
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- Project information -----------------------------------------------------
project = 'Personal Docs'
copyright = '2018, lexac1'
author = 'lexac1'
# The short X.Y version
version = ''
# The full version, including alpha/beta/rc tags
release = '0.0.1'
# -- General configuration ---------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.todo',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
# source_suffix = '.md'
# Add recommonmark parser to support .md and .rst extensions
from recommonmark.parser import CommonMarkParser
source_parsers = {
'.md': CommonMarkParser,
}
source_suffix = ['.rst', '.md']
# The master toctree document.
master_doc = 'index'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = None
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
# html_theme = 'alabaster'
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
html_theme_options = {
'canonical_url': '',
'analytics_id': '',
'logo_only': False,
'display_version': True,
'prev_next_buttons_location': 'bottom',
'style_external_links': False,
# Toc options
'collapse_navigation': True,
'sticky_navigation': True,
'navigation_depth': 4,
'includehidden': True,
'titles_only': False
}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# The default sidebars (for documents that don't match any pattern) are
# defined by theme itself. Builtin themes are using these templates by
# default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
# 'searchbox.html']``.
#
# html_sidebars = {}
# -- Options for HTMLHelp output ---------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'PersonalDocsdoc'
# -- Options for LaTeX output ------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'PersonalDocs.tex', 'Personal Docs Documentation',
'lexac1', 'manual'),
]
# -- Options for manual page output ------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'personaldocs', 'Personal Docs Documentation',
[author], 1)
]
# -- Options for Texinfo output ----------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'PersonalDocs', 'Personal Docs Documentation',
author, 'PersonalDocs', 'One line description of project.',
'Miscellaneous'),
]
# -- Options for Epub output -------------------------------------------------
# Bibliographic Dublin Core info.
epub_title = project
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#
# epub_identifier = ''
# A unique identification for the text.
#
# epub_uid = ''
# A list of files that should not be packed into the epub file.
epub_exclude_files = ['search.html']
# -- Extension configuration -------------------------------------------------
# -- Options for todo extension ----------------------------------------------
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
| [
"lexac1@gmail.com"
] | lexac1@gmail.com |
6a931f060c2887b219f056e42a472d1d1183b493 | 9a104b460fdaf3eb46caa1553b3e9ff3f66412b0 | /lab2/settings.py | 00afb261df4abfe67925d32a684c7d627a9dd861 | [] | no_license | RakhmetovKuanysh/drf-api | 57ff7f3f4f872fc847b3265ea74c7e2e2217913b | 309b0eabf0b9db188fd8e0af7164b4db346e3cc4 | refs/heads/master | 2020-03-08T02:54:34.897252 | 2018-04-11T13:38:48 | 2018-04-11T13:38:48 | 127,874,259 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,132 | py | """
Django settings for lab2 project.
Generated by 'django-admin startproject' using Django 2.0.3.
For more information on this file, see
https://docs.djangoproject.com/en/2.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.0/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '5o1#lx!ix5*n$ak=h7hy$(!3$7dy^_#km__se#ot)wc*7$&8dj'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'todo',
'contacts',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'lab2.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'lab2.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.0/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.0/howto/static-files/
STATIC_URL = '/static/'
| [
"kuanyshrakhmetov@gmail.com"
] | kuanyshrakhmetov@gmail.com |
447599fa5f89e64b14c04b5958cf029d5b240d5b | 628470e06dd335f88413285ae34c978a0db22877 | /.config/.config/sublime-text-3/Backup/20190911075358/sublime_lib/st3/sublime_lib/encodings.py | 310f6b421dd24d98db13ebe9f82eeca4a89ba968 | [
"MIT"
] | permissive | tblain/dotfiles | 3811a5903ac0c44dbbc2fec6e7e07ba68681161a | 343796edea44c62fb128eac157b34f4917a5b1db | refs/heads/master | 2022-02-26T23:07:29.952074 | 2020-12-15T20:00:02 | 2020-12-15T20:00:02 | 168,537,217 | 1 | 0 | null | 2022-02-13T11:37:09 | 2019-01-31T14:20:52 | Python | UTF-8 | Python | false | false | 2,698 | py | from codecs import lookup
__all__ = ['from_sublime', 'to_sublime']
def from_sublime(name):
"""Translate `name` from a Sublime encoding name to a standard Python encoding name.
:raise ValueError: if `name` is not a Sublime encoding.
.. code-block:: python
>>> from_sublime("Western (Windows 1252)")
"cp1252"
.. versionchanged:: 1.3
Raise :exc:`ValueError` if `name` is not a Sublime encoding.
"""
try:
return SUBLIME_TO_STANDARD[name]
except KeyError:
raise ValueError("Unknown Sublime encoding {!r}.".format(name)) from None
def to_sublime(name):
"""Translate `name` from a standard Python encoding name to a Sublime encoding name.
:raise ValueError: if `name` is not a Python encoding.
.. code-block:: python
>>> to_sublime("cp1252")
"Western (Windows 1252)"
.. versionchanged:: 1.3
Raise :exc:`ValueError` if `name` is not a Python encoding.
"""
try:
return STANDARD_TO_SUBLIME[lookup(name).name]
except LookupError:
raise ValueError("Unknown Python encoding {!r}.".format(name)) from None
SUBLIME_TO_STANDARD = { # noqa: E121
"UTF-8": "utf-8",
"UTF-8 with BOM": "utf-8-sig",
"UTF-16 LE": "utf-16-le",
"UTF-16 LE with BOM": "utf-16",
"UTF-16 BE": "utf-16-be",
"UTF-16 BE with BOM": "utf-16",
"Western (Windows 1252)": "cp1252",
"Western (ISO 8859-1)": "iso8859-1",
"Western (ISO 8859-3)": "iso8859-3",
"Western (ISO 8859-15)": "iso8859-15",
"Western (Mac Roman)": "mac-roman",
"DOS (CP 437)": "cp437",
"Arabic (Windows 1256)": "cp1256",
"Arabic (ISO 8859-6)": "iso8859-6",
"Baltic (Windows 1257)": "cp1257",
"Baltic (ISO 8859-4)": "iso8859-4",
"Celtic (ISO 8859-14)": "iso8859-14",
"Central European (Windows 1250)": "cp1250",
"Central European (ISO 8859-2)": "iso8859-2",
"Cyrillic (Windows 1251)": "cp1251",
"Cyrillic (Windows 866)": "cp866",
"Cyrillic (ISO 8859-5)": "iso8859-5",
"Cyrillic (KOI8-R)": "koi8-r",
"Cyrillic (KOI8-U)": "koi8-u",
"Estonian (ISO 8859-13)": "iso8859-13",
"Greek (Windows 1253)": "cp1253",
"Greek (ISO 8859-7)": "iso8859-7",
"Hebrew (Windows 1255)": "cp1255",
"Hebrew (ISO 8859-8)": "iso8859-8",
"Nordic (ISO 8859-10)": "iso8859-10",
"Romanian (ISO 8859-16)": "iso8859-16",
"Turkish (Windows 1254)": "cp1254",
"Turkish (ISO 8859-9)": "iso8859-9",
"Vietnamese (Windows 1258)": "cp1258",
}
STANDARD_TO_SUBLIME = { # noqa: E121
standard_name: sublime_name
for sublime_name, standard_name in SUBLIME_TO_STANDARD.items()
}
STANDARD_TO_SUBLIME['utf-16'] = 'UTF-16 LE with BOM'
| [
"tblain@pm.me"
] | tblain@pm.me |
a60d9c6c7be0100fafe478b6ec339d494eeebbe0 | 3a6e0924b238c5146f37536170d5e14b9e4371ef | /challenges/05-Best Time to Buy and Sell Stock II/tests.py | 3ef30e6b2c804b9b9db7d5349dd04ff856e84d9d | [] | no_license | dcordero/Leetcode_30_day_challenge_April_2020 | d8de23c7943dcc14f065cc81670a530bec2d2a50 | 815594b574c2b154fa4b8ed24635537dec1354f9 | refs/heads/master | 2021-05-20T10:43:07.714819 | 2020-04-05T17:43:52 | 2020-04-05T17:43:52 | 252,255,816 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 581 | py | import unittest
from challenge import Solution
class TestSingleNumber(unittest.TestCase):
def setUp(self):
self.sut = Solution()
def test_a(self):
input = [1, 2, 3, 4, 5]
expected_output = 4
actual_output = self.sut.maxProfit(input)
self.assertEqual(actual_output, expected_output)
def test_b(self):
input = [7, 1, 5, 3, 6, 4]
expected_output = 7
actual_output = self.sut.maxProfit(input)
self.assertEqual(actual_output, expected_output)
if __name__ == '__main__':
unittest.main() | [
"david@corderoramirez.com"
] | david@corderoramirez.com |
7adc533ac355fd55ced129cc62a36e61832872be | bfe411f80806732bf537f73aed51f6f77c59776b | /academy/__manifest__.py | 8d425998d88ac89b7a80f1c77a5f6161eb818ef4 | [] | no_license | jgarciaodowd/odooSSH | 3192dbde33155dd2a40109e1b98bb62f109307fd | 70caee84c0293128f52835d34b9602f5c035773d | refs/heads/master | 2023-03-14T18:44:26.549497 | 2021-03-16T08:28:45 | 2021-03-16T08:28:45 | 347,403,248 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 847 | py | # -*- coding: utf-8 -*-
{
'name': "academy",
'summary': """
Short (1 phrase/line) summary of the module's purpose, used as
subtitle on modules listing or apps.openerp.com""",
'description': """
Long description of module's purpose
""",
'author': "My Company",
'website': "http://www.yourcompany.com",
# Categories can be used to filter modules in modules listing
# Check https://github.com/odoo/odoo/blob/14.0/odoo/addons/base/data/ir_module_category_data.xml
# for the full list
'category': 'Uncategorized',
'version': '0.1',
# any module necessary for this one to work correctly
'depends': ['base'],
# always loaded
'data': [
'views/openacademy.xml',
],
# only loaded in demonstration mode
'demo': [
'demo/demo.xml',
],
}
| [
"jgarciaodowd@danielcastelao.org"
] | jgarciaodowd@danielcastelao.org |
653215bb812cd32d782ea1ad92f4d09fa0a163bb | a2419e48f7a8ea87f71a96775eff2faa087e1603 | /TSIS 3/1.py | c845af3b363bb692b9d86b9bd869f3ef8c7db31b | [] | no_license | muhlik20033/muhlik20033 | 6bc7211074a97aed25229a22e8e729b29df494c6 | 60bcdda9eeb360de6c233ed45597e48a874c349f | refs/heads/main | 2023-06-24T01:43:44.032740 | 2021-07-29T08:45:04 | 2021-07-29T08:45:04 | 380,452,848 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 107 | py | a = list(input().split())
for i in range(len(a)):
x = int(a[i])
if x % 2 == 1:
print(x) | [
"noreply@github.com"
] | muhlik20033.noreply@github.com |
3e0f44d691a846473e3bd241080b059dfc70b086 | 8fd56e9b9dbc49c16b4a8afe1007f824183bb0ab | /Python_Stack/django/django_fundamentals/django_intro/dojo_and_ninjas/dojo_and_ninjas/settings.py | f0d9e05a39974011a73b5ec3195f1ab742aa2ea1 | [] | no_license | DiyarBarham/CodingDojo | b1cc7d7355f5fb139cb640168f78d6b7f91e372a | 0891e2c41ddbb9004eadfd2d54fe7f34d6d4ef58 | refs/heads/main | 2023-07-08T12:12:33.227932 | 2021-08-07T13:55:33 | 2021-08-07T13:55:33 | 363,878,740 | 0 | 2 | null | null | null | null | UTF-8 | Python | false | false | 3,293 | py | """
Django settings for dojo_and_ninjas project.
Generated by 'django-admin startproject' using Django 3.2.3.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.2/ref/settings/
"""
from pathlib import Path
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'django-insecure-53wty_=t8u#0m8n^_@cd965s9q2u-cj=wrylu)y0ty#e9=@#z_'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'dojo_and_ninjas_app',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'dojo_and_ninjas.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'dojo_and_ninjas.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': BASE_DIR / 'db.sqlite3',
}
}
# Password validation
# https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.2/howto/static-files/
STATIC_URL = '/static/'
# Default primary key field type
# https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
| [
"diyarbarham@gmail.com"
] | diyarbarham@gmail.com |
7731f5ccff17adc9926ec0deb2306373aaaccfa2 | ce92f28dd084c53d053098dfe46131b233b2512b | /cake/parsing/equation.py | d9e5b4b421f19a2fe3678c068833004e155eb88d | [
"MIT"
] | permissive | TheGenocides/Cake | defc3a82699313b5652f766b2948602ef01a939f | 01a6339b8ac34735651498446308891ecbcc6139 | refs/heads/main | 2023-08-25T00:52:52.205036 | 2021-10-09T14:01:22 | 2021-10-09T14:01:22 | 415,329,135 | 1 | 0 | MIT | 2021-10-09T14:17:16 | 2021-10-09T14:17:15 | null | UTF-8 | Python | false | false | 89 | py | class Equation(object):
def __init__(self):
raise NotImplementedError()
| [
"riotinghorse@Gmail.com"
] | riotinghorse@Gmail.com |
bff8c1e537389300427596e9c36f71fadaf71296 | 9b9a02657812ea0cb47db0ae411196f0e81c5152 | /repoData/mirumee-django-messages/allPythonContent.py | 9918ddf59a580b2c2fb4f59768881fac6fa2cc3f | [] | no_license | aCoffeeYin/pyreco | cb42db94a3a5fc134356c9a2a738a063d0898572 | 0ac6653219c2701c13c508c5c4fc9bc3437eea06 | refs/heads/master | 2020-12-14T14:10:05.763693 | 2016-06-27T05:15:15 | 2016-06-27T05:15:15 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 37,475 | py | __FILENAME__ = admin
from django import forms
from django.conf import settings
from django.utils.translation import gettext_lazy as _
from django.contrib import admin
from django.contrib.auth.models import User, Group
if "notification" in settings.INSTALLED_APPS:
from notification import models as notification
else:
notification = None
from django_messages.models import Message
class MessageAdminForm(forms.ModelForm):
"""
Custom AdminForm to enable messages to groups and all users.
"""
recipient = forms.ModelChoiceField(
label=_('Recipient'), queryset=User.objects.all(), required=True)
group = forms.ChoiceField(label=_('group'), required=False,
help_text=_('Creates the message optionally for all users or a group of users.'))
def __init__(self, *args, **kwargs):
super(MessageAdminForm, self).__init__(*args, **kwargs)
self.fields['group'].choices = self._get_group_choices()
def _get_group_choices(self):
return [('', u'---------'), ('all', _('All users'))] + \
[(group.pk, group.name) for group in Group.objects.all()]
class Meta:
model = Message
class MessageAdmin(admin.ModelAdmin):
form = MessageAdminForm
fieldsets = (
(None, {
'fields': (
'sender',
('recipient', 'group'),
),
}),
(_('Message'), {
'fields': (
'parent_msg',
'subject', 'body',
),
'classes': ('monospace' ),
}),
(_('Date/time'), {
'fields': (
'sent_at', 'read_at', 'replied_at',
'deleted_at',
),
'classes': ('collapse', 'wide'),
}),
)
list_display = ('subject', 'sender', 'recipient', 'sent_at', 'read_at')
list_filter = ('sent_at', 'sender', 'recipient')
search_fields = ('subject', 'body')
def save_model(self, request, obj, form, change):
"""
Saves the message for the recipient and looks in the form instance
for other possible recipients. Prevents duplication by excludin the
original recipient from the list of optional recipients.
When changing an existing message and choosing optional recipients,
the message is effectively resent to those users.
"""
obj.save()
if notification:
# Getting the appropriate notice labels for the sender and recipients.
if obj.parent_msg is None:
recipients_label = 'messages_received'
else:
recipients_label = 'messages_reply_received'
if form.cleaned_data['group'] == 'all':
# send to all users
recipients = User.objects.exclude(pk=obj.recipient.pk)
else:
# send to a group of users
recipients = []
group = form.cleaned_data['group']
if group:
group = Group.objects.get(pk=group)
recipients.extend(
list(group.user_set.exclude(pk=obj.recipient.pk)))
# create messages for all found recipients
for user in recipients:
obj.pk = None
obj.recipient = user
obj.save()
if notification:
# Notification for the recipient.
notification.send([user], recipients_label, {'message' : obj,})
admin.site.register(Message, MessageAdmin)
########NEW FILE########
__FILENAME__ = context_processors
from django_messages.models import inbox_count_for
def inbox(request):
if request.user.is_authenticated():
return {'messages_inbox_count': inbox_count_for(request.user)}
else:
return {}
########NEW FILE########
__FILENAME__ = fields
"""
Based on http://www.djangosnippets.org/snippets/595/
by sopelkin
"""
from django import forms
from django.forms import widgets
from django.contrib.auth.models import User
from django.utils.translation import ugettext_lazy as _
class CommaSeparatedUserInput(widgets.Input):
input_type = 'text'
def render(self, name, value, attrs=None):
if value is None:
value = ''
elif isinstance(value, (list, tuple)):
value = (', '.join([user.username for user in value]))
return super(CommaSeparatedUserInput, self).render(name, value, attrs)
class CommaSeparatedUserField(forms.Field):
widget = CommaSeparatedUserInput
def __init__(self, *args, **kwargs):
recipient_filter = kwargs.pop('recipient_filter', None)
self._recipient_filter = recipient_filter
super(CommaSeparatedUserField, self).__init__(*args, **kwargs)
def clean(self, value):
super(CommaSeparatedUserField, self).clean(value)
if not value:
return ''
if isinstance(value, (list, tuple)):
return value
names = set(value.split(','))
names_set = set([name.strip() for name in names])
users = list(User.objects.filter(username__in=names_set))
unknown_names = names_set ^ set([user.username for user in users])
recipient_filter = self._recipient_filter
invalid_users = []
if recipient_filter is not None:
for r in users:
if recipient_filter(r) is False:
users.remove(r)
invalid_users.append(r.username)
if unknown_names or invalid_users:
raise forms.ValidationError(_(u"The following usernames are incorrect: %(users)s") % {'users': ', '.join(list(unknown_names)+invalid_users)})
return users
########NEW FILE########
__FILENAME__ = forms
import datetime
from django import forms
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from django.utils.translation import ugettext_noop
from django.contrib.auth.models import User
import uuid
if "notification" in settings.INSTALLED_APPS:
from notification import models as notification
else:
notification = None
from django_messages.models import Message
from django_messages.fields import CommaSeparatedUserField
from django_messages.utils import format_quote
class MessageForm(forms.ModelForm):
    """
    Base form for composing a private message.

    ``save()`` creates one ``Message`` row owned by the sender (the
    "outbox" copy) plus one cloned row per recipient (their "inbox"
    copies), all sharing the same ``thread`` id.
    """
    recipients = CommaSeparatedUserField(label=_(u"Recipient"))
    subject = forms.CharField(label=_(u"Subject"))
    body = forms.CharField(label=_(u"Body"),
        widget=forms.Textarea(attrs={'rows': '12', 'cols':'55'}))
    class Meta:
        model = Message
        fields = ('recipients', 'subject', 'body',)
    def __init__(self, sender, *args, **kw):
        # ``sender`` is the User composing the message; ``recipient_filter``
        # is threaded through to the recipients field for validation.
        recipient_filter = kw.pop('recipient_filter', None)
        self.sender = sender
        super(MessageForm, self).__init__(*args, **kw)
        if recipient_filter is not None:
            self.fields['recipients']._recipient_filter = recipient_filter
    def create_recipient_message(self, recipient, message):
        """Clone ``message`` as the inbox copy owned by ``recipient``."""
        return Message(
            owner = recipient,
            sender = self.sender,
            to = recipient.username,
            recipient = recipient,
            subject = message.subject,
            body = message.body,
            thread = message.thread,
            sent_at = message.sent_at,
        )
    def get_thread(self, message):
        # Reuse an existing thread id or start a new one.
        return message.thread or uuid.uuid4().hex
    def save(self, commit=True):
        """Create the sender's copy plus per-recipient clones.

        Returns ``(instance, message_list)`` where ``instance`` is the
        sender's outbox copy and ``message_list`` the recipient clones.
        With ``commit=False`` nothing is persisted and no notifications
        are sent.
        """
        recipients = self.cleaned_data['recipients']
        instance = super(MessageForm, self).save(commit=False)
        instance.sender = self.sender
        instance.owner = self.sender
        instance.recipient = recipients[0]
        instance.thread = self.get_thread(instance)
        # The sender's own copy is never "unread".
        instance.unread = False
        instance.sent_at = datetime.datetime.now()
        message_list = []
        # clone messages in recipients inboxes
        for r in recipients:
            if r == self.sender: # skip duplicates
                continue
            msg = self.create_recipient_message(r, instance)
            message_list.append(msg)
        instance.to = ','.join([r.username for r in recipients])
        if commit:
            instance.save()
            for msg in message_list:
                msg.save()
                if notification:
                    notification.send([msg.recipient],
                        "messages_received", {'message': msg,})
        return instance, message_list
class ComposeForm(MessageForm):
    """
    A simple default form for private messages.

    Inherits all behaviour from ``MessageForm``; only the Meta is
    restated so the field list is explicit.
    """
    class Meta:
        model = Message
        fields = ('recipients', 'subject', 'body',)
class ReplyForm(MessageForm):
    """
    Form for replying to an existing message.

    Pre-fills recipients/subject/body from the parent message, keeps the
    parent's thread id, and links each saved copy back to the matching
    parent copy via ``parent_msg``.
    """
    class Meta:
        model = Message
        fields = ('recipients', 'subject', 'body',)
    def __init__(self, sender, message, *args, **kw):
        # ``message`` is the message being replied to; its sender becomes
        # the default recipient and its body is quoted into the initial data.
        self.parent_message = message
        initial = kw.pop('initial', {})
        initial['recipients'] = message.sender.username
        initial['body'] = self.quote_message(message)
        initial['subject'] = self.quote_subject(message.subject)
        kw['initial'] = initial
        super(ReplyForm, self).__init__(sender, *args, **kw)
    def quote_message(self, original_message):
        """Return the parent body quoted with ``> `` prefixes."""
        return format_quote(original_message.sender, original_message.body)
    def quote_subject(self, subject):
        """Prefix the parent subject with ``Re: ``."""
        return u'Re: %s' % subject
    def create_recipient_message(self, recipient, message):
        msg = super(ReplyForm, self).create_recipient_message(recipient, message)
        msg.replied_at = datetime.datetime.now()
        # find parent in recipient messages
        try:
            msg.parent_msg = Message.objects.get(
                owner=recipient,
                sender=message.recipient,
                recipient=message.sender,
                thread=message.thread)
        except (Message.DoesNotExist, Message.MultipleObjectsReturned):
            # message may be deleted
            pass
        return msg
    def get_thread(self, message):
        # Replies always continue the parent's thread.
        return self.parent_message.thread
    def save(self, commit=True):
        instance, message_list = super(ReplyForm, self).save(commit=False)
        instance.replied_at = datetime.datetime.now()
        instance.parent_msg = self.parent_message
        if commit:
            instance.save()
            for msg in message_list:
                msg.save()
                if notification:
                    notification.send([msg.recipient],
                        "messages_reply_received", {
                            'message': msg,
                            'parent_msg': self.parent_message,
                        })
        return instance, message_list
########NEW FILE########
__FILENAME__ = management
from django.db.models import get_models, signals
from django.conf import settings
from django.utils.translation import ugettext_noop as _
if "notification" in settings.INSTALLED_APPS:
from notification import models as notification
def create_notice_types(app, created_models, verbosity, **kwargs):
notification.create_notice_type("messages_received", _("Message Received"), _("you have received a message"), default=2)
notification.create_notice_type("messages_reply_received", _("Reply Received"), _("you have received a reply to a message"), default=2)
signals.post_syncdb.connect(create_notice_types, sender=notification)
else:
print "Skipping creation of NoticeTypes as notification app not found"
########NEW FILE########
__FILENAME__ = models
import datetime
from django.db import models
from django.conf import settings
from django.db.models import signals
from django.db.models.query import QuerySet
from django.contrib.auth.models import User
from django.utils.translation import ugettext_lazy as _
class MessageQueryset(QuerySet):
    """QuerySet with message-specific filter shortcuts."""
    def unread(self):
        # Uses the denormalised ``unread`` flag, not ``read_at``.
        return self.filter(unread=True)
class BaseMessageManager(models.Manager):
    """Default manager returning the custom ``MessageQueryset``."""
    def get_query_set(self):
        return MessageQueryset(self.model)
    def trash(self, messages):
        """
        Move a queryset of messages to the trash (bulk update of the
        ``deleted``/``deleted_at`` flags; no per-row signals fire).
        """
        messages.update(deleted=True, deleted_at=datetime.datetime.now())
    def send(self, messages):
        """
        Hook called after messages are created; intentionally a no-op
        here.  NOTE(review): presumably a subclass/extension point for
        delivery side effects -- confirm before relying on it.
        """
        pass
class Inbox(BaseMessageManager):
    """Manager exposing a user's non-deleted received messages."""
    def get_query_set(self):
        # Trash is excluded at the manager level.
        return super(Inbox, self).get_query_set().filter(deleted=False)
    def for_user(self, user):
        """
        Returns all messages that were received by the given user and are not
        marked as deleted.
        """
        return self.get_query_set().filter(owner=user, recipient=user)
class Outbox(BaseMessageManager):
    """Manager exposing a user's non-deleted sent messages."""
    def get_query_set(self):
        # Trash is excluded at the manager level.
        return super(Outbox, self).get_query_set().filter(deleted=False)
    def for_user(self, user):
        """
        Returns all messages that were sent by the given user and are not
        marked as deleted.
        """
        return self.get_query_set().filter(owner=user, sender=user)
class Trash(BaseMessageManager):
    """
    Manager exposing a user's deleted messages (both sent and received),
    the complement of Inbox/Outbox.
    """
    def get_query_set(self):
        return super(Trash, self).get_query_set().filter(deleted=True)
    def for_user(self, user):
        """
        Returns all messages that were either received or sent by the given
        user and are marked as deleted.
        """
        return self.get_query_set().filter(owner=user)
class Message(models.Model):
    """
    A private message from user to user.

    Each logical message is stored as multiple rows: one owned by the
    sender and one clone per recipient (see ``MessageForm.save``), all
    sharing a ``thread`` id.  Deletion is soft (``deleted`` flag) so a
    trash/undelete workflow is possible.

    NOTE: the mutator methods below only change attributes in memory;
    callers are responsible for calling ``save()``.
    """
    # The user whose mailbox this row lives in (sender or recipient copy).
    owner = models.ForeignKey(User, related_name='messages')
    to = models.CharField(max_length=255) # recipient usernames comma separated
    subject = models.CharField(_("Subject"), max_length=120)
    body = models.TextField(_("Body"))
    sender = models.ForeignKey(User, related_name='+', verbose_name=_("Sender"))
    recipient = models.ForeignKey(User, related_name='+', null=True, blank=True, verbose_name=_("Recipient"))
    # Shared id linking all copies/replies of one conversation.
    thread = models.CharField(max_length=64, null=True, blank=True, db_index=True)
    parent_msg = models.ForeignKey('self', related_name='next_messages', null=True, blank=True, verbose_name=_("Parent message"))
    sent_at = models.DateTimeField(_("sent at"), null=True, blank=True)
    unread = models.BooleanField(default=True, db_index=True)
    read_at = models.DateTimeField(_("read at"), null=True, blank=True)
    replied_at = models.DateTimeField(_("replied at"), null=True, blank=True)
    deleted = models.BooleanField(default=False, db_index=True)
    deleted_at = models.DateTimeField(_("Sender deleted at"), null=True, blank=True)
    objects = BaseMessageManager()
    inbox = Inbox()
    outbox = Outbox()
    trash = Trash()
    def is_unread(self):
        """returns whether the recipient has read the message or not"""
        return bool(self.read_at is None)
    def undelete(self):
        """Restore this copy from the trash (in memory only)."""
        self.deleted = False
        self.deleted_at = None
    def mark_read(self):
        """Flag the message as read now (in memory only)."""
        self.unread = False
        self.read_at = datetime.datetime.now()
    def mark_unread(self):
        """Clear the read state (in memory only)."""
        self.unread = True
        self.read_at = None
    def move_to_trash(self):
        """Soft-delete this copy (in memory only)."""
        self.deleted = True
        self.deleted_at = datetime.datetime.now()
    def replied(self):
        """returns whether the recipient has written a reply to this message"""
        return bool(self.replied_at is not None)
    def __unicode__(self):
        return self.subject
    def all_recipients(self):
        """Resolve the comma-separated ``to`` field back to User objects."""
        return User.objects.filter(username__in=self.to.split(','))
    @models.permalink
    def get_absolute_url(self):
        return ('messages_detail', None, {'message_id': self.pk})
    class Meta:
        ordering = ['-sent_at']
        verbose_name = _("Message")
        verbose_name_plural = _("Messages")
        db_table = 'messages_message'
def inbox_count_for(user):
    """Return how many unread inbox messages ``user`` has.

    Purely a count -- nothing is marked as seen.
    """
    unread_messages = Message.inbox.for_user(user).unread()
    return unread_messages.count()
# fallback for email notification if django-notification could not be found
if "notification" not in settings.INSTALLED_APPS:
from django_messages.utils import new_message_email
signals.post_save.connect(new_message_email, sender=Message)
########NEW FILE########
__FILENAME__ = signals
########NEW FILE########
__FILENAME__ = inbox
from django.template import Library, Node, TemplateSyntaxError
from django_messages.models import inbox_count_for
class InboxOutput(Node):
    """Template node emitting (or storing in a context variable) the
    unread-message count for the ``user`` present in the context."""

    def __init__(self, varname=None):
        self.varname = varname

    def render(self, context):
        try:
            count = inbox_count_for(context['user'])
        except (KeyError, AttributeError):
            # No 'user' in context, or an anonymous user without an inbox.
            count = ''
        if self.varname is None:
            return "%s" % count
        context[self.varname] = count
        return ""
def do_print_inbox_count(parser, token):
    """
    A templatetag to show the unread-count for a logged in user.
    Returns the number of unread messages in the user's inbox.
    Usage::
        {% load inbox %}
        {% inbox_count %}
        {# or assign the value to a variable: #}
        {% inbox_count as my_var %}
        {{ my_var }}
    """
    bits = token.contents.split()
    if len(bits) > 1:
        # Only the 3-token form "inbox_count as varname" is accepted.
        # NOTE: switched from the Python-2-only ``raise Exc, "msg"``
        # statement to the call form, which works on both Python 2 and 3.
        if len(bits) != 3:
            raise TemplateSyntaxError("inbox_count tag takes either no arguments or exactly two arguments")
        if bits[1] != 'as':
            raise TemplateSyntaxError("first argument to inbox_count tag must be 'as'")
        return InboxOutput(bits[2])
    else:
        return InboxOutput()
register = Library()
register.tag('inbox_count', do_print_inbox_count)
########NEW FILE########
__FILENAME__ = tests
import datetime
from django.test import TestCase
from django.contrib.auth.models import User
from django_messages.models import Message
class SendTestCase(TestCase):
    """Sending a message wires up sender/recipient and reverse accessors.

    NOTE(review): relies on ``sent_messages``/``received_messages``
    related names, which do not exist on the Message model defined
    earlier in this dump (it uses related_name='+' / 'messages') --
    this test file belongs to a different version of the app; confirm
    against the models actually in use.
    """
    def setUp(self):
        self.user1 = User.objects.create_user('user1', 'user1@example.com', '123456')
        self.user2 = User.objects.create_user('user2', 'user2@example.com', '123456')
        self.msg1 = Message(sender=self.user1, recipient=self.user2, subject='Subject Text', body='Body Text')
        self.msg1.save()
    def testBasic(self):
        self.assertEquals(self.msg1.sender, self.user1)
        self.assertEquals(self.msg1.recipient, self.user2)
        self.assertEquals(self.msg1.subject, 'Subject Text')
        self.assertEquals(self.msg1.body, 'Body Text')
        self.assertEquals(self.user1.sent_messages.count(), 1)
        self.assertEquals(self.user1.received_messages.count(), 0)
        self.assertEquals(self.user2.received_messages.count(), 1)
        self.assertEquals(self.user2.sent_messages.count(), 0)
class DeleteTestCase(TestCase):
    """Soft-deleting a message hides it from inbox/outbox; undeleting restores it.

    NOTE(review): uses ``Message.objects.outbox_for``/``inbox_for`` and the
    ``sender_deleted_at``/``recipient_deleted_at`` fields, which do not
    match the Message model defined earlier in this dump (it has manager
    attributes ``inbox``/``outbox`` and a single ``deleted_at``).  This
    test targets a different version of the app's API; confirm.
    """
    def setUp(self):
        self.user1 = User.objects.create_user('user3', 'user3@example.com', '123456')
        self.user2 = User.objects.create_user('user4', 'user4@example.com', '123456')
        self.msg1 = Message(sender=self.user1, recipient=self.user2, subject='Subject Text 1', body='Body Text 1')
        self.msg2 = Message(sender=self.user1, recipient=self.user2, subject='Subject Text 2', body='Body Text 2')
        self.msg1.sender_deleted_at = datetime.datetime.now()
        self.msg2.recipient_deleted_at = datetime.datetime.now()
        self.msg1.save()
        self.msg2.save()
    def testBasic(self):
        self.assertEquals(Message.objects.outbox_for(self.user1).count(), 1)
        self.assertEquals(Message.objects.outbox_for(self.user1)[0].subject, 'Subject Text 2')
        self.assertEquals(Message.objects.inbox_for(self.user2).count(),1)
        self.assertEquals(Message.objects.inbox_for(self.user2)[0].subject, 'Subject Text 1')
        #undelete
        self.msg1.sender_deleted_at = None
        self.msg2.recipient_deleted_at = None
        self.msg1.save()
        self.msg2.save()
        self.assertEquals(Message.objects.outbox_for(self.user1).count(), 2)
        self.assertEquals(Message.objects.inbox_for(self.user2).count(),2)
########NEW FILE########
__FILENAME__ = urls
from django.conf.urls.defaults import *
from django.views.generic.simple import redirect_to
from django_messages.views import *
# URLconf for django_messages.  The bare prefix redirects to the inbox;
# every named view maps 1:1 onto a function in django_messages.views.
urlpatterns = patterns('',
    url(r'^$', redirect_to, {'url': 'inbox/'}),
    url(r'^inbox/$', inbox, name='messages_inbox'),
    url(r'^outbox/$', outbox, name='messages_outbox'),
    url(r'^compose/$', compose, name='messages_compose'),
    url(r'^compose/(?P<recipient>[\+\w]+)/$', compose, name='messages_compose_to'),
    url(r'^reply/(?P<message_id>[\d]+)/$', reply, name='messages_reply'),
    url(r'^view/(?P<message_id>[\d]+)/$', view, name='messages_detail'),
    url(r'^delete/(?P<message_id>[\d]+)/$', delete, name='messages_delete'),
    url(r'^undelete/(?P<message_id>[\d]+)/$', undelete, name='messages_undelete'),
    url(r'^trash/$', trash, name='messages_trash'),
)
########NEW FILE########
__FILENAME__ = utils
# -*- coding:utf-8 -*-
import re
from django.conf import settings
from django.contrib.sites.models import Site
from django.utils.encoding import force_unicode
from django.utils.text import wrap
from django.utils.translation import ugettext_lazy as _
from django.template import Context, loader
from django.template.loader import render_to_string
# favour django-mailer but fall back to django.core.mail
if "mailer" in settings.INSTALLED_APPS:
from mailer import send_mail
else:
from django.core.mail import send_mail
def format_quote(sender, body):
    """Quote ``body`` for inclusion in a reply.

    The text is wrapped at 55 characters, each line is prefixed with
    ``> ``, and the result is embedded in a "<sender> wrote:" header.
    """
    quoted_lines = ["> %s" % line for line in wrap(body, 55).split('\n')]
    return _(u"%(sender)s wrote:\n%(body)s") % {
        'sender': sender,
        'body': '\n'.join(quoted_lines),
    }
def new_message_email(sender, instance, signal,
        subject_prefix=_(u'New Message: %(subject)s'),
        template_name="django_messages/new_message.html",
        default_protocol=None,
        *args, **kwargs):
    """
    This function sends an email and is called via Django's signal framework.
    Optional arguments:
        ``template_name``: the template to use
        ``subject_prefix``: prefix for the email subject.
        ``default_protocol``: default protocol in site URL passed to template
    """
    if default_protocol is None:
        default_protocol = getattr(settings, 'DEFAULT_HTTP_PROTOCOL', 'http')
    # Only notify on creation, never on updates (read/delete flag saves).
    if 'created' in kwargs and kwargs['created']:
        try:
            current_domain = Site.objects.get_current().domain
            subject = subject_prefix % {'subject': instance.subject}
            message = render_to_string(template_name, {
                'site_url': '%s://%s' % (default_protocol, current_domain),
                'message': instance,
            })
            if instance.recipient.email != "":
                send_mail(subject, message, settings.DEFAULT_FROM_EMAIL,
                    [instance.recipient.email,])
        except Exception, e:
            # Deliberate best-effort: a failed notification e-mail must not
            # abort the save that triggered this signal.
            pass #fail silently
########NEW FILE########
__FILENAME__ = views
# -*- coding:utf-8 -*-
import datetime
from django.http import Http404, HttpResponseRedirect
from django.shortcuts import render_to_response, get_object_or_404, redirect
from django.template import RequestContext
from django.contrib.auth.models import User
from django.contrib.auth.decorators import login_required
from django.contrib import messages
from django.utils.translation import ugettext as _
from django.utils.translation import ugettext_noop
from django.core.urlresolvers import reverse
from django.conf import settings
from django.db import transaction
from django.views.generic.list_detail import object_list, object_detail
from django_messages.models import Message
from django_messages.forms import ComposeForm, ReplyForm
from django_messages.utils import format_quote
@login_required
def message_list(request, queryset, paginate_by=25,
        extra_context=None, template_name=None):
    # Shared helper for the inbox/outbox/trash views: renders any message
    # queryset via Django's generic object_list view, exposing each item
    # as ``message`` in the template.
    return object_list(request, queryset=queryset, paginate_by=paginate_by,
        extra_context=extra_context, template_name=template_name,
        template_object_name='message')
@login_required
def inbox(request, template_name='django_messages/inbox.html', **kw):
    """
    Displays a list of received messages for the current user.
    """
    kw['template_name'] = template_name
    return message_list(request, Message.inbox.for_user(request.user), **kw)
@login_required
def outbox(request, template_name='django_messages/outbox.html', **kw):
    """
    Displays a list of sent messages for the current user.
    """
    kw['template_name'] = template_name
    return message_list(request, Message.outbox.for_user(request.user), **kw)
@login_required
def trash(request, template_name='django_messages/trash.html', **kw):
    """
    Displays a list of deleted messages.
    """
    kw['template_name'] = template_name
    return message_list(request, Message.trash.for_user(request.user), **kw)
@login_required
@transaction.commit_on_success
def compose(request, recipient=None, form_class=ComposeForm,
        template_name='django_messages/compose.html', success_url=None,
        recipient_filter=None, extra_context=None):
    """
    Displays and handles the ``form_class`` form to compose new messages.
    Required Arguments: None
    Optional Arguments:
        ``recipient``: username of a `django.contrib.auth` User, who should
                       receive the message, optionally multiple usernames
                       could be separated by a '+'
        ``form_class``: the form-class to use
        ``template_name``: the template to use
        ``success_url``: where to redirect after successful submission
        ``extra_context``: extra context dict
    """
    if request.method == "POST":
        form = form_class(request.user, data=request.POST,
            recipient_filter=recipient_filter)
        if form.is_valid():
            instance, message_list = form.save()
            Message.objects.send(message_list)
            messages.add_message(request, messages.SUCCESS, _(u"Message successfully sent."))
            return redirect(success_url or request.GET.get('next') or inbox)
    else:
        form = form_class(request.user, initial={'recipients': recipient})
    # BUGFIX: copy instead of mutating.  ``extra_context`` is often a
    # module-level dict in the URLconf; ``ctx.update`` on it would leak
    # 'form' (and any later keys) across requests.
    ctx = dict(extra_context) if extra_context else {}
    ctx['form'] = form
    return render_to_response(template_name, RequestContext(request, ctx))
@login_required
@transaction.commit_on_success
def reply(request, message_id, form_class=ReplyForm,
        template_name='django_messages/reply.html', success_url=None,
        recipient_filter=None, extra_context=None):
    """
    Prepares the ``form_class`` form for writing a reply to a given message
    (specified via ``message_id``).  Only the owner of the parent message
    may reply to it (enforced by the owner filter in the lookup).
    """
    parent = get_object_or_404(Message, pk=message_id, owner=request.user)
    if request.method == "POST":
        form = form_class(request.user, parent, data=request.POST,
            recipient_filter=recipient_filter)
        if form.is_valid():
            instance, message_list = form.save()
            Message.objects.send(message_list)
            messages.add_message(request, messages.SUCCESS, _(u"Message successfully sent."))
            return redirect(success_url or inbox)
    else:
        form = form_class(request.user, parent)
    # BUGFIX: copy instead of mutating the caller-supplied extra_context
    # (often a module-level dict), which would leak state across requests.
    ctx = dict(extra_context) if extra_context else {}
    ctx['form'] = form
    return render_to_response(template_name,
        RequestContext(request, ctx))
@login_required
@transaction.commit_on_success
def delete(request, message_id, success_url=None):
    """
    Soft-delete a message copy owned by the current user.

    The row is only flagged (``move_to_trash``), never removed, so it can
    be restored via ``undelete``; a cron-job is expected to prune rows
    that both parties have deleted.  A ``?next=`` query parameter, then
    ``success_url``, then the inbox decide where to redirect afterwards.
    """
    msg = get_object_or_404(Message, pk=message_id, owner=request.user)
    msg.move_to_trash()
    msg.save()
    messages.add_message(request, messages.SUCCESS, _(u"Message successfully deleted."))
    target = request.GET.get('next') or success_url or inbox
    return redirect(target)
@login_required
@transaction.commit_on_success
def undelete(request, message_id, success_url=None):
    """
    Restore a trashed message copy owned by the current user.
    """
    msg = get_object_or_404(Message, pk=message_id, owner=request.user)
    msg.undelete()
    msg.save()
    message_view = inbox # should be dependent on message box (inbox,outbox)
    messages.add_message(request, messages.SUCCESS,
        _(u"Message successfully recovered."))
    target = request.GET.get('next') or success_url or message_view
    return redirect(target)
@login_required
def view(request, message_id, template_name='django_messages/view.html',
        extra_context=None):
    """
    Shows a single message. ``message_id`` argument is required.
    The user may only see messages he owns (his own inbox/outbox copy);
    any other id raises a 404.
    If the message is unread, ``read_at`` is set to the current datetime.
    """
    message = get_object_or_404(Message, pk=message_id, owner=request.user)
    if message.is_unread():
        message.mark_read()
        message.save()
    # BUGFIX: copy instead of mutating the caller-supplied extra_context
    # (often a module-level dict), which would leak state across requests.
    ctx = dict(extra_context) if extra_context else {}
    ctx['message'] = message
    return render_to_response(template_name, RequestContext(request, ctx))
########NEW FILE########
__FILENAME__ = conf
# -*- coding: utf-8 -*-
#
# django-messages documentation build configuration file, created by
# sphinx-quickstart on Mon Jan 26 10:27:49 2009.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# The contents of this file are pickled, so don't put values in the namespace
# that aren't pickleable (module imports are okay, they're removed automatically).
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If your extensions are in another directory, add it here. If the directory
# is relative to the documentation root, use os.path.abspath to make it
# absolute, like shown here.
#sys.path.append(os.path.abspath('.'))
# General configuration
# ---------------------
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = []
# Add any paths that contain templates here, relative to this directory.
templates_path = ['.templates']
# The suffix of source filenames.
source_suffix = '.txt'
# The encoding of source files.
#source_encoding = 'utf-8'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'django-messages'
copyright = u'2009, Arne Brodowski'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.4'
# The full version, including alpha/beta/rc tags.
release = '0.4.3pre'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of documents that shouldn't be included in the build.
unused_docs = ['README',]
# List of directories, relative to source directory, that shouldn't be searched
# for source files.
exclude_trees = []
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# Options for HTML output
# -----------------------
html_theme_path = ['.',]
# The style sheet to use for HTML and HTML Help pages. A file of that name
# must exist either in Sphinx' static/ path, or in one of the custom paths
# given in html_static_path.
# html_style = 'default.css'
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
html_logo = './django-messages.png'
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['.static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
html_use_modindex = False
# If false, no index is generated.
html_use_index = False
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, the reST sources are included in the HTML build as _sources/<name>.
html_copy_source = False
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = ''
# Output file base name for HTML help builder.
htmlhelp_basename = 'django-messagesdoc'
# Options for LaTeX output
# ------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, document class [howto/manual]).
latex_documents = [
('index', 'django-messages.tex', ur'django-messages Documentation',
ur'Arne Brodowski', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_use_modindex = True
########NEW FILE########
| [
"dyangUCI@github.com"
] | dyangUCI@github.com |
8f629fd50ba1ec120f55b90d665fc66b65f77590 | 07a783c06bb4bb059e8c38589fe3f9bfc5a14b22 | /tests/builtins/test_iter.py | b06f6fbbe28a6ac4114a49d71dc32bf850c37594 | [
"BSD-3-Clause",
"MIT"
] | permissive | vishalsodani/batavia | a3c79b0342069fe6387eb3d7cc3ac3f4947d1842 | 690e5093da6653456381466e5fb9c153c295cb6b | refs/heads/master | 2021-01-22T08:10:07.777012 | 2016-09-03T15:14:52 | 2016-09-03T15:14:52 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 395 | py | from .. utils import TranspileTestCase, BuiltinFunctionTestCase
class IterTests(TranspileTestCase):
    # Placeholder: no iter()-specific transpile cases written yet.
    pass
class BuiltinIterFunctionTests(BuiltinFunctionTestCase, TranspileTestCase):
    """Auto-generated checks of the builtin ``iter`` against CPython."""
    # The base class generates one test per input type for each name here.
    functions = ["iter"]
    # Input types whose iter() support is not implemented yet; the
    # generated tests for these are skipped/expected to fail.
    not_implemented = [
        'test_bytearray',
        'test_bytes',
        'test_complex',
        'test_dict',
        'test_frozenset',
        'test_NotImplemented',
    ]
| [
"russell@keith-magee.com"
] | russell@keith-magee.com |
23c91c56f0fc9c4870d7e4c085d6e4f2840e365c | 945dc4d044065020eaca2ef81be407964681fcdc | /file1.py | d27435f233ae5fe7b9ea8243facbdc95e581694e | [] | no_license | jannusandeep/kernelproject | f36d0b075ef881a20e38f0769975ff150360e935 | cdffc9255fcb628013b1654739f3bdf3038b3547 | refs/heads/master | 2020-03-18T06:24:09.640797 | 2018-05-22T09:34:26 | 2018-05-22T09:34:26 | 134,393,184 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 30 | py | I am new to git, learning git
| [
"windows@gmail.com"
] | windows@gmail.com |
c2cd8ebb3aa2e587826eb4e5bc59ed28adcf2863 | 78158f942bd0e223f11016457a494c76b7a689b4 | /Supybot-plugins-20060723/Debian/test.py | 8fc0b5c08ea397d4e540c7300527f79cec23560a | [] | no_license | frumiousbandersnatch/supybot-plugins | ad607dd22529372d72c7dc0083100b5b5cdb719b | 8c7f16c0584bdf393a56dccff6b35a83142e5ece | refs/heads/master | 2021-01-17T05:46:10.917776 | 2020-03-12T11:42:20 | 2020-03-12T11:42:20 | 6,639,759 | 0 | 1 | null | 2020-03-12T11:42:22 | 2012-11-11T15:21:36 | Python | UTF-8 | Python | false | false | 3,848 | py | ###
# Copyright (c) 2003-2005, James Vega
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions, and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions, and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the author of this software nor the name of
# contributors to this software may be used to endorse or promote products
# derived from this software without specific prior written consent.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
###
import os
import time
from supybot.test import *
class DebianTestCase(PluginTestCase):
    """Network-dependent tests for the supybot Debian plugin."""
    plugins = ('Debian',)
    timeout = 100
    cleanDataDir = False
    fileDownloaded = False
    if network:
        def setUp(self, nick='test'):
            PluginTestCase.setUp(self)
            try:
                datadir = conf.supybot.directories.data
                if os.path.exists(datadir.dirize('Contents-i386.gz')):
                    pass
                else:
                    print
                    print "Downloading files, this may take awhile."
                    filename = datadir.dirize('Contents-i386.gz')
                    # NOTE(review): this loop only *waits* for the file to
                    # appear; nothing in the visible code starts a download,
                    # so it can spin forever -- presumably the plugin itself
                    # fetches the file in the background.  Confirm.
                    while not os.path.exists(filename):
                        time.sleep(1)
                    print "Download complete."
                print "Starting test ..."
                self.fileDownloaded = True
            except KeyboardInterrupt:
                pass
    def testDebBugNoHtml(self):
        self.assertNotRegexp('debian bug 287792', r'\<em\>')
    def testDebversion(self):
        self.assertHelp('debian version')
        self.assertRegexp('debian version lakjdfad',
                          r'^No package.*\(all\)')
        self.assertRegexp('debian version unstable alkdjfad',
                          r'^No package.*\(unstable\)')
        self.assertRegexp('debian version gaim',
                          r'\d+ matches found:.*gaim.*\(stable')
        self.assertRegexp('debian version linux-wlan',
                          r'\d+ matches found:.*linux-wlan.*')
        self.assertRegexp('debian version --exact linux-wlan',
                          r'^No package.*\(all\)')
        self.assertError('debian version unstable')
    def testDebfile(self):
        self.assertHelp('file')
        # NOTE(review): this guard is a no-op (``pass``); the assertion
        # below runs even when the data file was never downloaded.
        if not self.fileDownloaded:
            pass
        self.assertRegexp('file --exact bin/gaim', r'net/gaim')
    def testDebincoming(self):
        self.assertNotError('incoming')
    def testDebianize(self):
        self.assertNotError('debianize supybot')
    def testDebstats(self):
        self.assertNotError('stats supybot')
# vim:set shiftwidth=4 softtabstop=4 expandtab textwidth=79:
| [
"gsf747@gmail.com"
] | gsf747@gmail.com |
dbfee9b9af6cb812be348a4573cc993fd8d52d08 | c7a6f8ed434c86b4cdae9c6144b9dd557e594f78 | /ECE364/.PyCharm40/system/python_stubs/348993582/gtk/_gtk/TreeSortable.py | 37cf02c4e1bb9f31abf693a5bf90d28a3cd5e6da | [] | no_license | ArbalestV/Purdue-Coursework | 75d979bbe72106975812b1d46b7d854e16e8e15e | ee7f86145edb41c17aefcd442fa42353a9e1b5d1 | refs/heads/master | 2020-08-29T05:27:52.342264 | 2018-04-03T17:59:01 | 2018-04-03T17:59:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,309 | py | # encoding: utf-8
# module gtk._gtk
# from /usr/lib64/python2.6/site-packages/gtk-2.0/gtk/_gtk.so
# by generator 1.136
# no doc
# imports
import atk as __atk
import gio as __gio
import gobject as __gobject
import gobject._gobject as __gobject__gobject
class TreeSortable(__gobject.GInterface):
    """Auto-generated IDE stub for gtk.TreeSortable; all bodies are placeholders."""
    # no doc
    @classmethod
    def do_has_default_sort_func(cls, *args, **kwargs): # real signature unknown
        pass
    @classmethod
    def do_set_sort_column_id(cls, *args, **kwargs): # real signature unknown
        pass
    @classmethod
    def do_sort_column_changed(cls, *args, **kwargs): # real signature unknown
        pass
    def get_sort_column_id(self, *args, **kwargs): # real signature unknown
        pass
    def has_default_sort_func(self, *args, **kwargs): # real signature unknown
        pass
    def set_default_sort_func(self, *args, **kwargs): # real signature unknown
        pass
    def set_sort_column_id(self, *args, **kwargs): # real signature unknown
        pass
    def set_sort_func(self, *args, **kwargs): # real signature unknown
        pass
    def sort_column_changed(self, *args, **kwargs): # real signature unknown
        pass
    def __init__(self, *args, **kwargs): # real signature unknown
        pass
    __gtype__ = None # (!) real value is ''
| [
"pkalita@princeton.edu"
] | pkalita@princeton.edu |
7be3d8ec2df095d1c6a98a5253b133840087eb33 | 0a0df65e7efee5a48a6b83fa83f84e3588c89a69 | /blog/api/views.py | 6c556b2a8db422b2999e7ecab4fb8bc9ea7016a1 | [] | no_license | dunfred/MegaTv-Web-App | a39abf125f8035ec1422652718bf70cb5c397b09 | 7064cb7455893a8f932ce6cd466c670a3339ba26 | refs/heads/master | 2022-12-11T08:47:22.914662 | 2021-07-29T08:52:09 | 2021-07-29T08:52:09 | 237,991,656 | 3 | 0 | null | 2022-12-08T07:03:59 | 2020-02-03T15:01:23 | JavaScript | UTF-8 | Python | false | false | 1,292 | py | from rest_framework.response import Response
from rest_framework.generics import ListCreateAPIView, ListAPIView
from rest_framework.views import APIView
from django.http import JsonResponse
from .serializer import CommentSerializer
from blog.models import Post, Comment, Author
from django.shortcuts import get_object_or_404
class ApiForAllCommentsView(APIView):
    """GET: list every comment. POST: create a comment, creating the Author
    row on the fly when the author name is new."""

    def get(self, request, *args, **kwargs):
        qs = Comment.objects.all()
        serializer = CommentSerializer(qs, many=True)
        return Response(data=serializer.data)

    def post(self, request, *args, **kwargs):
        # dict(request.data) on a form-encoded QueryDict maps each key to a
        # *list* of values; the [0] indexing below assumes single-valued
        # fields -- TODO confirm JSON bodies are never posted here.
        qr = dict(request.data)
        # NOTE(review): this filters on the raw list value, not the plain
        # string -- verify it matches as intended.
        author = Author.objects.filter(author=qr['author'])
        if len(author) == 1:
            qr['author_id'] = int(author[0].id)
        else:
            # Unknown (or ambiguous) author name: create a fresh Author row.
            new_author = Author(author=qr['author'])
            new_author.save()
            qr['author_id'] = int(new_author.id)
        qr['author'] = qr['author'][0]
        qr['body'] = qr['body'][0]
        qr['post'] = int(qr['post'][0])
        serializer = CommentSerializer(data=qr)
        if serializer.is_valid():
            serializer.save()
            return Response(serializer.data)
        return Response(serializer.errors)
| [
"donklenam2@gmail.com"
] | donklenam2@gmail.com |
31a19fe8af08abd87b739ef20aafc4f44fe5f260 | 97c5fe6a54636de9b056719ea62ac1de4e76ebdc | /src/newsletter/views.py | 04f05109ae7071108c1fdf1cfd58ac97df81bd14 | [
"MIT"
] | permissive | EdwardBetts/matchmaker | 937ece7acbfd1fcb57ab59cd13b16c3cd67d54f3 | ec56d18c6af8ca904325deca3be56484d3415c70 | refs/heads/master | 2020-12-11T01:50:10.773983 | 2016-01-26T16:53:29 | 2016-01-26T16:53:29 | 56,478,725 | 0 | 0 | null | 2016-04-18T05:11:12 | 2016-04-18T05:11:12 | null | UTF-8 | Python | false | false | 3,301 | py | from django.conf import settings
from django.core.mail import send_mail
from django.shortcuts import render, get_object_or_404
from django.views.generic.base import TemplateView
from django.views.generic.detail import DetailView
from django.views.generic.edit import ModelFormMixin
from likes.models import Like
from matches.models import Match
from profiles.models import UserJob
from questions.mixins import PostQuestionMixin
from questions.models import UserAnswer, Question, LEVELS
from questions.forms import QuestionForm
from .forms import ContactForm, SignUpForm
from .models import SignUp
class homeTemplateView(PostQuestionMixin, ModelFormMixin, TemplateView):
    """Dashboard page: one random unanswered question plus match/job context."""
    template_name = "home.html"
    form_class = QuestionForm

    def get_context_data(self, *args, **kwargs):
        if self.request.user.is_authenticated():
            # Pick one random question the user has not answered yet.
            self.object = Question.objects.get_unanswered_questions(self.request.user).order_by("?").first()
        # NOTE(review): self.object is only assigned for authenticated users;
        # anonymous requests would hit an AttributeError below -- confirm the
        # view is login-protected.
        positions = []
        locations = []
        employers = []
        context = super(homeTemplateView, self).get_context_data(*args, **kwargs)
        # NOTE(review): if get_match_all() returns a (matches, users) pair,
        # the [:6] slice on that 2-tuple is a no-op -- confirm intent.
        matches, users = Match.objects.get_match_all(self.request.user)[:6]
        jobs = UserJob.objects.filter(user__in=users).order_by("?")[:6]
        user_like = get_object_or_404(Like, user=self.request.user)
        context["answers"] = UserAnswer.objects.filter(
            user=self.request.user,
            question=self.object).first()
        if jobs:
            # Collect distinct positions/locations/employers for display.
            for job in jobs:
                if job.position not in positions:
                    positions.append(job.position)
                if job.location not in locations:
                    locations.append(job.location)
                if job.employer not in employers:
                    employers.append(job.employer)
        context["positions"] = positions
        context["locations"] = locations
        context["employers"] = employers
        context["liked_users"] = user_like.liked_users.all()
        context["matches_list"] = matches
        context["question_object"] = self.object
        context["levels"] = LEVELS
        context["form"] = self.form_class
        return context
def contact(request):
    """Render the contact form; on a valid POST, e-mail the message to the
    site operators (fail_silently, so delivery errors are swallowed)."""
    title = 'Contact Us'
    title_align_center = True
    form = ContactForm(request.POST or None)
    if form.is_valid():
        form_email = form.cleaned_data.get("email")
        form_message = form.cleaned_data.get("message")
        form_full_name = form.cleaned_data.get("full_name")
        subject = 'Site contact form'
        from_email = settings.EMAIL_HOST_USER
        to_email = [from_email, 'youotheremail@email.com']
        contact_message = "%s: %s via %s" % (
            form_full_name,
            form_message,
            form_email)
        # Placeholder HTML body; the plain-text contact_message carries the
        # actual content.
        some_html_message = """
        <h1>hello</h1>
        """
        send_mail(subject,
                  contact_message,
                  from_email,
                  to_email,
                  html_message=some_html_message,
                  fail_silently=True)
    context = {
        "form": form,
        "title": title,
        "title_align_center": title_align_center,
    }
    return render(request, "forms.html", context)
| [
"eddie.valv@gmail.com"
] | eddie.valv@gmail.com |
f41ef0801111314ead7d07efb93cd9336de689ed | c63629e0e0477aeb6ff2e8751d00d9985500e7cd | /Mmani/__init__.py | de7539b6d20d2871550662c0a7e3757fabc33446 | [
"BSD-2-Clause"
] | permissive | Jerryzcn/Mmani | 1389016e68eeac05d0d735aa153320913467d2a4 | 732d3c7581fddb762390699216b724a1e0890508 | refs/heads/master | 2020-12-28T22:46:17.515828 | 2016-01-26T00:40:13 | 2016-01-26T00:40:13 | 49,289,748 | 0 | 0 | null | 2016-01-26T00:44:02 | 2016-01-08T18:41:59 | Python | UTF-8 | Python | false | false | 66 | py | """Mmani: Scalable Manifold Learning"""
__version__ = "0.1.dev0"
| [
"jakevdp@gmail.com"
] | jakevdp@gmail.com |
db48c0c93edddecd96744893ce7b03645ca4f5b2 | 4b79b37003b6fdf236857cef3e6c8304ea30b0f3 | /timerbot.py | 7229ca8de49416741c49cafeb0f34b31a2827c6e | [] | no_license | SinyTim/TelegramBot | d69815104ec43e351f3aecc1163c2572a44f85c0 | 0605d9a4ea3eb4d7b97d664bbee072cb788ae608 | refs/heads/master | 2020-03-26T05:22:33.279776 | 2018-08-13T08:51:45 | 2018-08-13T08:51:45 | 144,332,948 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,311 | py | from telegram.ext import CommandHandler
def get_commands():
    """Return the help text listing this module's timer commands."""
    return "Timer commands:\n/timer <seconds>\n/stoptimer\n"
def add_handlers(dispatcher):
    """Register the /timer and /stoptimer command handlers on the dispatcher."""
    timer_handler = CommandHandler("timer", timer, pass_args=True, pass_job_queue=True, pass_user_data=True)
    dispatcher.add_handler(timer_handler)
    stoptimer_handler = CommandHandler("stoptimer", stoptimer, pass_user_data=True)
    dispatcher.add_handler(stoptimer_handler)
def timer(bot, update, args, job_queue, user_data):
    """`/timer <seconds>`: schedule a one-shot alarm in the current chat."""
    chat_id = update.message.chat_id
    try:
        duration = int(args[0])
        if duration > 0:
            # Remember the job so /stoptimer can cancel it later.
            job = job_queue.run_once(alarm, when=duration, context=chat_id)
            user_data['job'] = job
            update.message.reply_text('Timer set.')
        else:
            update.message.reply_text("Time must be positive.")
    except (IndexError, ValueError):
        # Missing or non-integer argument.
        update.message.reply_text("Timer is not set, try again.")
def stoptimer(bot, update, user_data):
if 'job' in user_data:
job = user_data['job']
job.schedule_removal()
del user_data['job']
update.message.reply_text('Timer stopped.')
else:
update.message.reply_text('No active timer.')
def alarm(bot, job):
    """Job-queue callback: announce the expired timer in its chat."""
    chat_id = job.context
    bot.send_message(chat_id, text='Timer!!!')
| [
"SinyTim@mail.ru"
] | SinyTim@mail.ru |
176126b9efcae477ed9a23b33396a77d90f3bbca | 8889e97b82d76cbdfb1aee3096ad9c6348f7eb7a | /actions/md.py | b83c9fdb6ec28dba054f97472134825105c517be | [
"MIT"
] | permissive | Mymineplays/ExtendedKawaii-DiscordBot | 8ac631b5b76f81b5bc38dbfb7915a0015df6d41d | 4f9304e45414a9cb99492f7aafa31c9bd6730486 | refs/heads/master | 2022-12-11T09:46:54.301465 | 2020-08-06T09:44:41 | 2020-08-06T09:44:41 | 294,209,980 | 0 | 0 | MIT | 2020-09-09T19:29:55 | 2020-09-09T19:29:54 | null | UTF-8 | Python | false | false | 482 | py | commands = ["md", "markdown"]
requires_mention = False
accepts_mention = False
description = "How to MD"
async def execute(message):
msg = ""
msg += "__**Markdown**__\n\n"
msg += "``*Nachricht*`` => *Nachricht*\n"
msg += "``**Nachricht**`` => **Nachricht**\n"
msg += "``__Nachricht__`` => __Nachricht__\n"
msg += "\nDie Symbole können auch kombiniert werden um zum Beispiel unterstrichenen & kursiven Text zu erzeugen"
await message.channel.send(msg)
| [
"vincentscode@gmail.com"
] | vincentscode@gmail.com |
d9ba184c15bf42e9d66972ef3f3696ecbb251ac5 | 166e502744521a566c30ade97cb9ac4113d0d5f2 | /test/browser_test_implicit.py | 6599200ec402b1430d8e0c22278090d2e0a00b92 | [] | no_license | bellkev/docker-webdriver | 2e5030908e7f97e47e31e8c34f7787119d31f30d | 839dd805ba7139b53b25273eb657d4e4400597ed | refs/heads/master | 2016-08-11T21:41:42.109992 | 2015-12-10T04:08:58 | 2015-12-10T04:08:58 | 46,310,011 | 8 | 4 | null | 2016-02-03T00:28:09 | 2015-11-16T23:34:09 | Python | UTF-8 | Python | false | false | 112 | py | from selenium import webdriver
def magic_browser_framework():
    # Launch a local Chrome WebDriver session.
    driver = webdriver.Chrome()
    # NOTE(review): WebDriver has no foo(); this raises AttributeError.
    # Presumably intentional for exercising error behavior in this test
    # harness -- confirm.
    driver.foo()
| [
"kevin.a.bell@gmail.com"
] | kevin.a.bell@gmail.com |
398f920fea3197e7331d12b015ea69d1d67a1f7b | f50f60d45ddca6ac9e2d409d926282fa9703d14e | /shop/mainapp/models.py | 585b9d6f082265655345665a76f255d545d40cc2 | [] | no_license | Dudichevis/Django_online_store | 89742ba6eb505b5b6d52bf4ae1d4c968c2e015e0 | d2b0435e3d85f62c6194620f2cec6f4d366dace6 | refs/heads/master | 2023-02-14T19:39:50.298140 | 2021-01-11T12:19:22 | 2021-01-11T12:19:22 | 328,347,556 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,945 | py | from django.db import models
import sys
from PIL import Image
from io import BytesIO
from django.contrib.auth import get_user_model
from django.contrib.contenttypes.models import ContentType
from django.core.files.uploadedfile import InMemoryUploadedFile
from django.urls import reverse
from django.utils import timezone
User = get_user_model()
class Category(models.Model):
    """Product category; ``slug`` drives the category detail URL."""
    name = models.CharField(max_length=255, verbose_name="Имя категории")
    slug = models.SlugField(unique=True)

    def __str__(self):
        return self.name

    def get_absolute_url(self):
        return reverse('category_detail', kwargs={'slug': self.slug})

    def get_fields_for_filter_in_template(self):
        """Return the filterable feature definitions of this category as dicts.

        Bug fixes: QuerySet has no ``.value()`` -- use ``.values()``; and the
        ProductFeatures field is named ``filter_measure`` ('feature_measure'
        does not exist and would raise FieldError).
        """
        return ProductFeatures.objects.filter(
            category=self,
            use_in_filter=True
        ).prefetch_related('category').values(
            'feature_key', 'filter_measure', 'feature_name', 'filter_type'
        )
class Product(models.Model):
    """A catalog product; uploaded images are re-encoded to 200x200 JPEG."""

    # NOTE(review): these resolution/size limits are not enforced anywhere in
    # save(); kept for backwards compatibility -- confirm whether validation
    # was intended.
    Min_resolution = (400, 400)
    Max_resolution = (800, 800)
    Max_image_size = 3145728

    category = models.ForeignKey('Category', verbose_name='Категория', on_delete=models.CASCADE)
    title = models.CharField(max_length=250, verbose_name='Название продукта')
    slug = models.SlugField(unique=True)
    price = models.DecimalField(max_digits=9, decimal_places=2, verbose_name='Цена')
    image = models.ImageField(verbose_name='Изображение')
    description = models.TextField(verbose_name='Описание', null=True)

    def __str__(self):
        return self.title

    def get_model_name(self):
        """Lower-cased class name, used for generic template/url lookups."""
        return self.__class__.__name__.lower()

    def save(self, *args, **kwargs):
        """Normalize the uploaded image to a 200x200 RGB JPEG before saving."""
        image = self.image
        img = Image.open(image)
        new_img = img.convert('RGB')
        resized_new_img = new_img.resize((200, 200), Image.ANTIALIAS)
        filestream = BytesIO()
        resized_new_img.save(filestream, 'JPEG', quality=90)
        filestream.seek(0)
        name = '{}.{}'.format(*self.image.name.split('.'))
        # Bug fix: the MIME content type must be 'image/jpeg' (was the
        # reversed 'jpeg/image').
        self.image = InMemoryUploadedFile(
            filestream, 'ImageField', name, 'image/jpeg', sys.getsizeof(filestream), None
        )
        super().save(*args, **kwargs)

    def get_absolute_url(self):
        return reverse('product_detail', kwargs={'slug': self.slug})
class ProductFeatures(models.Model):
    # Per-category product characteristic definition plus its filter-UI
    # rendering settings.
    RADIO ='radio'
    CHECKBOX = 'checkbox'
    FILTER_RADIO_CHOICES = (
        (RADIO, 'Радиокнопка'),
        (CHECKBOX, 'Чекбокс')
    )
    feature_key = models.CharField(max_length=100, verbose_name='Ключ характеристики')
    feature_name = models.CharField(max_length=255, verbose_name='Наименование характеристики')
    category = models.ForeignKey(Category, verbose_name='Категория', on_delete=models.CASCADE)
    postfix_for_value = models.CharField(max_length=20, null=True, blank=True, verbose_name='Постфикс для значения',
        help_text=f'Например для характеристики "Часы работы" к значению можно добавить постфикс "часов",'
        f' и как результат - значение "10 часов"'
    )
    use_in_filter = models.BooleanField(default=False, verbose_name='Использовать в фильтрации товаров в шаблоне')
    filter_type = models.CharField(
        max_length=20, verbose_name='Тип фильтра',
        default=CHECKBOX, choices=FILTER_RADIO_CHOICES
    )
    # NOTE(review): Category.get_fields_for_filter_in_template() queries a
    # 'feature_measure' field, but this one is named filter_measure --
    # confirm which name is intended.
    filter_measure = models.CharField(
        max_length=50, verbose_name='Единица измерения для фильтра',
        help_text='Единица измерения для конкретного фильтра. Например "Частота процессора (Ghz). Единицей измерения будет информация в скобках"'
    )

    def __str__(self):
        return f'Категория - "{self.category.name}" | Характеристика - "{self.feature_name}"'
class ProductFeatureValidators(models.Model):
    """Allowed value of a feature within a category (admin-side validation)."""
    category = models.ForeignKey(Category, verbose_name='Категория', on_delete=models.CASCADE)
    feature = models.ForeignKey(ProductFeatures, verbose_name='Характеристика', null=True, blank=True, on_delete=models.CASCADE)
    feature_value = models.CharField(max_length=255, unique=True, null=True, blank=True, verbose_name='Значение характеристики')

    def __str__(self):
        if not self.feature:
            return f'Валидатор категории "{self.category.name}" - характеристика не выбрана'
        # Bug fix: the model has no 'feature_name' attribute; the name lives
        # on the related ProductFeatures row (was an AttributeError).
        return f'Валидатор категории "{self.category.name}" |' \
               f'Характеристика - "{self.feature.feature_name}" |' \
               f'Значение - "{self.feature_value}"'
class CartProduct(models.Model):
    # One product line inside a cart; final_price is derived in save().
    user = models.ForeignKey('Customer', verbose_name='Покупатель', on_delete=models.CASCADE)
    cart = models.ForeignKey('Cart', verbose_name='Корзина', on_delete=models.CASCADE, related_name='related_products')
    product = models.ForeignKey(Product, verbose_name='Товар', on_delete=models.CASCADE)
    qty = models.PositiveIntegerField(default=1)
    final_price = models.DecimalField(max_digits=9, decimal_places=2, verbose_name='Общая цена')

    def __str__(self):
        return "Продукт: {} (для корзины)".format(self.product.title)

    def save(self, *args, **kwargs):
        # Keep the line total in sync with quantity and unit price.
        self.final_price = self.qty * self.product.price
        super().save(*args, **kwargs)
class Cart(models.Model):
    # Shopping cart; aggregates CartProduct lines. for_anonymous_user marks
    # the shared cart used before login; in_order marks carts already checked
    # out.
    owner = models.ForeignKey('Customer', null=True, verbose_name='Владелец товара', on_delete=models.CASCADE)
    products = models.ManyToManyField(CartProduct, blank=True, related_name='related_cart')
    total_products = models.PositiveIntegerField(default=0)
    final_price = models.DecimalField(max_digits=9, default=0, decimal_places=2, verbose_name='Итоговая цена')
    in_order = models.BooleanField(default=False)
    for_anonymous_user = models.BooleanField(default=False)

    def __str__(self):
        return str(self.id)
class Customer(models.Model):
    # Shop customer profile attached to a Django auth user.
    user = models.ForeignKey(User, verbose_name='Пользователь', on_delete=models.CASCADE)
    phone = models.CharField(max_length=20, verbose_name='Номер телефона', null=True, blank=True)
    address = models.CharField(max_length=255, verbose_name='Адрес', null=True, blank=True)
    orders = models.ManyToManyField('Order', verbose_name='Заказы покупателя', blank=True, related_name='related_order')

    def __str__(self):
        return "Покупатель: {} {}".format(self.user.first_name, self.user.last_name)
class Order(models.Model):
    # A checkout order with its fulfilment status and delivery details.
    STATUS_NEW = 'new'
    STATUS_IN_PROGRESS = 'in_progress'
    STATUS_READY = 'is_ready'
    STATUS_COMPLETED = 'completed'
    STATUS_PAYED = 'payed'

    BUYING_TYPE_SELF = 'self'
    BUYING_TYPE_DELIVERY = 'delivery'

    STATUS_CHOICES = (
        (STATUS_PAYED, 'Заказ оплачен'),
        (STATUS_NEW, 'Новый заказ'),
        (STATUS_IN_PROGRESS, 'Заказ в обработке'),
        (STATUS_READY, 'Заказ готов'),
        (STATUS_COMPLETED, 'Заказ выполнен')
    )

    BUYING_TYPE_CHOICES = (
        (BUYING_TYPE_SELF, 'Самовывоз'),
        (BUYING_TYPE_DELIVERY, 'Доставка')
    )

    customer = models.ForeignKey(Customer, verbose_name='Покупатель', related_name='related_orders', on_delete=models.CASCADE)
    first_name = models.CharField(max_length=255, verbose_name='Имя')
    last_name = models.CharField(max_length=255, verbose_name='Фамилия')
    phone = models.CharField(max_length=20, verbose_name='Номер телефона')
    cart = models.ForeignKey(Cart, verbose_name='Корзина', on_delete=models.CASCADE, null=True, blank=True)
    address = models.CharField(max_length=255, verbose_name='Адрес', null=True, blank=True)
    status = models.CharField(max_length=100, verbose_name='Статус заказа', choices=STATUS_CHOICES, default=STATUS_NEW)
    buying_type = models.CharField(max_length=100, verbose_name='Тип заказа', choices=BUYING_TYPE_CHOICES, default=BUYING_TYPE_SELF)
    comment = models.TextField(verbose_name='Комментарий к заказу', null=True, blank=True)
    created_at = models.DateTimeField(auto_now=True, verbose_name='Дата создания заказа')
    order_date = models.DateTimeField(verbose_name='Дата получения заказа', default=timezone.now)

    def __str__(self):
        return str(self.id)
| [
"igordudichev@gmail.com"
] | igordudichev@gmail.com |
f2a8228648d6df6d7ab605f993bb91c991aca628 | ae0351eea1f35a521ceb136ebf1fd3dd56ff715a | /activity_predictor/classifiertrain.py | e049324c28bd74659ee22637c6dca4b787d81ff6 | [] | no_license | Ridhanya/PepGAN | eea833da88c48835e94154410453670b5d66e4d2 | 1f11e9f6fe571f8b2fd8b5f3e07ee06ce67bd1d8 | refs/heads/master | 2022-11-20T06:31:51.657891 | 2020-07-15T01:45:39 | 2020-07-15T01:45:39 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,121 | py | import numpy as np
import os
import sys
import multiprocessing as mp
from keras.models import Sequential
from keras.layers import Dense, Activation,TimeDistributed,MaxPooling1D
from keras.layers import LSTM,GRU
from keras.layers.embeddings import Embedding
from keras.optimizers import RMSprop, Adam
from keras.utils.data_utils import get_file
from keras.layers import Dropout
import numpy as np
import random
import sys
from keras.utils.np_utils import to_categorical
from keras.preprocessing import sequence
from keras.models import model_from_json
from random import sample
from keras.callbacks import CSVLogger
#Defining the vocabulary (should be constant with the vocabulary used for the generative model)
aalist=["B","A","R","N","D","C","Q","E","G","H","I","L","K","M","F","P","S","T","W","Y","V","X"," "]
def seqfrmat(seqinp, maxlnpep):
    """Encode one peptide string as (index list, one-hot list).

    The sequence is terminated with "X" and space-padded / truncated to
    exactly maxlnpep + 1 symbols from the vocabulary `aalist`.
    """
    padded = (seqinp.strip() + "X").ljust(maxlnpep + 1)
    seqid = [aalist.index(ch) for ch in padded[:maxlnpep + 1]]
    coding = [to_categorical(idx, num_classes=len(aalist)) for idx in seqid]
    return seqid, coding
def loaddata(csvpath, csvpathneg, maxlnpep):
    """Read positive/negative peptide CSVs (header skipped) and return
    (index-encoded sequences, labels), writing rejected sequences to
    "RNN-dropoutdata-GRU256-64.csv".

    Labels are [[1]] for positives and [[0]] for negatives; note the one-hot
    coding returned by seqfrmat is immediately discarded below.
    """
    f=open(csvpath,'r')
    ln=f.readlines()[1:]
    lenln=len(ln)
    clnpep=[]
    clncoding=[]
    f.close()
    fn=open(csvpathneg,'r')
    lnn=fn.readlines()[1:]
    lenlnn=len(lnn)
    fn.close()
    # NOTE(review): never used afterwards.
    datacutoff=0
    f=open("RNN-dropoutdata-GRU256-64.csv","w")
    # NOTE(review): sample(range(n), n) is a full permutation, so the
    # `i in seqlist` membership tests below are always True -- and O(n)
    # each, making the loops quadratic. Presumably a leftover from a
    # subsampling scheme -- confirm.
    seqlist=sample(range(0,lenln),lenln)
    seqlistneg=sample(range(0,lenlnn),lenlnn)
    for i in range(0,lenln):
        # len(ln[i]) includes the trailing newline, so the effective
        # sequence cutoff is maxlnpep - 1 characters.
        if (len(ln[i])<=maxlnpep)&(i in seqlist):
            frmseq,frmcod=seqfrmat(ln[i],maxlnpep)
            # Overwrite the one-hot coding with the positive label.
            frmcod=[[1]]
            clnpep.append(frmseq)
            clncoding.append(frmcod)
        else:
            # Too-long sequence: log it to the dropout file instead.
            f.write(ln[i].strip()+"X"+"\n")
    for i in range(0,lenlnn):
        if (len(lnn[i])<=maxlnpep)&(i in seqlistneg):
            frmseq,frmcod=seqfrmat(lnn[i],maxlnpep)
            # Negative label.
            frmcod=[[0]]
            clnpep.append(frmseq)
            clncoding.append(frmcod)
        else:
            f.write(lnn[i].strip()+"X"+"\n")
    f.close()
    return clnpep,clncoding
def save_model(model):
    """Persist the model architecture (JSON) and weights (HDF5) to disk."""
    with open("Model-GRU256-64.json", "w") as arch_file:
        arch_file.write(model.to_json())
    model.save_weights("Model-GRU256-64.h5")
    print("Saved model to disk")
if __name__ == "__main__":
    maxlnpep=55
    # NOTE(review): declared but never used (no multiprocessing below).
    nproc=4
    # Set the link to the positive and negative data.
    # NOTE(review): absolute, machine-specific paths -- parameterize before
    # running elsewhere.
    # Despite the names, loaddata returns (sequences, labels): Positive_set
    # holds BOTH positive and negative sequences, Negative_set their labels.
    Positive_set,Negative_set=loaddata("/Users/andrejstucs/Documents/Results/16/PepGAN/data/amp_all.csv","/Users/andrejstucs/Documents/Results/16/PepGAN/data/nonamp.csv",maxlnpep)
    X=np.array((Positive_set))
    Y=np.array((Negative_set))
    model = Sequential()
    aalstln=len(aalist)
    dataln=X.shape[1]
    # Model set-up: embedding -> GRU(256) -> dropout -> per-step sigmoid,
    # max-pooled over the sequence to a single activity score.
    model.add(Embedding(input_dim=aalstln, output_dim=len(aalist), input_length=dataln,mask_zero=False))
    model.add(GRU(output_dim=256, activation='tanh',return_sequences=True))
    model.add(Dropout(0.2))
    model.add(Dense(1, activation='sigmoid'))
    model.add(MaxPooling1D(pool_size=52))
    optimizer=Adam(lr=0.00001)
    print(model.summary())
    model.compile(loss='binary_crossentropy', optimizer=optimizer, metrics=['accuracy'])
    history_callback = model.fit(X,Y,epochs=1000, batch_size=512,validation_split=0.1)
    # Dump the training curves for later plotting.
    loss_history = history_callback.history["loss"]
    acc_history = history_callback.history["acc"]
    val_loss_history = history_callback.history["val_loss"]
    val_acc_history = history_callback.history["val_acc"]
    numpy_loss_history = np.array(loss_history)
    numpy_acc_history = np.array(acc_history)
    numpy_val_loss_history = np.array(val_loss_history)
    numpy_val_acc_history = np.array(val_acc_history)
    np.savetxt("loss_history.txt", numpy_loss_history, delimiter=",")
    np.savetxt("acc_history.txt", numpy_acc_history, delimiter=",")
    np.savetxt("val_loss_history.txt", numpy_val_loss_history, delimiter=",")
    np.savetxt("val_acc_history.txt", numpy_val_acc_history, delimiter=",")
    save_model(model)
| [
"noreply@github.com"
] | Ridhanya.noreply@github.com |
8fb11db047f674207152035c36dabffce36646da | a77630a21bc23100133836e101616f069e5fa051 | /venv/bin/easy_install-3.7 | 436c4ebc8e5fcf2edeba229bed554d68a8257d7e | [] | no_license | vityakril/flask_alarm | f20ac0056ca75c70e6a4a1ac51f848bcf0a56aa4 | 26b52809c232bf8dfa9268e02982c15eba9935aa | refs/heads/master | 2020-07-08T21:28:26.895516 | 2019-08-22T11:49:47 | 2019-08-22T11:49:47 | 203,646,714 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 259 | 7 | #!/home/vkril/test_capensis/venv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from setuptools.command.easy_install import main
if __name__ == '__main__':
    # Generated console-script wrapper: strip a "-script.py(w)"/".exe"
    # suffix from argv[0] so setuptools sees a clean program name.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
| [
"vkril@scalair.fr"
] | vkril@scalair.fr |
84aaf4120a0a6a01012a2a5dcf06b0d75f9c3de5 | 8afb5afd38548c631f6f9536846039ef6cb297b9 | /_MY_ORGS/Web-Dev-Collaborative/blog-research/Data-Structures/1-Python/strings/contain_string.py | 67056fed67317b8f05ae54f52aee5108734c2c45 | [
"MIT"
] | permissive | bgoonz/UsefulResourceRepo2.0 | d87588ffd668bb498f7787b896cc7b20d83ce0ad | 2cb4b45dd14a230aa0e800042e893f8dfb23beda | refs/heads/master | 2023-03-17T01:22:05.254751 | 2022-08-11T03:18:22 | 2022-08-11T03:18:22 | 382,628,698 | 10 | 12 | MIT | 2022-10-10T14:13:54 | 2021-07-03T13:58:52 | null | UTF-8 | Python | false | false | 651 | py | """
Implement strStr().
Return the index of the first occurrence of needle in haystack, or -1 if needle is not part of haystack.
Example 1:
Input: haystack = "hello", needle = "ll"
Output: 2
Example 2:
Input: haystack = "aaaaa", needle = "bba"
Output: -1
Reference: https://leetcode.com/problems/implement-strstr/description/
"""
def contain_string(haystack, needle):
    """Return the index of the first occurrence of needle in haystack, or -1.

    Matches the strStr() contract: an empty needle matches at index 0, and a
    needle longer than haystack yields -1.
    """
    # Idiom: str.find implements exactly this contract (C-speed scan, -1
    # when absent, 0 for the empty needle), replacing the manual O(n*m)
    # slice-comparison loop.
    return haystack.find(needle)
| [
"bryan.guner@gmail.com"
] | bryan.guner@gmail.com |
1b0838c37fc5ca4472ea147a4533d07d6fae4cb0 | 835528e5d723b4e33c7bea7e0382388c0d60be08 | /bin/haxerenderer.py | d1634cdca7ef00ce218908e8e9beb08491ca0e8e | [
"BSD-2-Clause"
] | permissive | aduros/Browser.hx | 0307a14390017bf2c0e03e43be1756e274a1d2cd | c43be016fe4865a713150af63bdbb43a6e84b28a | refs/heads/master | 2021-01-01T20:05:23.206192 | 2014-02-21T22:37:45 | 2014-02-21T22:37:45 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 25,656 | py | #
# Renders a single IDL file into Haxe
import itertools
import re
from idlnode import *
haxe_idl_types = {
"boolean": "Bool",
"byte": "Int",
"signed byte": "Int",
"unsigned byte": "Int",
"char": "Int",
"signed char": "Int",
"unsigned char": "Int",
"octet": "Int",
"short": "Int",
"unsigned short": "Int",
"int": "Int",
"unsigned int": "Int",
"long": "Int", # Float?
"unsigned long": "Int", # Float?
"long long": "Int", # Float?
"unsigned long long": "Int", # Float?
"float": "Float",
"double": "Float",
"void": "Void",
"any": "Dynamic",
"custom": "Dynamic",
"Date": "Date",
"DOMObject": "Dynamic",
"DOMString": "String",
"DOMTimeStamp": "Int", # Float?
"DOMStringMap": "Dynamic<String>",
"Dictionary": "Dynamic",
"object": "Dynamic",
"String": "String",
"Array": "Array<Dynamic>",
"ObjectArray": "Array<Dynamic>",
"sequence": "Array<Dynamic>",
"MutationRecordArray": "Array<MutationRecord>",
# Where did these go?
"PositionOptions": "Dynamic",
"SerializedScriptValue": "Dynamic",
"TimeoutHandler": "Void->Void",
"CompareHow": "Int",
}
# If these identifiers appear in the IDL, they should be suffixed
haxe_keywords = [
"callback",
"continue",
"default",
"override",
]
# Used to generate document.createElement shortcuts
html_elements = {
"AnchorElement": "a",
"AppletElement": "applet",
"AreaElement": "area",
"AudioElement": "audio",
"BaseElement": "base",
"BaseFontElement": "basefont",
"BodyElement": "body",
"BRElement": "br",
"ButtonElement": "button",
"CanvasElement": "canvas",
"ContentElement": "content",
"DataListElement": "datalist",
"DetailsElement": "details",
"DirectoryElement": "dir",
"DivElement": "div",
"DListElement": "dl",
# "Element",
"EmbedElement": "embed",
"FieldSetElement": "fieldset",
"FontElement": "font",
"FormElement": "form",
"FrameElement": "frame",
"FrameSetElement": "frameset",
"HeadElement": "head",
# "HeadingElement"
"HRElement": "hr",
"HtmlElement": "html",
"IFrameElement": "iframe",
"ImageElement": "img",
"InputElement": "input",
"KeygenElement": "keygen",
"LabelElement": "label",
"LegendElement": "legend",
"LIElement": "li",
"LinkElement": "link",
"MapElement": "map",
"MarqueeElement": "marquee",
"MediaElement": "media",
"MenuElement": "menu",
"MetaElement": "meta",
"MeterElement": "meter",
"ModElement": "mod",
"ObjectElement": "object",
"OListElement": "ol",
"OptGroupElement": "optgroup",
"OptionElement": "option",
"OutputElement": "output",
"ParagraphElement": "p",
"ParamElement": "param",
"PreElement": "pre",
"ProgressElement": "progress",
"QuoteElement": "quote",
"ScriptElement": "script",
"SelectElement": "select",
"ShadowElement": "shadow",
"SourceElement": "source",
"SpanElement": "span",
"StyleElement": "style",
"TableCaptionElement": "caption",
"TableCellElement": "td",
"TableColElement": "col",
"TableElement": "table",
"TableRowElement": "tr",
"TableSectionElement": "thead",
"TextAreaElement": "textarea",
"TitleElement": "title",
"TrackElement": "track",
"UListElement": "ul",
# "UnknownElement",
"VideoElement": "video",
}
class PackageGroup:
    """A set of class names that belong in one Haxe sub-package.

    When remove_prefix is given, it is stripped from matching class names
    on output.
    """

    def __init__(self, names, remove_prefix=None):
        self.names, self.remove_prefix = names, remove_prefix
packaged_classes = {
# Web Audio classes (by grepping for WEB_AUDIO in the IDL database)
"audio": PackageGroup([
"AnalyserNode",
"AudioBuffer",
"AudioBufferCallback",
"AudioBufferSourceNode",
"AudioContext",
"AudioDestinationNode",
"AudioGain",
"AudioListener",
"AudioNode",
"AudioParam",
"AudioProcessingEvent",
"AudioSourceNode",
"BiquadFilterNode",
"ChannelMergerNode",
"ChannelSplitterNode",
"ConvolverNode",
"DelayNode",
"DynamicsCompressorNode",
"GainNode",
"MediaElementAudioSourceNode",
"MediaStreamAudioSourceNode",
"OfflineAudioCompletionEvent",
"OscillatorNode",
"PannerNode",
"ScriptProcessorNode",
"WaveShaperNode",
"WaveTable",
]),
# FILE_SYSTEM
"fs": PackageGroup([
# "DataTransferItem",
"DOMFileSystem",
"DOMFileSystemSync",
"DirectoryEntry",
"DirectoryEntrySync",
"DirectoryReader",
"DirectoryReaderSync",
"EntriesCallback",
"Entry",
"EntryArray",
"EntryArraySync",
"EntryCallback",
"EntrySync",
"ErrorCallback",
"FileCallback",
"FileEntry",
"FileEntrySync",
"FileError",
"FileException",
"FileSystemCallback",
"FileWriter",
"FileWriterCallback",
"FileWriterSync",
"Metadata",
"MetadataCallback",
], remove_prefix="DOM"),
# MEDIA_STREAM
"rtc": PackageGroup([
"LocalMediaStream",
"MediaStream",
# "MediaStreamAudioSourceNode",
"MediaStreamEvent",
"MediaStreamList",
"MediaStreamTrack",
"MediaStreamTrackEvent",
"MediaStreamTrackList",
"NavigatorUserMediaError",
"NavigatorUserMediaErrorCallback",
"NavigatorUserMediaSuccessCallback",
"RTCDataChannel",
"RTCDataChannelEvent",
"RTCErrorCallback",
"RTCIceCandidate",
"RTCIceCandidateEvent",
"RTCPeerConnection",
"RTCSessionDescription",
"RTCSessionDescriptionCallback",
"RTCStatsCallback",
"RTCStatsElement",
"RTCStatsReport",
"RTCStatsResponse",
], remove_prefix="RTC"),
# SQL_DATABASE
"sql": PackageGroup([
"Database",
"DatabaseCallback",
"DatabaseSync",
"SQLError",
"SQLException",
"SQLResultSet",
"SQLResultSetRowList",
"SQLStatementCallback",
"SQLStatementErrorCallback",
"SQLTransaction",
"SQLTransactionCallback",
"SQLTransactionErrorCallback",
"SQLTransactionSync",
"SQLTransactionSyncCallback",
], remove_prefix="SQL"),
}
# Merged class pairs
merged_targets = {
"HTMLElement": "Element",
"HTMLDocument": "Document",
}
merged_sources = dict([[v,k] for k,v in merged_targets.iteritems()])
def to_haxe(id):
    """Converts an IDL type name to Haxe, as a package-path list
    (e.g. ["js", "html", "svg", "Rect"])."""
    if id.endswith("..."):
        # TODO(bruno): Support varargs somehow. Probably by emitting a few @:overloads
        id = id[:-3]
    # sequence<T> and T[] both become Array<T>.
    match = re.match(r"(?:sequence<(\w+)>|(\w+)\[\])$", id)
    if match:
        return ["Array<%s>" % to_haxe_package(match.group(1) or match.group(2))]
    # Collapse merged class pairs (HTMLElement -> Element, ...).
    if id in merged_targets:
        id = merged_targets.get(id)
    # Strip the "HTML" prefix from elements
    match = re.match(r"HTML(.+)Element", id)
    if match:
        id = match.group(1)+"Element"
    # Temporary hackery?
    if id == "Acceleration" or id == "RotationRate":
        id = "Device"+id
    # Primitive / built-in IDL types map straight to bare Haxe types.
    if id in haxe_idl_types:
        return [haxe_idl_types.get(id)]
    id = strip_vendor(id)
    path = ["js", "html"]
    # Route prefixed families into their sub-packages, stripping the prefix
    # where the Haxe name drops it.
    if id.startswith("SVG"):
        id = id[len("SVG"):]
        path += ["svg"]
    elif id.startswith("WebGL"):
        id = id[len("WebGL"):]
        path += ["webgl"]
    elif id.startswith("OES") or id.startswith("EXT"):
        path += ["webgl"]
    elif id.startswith("IDB"):
        id = id[len("IDB"):]
        path += ["idb"]
    else:
        # Otherwise consult the explicit per-package class lists.
        for package, group in packaged_classes.iteritems():
            if id in group.names:
                path += [package]
                if group.remove_prefix and id.startswith(group.remove_prefix):
                    id = id[len(group.remove_prefix):]
                break
    path += [id]
    return path
def to_haxe_package(id):
    """Fully-qualified (dotted) Haxe name for an IDL type."""
    parts = to_haxe(id)
    return ".".join(parts)
def to_haxe_class(id):
    """Unqualified Haxe class name for an IDL type."""
    path = to_haxe(id)
    return path[-1]
def array_access(interface):
    """Returns the type to use for ArrayAccess, or None."""
    if "IndexedGetter" in interface.ext_attrs:
        # Indexed getters expose their element type via the item() operation.
        for op in interface.operations:
            if op.id == "item":
                return op.type.id
    elif "TypedArray" in interface.ext_attrs:
        # Typed arrays carry the element type directly in the attribute.
        return interface.ext_attrs["TypedArray"]
    return None
def constructable(interface):
    """Returns whether the interface has a constructor (truthy: the set of
    constructor-related extended attributes present on it)."""
    ctor_attrs = set(["CustomConstructor", "V8CustomConstructor", "Constructor",
                      "NamedConstructor", "ConstructorTemplate"])
    return ctor_attrs & set(interface.ext_attrs)
def escape_keyword(id):
  """Escapes a Haxe keyword by appending an underscore."""
  if id in haxe_keywords:
    return id + "_"
  return id
def strip_vendor(id):
  """Strips vendor prefixes (webkit) from an id."""
  # Each pair rewrites a leading prefix; applied in order, like the
  # original chain of anchored substitutions.
  prefix_rewrites = [
      ("WebKit", ""),
      ("WEBKIT_", ""),
      ("onwebkit", "on"),
      ("initWebKit", "init"),  # Used in some Events
  ]
  for prefix, replacement in prefix_rewrites:
    if id.startswith(prefix):
      id = replacement + id[len(prefix):]
  if id.startswith("webkit"):
    # Drop the prefix and re-lowercase the first remaining letter.
    id = id[len("webkit"):]
    id = id[0].lower() + id[1:]
  return id
def strip_vendor_fields(nodes, remove=True):
  """Renames vendor-prefixed members of `nodes` in place.

  If `remove` is True and the stripped name collides with an existing
  member, the vendor-prefixed duplicate is dropped instead of renamed.
  """
  # BUG FIX: the original iterated over `nodes` while calling
  # nodes.remove() on it, which silently skips the element following
  # each removal. Iterate over a snapshot instead.
  for node in list(nodes):
    stripped = strip_vendor(node.id)
    if stripped == node.id:
      continue
    if remove and any(x.id == stripped for x in nodes):
      # An unprefixed twin already exists; drop the prefixed copy.
      nodes.remove(node)
      continue
    node.id = stripped
def is_callback(node):
  """Returns whether the node should be emitted as a function typedef."""
  if "Callback" in node.ext_attrs:
    return True
  # These two interfaces behave like callbacks but lack the attribute.
  return node.id in ("EventListener", "MediaQueryListListener")
def is_optional(node):
  """Returns whether an argument/attribute should be emitted as optional."""
  attrs = node.ext_attrs
  if "HaxeOptional" in attrs:
    return True
  if "Optional" not in attrs:
    return False
  # A bare [Optional] (no value) is optional, as is a null-as-"" string.
  return attrs["Optional"] is None or attrs.get("TreatNullAs") == "NullString"
def render(db, idl_node, mdn_js, mdn_css, header=None):
output = []
indent_stack = []
EventTarget = db.GetInterface("EventTarget")
package = None
def begin_indent():
indent_stack.append("\t")
def end_indent():
indent_stack.pop()
def sort(nodes):
return sorted(nodes, key=lambda node: node.id)
def get_parent(interface):
if interface.parents:
parent = interface.parents[0].type.id
if interface.id not in merged_targets and parent in merged_sources:
parent = merged_sources.get(parent)
return db.GetInterface(parent)
if "EventTarget" in interface.ext_attrs:
return EventTarget
return None
def defined_in_parent(interface, id):
"""Whether an id is already defined in an interface's parents"""
parent = get_parent(interface)
if parent:
if parent.attributes:
for attribute in parent.attributes:
if attribute.id == id:
return True
if parent.operations:
for operation in parent.operations:
if operation.id == id:
return True
return defined_in_parent(parent, id)
return False
def to_haxe_local(id):
# Omit the package if unnecessary
global package
path = to_haxe(id)
return path[-1] if path[0:-1] == package else ".".join(path)
def wln(node=None):
"""Writes the given node and adds a new line."""
w(node)
output.append("\n")
def wsp(node):
"""Writes the given node and adds a space if there was output."""
mark = len(output)
w(node)
if mark != len(output):
w(" ")
def w_doc(text):
text = text.strip()
if text != "":
wln("/** %s */" % text)
def w_typed_shortcut(name, return_type, code):
w_doc("A typed shortcut for <code>%s</code>." % code)
wln("public inline function %s() : %s { return cast %s; }" % (name, return_type, code))
def w_arguments(node):
"""Handle the stdlib's tricky argument formatting"""
if node:
w(" ")
w(node, ", ")
w(" ")
def w(node, list_separator=None):
"""Writes the given node.
Args:
node -- a string, IDLNode instance or a list of such.
list_separator -- if provided, and node is a list,
list_separator will be written between the list items.
"""
if node is None:
return
elif isinstance(node, str) or isinstance(node, unicode):
if output and output[-1].endswith("\n"):
# Auto-indent.
output.extend(indent_stack)
output.append(node)
elif isinstance(node, list):
for i in range(0, len(node)):
if i > 0:
w(list_separator)
w(node[i])
elif isinstance(node, IDLFile):
w(node.modules)
w(node.interfaces)
elif isinstance(node, IDLModule):
w(node.interfaces)
w(node.typeDefs)
elif isinstance(node, IDLInterface):
global package
package = to_haxe(node.id)[0:-1]
wln("package %s;" % ".".join(package))
wln()
constants = node.constants[:]
attributes = node.attributes[:]
operations = node.operations[:]
if node.id in merged_sources:
def append_uniques(tgt, src):
for s in src:
duplicate = False
for t in tgt:
if s.id == t.id:
duplicate = True
break
if not duplicate:
tgt.append(s)
source = db.GetInterface(merged_sources.get(node.id))
append_uniques(constants, source.constants)
append_uniques(attributes, source.attributes)
operations += source.operations
print("Merged %s onto %s" % (source.id, node.id))
class_doc = None
if node.id in mdn_js:
class_doc = mdn_js[node.id]
if "summary" in class_doc:
w_doc("<br><br>\n".join([
class_doc["summary"],
"Documentation for this class was provided by <a href=\"%s\">MDN</a>." % class_doc["srcUrl"]
]))
def w_member_doc(node):
docs = []
if hasattr(node, "raises") and node.raises:
docs += ["Throws %s." % to_haxe_class(node.raises.id)]
if hasattr(node, "get_raises") and node.get_raises:
docs += ["Getter throws %s." % to_haxe_class(node.get_raises.id)]
if hasattr(node, "set_raises") and node.set_raises:
docs += ["Setter throws %s." % to_haxe_class(node.set_raises.id)]
if class_doc and class_doc["members"]:
for member in class_doc["members"]:
if member["name"] == node.id:
docs = [member["help"]] + docs
break
w_doc(" ".join(docs))
def w_constructor_doc():
if class_doc and "constructor" in class_doc:
w_doc(class_doc["constructor"])
def w_members(members):
for member in members:
w_member_doc(member)
wln(member)
if is_callback(node):
# Generate a function typedef if this is a callback
callback = operations[0]
if callback.arguments:
# Use Dynamic->Void for EventListener for a lot more expressiveness at the cost
# of a little type safety
arguments = " -> ".join([
"Dynamic" if x.type.id == "Event" else to_haxe_local(x.type.id)
for x in callback.arguments])
else:
arguments = "Void"
w("typedef %s = %s -> %s;" % (to_haxe_class(node.id), arguments, to_haxe_local(callback.type.id)))
return
class_name = to_haxe_class(node.id)
if "HaxeTypedef" in node.ext_attrs:
w("typedef %s =" % class_name)
else:
interface_name = node.ext_attrs["InterfaceName"] if "InterfaceName" in node.ext_attrs else node.id
wln("@:native(\"%s\")" % strip_vendor(interface_name))
w("extern class %s" % class_name)
strip_vendor_fields(constants)
strip_vendor_fields(attributes)
strip_vendor_fields(operations, False)
parent = get_parent(node)
if parent:
w(" extends %s" % to_haxe_local(parent.id))
if len(node.parents) > 1:
print("Omitting excess superclasses from %s" % node.id)
array_type = array_access(node)
if array_type:
w(" implements ArrayAccess<%s>" % to_haxe_local(array_type))
wln()
wln("{")
begin_indent()
if constants:
w_members(sort(constants))
if attributes:
attributes = sort([x for x in attributes if not defined_in_parent(node, x.id)])
if "ExtendsDOMGlobalObject" in node.ext_attrs:
# Omit class contructors from the global object
w_members([x for x in attributes if not x.type.id.endswith("Constructor")])
else:
w_members(attributes)
if node.id == "CSSStyleDeclaration":
for prop in sorted(mdn_css):
w_doc(mdn_css[prop])
# Convert to camel case
prop = "".join([x[0].upper()+x[1:] for x in prop.split("-")])
prop = prop[0].lower() + prop[1:] # Re-lowercase the first word
wln("var " + prop + " : String;")
wln()
if constructable(node):
constructors = []
if "ConstructorTemplate" in node.ext_attrs:
template = node.ext_attrs["ConstructorTemplate"]
if template == "Event":
constructors += [
["type : String", "canBubble : Bool = true", "cancelable : Bool = true"],
]
elif template == "TypedArray":
array_type = to_haxe_local(node.ext_attrs["TypedArray"])
constructors += [
["length : Int"],
["array : Array<%s>" % array_type],
["array : %s" % class_name],
["buffer : ArrayBuffer", "?byteOffset : Int", "?length : Int"],
]
else:
raise TypeError("Unrecognized ConstructorTemplate for " + node.id)
elif "ConstructorParameters" in node.ext_attrs:
c = []
for ii in range(int(node.ext_attrs["ConstructorParameters"])):
c += ["?arg%s : Dynamic" % ii]
constructors += [c]
elif "Constructor" in node.ext_attrs and node.ext_attrs["Constructor"] is not None:
constructors += [node.ext_attrs["Constructor"].arguments]
else:
constructors += [[]]
w_constructor_doc()
for ii, c in enumerate(constructors):
if ii < len(constructors)-1:
w("@:overload( function(")
w_arguments(c)
wln(") : Void {} )")
else:
w("function new(")
w_arguments(c)
wln(") : Void;")
wln()
if operations:
operations = sort([x for x in operations if not defined_in_parent(node, x.id)])
for id, group in itertools.groupby(operations, lambda node: node.id):
group = list(group)
ll = len(group)
if ll > 1:
w_member_doc(group[0])
for ii, overload in enumerate(group):
if ii < ll-1:
w("@:overload( function(")
w_arguments(overload.arguments)
wln(") :%s {} )" % to_haxe_local(overload.type.id))
else:
wln(overload)
else:
wln(group[0])
if node.id == "Document":
for type, tag_name in html_elements.iteritems():
w_typed_shortcut("create"+type, type, "createElement(\"%s\")" % tag_name)
elif node.id == "HTMLCanvasElement":
w_typed_shortcut("getContext2d", "CanvasRenderingContext2D", "getContext(\"2d\")")
wln()
wln("public inline function getContextWebGL( ?attribs :js.html.webgl.ContextAttributes ) :js.html.webgl.RenderingContext {")
begin_indent()
wln("return CanvasUtil.getContextWebGL(this, attribs);")
end_indent()
wln("}")
end_indent()
wln("}")
# getContextWebGL is too big to inline, put it in a helper class
if node.id == "HTMLCanvasElement":
wln()
wln("private class CanvasUtil {")
begin_indent()
wln("public static function getContextWebGL( canvas :CanvasElement, attribs :Dynamic ) {")
begin_indent()
wln('for (name in ["webgl", "experimental-webgl"]) {')
begin_indent()
wln('var ctx = (untyped canvas).getContext(name, attribs);')
wln('if (ctx != null) return ctx;')
end_indent()
wln("}")
wln("return null;")
end_indent()
wln("}")
end_indent()
w("}")
elif isinstance(node, IDLAttribute):
stripped = strip_vendor(node.id)
escaped = escape_keyword(stripped)
attr_type = to_haxe_local(node.type.id)
if is_optional(node):
w("@:optional ")
w("var %s" % escaped)
if escaped != stripped:
wln("(get,%s) : %s;" % ("null" if node.is_read_only else "set", attr_type))
wln("private inline function get_%s() : %s {" % (escaped, attr_type))
begin_indent()
wln("return untyped this[\"%s\"];" % stripped)
end_indent()
wln("}")
if not node.is_read_only:
wln("private inline function set_%s(x : %s) : %s {" % (escaped, attr_type, attr_type))
begin_indent()
wln("return untyped this[\"%s\"] = x;" % stripped)
end_indent()
wln("}")
else:
if node.is_read_only:
w("(default,null)")
wln(" : %s;" % to_haxe_local(node.type.id))
elif isinstance(node, IDLConstant):
wln("static inline var %s : %s = %s;" % (escape_keyword(strip_vendor(node.id)),
to_haxe_local(node.type.id), node.value))
elif isinstance(node, IDLOperation):
stripped = strip_vendor(node.id)
escaped = escape_keyword(stripped)
return_type = to_haxe_local(node.type.id)
if node.is_static:
w("static ")
if escaped != stripped:
w("private inline function %s(" % escaped)
w_arguments(node.arguments)
wln(") : %s {" % return_type)
begin_indent()
if return_type != "Void":
w("return ")
wln("(untyped this[\"%s\"])(%s);" % (stripped,
", ".join([escape_keyword(strip_vendor(arg.id)) for arg in node.arguments])))
end_indent()
wln("}")
else:
w("function %s(" % escaped)
w_arguments(node.arguments)
if "CallWith" in node.ext_attrs and "ScriptArguments" in node.ext_attrs["CallWith"].split("|"):
if node.arguments: w(", ")
w(["?p1 : Dynamic", "?p2 : Dynamic", "?p3 : Dynamic", "?p4 : Dynamic", "?p5 : Dynamic"], ", ")
wln(") : %s;" % return_type)
elif isinstance(node, IDLArgument):
if is_optional(node):
w("?")
w("%s : %s" % (escape_keyword(strip_vendor(node.id)), to_haxe_local(node.type.id)))
else:
raise TypeError("Expected str or IDLNode but %s found" %
type(node))
if header:
w(header)
w(idl_node)
return "".join(output)
| [
"b@aduros.com"
] | b@aduros.com |
e816fd541e13285f5cf59d5f3ff75b8c79ee5fa5 | ae629344805025350f412c43a1281ac2034fd24d | /july5_CV_DL/hands_on/solutions/cnn_cifar10.py | 7d9058b93aef70f454de48a1d669f5ab5dadee83 | [] | no_license | m-and-ms/visum-2019 | f78cc76afd7b7f937c1bfdd00ae9638d18b7a7f0 | 54a21a5e49698266cea396842f3e715ec913c6d2 | refs/heads/master | 2020-08-04T17:15:16.003125 | 2019-07-11T19:46:31 | 2019-07-11T19:46:31 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,208 | py | #
# Perform CNN classification on CIFAR-10.
#
import os
import sys
import numpy as np
import argparse
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from torchvision import datasets, transforms
import resnet
#
# Load the CIFAR 10 dataset.
#
def load_cifar10(basedir, batch_size, kwargs):
    """Return (trainloader, testloader) DataLoaders for CIFAR-10.

    basedir    -- root data directory; the dataset lives in basedir + 'cifar10/'.
    batch_size -- mini-batch size for both loaders.
    kwargs     -- extra DataLoader options (e.g. num_workers, pin_memory).
    """
    # Input channels normalization.
    # NOTE(review): presumably the standard CIFAR-10 channel statistics --
    # confirm they match what the model was tuned for.
    normalize = transforms.Normalize(mean=[0.4914, 0.4822, 0.4465],
                                     std=[0.2023, 0.1994, 0.2010])
    # Load train data.  Random crop + horizontal flip augmentation;
    # downloads the dataset on first use.
    trainloader = torch.utils.data.DataLoader(
        datasets.CIFAR10(root=basedir+'cifar10/', train=True,
            transform=transforms.Compose([
                transforms.RandomCrop(32, 4),
                transforms.RandomHorizontalFlip(),
                transforms.ToTensor(),
                normalize,
            ]), download=True),
        batch_size=batch_size, shuffle=True, **kwargs)
    # Load test data.  No augmentation, normalization only.
    testloader = torch.utils.data.DataLoader(
        datasets.CIFAR10(root=basedir+'cifar10/', train=False,
            transform=transforms.Compose([
                transforms.ToTensor(),
                normalize,
            ])),
        batch_size=batch_size, shuffle=True, **kwargs)
    return trainloader, testloader
################################################################################
# Training epoch.
################################################################################
#
# Main function for training.
#
def main_train(model, device, trainloader, optimizer, f_loss, epoch):
    """Run one training epoch, printing a running average of the loss.

    NOTE(review): Python 2 print syntax; `device` is unused -- tensors
    are moved to the GPU with .cuda() directly.
    """
    # Set mode to training.
    model.train()
    avgloss, avglosscount = 0., 0.
    # Go over all batches.
    for bidx, (data, target) in enumerate(trainloader):
        data = torch.autograd.Variable(data).cuda()
        target = torch.autograd.Variable(target).cuda()
        # Compute outputs and losses.
        output = model(data)
        loss = f_loss(output, target)
        # Backpropagation.
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()
        # Update loss (running mean over batches seen so far).
        avgloss += loss.item()
        avglosscount += 1.
        newloss = avgloss / avglosscount
        # Print updates.  Trailing comma + "\r" overwrite the same
        # console line each batch (progress-bar style).
        print "Training epoch %d: loss %8.4f - %.0f\r" \
                %(epoch, newloss, 100.*(bidx+1)/len(trainloader)),
        sys.stdout.flush()
    print
################################################################################
# Testing epoch.
################################################################################
#
# Main function for testing.
#
def main_test(model, device, testloader):
# Set model to evaluation and initialize accuracy and cosine similarity.
model.eval()
cos = nn.CosineSimilarity(eps=1e-9)
acc = 0
ty, tp = [], []
# Go over all batches.
with torch.no_grad():
for data, target in testloader:
# Data to device.
data = torch.autograd.Variable(data).cuda()
target = target.cuda(async=True)
target = torch.autograd.Variable(target)
# Forward.
output = model(data).float()
pred = output.max(1, keepdim=True)[1]
acc += pred.eq(target.view_as(pred)).sum().item()
ty.append(target.data.cpu().numpy())
tp.append(pred.data.cpu().numpy())
ty = np.concatenate(ty).astype(int)
tp = np.concatenate(tp).astype(int)[:,0]
acc = np.mean(ty == tp)
# Print results.
testlen = len(testloader.dataset)
print "Testing: classification accuracy: %d/%d - %.3f" \
%(np.sum(ty == tp), testlen, 100. * acc)
################################################################################
# Main entry point of the script.
################################################################################
#
# Parse all user arguments.
#
def parse_args():
    """Parse command-line hyperparameters for CIFAR-10 training."""
    parser = argparse.ArgumentParser(description="CIFAR-10 classification")
    # Data location and SGD hyperparameters.
    parser.add_argument("--datadir", dest="datadir", default="data/", type=str)
    parser.add_argument("-l", dest="learning_rate", default=0.01, type=float)
    parser.add_argument("-m", dest="momentum", default=0.9, type=float)
    parser.add_argument("-c", dest="decay", default=0.0001, type=float)
    parser.add_argument("-s", dest="batch_size", default=128, type=int)
    parser.add_argument("-e", dest="epochs", default=250, type=int)
    parser.add_argument("--seed", dest="seed", default=100, type=int)
    # Epochs at which the learning rate is decayed by 10x (see main loop).
    parser.add_argument("--drop1", dest="drop1", default=100, type=int)
    parser.add_argument("--drop2", dest="drop2", default=200, type=int)
    args = parser.parse_args()
    return args
#
# Main entry point of the script.
#
if __name__ == "__main__":
# Parse user parameters and set device.
args = parse_args()
os.environ["CUDA_VISIBLE_DEVICES"] = "0"
device = torch.device("cuda")
kwargs = {'num_workers': 64, 'pin_memory': True}
# Set the random seeds.
seed = args.seed
torch.manual_seed(seed)
torch.cuda.manual_seed(seed)
torch.cuda.manual_seed_all(seed)
np.random.seed(seed)
# Load data.
trainloader, testloader = load_cifar10(args.datadir, args.batch_size, kwargs)
# Load the model.
model = resnet.ResNet(8, 10)
model = model.to(device)
# Load the optimizer.
optimizer = optim.SGD(model.parameters(), lr=args.learning_rate, \
momentum=args.momentum, weight_decay=args.decay)
# Initialize the loss functions.
f_loss = nn.CrossEntropyLoss().cuda()
# Main loop.
learning_rate = args.learning_rate
for i in xrange(args.epochs):
#print "---"
# Learning rate decay.
if i in [args.drop1, args.drop2]:
learning_rate *= 0.1
for param_group in optimizer.param_groups:
param_group['lr'] = learning_rate
# Train and test.
main_train(model, device, trainloader, optimizer, f_loss, i)
main_test(model, device, testloader)
| [
"diogo.pernes.cunha@gmail.com"
] | diogo.pernes.cunha@gmail.com |
b4ad6ef2f4856234dc3df9216d876d61614e4356 | aca3beed4a07ca4ae0b29ce5564da4c97d799c76 | /blog/blog/urls.py | 661c7ef24600d6344d8e25a63d9e05bb66ec0311 | [] | no_license | keenson/DRF | 75e260f5163353318be3df9c97242ea83e10231b | d1d785b90e905f3a7f81119d5f7a015333571f0e | refs/heads/main | 2023-04-11T16:34:33.864983 | 2021-05-10T19:50:33 | 2021-05-10T19:50:33 | 364,716,781 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 846 | py | """blog URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
urlpatterns = [
path('', include('api.urls')),
path('admin/', admin.site.urls),
path('api-auth/', include('rest_framework.urls')),
]
| [
"keensono@gmail.com"
] | keensono@gmail.com |
d9d6cd5d0b3e3c817e498b3cb5d6e428f87e86be | 50fe188cd5640f99f020b2fc019d3a1172116d22 | /iftrial.py | 62816db9dd6e65647c22de65630dddcd8130194d | [] | no_license | khatere/unb-sample | db9a896394a1926531a05897674ed3ded26b8d24 | 2dafe621df3373330f4cb8ddc502130812785981 | refs/heads/master | 2021-01-22T09:47:39.406091 | 2014-10-03T16:54:13 | 2014-10-03T16:54:13 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 179 | py | dave = 23
jeff = 23
if jeff>dave:
print 'jeff is older than dave'
elif jeff<dave:
print 'jeff is younger than dave'
else:
print'jeff and dave are the same age' | [
"Khatere@Khatere-PC.ad.unb.ca"
] | Khatere@Khatere-PC.ad.unb.ca |
b3aed492fb0c9d66a56f8865e3b79bd60ba12445 | 48a3f99dae4dca3d2dfce15127b00b7953fb9662 | /Module5/Training.py | 79432f59b0d94117fe2099a6bb756d566d8fa633 | [] | no_license | Chetank003/Rainfall-prediction-using-ANN-and-SVM | 139f8daa841d5192baf2b920b6e7e406a26eb100 | 1cf703890835db4af8eeff3f5c69d9f8ee1720e2 | refs/heads/master | 2022-11-29T00:35:28.670279 | 2020-08-09T08:13:21 | 2020-08-09T08:13:21 | 286,194,739 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 373 | py | import pandas as pd
from sklearn.model_selection import train_test_split
import matplotlib.pyplot as plt
from sklearn import metrics
from sklearn.svm import SVR
from joblib import dump,load
# Load the rainfall dataset and drop rows with missing values.
df=pd.read_csv('data1modify.csv')
df=df.dropna()
a=list(df.columns)
# First six columns are the input features; the seventh is the target.
X=df[[a[0],a[1],a[2],a[3],a[4],a[5]]]
Y = df[a[6]]
# Fit a support-vector regressor and persist it for later prediction.
svm = clf = SVR(gamma='auto').fit(X,Y)
dump(svm,'svm.model')
| [
"noreply@github.com"
] | Chetank003.noreply@github.com |
675ae4611cdb22f5676993b3c7c77fdad3196c7b | e638e9fda0e672fa9a414515d0c05a24ab55ad38 | /FindPeakElement.py | 8ff51d79a507758cc2e9a7895bd046e1d669a131 | [] | no_license | zjuzpz/Algorithms | 8d1c7d50429aa5540eb817dc5495a20fc3f11125 | 2df1a58aa9474f2ecec2ee7c45ebf12466181391 | refs/heads/master | 2021-01-21T05:55:48.768728 | 2020-08-04T22:44:08 | 2020-08-04T22:44:08 | 44,586,024 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,221 | py | """
162. Find Peak Element
A peak element is an element that is greater than its neighbors.
Given an input array where num[i] ≠ num[i+1], find a peak element and return its index.
The array may contain multiple peaks, in that case return the index to any one of the peaks is fine.
You may imagine that num[-1] = num[n] = -∞.
For example, in array [1, 2, 3, 1], 3 is a peak element
and your function should return the index number 2.
"""
# O(logn)
# O(1)
class Solution(object):
    def findPeakElement(self, nums):
        """
        Return the index of any peak element in nums.

        A peak is strictly greater than its neighbors; nums[-1] and
        nums[n] are treated as -infinity, so a peak always exists for
        non-empty input.  Returns None for empty input (preserved from
        the original).  O(log n) time, O(1) space.

        Simplified from the original, which special-cased mid == 0 and
        mid == len(nums) - 1; the invariant below makes those checks
        unnecessary (mid + 1 <= upper always holds inside the loop).

        :type nums: List[int]
        :rtype: int
        """
        if not nums:
            return None
        lower, upper = 0, len(nums) - 1
        # Invariant: some peak lies within nums[lower..upper].
        while lower < upper:
            mid = (lower + upper) // 2
            if nums[mid] > nums[mid + 1]:
                # Downhill to the right: a peak is at mid or to its left.
                upper = mid
            else:
                # Uphill to the right: a peak lies strictly right of mid.
                lower = mid + 1
        return lower
if __name__ == "__main__":
print(Solution().findPeakElement([1,2,3,1]))
| [
"zjuzpz@gmail.com"
] | zjuzpz@gmail.com |
77eb8378416b048b90b0d5fabdbc0bd20a66ffe7 | c2d815908243b6bcb91d1e54ff532cb30e891ad8 | /messenger_bot/settings.py | 87b29c141331c3db856df958fde6afd17b28eefe | [] | no_license | mashrikt/fb-bot | 73f36d957410fbece42dcb22d5e52b401976468e | 497fe0c5d87ee4c1372b691fbe01b20bf9ce4433 | refs/heads/master | 2021-04-15T10:00:10.878134 | 2018-03-26T22:15:49 | 2018-03-26T22:15:49 | 126,896,195 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,369 | py | """
Django settings for messenger_bot project.
Generated by 'django-admin startproject' using Django 1.9.13.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is hard-coded and committed to source control --
# rotate it and load it from an environment variable before deploying.
SECRET_KEY = ')pc=v(r-$@3sc!-54c&wim^*maw62f=0d7&&igsis%oe4wjl43'
# SECURITY WARNING: don't run with debug turned on in production!
# NOTE(review): DEBUG=True exposes stack traces and settings; disable in prod.
DEBUG = True
# Hosts/domains this site may serve; must be populated when DEBUG is False.
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE_CLASSES = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'messenger_bot.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'messenger_bot.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
STATIC_URL = '/static/'
VERIFY_TOKEN = os.environ.get("VERIFY_TOKEN", 'token')
FACEBOOK_APP_SECRET = os.environ.get("FACEBOOK_APP_SECRET", '')
PAGE_ACCESS_TOKEN = os.environ.get("PAGE_ACCESS_TOKEN", '')
| [
"mashrikt@gmail.com"
] | mashrikt@gmail.com |
2d36f7508061e9fbfb03504ec3e5c8fe1382e954 | ecb2885d161ad7295cbf39b43388c5a62ced975a | /Modulo 3/Proyectos/Proyecto1/Proyecto_fantasia/proyecto-fantasia.py | 4cf42606cbe83e02b243a8da1fadad4114127323 | [] | no_license | galigaribaldi/Python-Turtle | ee8709a677f00b5b31242321a889319d9cd8f91b | 9ea378293d520605b673fceb080a12d7ccb642eb | refs/heads/master | 2021-07-14T16:10:20.171960 | 2021-06-08T20:54:37 | 2021-06-08T20:54:37 | 126,338,064 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,513 | py | import turtle
##Pantalla
ws = turtle.Screen()
#ws.bgcolor("black")
ws.title("Sharingan")
ws.setup(width=900, height=900) #geometry of the GUI
##Tortuga
frank = turtle.Turtle()
frank.speed(0)
frank.shape("turtle")
def circulo_interno(tam):
    """Draw one filled comma/tomoe shape of radius `tam` at the current
    turtle position.  Side effect only (draws via the global `frank`);
    leaves the pen down and pensize reset to 1."""
    frank.rt(180)
    frank.begin_fill()
    frank.circle(tam)
    frank.end_fill()
    frank.rt(70)
    # Heavier pen for the larger shapes, thinner for the small inner ones.
    if tam > 50:
        frank.pensize(30)
    else:
        frank.pensize(10)
    # Draw a partial arc, then travel (pen up) through the rest of the
    # circle to reposition without drawing.
    frank.circle(tam-5,100)
    frank.penup()
    frank.circle(tam-5,260)
    frank.lt(90)
    frank.fd(tam/3)
    frank.rt(90)
    frank.pendown()
    frank.circle(tam,80)
    frank.penup()
    frank.circle(tam,280)
    frank.pendown()
    frank.pensize(1)
    frank.rt(110)
def triangulo(tam):
    """Walk an equilateral triangle of side `tam`, drawing one filled
    black tomoe (via circulo_interno) at each vertex.  Uses the global
    turtle `frank`; side effect only."""
    for _ in range(3):
        frank.lt(360/3)
        frank.penup()
        frank.forward(tam)
        frank.pendown()
        frank.color("black","black")
        frank.begin_fill()
        circulo_interno(20)
        frank.end_fill()
        #input()
###Circulo exterior
frank.pensize(10)
frank.color("black","red")
frank.begin_fill()
frank.penup()
frank.goto(10,-210)
frank.pendown()
frank.circle(300)
frank.end_fill()
#####
frank.penup()
frank.goto(150,0)
frank.pendown()
triangulo(300)
####Cicrulo de en medio
frank.penup()
frank.goto(0,70)
frank.pendown()
frank.color("black","black")
frank.begin_fill()
frank.circle(40)
frank.end_fill()
####Cicrulo de en medio
frank.penup()
frank.goto(5,-90)
frank.pendown()
frank.color("black")
frank.pensize(5)
frank.circle(180)
#####
turtle.done() | [
"galigaribaldi@live.com"
] | galigaribaldi@live.com |
84e04268feae1b1a5487fcbd2eaeda1fadbcb044 | 978248bf0f275ae688f194593aa32c267832b2b6 | /xlsxwriter/test/table/test_table04.py | b4fda1d6dc147b5f25e5321e06cd322bd6883955 | [
"BSD-2-Clause-Views"
] | permissive | satish1337/XlsxWriter | b0c216b91be1b74d6cac017a152023aa1d581de2 | 0ab9bdded4f750246c41a439f6a6cecaf9179030 | refs/heads/master | 2021-01-22T02:35:13.158752 | 2015-03-31T20:32:28 | 2015-03-31T20:32:28 | 33,300,989 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,867 | py | ###############################################################################
#
# Tests for XlsxWriter.
#
# Copyright (c), 2013-2015, John McNamara, jmcnamara@cpan.org
#
import unittest
from ...compatibility import StringIO
from ..helperfunctions import _xml_to_list
from ...table import Table
from ...worksheet import Worksheet
from ...workbook import WorksheetMeta
from ...sharedstrings import SharedStringTable
class TestAssembleTable(unittest.TestCase):
"""
Test assembling a complete Table file.
"""
def test_assemble_xml_file(self):
"""Test writing a table"""
self.maxDiff = None
worksheet = Worksheet()
worksheet.worksheet_meta = WorksheetMeta()
worksheet.str_table = SharedStringTable()
worksheet.add_table('C3:F13', {'autofilter': False})
worksheet._prepare_tables(1)
fh = StringIO()
table = Table()
table._set_filehandle(fh)
table._set_properties(worksheet.tables[0])
table._assemble_xml_file()
exp = _xml_to_list("""
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<table xmlns="http://schemas.openxmlformats.org/spreadsheetml/2006/main" id="1" name="Table1" displayName="Table1" ref="C3:F13" totalsRowShown="0">
<tableColumns count="4">
<tableColumn id="1" name="Column1"/>
<tableColumn id="2" name="Column2"/>
<tableColumn id="3" name="Column3"/>
<tableColumn id="4" name="Column4"/>
</tableColumns>
<tableStyleInfo name="TableStyleMedium9" showFirstColumn="0" showLastColumn="0" showRowStripes="1" showColumnStripes="0"/>
</table>
""")
got = _xml_to_list(fh.getvalue())
self.assertEqual(got, exp)
| [
"jmcnamara@cpan.org"
] | jmcnamara@cpan.org |
5e0a1ff00ad1855c0bdc3be2344ec1dc35745ce0 | cc773e68159ab90ea0a8f2c5c89b1cafa27090e6 | /blong/spiders/crawler.py | 61699d09f9537da7cb25569729c1a148d5209f92 | [] | no_license | imlifeilong/OnLeopard | a724a83f7b5a00e6bdc3e11e4b3f9c23cf4b412f | fd2cbe1400c47446581e50f6ce80545da353c07b | refs/heads/master | 2020-04-30T07:19:59.301475 | 2020-02-26T04:39:14 | 2020-02-26T04:39:14 | 176,681,060 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,668 | py | # import scrapy
# from scrapy_redis.spiders import RedisSpider
#
# from Leopard.items import LeopardItem
# from Leopard.spiders.loader import LeopardItemLoader
#
# class Crawler(scrapy.Spider):
# name = 'crawler'
# start_urls = [
# 'http://sousuo.gov.cn/column/30469/0.htm'
# ]
#
# def start_requests(self):
# url = self.start_urls[0]
# # print('+++++++++++++++++++++++++>', url, globals())
# yield scrapy.Request(url=url, callback=self.parse)
#
# def parse(self, response):
#
# for row in response.xpath('//ul[@class="listTxt"]//li//h4'):
# loader = LeopardItemLoader(item=LeopardItem(), selector=row, response=response)
# loader.add_xpath('date', 'span/text()')
# loader.add_xpath('link', 'a/@href')
# loader.add_xpath('title', 'a/text()')
# item = loader.load_item()
# item['date'] = item['date'].strip('[]')
# yield item
#
# class RedisCrawler(RedisSpider):
# name = 'redisbaiduspider'
# redis_key = 'redisbaiduspider:start_urls'
#
# def __init__(self, *args, **kwargs):
# super(RedisCrawler, self).__init__(*args, **kwargs)
#
# def parse(self, response):
# for row in response.xpath('//ul[@class="listTxt"]//li//h4'):
# loader = LeopardItemLoader(item=LeopardItem(), selector=row, response=response)
# loader.add_xpath('date', 'span/text()')
# loader.add_xpath('link', 'a/@href')
# loader.add_xpath('title', 'a/text()')
# item = loader.load_item()
# item['date'] = item['date'].strip('[]')
# yield item
| [
"13772032410@163.com"
] | 13772032410@163.com |
61aa7599d8cc5d642c8f394964d58d2753b8d5b2 | ff3e10beec9279cc11c523006efa8ace9120ef75 | /src/inception/__init__.py | 17422a65cf12e103357c4e5779750d72d2d4e266 | [
"MIT"
] | permissive | jercytryn/inception | 07b7e90b0c0e7d7ab56eb4081bba676ecdde17fd | f7489cccdd308b5bb2d6ea04588dc67113e995b5 | refs/heads/master | 2020-04-13T23:44:38.253281 | 2015-09-18T02:37:48 | 2015-09-18T02:37:48 | 30,546,890 | 6 | 2 | null | 2016-07-06T13:29:58 | 2015-02-09T16:58:52 | C++ | UTF-8 | Python | false | false | 555 | py | """
Inception api for semi-automaed 2D object insertion into indoor scenery
>>> import inception
>>> inception.magic_insert('http://my/awesome/foreground.jpg', '/Users/mrayder/background.png', (30, 40, 300, 500))
"""
# expose the top level api methods up the very top level of the package
# e.g. so that can just do
# >>> import inception
# >>> inception.magic_insert()
# for instance
from .base import inception, magic_insert, floodfill, scale, poissonblend, shadow, statadjust
from .generate import generate_magic_composite
from .image import Image
| [
"jercytryn@gmail.com"
] | jercytryn@gmail.com |
b9da67d441c8a8ea04914a68e4e11e3566b32dde | 9ca55981d3245d87d45debce8e9825b60db43046 | /chemicals/thermal_conductivity.pyi | 75e0492a449dc884cb25dbc4ed2080c24b0a1a97 | [
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | CalebBell/chemicals | c6b1ebd409c32e0e1053c4f97668a8ebcc92b969 | 37e32a7c7f819e0cb8e2a8784f8448f68b9a4215 | refs/heads/master | 2023-07-25T23:34:17.754310 | 2023-07-25T02:00:14 | 2023-07-25T02:00:14 | 264,697,738 | 137 | 33 | MIT | 2022-06-05T18:21:02 | 2020-05-17T15:27:11 | Python | UTF-8 | Python | false | false | 2,921 | pyi | # DO NOT EDIT - AUTOMATICALLY GENERATED BY tests/make_test_stubs.py!
from typing import List
from pandas.core.frame import DataFrame
from typing import (
List,
Optional,
Union,
)
def Bahadori_gas(T: float, MW: int) -> float: ...
def Bahadori_liquid(T: float, M: int) -> float: ...
def Chung(T: float, MW: float, Tc: float, omega: float, Cvm: float, mu: float) -> float: ...
def Chung_dense(
T: float,
MW: float,
Tc: float,
Vc: float,
omega: float,
Cvm: float,
Vm: float,
mu: float,
dipole: float,
association: float = ...
) -> float: ...
def DIPPR9B(
T: float,
MW: float,
Cvm: float,
mu: float,
Tc: Optional[float] = ...,
chemtype: Optional[str] = ...
) -> float: ...
def DIPPR9G(T: float, P: float, Tc: float, Pc: float, kl: float) -> float: ...
def DIPPR9H(ws: List[float], ks: List[float]) -> float: ...
def DIPPR9I(zs: List[float], Vms: List[float], ks: List[float]) -> float: ...
def Eli_Hanley(T: float, MW: float, Tc: float, Vc: float, Zc: float, omega: float, Cvm: float) -> float: ...
def Eli_Hanley_dense(
T: float,
MW: float,
Tc: float,
Vc: float,
Zc: float,
omega: float,
Cvm: float,
Vm: float
) -> float: ...
def Eucken(MW: float, Cvm: float, mu: float) -> float: ...
def Eucken_modified(MW: float, Cvm: float, mu: float) -> float: ...
def Filippov(ws: List[float], ks: List[float]) -> float: ...
def Gharagheizi_gas(T: float, MW: float, Tb: float, Pc: float, omega: float) -> float: ...
def Gharagheizi_liquid(T: int, M: int, Tb: int, Pc: float, omega: float) -> float: ...
def Lakshmi_Prasad(T: float, M: int) -> float: ...
def Lindsay_Bromley(
T: float,
ys: List[float],
ks: List[float],
mus: List[float],
Tbs: List[float],
MWs: List[float]
) -> float: ...
def Mersmann_Kind_thermal_conductivity_liquid(T: int, MW: float, Tc: float, Vc: float, na: int) -> float: ...
def Missenard(T: float, P: float, Tc: float, Pc: float, kl: float) -> float: ...
def Nicola(T: int, M: float, Tc: float, Pc: float, omega: float) -> float: ...
def Nicola_original(T: int, M: float, Tc: float, omega: float, Hfus: int) -> float: ...
def Sato_Riedel(T: int, M: int, Tb: int, Tc: int) -> float: ...
def Sheffy_Johnson(T: int, M: int, Tm: int) -> float: ...
def Stiel_Thodos_dense(T: float, MW: float, Tc: float, Pc: float, Vc: float, Zc: float, Vm: float, kg: float) -> float: ...
def Wassiljewa_Herning_Zipperer(
zs: List[float],
ks: List[float],
MWs: Union[List[int], List[float]],
MW_roots: Optional[List[float]] = ...
) -> float: ...
def __getattr__(name: str) -> DataFrame: ...
def _load_k_data() -> None: ...
def k_IAPWS(
T: float,
rho: float,
Cp: Optional[float] = ...,
Cv: Optional[float] = ...,
mu: Optional[float] = ...,
drho_dP: Optional[float] = ...
) -> float: ...
__all__: List[str] | [
"Caleb.Andrew.Bell@gmail.com"
] | Caleb.Andrew.Bell@gmail.com |
03ac8c891da817f67d9b5c8e05d36778c398ff8e | 8015f1c62a2cb4efd21aa8938336913bf8117868 | /bamap/ba3483.pngMap.py | 11a26fda8cd8a7ca0878fb5776a770da9bee2f06 | [] | no_license | GamerNoTitle/Beepers-and-OLED | 675b5e3c179df0f0e27b42bf594c43860d03b9af | afe1340e5394ae96bda5f9022a8a66824368091e | refs/heads/master | 2020-04-20T00:09:47.122471 | 2019-04-29T04:59:35 | 2019-04-29T04:59:35 | 168,515,579 | 4 | 2 | null | null | null | null | UTF-8 | Python | false | false | 8,468 | py | ba3483.pngMap = [
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000011100000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000111100000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000111111111100000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000001111111111100000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000111111111111111000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000001001111111111111000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000001100111111111100000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000101111111111000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000001111110101000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000001111111111000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000111111111111000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000001111111111111000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000001111111111111000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000001111111111111100000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000001111111111111111000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000001111111111111111000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000001111111111111111110000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000111111111111111110000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000011111111111111110000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000011111111111111110000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000011111111111100000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000011111111111100000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000011111111111111110000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000001111111111111110000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000111111111111111111111000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000111111111111111111111100000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000111111111111111111111111000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000111111111111111111111111010000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000011111111111111111111111111010000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000011111111111111111111111111111000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000011111111111111111111111111110000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000011111111111111111111111111111000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000001111111111111111111111111111010000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000001111111111111111111111111111000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000001111111111111111111111111100000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000001111111111111111111111111000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000001111111111111111111111111000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000011111111111111111111110000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000001100000000111000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000001100000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000100010000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
]
| [
"bili33@87ouo.top"
] | bili33@87ouo.top |
a2d3191890cbeec635cdf2151309c60a1abb1802 | b3b4ff12c3c95601c32067fb28004a0ab02b7f83 | /alien_invasion.py | 31ec1b078e9e0468f7be544ea23d4fa4faa0a88d | [] | no_license | eemmiillyy/AlienInvasion | 1967f68a2f223f7bf1ad6f1f8f70b9736b0a4eb8 | bf528db3940786a600eb1c79ab55daa53f00a345 | refs/heads/master | 2020-08-06T02:05:02.123351 | 2019-10-07T12:29:51 | 2019-10-07T12:29:51 | 212,793,767 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,665 | py | import pygame
from pygame.sprite import Group
from settings import Settings
from ship import Ship
from bullet import Bullet
import game_functions as gf
from game_stats import GameStats
from button import Button
from scoreboard import Scoreboard
def run_game():
    """Bootstrap pygame, construct every game object, and run the main loop."""
    pygame.init()
    config = Settings()
    surface = pygame.display.set_mode((
        config.screen_width, config.screen_height
    ))
    pygame.display.set_caption("Alien Invasion")
    # UI widgets and bookkeeping objects.
    play_btn = Button(config, surface, "Play")
    game_stats = GameStats(config)
    score_board = Scoreboard(config, surface, game_stats)
    # Sprites: the player's ship plus groups for bullets and the alien fleet.
    player_ship = Ship(config, surface)
    bullets = Group()
    aliens = Group()
    gf.create_fleet(config, surface, player_ship, aliens)
    # Event / update / draw loop -- runs until the player quits.
    while True:
        gf.check_events(config, surface, game_stats, score_board, play_btn, player_ship, aliens, bullets)
        if game_stats.game_active:
            # Advance the simulation only while a round is in progress.
            player_ship.update()
            gf.update_bullets(config, surface, game_stats, score_board, player_ship, aliens, bullets)
            gf.update_aliens(config, surface, game_stats, score_board, player_ship, aliens, bullets)
        gf.update_screen(config, surface, game_stats, score_board, player_ship, aliens, bullets, play_btn)
run_game()
| [
"emily.morgan@code.berlin"
] | emily.morgan@code.berlin |
43f29fd533adf49583c860a5559461d733a08362 | 05e5ac0bdf5eb5a28d86a0a9da15115e2db798a6 | /vlad/validators/networks_create.py | 57aa21636999462daa3c3df09eb3e851b8e6d578 | [
"MIT"
] | permissive | stevommmm/vlad | b7065e3f897b9c61aaf4d59697b689a88ae52451 | 249bc0dc007db91cb549ee128d417ff2c5f703e2 | refs/heads/master | 2020-07-31T04:27:21.815783 | 2020-07-13T03:56:44 | 2020-07-13T03:56:44 | 210,484,670 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 337 | py | # handles: /networks/create:post
from vlad.validators import handles
@handles.post('networks', 'create')
async def validate_request(req):
    '''Allow creation of networks in our OU'''
    # Registered (via the handles decorator) for POST /networks/create.
    # Permit the request only when the network name carries this OU's prefix;
    # returning a string (rather than True) is how a denial reason is reported.
    if not req.req_body['Name'].startswith(req.OU_prefix):
        return f'That network is outside your OU prefix. {req.OU_prefix}'
    return True
| [
"s.mcgregor@griffith.edu.au"
] | s.mcgregor@griffith.edu.au |
21e56e6d53d2cac2aa95a6acb832376f4738e889 | 1fffc557e1d02e6c6396bf1b3ca5f6471836fb0f | /title.py | 99398ec172c4d9f8c44f86ed97141c42dd039b71 | [] | no_license | sundharparamasivam/playerset1 | 5afc2e90b392301a4b9c9b8d39a3b0095a2a5eb7 | 4e135becc94f7f69e7db4f4c5900ebd188c135b6 | refs/heads/master | 2020-06-17T05:17:57.613846 | 2019-08-09T05:43:11 | 2019-08-09T05:43:11 | 195,809,892 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 52 | py | su=list(input().split())
print("".join(su.title()))
| [
"noreply@github.com"
] | sundharparamasivam.noreply@github.com |
ae1e7ec5825d2a6fc3ae116e2bacde48835b0c45 | f12ca917e17c180458f64c149431374252eef1aa | /test/4_test/websoc/server.py | 671f6868b9b6419ae6a5e6fedd38a81e16ff3a60 | [] | no_license | vipulgpt10/eid-fall2018 | 47b0d50fe40728a987c65cd2cc5942dad844693e | 8db7be7a9c7b3124bf500caabff7307177fa66a3 | refs/heads/master | 2020-03-28T20:37:14.249249 | 2018-12-10T22:29:24 | 2018-12-10T22:29:24 | 149,087,963 | 0 | 0 | null | 2018-09-17T07:45:06 | 2018-09-17T07:45:06 | null | UTF-8 | Python | false | false | 1,137 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import tornado.web
import tornado.websocket
import tornado.ioloop
import tornado.options
import tornado.httpserver
import time
from tornado.options import define, options
define("port", default=3000 , help="run on the given port", type=int)
global n
n = 0
class Application(tornado.web.Application):
    """Tornado application that routes the root path to the WebSocket handler."""
    def __init__(self):
        # Single route: every connection to "/" is handled by MainHandler.
        handlers = [(r"/", MainHandler)]
        settings = dict(debug=True)
        tornado.web.Application.__init__(self, handlers, **settings)
class MainHandler(tornado.websocket.WebSocketHandler):
    """Echo WebSocket handler: sends every received message straight back."""
    def on_message(self, message):
        """Echo `message` back; after every third message, pause for a second."""
        global n
        # n is a module-level counter of messages across ALL connections.
        n += 1
        print('message received:')
        self.write_message(message)
        if n == 3:
            n = 0
            # NOTE(review): time.sleep blocks Tornado's single-threaded IOLoop,
            # freezing every connection for 1s -- presumably intentional test
            # scaffolding; confirm before reusing this handler elsewhere.
            time.sleep(1)
            #tornado.ioloop.IOLoop.instance().stop()
    def check_origin(self, origin):
        """Accept connections from any origin (disables origin checking)."""
        return True
    def open(self):
        """Log new client connections."""
        print("A client connected.")
    def on_close(self):
        """Log client disconnections."""
        print("A client disconnected")
def main():
    """Parse command-line options, then serve the app on the configured port."""
    tornado.options.parse_command_line()
    websocket_app = Application()
    websocket_app.listen(options.port)
    # Hand control to Tornado's event loop; blocks until the process is stopped.
    tornado.ioloop.IOLoop.instance().start()
if __name__ == "__main__":
    main()
| [
"vipul.gupta@colorado.edu"
] | vipul.gupta@colorado.edu |
47c2d02951db7604b35e5da5d690abc1744355f0 | 5cb8b2e8496a2e6d0cfa7f2bae26d43bce263632 | /example2.py | 820d5006d40bf4e7f280e7882c49bd00a38c06f4 | [] | no_license | Malak-Abdallah/Intro_to_python | 3745786e499c8d6a95c414d3e5d87d27e5332039 | 9dbd8fa6e52b3056ef5406ab1337291feefad8da | refs/heads/master | 2023-06-25T09:09:16.709645 | 2021-07-28T12:26:18 | 2021-07-28T12:26:18 | 383,780,163 | 0 | 1 | null | 2021-07-26T13:02:32 | 2021-07-07T11:48:29 | Python | UTF-8 | Python | false | false | 306 | py | if __name__ == '__main__':
    # Reads "<count> <limit>" on the first line, then <count> lines of ints;
    # sums the squares of each line's maximum and prints the sum only when it
    # is strictly below <limit>.
    x=list(map(int,input().split(" ")))
    lists={}
    for i in range(x[0]):
        lists[i]=list(map(int,input().split(" ")))
        # Descending sort puts each line's maximum at index 0.
        lists[i].sort(reverse=True)
    num=0
    for i in range(x[0]):
        num = num+ lists[i][0] ** 2
    # NOTE(review): nothing is printed when num >= x[1]; if the exercise
    # intended the opposite comparison, confirm against its specification.
    if num <x[1]:
        print(num)
| [
"malkobeidallah@gmail.com"
] | malkobeidallah@gmail.com |
7e28e8e2b39ff6c199ff679c531e0392c8edd814 | 447cf5b4c23033306e0880e4b9ab8df2c7ffb633 | /src/neutralisers/seq2seq/train.py | 4ff5dda4a2b4276b6b1e3b38ed94aec04a2d8750 | [] | no_license | PaulB99/MABEL | 1658129a2c5554b3ac4ebae484c9a2f98ceff602 | 3a909a47bc2c52f63101d33958b4d12fccf8f658 | refs/heads/master | 2023-07-02T06:24:10.682942 | 2021-08-12T11:28:10 | 2021-08-12T11:28:10 | 383,640,247 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,873 | py | import torch
import torch.nn as nn
import model as md
import torch.optim as optim
import pandas as pd
import tokeniser
from transformers import BertTokenizer
import time
import matplotlib.pyplot as plt
# Device
device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
def train_step(model, input_tensor, target_tensor, optimiser, criterion):
    """Run one optimisation step on a single (input, target) pair.

    Parameters
    ----------
    model : callable; ``model(input_tensor, target_tensor)`` must return
        per-position predictions indexable by target position (as produced
        by the seq2seq model this file trains -- confirm shape if reused).
    input_tensor : source token ids.
    target_tensor : target token ids; the first dimension is the sequence
        length the loss is summed over.
    optimiser : torch optimiser over ``model``'s parameters.
    criterion : loss comparing one position's prediction to its target.

    Returns
    -------
    float
        The loss summed over all target positions for this example.
    """
    optimiser.zero_grad()
    output = model(input_tensor, target_tensor)
    # Accumulate the loss position-by-position over the target sequence.
    loss = 0
    target_length = target_tensor.shape[0]
    for i in range(target_length):
        loss += criterion(output[i], target_tensor[i])
    loss.backward()
    optimiser.step()
    # .item() detaches to a plain float so callers don't retain the graph.
    return loss.item()
def main():
    """Train the mini seq2seq neutraliser, save its weights and a loss plot."""
    # Load in data
    data_path = '../../../data/datasets/main/train_neutralisation.csv'
    data_df = pd.read_csv(data_path, header=None, skiprows=1, names=['text', 'target'])
    #tok = tokeniser.tokeniser(device, data_path)
    tok = BertTokenizer.from_pretrained('bert-base-uncased')
    # Vocabulary size of bert-base-uncased; must match the tokenizer above.
    lang_size = 30522
    print('Tokeniser initialised of size {}'.format(lang_size))
    model = md.seq2seq(device, lang_size).to(device)
    model.train()
    print('Model initialised')
    optimiser = optim.Adam(model.parameters(), lr=3e-5)
    criterion = nn.CrossEntropyLoss()
    total_loss_iterations = 0
    num_epochs = 11
    start_time = time.perf_counter()
    loss_vals = []
    loss_points = []
    for i in range(num_epochs): #num_epochs
        j=0
        # Batch size is effectively 1: each row is tokenised and trained on
        # individually, reshaped to (seq_len, 1).
        for index, row in data_df.iterrows():
            input_tensor = tok.encode(row['text'], return_tensors="pt")[0].to(device)
            input_tensor = input_tensor.view(-1, 1)
            target_tensor = tok.encode(row['target'], return_tensors="pt")[0].to(device)
            target_tensor = target_tensor.view(-1, 1)
            loss = train_step(model, input_tensor, target_tensor, optimiser, criterion)
            total_loss_iterations += loss
            # Log a running average every 500 rows (skipping the j == 0 point,
            # where the average would cover a single step).
            if j % 500 == 0:
                average_loss= total_loss_iterations / 500
                if j != 0:
                    loss_vals.append(average_loss)
                    # 70000 presumably equals the rows per epoch, giving a
                    # global step index for the plot -- TODO confirm.
                    loss_points.append(j+(70000*i))
                total_loss_iterations = 0
                print('%d %.4f' % (j, average_loss))
            j+=1
    end_time = time.perf_counter()
    print('Mini seq2seq model trained in {}'.format(end_time-start_time))
    torch.save(model.state_dict(), '../../../cache/neutralisers/seq2seq.pt')
    print('Model saved!')
    # Save loss graph
    plt.plot(loss_points, loss_vals)
    plt.xlabel('Training steps')
    plt.ylabel('Training loss')
    plt.title('Training loss of seq2seq model')
    plt.savefig('loss_graph.png')
if __name__ == "__main__":
main() | [
"paulbyrne99@rocketmail.com"
] | paulbyrne99@rocketmail.com |
1600f351dbbca8f6da3f35d1a196503aafe8b954 | 647c290c6fd5b2e39ac1e546635c9c46d5477418 | /mysite1/settings.py | 303672ce28e2752d6eb5442903933e65f209f45c | [] | no_license | zeeshan3534/textutlis.in | d8e0b359f06c914d1917b0701117051859ca641c | ddd322ad8912884f5b4a469bd785129a5fa15e6e | refs/heads/master | 2022-12-18T14:27:27.011385 | 2020-09-23T14:42:00 | 2020-09-23T14:42:00 | 297,999,541 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,196 | py | """
Django settings for mysite1 project.
Generated by 'django-admin startproject' using Django 3.1.1.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.1/ref/settings/
"""
from pathlib import Path
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'e1m$we3kv-8dwcl9^$#ls$__0nlkdxi6u8fduje$5@c9)y_fd2'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'mysite1.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': ['templates'],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'mysite1.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': BASE_DIR / 'db.sqlite3',
}
}
# Password validation
# https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.1/howto/static-files/
STATIC_URL = '/static/'
| [
"noreply@github.com"
] | zeeshan3534.noreply@github.com |
28ebcfe613a2a075aea560cfdadf3958e4417f1d | c4c472cd291dfb8829616e8e4a2a4878bf0f3390 | /fibonachi.py | 029cb413c070597d5d639de2b3e6a898ec500aa3 | [] | no_license | DanisAvko/algoritmy | 39c376f1fe375d7bf938647ffed723cff23b92c5 | b85b45353f8b0fc2cdc573f874098827c6d980d3 | refs/heads/master | 2020-04-26T10:29:35.983438 | 2019-05-05T13:21:50 | 2019-05-05T13:21:50 | 173,487,961 | 0 | 1 | null | 2019-03-02T19:30:48 | 2019-03-02T19:07:26 | null | UTF-8 | Python | false | false | 333 | py | """без рекурсии"""
def fib(n):
    """Return the n-th Fibonacci number iteratively (fib(1) == fib(2) == 1).

    The original raised NameError for n <= 2 because the loop body that
    defined the returned variable never ran; small n is now handled
    explicitly, and invalid n raises ValueError instead.
    """
    if n < 1:
        raise ValueError("n must be a positive integer")
    if n <= 2:
        return 1
    fib1 = fib2 = 1
    for _ in range(n - 2):
        fib1, fib2 = fib2, fib1 + fib2
    return fib2
print(fib(30))
"""с рекурсией"""
def fib(n):
    """Return the n-th Fibonacci number (1-indexed) via naive recursion."""
    if n == 1 or n == 2:
        return 1
    return fib(n - 2) + fib(n - 1)
print (fib(30)) | [
"valievdanis@bk.ru"
] | valievdanis@bk.ru |
2a1ed1e43f9e463149478c027499a2d168d8a840 | 0d2fedc7f567963c6e80fa7b37a9c618ea8dc597 | /pages/urls.py | 6764a0487fc896d9a366114091b251cb904cd706 | [
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | akashdraut/btre_project | 7edbdc5cc91622af350629c52ca787c2a2224673 | baacab2b9903825904f6fa39d50934c8fb4f5dee | refs/heads/master | 2022-05-02T21:43:40.119656 | 2020-06-03T12:56:32 | 2020-06-03T12:56:32 | 256,933,633 | 0 | 0 | MIT | 2022-04-22T23:15:05 | 2020-04-19T06:49:27 | CSS | UTF-8 | Python | false | false | 166 | py | from django.urls import path
from . import views
urlpatterns = [
path('', views.index, name='index'),
path('about', views.about, name='about'),
]
| [
"adraut93@gmail.com"
] | adraut93@gmail.com |
efde0bd1158f2af6861f6ccd43d34cadf581acf1 | c0e7f3e1801bfd99c5fc86067ea552a8436c47dd | /junk_drawer/store.py | 91bbab4f943ccdcd3bb220b99dccdc8e8a27896d | [] | no_license | Opentrons/junk-drawer | 27a463053dbb45e56902f3d8286f57f931631f59 | 32ca381f754489b476e26fbf18001bbf98881ea9 | refs/heads/main | 2023-08-29T06:11:09.028658 | 2021-10-08T18:03:20 | 2021-10-08T18:03:20 | 298,059,402 | 0 | 0 | null | 2023-05-23T16:17:06 | 2020-09-23T18:26:53 | Python | UTF-8 | Python | false | false | 5,183 | py | """Store module for junk_drawer."""
from __future__ import annotations
from logging import getLogger
from typing import Optional
from .read_store import SCHEMA_VERSION_KEY, ReadStore, ModelT
from .filesystem import (
PathNotFoundError,
RemoveFileError,
FileEncodeError,
FileWriteError,
)
log = getLogger(__name__)
class Store(ReadStore[ModelT]):
    """Read/write collection store: create, read, update, and delete items."""

    async def put(self, item: ModelT, key: Optional[str] = None) -> Optional[str]:
        """Write a single item to the store.

        Returns the key the item was stored under, or None when encoding or
        writing fails and file errors are configured to be ignored.
        """
        resolved_key = self._get_item_key(item, key)
        target_path = self._get_key_path(resolved_key)
        try:
            await self._filesystem.write_json(
                target_path, item, encode_json=self.encode_json
            )
        except (FileWriteError, FileEncodeError) as exc:
            self._maybe_raise_file_error(exc)
            return None
        return resolved_key

    def put_sync(self, item: ModelT, key: Optional[str] = None) -> Optional[str]:
        """Write a single item to the store (synchronous version of `put`)."""
        resolved_key = self._get_item_key(item, key)
        target_path = self._get_key_path(resolved_key)
        try:
            self._filesystem.sync.write_json(
                target_path, item, encode_json=self.encode_json
            )
        except (FileWriteError, FileEncodeError) as exc:
            self._maybe_raise_file_error(exc)
            return None
        return resolved_key

    async def ensure(self, default_item: ModelT, key: Optional[str] = None) -> ModelT:
        """Return the item stored at `key`, writing `default_item` first if absent.

        Shortcut for a `get` followed by a `put` when the `get` finds nothing.
        """
        item_key = self._get_item_key(default_item, key)
        existing = await self.get(item_key)
        if existing is not None:
            return existing
        await self.put(default_item, key)
        return default_item

    def ensure_sync(self, default_item: ModelT, key: Optional[str] = None) -> ModelT:
        """Like `ensure`, but synchronous."""
        item_key = self._get_item_key(default_item, key)
        existing = self.get_sync(item_key)
        if existing is not None:
            return existing
        self.put_sync(default_item, key)
        return default_item

    async def delete(self, key: str) -> Optional[str]:
        """Remove the item stored at `key`.

        Returns `key` on success; None when nothing was stored there (or the
        removal failed and file errors are configured to be ignored).
        """
        try:
            await self._filesystem.remove(self._get_key_path(key))
        except (PathNotFoundError, RemoveFileError) as exc:
            self._maybe_raise_file_error(exc)
            return None
        return key

    def delete_sync(self, key: str) -> Optional[str]:
        """Remove the item stored at `key` (synchronous version of `delete`)."""
        try:
            self._filesystem.sync.remove(self._get_key_path(key))
        except (PathNotFoundError, RemoveFileError) as exc:
            self._maybe_raise_file_error(exc)
            return None
        return key

    async def delete_store(self) -> None:
        """Remove the store's backing directory along with every stored item."""
        return await self._filesystem.remove_dir(self._directory)

    def delete_store_sync(self) -> None:
        """Remove the store's backing directory (synchronous version)."""
        return self._filesystem.sync.remove_dir(self._directory)

    def encode_json(self, item: ModelT) -> str:
        """Serialize a model instance to JSON, stamping in the schema version."""
        payload = item.dict()
        payload[SCHEMA_VERSION_KEY] = len(self._migrations)
        # Serialize through the model's configured dumper and JSON encoder so
        # any Pydantic serialization config on the model is honored.
        # (BaseModel.__json_encoder__ is undocumented but needed for this;
        # covered by integration tests.)
        return item.__config__.json_dumps(payload, default=item.__json_encoder__)

    def parse_json(self, data: str) -> ModelT:
        """Deserialize JSON into a model, migrating old schema versions forward."""
        raw = self._schema.__config__.json_loads(data)
        stored_version = raw.pop(SCHEMA_VERSION_KEY, 0)
        # Apply only the migrations this document has not yet been through.
        for migrate in self._migrations[stored_version:]:
            raw = migrate(raw)
        return self._schema.parse_obj(raw)
| [
"noreply@github.com"
] | Opentrons.noreply@github.com |
3b92b8f60bbeca7d4bab2c4a990b932bcb2c54bf | c02ed8f12fe1d838acdc12ab02514eece1941fde | /carshop/wsgi.py | fe7f95465bcd820646e0a68aff6baf95cf44bbed | [] | no_license | Serik310/carmarket | b1333e95915b6a5b0d3d1fd90c8822fcce7695ca | 7adb8e28e5476071d012e82718715c386f585fde | refs/heads/master | 2023-01-22T16:38:06.549436 | 2020-12-08T16:10:35 | 2020-12-08T16:10:35 | 303,439,545 | 0 | 0 | null | 2020-12-08T16:10:36 | 2020-10-12T15:48:08 | Python | UTF-8 | Python | false | false | 391 | py | """
WSGI config for carshop project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'carshop.settings')
application = get_wsgi_application()
| [
"serik31000@gmail.com"
] | serik31000@gmail.com |
31f1f3cf2bad88a53e42e9b081d1803a77815b46 | b4e6c39c01db5c59832b810c1a72ccfe427ea87a | /compareList.py | 2f436b876b0b5c4c3c82247be7b9ba19626c3e6c | [] | no_license | plpla/Scripts | 3df19422fd262115a9c0c9d456d42a16ec069da5 | 661637c6a438891af03f646fe481ffe8b4c08f99 | refs/heads/master | 2021-01-20T12:00:53.904242 | 2013-01-16T15:27:02 | 2013-01-16T15:27:02 | 7,648,105 | 1 | 0 | null | null | null | null | ISO-8859-1 | Python | false | false | 602 | py | #! /usr/bin/env python
# -*- coding:Utf-8 -*-
#Author: Pier-Luc Plante
#License:GPL
"""
This script compare 2 list of entry and output a list of the ones that are only in the second file
How to use:
python compareList file1 file2
"""
import sys
if len(sys.argv) != 3:
print(__doc__)
sys.exit(1);
list1=[];
print("Debut de la création de la liste de référence")
for i in open(sys.argv[1]):
list1.append(i)
print("Fin de la création de la liste de référence")
a=0
for entry in open(sys.argv[2]):
if entry in list1:
continue
else:
print(entry)
| [
"pier-luc.plante.1@ulaval.ca"
] | pier-luc.plante.1@ulaval.ca |
5ec219db8c8838935154f8cebad0789540c0d348 | 94ce54fb099265c40939fb63b9935410742d446f | /_prog/_python/_aulas/_aida/_aulas/dict.py | db3e6b54a837c6cc493428f41906ea8d7c3cb17a | [] | no_license | habraino/meus-scripts | f98d0780563d3228fa1451978adacf69a27e43e7 | 158fc463cbf3d2d99f0d5f584b11c777133a4279 | refs/heads/master | 2023-04-09T09:27:05.643106 | 2021-04-14T12:13:34 | 2021-04-14T12:13:34 | 357,892,446 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 153 | py | a = {"livro":"romance", "nome":"Gesiney","morada":"Cadão"}
for b in a.keys():
print(b)
print('-------------')
for c in a.values():
print(c)
| [
"habraino12@gmail.com"
] | habraino12@gmail.com |
375dc7a9f08e87def6b9d83af33b3624c9f7ab69 | 56df6683865fd9319b389afd6dd4a922299da593 | /source/scripts/python/host/source/host.py.in | 0e4829aa2a05b70be869733b69853d892d4ff567 | [
"Python-2.0",
"GPL-2.0-or-later",
"MPL-1.1",
"NCSA",
"LicenseRef-scancode-proprietary-license",
"GPL-1.0-or-later",
"BSD-3-Clause",
"MPL-2.0",
"Ruby",
"BSD-2-Clause",
"MIT",
"Apache-2.0"
] | permissive | metacall/core | 4f36fe0b13924853aab6d0f053285b649398cc1d | 419ffb573b17501c91662f0f161032bb19ea1ab3 | refs/heads/develop | 2023-08-23T10:19:30.898387 | 2023-08-10T18:39:08 | 2023-08-10T18:39:08 | 163,221,062 | 1,391 | 167 | Apache-2.0 | 2023-09-13T23:49:43 | 2018-12-26T22:02:57 | C | UTF-8 | Python | false | false | 957 | in | #!/usr/bin/env python3
import sys
# Insert Python Port folder first in the system path list
sys.path.insert(0, '@PROJECT_METACALL_PORTS_DIRECTORY@')
from metacall import metacall, metacall_load_from_memory
script = '''#!/usr/bin/env node
'use strict';
const path = require('path');
/* Load MetaCall addon */
const addon = (() => {
try {
/* This forces metacall port to be run always by metacall cli */
return process._linkedBinding('node_loader_port_module');
} catch (e) {
console.error('MetaCall failed to load, probably you are importing this file from NodeJS directly.');
console.error('You should use MetaCall CLI instead. Install it from: https://github.com/metacall/install');
throw e;
}
})();
function b() {
return addon.metacall('c');
}
module.exports = {
b
};
'''
metacall_load_from_memory('node', script)
def a():
    """Invoke the NodeJS function ``b`` via MetaCall and return its result."""
    value = metacall('b')
    print('Result call from b:')
    print(value)
    return value
def c():
    """Constant callback invoked from the NodeJS side; always yields 3.0."""
    result = 3.0
    return result
| [
"vic798@gmail.com"
] | vic798@gmail.com |
1da8e69eee5613a50da1c487c248b17fc70d2da6 | 9bc15f431ab84c26ad7d6810f9a25d045bfe9b4f | /do_OPTICS.py | 1e5b89eb16da1496f281a77902c572cad528fbfe | [] | no_license | genomexyz/punya_orang | 8fbd9ca253f01577c5e8e506f40a410888043035 | 0bfbea9f6d835d82ec1b26d7181b864f92bfea42 | refs/heads/master | 2020-09-28T13:59:52.357483 | 2019-12-09T05:33:05 | 2019-12-09T05:33:05 | 226,791,798 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,186 | py | #!/usr/bin/python3
from sklearn.cluster import DBSCAN, OPTICS
from sklearn import metrics
#import tkinter as tk
import numpy as np
import pymongo
import sys
import matplotlib.gridspec as gridspec
import matplotlib.pyplot as plt
import pandas as pd
import json
#setting
# Default query parameters (Indonesian: tahun=year, bulan=month, hari=day,
# jam=hour, menit=minute); overridden by CLI arguments below.
tahun = 2019
bulan = 6
hari = 5
jam = 3
menit = 0
jenis_petir1 = 'CGPositive'
jenis_petir2 = '0'
# Lightning type codes (jenis petir):
#   0 = CGPositive
#   1 = CGNegative
#   2 = between cloud (cloud-to-cloud / in-cloud)
def convert(o):
    """``json.dumps`` fallback: coerce numpy int64 scalars to plain ints.

    Any other type is rejected with TypeError, matching the ``default=``
    contract of the json encoder.
    """
    if not isinstance(o, np.int64):
        raise TypeError
    return int(o)
# Override the defaults from the command line:
#   argv[1..5] = year, month, day, hour, minute; argv[6..7] = lightning types.
if len(sys.argv) > 7:
    tahun = int(sys.argv[1])
    bulan = int(sys.argv[2])
    hari = int(sys.argv[3])
    jam = int(sys.argv[4])
    menit = int(sys.argv[5])
    jenis_petir1 = sys.argv[6]
    jenis_petir2 = sys.argv[7]
elif len(sys.argv) > 6:
    # Only one lightning type supplied.
    tahun = int(sys.argv[1])
    bulan = int(sys.argv[2])
    hari = int(sys.argv[3])
    jam = int(sys.argv[4])
    menit = int(sys.argv[5])
    jenis_petir1 = sys.argv[6]
# Fetch lightning-strike documents for the requested timestamp from MongoDB.
client = pymongo.MongoClient()
collection = client['geof_achieve']['petir']
if len(sys.argv) > 7:
    # Two explicit lightning types: match either.
    documents = list(collection.find({'tahun' : tahun, 'bulan' : bulan, 'hari' : hari, 'jam' : jam, 'menit' : menit,
        '$or' : [{'jenis_petir' : jenis_petir1}, {'jenis_petir' : jenis_petir2}]}))
elif len(sys.argv) > 6:
    # Single explicit lightning type.
    documents = list(collection.find({'tahun' : tahun, 'bulan' : bulan, 'hari' : hari, 'jam' : jam, 'menit' : menit,
        'jenis_petir' : jenis_petir1}))
else:
    # No CLI override: use the two default types.
    documents = list(collection.find({'tahun' : tahun, 'bulan' : bulan, 'hari' : hari, 'jam' : jam, 'menit' : menit,
        '$or' : [{'jenis_petir' : jenis_petir1}, {'jenis_petir' : jenis_petir2}]}))
# Build an (n, 2) array of [longitude, latitude] coordinates.
latlon = []
for i in range(len(documents)):
    lat = documents[i]['latitude']
    lon = documents[i]['longitude']
    latlon.append([lon, lat])
latlon = np.asarray(latlon)
# Density-based clustering of strike locations.
clust = OPTICS(eps=0.5, min_samples=10).fit(latlon)
# --- Reachability plot of the OPTICS result ---
space = np.arange(len(latlon))
# Reorder reachability/labels by the OPTICS processing order for plotting.
reachability = clust.reachability_[clust.ordering_]
labels = clust.labels_[clust.ordering_]
# Labels in original sample order (used later when saving to MongoDB).
true_label = clust.labels_
print(true_label)
fig = plt.figure(figsize=(10, 10))
G = gridspec.GridSpec(2, 2)
ax1 = plt.subplot(G[0, :])
ax2 = plt.subplot(G[1, :])
# Reachability plot: one color per cluster id 0..4, noise (-1) in black.
colors = ['g.', 'r.', 'b.', 'y.', 'c.']
for klass, color in zip(range(0, 5), colors):
    Xk = space[labels == klass]
    Rk = reachability[labels == klass]
    ax1.plot(Xk, Rk, color, alpha=1)
ax1.plot(space[labels == -1], reachability[labels == -1], 'k.', alpha=0.3)
# Horizontal guide lines at reachability 2.0 and 0.5.
ax1.plot(space, np.full_like(space, 2., dtype=float), 'k-', alpha=0.5)
ax1.plot(space, np.full_like(space, 0.5, dtype=float), 'k-.', alpha=0.5)
ax1.set_ylabel('Reachability (epsilon distance)')
ax1.set_title('Reachability Plot')
# Zero-pad date parts for filename/title use. NOTE: these assignments turn
# the int variables into strings; the save step below converts them back
# with int(...).
if bulan < 10:
    bulan = '0'+str(bulan)
if hari < 10:
    hari = '0'+str(hari)
if jam < 10:
    jam = '0'+str(jam)
if menit < 10:
    menit = '0'+str(menit)
if jenis_petir1 == '2':
    jenis_petir1 = 'Clout-to-Cloud_In-Cloud'
# OPTICS
# (Disabled) scatter plot of the clustered points and PNG/JSON export.
#colors = ['g.', 'r.', 'b.', 'y.', 'c.']
#for klass, color in zip(range(0, 5), colors):
#    Xk = latlon[clust.labels_ == klass]
#    ax2.plot(Xk[:, 0], Xk[:, 1], color, alpha=0.8)
#ax2.plot(latlon[clust.labels_ == -1, 0], latlon[clust.labels_ == -1, 1], 'k+', alpha=0.3)
#ax2.set_title('Automatic Clustering\nOPTICS')
#ax2.set_title('Clustering %s%s%s%s%s\n%s'%(tahun, bulan, hari, jam, menit, jenis_petir1))
#plt.tight_layout()
#fig.savefig('output/%s%s%s%s%s-%s.png'%(tahun, bulan, hari, jam, menit, jenis_petir1), bbox_inches='tight')
#save JSON
#total_cluster = np.max(clust.labels_)+1
#group_cluster = {}
#for i in range(len(latlon)):
#    group_cluster['%s,%s'%(latlon[i,1], latlon[i,0])] = clust.labels_[i]
#json_save = json.dumps(group_cluster, default=convert)
#data_save = open('output/%s%s%s%s%s-%s.json'%(tahun, bulan, hari, jam, menit, jenis_petir1), 'w')
#data_save.write(json_save)
# Persist one document per strike with its assigned cluster index.
coll_save = client['geof_achieve']['cluster']
array_dict_save = []
for i in range(len(true_label)):
    data_save = {'tahun' : int(tahun), 'bulan' : int(bulan), 'hari' : int(hari), 'jam' : int(jam), 'menit' : int(menit),
        'latitude' : float(latlon[i,1]), 'longitude' : float(latlon[i,0]), 'jenis_petir' : jenis_petir1, 'cluster_index' : int(true_label[i])}
    array_dict_save.append(data_save)
saved_data = coll_save.insert_many(array_dict_save)
print(saved_data)
"nayrhum4code@gmail.com"
] | nayrhum4code@gmail.com |
53d2fbfd9f3c99ec42a32fc5ee87f71345a8cd07 | 14e7058adf766352a0b90b66b7dcf887105a481c | /djangoappengine/settings_base.py | b62b7cbd0c0dfb09d8af41eeb26eecfd9cb6af34 | [
"BSD-2-Clause"
] | permissive | brunogamacatao/portalsaladeaula | 2b7f07f07c2518dd359f043483fbb27417f62aaf | 9429e485aa37ffea3208339a807032e9230a3c84 | refs/heads/master | 2020-12-29T01:42:18.594281 | 2012-06-22T12:24:44 | 2012-06-22T12:24:44 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,102 | py | # Initialize App Engine SDK if necessary
try:
from google.appengine.api import api_proxy_stub_map
except ImportError:
from .boot import setup_env
setup_env()
from djangoappengine.utils import on_production_server, have_appserver
DEBUG = not on_production_server
TEMPLATE_DEBUG = DEBUG
ROOT_URLCONF = 'urls'
DATABASES = {
'default': {
'ENGINE': 'djangoappengine.db',
},
}
if on_production_server:
EMAIL_BACKEND = 'djangoappengine.mail.AsyncEmailBackend'
else:
EMAIL_BACKEND = 'djangoappengine.mail.EmailBackend'
PREPARE_UPLOAD_BACKEND = 'djangoappengine.storage.prepare_upload'
SERVE_FILE_BACKEND = 'djangoappengine.storage.serve_file'
DEFAULT_FILE_STORAGE = 'djangoappengine.storage.BlobstoreStorage'
FILE_UPLOAD_MAX_MEMORY_SIZE = 1024 * 1024
FILE_UPLOAD_HANDLERS = (
'djangoappengine.storage.BlobstoreFileUploadHandler',
'django.core.files.uploadhandler.MemoryFileUploadHandler',
)
CACHE_BACKEND = 'memcached://?timeout=0'
SESSION_ENGINE = 'django.contrib.sessions.backends.cached_db'
if not on_production_server:
INTERNAL_IPS = ('127.0.0.1',)
| [
"brunogamacatao@gmail.com"
] | brunogamacatao@gmail.com |
080bcf39abb2b1192174b56c122775222dc094e5 | 55c250525bd7198ac905b1f2f86d16a44f73e03a | /Python/Games/Pygame/pygame_widgets/widgets/holder.py | 96b94e5e4579dd18844513a989799935872a599d | [] | no_license | NateWeiler/Resources | 213d18ba86f7cc9d845741b8571b9e2c2c6be916 | bd4a8a82a3e83a381c97d19e5df42cbababfc66c | refs/heads/master | 2023-09-03T17:50:31.937137 | 2023-08-28T23:50:57 | 2023-08-28T23:50:57 | 267,368,545 | 2 | 1 | null | 2022-09-08T15:20:18 | 2020-05-27T16:18:17 | null | UTF-8 | Python | false | false | 129 | py | version https://git-lfs.github.com/spec/v1
oid sha256:72853c292faeedcdb3d5218c2cef2738641f5b2b222c1b069415a537cc49181f
size 7770
| [
"nateweiler84@gmail.com"
] | nateweiler84@gmail.com |
b754c81abae1b1e5debc3af1df9c99ecc7c13ff7 | 5ec5bfebdaa69d86594bbe41cccecdd66749d481 | /tests/integration/engine_objects/test_engine_objects_integration.py | 98944ace5d616b7d7088dd77de1b9dc0a680fd52 | [
"MIT"
] | permissive | dutradda/myreco | 7ae64fe485d1f78b0b920a0a5a5333520c4f1072 | bbc3ff8f734a68c4b9650348fdcf6a7605673b61 | refs/heads/master | 2020-04-18T05:25:56.489964 | 2018-10-09T21:17:26 | 2018-10-09T21:17:42 | 67,266,853 | 8 | 2 | null | null | null | null | UTF-8 | Python | false | false | 27,200 | py | # MIT License
# Copyright (c) 2016 Diogo Dutra <dutradda@gmail.com>
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import asyncio
import tempfile
from datetime import datetime
from time import sleep
from unittest import mock
from swaggerit.models._base import _all_models
from tests.integration.fixtures import TopSellerArrayTest
import pytest
import ujson
@pytest.fixture
def init_db(models, session, api):
    """Seed the test database with one user, store, item type, strategy and
    engine object; yield a temp-dir name and clean everything up afterwards."""
    user = {
        'name': 'test',
        'email': 'test',
        'password': 'test',
        'admin': True
    }
    session.loop.run_until_complete(models['users'].insert(session, user))
    tmp = tempfile.TemporaryDirectory()
    store = {
        'name': 'test',
        'country': 'test',
        'configuration': {}
    }
    session.loop.run_until_complete(models['stores'].insert(session, store))
    item_type = {
        'name': 'products',
        'schema': {
            'type': 'object',
            'id_names': ['sku'],
            'properties': {'sku': {'type': 'string'}}
        },
        'stores': [{'id': 1}]
    }
    session.loop.run_until_complete(models['item_types'].insert(session, item_type))
    strategy = {
        'name': 'test',
        'class_module': 'tests.integration.fixtures',
        'class_name': 'EngineStrategyTest'
    }
    session.loop.run_until_complete(models['engine_strategies'].insert(session, strategy))
    engine_object = {
        'name': 'Top Seller Object',
        'type': 'top_seller_array',
        'configuration': {'days_interval': 7},
        'store_id': 1,
        'item_type_id': 1,
        'strategy_id': 1
    }
    session.loop.run_until_complete(models['engine_objects'].insert(session, engine_object))
    yield tmp.name
    # Teardown: drop the temp dir and the dynamically registered model class.
    tmp.cleanup()
    _all_models.pop('store_items_products_1', None)
class TestEngineObjectsModelPost(object):
    """POST /engine_objects/ collection endpoint."""
    async def test_post_without_body(self, init_db, client, headers, headers_without_content_type):
        """A POST with no body is rejected with 400."""
        client = await client
        resp = await client.post('/engine_objects/', headers=headers)
        assert resp.status == 400
        assert (await resp.json()) == {'message': 'Request body is missing'}
    async def test_post_with_invalid_body(self, init_db, client, headers, headers_without_content_type):
        """Schema validation failure returns the JSON-schema error payload."""
        client = await client
        resp = await client.post('/engine_objects/', headers=headers, data='[{}]')
        assert resp.status == 400
        assert (await resp.json()) == {
            'message': "'name' is a required property. "\
                "Failed validating instance['0'] for schema['items']['required']",
            'schema': {
                'type': 'object',
                'additionalProperties': False,
                'required': ['name', 'type', 'configuration', 'strategy_id', 'item_type_id', 'store_id'],
                'properties': {
                    'name': {'type': 'string'},
                    'type': {'type': 'string'},
                    'strategy_id': {'type': 'integer'},
                    'item_type_id': {'type': 'integer'},
                    'store_id': {'type': 'integer'},
                    'configuration': {}
                }
            }
        }
    async def test_post(self, init_db, client, headers, headers_without_content_type):
        """A valid POST creates the object (id 2; id 1 is seeded by init_db)."""
        client = await client
        body = [{
            'name': 'Top Seller Object Test',
            'type': 'top_seller_array',
            'configuration': {'days_interval': 7},
            'store_id': 1,
            'item_type_id': 1,
            'strategy_id': 1
        }]
        resp = await client.post('/engine_objects/', headers=headers, data=ujson.dumps(body))
        resp_json = (await resp.json())
        # Server-populated relations are copied back so the dicts compare equal.
        body[0]['id'] = 2
        body[0]['store'] = resp_json[0]['store']
        body[0]['strategy'] = resp_json[0]['strategy']
        body[0]['item_type'] = resp_json[0]['item_type']
        assert resp.status == 201
        assert resp_json == body
    async def test_post_with_invalid_grant(self, client):
        """An invalid Authorization header yields 401."""
        client = await client
        body = [{
            'name': 'Top Seller Object Test',
            'type': 'top_seller_array',
            'configuration': {'days_interval': 7},
            'store_id': 1,
            'item_type_id': 1,
            'strategy_id': 1
        }]
        resp = await client.post('/engine_objects/', headers={'Authorization': 'invalid'}, data=ujson.dumps(body))
        assert resp.status == 401
        assert (await resp.json()) == {'message': 'Invalid authorization'}
class TestEngineObjectsModelGet(object):
    """GET /engine_objects/ collection endpoint (filtered by query string)."""
    async def test_get_not_found(self, init_db, headers_without_content_type, client):
        """Filtering by a store with no objects returns 404."""
        client = await client
        resp = await client.get(
            '/engine_objects/?store_id=2&item_type_id=1&strategy_id=1',
            headers=headers_without_content_type
        )
        assert resp.status == 404
    async def test_get_invalid_with_body(self, init_db, headers, client):
        """GET must not carry a request body."""
        client = await client
        resp = await client.get(
            '/engine_objects/?store_id=1&item_type_id=1&strategy_id=1',
            headers=headers,
            data='{}'
        )
        assert resp.status == 400
        assert await resp.json() == {'message': 'Request body is not acceptable'}
    async def test_get_valid(self, init_db, headers, headers_without_content_type, client):
        """A matching filter returns the seeded object with expanded relations."""
        body = [{
            'name': 'Top Seller Object',
            'type': 'top_seller_array',
            'configuration': {"days_interval": 7},
            'store_id': 1,
            'item_type_id': 1,
            'strategy_id': 1,
            'id': 1,
            'store': {
                'id': 1,
                'name': 'test',
                'country': 'test',
                'configuration': {}
            },
            'item_type': {
                'id': 1,
                'store_items_class': None,
                'stores': [{
                    'configuration': {},
                    'country': 'test',
                    'id': 1,
                    'name': 'test'
                }],
                'name': 'products',
                'schema': {
                    'type': 'object',
                    'id_names': ['sku'],
                    'properties': {'sku': {'type': 'string'}}
                },
                'available_filters': [{'name': 'sku', 'schema': {'type': 'string'}}]
            },
            'strategy': {
                'id': 1,
                'name': 'test',
                'class_module': 'tests.integration.fixtures',
                'class_name': 'EngineStrategyTest',
                'object_types': ['top_seller_array']
            }
        }]
        client = await client
        resp = await client.get(
            '/engine_objects/?store_id=1&item_type_id=1&strategy_id=1',
            headers=headers_without_content_type
        )
        assert resp.status == 200
        assert await resp.json() == body
class TestEngineObjectsModelUriTemplatePatch(object):
    """PATCH /engine_objects/{id}/ item endpoint."""
    async def test_patch_without_body(self, init_db, client, headers, headers_without_content_type):
        """An empty body is rejected with 400."""
        client = await client
        resp = await client.patch('/engine_objects/1/', headers=headers, data='')
        assert resp.status == 400
        assert (await resp.json()) == {'message': 'Request body is missing'}
    async def test_patch_with_invalid_body(self, init_db, client, headers, headers_without_content_type):
        """An empty object violates minProperties of the patch schema."""
        client = await client
        resp = await client.patch('/engine_objects/1/', headers=headers, data='{}')
        assert resp.status == 400
        assert (await resp.json()) == {
            'message': '{} does not have enough properties. '\
                "Failed validating instance for schema['minProperties']",
            'schema': {
                'type': 'object',
                'additionalProperties': False,
                'minProperties': 1,
                'properties': {
                    'name': {'type': 'string'},
                    'configuration': {}
                }
            }
        }
    async def test_patch_with_invalid_config(self, init_db, client, headers, headers_without_content_type):
        """An empty configuration misses the required days_interval key."""
        client = await client
        body = {
            'configuration': {}
        }
        resp = await client.patch('/engine_objects/1/', headers=headers, data=ujson.dumps(body))
        assert resp.status == 400
        # NOTE(review): leftover debug print; harmless but could be removed.
        print(ujson.dumps(await resp.json(), indent=4))
        assert (await resp.json()) == {
            'message': "'days_interval' is a required property. "\
                "Failed validating instance for schema['required']",
            'schema': {
                'type': 'object',
                'required': ['days_interval'],
                'additionalProperties': False,
                'properties': {
                    'days_interval': {'type': 'integer'}
                }
            }
        }
    async def test_patch_not_found(self, init_db, client, headers, headers_without_content_type):
        """Patching a missing id returns 404."""
        client = await client
        body = {
            'name': 'Top Seller Object Test'
        }
        resp = await client.patch('/engine_objects/2/', headers=headers, data=ujson.dumps(body))
        assert resp.status == 404
    async def test_patch(self, init_db, client, headers, headers_without_content_type):
        """Create a second object, patch its name, and get the updated object back."""
        client = await client
        body = [{
            'name': 'Top Seller Object Test',
            'type': 'top_seller_array',
            'configuration': {'days_interval': 7},
            'store_id': 1,
            'item_type_id': 1,
            'strategy_id': 1
        }]
        resp = await client.post('/engine_objects/', headers=headers, data=ujson.dumps(body))
        obj = (await resp.json())[0]
        body = {
            'name': 'test2'
        }
        resp = await client.patch('/engine_objects/2/', headers=headers, data=ujson.dumps(body))
        obj['name'] = 'test2'
        assert resp.status == 200
        assert (await resp.json()) == obj
class TestEngineObjectsModelUriTemplateGet(object):
    """GET /engine_objects/{id}/ item endpoint."""
    async def test_get_with_body(self, init_db, headers, client):
        """GET must not carry a request body."""
        client = await client
        resp = await client.get('/engine_objects/1/', headers=headers, data='{}')
        assert resp.status == 400
        assert await resp.json() == {'message': 'Request body is not acceptable'}
    async def test_get_not_found(self, init_db, headers_without_content_type, client):
        """A missing id returns 404."""
        client = await client
        resp = await client.get('/engine_objects/2/', headers=headers_without_content_type)
        assert resp.status == 404
    async def test_get(self, init_db, headers, headers_without_content_type, client):
        """The seeded object is returned with its expanded relations."""
        client = await client
        resp = await client.get('/engine_objects/1/', headers=headers_without_content_type)
        body = {
            'name': 'Top Seller Object',
            'type': 'top_seller_array',
            'configuration': {"days_interval": 7},
            'store_id': 1,
            'item_type_id': 1,
            'strategy_id': 1,
            'id': 1,
            'store': {
                'id': 1,
                'name': 'test',
                'country': 'test',
                'configuration': {}
            },
            'item_type': {
                'id': 1,
                'store_items_class': None,
                'stores': [{
                    'configuration': {},
                    'country': 'test',
                    'id': 1,
                    'name': 'test'
                }],
                'name': 'products',
                'schema': {
                    'type': 'object',
                    'id_names': ['sku'],
                    'properties': {'sku': {'type': 'string'}}
                },
                'available_filters': [{'name': 'sku', 'schema': {'type': 'string'}}]
            },
            'strategy': {
                'id': 1,
                'name': 'test',
                'class_module': 'tests.integration.fixtures',
                'class_name': 'EngineStrategyTest',
                'object_types': ['top_seller_array']
            }
        }
        assert resp.status == 200
        assert await resp.json() == body
class TestEngineObjectsModelUriTemplateDelete(object):
    """DELETE /engine_objects/{id}/ item endpoint."""
    async def test_delete_with_body(self, init_db, client, headers):
        """DELETE must not carry a request body."""
        client = await client
        resp = await client.delete('/engine_objects/1/', headers=headers, data='{}')
        assert resp.status == 400
        assert (await resp.json()) == {'message': 'Request body is not acceptable'}
    async def test_delete_valid(self, init_db, client, headers, headers_without_content_type):
        """Deleting an existing object makes subsequent GETs return 404."""
        client = await client
        resp = await client.get('/engine_objects/1/', headers=headers_without_content_type)
        assert resp.status == 200
        resp = await client.delete('/engine_objects/1/', headers=headers_without_content_type)
        assert resp.status == 204
        resp = await client.get('/engine_objects/1/', headers=headers_without_content_type)
        assert resp.status == 404
def datetime_mock():
    """Return a mock datetime module whose ``now()`` is frozen at 1900-01-01."""
    frozen = mock.MagicMock()
    frozen.now.return_value = datetime(1900, 1, 1)
    return frozen
async def _wait_job_finish(client, headers_without_content_type, job_name='export'):
    """Poll the job-status endpoint until the job leaves the 'running' state.

    Returns the last response (status 'done' or 'error'). The job hash is the
    deterministic one produced by set_patches().
    """
    sleep(0.05)
    while True:
        resp = await client.get(
            '/engine_objects/1/{}?job_hash=6342e10bd7dca3240c698aa79c98362e'.format(job_name),
            headers=headers_without_content_type)
        if (await resp.json())['status'] != 'running':
            break
    return resp
def set_patches(monkeypatch):
    """Freeze job randomness and time so job hashes/timestamps are deterministic."""
    monkeypatch.setattr('swaggerit.models.orm._jobs_meta.random.getrandbits',
        mock.MagicMock(return_value=131940827655846590526331314439483569710))
    monkeypatch.setattr('swaggerit.models.orm._jobs_meta.datetime', datetime_mock())
class TestEngineObjectsModelsDataImporter(object):
    """POST/GET /engine_objects/{id}/import_data background job."""
    async def test_importer_post(self, init_db, headers, headers_without_content_type, client, monkeypatch):
        """Starting an import returns 201 with the deterministic job hash."""
        set_patches(monkeypatch)
        client = await client
        resp = await client.post('/engine_objects/1/import_data', headers=headers_without_content_type)
        assert resp.status == 201
        assert await resp.json() == {'job_hash': '6342e10bd7dca3240c698aa79c98362e'}
        await _wait_job_finish(client, headers_without_content_type, 'import_data')
    async def test_importer_get_running(self, init_db, headers_without_content_type, client, monkeypatch):
        """Querying right after start reports the job as running."""
        set_patches(monkeypatch)
        client = await client
        await client.post('/engine_objects/1/import_data', headers=headers_without_content_type)
        resp = await client.get('/engine_objects/1/import_data?job_hash=6342e10bd7dca3240c698aa79c98362e',
                            headers=headers_without_content_type)
        assert await resp.json() == {'status': 'running'}
        await _wait_job_finish(client, headers_without_content_type, 'import_data')
    async def test_importer_get_done(self, init_db, headers_without_content_type, client, monkeypatch):
        """A finished import reports status 'done' with the line count."""
        set_patches(monkeypatch)
        client = await client
        await client.post('/engine_objects/1/import_data', headers=headers_without_content_type)
        resp = await _wait_job_finish(client, headers_without_content_type, 'import_data')
        assert await resp.json() == {
            'status': 'done',
            'result': {'lines_count': 3},
            'time_info': {
                'elapsed': '0:00',
                'start': '1900-01-01 00:00',
                'end': '1900-01-01 00:00'
            }
        }
    async def test_importer_get_with_error(self, init_db, headers_without_content_type, client, monkeypatch):
        """An exception raised by the importer surfaces as status 'error'."""
        set_patches(monkeypatch)
        monkeypatch.setattr('tests.integration.fixtures.TopSellerArrayTest.get_data',
            mock.MagicMock(side_effect=Exception('testing')))
        client = await client
        await client.post('/engine_objects/1/import_data', headers=headers_without_content_type)
        resp = await _wait_job_finish(client, headers_without_content_type, 'import_data')
        assert await resp.json() == {
            'status': 'error',
            'result': {'message': 'testing', 'name': 'Exception'},
            'time_info': {
                'elapsed': '0:00',
                'start': '1900-01-01 00:00',
                'end': '1900-01-01 00:00'
            }
        }
async def _post_products(client, headers, headers_without_content_type, products=[{'sku': 'test'}]):
    """Insert store items and wait for the update_filters job to finish.

    NOTE(review): `products` is a mutable default argument; safe here only
    because it is never mutated.
    """
    resp = await client.post('/item_types/1/items?store_id=1',
        data=ujson.dumps(products), headers=headers)
    resp = await client.post('/item_types/1/update_filters?store_id=1',
        headers=headers_without_content_type)
    sleep(0.05)
    # Poll until the filters-update background job leaves the running state.
    while True:
        resp = await client.get(
            '/item_types/1/update_filters?store_id=1&job_hash=6342e10bd7dca3240c698aa79c98362e',
            headers=headers_without_content_type)
        if (await resp.json())['status'] != 'running':
            break
    return resp
def set_readers_builders_patch(monkeypatch, values=None):
    """Stub the CSV readers builder to return canned byte rows.

    `values` is a list of readers (each a list of byte chunks); the default is
    a single reader with one item record.
    """
    if values is None:
        values = [[ujson.dumps({'value': 1, 'item_key': 'test'}).encode()]]
    readers_builder = values
    mock_ = mock.MagicMock()
    mock_.return_value = readers_builder
    monkeypatch.setattr(
        'myreco.engine_objects.object_base.EngineObjectBase._build_csv_readers',
        mock_
    )
class TestEngineObjectsModelsObjectsExporter(object):
    """POST/GET /engine_objects/{id}/export background job (no data import)."""
    async def test_exporter_post(self, init_db, headers_without_content_type, headers, client, monkeypatch):
        """Starting an export returns the deterministic job hash."""
        set_patches(monkeypatch)
        set_readers_builders_patch(monkeypatch)
        client = await client
        await _post_products(client, headers, headers_without_content_type)
        resp = await client.post('/engine_objects/1/export', headers=headers_without_content_type)
        assert await resp.json() == {'job_hash': '6342e10bd7dca3240c698aa79c98362e'}
        await _wait_job_finish(client, headers_without_content_type)
    async def test_exporter_get_running(self, init_db, headers_without_content_type, headers, client, monkeypatch, loop):
        """With a large canned dataset the job is still running right after start."""
        set_patches(monkeypatch)
        prods = [ujson.dumps({'value': i, 'item_key': 'test{}'.format(i)}).encode() for i in range(100)]
        set_readers_builders_patch(monkeypatch, [[b'\n'.join(prods)]])
        client = await client
        products = [{'sku': 'test{}'.format(i)} for i in range(10)]
        await _post_products(client, headers, headers_without_content_type, products)
        await client.post('/engine_objects/1/export', headers=headers_without_content_type)
        resp = await client.get(
            '/engine_objects/1/export?job_hash=6342e10bd7dca3240c698aa79c98362e', headers=headers_without_content_type)
        assert await resp.json() == {'status': 'running'}
        await _wait_job_finish(client, headers_without_content_type)
    async def test_exporter_get_done(self, init_db, headers_without_content_type, headers, client, monkeypatch):
        """A finished export reports array stats in its result."""
        set_patches(monkeypatch)
        client = await client
        await _post_products(client, headers, headers_without_content_type)
        set_readers_builders_patch(monkeypatch)
        await client.post('/engine_objects/1/export', headers=headers_without_content_type)
        resp = await _wait_job_finish(client, headers_without_content_type)
        assert await resp.json() == {
            'status': 'done',
            'result': {'length': 1, 'max_sells': 1, 'min_sells': 1},
            'time_info': {
                'elapsed': '0:00',
                'start': '1900-01-01 00:00',
                'end': '1900-01-01 00:00'
            }
        }
    async def test_exporter_get_with_error(
            self, init_db, headers_without_content_type, headers, client, monkeypatch):
        """An export with no readers fails with an EngineError payload."""
        set_patches(monkeypatch)
        client = await client
        await _post_products(client, headers, headers_without_content_type)
        set_readers_builders_patch(monkeypatch, [])
        await client.post('/engine_objects/1/export', headers=headers_without_content_type)
        resp = await _wait_job_finish(client, headers_without_content_type)
        assert await resp.json() == {
            'status': 'error',
            'result': {
                'message': "No data found for engine object 'Top Seller Object'",
                'name': 'EngineError'
            },
            'time_info': {
                'elapsed': '0:00',
                'start': '1900-01-01 00:00',
                'end': '1900-01-01 00:00'
            }
        }
def CoroMock():
    """Build a mock usable where a coroutine function is expected.

    Calling the returned mock yields a coroutine; awaiting it invokes the
    inner ``coro`` MagicMock (exposed as ``corofunc.coro``) with the same
    arguments, so call recording and return values work as usual.

    Note: the previous implementation used ``asyncio.coroutine``, which was
    removed in Python 3.11; an ``async def`` side effect is equivalent.
    """
    coro = mock.MagicMock(name="CoroutineResult")
    async def _run(*args, **kwargs):
        # Delegate to the recording mock; its return_value/side_effect apply.
        return coro(*args, **kwargs)
    corofunc = mock.MagicMock(name="CoroutineFunction", side_effect=_run)
    corofunc.coro = coro
    return corofunc
def set_data_importer_patch(monkeypatch, mock_=None):
    """Replace the strategy's get_data with `mock_` (a fresh MagicMock by
    default) and return it so tests can set return values / side effects."""
    if mock_ is None:
        mock_ = mock.MagicMock()
    monkeypatch.setattr('tests.integration.fixtures.TopSellerArrayTest.get_data', mock_)
    return mock_
class TestEngineObjectsModelsObjectsExporterWithImport(object):
    """/engine_objects/{id}/export?import_data=true — combined import+export job."""
    async def test_exporter_post_with_import(self, init_db, headers, headers_without_content_type, client, monkeypatch):
        """Starting the combined job triggers the importer (get_data is called)."""
        set_patches(monkeypatch)
        client = await client
        await _post_products(client, headers, headers_without_content_type)
        set_readers_builders_patch(monkeypatch)
        get_data_patch = set_data_importer_patch(monkeypatch)
        get_data_patch.return_value = {}
        resp = await client.post('/engine_objects/1/export?import_data=true',
            headers=headers_without_content_type)
        hash_ = await resp.json()
        await _wait_job_finish(client, headers_without_content_type)
        # Capture before reset so the assertion below is race-free.
        called = bool(TopSellerArrayTest.get_data.called)
        TopSellerArrayTest.get_data.reset_mock()
        assert hash_ == {'job_hash': '6342e10bd7dca3240c698aa79c98362e'}
        assert called
    async def test_exporter_get_running_with_import(self, init_db, headers, headers_without_content_type, client, monkeypatch):
        """A slow importer keeps the job in the running state."""
        set_patches(monkeypatch)
        client = await client
        def func(x, y, z):
            sleep(1)
            return {}
        await _post_products(client, headers, headers_without_content_type)
        set_readers_builders_patch(monkeypatch)
        set_data_importer_patch(monkeypatch, func)
        await client.post('/engine_objects/1/export?import_data=true',
            headers=headers_without_content_type)
        resp = await client.get(
            '/engine_objects/1/export?job_hash=6342e10bd7dca3240c698aa79c98362e',
            headers=headers_without_content_type)
        assert await resp.json() == {'status': 'running'}
        await _wait_job_finish(client, headers_without_content_type)
    async def test_exporter_get_done_with_import(self, init_db, headers, headers_without_content_type, client, monkeypatch):
        """A finished combined job reports importer and exporter results."""
        set_patches(monkeypatch)
        client = await client
        await _post_products(client, headers, headers_without_content_type)
        set_readers_builders_patch(monkeypatch)
        await client.post('/engine_objects/1/export?import_data=true',
            headers=headers_without_content_type)
        await _wait_job_finish(client, headers_without_content_type)
        resp = await client.get(
            '/engine_objects/1/export?job_hash=6342e10bd7dca3240c698aa79c98362e',
            headers=headers_without_content_type)
        assert await resp.json() == {
            'status': 'done',
            'result': {
                'importer': {'lines_count': 3},
                'exporter': {
                    'length': 1,
                    'max_sells': 1,
                    'min_sells': 1
                }
            },
            'time_info': {
                'elapsed': '0:00',
                'start': '1900-01-01 00:00',
                'end': '1900-01-01 00:00'
            }
        }
    async def test_exporter_get_with_error_in_import_with_import(
            self, init_db, headers, headers_without_content_type, client, monkeypatch):
        """An importer exception surfaces as status 'error'."""
        set_patches(monkeypatch)
        client = await client
        await _post_products(client, headers, headers_without_content_type)
        get_data_patch = set_data_importer_patch(monkeypatch)
        get_data_patch.side_effect = Exception('testing')
        await client.post('/engine_objects/1/export?import_data=true', headers=headers_without_content_type)
        await _wait_job_finish(client, headers_without_content_type)
        resp = await client.get(
            '/engine_objects/1/export?job_hash=6342e10bd7dca3240c698aa79c98362e', headers=headers_without_content_type)
        assert await resp.json() == {
            'status': 'error',
            'result': {'message': 'testing', 'name': 'Exception'},
            'time_info': {
                'elapsed': '0:00',
                'start': '1900-01-01 00:00',
                'end': '1900-01-01 00:00'
            }
        }
    async def test_exporter_get_with_error_in_export_with_import(
            self, init_db, headers, headers_without_content_type, client, monkeypatch):
        """An export failure (no readers) surfaces as an EngineError."""
        set_patches(monkeypatch)
        client = await client
        await _post_products(client, headers, headers_without_content_type)
        set_readers_builders_patch(monkeypatch, [])
        await client.post('/engine_objects/1/export?import_data=true', headers=headers_without_content_type)
        await _wait_job_finish(client, headers_without_content_type)
        resp = await client.get(
            '/engine_objects/1/export?job_hash=6342e10bd7dca3240c698aa79c98362e', headers=headers_without_content_type)
        assert await resp.json() == {
            'status': 'error',
            'result': {
                'message': "No data found for engine object 'Top Seller Object'",
                'name': 'EngineError'
            },
            'time_info': {
                'elapsed': '0:00',
                'start': '1900-01-01 00:00',
                'end': '1900-01-01 00:00'
            }
        }
| [
"dutradda@gmail.com"
] | dutradda@gmail.com |
e2165522c3bb4459305797ac81bd3f7b39820945 | 335ad4f2f024f4d3aa528447a40fe7cc7ea838d5 | /src/merge-patch.py | 9d914b3f25982c038a0d7f0deef2aff13308560b | [] | no_license | SnakeHunt2012/text-classification | 2e1c19e2b045d47de8dc61e337271137fe6898f3 | b5511bb6edce176deda9a2356c2da8bc3e2179d3 | refs/heads/master | 2020-04-12T08:56:16.968575 | 2017-02-17T04:05:58 | 2017-02-17T04:05:58 | 62,780,080 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,924 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from codecs import open
from argparse import ArgumentParser
def load_patch_dict(patch_file):
patch_dict = {}
new_category_set = set()
with open(patch_file, 'r') as fd:
for line in fd:
splited_line = line.strip("\n").strip("\r").split("\t")
assert len(splited_line) == 4
old_parent_tag, old_sub_tag, new_parent_tag, new_sub_tag = splited_line
if len(old_parent_tag) == 0:
assert len(old_sub_tag) == 0
new_category_set.add("%s|%s" % (new_parent_tag, new_sub_tag))
continue
old_category = "%s|%s" % (old_parent_tag, old_sub_tag)
new_category = "%s|%s" % (new_parent_tag, new_sub_tag)
patch_dict[old_category] = new_category
return patch_dict, new_category_set
def main():
parser = ArgumentParser()
parser.add_argument("new_patch_file", help = "new_patch_file")
parser.add_argument("new_new_patch_file", help = "new_new_patch_file")
args = parser.parse_args()
new_patch_file = args.new_patch_file
new_new_patch_file = args.new_new_patch_file
new_patch_dict, new_category_set = load_patch_dict(new_patch_file)
new_new_patch_dict, new_new_category_set = load_patch_dict(new_new_patch_file)
aggregate_patch_dict = {}
for key, value in new_patch_dict.iteritems():
if new_patch_dict[key] not in new_new_patch_dict:
print new_patch_dict[key]
aggregate_patch_dict[key] = new_new_patch_dict[new_patch_dict[key]]
for category in new_category_set:
aggregate_patch_dict[category] = new_new_patch_dict[category]
for old_category, new_category in aggregate_patch_dict.iteritems():
print "%s\t%s" % (old_category, new_category)
for category in new_category_set:
print "\t%s" % category
if __name__ == "__main__":
main()
| [
"SnakeHunt2012@gmail.com"
] | SnakeHunt2012@gmail.com |
af1a0b1e3fbc5532f301616c7de79889ed3c1f13 | 338298474c517e28d9a214c3525b9709625fa438 | /YouWeesh/Controllers/RegisterController.py | e1c6418416713ae4a61fcdd2a707f2d628d8db50 | [] | no_license | vincehar/Backend | f5a8f0e264de2ba7ccadba3bce015f3a30e9c478 | fb143c6c70cb65018d0436bf5b891cb72620208d | refs/heads/master | 2023-07-06T10:37:50.057555 | 2023-06-28T13:04:36 | 2023-06-28T13:04:36 | 73,698,999 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,284 | py | from base64 import b64decode
from django.core.files.base import ContentFile
from django.http import Http404
from mongoengine.django.auth import User
from rest_framework.decorators import api_view, renderer_classes, permission_classes
from rest_framework.permissions import AllowAny
from rest_framework.renderers import JSONRenderer
from rest_framework.response import Response
from YouWeesh.Models.Address import Address
from YouWeesh.Models.Preferences import Preferences
from YouWeesh.Models.SocialNetworks import SocialNetworks
from YouWeesh.Models.Users import Users
from YouWeesh.Tools.app import App
@api_view(('POST',))
@permission_classes((AllowAny,))
@renderer_classes((JSONRenderer,))
def registeruser(request):
username = request.POST['username']
password = request.POST['password']
email = request.POST['email'].lower();
lastname = request.POST['lastname']
firstname = request.POST['firstname']
socialnetwork = request.POST['socialnetwork']
pictureBase64 = request.POST['picture']
home_town = 'Geneve'#request.POST['home_town']
picturedata = b64decode(pictureBase64)
socialnetworkObject = SocialNetworks.objects.get(label=socialnetwork)
u=User.objects.create(username=username, email=email, first_name=firstname, last_name=lastname)
if socialnetwork == 'Youweesh':
u.set_password(password)
u.save()
preferences = Preferences()
preferences.save()
if home_town != "":
addr = Address()
addr.city = home_town
addr.getorUpdateCoordinates()
addr.save()
users = Users.objects.create(user=u, social_network=socialnetworkObject, address=addr, preferences=preferences)
else:
users = Users.objects.create(user=u, social_network=socialnetworkObject, preferences=preferences)
if socialnetwork == 'Facebook' or socialnetwork == 'Twitter':
users.picture.replace(ContentFile(picturedata))
users.save()
return Response(True)
@api_view(('POST',))
@permission_classes((AllowAny,))
@renderer_classes((JSONRenderer,))
def registerFCMToken(request):
try:
connected_user = App.getCurrentUser(request)
connected_user.update_fcm_token(request.POST['fcmToken'])
except connected_user.DoesNotExist:
raise Http404('Not logged')
return Response(True) | [
"you@example.com"
] | you@example.com |
95a9b99b23d9c3361f738b051a584a36d3f1b9e5 | 6e35052e119652419ed7d2291d9c3f2fbac8db14 | /cm_ext_lib/make_manifest.py | a03f86c52ec408e474d34d4e67e323f8f3da3c07 | [] | no_license | khirodpatra/mk_cdh_parcel | 191f5735bd04f25f4639ea844f121fba1fa9b826 | 3de93c50427d43373e0af33230be7fef82e65b43 | refs/heads/master | 2021-06-05T13:06:29.745230 | 2016-10-26T05:59:36 | 2016-10-26T05:59:36 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,814 | py | #!/usr/bin/env python
#
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This program creates a manifest.json file from a directory of parcels and
# places the file in the same directory as the parcels.
# Once created, the directory can be served over http as a parcel repository.
import hashlib
import json
import os
import re
import sys
import tarfile
import time
def _get_parcel_dirname(parcel_name):
"""
Extract the required parcel directory name for a given parcel.
eg: CDH-5.0.0-el6.parcel -> CDH-5.0.0
"""
parts = re.match(r"^(.*?)-(.*)-(.*?)$", parcel_name).groups()
return parts[0] + '-' + parts[1]
def _safe_copy(key, src, dest):
"""
Conditionally copy a key/value pair from one dictionary to another.
Nothing is done if the key is not present in the source dictionary
"""
if key in src:
dest[key] = src[key]
def make_manifest(path, timestamp=time.time()):
"""
Make a manifest.json document from the contents of a directory.
This function will scan the specified directory, identify any parcel files
in it, and then build a manifest from those files. Certain metadata will be
extracted from the parcel and copied into the manifest.
@param path: The path of the directory to scan for parcels
@param timestamp: Unix timestamp to place in manifest.json
@return: the manifest.json as a string
"""
manifest = {}
manifest['lastUpdated'] = int(timestamp * 1000)
manifest['parcels'] = []
files = os.listdir(path)
for f in files:
if not f.endswith('.parcel'):
continue
print("Found parcel %s" % (f,))
entry = {}
entry['parcelName'] = f
fullpath = os.path.join(path, f)
with open(fullpath, 'rb') as fp:
entry['hash'] = hashlib.sha1(fp.read()).hexdigest()
with tarfile.open(fullpath, 'r') as tar:
try:
json_member = tar.getmember(os.path.join(_get_parcel_dirname(f),
'meta', 'parcel.json'))
except KeyError:
print("Parcel does not contain parcel.json")
continue
try:
parcel = json.loads(tar.extractfile(json_member).read().decode(encoding='UTF-8'))
except:
print("Failed to parse parcel.json")
continue
_safe_copy('depends', parcel, entry)
_safe_copy('replaces', parcel, entry)
_safe_copy('conflicts', parcel, entry)
_safe_copy('components', parcel, entry)
try:
notes_member = tar.getmember(os.path.join(_get_parcel_dirname(f),
'meta', 'release-notes.txt'))
entry['releaseNotes'] = tar.extractfile(notes_member).read().decode(encoding='UTF-8')
except KeyError:
# No problem if there's no release notes
pass
manifest['parcels'].append(entry)
return json.dumps(manifest, indent=4, separators=(',', ': '))
if __name__ == "__main__":
    # Default to the current working directory; an optional first CLI
    # argument overrides the directory to scan.
    path = os.path.curdir
    if len(sys.argv) > 1:
        path = sys.argv[1]
    print("Scanning directory: %s" % (path))
    manifest = make_manifest(path)
    # Write manifest.json next to the parcels so the directory can be
    # served over HTTP as a parcel repository.
    with open(os.path.join(path, 'manifest.json'), 'w') as fp:
        fp.write(manifest)
| [
"khirodpatra@gmail.com"
] | khirodpatra@gmail.com |
c4f8b602455159c96bfee7d9af8b244c0fb7e10d | 8056fb11a606a59c0e11e23c54f53aa48d184631 | /djangoCMS/sensoriumSite/migrations/0014_aboutus_bioplaceholdermodel2.py | 88fab3149dd824088b84aed0e53837f0aa343ec1 | [] | no_license | Mark2208/Sensorium_Website_Group | f3785403cd7f44e3e1ae17ca6935f2458fb50937 | 3a6b6282238b8cd2580edaa48e311b1065072ff1 | refs/heads/master | 2020-04-22T16:09:42.873493 | 2019-05-08T17:24:26 | 2019-05-08T17:24:26 | 170,499,346 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 830 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.20 on 2019-04-28 21:23
from __future__ import unicode_literals
import cms.models.fields
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated schema migration: creates aboutUs_BioPlaceholderModel2
    # with a django-cms PlaceholderField bound to the "Bio Line" slot.
    dependencies = [
        ('cms', '0022_auto_20180620_1551'),
        ('sensoriumSite', '0013_auto_20190428_2221'),
    ]
    operations = [
        migrations.CreateModel(
            name='aboutUs_BioPlaceholderModel2',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('plc_bio', cms.models.fields.PlaceholderField(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, slotname='Bio Line', to='cms.Placeholder')),
            ],
        ),
    ]
| [
"markpereira2208@gmail.com"
] | markpereira2208@gmail.com |
251cea80fc221b29db818fc6b6ea2ee0e662fbcc | 4b1638310dcf0188aadb2ff5d61bf48306ae282f | /e_shop/order/serializers.py | 1ea72e60a772f5529ff98269b6a550ce4a88e055 | [] | no_license | LexxLuey/vue-django-shop | 95bb2d6b7e64277578982391f2d0a603f9d651ea | e9ad17c245627b4e13c7785926cdca7f31602450 | refs/heads/main | 2023-08-30T08:00:53.265764 | 2021-10-14T16:31:24 | 2021-10-14T16:31:24 | 417,199,096 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,790 | py | from rest_framework import serializers
from .models import Order, OrderItem
from product.serializers import ProductSerializer
class MyOrderItemSerializer(serializers.ModelSerializer):
    """Read-side serializer for one order line; embeds the full product."""
    product = ProductSerializer()
    class Meta:
        model = OrderItem
        fields = (
            "price",
            "product",
            "quantity",
        )
class MyOrderSerializer(serializers.ModelSerializer):
    """Read-side serializer for a user's order, with nested product details
    and payment/delivery status fields."""
    items = MyOrderItemSerializer(many=True)
    class Meta:
        model = Order
        fields = (
            "id",
            "first_name",
            "last_name",
            "email",
            "address",
            "zipcode",
            "place",
            "phone",
            "delivered",
            "reference_id",
            "transaction_id",
            "paid",
            "items",
            "paid_amount"
        )
class OrderItemSerializer(serializers.ModelSerializer):
    """Write-side serializer for one order line; product is a plain PK."""
    class Meta:
        model = OrderItem
        fields = (
            "price",
            "product",
            "quantity",
        )
class OrderSerializer(serializers.ModelSerializer):
    """Write-side serializer used at checkout: creates the Order together
    with its nested OrderItem rows."""
    items = OrderItemSerializer(many=True)
    class Meta:
        model = Order
        fields = (
            "id",
            "first_name",
            "last_name",
            "email",
            "address",
            "zipcode",
            "place",
            "phone",
            "items",
            'paid_amount',
            'reference_id',
            'transaction_id',
        )
    def create(self, validated_data):
        """Create the Order first, then one OrderItem per nested item dict."""
        # Nested writes aren't automatic in DRF; pop the items and insert
        # them manually against the freshly created order.
        items_data = validated_data.pop('items')
        order = Order.objects.create(**validated_data)
        for item_data in items_data:
            OrderItem.objects.create(order=order, **item_data)
        return order
"biggestluey@gmail.com"
] | biggestluey@gmail.com |
f2dd5ac8b68469e874aa5f39d5908a14de7ffc04 | 97024a97c877d3874a055146141ac7106a976e43 | /src/layers.py | 26e03b8ce8e7671684a4e1e71831c620fc8588a1 | [] | no_license | colinclement/snnp | ff8f3f64f400d65e425dfba37cc690e19192b973 | 69f554a0f394c49142358e0c0be19ab6a420a834 | refs/heads/master | 2021-05-06T07:22:37.487577 | 2017-12-11T22:43:34 | 2017-12-11T22:43:34 | 113,916,313 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,964 | py | """
SNNP - Simple Neural Networks with Python
author: Colin Clement
date: 2015-03-13
Neural network layer classes based on torch nn module
"""
import numpy as np
from scipy.linalg import inv, sqrtm
from scipy.special import expit
from scipy.misc import logsumexp
def orthogonalize(m):
    """Return the orthogonal factor of m's polar decomposition, i.e. the
    nearest matrix with orthonormal columns (Saxe & Ganguli-style
    orthogonal weight initialization)."""
    gram = m.T.dot(m)
    whitener = inv(sqrtm(gram))
    return np.real(m.dot(whitener))
class Layer(object):
    """ A single layer of a neural network
        Contains the following state variables:
            self.output - computed with last call of self.forward()
            self.gradInput - Gradients w.r.t. inputs computed with
                            last call of self.updateGradInput
            self.gradOutput - Gradients w.r.t. output of layer
            self.train - Bool if network is being trained
    """
    def __init__(self, **kwargs):
        """Default initialization of layer state.
        Accepts an optional 'seed' kwarg used to seed the layer's RNG."""
        self.output = None #Computed with last call of forward()
        self.gradInput = None # Gradients w.r.t. inputs
        self.W = None #learnable weights
        self.b = None #learnable biases
        self.a = None #Activation parameters
        self.gradW = None #Gradient of W
        self.gradb = None #Gradient of b
        self.grada = None #Gradient of a
        self.train = False
        self.scale = 1.
        if 'seed' in kwargs:
            self.seed = kwargs['seed']
            self.rng = np.random.RandomState()
            self.rng.seed(self.seed)
        else:
            self.rng = np.random.RandomState()
    def forward(self, inp):
        """ Given an input, compute the forward pass """
        return self.updateOutput(inp)
    def backward(self, inp, gradOutput, scale=1.):
        """ Given a gradient of the output, backpropagate
        through the layer and return gradient of input"""
        self.updateGradInput(inp, gradOutput)
        self.accGradParameters(inp, gradOutput, scale)
        return self.gradInput
    def updateOutput(self, inp):
        """ Override to set and return self.output"""
        self.output = inp
        return self.output
    def updateGradInput(self, inp, gradOutput):
        """Override to set self.gradInput, the gradient w.r.t. this
        layer's input."""
    def accGradParameters(self, inp, gradOutput, scale):
        """Override to accumulate gradients w.r.t. parameters"""
        # NOTE(review): base implementation returns gradInput despite being
        # named an accumulator; backward() ignores the value — confirm intent.
        return self.gradInput
    def zeroGradParameters(self):
        """Used to set grad parameter accumulation to zero"""
        p, gp = self.parameters()
        for g in gp:
            g.fill(0)
    def updateParameters(self, learningrate):
        """Plain gradient-descent step: p <- p - learningrate * grad(p)"""
        ps, gps = self.parameters()
        for p, gp in zip(ps, gps):
            p -= learningrate*gp
    def parameters(self):
        """Returns learnable parameters and gradients of those parameters """
        if self.W is not None and self.b is not None:
            return [self.W, self.b], [self.gradW, self.gradb]
        elif self.W is not None:
            return [self.W], [self.gradW]
        elif self.b is not None:
            return [self.b], [self.gradb]
        elif self.a is not None: #Activations like PReLU
            return [self.a], [self.grada]
        else:
            return [], []
    def getParameters(self):
        """Returns flat arrays of params and gradParams"""
        p, gp = self.parameters()
        if p and gp:
            # NOTE(review): under Python 3 map() returns lazy iterators, not
            # lists — confirm callers consume them accordingly.
            return map(np.ravel, p), map(np.ravel, gp)
        else:
            return p, gp
    def training(self):
        # Enable training mode (affects e.g. Dropout).
        self.train = True
    def evaluate(self):
        # Enable evaluation mode.
        self.train = False
class Linear(Layer):
    """Fully-connected layer computing W.dot(x) + b, with samples stored
    as columns of x."""
    def __init__(self, fan_in, fan_out, weight_init='PReLU', **kwargs):
        # weight_init: 'PReLU' draws normal weights scaled by sqrt(2/fan_out);
        # 'Ganguli' draws normal weights and orthogonalizes the columns.
        # NOTE(review): He ("PReLU") init is conventionally sqrt(2/fan_in);
        # this uses fan_out — confirm whether that is intentional.
        super(Linear, self).__init__(**kwargs)
        if weight_init == 'PReLU':
            self.W = self.rng.normal(0.0, np.sqrt(2./fan_out), (fan_out, fan_in))
            self.b = self.rng.normal(0.0, np.sqrt(2./fan_out), fan_out)
        elif weight_init == 'Ganguli': #Saxe & Ganguli initial conditions
            self.W = self.rng.normal(0.0, 1./np.sqrt(fan_out), (fan_out, fan_in))
            self.W = orthogonalize(self.W)
            self.b = self.rng.normal(0.0, 1./np.sqrt(fan_out), fan_out)
        self.gradW = np.zeros_like(self.W)
        self.gradb = np.zeros_like(self.b)
    def updateOutput(self, inp):
        # b is broadcast across the batch (column) axis.
        self.output = self.W.dot(inp) + self.b[:,None]
        return self.output
    def updateGradInput(self, inp, gradOutput):
        # Gradient w.r.t. input: W^T . gradOutput.
        self.gradInput = (gradOutput.T.dot(self.W)).T
    def accGradParameters(self, inp, gradOutput, scale=1.0):
        """ Update gradient accumulations, mean used so learningrate is
        independent of number of samples"""
        self.gradW += scale * (gradOutput[:,None,:] * inp[None,:,:]).mean(-1)
        self.gradb += scale * gradOutput.mean(-1)
class Softmax(Layer):
    """ Softmax layer (normalizes over axis 0, per sample column)"""
    def softmax(self, inp):
        # Subtract the per-column max before exponentiating for numerical
        # stability; softmax is invariant to this shift.
        maxsub = inp - inp.max(0)
        sm = np.exp(maxsub)
        return sm/sm.sum(axis=0)
    def updateOutput(self, inp):
        self.output = self.softmax(inp)
        return self.output
    def updateGradInput(self, inp, gradOutput):
        """ Assumes updateOutput was called previously, uses self.output as
        softmax of last inp on forward call"""
        if self.output is not None:
            sm = self.output
        else:
            sm = self.softmax(inp)
        # Per-sample softmax Jacobian: diag(s) - s s^T, batch on last axis.
        smeye = (sm[:,None].T*np.eye(len(sm))).T
        self.gradInput = gradOutput *(smeye - np.einsum('np,mp->nmp',sm,sm))
class ReLU(Layer):
    """Rectified linear unit: passes positive inputs through, zeroes the rest."""
    def updateOutput(self, inp):
        """Forward pass: max(0, inp); caches the active-unit mask."""
        active = (inp > 0.).astype('int')
        self.mask = active
        self.output = inp * active
        return self.output
    def updateGradInput(self, inp, gradOutput):
        """Backward pass: gradient flows only through units that were active."""
        self.gradInput = self.mask * gradOutput
class PReLU(Layer):
    """Parametrized rectified linear activation.

    Output is inp for positive inputs and a*inp otherwise, where the
    per-unit slopes `a` are learnable parameters exposed through
    Layer.parameters().
    """
    def __init__(self, n_hidden, a = None):
        """n_hidden: number of units; a: optional initial slope array
        (defaults to uniform random in [0, 1))."""
        # Initialize base-class state FIRST: Layer.__init__ resets self.a
        # and self.grada to None, so calling it last (as before) silently
        # wiped the slopes and made the layer untrainable.
        super(PReLU, self).__init__()
        self.n_hidden = n_hidden
        # `a or default` raises ValueError on numpy arrays; test None explicitly.
        self.a = np.random.rand(self.n_hidden) if a is None else a
        self.grada = np.zeros_like(self.a)
    def updateOutput(self, inp):
        """Forward pass; caches the masks used by the backward pass."""
        positive = inp > 0
        self.posmask = positive.astype('int')
        # `not positive` on an ndarray raises ValueError; use the
        # elementwise complement instead.
        self.negind = (~positive).astype('int')
        self.negmask = self.negind * self.a[:, None]
        self.output = (self.posmask + self.negmask) * inp
        return self.output
    def updateGradInput(self, inp, gradOutput):
        # d(output)/d(inp) is 1 on the positive side and a elsewhere.
        self.gradInput = gradOutput * (self.posmask + self.negmask)
    def accGradParameters(self, inp, gradOutput, scale=1.0):
        """Accumulate d(loss)/d(a).

        Renamed from accGradInput so Layer.backward (which calls
        accGradParameters) actually invokes it."""
        # d(output)/d(a) = inp on the non-positive side, 0 elsewhere;
        # average over the batch (last axis), consistent with Linear.
        self.grada += scale * (gradOutput * inp * self.negind).mean(-1)
class Sigmoid(Layer):
    """Logistic sigmoid activation, 1 / (1 + exp(-x))."""
    def updateOutput(self, inp):
        """Forward pass via scipy's numerically stable expit."""
        self.output = expit(inp)
        return self.output
    def updateGradInput(self, inp, gradOutput):
        """Backward pass using the cached forward output:
        sigmoid'(x) = sigmoid(x) * (1 - sigmoid(x))."""
        s = self.output
        self.gradInput = gradOutput * s * (1. - s)
class Tanh(Layer):
    """Hyperbolic tangent activation."""
    def updateOutput(self, inp):
        """Forward pass: tanh(inp)."""
        self.output = np.tanh(inp)
        return self.output
    def updateGradInput(self, inp, gradOutput):
        """Backward pass using the cached output: tanh'(x) = 1 - tanh(x)^2."""
        out = self.output
        self.gradInput = (1. - out * out) * gradOutput
class Dropout(Layer):
    """Inverted dropout: randomly zeroes units while training to promote
    sparsity and regularize learning; acts as the identity in eval mode."""
    def __init__(self, p=0.5):
        """p: probability of dropping each unit."""
        self.p = p
        super(Dropout, self).__init__()
    def updateOutput(self, inp):
        if not self.train:
            self.output = inp
            return self.output
        # Keep each unit with probability 1-p and rescale by 1/(1-p) so
        # the expected activation matches evaluation mode.
        keep = (np.random.rand(*inp.shape) > self.p).astype('int')
        self.mask = keep / (1. - self.p)
        self.output = inp * self.mask
        return self.output
    def updateGradInput(self, inp, gradOutput):
        # Gradients pass only through the units kept on the forward pass.
        self.gradInput = gradOutput * self.mask if self.train else gradOutput
| [
"colin.clement@gmail.com"
] | colin.clement@gmail.com |
e23484f389705ca89a122d6106d3e1c4709ca68e | 89b3c2230405ffbe3ffea223b06e9a6f9d229317 | /Selenium/test_with_unittest.py | bdb961aaba65130ee3eb329d78d37302abcaa561 | [] | no_license | skate-moss/Stepik-Homework | 7666612b7560f6527ab28f139b28bc7ecbae4907 | 759ad3f369c4a4988907de2f1a5c814adec9e5ef | refs/heads/master | 2020-07-31T10:44:33.807034 | 2019-09-24T13:40:52 | 2019-09-24T13:40:52 | 210,577,953 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,816 | py | from selenium import webdriver
import time
import unittest
class Test(unittest.TestCase):
    """Checks the registration forms on the suninjuly.github.io pages.

    Both test cases run the same scenario against different URLs, so the
    shared flow lives in a single helper instead of being duplicated.
    """

    SUCCESS_TEXT = "Congratulations! You have successfully registered!"

    def _register_and_verify(self, link):
        """Open `link`, fill the required fields, submit the form and
        assert that the success message appears."""
        # Create the browser BEFORE the try-block: if webdriver startup
        # raises, the old code's `finally: browser.quit()` hit an
        # unbound-name error instead of the real failure.
        browser = webdriver.Chrome()
        try:
            browser.get(link)
            # Fill the required first name / last name / email fields.
            browser.find_element_by_xpath(
                "/html/body/div/form/div[1]/div[1]/input").send_keys("Ivan")
            browser.find_element_by_xpath(
                "/html/body/div/form/div[1]/div[2]/input").send_keys("Petrov")
            browser.find_element_by_xpath(
                "/html/body/div/form/div[1]/div[3]/input").send_keys("yandex@mail.ru")
            # Submit the form.
            browser.find_element_by_css_selector("button.btn").click()
            # Wait for the result page to load.
            time.sleep(1)
            welcome_text = browser.find_element_by_tag_name("h1").text
            self.assertEqual(self.SUCCESS_TEXT, welcome_text,
                             "Test complete! Well done!")
        finally:
            # Always close the browser, pass or fail.
            browser.quit()

    def test_1_test(self):
        self._register_and_verify("http://suninjuly.github.io/registration1.html")

    def test_2_test(self):
        self._register_and_verify("http://suninjuly.github.io/registration2.html")
if __name__ == "__main__":
unittest.main()
| [
"gooseftwx@yandex.ru"
] | gooseftwx@yandex.ru |
5833e6dc30d2c6ebaec174a55b4d82ecc71d3cac | 817a6685fc27bb024ecaea40d95dceaf2c824c31 | /validation/validate_poi.py | cfce493107f43e80c92a0c4eb886c86e71f88e84 | [] | no_license | saraswathykrk/Udacity_ML | 105f134465a1e019be76153942b9caefe4715783 | b781742d75ce45c6afe7ae6b7ca398fe3bb2f495 | refs/heads/master | 2020-04-03T06:26:42.328386 | 2018-10-28T14:16:04 | 2018-10-28T14:16:04 | 155,074,886 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,389 | py | #!/usr/bin/python
"""
Starter code for the validation mini-project.
The first step toward building your POI identifier!
Start by loading/formatting the data
After that, it's not our code anymore--it's yours!
"""
import pickle
import sys
from time import time
sys.path.append("../tools/")
from feature_format import featureFormat, targetFeatureSplit
from sklearn.model_selection import train_test_split
# Load the Enron final-project dataset (dict keyed by person name).
data_dict = pickle.load(open("../final_project/final_project_dataset.pkl", "r") )
### first element is our labels, any added elements are predictor
### features. Keep this the same for the mini-project, but you'll
### have a different feature list when you do the final project.
features_list = ["poi", "salary"]
data = featureFormat(data_dict, features_list)
labels, features = targetFeatureSplit(data)
# Hold out 30% for testing; the fixed random_state keeps the split
# reproducible between runs.
features_train, features_test, labels_train, labels_test = train_test_split(features, labels, test_size = 0.3, random_state = 42)
### it's all yours from here forward!
# Train a default decision tree as the POI identifier and time fit/predict.
from sklearn.tree import DecisionTreeClassifier
clf = DecisionTreeClassifier()
t0 = time()
clf.fit(features_train, labels_train)
print "Training time :", round(time() - t0,3), "s"
t0 = time()
pred = clf.predict(features_test)
print "Testing time :", round(time() - t0,3), "s"
# Accuracy on the held-out 30% (this is the point of the validation
# exercise: never evaluate on the training set).
from sklearn.metrics import accuracy_score
acc = accuracy_score(pred,labels_test)
print "Accuracy is:" , acc
| [
"noreply@github.com"
] | saraswathykrk.noreply@github.com |
80e78c403085d8c221c35fb9e8bbcf9fca10ca5e | e770ac1b69bfbcefb58a289c3409203eab1ffe55 | /analysis/migrations/0011_auto_20200617_1740.py | 806c64994b85422202b83efc4ffd496d7994d344 | [] | no_license | Saihgax/SBSPS-Challenge-4206-Sentimental-Analysis-of-COVID-19-tweets | f4bad71e8cec25ca1c813ab172a1e0e043152c89 | 1b9b2793a656f1c842b67a4a5fd599fc805a21e6 | refs/heads/master | 2022-11-14T05:58:50.794760 | 2020-07-14T19:07:28 | 2020-07-14T19:07:28 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 460 | py | # Generated by Django 3.0.7 on 2020-06-17 12:10
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration refreshing the default of
    # analysis.date_created.
    # NOTE(review): the default is a datetime frozen at makemigrations
    # time, not "now" at insert time — confirm django.utils.timezone.now
    # was not intended instead.
    dependencies = [
        ('analysis', '0010_auto_20200617_1652'),
    ]
    operations = [
        migrations.AlterField(
            model_name='analysis',
            name='date_created',
            field=models.DateTimeField(default=datetime.datetime(2020, 6, 17, 17, 40, 16, 266604)),
        ),
    ]
| [
"pranavsaihgal911@gmail.com"
] | pranavsaihgal911@gmail.com |
a7ba62674bde540766b0c1c66bafddae4b446c94 | cec125d7a04cf63809cc3eb47bc8e778d11e8abb | /example/Reproducible.py | 91bd8db6e8124afbd3431b68c3e4c09dd0a3b25c | [
"Apache-2.0"
] | permissive | emerrf/MIDAS.Python | 24a4cfda400c2fb432a31d8279dac41bd0348b67 | 264df6c6bf03d126f3f1784ced27fd7276ef0ae4 | refs/heads/master | 2022-12-20T03:35:39.702394 | 2020-10-14T18:05:13 | 2020-10-14T18:05:13 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,934 | py | # ------------------------------------------------------------------------------
# Copyright 2020 Rui LIU (@liurui39660)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ------------------------------------------------------------------------------
from pathlib import Path
from numpy import around
from pandas import read_csv
from sklearn.metrics import roc_auc_score
from tqdm import tqdm
from MIDAS import FilteringCore, NormalCore, RelationalCore
if __name__ == '__main__':
    # Resolve the repository root relative to this example script.
    root = (Path(__file__) / '../..').resolve()
    label = read_csv(root / "data/DARPA/darpa_ground_truth.csv", header=None, squeeze=True, dtype=int)
    # Alternative cores, kept for reference; only FilteringCore is active.
    # midas = NormalCore(2, 1024)
    # midas.numCurrent.param1 = midas.numTotal.param1 = [2, 3]
    # midas.numCurrent.param2 = midas.numTotal.param2 = [5, 7]
    # midas = RelationalCore(2, 1024)
    # midas.numCurrentSource.param1 = midas.numTotalSource.param1 = [2, 3]
    # midas.numCurrentSource.param2 = midas.numTotalSource.param2 = [5, 7]
    # midas.numCurrentDestination.param1 = midas.numTotalDestination.param1 = [11, 13]
    # midas.numCurrentDestination.param2 = midas.numTotalDestination.param2 = [17, 19]
    # midas.numCurrentEdge.param1 = midas.numTotalEdge.param1 = [23, 29]
    # midas.numCurrentEdge.param2 = midas.numTotalEdge.param2 = [31, 37]
    midas = FilteringCore(2, 1024, 1e3)
    # Pin the hash parameters of every count-min sketch to fixed constants
    # so the run is bit-for-bit reproducible (they are otherwise random).
    midas.numCurrentSource.param1 = midas.numTotalSource.param1 = midas.scoreSource.param1 = [2, 3]
    midas.numCurrentSource.param2 = midas.numTotalSource.param2 = midas.scoreSource.param2 = [5, 7]
    midas.numCurrentDestination.param1 = midas.numTotalDestination.param1 = midas.scoreDestination.param1 = [11, 13]
    midas.numCurrentDestination.param2 = midas.numTotalDestination.param2 = midas.scoreDestination.param2 = [17, 19]
    midas.numCurrentEdge.param1 = midas.numTotalEdge.param1 = midas.scoreEdge.param1 = [23, 29]
    midas.numCurrentEdge.param2 = midas.numTotalEdge.param2 = midas.scoreEdge.param2 = [31, 37]
    score = [0.] * label.shape[0]
    with open(root / "data/DARPA/darpa_processed.csv", 'r') as file:
        for i in tqdm(range(label.shape[0]), unit_scale=True):  # Much faster than pandas indexing
            data = file.readline().split(',')
            score[i] = midas(int(data[0]), int(data[1]), int(data[2]))
    score = around(score, 6)  # Same as c++ version
    print(f"ROC-AUC = {roc_auc_score(label, score)}")
    print(f"# Raw anomaly scores will be exported to")
    print(f"# {root / 'temp/Score.txt'}")
    score.tofile(root / "temp/Score.txt", '\n')
| [
"xxliuruiabc@gmail.com"
] | xxliuruiabc@gmail.com |
508ff2b6d6d352b25f08f40f8685987a07f0a9f1 | d511eeb3baaafc5e5c83918f10b018a38174d3ad | /test_selenium/test_Baidu/test_baidu_base.py | ff3d02cb3eaded24f811da36d52d0b8ad9a7ce4a | [] | no_license | cheng2020-G/20200507_hogwarts | 2389fb1b4bd625771753c912feb84a4fcdb94747 | 5b7ba5bff63389dbd563a6059f3877c11489a393 | refs/heads/master | 2022-11-23T20:34:36.935755 | 2020-07-30T10:05:01 | 2020-07-30T10:05:01 | 262,307,405 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 296 | py | from selenium import webdriver
class TestBaiduBase():
    """Base class for Baidu UI tests: opens the home page before each
    test and closes the browser afterwards (pytest setup/teardown)."""
    def setup(self):
        # Fresh Chrome session pointed at the Baidu home page.
        self.driver = webdriver.Chrome()
        self.driver.get("https://www.baidu.com/")
        self.driver.maximize_window()
        # Implicit wait: element lookups poll for up to 3 seconds.
        self.driver.implicitly_wait(3)
    def teardown(self):
        self.driver.quit()
"myautotest123698745"
] | myautotest123698745 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.