blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 3 281 | content_id stringlengths 40 40 | detected_licenses listlengths 0 57 | license_type stringclasses 2 values | repo_name stringlengths 6 116 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringclasses 313 values | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 18.2k 668M ⌀ | star_events_count int64 0 102k | fork_events_count int64 0 38.2k | gha_license_id stringclasses 17 values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 107 values | src_encoding stringclasses 20 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 4 6.02M | extension stringclasses 78 values | content stringlengths 2 6.02M | authors listlengths 1 1 | author stringlengths 0 175 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
9137b0a07320448c05ca54c3dde46318d637b4c1 | 8d26c6d3836c0580d37d8e71d02f113e009532bc | /tests/debug/test_controller.py | 5760cd5416a84b680056b8fffcc6eb7990767d00 | [
"MIT"
] | permissive | fchauvel/rasp-machine | 3bd5b62c5dfcb7474f948dbe7bfc1377a59fe0bb | 140fd3bb38d5c7933389c5a010fd4fcecc607876 | refs/heads/main | 2023-04-29T14:02:22.174472 | 2021-05-22T08:55:04 | 2021-05-22T08:55:04 | 365,338,342 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,610 | py | #
# This file is part of rasp-machine.
#
# Copyright (C) 2021 by Franck Chauvel
#
# This code is licensed under the MIT License.
# See LICENSE.txt for details
#
from rasp.debug.controller import Break, DebugController, Quit, SetAccumulator, SetMemory, \
SetInstructionPointer, ShowCPU, ShowMemory, ShowSource, Step
from unittest import TestCase
class CommandParserTest(TestCase):
    '''Checks that each textual debugger command is parsed into the
       matching command object.'''

    def verify(self, expectation, text):
        '''Parses p_text and asserts the resulting command equals
           p_expectation.'''
        parsed = DebugController._parse_command(text)
        self.assertEqual(expectation, parsed)

    def test_set_ip(self):
        self.verify(SetInstructionPointer(25), "set ip 25")

    def test_set_acc(self):
        self.verify(SetAccumulator(25), "set acc 25")

    def test_set_memory(self):
        self.verify(SetMemory(25, -10), "set memory 25 -10")

    def test_break(self):
        self.verify(Break(24), "break at address 24")

    def test_step(self):
        self.verify(Step(), "step")

    def test_quit(self):
        self.verify(Quit(), "quit")

    def test_show_memory(self):
        self.verify(ShowMemory(10, 30), "show memory 10 30")

    def test_show_cpu(self):
        self.verify(ShowCPU(), "show cpu")

    def test_show_source(self):
        self.verify(ShowSource(), "show source")

    def test_show_source_with_start(self):
        self.verify(ShowSource(start=12), "show source 12")

    def test_show_source_with_start_and_end(self):
        self.verify(ShowSource(12, 20), "show source 12 20")
| [
"fchauvel@Francks-MacBook-Air.local"
] | fchauvel@Francks-MacBook-Air.local |
58f06df37bdf3aa5057e2dc27ba176ff46525400 | 6178e1601a88aaf05d19b4aabf4395d96513a3ae | /RockPaperScissors.py | 3ca80590a73c21bc450635ba6dc621fdc7f2bd58 | [] | no_license | chanan-hash/RPS---rock-paper-scissors | 3ceee5da74ee0aeea74acd182df458a166380180 | 7100936ce97b1fbfcc6a00f0b5a3c0ea794edb39 | refs/heads/master | 2022-11-30T09:01:18.939263 | 2020-08-08T20:23:45 | 2020-08-08T20:23:45 | 286,111,929 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,319 | py | # lets creat our pc by random
import random
# the moves that the pc can choose
moves = ["rock", "paper", "scissors"]
keep_playing = "true"
countC = 0
countP = 0
countT = 0
while keep_playing == "true":
cmove = random.choice(moves)
pmoves = input("what is your move: rock, paper or scissors ?")
print("The computer choos", cmove)
if cmove == pmoves:
print("Tie")
countT +=1
elif pmoves == "rock" and cmove == "scissors":
print("Player Wins!!")
countP +=1
elif pmoves == "rock" and cmove == "paper":
print("Computer wins ): ")
countC +=1
elif pmoves == "paper" and cmove == "rock":
print("Player Wins!!")
countP +=1
elif pmoves == "paper" and cmove == "scissors":
print("Computer wins ): ")
countC +=1
elif pmoves == "scissors" and cmove == "paper":
print("Player Wins!!")
countP +=1
elif pmoves == "scissors" and cmove == "rock":
print("Computer wins ): ")
countC +=1
print("Computer won: ", countC)
print("Player won: ", countP)
print("Tie", countT)
if countC == 3:
print("The computer won ):")
break
elif countP == 3:
print("Great job! you have won the game!!")
break
# a counter for wins and tie
| [
"chananhelman@gmail.com"
] | chananhelman@gmail.com |
804b7d2aeaf690de61b0b87bbb40796c12287a2a | dfaf5cd5607c2c4e55ec9173d2d7ca12842db129 | /104_findDiagonalOrder.py | add0e471b6091ea711a6ab960ca377f92f09bd77 | [] | no_license | khinthandarkyaw98/Leetcode | 2b0be053931b3ddec6309d136228dae1f4c61b2b | 578f2a38d8a41864ebfd6c4e941f6915c7c0a508 | refs/heads/master | 2023-06-24T02:34:59.399319 | 2021-07-14T19:37:14 | 2021-07-14T19:37:14 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 488 | py | import collections
class Solution:
    def findDiagonalOrder(self, nums: "list[list[int]]") -> "list[int]":
        """Traverse a (possibly ragged) 2-D list diagonally, bottom-left to
        top-right within each diagonal (LeetCode 1424).

        BFS from (0, 0): from each cell, enqueue the cell below it (only
        from column 0, so each new diagonal is seeded exactly once) and the
        cell to its right. Runs in O(total elements).

        Note: the original annotated the parameters with ``List`` without
        importing it from ``typing``, which raises NameError when the class
        is defined outside LeetCode's harness; string annotations fix that
        without changing the documented types.
        """
        order = []
        queue = collections.deque([(0, 0)])
        while queue:
            row, col = queue.popleft()
            # Seed the next diagonal from the first column only.
            if col == 0 and row < len(nums) - 1:
                queue.append((row + 1, col))
            # Continue along the current row if it has more elements.
            if col < len(nums[row]) - 1:
                queue.append((row, col + 1))
            order.append(nums[row][col])
        return order
| [
"noreply@github.com"
] | noreply@github.com |
c8cfdcbe34ea2f3661ecc2ae9ab3418ab1b65fca | 593a914230467f6dd37af2f58a552f9ed0beb691 | /movierater/urls.py | fc9f48ed293027e419d2dbb44077d9a80999b60d | [] | no_license | tom3108/Django-Restfull-API | 3b816aa02742887593bd5274355cbb95dd7d4cf0 | a749c8f4c0b6f2f09c53247528f1be7935ba78fa | refs/heads/master | 2022-11-14T04:43:02.821656 | 2020-07-05T21:59:11 | 2020-07-05T21:59:11 | 275,021,032 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 206 | py | from django.urls import include
from api import urls
from django.contrib import admin
from django.urls import path
# Project URL routing table: Django admin plus the REST API routes.
urlpatterns = [
    path('admin/', admin.site.urls),  # built-in Django admin interface
    path('api/', include(urls)),  # routes defined in the api app's urls module
]
| [
"t.madziara@gmail.com"
] | t.madziara@gmail.com |
f16456d6543dfb8a815b5f970e49aa59bfc89f36 | 8848d3340d83a0a6e3d6ab1a88504655ba380215 | /asset/migrations/0015_auto_20170905_1604.py | 8b42a0be9776b25d27ecf3143641f2ff50327671 | [] | no_license | ezbuy/cmdb | 51480e38492ee63717cdb5489231d80ac69e0bc0 | 9a3649f952001075ae1cfa4578fbc47505f5bd62 | refs/heads/master | 2022-12-12T05:15:41.282991 | 2018-07-13T07:42:25 | 2018-07-13T07:42:25 | 121,585,817 | 12 | 11 | null | 2022-12-08T00:00:31 | 2018-02-15T02:56:02 | Python | UTF-8 | Python | false | false | 766 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2017-09-05 16:04
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated schema migration: adds a field tracking the latest
    # successful gotemplate revision on GoServiceRevision and re-labels the
    # existing "last_rev" field. Do not edit applied migrations by hand.

    dependencies = [
        ('asset', '0014_gotemplaterevision'),
    ]

    operations = [
        migrations.AddField(
            model_name='goservicerevision',
            name='gotemplate_last_rev',
            # default=1 backfills existing rows; preserve_default=False
            # drops the default from the field definition afterwards.
            field=models.IntegerField(default=1, verbose_name='gotemplate latest successful revision'),
            preserve_default=False,
        ),
        migrations.AlterField(
            model_name='goservicerevision',
            name='last_rev',
            field=models.IntegerField(verbose_name='goproject latest successful revision'),
        ),
    ]
| [
"pengzihe@ezbuy.com"
] | pengzihe@ezbuy.com |
83cfc39f2209d26c0a9c99269007926b5565da96 | 84268b920e7959590d1bf11a883dd7619f783759 | /app/app.py | 926cdfe9c020de578ebacfc7f8cc8d90a0556efa | [
"MIT"
] | permissive | teguh87/flask_scrappy | 37071bf82a4ffae9b179ff2198735aee87f3e33e | 9d477f550aa8bf1579ac60ece4949a78f83258ed | refs/heads/master | 2020-03-28T08:06:43.909417 | 2018-09-10T00:51:24 | 2018-09-10T00:51:24 | 147,945,199 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 129 | py | import os
from eve import Eve
import sassutils.wsgi
# Eve REST application; configuration is read from the default settings
# location (presumably a settings.py next to this file — TODO confirm).
app = Eve()
# Compile Sass stylesheets on the fly; the empty dict registers no package
# manifests yet.
app.wsgi_app = sassutils.wsgi.SassMiddleware(app.wsgi_app, {})
"teguh@finmas.co.id"
] | teguh@finmas.co.id |
22a4c9a7a54a2b11a51889f8c59e7bff6a55fe19 | 68e71bb1f07d1daf047a528127497a015f970b38 | /scalex/job.py | 0832342dc4807d90fbd4ca479183f45a65910a2b | [] | no_license | scalextremeinc/scalexsdk | 46c4ca79476b33df4e128eac7113c855d9201645 | 5daa1bbe6948b49f12915847e3c8755537f8a2e8 | refs/heads/master | 2021-01-19T03:23:46.129788 | 2016-02-22T02:32:41 | 2016-02-22T02:32:41 | 4,515,864 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,956 | py | '''
@undocumented: __package__
@undocumented: create
'''
import urllib
import urllib2
import json
import datetime
import time
#
from scalex import userinfo
from scalex import script
# JOB API
# 3.29 + // restJobController.testCreateJob();
# 3.30 + // restJobController.testRunInfo();
# 3.31 + // restJobController.testJobRunOutput();
# 3.32 + // restJobController.testJobList();
# 3.33 // restJobController.testEditjob();
# 3.34 // restJobController.testDeleteJob();
# 3.35 + // restJobController.testCreatePatchJob();
# 3.36 + restJobController.testCancelJob();
def getJobs(type = 'script', object = {}):
    '''
    Get jobs of a given script, or the applied update/patch jobs.

    Examples:
        - getJobs(type='patch')                  # applied patch jobs
        - getJobs(type='script', object=script)  # jobs of a given script

    @type type: string
    @param type: Job type, default is B{script}; valid values are
        B{script}, B{patch} and B{update}.
    @type object: dict
    @param object: When type is "script", the script returned by
        scalex.script.getScripts().
    @rtype: list
    @return: List of jobs decoded from the server response.
    '''
    # REST call: GET /jobs?type=<...>&id=<...>
    assert type in ['script', 'update', 'patch'], 'wrong type'
    params = {'type': type}
    if type == 'script':
        assert object != {}, 'no script object'
        params['id'] = object['scriptId']
    else:
        # Update/patch jobs are not tied to a script: the query type
        # becomes "applyupdate"/"applypatch" with a dummy id.
        params['id'] = 0
        params['type'] = 'apply' + type
    response = urllib2.urlopen(urllib2.Request(userinfo.geturl('/jobs', params)))
    return json.loads(response.read())
def _create_or_update_job(path, name, script = None, targets = None, arguments = [], type = 'script', version = -1, serverGroups = [], scheduleType = 0, startTime = 0, repeatInterval = 60, endTime = 0, repeatCount = 0):
    '''
    For Internal Use ONLY.

    Builds the job payload and POSTs it to p_path ("/jobs" to create, or
    "/jobs/<jobId>" to update). p_scheduleType is 0 for "run once" and 1
    for "recurring"; p_startTime/p_endTime are "%Y-%m-%d-%H:%M" strings
    converted here to epoch milliseconds. Returns the decoded JSON
    response.
    '''
    # Server API (kept from the original author's notes):
    #   API: /jobs, Method: POST, json payload with name, scriptId,
    #   targets, scriptArgs, type, repeatCount, serverGroups, endTime,
    #   startTime, scheduleType, taskParameters, repeatInterval.
    # Maps our scheduleType (0 = run once, 1 = recurring) onto the
    # server-side codes 12 and 14.
    _schecule_type = [12, 14, 2]
    if scheduleType == 0:
        if startTime != 0:
            # Convert "%Y-%m-%d-%H:%M" to epoch milliseconds.
            d = datetime.datetime.strptime(startTime, "%Y-%m-%d-%H:%M")
            startTime = int(time.mktime(d.timetuple())*1000)
    elif scheduleType == 1:
        # A recurring schedule needs a bound: a repeat count or an end time.
        if repeatCount == 0 and endTime == 0:
            assert False, 'wrong schedule'
        if endTime != 0:
            d = datetime.datetime.strptime(endTime, "%Y-%m-%d-%H:%M")
            endTime = int(time.mktime(d.timetuple())*1000)
        pass
    else:
        assert False, 'wrong schedule type'
    scheduleType = _schecule_type[scheduleType]
    # Accept a single target node as well as a list of nodes.
    if not isinstance(targets, list):
        t = targets
        targets = []
        targets.append(t)
    agents = []
    for n in targets:
        agents.append(n['nodeId'])
    scriptid = 0
    if type == 'script' and version == -1:
        # Default to the script's current version.
        version = script['version']
    # NOTE(review): p_script is accessed unconditionally here although it
    # defaults to None — presumably every caller supplies it; confirm for
    # the apply-update/apply-patch flows.
    scriptid = script['scriptId']
    payload = {
        "name":name,
        "scriptId":scriptid,
        "targets":agents,
        "scriptArgs":arguments,
        "type":type,
        "version":version,
        "repeatCount":repeatCount,
        "serverGroups":serverGroups,
        "endTime":endTime,
        "startTime":startTime,
        "scheduleType":scheduleType,
        "taskParameters":[],
        "repeatInterval":repeatInterval,
    }
    postData = json.dumps(payload)
    url = userinfo.geturl(path)
    request = urllib2.Request(url, postData)
    request.add_header('Content-Type', 'application/json')
    response = urllib2.urlopen(request)
    returnData = json.loads(response.read())
    return returnData
def create(name, script, targets, arguments = [], type = 'script', version = -1, serverGroups = [], scheduleType = 0, startTime = 0, repeatInterval = 60, endTime = 0, repeatCount = 0):
    '''Creates a new job by POSTing to /jobs. See _create_or_update_job for
       the meaning of the parameters; returns the job just created.'''
    return _create_or_update_job('/jobs', name, script, targets, arguments,
                                 type, version, serverGroups, scheduleType,
                                 startTime, repeatInterval, endTime,
                                 repeatCount)
def update(job, name, script, targets, arguments = [], type = 'script', version = -1, serverGroups = [],
           scheduleType = 0, startTime = 0, repeatInterval = 60, endTime = 0, repeatCount = 0):
    '''
    Update a job.

    @todo: params and tags not implemented; server may return a 404 ERROR
        if the name already exists.
    @type job: dict
    @param job: Job returned by getJobs()
    @type name: string
    @param name: Job name
    @type script: dict
    @param script: Script returned by getScripts()
    @param targets: Targets returned by scalex.node.getNodes() or a single
        node.
    @type arguments: list
    @param arguments: Arguments of the script, default is []
    @type scheduleType: int
    @param scheduleType: Schedule type of the job, default is 0:
        - B{0}, Run Once
        - B{1}, Recurring
    @param startTime: Start time formatted like B{2012-12-12-00:00},
        default is now
    @param repeatInterval: Repeat interval of a recurring schedule, in
        minutes; default is 60.
    @param endTime: End time of a recurring schedule, formatted like
        B{2012-12-12-00:00}. Required for a recurring job that uses a
        repeat interval.
    @type repeatCount: int
    @param repeatCount: Repeat count of a recurring scheduled job.
    @rtype: dict
    @return: Job just updated
    '''
    # Same payload as creation, but posted to /jobs/<jobId>.
    path = '/jobs/' + str(job['jobId'])
    return _create_or_update_job(path, name, script, targets, arguments, type, version, serverGroups, scheduleType, startTime, repeatInterval, endTime, repeatCount)
#
#def _appliedUpdatesOrPatches(path):
# '''
# '''
# value = {
# 'companyid':userinfo.companyid,
# 'user':userinfo.userid,
# 'role':userinfo.rolename,
# 'rid':userinfo.rid
# }
# query = urllib.urlencode(value)
# url = '%s%s?%s' % (userinfo.domain, path, query)
# payload = {
# 'companyId': userinfo.companyid,
# 'user': str(userinfo.userid),
# 'role': userinfo.rolename,
# 'scriptId': 0
# }
# postData = 'payload=' + json.dumps(payload)
# request = urllib2.Request(url, postData)
# request.add_header('cookie', userinfo.cookie)
# response = urllib2.urlopen(request)
# returnData = json.loads(response.read())
# return returnData
#
#def getUpdateJobs():
# '''
# '''
# updatesPath = '/managejob/appliedupdates'
# return _appliedUpdatesOrPatches(updatesPath)
#
#def getPatchJobs():
# '''
# '''
# patchesPath = '/managejob/appliedpatches'
# return _appliedUpdatesOrPatches(patchesPath)
#
def getRuns(job):
    '''
    Get the runs of a given job.

    @type job: dict
    @param job: Job returned by getJobs()
    @rtype: list
    @return: List of runs
    '''
    # REST call: GET /jobs/{jobId}/runinfo
    url = userinfo.geturl('/jobs/%s/runinfo' % (job['jobId']))
    return json.loads(urllib2.urlopen(urllib2.Request(url)).read())
def getOutputs(run):
    '''
    Get the outputs of a given run.

    @type run: dict
    @param run: Run returned by getRuns()
    @rtype: list
    @return: List of outputs
    '''
    # REST call: GET /jobs/{jobId}/runoutput?runid=<runId>
    url = userinfo.geturl('/jobs/%s/runoutput' % (str(run['jobId'])),
                          {'runid': run['runId']})
    return json.loads(urllib2.urlopen(urllib2.Request(url)).read())
def cancel(run):
    '''
    Cancel future runs of the job that p_run belongs to.

    @param run: The run you want to cancel
    '''
    # REST call: POST /jobs/{jobId}/cancel/?runid=<runId>
    url = userinfo.geturl('/jobs/%s/cancel/' % (str(run['jobId'])),
                          {'runid': run['runId']})
    # POSTing an empty body triggers the cancellation.
    return json.loads(urllib2.urlopen(urllib2.Request(url, '')).read())
def delete(job):
    '''
    Delete a job.

    @param job: The job you want to delete
    '''
    # REST call: DELETE /jobs/<jobId>
    url = userinfo.geturl('/jobs' + '/' + str(job['jobId']), {})
    request = urllib2.Request(url)
    # urllib2 has no native DELETE support: patch the method on the request.
    request.get_method = lambda: 'DELETE'
    return json.loads(urllib2.urlopen(request).read())
| [
"midnacc@gmail.com"
] | midnacc@gmail.com |
e5a585746b41b9ab864636f386143fac7a8d2f36 | 4d1aadff2aec7681f9b34b1b4e31106bc313528d | /twilio_whatsapp.py | edf5b82ef400566c4092cf7f30112a32b07b1724 | [] | no_license | andreivirtosu/canyon_outlet_checker | ff20a3b3da4b8a6a7e4735ac406033980e73f2a6 | ed10b80165b720bc0f3ff04ca358b88ebc88331a | refs/heads/master | 2023-01-21T06:41:15.998119 | 2020-05-24T14:06:18 | 2020-05-24T14:06:18 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 437 | py | from twilio.rest import Client
import smtplib
import settings
def send_whatsapp(message):
    """Send *message* to the configured WhatsApp recipient via Twilio."""
    twilio = Client(settings.ACCOUNT_SID, settings.AUTH_TOKEN)
    sent = twilio.messages.create(
        from_=settings.WHATSAPP_FROM,
        to=settings.WHATSAPP_TO,
        body=message,
    )
    print(f'Sent message: {sent.sid}')
| [
"andrei.virtosu@gmail.com"
] | andrei.virtosu@gmail.com |
989cebdde1f1edb13d8b2c625c233a9d8db44468 | cd04112e1b8995cabb3464fe408c308aa3005bdd | /pylib/appy/pod/renderer.py | cd28abab24a9009d15aec2f623c459106b8ec56f | [] | no_license | vampolo/cacerp | 6fc132bf827fb9ec245f32d6caf6d4c5ab827e2d | 57aef2008ae4cb6e783d46b0cfc7cfc32b16a54c | refs/heads/master | 2021-01-19T05:28:57.675300 | 2011-03-28T17:17:24 | 2011-03-28T17:17:24 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 23,528 | py | # ------------------------------------------------------------------------------
# Appy is a framework for building applications in the Python language.
# Copyright (C) 2007 Gaetan Delannay
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,USA.
# ------------------------------------------------------------------------------
import zipfile, shutil, xml.sax, os, os.path, re, mimetypes, time
from UserDict import UserDict
import appy.pod
from appy.pod import PodError
from appy.shared import mimeTypesExts
from appy.shared.xml_parser import XmlElement
from appy.shared.utils import FolderDeleter, executeCommand
from appy.pod.pod_parser import PodParser, PodEnvironment, OdInsert
from appy.pod.converter import FILE_TYPES
from appy.pod.buffers import FileBuffer
from appy.pod.xhtml2odt import Xhtml2OdtConverter
from appy.pod.doc_importers import OdtImporter, ImageImporter, PdfImporter
from appy.pod.styles_manager import StylesManager
# ------------------------------------------------------------------------------
BAD_CONTEXT = 'Context must be either a dict, a UserDict or an instance.'
RESULT_FILE_EXISTS = 'Result file "%s" exists.'
CANT_WRITE_RESULT = 'I cannot write result file "%s". %s'
TEMP_FOLDER_EXISTS = 'I need to use a temp folder "%s" but this folder ' \
'already exists.'
CANT_WRITE_TEMP_FOLDER = 'I cannot create temp folder "%s". %s'
NO_PY_PATH = 'Extension of result file is "%s". In order to perform ' \
'conversion from ODT to this format we need to call OpenOffice. ' \
'But the Python interpreter which runs the current script does ' \
'not know UNO, the library that allows to connect to ' \
'OpenOffice in server mode. If you can\'t install UNO in this ' \
'Python interpreter, you can specify, in parameter ' \
'"pythonWithUnoPath", the path to a UNO-enabled Python ' \
'interpreter. One such interpreter may be found in ' \
'<open_office_path>/program.'
PY_PATH_NOT_FILE = '"%s" is not a file. You must here specify the absolute ' \
'path of a Python interpreter (.../python, .../python.sh, ' \
'.../python.exe, .../python.bat...).'
BLANKS_IN_PATH = 'Blanks were found in path "%s". Please use the DOS-names ' \
'(ie, "progra~1" instead of "Program files" or "docume~1" ' \
'instead of "Documents and settings".'
BAD_RESULT_TYPE = 'Result "%s" has a wrong extension. Allowed extensions ' \
'are: "%s".'
CONVERT_ERROR = 'An error occurred during the conversion. %s'
BAD_OO_PORT = 'Bad OpenOffice port "%s". Make sure it is an integer.'
XHTML_ERROR = 'An error occurred while rendering XHTML content.'
WARNING_INCOMPLETE_ODT = 'Warning: your ODT file may not be complete (ie ' \
'imported documents may not be present). This is ' \
'because we could not connect to OpenOffice in ' \
'server mode: %s'
DOC_NOT_SPECIFIED = 'Please specify a document to import, either with a ' \
'stream (parameter "content") or with a path (parameter ' \
'"at")'
DOC_FORMAT_ERROR = 'POD was unable to deduce the document format. Please ' \
'specify it through parameter named "format" (=odt, gif, ' \
'png, ...).'
DOC_WRONG_FORMAT = 'Format "%s" is not supported.'
WARNING_FINALIZE_ERROR = 'Warning: error while calling finalize function. %s'
# Default automatic text styles added by pod in content.xml
f = open('%s/styles.in.content.xml' % os.path.dirname(appy.pod.__file__))
CONTENT_POD_STYLES = f.read()
f.close()
# Default font added by pod in content.xml
CONTENT_POD_FONTS = '<@style@:font-face style:name="PodStarSymbol" ' \
'@svg@:font-family="StarSymbol"/>'
# Default text styles added by pod in styles.xml
f = file('%s/styles.in.styles.xml' % os.path.dirname(appy.pod.__file__))
STYLES_POD_STYLES = f.read()
f.close()
# Default font added by pod
STYLES_POD_FONTS = '<@style@:font-face @style@:name="PodStarSymbol" ' \
'@svg@:font-family="StarSymbol"/>'
# ------------------------------------------------------------------------------
class Renderer:
    def __init__(self, template, context, result, pythonWithUnoPath=None,
                 ooPort=2002, stylesMapping={}, forceOoCall=False,
                 finalizeFunction=None):
        '''This Python Open Document Renderer (PodRenderer) loads a document
           template (p_template) which is an ODT file with some elements
           written in Python. Based on this template and some Python objects
           defined in p_context, the renderer generates an ODT file
           (p_result) that instantiates the p_template and fills it with
           objects from the p_context.

           - If p_result does not end with .odt, the Renderer will call
             OpenOffice to perform a conversion. If p_forceOoCall is True,
             even if p_result ends with .odt, OpenOffice will be called, not
             for performing a conversion, but for updating some elements like
             indexes (table of contents, etc) and sections containing links
             to external files (which is the case, for example, if you use
             the default function "document").

           - If the Python interpreter which runs the current script is not
             UNO-enabled, this script will run, in another process, a
             UNO-enabled Python interpreter (whose path is
             p_pythonWithUnoPath) which will call OpenOffice. In both cases,
             we will try to connect to OpenOffice in server mode on port
             p_ooPort.

           - If you plan to make "XHTML to OpenDocument" conversions, you may
             specify a styles mapping in p_stylesMapping.

           - If you specify a function in p_finalizeFunction, this function
             will be called by the renderer before re-zipping the ODT result.
             This way, you can still perform some actions on the content of
             the ODT file before it is zipped and potentially converted. This
             function must accept one arg: the absolute path to the temporary
             folder containing the un-zipped content of the ODT result.'''
        self.template = template
        self.templateZip = zipfile.ZipFile(template)
        self.result = result
        self.contentXml = None # Content (string) of content.xml
        self.stylesXml = None # Content (string) of styles.xml
        self.stylesManager = None # Manages the styles defined into the ODT
                                  # template
        self.tempFolder = None
        self.env = None
        self.pyPath = pythonWithUnoPath
        self.ooPort = ooPort
        self.forceOoCall = forceOoCall
        self.finalizeFunction = finalizeFunction
        # Retain potential files or images that will be included through
        # "do ... from document" statements: we will need to declare them in
        # META-INF/manifest.xml.
        self.fileNames = []
        # Creates the temp folder (and checks the result is writable).
        self.prepareFolders()
        # Unzip template
        self.unzipFolder = os.path.join(self.tempFolder, 'unzip')
        os.mkdir(self.unzipFolder)
        for zippedFile in self.templateZip.namelist():
            # Before writing the zippedFile into self.unzipFolder, create the
            # intermediary subfolder(s) if needed.
            fileName = None
            if zippedFile.endswith('/') or zippedFile.endswith(os.sep):
                # This is an empty folder. Create it nevertheless.
                os.makedirs(os.path.join(self.unzipFolder, zippedFile))
            else:
                fileName = os.path.basename(zippedFile)
                folderName = os.path.dirname(zippedFile)
                fullFolderName = self.unzipFolder
                if folderName:
                    fullFolderName = os.path.join(fullFolderName, folderName)
                    if not os.path.exists(fullFolderName):
                        os.makedirs(fullFolderName)
            # Unzip the file in self.unzipFolder
            if fileName:
                fullFileName = os.path.join(fullFolderName, fileName)
                f = open(fullFileName, 'wb')
                fileContent = self.templateZip.read(zippedFile)
                if (fileName == 'content.xml') and not folderName:
                    # content.xml files may reside in subfolders.
                    # We modify only the one in the root folder.
                    self.contentXml = fileContent
                elif (fileName == 'styles.xml') and not folderName:
                    # Same remark as above.
                    self.stylesManager = StylesManager(fileContent)
                    self.stylesXml = fileContent
                f.write(fileContent)
                f.close()
        self.templateZip.close()
        # Create the content.xml parser
        pe = PodEnvironment
        contentInserts = (
            OdInsert(CONTENT_POD_FONTS,
                     XmlElement('font-face-decls', nsUri=pe.NS_OFFICE),
                     nsUris={'style': pe.NS_STYLE, 'svg': pe.NS_SVG}),
            OdInsert(CONTENT_POD_STYLES,
                     XmlElement('automatic-styles', nsUri=pe.NS_OFFICE),
                     nsUris={'style': pe.NS_STYLE, 'fo': pe.NS_FO,
                             'text': pe.NS_TEXT, 'table': pe.NS_TABLE}))
        self.contentParser = self.createPodParser('content.xml', context,
                                                  contentInserts)
        # Create the styles.xml parser
        stylesInserts = (
            OdInsert(STYLES_POD_FONTS,
                     XmlElement('font-face-decls', nsUri=pe.NS_OFFICE),
                     nsUris={'style': pe.NS_STYLE, 'svg': pe.NS_SVG}),
            OdInsert(STYLES_POD_STYLES,
                     XmlElement('styles', nsUri=pe.NS_OFFICE),
                     nsUris={'style': pe.NS_STYLE, 'fo': pe.NS_FO}))
        self.stylesParser = self.createPodParser('styles.xml', context,
                                                 stylesInserts)
        # Stores the styles mapping
        self.setStylesMapping(stylesMapping)
def createPodParser(self, odtFile, context, inserts):
'''Creates the parser with its environment for parsing the given
p_odtFile (content.xml or styles.xml). p_context is given by the pod
user, while p_inserts depends on the ODT file we must parse.'''
evalContext = {'xhtml': self.renderXhtml,
'test': self.evalIfExpression,
'document': self.importDocument} # Default context
if hasattr(context, '__dict__'):
evalContext.update(context.__dict__)
elif isinstance(context, dict) or isinstance(context, UserDict):
evalContext.update(context)
else:
raise PodError(BAD_CONTEXT)
env = PodEnvironment(evalContext, inserts)
fileBuffer = FileBuffer(env, os.path.join(self.tempFolder,odtFile))
env.currentBuffer = fileBuffer
return PodParser(env, self)
    def renderXhtml(self, xhtmlString, encoding='utf-8', stylesMapping={}):
        '''Method that can be used (under the name 'xhtml') into a pod template
           for converting a chunk of XHTML content (p_xhtmlString) into a
           chunk of ODT content. p_stylesMapping may override, for this call
           only, the mapping given to the renderer.'''
        # Validate/normalize the call-local styles mapping first.
        stylesMapping = self.stylesManager.checkStylesMapping(stylesMapping)
        ns = self.currentParser.env.namespaces
        # xhtmlString is only a chunk of XHTML. So we must surround it a tag in
        # order to get a XML-compliant file (we need a root tag)
        xhtmlContent = '<podXhtml>%s</podXhtml>' % xhtmlString
        return Xhtml2OdtConverter(xhtmlContent, encoding, self.stylesManager,
                                  stylesMapping, ns).run()
def evalIfExpression(self, condition, ifTrue, ifFalse):
'''This method implements the method 'test' which is proposed in the
default pod context. It represents an 'if' expression (as opposed to
the 'if' statement): depending on p_condition, expression result is
p_ifTrue or p_ifFalse.'''
if condition:
return ifTrue
return ifFalse
    # Formats accepted by importDocument, split between images and
    # OpenOffice documents.
    imageFormats = ('png', 'jpeg', 'jpg', 'gif')
    ooFormats = ('odt',)

    def importDocument(self, content=None, at=None, format=None,
                       anchor='as-char'):
        '''If p_at is not None, it represents a path or url allowing to find
           the document. If p_at is None, the content of the document is
           supposed to be in binary format in p_content. The document
           p_format may be: odt or any format in imageFormats. p_anchor is
           only relevant for images. Raises PodError when no document or an
           unsupported format is given.'''
        ns = self.currentParser.env.namespaces
        importer = None
        # Is there someting to import?
        if not content and not at:
            raise PodError(DOC_NOT_SPECIFIED)
        # Guess document format
        if not format:
            # It should be deduced from p_at
            if not at:
                raise PodError(DOC_FORMAT_ERROR)
            format = os.path.splitext(at)[1][1:]
        else:
            # If format is a mimeType, convert it to an extension
            if mimeTypesExts.has_key(format):
                format = mimeTypesExts[format]
        isImage = False
        if format in self.ooFormats:
            importer = OdtImporter
            # An embedded ODT forces an OpenOffice call to resolve the link.
            self.forceOoCall = True
        elif format in self.imageFormats:
            importer = ImageImporter
            isImage = True
        elif format == 'pdf':
            importer = PdfImporter
        else:
            raise PodError(DOC_WRONG_FORMAT % format)
        imp = importer(content, at, format, self.tempFolder, ns)
        if isImage:
            imp.setAnchor(anchor)
        res = imp.run()
        # Remember imported file names for META-INF/manifest.xml patching.
        if imp.fileNames:
            self.fileNames += imp.fileNames
        return res
    def prepareFolders(self):
        '''Checks that the result file is writable and creates the temp
           working folder ("<result>.temp"); raises PodError otherwise.'''
        # Check if I can write the result
        if os.path.exists(self.result):
            raise PodError(RESULT_FILE_EXISTS % self.result)
        try:
            # Probe writability by actually creating the file...
            f = open(self.result, 'w')
            f.write('Hello')
            f.close()
        except OSError, oe:
            raise PodError(CANT_WRITE_RESULT % (self.result, oe))
        except IOError, ie:
            raise PodError(CANT_WRITE_RESULT % (self.result, ie))
        self.result = os.path.abspath(self.result)
        # ... then remove the probe file again.
        os.remove(self.result)
        # Check that temp folder does not exist
        self.tempFolder = os.path.abspath(self.result) + '.temp'
        if os.path.exists(self.tempFolder):
            raise PodError(TEMP_FOLDER_EXISTS % self.tempFolder)
        try:
            os.mkdir(self.tempFolder)
        except OSError, oe:
            # NOTE(review): the message interpolates self.result here;
            # presumably self.tempFolder was intended — confirm.
            raise PodError(CANT_WRITE_TEMP_FOLDER % (self.result, oe))
    def patchManifest(self):
        '''Declares, in META-INF/manifest.xml, images or files included via
           the "do... from document" statements if any.'''
        if self.fileNames:
            j = os.path.join
            toInsert = ''
            # One <manifest:file-entry> per imported file, with its guessed
            # mime type.
            for fileName in self.fileNames:
                mimeType = mimetypes.guess_type(fileName)[0]
                toInsert += ' <manifest:file-entry manifest:media-type="%s" ' \
                            'manifest:full-path="%s"/>\n' % (mimeType, fileName)
            manifestName = j(self.unzipFolder, j('META-INF', 'manifest.xml'))
            f = file(manifestName)
            manifestContent = f.read()
            # Insert the new entries just before the closing root tag.
            hook = '</manifest:manifest>'
            manifestContent = manifestContent.replace(hook, toInsert+hook)
            f.close()
            # Write the new manifest content
            f = file(manifestName, 'w')
            f.write(manifestContent)
            f.close()
# Public interface
    def run(self):
        '''Renders the result: regenerates both XML parts of the template,
           patches the manifest and re-zips everything into self.result.'''
        # Remember which parser is running
        self.currentParser = self.contentParser
        # Create the resulting content.xml
        self.currentParser.parse(self.contentXml)
        self.currentParser = self.stylesParser
        # Create the resulting styles.xml
        self.currentParser.parse(self.stylesXml)
        # Patch META-INF/manifest.xml
        self.patchManifest()
        # Re-zip the result
        self.finalize()
    def getStyles(self):
        '''Returns the dict of styles that are defined in the template, as
           maintained by the styles manager.'''
        return self.stylesManager.styles
    def setStylesMapping(self, stylesMapping):
        '''Establishes a correspondence between, on one hand, CSS styles or
           XHTML tags that will be found inside XHTML content given to POD,
           and, on the other hand, ODT styles found in the template.'''
        try:
            stylesMapping = self.stylesManager.checkStylesMapping(stylesMapping)
            self.stylesManager.stylesMapping = stylesMapping
        except PodError, po:
            # The mapping is invalid: release the buffers opened by both
            # parsers and remove the temp folder before re-raising.
            self.contentParser.env.currentBuffer.content.close()
            self.stylesParser.env.currentBuffer.content.close()
            if os.path.exists(self.tempFolder):
                FolderDeleter.delete(self.tempFolder)
            raise po
    def reportProblem(self, msg, resultType):
        '''When trying to call OO in server mode for producing ODT
           (=forceOoCall=True), if an error occurs we still have an ODT to
           return to the user. So we produce a warning instead of raising an
           error.'''
        if (resultType == 'odt') and self.forceOoCall:
            print WARNING_INCOMPLETE_ODT % msg
        else:
            # NOTE(review): 'msg' is raised as-is, so callers must pass an
            # exception instance here, not a plain string -- confirm.
            raise msg
    def callOpenOffice(self, resultOdtName, resultType):
        '''Call Open Office in server mode to convert or update the ODT
           result. Returns the textual output of the converter sub-process
           (empty string when the in-process converter was used).'''
        ooOutput = ''
        try:
            if (not isinstance(self.ooPort, int)) and \
               (not isinstance(self.ooPort, long)):
                raise PodError(BAD_OO_PORT % str(self.ooPort))
            try:
                # Preferred path: the UNO bindings are importable in this
                # interpreter, so run the converter in-process.
                from appy.pod.converter import Converter, ConverterError
                try:
                    Converter(resultOdtName, resultType,
                              self.ooPort).run()
                except ConverterError, ce:
                    raise PodError(CONVERT_ERROR % str(ce))
            except ImportError:
                # I do not have UNO. So try to launch a UNO-enabled Python
                # interpreter which should be in self.pyPath.
                if not self.pyPath:
                    raise PodError(NO_PY_PATH % resultType)
                if self.pyPath.find(' ') != -1:
                    raise PodError(BLANKS_IN_PATH % self.pyPath)
                if not os.path.isfile(self.pyPath):
                    raise PodError(PY_PATH_NOT_FILE % self.pyPath)
                # Quote any path containing blanks before building the
                # shell command line.
                if resultOdtName.find(' ') != -1:
                    qResultOdtName = '"%s"' % resultOdtName
                else:
                    qResultOdtName = resultOdtName
                convScript = '%s/converter.py' % \
                             os.path.dirname(appy.pod.__file__)
                if convScript.find(' ') != -1:
                    convScript = '"%s"' % convScript
                cmd = '%s %s %s %s -p%d' % \
                      (self.pyPath, convScript, qResultOdtName, resultType,
                       self.ooPort)
                ooOutput = executeCommand(cmd)
        except PodError, pe:
            # When trying to call OO in server mode for producing
            # ODT (=forceOoCall=True), if an error occurs we still
            # have an ODT to return to the user. So we produce a
            # warning instead of raising an error.
            if (resultType == 'odt') and self.forceOoCall:
                print WARNING_INCOMPLETE_ODT % str(pe)
            else:
                raise pe
        return ooOutput
    def finalize(self):
        '''Re-zip the result and potentially call OpenOffice if target format
           is not ODT or if forceOoCall is True. Always removes the temp
           folder, even on failure.'''
        # Copy the regenerated XML parts over the unzipped template.
        for odtFile in ('content.xml', 'styles.xml'):
            shutil.copy(os.path.join(self.tempFolder, odtFile),
                        os.path.join(self.unzipFolder, odtFile))
        if self.finalizeFunction:
            try:
                self.finalizeFunction(self.unzipFolder)
            except Exception, e:
                # A failing user hook must not abort rendering.
                print WARNING_FINALIZE_ERROR % str(e)
        resultOdtName = os.path.join(self.tempFolder, 'result.odt')
        try:
            resultOdt = zipfile.ZipFile(resultOdtName,'w', zipfile.ZIP_DEFLATED)
        except RuntimeError:
            # zlib is unavailable: fall back to an uncompressed archive.
            resultOdt = zipfile.ZipFile(resultOdtName,'w')
        for dir, dirnames, filenames in os.walk(self.unzipFolder):
            for f in filenames:
                # Store entries with paths relative to the unzip folder.
                folderName = dir[len(self.unzipFolder)+1:]
                resultOdt.write(os.path.join(dir, f),
                                os.path.join(folderName, f))
            if not dirnames and not filenames:
                # This is an empty leaf folder. We must create an entry in the
                # zip for him
                folderName = dir[len(self.unzipFolder):]
                zInfo = zipfile.ZipInfo("%s/" % folderName,time.localtime()[:6])
                zInfo.external_attr = 48
                resultOdt.writestr(zInfo, '')
        resultOdt.close()
        resultType = os.path.splitext(self.result)[1]
        try:
            if (resultType == '.odt') and not self.forceOoCall:
                # Simply move the ODT result to the result
                os.rename(resultOdtName, self.result)
            else:
                if resultType.startswith('.'): resultType = resultType[1:]
                if not resultType in FILE_TYPES.keys():
                    raise PodError(BAD_RESULT_TYPE % (
                        self.result, FILE_TYPES.keys()))
                # Call OpenOffice to perform the conversion or document update
                output = self.callOpenOffice(resultOdtName, resultType)
                # I (should) have the result. Move it to the correct name
                resPrefix = os.path.splitext(resultOdtName)[0] + '.'
                if resultType == 'odt':
                    # converter.py has (normally!) created a second file
                    # suffixed .res.odt
                    resultName = resPrefix + 'res.odt'
                    if not os.path.exists(resultName):
                        resultName = resultOdtName
                        # In this case OO in server mode could not be called to
                        # update indexes, sections, etc.
                else:
                    resultName = resPrefix + resultType
                    if not os.path.exists(resultName):
                        raise PodError(CONVERT_ERROR % output)
                os.rename(resultName, self.result)
        finally:
            FolderDeleter.delete(self.tempFolder)
# ------------------------------------------------------------------------------
| [
"vincenzo.ampolo@gmail.com"
] | vincenzo.ampolo@gmail.com |
c3cfdf6b8b26ef6a5ba2d41b9255b819f05c9320 | 4448b231f32cafd1c4184d009befef2387a66d97 | /Session10/drill2.py | 21480b98ed32c1cd0faf03b9281f3b8a1dc5e8f8 | [] | no_license | NguyenTheGiaHuy1/NguyenTheGiaHuy-Python-B08 | 5c25b78888b8f53105f70982b051a45c239bedc1 | c010c3723550078e6786d81c824dc56552677451 | refs/heads/master | 2020-07-25T23:02:56.289781 | 2019-11-08T15:24:09 | 2019-11-08T15:24:09 | 208,450,997 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 129 | py | tu_dien = {
"name" : "tên",
"left" : "bên trái",
"right" : "bên phải",
"pen" : "bút mực"
}
print(tu_dien) | [
"trankimdung6479@gmail.com"
] | trankimdung6479@gmail.com |
f36498b94c34dadcda318c0658bad0c3f69ef38d | a7173ccfeb2c22371b077939b1007d2a9f7ecd91 | /etl/lib/test_keymap.py | 30d3d6b6493763b595d377e4bf45860ab0594e92 | [] | no_license | RealMassive-Archive/ETL | 6b8d41c77c141e8c74a02e4b282c9206ca24950b | 018ca8dc01d9cf847dcad09ec5dfc4e206550398 | refs/heads/master | 2021-06-06T20:09:23.937301 | 2016-10-17T18:53:06 | 2016-10-17T18:53:06 | 60,286,315 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 310 | py | import os
from keymap import KeyMap
def test_keymap():
    """Round-trip one key mapping and verify the CSV dump is created."""
    mapper = KeyMap()
    mapper.insert(1234, 'building', 'abcde', 5678, 'building')
    # The new id registered for (old_kind, new_kind, old_id) must come back.
    assert mapper.get_new('building', 'building', 1234) == 5678
    dump_path = '/tmp/keymap.csv'
    mapper.dump(dump_path)
    assert os.path.exists(dump_path)
    os.remove(dump_path)
| [
"clifalison@gmail.com"
] | clifalison@gmail.com |
606a71da75b0fde28073e51976b220804b4d1857 | 4a6c6fcb62383a674788687d66a1f3a5bf72305e | /entrypoints/cli/main.py | 8db9ab0e3250d36bcb228cc686a92fd7f2c03404 | [] | no_license | nitor-infotech-oss/python-event-driven-code | 35b0ff8eb2d2bd135b50937ea9e7cfa1ca94426e | 3c4c3e73fce8d91d067d0fa17b450b3e0e3d8b54 | refs/heads/main | 2022-12-27T03:43:35.426521 | 2020-10-13T16:11:13 | 2020-10-13T16:11:13 | 313,206,473 | 0 | 0 | null | 2020-11-16T06:07:14 | 2020-11-16T06:07:13 | null | UTF-8 | Python | false | false | 300 | py | from services.user_service import UserService
from infrastructure.dynamodb.user_repository import UserRepository
from domain.user import User
if __name__ == "__main__":
    # Ad-hoc smoke test: persist one hard-coded user through the service
    # layer backed by the DynamoDB repository.
    user = User(id="1",first_name="a",last_name="b")
    user_repo = UserRepository()
    UserService(user_repo).create_user(user)
"vipidnas2@gmail.com"
] | vipidnas2@gmail.com |
8edae035598a6bff9f6a7325d526abfd07cb3fab | e5ba55ac56d2d07aeebd7253fbe5d186196c9a52 | /catkin_ws/catkin_ws/build/iai_kinect2/kinect2_registration/catkin_generated/pkg.installspace.context.pc.py | 85e878c71d08a7cf827ee4e610db3258f4e5642e | [] | no_license | masiro97/darrsm | 5305a3e7c1fba2635a4925b9e079f45b40162862 | b881d00427d2af5d75ca509a191e57f2890e1ece | refs/heads/master | 2021-05-10T21:57:17.760536 | 2018-01-20T15:13:56 | 2018-01-20T15:13:56 | 111,084,804 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 605 | py | # generated from catkin/cmake/template/pkg.context.pc.in
# Machine-expanded catkin package context (values substituted from the CMake
# template); not meant to be edited by hand.
CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "/home/cun/catkin_ws/install/include;/usr/include".split(';') if "/home/cun/catkin_ws/install/include;/usr/include" != "" else []
# No catkin run dependencies were declared for this package.
PROJECT_CATKIN_DEPENDS = "".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "-lkinect2_registration;-l:/usr/lib/x86_64-linux-gnu/libOpenCL.so".split(';') if "-lkinect2_registration;-l:/usr/lib/x86_64-linux-gnu/libOpenCL.so" != "" else []
PROJECT_NAME = "kinect2_registration"
PROJECT_SPACE_DIR = "/home/cun/catkin_ws/install"
PROJECT_VERSION = "0.0.1"
| [
"estrk7120@gmail.com"
] | estrk7120@gmail.com |
f8f8d0fcc0626e8acc6e68b8e7a68732519ad1ab | e34ea9281833c412a68ccaba4eb5d51f73c41c15 | /triangle3.py | e822ee2aa8d42f5fdb8d97f482ed8517634d0369 | [] | no_license | RansomBroker/shape-python | 035083a2712b5e66edfbcbb7391c20d706447847 | 4a34420aac127d9c347d6e62bdbd4391d7556522 | refs/heads/main | 2023-01-21T04:53:41.335889 | 2020-11-23T07:17:24 | 2020-11-23T07:17:24 | 315,227,075 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 113 | py | n = 5
for row in range(n):
    # Row label followed by the sequence 1..(n-row); every token carries the
    # trailing space the original produced via print(..., end=' ').
    tokens = [row + 1] + [step + 1 for step in range(n - row)]
    print("".join(str(token) + " " for token in tokens))
"yadisoke@gmail.com"
] | yadisoke@gmail.com |
bde29ad2458262b1c8ab48e3fef1c35acf7d9092 | 0cf396a646638f485a8ce0da93b0aa1f81bf13b7 | /counter.py | dfd68121ae433a792525d4e765e57ca569828f4b | [] | no_license | Siriapps/hackerrankPython | c698ee77bb3d59f6b1833b63c2916e54dda0e45b | 3ceb2600db536170e643e830520e7f9c50053d9b | refs/heads/main | 2023-04-05T05:46:24.071778 | 2021-04-16T02:06:17 | 2021-04-16T02:06:17 | 311,360,333 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,042 | py | # https://www.hackerrank.com/challenges/collections-counter/problem
from collections import Counter
n = int(input()) # Number of shoes
shoes = Counter(map(int, input().split())) # Number of shoes in stock
customers = int(input()) # number of customers
total = []
for i in range(customers):
size, price = map(int, input().split())
# checking if the shoe size is present in the shoes
if shoes[size] > 0:
total.append(price) # then,appending the price of that shoe size
shoes.subtract(Counter([size])) # removing the number of occurances of the particular shoe size
# in the shoes in order to execute the condition i.e the no.of shoes
# in stock and the number of times the customer has asked for that shoe
# size should match and the prices of those shoe sizes would only be considered
# while calculating the total.
print(sum(total)) # total income
| [
"siriapps3@gmail.com"
] | siriapps3@gmail.com |
12b17a01e7b342ba83df6ec57ad8bed9335437d0 | ecceece5136fa6d7413b1589595407b8bc636757 | /src/readTexturedOBJ.py | fff3fe0f08d88c1bf9290ce9d2adccf78e014604 | [] | no_license | oarriaga/PyGPToolbox | 3ad7c4484deabac1434185182b2b97efabf86002 | 74c695f0857573b9c2f17c0d3ddeb226b1b50277 | refs/heads/master | 2020-03-28T06:14:34.346998 | 2018-03-20T15:48:15 | 2018-03-20T15:48:15 | 147,822,655 | 3 | 0 | null | 2018-09-07T12:49:40 | 2018-09-07T12:49:40 | null | UTF-8 | Python | false | false | 2,160 | py | from faceNormals import *
import numpy as np
def readTexturedOBJ(filepath):
    """Parse a Wavefront OBJ file that carries texture coordinates.

    Returns (V, F, TV, TF): vertex positions, face vertex indices, UV
    coordinates and face UV indices, all as 0-based numpy arrays. Faces
    whose UV triangles point "backwards" (normal z == -1) get two of their
    UV indices swapped to fix the winding order.
    NOTE(review): the ``map(float, ...)`` appends rely on Python 2, where
    map() returns a list -- confirm before running under Python 3.
    """
    V = []
    F = []
    TV = [] # UV coordinate
    TF = [] # face list for texture vertices TV
    with open(filepath, "rb") as f:
        lines = f.readlines()
    while True:
        for line in lines:
            if line == "":
                break
            elif line.strip().startswith("vn"):
                # Vertex normals are ignored.
                continue
            elif line.strip().startswith("vt"):
                lineLength = len(line.replace("\n", "").split(" "))
                UV = line.replace("\n", "").split(" ")[1:3]
                # Drop empty fields produced by consecutive spaces.
                UV = np.delete(UV,np.argwhere(UV == np.array([''])).flatten())
                TV.append(map(float, UV))
            elif line.strip().startswith("v"):
                lineLength = len(line.replace("\n", "").split(" "))
                vertices = line.replace("\n", "").split(" ")[1:4]
                vertices = np.delete(vertices,np.argwhere(vertices == np.array([''])).flatten())
                V.append(map(float, vertices))
            elif line.strip().startswith("f"):
                # Face entries look like "v/vt[/vn]"; indices become 0-based.
                t_index_list = []
                textureFaceList = []
                for t in line.replace("\n", "").split(" ")[1:]:
                    t_index = t.split("/")[0]
                    try:
                        t_index_list.append(int(t_index) - 1)
                    except ValueError:
                        continue
                    textureFace = t.split("/")[1]
                    try:
                        textureFaceList.append(int(textureFace) - 1)
                    except ValueError:
                        continue
                F.append(t_index_list)
                TF.append(textureFaceList)
            else:
                continue
        break
    V = np.asarray(V)
    F = np.asarray(F)
    TV = np.asarray(TV)
    TF = np.asarray(TF)
    # flip triangles if wrong orientation
    tempV = np.concatenate((TV, np.zeros((TV.shape[0],1))), axis = 1)
    FN = faceNormals(tempV, TF)
    flipIdx = np.where(FN[:,2] == -1)[0]
    temp = TF[flipIdx,1]
    TF[flipIdx,1] = TF[flipIdx,2]
    TF[flipIdx,2] = temp
    return V, F, TV, TF
| [
"htliu1992@gmail.com"
] | htliu1992@gmail.com |
8c0eca11dc07e53048fb9c39b4c4dd7b1179ed1f | cd79634eb30f8975bcf05c15c7be61bb6b7e6ddd | /WebCrawler/experiment.py | bba67e817eed31a52e974cef1bd0f7fe48d9725e | [] | no_license | memdreams/WebCrawler | 5f25648774d47be0da11a6ea4808862a53430097 | 54dc9100f4fc094958d52ef95e89fe5ad740c825 | refs/heads/master | 2020-03-21T23:47:57.918871 | 2018-08-11T01:36:21 | 2018-08-11T01:36:21 | 139,206,647 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 726 | py |
from lxml import etree
from lxml import html
text = '''
<div>
<ul>
<li class="item-0"><a href="link1.html">first item</a></li>
<li class="item-1"><a href="link2.html">second item</a></li>
<li class="item-inactive"><a href="link3.html">third item</a></li>
<li class="item-1"><a href="link4.html">fourth item</a></li>
<li class="item-0"><a href="link5.html">fifth item</a>
</ul>
</div>
'''
page = etree.HTML(text)
# try XPath rules
# NOTE(review): both assignments above are immediately overwritten below,
# and etree.parse will raise if 'myfile.html' is absent -- consider removing.
page = etree.parse('myfile.html', etree.HTMLParser())
page = html.fromstring(text)
# Select the class attribute of the parent of the link4.html anchor.
result = page.xpath('//a[@href="link4.html"]/../@class')
print(result)
print(result[0])
# result = etree.tostring(html)
# print(result.decode('utf-8'))
| [
"memdreams@me.com"
] | memdreams@me.com |
c9a4c97fec0969912c7f9e3728790ea187ad6bd1 | a72e37ecb0fa6169390fb0fad5a55ac98b17e63e | /venv/bin/pyreverse | 0be0591e4ddd891c7413406242def1866d245f2c | [] | no_license | imhussein/btre_python | 896bc6048309c81876f9536cfe24c55430635ed3 | a2540afa7ceef3f6147ff2b5537a62cdcd306616 | refs/heads/master | 2022-10-04T08:38:03.295859 | 2020-06-07T09:44:13 | 2020-06-07T09:44:13 | 270,259,669 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 247 | #!/home/mohamed/btre/venv/bin/python3.7
# -*- coding: utf-8 -*-
import re
import sys
from pylint import run_pyreverse
if __name__ == '__main__':
    # Strip the setuptools script suffix so pylint sees a clean program name.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(run_pyreverse())
| [
"hussein@blockgemini.com"
] | hussein@blockgemini.com | |
7db6ac1d7bbede773a656374c72c50f123b81a47 | 06816e83a574cc3e64ebbd3cc2d61a7832da01d7 | /staircase.py | dc451923289ce17f8e92e1e3e1f52d7d56be4c35 | [] | no_license | d1joseph/HackerRank-problem-sets-and-solutions | 48edb69206f17b7d5063600d4c1544c45e175ee6 | d27d509d2fe8f747b57faa48cf7829b12663f3ca | refs/heads/master | 2023-07-06T03:24:05.582429 | 2023-07-03T10:03:22 | 2023-07-03T10:03:22 | 239,671,790 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 365 | py | #https://www.hackerrank.com/challenges/staircase/problem
#print a stair case of size = n
'''
#
##
###
####
'''
#the above is an example of n = 4, where the base = height of the staircase.
# constraints 0 < n =< 100
n = int(input())
def staircase(n):
for i in range(n):
string = ' ' * n
print(string.replace(' ', '#', 1 + i)[::-1])
staircase(n) | [
"djnholdings@gmail.com"
] | djnholdings@gmail.com |
354bddead8b362c4a86b3458ab7824512ff5a9ec | c5e866610729ba4258054900affeda1469fcf2e0 | /TokenType.py | 575f4c172ce03f7cadf69a8b66004c9e9384bd8b | [] | no_license | connorjan/lox | d5e9635ccccc6cb700bb1b2bfc7e4cec0bb1fad1 | 30c1c82bcb5eb91a363e19d3792a5b973bdd66cf | refs/heads/master | 2023-08-09T00:17:46.327750 | 2023-07-31T02:10:44 | 2023-07-31T02:27:16 | 174,078,157 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,142 | py | from enum import Enum, auto
class TokenType(Enum):
    """Token categories emitted by the Lox scanner.

    Members are grouped by how many characters the lexeme spans; values are
    generated with auto() and carry no meaning beyond identity, so the
    declaration order must not be changed casually.
    """
    # Single-character tokens
    LEFT_PAREN = auto()
    RIGHT_PAREN = auto()
    LEFT_BRACE = auto()
    RIGHT_BRACE = auto()
    COMMA = auto()
    DOT = auto()
    MINUS = auto()
    PLUS = auto()
    SEMICOLON = auto()
    SLASH = auto()
    AMPERSAND = auto()
    BAR = auto()
    CARROT = auto()
    QUESTION = auto()
    COLON = auto()
    # One or two character tokens
    BANG = auto()
    BANG_EQUAL = auto()
    EQUAL = auto()
    EQUAL_EQUAL = auto()
    GREATER = auto()
    GREATER_EQUAL = auto()
    GREATER_GREATER = auto()
    LESS = auto()
    LESS_EQUAL = auto()
    LESS_LESS = auto()
    STAR = auto()
    STAR_STAR = auto()
    # Literals
    IDENTIFIER = auto()
    STRING = auto()
    NUMBER = auto()
    # Keywords
    AND = auto()
    BREAK = auto()
    CLASS = auto()
    CONTINUE = auto()
    ELSE = auto()
    FALSE = auto()
    FUN = auto()
    FOR = auto()
    IF = auto()
    NIL = auto()
    OR = auto()
    PRINT = auto()
    RETURN = auto()
    SUPER = auto()
    THIS = auto()
    TRUE = auto()
    VAR = auto()
    WHILE = auto()
    # End-of-input marker.
    EOF = auto()
| [
"connor@connorgoldberg.com"
] | connor@connorgoldberg.com |
181d8eeac78ec6e4bb15132189a0c9e3c534ec7b | 88b198a3def52beda25891b749f5d3c8a7d44f20 | /samples/openapi3/client/petstore/python-experimental/petstore_api/model/some_object.py | 2d8e71360af0abadd0ceaa32aa82ce4ccd9ddce5 | [
"Apache-2.0"
] | permissive | weaselflink/openapi-generator | 03e92fe5d2680a04bc136e83c6c7317a6db7774d | 129fd0ad5c95b8de90ba5a6350de725a12019071 | refs/heads/master | 2022-10-05T09:13:00.427796 | 2022-05-30T09:53:26 | 2022-05-30T09:53:26 | 207,326,774 | 0 | 0 | Apache-2.0 | 2023-09-05T10:22:02 | 2019-09-09T14:18:30 | Java | UTF-8 | Python | false | false | 2,756 | py | # coding: utf-8
"""
OpenAPI Petstore
This spec is mainly for testing Petstore server and contains fake endpoints, models. Please do not use this for any other purpose. Special characters: \" \\ # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
import typing # noqa: F401
import functools # noqa: F401
from frozendict import frozendict # noqa: F401
import decimal # noqa: F401
from datetime import date, datetime # noqa: F401
from frozendict import frozendict # noqa: F401
from petstore_api.schemas import ( # noqa: F401
AnyTypeSchema,
ComposedSchema,
DictSchema,
ListSchema,
StrSchema,
IntSchema,
Int32Schema,
Int64Schema,
Float32Schema,
Float64Schema,
NumberSchema,
UUIDSchema,
DateSchema,
DateTimeSchema,
DecimalSchema,
BoolSchema,
BinarySchema,
NoneSchema,
none_type,
Configuration,
Unset,
unset,
ComposedBase,
ListBase,
DictBase,
NoneBase,
StrBase,
IntBase,
Int32Base,
Int64Base,
Float32Base,
Float64Base,
NumberBase,
UUIDBase,
DateBase,
DateTimeBase,
BoolBase,
BinaryBase,
Schema,
_SchemaValidator,
_SchemaTypeChecker,
_SchemaEnumMaker
)
class SomeObject(
    ComposedSchema
):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech

    Do not edit the class manually.
    """

    @classmethod
    @property
    @functools.cache
    def _composed_schemas(cls):
        # we need this here to make our import statements work
        # we must store _composed_schemas in here so the code is only run
        # when we invoke this method. If we kept this at the class
        # level we would get an error because the class level
        # code would be run when this module is imported, and these composed
        # classes don't exist yet because their module has not finished
        # loading
        # NOTE(review): stacking @classmethod with @property is deprecated in
        # Python 3.11+ -- confirm the generator's target interpreter range.
        return {
            'allOf': [
                ObjectInterface,
            ],
            'oneOf': [
            ],
            'anyOf': [
            ],
            'not':
                None
        }

    def __new__(
        cls,
        *args: typing.Union[dict, frozendict, str, date, datetime, int, float, decimal.Decimal, None, list, tuple, bytes],
        _configuration: typing.Optional[Configuration] = None,
        **kwargs: typing.Type[Schema],
    ) -> 'SomeObject':
        return super().__new__(
            cls,
            *args,
            _configuration=_configuration,
            **kwargs,
        )
from petstore_api.model.object_interface import ObjectInterface
| [
"noreply@github.com"
] | noreply@github.com |
f22b08e51bb1d8ba20ff0b9ace6282205d0751dd | c883bf3cc158aec617c3c879c5fbb7f75baccb64 | /Community/bulk_delete_mac_address/migration.py | b2550279be021d596ae0122397c4609fb23f6dfa | [
"LicenseRef-scancode-commercial-license",
"Apache-2.0"
] | permissive | bluecatlabs/gateway-workflows | 46b3ddd5f743beeabd91a6d943530969d62daf73 | 60b36434e689c3ef852ab388ca2aae370e70c62d | refs/heads/master | 2023-09-05T20:57:11.804452 | 2023-08-23T21:51:59 | 2023-08-23T21:51:59 | 107,717,495 | 45 | 87 | Apache-2.0 | 2023-09-07T07:43:47 | 2017-10-20T19:20:29 | Python | UTF-8 | Python | false | false | 2,161 | py | # Copyright 2021 BlueCat Networks (USA) Inc. and its affiliates
# -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# By: BlueCat Networks
# Date: 2020-12-15
# Gateway Version: 20.12.1
# Description: Bulk Delete MAC Address Migration
from bluecat.api_exception import PortalException
def get_mac_address(configuration, address):
    """Return the MAC address entity for *address*, or None if absent."""
    try:
        return configuration.get_mac_address(address)
    except PortalException:
        # BAM raises PortalException when the MAC address does not exist.
        return None
def delete_mac_address(configuration, address):
    """Delete *address* from *configuration* unless IP addresses still link to it."""
    mac_address = get_mac_address(configuration, address)
    if mac_address is None:
        print('MAC Address %s is NOT in configuration(%s)' % (address, configuration.get_name()))
        return
    print('MAC Address %s is in configuration(%s)' % (address, configuration.get_name()))
    try:
        linked_ips = list(mac_address.get_linked_entities(mac_address.IP4Address))
        if not linked_ips:
            print(f'MAC Address {address} has no linked IP Address')
            mac_address.delete()
        else:
            # Refuse to delete while IP addresses are linked; list them all.
            ip_addresses = ', '.join(entity.get_address() for entity in linked_ips)
            print(f'MAC Address {address} has IP Addresses {ip_addresses} linked. Deletion aborted for this MAC Address')
    except PortalException:
        # Mirror the original behaviour: lookup errors are silently ignored.
        pass
| [
"rtamura@MP14RTAMURA2.local"
] | rtamura@MP14RTAMURA2.local |
c4fd421d68ff28a7640dd58d8d940a722bc01081 | a78272823c369fdffeb6133fbc7c95f8ffbdf3ba | /predict_a_language.py | e3bf5e257b4e28cc68d04deb63673af5d5daab12 | [] | no_license | lucaswannen/source_code_classification_with_CNN | a0a5f0151cad6dbb37ff0d205a613b0f8857223c | 8d1d581b0df51cef4849f4bb3b91131cbcf3450c | refs/heads/master | 2023-04-29T22:50:17.740119 | 2021-05-22T21:10:00 | 2021-05-22T21:10:00 | 368,924,993 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,485 | py | # Ceci est un script permettant de prédire la langage d'un script contenu dans un fichier.
path_model = 'models/model_final.h5' #Chemin du modèle sauvegardé
#Imports
import numpy as np
import matplotlib.pyplot as plt
import cv2
from os import listdir
from os.path import isfile, join
import random
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from tensorflow import keras
from tensorflow.keras.utils import to_categorical
from sklearn.preprocessing import MultiLabelBinarizer
from tensorflow.keras.applications import MobileNetV2
from tensorflow.keras.layers import AveragePooling2D
from tensorflow.keras.layers import Dropout
from tensorflow.keras.layers import Flatten
from tensorflow.keras.layers import Dense
from tensorflow.keras.layers import Input
from tensorflow.keras.models import Model
from sklearn.preprocessing import MultiLabelBinarizer
from tensorflow.keras.optimizers import Adam
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import OneHotEncoder
import tensorflow as tf
import string
import warnings
import sys, getopt
from scipy.special import softmax
#Fonction utiles
def keep_n_first_caracters(scripts, n):
    """Truncate every script in *scripts* to its first *n* characters."""
    return [script[:n] for script in scripts]
def oneHotEncoding(samples):
    """One-hot encode each sample string over the printable-ASCII alphabet.

    Returns an array of shape (len(samples), 1024, 101): slot 0 of the last
    axis is reserved (padding/unknown characters), slots 1..100 map to the
    characters of string.printable in order.
    """
    characters = string.printable  # All printable ASCII characters.
    token_index = dict(zip(characters, range(1, len(characters) + 1)))
    max_length = 1024
    results = np.zeros((len(samples), max_length, max(token_index.values()) + 1))
    for i, sample in enumerate(samples):
        for j, character in enumerate(sample[:max_length]):
            index = token_index.get(character)
            # Bug fix: for characters outside string.printable, get() returns
            # None, and `results[i, j, None] = 1.` set the WHOLE one-hot row
            # to 1 (None acts as np.newaxis). Leave unknowns all-zero instead.
            if index is not None:
                results[i, j, index] = 1.
    return results
def preprocessing_script(script):
    """Preprocess one script, given as ['the_script'], into a CNN-ready tensor.

    Returns a (1, 1024, 101, 1) array, or None (with a warning) when the
    script is shorter than 1024 characters.
    """
    minimum_length = 1024
    if len(script[0]) < minimum_length:
        # Too short to encode: warn and return None, as callers expect.
        warnings.warn("Le script fait moins de 1024 caractères : il faut essayer avec un script plus long !")
        return None
    encoded = oneHotEncoding(keep_n_first_caracters(script, minimum_length))
    print()
    # Add the trailing channel axis the convolutional model expects.
    return encoded.reshape(encoded.shape[0], encoded.shape[1], encoded.shape[2], 1)
def decoder_sortie(output):
    """Map the network output for one sample to its language label.

    Returns None when the winning index is outside the known classes,
    matching the original fall-through behaviour.
    """
    labels = {0: "C", 1: "html", 2: "java", 3: "python"}
    winning_index = int(np.argmax(output[0], axis=0))
    return labels.get(winning_index)
def predire(script, retourner_probas = False):
    """Run the full pipeline on ['a_script'].

    Returns the predicted language label, or the raw network output when
    retourner_probas is True.
    """
    output = model.predict(preprocessing_script(script))
    if retourner_probas:
        return output
    return decoder_sortie(output)
def lire_fichier(path):
    """Return the full text content of the file at *path*."""
    # Context manager guarantees the handle is closed even if read() raises;
    # the original left the file open on error.
    with open(path, "r") as handle:
        return handle.read()
## Execution
# Load the saved CNN and classify the file passed as first CLI argument.
model = keras.models.load_model(path_model)
fichier_a_predire = sys.argv[1]
script = lire_fichier(fichier_a_predire)
prediction = predire([script])
# Second pass returns the raw scores, turned into probabilities below.
sortie_brut = predire([script], retourner_probas=True)
probabilités = softmax(sortie_brut)
# Display
print('\n\n')
print("/////////////////// Bienvenue dans notre prédicteur de langage de programmation ! //////////////\n")
if len(script)<1024: # Handle the too-short-script case
    print("Votre script ne contient que",len(script),"caractères. Notre prédicteur ne prend que en entrée des scripts de taille supérieure ou égale à 1024 caractères. Veuillez réessayer avec un script plus long")
else :
    print("Pour le fichier",sys.argv[1],'le langage de programmation prédit est :',prediction, '\n')
    languages = ["C","html","java","python"]
    for i in range(len(probabilités[0])):
        print("La probabilité pour que le code soit du",languages[i],"est de",probabilités[0][i]*100,"%")
| [
"lucas.wannenmacher@insa-rouen.fr"
] | lucas.wannenmacher@insa-rouen.fr |
2fd81783c34a50ffcd311b0c373bbea8c1419435 | 8a2c11d6997c43f163a0a2fa650ff9f98baa7562 | /packet.py | d4130043f42a40d1ee9faeb53c967a3891315929 | [
"BSD-3-Clause"
] | permissive | kkaszyk/pytap | 29445f016816610e8b9b42f6365dc1b992bf97e9 | fda709bdcb9974543dc94dedf9d730884f459159 | refs/heads/main | 2023-03-09T02:05:12.728297 | 2021-02-12T13:57:34 | 2021-02-12T13:57:34 | 334,768,916 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,133 | py | class InstructionPacket():
    def __init__(self, pc, num_instructions, dependent_addrs):
        # pc: program counter of the packet; num_instructions: how many
        # instructions it covers; dependent_addrs: addresses this packet
        # depends on (exact semantics set by the trace producer).
        self.pc = pc
        self.num_instructions = num_instructions
        self.dependent_addrs = dependent_addrs
    def print(self):
        # One-line human-readable summary of the packet.
        print("Arithmetic: " + str(self.num_instructions))
class LoadPacket(InstructionPacket):
    """Instruction packet describing a memory load."""
    def __init__(self, pc, num_instructions, dependent_addrs, bytes, address):
        super().__init__(pc, num_instructions, dependent_addrs)
        self.address = address
        # NOTE(review): the "+ 1" suggests the trace encodes size as
        # (bytes - 1) -- confirm against the producer.
        self.bytes = bytes + 1
    def print(self):
        print("Load: %s" % hex(self.address))
class StorePacket(InstructionPacket):
    """Instruction packet describing a memory store."""
    def __init__(self, pc, num_instructions, dependent_addrs, bytes, address):
        super().__init__(pc, num_instructions, dependent_addrs)
        self.address = address
        # NOTE(review): the "+ 1" suggests the trace encodes size as
        # (bytes - 1) -- confirm against the producer.
        self.bytes = bytes + 1
    def print(self):
        print("Store: %s" % hex(self.address))
class BarrierPacket(InstructionPacket):
    # Barrier packets carry no payload beyond the base-class fields.
    def __init__(self, pc, num_instructions, dependent_addrs):
        super().__init__(pc, num_instructions, dependent_addrs)
    def print(self):
        print("Barrier")
| [
"kubakaszyk@gmail.com"
] | kubakaszyk@gmail.com |
06d8213d60b976c8a6249ca22c1d77c164221103 | 633bd08102d17b2e622cd951b7f7adfca1a3f67b | /web/artproject/secondapp/apps.py | bc97a6d25c31fa11fd45cb602860595d693751ac | [] | no_license | joohayoung/waple-art_hackathon | 41b1f71c7e98e7f0902f570f3c2986ebd8f91933 | b8f83328ecf348be42428ecb815577244e5b18c3 | refs/heads/main | 2023-01-21T14:40:07.112704 | 2020-12-07T14:46:14 | 2020-12-07T14:46:14 | 319,350,055 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 98 | py | from django.apps import AppConfig
class SecondappConfig(AppConfig):
    """Django application configuration for the ``secondapp`` app."""
    # Dotted module path Django uses to locate this application.
    name = 'secondapp'
| [
"hayjoo0723@gmail.com"
] | hayjoo0723@gmail.com |
3dda82b466bafbcaa40be352687a374972074d35 | 098861e76fad8355878e8762226f8282ff7b9d0d | /pyvizio_speaker/api_command.py | 4faefd2a067425af441cc575206cba3a68dea110 | [
"MIT"
] | permissive | jeradM/pyvizio-speaker | 8e1c15f648e3e057da78c1698fe91a994810deb7 | e57ace5b1abf573692302822cd6cf7cd8d38654e | refs/heads/master | 2020-03-14T05:40:35.355984 | 2018-04-29T16:32:52 | 2018-04-29T16:32:52 | 131,469,256 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,546 | py | from .util import HashValsHolder, Endpoint, KeyCode
from .api_response import Response
class Command:
def __init__(self, value=None):
self._hash_vals = HashValsHolder.instance()
self._value = value
@property
def hash_val(self):
return self._hash_vals.get_hash_val(self.name)
@property
def endpoint(self):
raise NotImplementedError
@property
def name(self):
raise NotImplementedError
@property
def request_obj(self):
return {
'REQUEST': 'MODIFY',
'HASHVAL': self._hash_vals.get_hash_val(self.name),
'VALUE': self._value
}
def handle_response(self, resp_obj):
resp = Response(resp_obj)
if resp.successful:
self._hash_vals.set_hash_val(self.name, resp.hash_val)
return resp
class InputListCommand(Command):
@property
def endpoint(self):
return Endpoint.INPUTS
@property
def name(self):
return 'inputs'
@property
def request_obj(self):
raise NotImplementedError
def handle_response(self, resp_obj):
return Response(resp_obj)
class InputCommand(Command):
@property
def endpoint(self):
return Endpoint.CURRENT_INPUT
@property
def name(self):
return 'input'
class VolumeCommand(Command):
@property
def endpoint(self):
return Endpoint.VOLUME
@property
def name(self):
return 'volume'
class KeyCommand(Command):
def __init__(self, *keys):
super().__init__(keys)
@property
def endpoint(self):
return Endpoint.KEY_COMMAND
@property
def name(self):
return 'key_command'
@property
def request_obj(self):
keylist = []
for key in self._value:
if not isinstance(key, KeyCode):
continue
keylist.append({
'CODESET': key.value[0],
'CODE': key.value[1],
'ACTION': 'KEYPRESS'
})
return {'KEYLIST': keylist}
class PowerCommand(Command):
@property
def endpoint(self):
return Endpoint.POWER
@property
def name(self):
return 'power'
def handle_response(self, resp_obj):
return Response(resp_obj)
class SystemCommand(Command):
@property
def endpoint(self):
return Endpoint.SYSTEM
@property
def name(self):
return 'system'
def handle_response(self, resp_obj):
return Response(resp_obj)
| [
"jerad.meisner@gmail.com"
] | jerad.meisner@gmail.com |
5277b890a3ae51ed650089fc6477697b688a0a18 | caa3befc9805d067b23f7e6fad3404c039593ca4 | /telegram_bot/tg_aria2_bot.py | b246dcb4546bdd0fe504dbdf81cffd1d35c1d53d | [] | no_license | mengyyy/learn_python | 95517c9407a0e6c0aea4ad6ff86cbea770aff737 | 523b0724933fd5e91c5c74fe8f4c5d0c68fc43a0 | refs/heads/master | 2021-01-22T19:25:49.818961 | 2018-08-31T17:11:01 | 2018-08-31T17:11:01 | 85,198,020 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 7,836 | py | #! /usr/bin/python3
# -*- coding:utf-8 -*-
import telegram
from telegram.ext import Updater
from telegram.ext import CommandHandler
from telegram.ext import MessageHandler, Filters
from telegram.ext import RegexHandler
from telegram.ext import BaseFilter
import logging
import cfscrape # apt-get install nodejs pip3 install cfscrape pycrypto
from requests.compat import urljoin
import bs4
import re
import os
import json
import pprint
# Telegram chat ids allowed to use the bot, and the bot's API token.
my_chatid = [1234567890]
my_token = '1234567890:ABCDEFGHIJKLMNOPQRSTUVWXYZ'
# Secret token for the local aria2 JSON-RPC interface.
ARIA2_TOKEN = 'aria_token'
log_path = '/home/tg_aria2_bot.log'
# Log to both a file and the console at DEBUG level.
logger = logging.getLogger('tg_aria2_bot')
logger.setLevel(logging.DEBUG)
fh = logging.FileHandler(log_path)
fh.setLevel(logging.DEBUG)
# create console handler with a higher log level
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
# create formatter and add it to the handlers
formatter = logging.Formatter(
    '%(asctime)s - %(levelname)s - %(message)s')
fh.setFormatter(formatter)
ch.setFormatter(formatter)
# add the handlers to the logger
logger.addHandler(fh)
logger.addHandler(ch)
# Patterns recognising the supported link types in incoming messages.
re_dmhy = re.compile('https?://share\.dmhy\.org/topics/view/.*html')
re_magnet = re.compile('magnet:\?.*')
re_torrent = re.compile('https?://.*\.torrent')
re_nyaa = re.compile('https?://nyaa\.si/view/.*')
dmhy_url_re = re.compile('http://share\.dmhy\.org/topics/view/.*\.html')
# (tag, attrs) argument pairs for BeautifulSoup.findAll lookups.
magnet_dict = ('a', {'href': re.compile('magnet:\?.*')})
torrent_dict = ('a', {'href': re.compile('.*\.torrent')})
dmhy_reply_plan = 'torrent link : {}\nmagent link 1: {}\nmagent link 2: {}'
# NOTE(review): `header` is never referenced in this module (requests are
# made without it) and points at music.163.com — looks like copied
# boilerplate; confirm before removing.
header = {
    'Accept': '*/*',
    'Accept-Encoding': 'gzip,deflate,sdch',
    'Accept-Language': 'zh-CN,zh;q=0.8,gl;q=0.6,zh-TW;q=0.4',
    'Connection': 'keep-alive',
    'Content-Type': 'application/x-www-form-urlencoded',
    'Host': 'music.163.com',
    'Referer': 'http://music.163.com/',
    'User-Agent':
    'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.152 Safari/537.36'  # NOQA
}
# cfscrape session that solves Cloudflare challenges transparently.
requests_flare = cfscrape.create_scraper()
class DMHY_Filter(BaseFilter):
    """Message filter matching messages whose first entity is a dmhy topic URL."""

    def filter(self, message):
        # Only the first entity is inspected; assumes the message has at
        # least one entity (guaranteed by the handler registration).
        return dmhy_url_re.match(message.entities[0].url)


dmhy_filter = DMHY_Filter()
def get_info_from_html(url, target_dict=('a', {'class': 'magnet'})):
    """Fetch *url* through the Cloudflare-aware session and return the
    tags matching the (name, attrs) pair in *target_dict*."""
    response = requests_flare.get(url)
    soup = bs4.BeautifulSoup(response.content, 'html.parser')
    return soup.findAll(*target_dict)
def get_info_from_source(source, target_dict=('a', {'class': 'magnet'})):
    """Return the tags in the HTML string *source* matching the
    (name, attrs) pair in *target_dict*."""
    soup = bs4.BeautifulSoup(source, 'html.parser')
    return soup.findAll(*target_dict)
def get_dmhy_torrent_link(url):
    """Scrape a dmhy topic page and return its absolute .torrent URL."""
    page = requests_flare.get(url)
    anchor = get_info_from_source(
        page.text, ('a', {'href': re.compile('.*\.torrent')}))[0]
    # dmhy serves protocol-relative hrefs ("//..."): prefix the scheme.
    link = 'http:' + anchor['href']
    logger.info('dmhy torrent link is {}'.format(link))
    return link
def get_nyaa_link(url):
    """Scrape a nyaa.si view page and return its magnet link."""
    page = requests_flare.get(url)
    anchor = get_info_from_source(page.text, ('a', {'href': re_magnet}))[0]
    link = anchor['href']
    logger.info('nyaa link is {}'.format(link))
    return link
def add_mission_2aria2(link):
    """Queue a single download *link* on the local aria2 JSON-RPC server.

    Only one link is added per call, following the aria2.addUri contract:
    https://aria2.github.io/manual/en/html/aria2c.html?highlight=token#methods
    """
    payload = json.dumps({
        'jsonrpc': '2.0',
        'id': '1',
        'method': 'aria2.addUri',
        'params': ["token:{}".format(ARIA2_TOKEN), [link]],
    })
    return requests_flare.post('http://localhost:6800/jsonrpc', data=payload)
# share dmhy org
def dmhy_deal(bot, update):
    """Handle a dmhy topic URL: resolve its torrent and queue it on aria2."""
    url = update.message.text
    logger.info('share_dmhy url {}'.format(url))
    link = get_dmhy_torrent_link(url)
    gid = add_mission_2aria2(link).json()['result']
    update.message.reply_text('gid is {}'.format(gid))
    return gid
# nyaa.si
def nyaa_deal(bot, update):
    """Handle a nyaa.si view URL: resolve its magnet link and queue it."""
    url = update.message.text
    logger.info('nyaa_si url {}'.format(url))
    link = get_nyaa_link(url)
    gid = add_mission_2aria2(link).json()['result']
    update.message.reply_text('gid is {}'.format(gid))
    return gid
def magnet_deal(bot, update):
    """Handle a raw magnet link: queue it on aria2 and reply with the gid."""
    logger.info('magnet link {}'.format(update.message.text))
    magnet_link = update.message.text
    # BUG FIX: previously passed the undefined name `link`, which raised
    # NameError on every magnet message; use the assigned magnet_link.
    gid = add_mission_2aria2(magnet_link).json()['result']
    update.message.reply_text('gid is {}'.format(gid))
    return gid
def torrent_deal(bot, update):
    """Handle a direct .torrent URL: queue it on aria2 and reply with the gid."""
    logger.info('torrent link {}'.format(update.message.text))
    torrent_link = update.message.text
    # BUG FIX: previously passed the undefined name `link`, which raised
    # NameError on every torrent message; use the assigned torrent_link.
    gid = add_mission_2aria2(torrent_link).json()['result']
    update.message.reply_text('gid is {}'.format(gid))
    return gid
def dmhy_trans_form_deal(bot, update):
    """Handle a forwarded message whose first entity links to a dmhy topic:
    queue the first magnet link found on that page."""
    logger.info('trans from {}'.format(update.message.chat_id))
    dmhy_url = update.message.entities[0].url
    anchors = get_info_from_html(dmhy_url, magnet_dict)
    magnet_links = [anchor.attrs['href'] for anchor in anchors]
    gid = add_mission_2aria2(magnet_links[0]).json()['result']
    update.message.reply_text('gid is {}'.format(gid))
    return gid
def deal_json(d):
    """Render an aria2 JSON-RPC reply *d* as a short human-readable string.

    Returns the error message for error replies, a progress summary for the
    first entry of result replies, and None when neither key is present.
    """
    if 'error' in d:
        return d['error']['message']
    if 'result' in d:
        result = d['result'][0]
        # Lengths are reported in bytes (as strings); convert to MiB.
        completedLength = int(result['completedLength']) / 1024 / 1024
        status = result['status']
        totalLength = int(result['totalLength']) / 1024 / 1024
        file_path = [x['path'] for x in result['files']]
        # Guard against a zero total (e.g. metadata not yet known) to avoid
        # ZeroDivisionError; report 0.0% in that case.
        done = completedLength / totalLength * 100 if totalLength else 0.0
        return 'status {} comple {}M total {}M done{}%\n file path {}'.format(
            status,
            completedLength,
            totalLength,
            done,
            file_path)
    return None
def tell_active(bot, update):
    """Reply with a summary of the currently active aria2 download."""
    logger.info('tell active miassion')
    # Show a "typing..." indicator while the RPC round-trip happens.
    bot.sendChatAction(chat_id=update.message.chat_id,
                       action=telegram.ChatAction.TYPING)
    jsonreq = json.dumps({'jsonrpc': '2.0', 'id': '1',
                          'method': 'aria2.tellActive',
                          'params': ["token:{}".format(ARIA2_TOKEN)]})
    d = requests_flare.post('http://localhost:6800/jsonrpc', data=jsonreq)
    # deal_json formats either the first active download or the error text.
    e = deal_json(d.json())
    bot.sendMessage(chat_id=update.message.chat_id,
                    text=e)
def tell_stoped(bot, update):
    """Reply with the raw status of up to 3 recently stopped aria2 downloads."""
    logger.info('tell stoped mission')
    bot.sendChatAction(chat_id=update.message.chat_id,
                       action=telegram.ChatAction.TYPING)
    # aria2.tellStopped(offset=0, num=3, keys=...): only the listed keys
    # are returned for each stopped download.
    jsonreq = json.dumps({'jsonrpc': '2.0', 'id': '1',
                          'method': 'aria2.tellStopped',
                          'params': ["token:{}".format(ARIA2_TOKEN), 0, 3, ['gid',
                                                                            'dir',
                                                                            'totalLength',
                                                                            'completedLength',
                                                                            'status']]})
    d = requests_flare.post('http://localhost:6800/jsonrpc', data=jsonreq)
    pprint.pprint(d.json())
    # Unlike tell_active, the reply is pretty-printed verbatim.
    e = pprint.pformat(d.json())
    bot.sendMessage(chat_id=update.message.chat_id,
                    text=e)
# Wire up the bot: one regex handler per recognised link type, a filter
# handler for forwarded dmhy links, and two status commands; then poll.
updater = Updater(token=my_token)
dp = updater.dispatcher
dp.add_handler(RegexHandler(re_dmhy, dmhy_deal))
dp.add_handler(RegexHandler(re_nyaa, nyaa_deal))
dp.add_handler(RegexHandler(re_magnet, magnet_deal))
dp.add_handler(RegexHandler(re_torrent, torrent_deal))
dp.add_handler(MessageHandler(dmhy_filter, dmhy_trans_form_deal))
dp.add_handler(CommandHandler('tell_active', tell_active))
dp.add_handler(CommandHandler('tell_stoped', tell_stoped))
updater.start_polling()
| [
"noreply@github.com"
] | noreply@github.com |
a62680358221a10ed97929f60004a3daf96d38ca | 8a9cfc55d18e99f6648ae2679b835fee5eaea6ec | /notebooks/407-person-tracking-webcam/deepsort_utils/tracker.py | 6a1eb6dfe3eb7c286244becd11e61e00b40aa04f | [
"Apache-2.0"
] | permissive | HoTaeWang/openvino_notebooks | 705ead404d49f59d50f689a31da2026fc9968b5f | cfa4d8b7a130e68a5240a40334c4db308fbdc20d | refs/heads/main | 2023-05-27T06:08:24.014685 | 2023-05-19T20:42:30 | 2023-05-19T20:42:30 | 360,728,400 | 0 | 0 | Apache-2.0 | 2021-04-23T01:29:25 | 2021-04-23T01:29:24 | null | UTF-8 | Python | false | false | 5,225 | py | import numpy as np
from . import kalman_filter
from . import linear_assignment
from . import iou_matching
from .track import Track
class Tracker:
    """
    This is the multi-target tracker.

    Parameters
    ----------
    metric : nn_matching.NearestNeighborDistanceMetric
        A distance metric for measurement-to-track association.
    max_age : int
        Maximum number of missed misses before a track is deleted.
    n_init : int
        Number of consecutive detections before the track is confirmed. The
        track state is set to `Deleted` if a miss occurs within the first
        `n_init` frames.

    Attributes
    ----------
    metric : nn_matching.NearestNeighborDistanceMetric
        The distance metric used for measurement to track association.
    max_age : int
        Maximum number of missed misses before a track is deleted.
    n_init : int
        Number of frames that a track remains in initialization phase.
    kf : kalman_filter.KalmanFilter
        A Kalman filter to filter target trajectories in image space.
    tracks : List[Track]
        The list of active tracks at the current time step.
    """

    def __init__(self, metric, max_iou_distance=0.7, max_age=70, n_init=3):
        self.metric = metric
        self.max_iou_distance = max_iou_distance
        self.max_age = max_age
        self.n_init = n_init

        self.kf = kalman_filter.KalmanFilter()
        self.tracks = []
        # Track ids are handed out sequentially starting from 1.
        self._next_id = 1

    def predict(self):
        """Propagate track state distributions one time step forward.

        This function should be called once every time step, before `update`.
        """
        for track in self.tracks:
            track.predict(self.kf)

    def update(self, detections):
        """Perform measurement update and track management.

        Parameters
        ----------
        detections : List[deep_sort.detection.Detection]
            A list of detections at the current time step.
        """
        # Run matching cascade.
        matches, unmatched_tracks, unmatched_detections = \
            self._match(detections)

        # Update track set.
        for track_idx, detection_idx in matches:
            self.tracks[track_idx].update(
                self.kf, detections[detection_idx])
        for track_idx in unmatched_tracks:
            self.tracks[track_idx].mark_missed()
        for detection_idx in unmatched_detections:
            self._initiate_track(detections[detection_idx])
        self.tracks = [t for t in self.tracks if not t.is_deleted()]

        # Update distance metric with features of confirmed tracks only.
        active_targets = [t.track_id for t in self.tracks if t.is_confirmed()]
        features, targets = [], []
        for track in self.tracks:
            if not track.is_confirmed():
                continue
            features += track.features
            targets += [track.track_id for _ in track.features]
            # Features are handed off to the metric; reset the per-track
            # buffer so they are not re-submitted next frame.
            track.features = []
        self.metric.partial_fit(
            np.asarray(features), np.asarray(targets), active_targets)

    def _match(self, detections):

        def gated_metric(tracks, dets, track_indices, detection_indices):
            # Appearance cost matrix, with Kalman-infeasible pairs gated out.
            features = np.array([dets[i].feature for i in detection_indices])
            targets = np.array([tracks[i].track_id for i in track_indices])
            cost_matrix = self.metric.distance(features, targets)
            cost_matrix = linear_assignment.gate_cost_matrix(
                self.kf, cost_matrix, tracks, dets, track_indices,
                detection_indices)
            return cost_matrix

        # Split track set into confirmed and unconfirmed tracks.
        confirmed_tracks = [
            i for i, t in enumerate(self.tracks) if t.is_confirmed()]
        unconfirmed_tracks = [
            i for i, t in enumerate(self.tracks) if not t.is_confirmed()]

        # Associate confirmed tracks using appearance features.
        matches_a, unmatched_tracks_a, unmatched_detections = \
            linear_assignment.matching_cascade(
                gated_metric, self.metric.matching_threshold, self.max_age,
                self.tracks, detections, confirmed_tracks)

        # Associate remaining tracks together with unconfirmed tracks using IOU.
        # Only tracks missed for exactly one frame take part in the IOU pass.
        iou_track_candidates = unconfirmed_tracks + [
            k for k in unmatched_tracks_a if
            self.tracks[k].time_since_update == 1]
        unmatched_tracks_a = [
            k for k in unmatched_tracks_a if
            self.tracks[k].time_since_update != 1]
        matches_b, unmatched_tracks_b, unmatched_detections = \
            linear_assignment.min_cost_matching(
                iou_matching.iou_cost, self.max_iou_distance, self.tracks,
                detections, iou_track_candidates, unmatched_detections)

        matches = matches_a + matches_b
        unmatched_tracks = list(set(unmatched_tracks_a + unmatched_tracks_b))
        return matches, unmatched_tracks, unmatched_detections

    def _initiate_track(self, detection):
        # Seed a new tentative track from an unmatched detection.
        mean, covariance = self.kf.initiate(detection.to_xyah())
        self.tracks.append(Track(
            mean, covariance, self._next_id, self.n_init, self.max_age,
            detection.feature))
        self._next_id += 1
| [
"noreply@github.com"
] | noreply@github.com |
608a6602c909e3eb19077fd474d488e6b05d6a21 | 37c31ea1b52aa65db8b0477b742b513528e81df0 | /pokemon/pokemon/urls.py | da5b89d0d81db1e4193a507973e08ced46813a16 | [] | no_license | Jkamo84/pokedex | 5f0477da442ac96f3f9d40c162c3778c994e2841 | 0880db14263f7bd50492ae2eecf6f544ad215625 | refs/heads/main | 2023-01-24T20:13:20.507556 | 2020-11-15T20:52:57 | 2020-11-15T20:52:57 | 313,110,783 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 173 | py | from django.contrib import admin
from django.urls import include, path
# Project URL map: the pokedex app's routes plus the Django admin site.
urlpatterns = [
    path('pokedex/', include('pokedex.urls')),
    path('admin/', admin.site.urls),
]
"juanplazasb84@gmail.com"
] | juanplazasb84@gmail.com |
c0e2db5983dbfcfad0a85134151f3dbc72727491 | 9ed45a701dc532bf3e26a506525c35be1f2a2c5f | /DjangoWStyle.py | 2dfade4b378f09620f9814460e32882a3df42d45 | [] | no_license | caseycesari/vectorformats.DjangoWStyle | 13d0ee3e3f436145eeb78f24de9e5dc300e7aec2 | 8ceebe0daf5dbbac8b943ea948fd228379edcecf | refs/heads/master | 2021-05-26T14:26:52.333285 | 2012-03-24T23:33:47 | 2012-03-24T23:33:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,488 | py | import pickle
from vectorformats.Feature import Feature
from vectorformats.Formats.Format import Format
class DjangoWStyle(Format):
    """ This class is designed to decode a Django QuerySet object into
        Feature.Vector objects, with some additonal properties and
        style settings for Leaflet (leaflet.cloudmade.com).

        Simply pass a query_set to the decode method, and it will return
        a list of Features.

        Example Usage:

        >>> from vectorformats.Formats import DjangoWStyle, GeoJSON
        >>> qs = Model.objects.filter(city="Cambridge")
        >>> djf = Django.Django(geodjango="geometry",
                properties=['city', 'state'],
                style={'color': '#004070', 'weight': 4})
        >>> geoj = GeoJSON.GeoJSON()
        >>> string = geoj.encode(djf.decode(qs))
        >>> print string
    """

    geodjango = False
    """
    If you have GeoDjango geometry columns, set this to the name of the
    geometry column.
    """

    pickled_geometry = False
    """If you are not using GeoDjango, but have instead stored your geometry
       as pickled GeoJSON geometries in a column in GeoDjango, set
       the pickled_geometry=True option in your class constructor.
    """

    pickled_properties = False
    """A column in the database representing a pickled set of attributes.
       This will be used in addition to any properties in the 'properties' list,
       with the list as a preference.
    """

    properties = []
    """
    List of properties you want copied from the model to the
    output object.
    """

    style = {}
    """ For use with Leaflet (leaflet.cloudmade.com).
        Leaflet looks for the key 'style' in GeoJSON object and uses those
        settings to style the GeoJSON layer. For a list of accepted parameters:
        http://leaflet.cloudmade.com/reference.html#path-options

        To see how to style a GeoJSON layer, see the "Styling Features" section
        here: http://leaflet.cloudmade.com/examples/geojson.html
    """

    relation_data = {}
    """Used to retrieve values and aggregrate data from related models,
       which are not direct attributes of res, such as object_set, and
       object_set.count() The dictionary should be set up as follows:
       { 'method' : ['model']}. The results are added to the properties
       object as 'model_method' : value.

       Currently, the supported keys are 'set_count', 'values_list' and 'display'
       set_count executes object_set.count() on the specified model.
       values_list executes related_model.values_list for the specified model,
       which should be plural.
       display gets the display name from a CHOICES object for the specified
       field (which is specified in place of model)
    """

    def decode(self, query_set, generator = False):
        # NOTE(review): `generator` is accepted but never used — the whole
        # result list is always materialised; confirm before relying on it.
        results = []
        for res in query_set:
            feature = Feature(res.id)
            # Geometry: either unpickle a stored GeoJSON geometry, or build
            # a GeoJSON-style dict from the GeoDjango geometry field.
            if self.pickled_geometry:
                feature.geometry = pickle.loads(res.geometry)
            elif self.geodjango:
                geom = getattr(res, self.geodjango)
                geometry = {}
                geometry['type'] = geom.geom_type
                geometry['coordinates'] = geom.coords
                feature.geometry = geometry
            # Properties: pickled blob first, then the explicit list (the
            # list therefore overrides keys from the pickled blob).
            if self.pickled_properties:
                props = getattr(res, self.pickled_properties)
                feature.properties = pickle.loads(props.encode("utf-8"))
            if self.properties:
                for p in self.properties:
                    feature.properties[p] = getattr(res, p)
            if self.style:
                feature.properties['style'] = self.style
            # Related-model lookups; failures are recorded as the literal
            # string 'AttributeError' rather than raised.
            if self.relation_data:
                for method,models in self.relation_data.iteritems():
                    if method == 'set_count':
                        for model in models:
                            try:
                                result = getattr(res,model + '_set')
                                count = getattr(result,'count')
                                feature.properties[model + '_' + method] = count()
                            except AttributeError, err:
                                feature.properties[model + '_' + method] = 'AttributeError'
                    if method == 'values_list':
                        for model in models:
                            try:
                                result = getattr(res,model)
                                all_list = list(result.values_list())
                                feature.properties[model + '_' + method] = all_list
                            except AttributeError, err:
                                feature.properties[model + '_' + method] = 'AttributeError'
                    if method == 'display' :
                        for model in models:
                            try:
                                # get_<field>_display resolves the CHOICES label.
                                display = 'get_%s_display' % (model)
                                result = getattr(res,display)
                                feature.properties[model + '_' + method] = result()
                            except AttributeError, err:
                                feature.properties[model + '_' + method] = 'AttributeError'
            results.append(feature)
        return results
| [
"cpthomas@gmail.com"
] | cpthomas@gmail.com |
a197523a71e001c0bb1291d174a1846b0a80b7da | ec9d98c21beb3f5cc53da6207b8b84c64d3dbb67 | /zhijiadaren/migrations/0006_auto_20190831_1528.py | be7429abb549708fd96953693b094bfce93ac543 | [] | no_license | luffy9125/kediaoCenter | 8adf45298b13cbbe9bbc03774d2122ba0b1eb77f | ccc2eadfed0e3cfe659d51b7adb7d0a79c866211 | refs/heads/master | 2020-08-17T22:11:12.926416 | 2019-10-17T06:48:53 | 2019-10-17T06:48:53 | 215,716,932 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 864 | py | # Generated by Django 2.0 on 2019-08-31 07:28
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Moves the read counter off Blog into its own one-to-one ReadNum model.

    dependencies = [
        ('zhijiadaren', '0005_blog_readed_num'),
    ]

    operations = [
        migrations.CreateModel(
            name='ReadNum',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('read_num', models.IntegerField(default=0)),
            ],
        ),
        migrations.RemoveField(
            model_name='blog',
            name='readed_num',
        ),
        migrations.AddField(
            model_name='readnum',
            name='blog',
            field=models.OneToOneField(on_delete=django.db.models.deletion.DO_NOTHING, to='zhijiadaren.Blog'),
        ),
    ]
| [
"luffy9125@163.com"
] | luffy9125@163.com |
35ba53add2b8261e7418dac4ec635415e0442b84 | 069ded12974b29642037adbced8ec0f8327ab543 | /component/bluetooth/scanner.py | 96d2069e4290e581ca2e7c427eab7664be4fb028 | [] | no_license | EricaTheRhino/brain_django | 1b521b23803d455b65ccb9137b1e89117be6cab0 | 5b2e20fd66ec6b61ac16792c82ddfca9af814d9b | refs/heads/master | 2016-09-06T11:01:31.488388 | 2015-09-11T16:01:31 | 2015-09-11T16:01:31 | 31,730,604 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,277 | py | import dbus
from dbus.mainloop.glib import DBusGMainLoop
DBusGMainLoop(set_as_default=True)
import gobject
import requests
import os, sys
import json
# Daemonise: fork and let the parent exit so the scanner keeps running
# in the background.
fpid = os.fork()
if fpid != 0:
    sys.exit(0)
# Addresses currently in range — suppresses duplicate 'found' events.
cache = {}
# Known devices whose appearance should never be reported.
filtered = [
    "00:80:98:EA:D3:CD",
    "00:80:98:E9:1F:BB",
    "00:1E:C2:9B:B6:A0",
    "00:25:BC:68:E9:F6",
    "00:1F:5B:E1:D0:C0"
]
# Talk to the default BlueZ adapter over the system D-Bus.
bus = dbus.SystemBus()
manager = dbus.Interface(bus.get_object("org.bluez", "/"), "org.bluez.Manager")
adapter = dbus.Interface(bus.get_object("org.bluez", manager.DefaultAdapter()), "org.bluez.Adapter")
def handleFound(address, values):
    """BlueZ DeviceFound callback: post an event the first time a device
    (not on the filter list) comes into range."""
    global cache
    global filtered
    if not address in cache and not address in filtered:
        print address
        print values
        requests.post("http://localhost/events/", data=json.dumps({'event':'environment.bluetooth.found', 'params':{'address':address}}))
        cache[address] = True
def handleLost(address):
    """BlueZ DeviceDisappeared callback: forget a cached device and post a
    'lost' event, so a later reappearance is reported again."""
    global cache
    if address in cache:
        del cache[address]
        requests.post("http://localhost/events/", data=json.dumps({'event':'environment.bluetooth.lost', 'params':{'address':address}}))
# Register the two callbacks, start discovery and block in the GLib loop.
adapter.connect_to_signal('DeviceFound', handleFound)
adapter.connect_to_signal('DeviceDisappeared', handleLost)
print "* Start discovery"
adapter.StartDiscovery()
print "* Entering main loop"
gobject.MainLoop().run()
| [
"mjewell@gmail.com"
] | mjewell@gmail.com |
5fa42b561291cec7207cee64b180cf8d69d352c1 | 90411561b2e01319d77f2fb86cf187867fe96ff1 | /FirstProj/src/Words.py | 6371eb1b576eeafd25568c1a9e2a424aa39cdf01 | [] | no_license | datarpita/Pythonics | e327fd9d5b78b0d2159c6aa2b0f20f4e7b1550ca | 9ffa5c6b2ad533ee61aa39a764f162ab6814959e | refs/heads/master | 2020-04-07T04:11:34.396518 | 2018-12-06T03:24:56 | 2018-12-06T03:24:56 | 158,043,909 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 758 | py | '''
Created on Nov 18, 2018
@author: Mgmi
'''
'''
This is doc string
This function finds the words in sentences
and stores them in a list
@param sentence_list: a list containing all the sentences
@return: a list containing the individual words
'''
def find_word(sentence_list):
    """Split every sentence in *sentence_list* on whitespace and return
    all words as a single flat list.

    @param sentence_list: a list containing all the sentences
    @return: a list containing the individual words
    """
    # Flattening nested loop replaced with an equivalent comprehension.
    return [word for sentence in sentence_list for word in sentence.split()]
def print_word(word_list):
    """Print a header line followed by each word on its own line."""
    print('The list of words are:')
    for item in word_list:
        print(item)
def main():
    """Demo driver: collect and print the words of three sample sentences."""
    sentence_list=['This is my first file','What is your name','Where is your school located']
    word_list=find_word(sentence_list)
    print_word(word_list)


if __name__ == '__main__':
    main()
| [
"arpita.datta@cognizant.com"
] | arpita.datta@cognizant.com |
7fb68897db9841bc5319ff7e955988b1d4194678 | d37c861ac45f97ab83957f08b320e76b4deef687 | /orders/migrations/0001_initial.py | 0fd2b0904ec95ab934476ffee280e4acc3a865a8 | [
"MIT"
] | permissive | MamvotaTake/Django-Application | ac2104039f3e585ce6b778905657cdbabafb647d | 81fbb01d6cd5cc4a91f6ba68c21f2a1e1d21a37e | refs/heads/main | 2023-08-12T20:57:59.350631 | 2021-10-20T08:15:50 | 2021-10-20T08:15:50 | 416,573,672 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,194 | py | # Generated by Django 3.1 on 2021-09-21 16:31
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Initial schema for the orders app: Order, Payment and OrderProduct,
    # plus the Order→Payment/Order→User links added after creation.

    initial = True

    dependencies = [
        ('accounts', '0001_initial'),
        ('store', '0002_variation'),
    ]

    operations = [
        migrations.CreateModel(
            name='Order',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('order_number', models.CharField(max_length=20)),
                ('first_name', models.CharField(max_length=50)),
                ('last_name', models.CharField(max_length=50)),
                ('phone', models.CharField(max_length=15)),
                ('email', models.EmailField(max_length=50)),
                ('address_line_1', models.CharField(max_length=50)),
                ('address_line_2', models.CharField(blank=True, max_length=50)),
                ('country', models.CharField(max_length=50)),
                ('state', models.CharField(max_length=50)),
                ('city', models.CharField(max_length=50)),
                ('order_note', models.CharField(blank=True, max_length=100)),
                ('order_total', models.FloatField()),
                ('tax', models.FloatField()),
                ('status', models.CharField(choices=[('New', 'New'), ('Accepted', 'Accepted'), ('Completed', 'Completed'), ('Cancelled', 'Cancelled')], default='New', max_length=20)),
                ('ip', models.CharField(blank=True, max_length=20)),
                ('is_ordered', models.BooleanField(default=False)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now_add=True)),
            ],
        ),
        migrations.CreateModel(
            name='Payment',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('payment_id', models.CharField(max_length=100)),
                ('payment_method', models.CharField(max_length=100)),
                ('amount_paid', models.CharField(max_length=100)),
                ('status', models.CharField(max_length=100)),
                ('create_at', models.DateTimeField(auto_now_add=True)),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='accounts.account')),
            ],
        ),
        migrations.CreateModel(
            name='OrderProduct',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('color', models.CharField(max_length=50)),
                ('size', models.CharField(max_length=50)),
                ('quantity', models.IntegerField()),
                ('product_price', models.FloatField()),
                ('ordered', models.BooleanField(default=False)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now_add=True)),
                ('order', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='orders.order')),
                ('payment', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='orders.payment')),
                ('product', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='store.product')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='accounts.account')),
                ('variation', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='store.variation')),
            ],
        ),
        migrations.AddField(
            model_name='order',
            name='payment',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='orders.payment'),
        ),
        migrations.AddField(
            model_name='order',
            name='user',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='accounts.account'),
        ),
    ]
| [
"tooyoungmamvota@gmail.com"
] | tooyoungmamvota@gmail.com |
515ca087d10d3513ea2227b5a61def1e683100d4 | 8143d965b9b61757f79a5e0a03ebf338e5a3f22e | /backend/main/routes/user_routes.py | 03da09e62fe3ad2c1804ba2d570cac620e66ef1c | [] | no_license | nchenche/alifs-api | 8ea35c39a77276295863a6949dc3ef01e23d635b | d3a3e905aed4002026db6d783f12140e2eb89e17 | refs/heads/master | 2023-06-25T10:03:49.051586 | 2021-07-16T12:18:49 | 2021-07-16T12:18:49 | 337,019,257 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,350 | py | from flask import (
Blueprint, flash, g, redirect, render_template, request, session, url_for, jsonify, abort, make_response
)
from main import db
from main.services import user_service
from main.models.user import User
bp = Blueprint('user', __name__, url_prefix='/alifs/api/auth')
@bp.before_app_request
def load_logged_in_user():
    """Attach the logged-in user (or None) to flask.g before each request."""
    username = session.get('username')
    if username is None:
        g.user = None
    else:
        g.user = User.query.filter_by(username=username).first()
@bp.route('/all', methods=['GET'])
def get_all_users():
    """Return every registered user as JSON."""
    return jsonify(user_service.get_all_users())
@bp.route('<string:username>', methods=['GET'])
def get_user(username):
    """Return a single user as JSON; respond 404 when the name is unknown."""
    user = user_service.get_user(username)
    if not user:
        abort(404)
    return jsonify({'user': user})
@bp.route('register', methods=['POST'])
def register_user():
    """Register a new user from a JSON body.

    Requires 'email', 'username' and 'password'; 'admin' defaults to False.
    Responds 400 on a missing body or any missing required field.
    """
    request_data = request.get_json()
    if not request_data:
        abort(400)
    if 'email' not in request_data:
        abort(400)
    if 'username' not in request_data:
        abort(400)
    if 'password' not in request_data:
        abort(400)
    if 'admin' not in request_data:
        request_data['admin'] = False
    response = user_service.register_user(request_data)
    return response
@bp.route('login', methods=['POST'])
def log_user():
    """Authenticate a user from a JSON body with 'username' and 'password'.

    Responds 400 when the body or either credential is missing or empty.
    """
    request_data = request.get_json()
    # BUG FIX: the original indexed request_data['username'] directly, so a
    # missing body raised TypeError and a missing key raised KeyError (500)
    # instead of the intended 400.
    if not request_data or not request_data.get('username'):
        abort(400)
    if not request_data.get('password'):
        abort(400)
    response = user_service.log_user(request_data)
    return response
@bp.route('/logout')
def logout():
    """Log out the bearer of the request's Authorization token."""
    auth_token = parse_auth_header(request)
    response = user_service.logout(auth_token)
    return response
@bp.route('/status', methods=['GET'])
def user_status():
    """Return the status of the user identified by the request's token."""
    auth_token = parse_auth_header(request)
    response = user_service.get_user_status(auth_token)
    return response
def parse_auth_header(request):
    """Extract the bearer token from the request's Authorization header.

    Yields '' when the header is absent; when the header is present but
    malformed, returns a (json response, 401) pair instead.
    """
    # get the auth token
    auth_header = request.headers.get('Authorization')
    if auth_header:
        try:
            auth_token = auth_header.split(" ")[1]
        # Narrowed from a bare `except:` — only a header with no token
        # after the scheme can fail here (IndexError on [1]).
        except IndexError:
            responseObject = {
                'status': 'fail',
                'message': 'Bearer token malformed.'
            }
            return make_response(jsonify(responseObject)), 401
    else:
        auth_token = ''
return auth_token | [
"nicolas.chevrollier@laposte.net"
] | nicolas.chevrollier@laposte.net |
b47c8ba36e0258e136cfd4494a9301f2fb2e54b3 | 70b65dfa4c3ef89bb8ffc633f2033ab3d2c19fd6 | /back/src/migrations/versions/9fbe12168226_.py | 6f8110d1d9a523a2a027e734bb1389796e547a43 | [] | no_license | mayu-snba19/hacku-vol4 | c210244ed180491fac7f1879d025667c841878bd | 9ab881b85f8145122a61f4f5cb90fad61786f4c3 | refs/heads/main | 2023-04-14T02:37:18.213910 | 2021-03-24T02:03:56 | 2021-03-24T02:03:56 | 343,309,315 | 2 | 5 | null | 2023-05-23T02:14:09 | 2021-03-01T06:11:11 | TypeScript | UTF-8 | Python | false | false | 1,652 | py | """empty message
Revision ID: 9fbe12168226
Revises: 1e13dfec183c
Create Date: 2021-03-05 01:52:51.936763
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
# This migration applies on top of revision 1e13dfec183c.
revision = '9fbe12168226'
down_revision = '1e13dfec183c'
branch_labels = None
depends_on = None
def upgrade():
    """Add picture_url/status_message/updated_at to users and relax the
    NOT NULL constraints on created_at and name."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('users', sa.Column('picture_url', sa.String(length=200), nullable=True))
    op.add_column('users', sa.Column('status_message', sa.String(length=200), nullable=True))
    op.add_column('users', sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=True))
    op.alter_column('users', 'created_at',
               existing_type=postgresql.TIMESTAMP(),
               nullable=True,
               existing_server_default=sa.text('CURRENT_TIMESTAMP'))
    op.alter_column('users', 'name',
               existing_type=sa.VARCHAR(length=100),
               nullable=True)
    # ### end Alembic commands ###
def downgrade():
    """Reverse of upgrade(): restore NOT NULLs and drop the added columns."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.alter_column('users', 'name',
               existing_type=sa.VARCHAR(length=100),
               nullable=False)
    op.alter_column('users', 'created_at',
               existing_type=postgresql.TIMESTAMP(),
               nullable=False,
               existing_server_default=sa.text('CURRENT_TIMESTAMP'))
    op.drop_column('users', 'updated_at')
    op.drop_column('users', 'status_message')
    op.drop_column('users', 'picture_url')
    # ### end Alembic commands ###
"ugax2kontomo0314@gmail.com"
] | ugax2kontomo0314@gmail.com |
b942645e69d7dd1d7dcbcdb08855353708ff84f2 | 600df3590cce1fe49b9a96e9ca5b5242884a2a70 | /components/autofill/content/common/DEPS | c3505fa54c3ecf8453d1bfdeff494e6ca31545ae | [
"BSD-3-Clause"
] | permissive | metux/chromium-suckless | efd087ba4f4070a6caac5bfbfb0f7a4e2f3c438a | 72a05af97787001756bae2511b7985e61498c965 | refs/heads/orig | 2022-12-04T23:53:58.681218 | 2017-04-30T10:59:06 | 2017-04-30T23:35:58 | 89,884,931 | 5 | 3 | BSD-3-Clause | 2022-11-23T20:52:53 | 2017-05-01T00:09:08 | null | UTF-8 | Python | false | false | 58 | include_rules = [
"+content/public/common",
"+ipc",
]
| [
"enrico.weigelt@gr13.net"
] | enrico.weigelt@gr13.net | |
087e38af3d68a8694db3c9ceef3cd62c4bab3bb9 | ab40c4af90055b2387a4bf0fe2863941debfa7f1 | /setup.py | 16d426352a56fe27014f9fde737c27ee119d8c0b | [] | no_license | Granitosaurus/pq | a30ccad8db4044239beb2eb08502eb34b233c635 | cb716ca8df59583c2d8e448d14f84a65a00b782a | refs/heads/master | 2021-05-04T05:21:54.008379 | 2017-09-29T04:32:52 | 2017-09-29T04:32:52 | 70,983,383 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 491 | py | from distutils.core import setup
# Package metadata for `pq`, a command-line XML/JSON processor driven by
# XPath and CSS selectors; installs the `pq` console script.
setup(
    name='pq',
    version='0.1',
    packages=['pq'],
    url='https://github.com/granitosaurus/pq',
    license='GPLv3',
    author='granitosaurus',
    author_email='bernardas.alisauskas@gmail.com',
    install_requires=[
        'click',
        'parsel',
        'dicttoxml'
    ],
    entry_points="""
[console_scripts]
pq=pq.cli:cli
""",
    description='Command line xml and json processor for xpath and css selectors.'
)
| [
"bernardas.alisauskas@gmail.com"
] | bernardas.alisauskas@gmail.com |
e9ff54520e19034bdda3afa301b087d5db126dfa | 376b679da9707b91cedb2b9d490af879ed4697ce | /fft/real_imag/main.py | 40bfd5e4cdc0ad4bee502f4fefb76b1f8ac3b9d3 | [] | no_license | Srishti013/Time_Series_Classification | 17d9cbab1557fd4c150331026d2b176d0befb948 | 0708fd57d4611db84f09ee7e3a53f06540bbef5c | refs/heads/main | 2023-08-23T08:09:40.190565 | 2021-10-27T06:39:19 | 2021-10-27T06:39:19 | 381,101,104 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,178 | py | import funct as module
import pandas as pd
Name =['Adiac','ArrowHead','Beef','BeetleFly','BirdChicken','Car','CBF','ChlorineConcentration','Coffee','Computers','CricketX','CricketY','CricketZ','DiatomSizeReduction','DistalPhalanxOutlineAgeGroup','DistalPhalanxOutlineCorrect','DistalPhalanxTW','Earthquakes','ECG200','ECG5000','ECGFiveDays','ElectricDevices','FaceAll','FaceFour','FacesUCR','FiftyWords','Fish','FordA','FordB','GunPoint','Ham','HandOutlines','Haptics','Herring','InlineSkate','ItalyPowerDemand','LargeKitchenAppliances','Lightning2','Lightning7','Mallat','Meat','MedicalImages','MiddlePhalanxOutlineAgeGroup','MiddlePhalanxOutlineCorrect','MiddlePhalanxTW','MoteStrain','NonInvasiveFetalECGThorax1','NonInvasiveFetalECGThorax2','OliveOil','OSULeaf','PhalangesOutlinesCorrect','Phoneme','Plane','ProximalPhalanxOutlineAgeGroup','ProximalPhalanxOutlineCorrect','ProximalPhalanxTW','RefrigerationDevices','ScreenType','ShapeletSim','ShapesAll','SmallKitchenAppliances','SonyAIBORobotSurface1','SonyAIBORobotSurface2','Strawberry','SwedishLeaf','Symbols','SyntheticControl','ToeSegmentation1','ToeSegmentation2','Trace','TwoLeadECG','TwoPatterns','UWaveGestureLibraryAll','UWaveGestureLibraryX','UWaveGestureLibraryY','UWaveGestureLibraryZ','Wafer','Wine','WordSynonyms','Worms','WormsTwoClass','Yoga','ACSF1','BME','Chinatown','Crop','DodgerLoopDay','DodgerLoopGame','DodgerLoopWeekend','EOGHorizontalSignal','EOGVerticalSignal','EthanolLevel','FreezerRegularTrain','FreezerSmallTrain','Fungi','GunPointAgeSpan','GunPointMaleVersusFemale','GunPointOldVersusYoung','HouseTwenty','InsectEPGRegularTrain','InsectEPGSmallTrain','MelbournePedestrian','MixedShapesSmallTrain','PigAirwayPressure','PigArtPressure','PigCVP','PowerCons','Rock','SemgHandGenderCh2','SemgHandMovementCh2','SemgHandSubjectCh2','SmoothSubspace','UMD']
l=['Adiac_TRAIN.arff','ArrowHead_TRAIN.arff','Beef_TRAIN.arff','BeetleFly_TRAIN.arff','BirdChicken_TRAIN.arff','Car_TRAIN.arff','CBF_TRAIN.arff','ChlorineConcentration_TRAIN.arff','Coffee_TRAIN.arff','Computers_TRAIN.arff','CricketX_TRAIN.arff','CricketY_TRAIN.arff','CricketZ_TRAIN.arff','DiatomSizeReduction_TRAIN.arff','DistalPhalanxOutlineAgeGroup_TRAIN.arff','DistalPhalanxOutlineCorrect_TRAIN.arff','DistalPhalanxTW_TRAIN.arff','Earthquakes_TRAIN.arff','ECG200_TRAIN.arff','ECG5000_TRAIN.arff','ECGFiveDays_TRAIN.arff','ElectricDevices_TRAIN.arff','FaceAll_TRAIN.arff','FaceFour_TRAIN.arff','FacesUCR_TRAIN.arff','FiftyWords_TRAIN.arff','Fish_TRAIN.arff','FordA_TRAIN.arff','FordB_TRAIN.arff','GunPoint_TRAIN.arff','Ham_TRAIN.arff','HandOutlines_TRAIN.arff','Haptics_TRAIN.arff','Herring_TRAIN.arff','InlineSkate_TRAIN.arff','ItalyPowerDemand_TRAIN.arff','LargeKitchenAppliances_TRAIN.arff','Lightning2_TRAIN.arff','Lightning7_TRAIN.arff','Mallat_TRAIN.arff','Meat_TRAIN.arff','MedicalImages_TRAIN.arff','MiddlePhalanxOutlineAgeGroup_TRAIN.arff','MiddlePhalanxOutlineCorrect_TRAIN.arff','MiddlePhalanxTW_TRAIN.arff','MoteStrain_TRAIN.arff','NonInvasiveFetalECGThorax1_TRAIN.arff','NonInvasiveFetalECGThorax2_TRAIN.arff','OliveOil_TRAIN.arff','OSULeaf_TRAIN.arff','PhalangesOutlinesCorrect_TRAIN.arff','Phoneme_TRAIN.arff','Plane_TRAIN.arff','ProximalPhalanxOutlineAgeGroup_TRAIN.arff','ProximalPhalanxOutlineCorrect_TRAIN.arff','ProximalPhalanxTW_TRAIN.arff','RefrigerationDevices_TRAIN.arff','ScreenType_TRAIN.arff','ShapeletSim_TRAIN.arff','ShapesAll_TRAIN.arff','SmallKitchenAppliances_TRAIN.arff','SonyAIBORobotSurface1_TRAIN.arff','SonyAIBORobotSurface2_TRAIN.arff','Strawberry_TRAIN.arff','SwedishLeaf_TRAIN.arff','Symbols_TRAIN.arff','SyntheticControl_TRAIN.arff','ToeSegmentation1_TRAIN.arff','ToeSegmentation2_TRAIN.arff','Trace_TRAIN.arff','TwoLeadECG_TRAIN.arff','TwoPatterns_TRAIN.arff','UWaveGestureLibraryAll_TRAIN.arff','UWaveGestureLibraryX_TRAIN.arff','UWaveGestureLibraryY_TRA
IN.arff','UWaveGestureLibraryZ_TRAIN.arff','Wafer_TRAIN.arff','Wine_TRAIN.arff','WordSynonyms_TRAIN.arff','Worms_TRAIN.arff','WormsTwoClass_TRAIN.arff','Yoga_TRAIN.arff','ACSF1_TRAIN.arff','BME_TRAIN.arff','Chinatown_TRAIN.arff','Crop_TRAIN.arff','DodgerLoopDay_TRAIN.arff','DodgerLoopGame_TRAIN.arff','DodgerLoopWeekend_TRAIN.arff','EOGHorizontalSignal_TRAIN.arff','EOGVerticalSignal_TRAIN.arff','EthanolLevel_TRAIN.arff','FreezerRegularTrain_TRAIN.arff','FreezerSmallTrain_TRAIN.arff','Fungi_TRAIN.arff','GunPointAgeSpan_TRAIN.arff','GunPointMaleVersusFemale_TRAIN.arff','GunPointOldVersusYoung_TRAIN.arff','HouseTwenty_TRAIN.arff','InsectEPGRegularTrain_TRAIN.arff','InsectEPGSmallTrain_TRAIN.arff','MelbournePedestrian_TRAIN.arff','MixedShapesSmallTrain_TRAIN.arff','PigAirwayPressure_TRAIN.arff','PigArtPressure_TRAIN.arff','PigCVP_TRAIN.arff','PowerCons_TRAIN.arff','Rock_TRAIN.arff','SemgHandGenderCh2_TRAIN.arff','SemgHandMovementCh2_TRAIN.arff','SemgHandSubjectCh2_TRAIN.arff','SmoothSubspace_TRAIN.arff','UMD_TRAIN.arff']
l1=['Adiac_TEST.arff','ArrowHead_TEST.arff','Beef_TEST.arff','BeetleFly_TEST.arff','BirdChicken_TEST.arff','Car_TEST.arff','CBF_TEST.arff','ChlorineConcentration_TEST.arff','Coffee_TEST.arff','Computers_TEST.arff','CricketX_TEST.arff','CricketY_TEST.arff','CricketZ_TEST.arff','DiatomSizeReduction_TEST.arff','DistalPhalanxOutlineAgeGroup_TEST.arff','DistalPhalanxOutlineCorrect_TEST.arff','DistalPhalanxTW_TEST.arff','Earthquakes_TEST.arff','ECG200_TEST.arff','ECG5000_TEST.arff','ECGFiveDays_TEST.arff','ElectricDevices_TEST.arff','FaceAll_TEST.arff','FaceFour_TEST.arff','FacesUCR_TEST.arff','FiftyWords_TEST.arff','Fish_TEST.arff','FordA_TEST.arff','FordB_TEST.arff','GunPoint_TEST.arff','Ham_TEST.arff','HandOutlines_TEST.arff','Haptics_TEST.arff','Herring_TEST.arff','InlineSkate_TEST.arff','ItalyPowerDemand_TEST.arff','LargeKitchenAppliances_TEST.arff','Lightning2_TEST.arff','Lightning7_TEST.arff','Mallat_TEST.arff','Meat_TEST.arff','MedicalImages_TEST.arff','MiddlePhalanxOutlineAgeGroup_TEST.arff','MiddlePhalanxOutlineCorrect_TEST.arff','MiddlePhalanxTW_TEST.arff','MoteStrain_TEST.arff','NonInvasiveFetalECGThorax1_TEST.arff','NonInvasiveFetalECGThorax2_TEST.arff','OliveOil_TEST.arff','OSULeaf_TEST.arff','PhalangesOutlinesCorrect_TEST.arff','Phoneme_TEST.arff','Plane_TEST.arff','ProximalPhalanxOutlineAgeGroup_TEST.arff','ProximalPhalanxOutlineCorrect_TEST.arff','ProximalPhalanxTW_TEST.arff','RefrigerationDevices_TEST.arff','ScreenType_TEST.arff','ShapeletSim_TEST.arff','ShapesAll_TEST.arff','SmallKitchenAppliances_TEST.arff','SonyAIBORobotSurface1_TEST.arff','SonyAIBORobotSurface2_TEST.arff','Strawberry_TEST.arff','SwedishLeaf_TEST.arff','Symbols_TEST.arff','SyntheticControl_TEST.arff','ToeSegmentation1_TEST.arff','ToeSegmentation2_TEST.arff','Trace_TEST.arff','TwoLeadECG_TEST.arff','TwoPatterns_TEST.arff','UWaveGestureLibraryAll_TEST.arff','UWaveGestureLibraryX_TEST.arff','UWaveGestureLibraryY_TEST.arff','UWaveGestureLibraryZ_TEST.arff','Wafer_TEST.arff','Wine_TEST.arf
f','WordSynonyms_TEST.arff','Worms_TEST.arff','WormsTwoClass_TEST.arff','Yoga_TEST.arff','ACSF1_TEST.arff','BME_TRAIN.arff','Chinatown_TEST.arff','Crop_TEST.arff','DodgerLoopDay_TEST.arff','DodgerLoopGame_TEST.arff','DodgerLoopWeekend_TEST.arff','EOGHorizontalSignal_TEST.arff','EOGVerticalSignal_TEST.arff','EthanolLevel_TEST.arff','FreezerRegularTrain_TEST.arff','FreezerSmallTrain_TEST.arff','Fungi_TEST.arff','GunPointAgeSpan_TEST.arff','GunPointMaleVersusFemale_TEST.arff','GunPointOldVersusYoung_TEST.arff','HouseTwenty_TEST.arff','InsectEPGRegularTrain_TEST.arff','InsectEPGSmallTrain_TEST.arff','MelbournePedestrian_TEST.arff','MixedShapesSmallTrain_TEST.arff','PigAirwayPressure_TEST.arff','PigArtPressure_TEST.arff','PigCVP_TEST.arff','PowerCons_TEST.arff','Rock_TEST.arff','SemgHandGenderCh2_TEST.arff','SemgHandMovementCh2_TEST.arff','SemgHandSubjectCh2_TEST.arff','SmoothSubspace_TEST.arff','UMD_TEST.arff']
w = [3,0,0,7,6,1,11,0,0,12,10,17,5,0,0,1,0,6,0,1,0,14,3,2,12,6,4,1,1,0,0,0,2,5,14,0,94,6,5,0,0,20,0,0,3,1,1,1,0,7,0,14,5,0,1,2,8,17,3,4,15,0,0,0,2,8,6,8,5,3,4,4,4,4,4,6,1,0,9,9,7,7,4,4,0,0,1,1,1,1,2,1,1,0,0,3,0,4,33,11,1,1,7,1,1,11,3,0,1,1,3,1,6]
length = [176,251,470,512,512,577,128,166,286,720,300,300,300,345,80,80,80,512,96,140,136,96,131,350,131,270,463,500,500,150,431,2709,1092,512,1882,24,720,637,319,1024,448,99,80,80,80,84,750,750,570,427,80,1024,144,80,80,80,720,720,500,512,720,70,65,235,128,398,60,277,343,275,82,128,945,315,315,315,152,234,270,900,900,426,1460,128,24,46,288,288,288,1250,1250,1751,301,301,201,150,150,150,2000,601,601,24,1024,2000,2000,2000,144,2844,1500,1500,1500,15,150]
# Score every UCR dataset pair (train/test arff files in `l` and `l1`) and
# collect one value per dataset: 1 - edx, where edx comes from
# module.complex_euclidean on the real/imaginary FFT parts
# (presumably an error rate, so 1 - edx is an accuracy — confirm in funct.py).
# The unused `dtw`/`cdtw` accumulators from the original were removed.
ed = []
for train_file, test_file in zip(l, l1):
    np_train, np_train_label = module.load_dataset(train_file)
    np_test, np_test_label = module.load_dataset(test_file)
    fft_train = module.fourier_transform(np_train)
    fft_test = module.fourier_transform(np_test)
    fft_train_real, fft_train_imag = module.real_imaginary_values(fft_train)
    fft_test_real, fft_test_imag = module.real_imaginary_values(fft_test)
    edx = module.complex_euclidean(fft_train_real, fft_test_real, fft_test_imag,
                                   fft_train_imag, np_test_label, np_train_label)
    # Keep the original 4-decimal string-format rounding for identical output.
    ed.append(float("{0:.4f}".format(1 - edx)))

# One CSV row per dataset: its name and the computed score.
result = {
    "Name": Name,
    "Ed": ed,
}
pd.DataFrame(result).to_csv('result_imag.csv')
| [
"noreply@github.com"
] | noreply@github.com |
f7069d0de200d1883e7840ebfcdab0ad60192a08 | 3c000380cbb7e8deb6abf9c6f3e29e8e89784830 | /venv/Lib/site-packages/cobra/modelimpl/fv/abd.py | 3dd4529c22f77fef93c0dbd871cbccde845e5b30 | [] | no_license | bkhoward/aciDOM | 91b0406f00da7aac413a81c8db2129b4bfc5497b | f2674456ecb19cf7299ef0c5a0887560b8b315d0 | refs/heads/master | 2023-03-27T23:37:02.836904 | 2021-03-26T22:07:54 | 2021-03-26T22:07:54 | 351,855,399 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,840 | py | # coding=UTF-8
# **********************************************************************
# Copyright (c) 2013-2020 Cisco Systems, Inc. All rights reserved
# written by zen warriors, do not modify!
# **********************************************************************
from cobra.mit.meta import ClassMeta
from cobra.mit.meta import StatsClassMeta
from cobra.mit.meta import CounterMeta
from cobra.mit.meta import PropMeta
from cobra.mit.meta import Category
from cobra.mit.meta import SourceRelationMeta
from cobra.mit.meta import NamedSourceRelationMeta
from cobra.mit.meta import TargetRelationMeta
from cobra.mit.meta import DeploymentPathMeta, DeploymentCategory
from cobra.model.category import MoCategory, PropCategory, CounterCategory
from cobra.mit.mo import Mo
# ##################################################
class ABD(Mo):
    """Abstract Bridge Domain (fvABD) managed-object model.

    Auto-generated cobra SDK metadata (the file header says "written by
    zen warriors, do not modify!"), so only comments were added here.
    Concrete subclasses are fv.BD, fv.SvcBD and fv.BDDef (see below).
    """

    # --- Class metadata describing this MO to the cobra MIT framework ---
    meta = ClassMeta("cobra.model.fv.ABD")

    meta.isAbstract = True
    meta.moClassName = "fvABD"
    meta.moClassName = "fvABD"  # NOTE(review): duplicated assignment in the generated code
    meta.rnFormat = ""
    meta.category = MoCategory.REGULAR
    meta.label = "None"
    meta.writeAccessMask = 0x0
    meta.readAccessMask = 0x1
    meta.isDomainable = False
    meta.isReadOnly = False
    meta.isConfigurable = True
    meta.isDeletable = False
    meta.isContextRoot = False

    # --- Containment / inheritance relationships ---
    meta.childClasses.add("cobra.model.fault.Delegate")

    meta.childNamesAndRnPrefix.append(("cobra.model.fault.Delegate", "fd-"))

    meta.superClasses.add("cobra.model.fv.L2Dom")

    meta.superClasses.add("cobra.model.fv.Def")

    meta.superClasses.add("cobra.model.pol.DefRoot")

    meta.superClasses.add("cobra.model.fv.Dom")

    meta.superClasses.add("cobra.model.naming.NamedObject")

    meta.superClasses.add("cobra.model.pol.Obj")

    meta.superClasses.add("cobra.model.pol.Def")

    meta.concreteSubClasses.add("cobra.model.fv.SvcBD")

    meta.concreteSubClasses.add("cobra.model.fv.BD")

    meta.concreteSubClasses.add("cobra.model.fv.BDDef")

    meta.rnPrefixes = [
    ]

    # --- Property metadata: one PropMeta per MO attribute ---
    prop = PropMeta("str", "OptimizeWanBandwidth", "OptimizeWanBandwidth", 34472, PropCategory.REGULAR)
    prop.label = "Optimize Wan Bandwidth between sites"
    prop.isConfig = True
    prop.isAdmin = True
    prop.defaultValue = False
    prop.defaultValueStr = "no"
    prop._addConstant("no", None, False)
    prop._addConstant("yes", None, True)
    meta.props.add("OptimizeWanBandwidth", prop)

    prop = PropMeta("str", "arpFlood", "arpFlood", 1693, PropCategory.REGULAR)
    prop.label = "ARP Flooding"
    prop.isConfig = True
    prop.isAdmin = True
    prop.defaultValue = False
    prop.defaultValueStr = "no"
    prop._addConstant("no", None, False)
    prop._addConstant("yes", None, True)
    meta.props.add("arpFlood", prop)

    prop = PropMeta("str", "bcastP", "bcastP", 1691, PropCategory.REGULAR)
    prop.label = "None"
    prop.isImplicit = True
    prop.isAdmin = True
    meta.props.add("bcastP", prop)

    prop = PropMeta("str", "childAction", "childAction", 4, PropCategory.CHILD_ACTION)
    prop.label = "None"
    prop.isImplicit = True
    prop.isAdmin = True
    prop._addConstant("deleteAll", "deleteall", 16384)
    prop._addConstant("deleteNonPresent", "deletenonpresent", 8192)
    prop._addConstant("ignore", "ignore", 4096)
    meta.props.add("childAction", prop)

    prop = PropMeta("str", "configIssues", "configIssues", 26448, PropCategory.REGULAR)
    prop.label = "None"
    prop.isImplicit = True
    prop.isAdmin = True
    prop.defaultValue = 0
    prop.defaultValueStr = "ok"
    prop._addConstant("FHS-enabled-on-l2-only-bd", "first-hop-security-enabled-on-layer-2-only-bridge-domain", 2)
    prop._addConstant("igmp-snoop-disabled-on-multicast-if", "igmpsnoop-disabled-on-multicast-enabled-bd", 1)
    prop._addConstant("ok", "ok", 0)
    meta.props.add("configIssues", prop)

    prop = PropMeta("str", "descr", "descr", 5614, PropCategory.REGULAR)
    prop.label = "Description"
    prop.isConfig = True
    prop.isAdmin = True
    prop.range = [(0, 128)]
    prop.regex = ['[a-zA-Z0-9\\!#$%()*,-./:;@ _{|}~?&+]+']
    meta.props.add("descr", prop)

    prop = PropMeta("str", "dn", "dn", 1, PropCategory.DN)
    prop.label = "None"
    prop.isDn = True
    prop.isImplicit = True
    prop.isAdmin = True
    prop.isCreateOnly = True
    meta.props.add("dn", prop)

    prop = PropMeta("str", "epClear", "epClear", 31106, PropCategory.REGULAR)
    prop.label = "Clear Endpoints"
    prop.isConfig = True
    prop.isAdmin = True
    prop.defaultValue = False
    prop.defaultValueStr = "no"
    prop._addConstant("no", None, False)
    prop._addConstant("yes", None, True)
    meta.props.add("epClear", prop)

    prop = PropMeta("str", "epMoveDetectMode", "epMoveDetectMode", 19110, PropCategory.REGULAR)
    prop.label = "EP Move Detection Mode"
    prop.isConfig = True
    prop.isAdmin = True
    prop._addConstant("garp", "garp-based-detection", 1)
    meta.props.add("epMoveDetectMode", prop)

    prop = PropMeta("str", "intersiteBumTrafficAllow", "intersiteBumTrafficAllow", 30545, PropCategory.REGULAR)
    prop.label = "Allow BUM traffic between sites"
    prop.isConfig = True
    prop.isAdmin = True
    prop.defaultValue = False
    prop.defaultValueStr = "no"
    prop._addConstant("no", None, False)
    prop._addConstant("yes", None, True)
    meta.props.add("intersiteBumTrafficAllow", prop)

    prop = PropMeta("str", "intersiteL2Stretch", "intersiteL2Stretch", 33178, PropCategory.REGULAR)
    prop.label = "Allow l2Stretch between sites"
    prop.isConfig = True
    prop.isAdmin = True
    prop.defaultValue = False
    prop.defaultValueStr = "no"
    prop._addConstant("no", None, False)
    prop._addConstant("yes", None, True)
    meta.props.add("intersiteL2Stretch", prop)

    prop = PropMeta("str", "ipLearning", "ipLearning", 21254, PropCategory.REGULAR)
    prop.label = "Ip Learning"
    prop.isConfig = True
    prop.isAdmin = True
    prop.defaultValue = True
    prop.defaultValueStr = "yes"
    prop._addConstant("no", None, False)
    prop._addConstant("yes", None, True)
    meta.props.add("ipLearning", prop)

    prop = PropMeta("str", "ipv6McastAllow", "ipv6McastAllow", 47406, PropCategory.REGULAR)
    prop.label = "ipv6 Multicast Allow"
    prop.isConfig = True
    prop.isAdmin = True
    prop.defaultValue = False
    prop.defaultValueStr = "no"
    prop._addConstant("no", None, False)
    prop._addConstant("yes", None, True)
    meta.props.add("ipv6McastAllow", prop)

    prop = PropMeta("str", "limitIpLearnToSubnets", "limitIpLearnToSubnets", 20851, PropCategory.REGULAR)
    prop.label = "Limit IP learning to BD subnets only"
    prop.isConfig = True
    prop.isAdmin = True
    prop.defaultValue = True
    prop.defaultValueStr = "yes"
    prop._addConstant("no", None, False)
    prop._addConstant("yes", None, True)
    meta.props.add("limitIpLearnToSubnets", prop)

    prop = PropMeta("str", "llAddr", "llAddr", 16876, PropCategory.REGULAR)
    prop.label = "IPv6 Link Local Address"
    prop.isConfig = True
    prop.isAdmin = True
    meta.props.add("llAddr", prop)

    prop = PropMeta("str", "mac", "mac", 1698, PropCategory.REGULAR)
    prop.label = "MAC Address"
    prop.isConfig = True
    prop.isAdmin = True
    prop.defaultValue = 280487012409856
    prop.defaultValueStr = "00:22:BD:F8:19:FF"
    meta.props.add("mac", prop)

    prop = PropMeta("str", "mcastAllow", "mcastAllow", 24916, PropCategory.REGULAR)
    prop.label = "Multicast Allow"
    prop.isConfig = True
    prop.isAdmin = True
    prop.defaultValue = False
    prop.defaultValueStr = "no"
    prop._addConstant("no", None, False)
    prop._addConstant("yes", None, True)
    meta.props.add("mcastAllow", prop)

    prop = PropMeta("str", "mtu", "mtu", 1697, PropCategory.REGULAR)
    prop.label = "MTU Size"
    prop.isImplicit = True
    prop.isAdmin = True
    prop.range = [(576, 9216)]
    prop.defaultValue = 1
    prop.defaultValueStr = "inherit"
    prop._addConstant("inherit", "inherit", 1)
    meta.props.add("mtu", prop)

    prop = PropMeta("str", "multiDstPktAct", "multiDstPktAct", 18026, PropCategory.REGULAR)
    prop.label = "Multi Destination Packet Action"
    prop.isConfig = True
    prop.isAdmin = True
    prop.defaultValue = 0
    prop.defaultValueStr = "bd-flood"
    prop._addConstant("bd-flood", "flood-in-bd", 0)
    prop._addConstant("drop", "drop", 2)
    prop._addConstant("encap-flood", "flood-in-encapsulation", 1)
    meta.props.add("multiDstPktAct", prop)

    prop = PropMeta("str", "name", "name", 4991, PropCategory.REGULAR)
    prop.label = "Name"
    prop.isConfig = True
    prop.isAdmin = True
    prop.range = [(0, 64)]
    prop.regex = ['[a-zA-Z0-9_.:-]+']
    meta.props.add("name", prop)

    prop = PropMeta("str", "nameAlias", "nameAlias", 28417, PropCategory.REGULAR)
    prop.label = "Name alias"
    prop.isConfig = True
    prop.isAdmin = True
    prop.range = [(0, 63)]
    prop.regex = ['[a-zA-Z0-9_.-]+']
    meta.props.add("nameAlias", prop)

    prop = PropMeta("str", "ownerKey", "ownerKey", 15230, PropCategory.REGULAR)
    prop.label = "None"
    prop.isConfig = True
    prop.isAdmin = True
    prop.range = [(0, 128)]
    prop.regex = ['[a-zA-Z0-9\\!#$%()*,-./:;@ _{|}~?&+]+']
    meta.props.add("ownerKey", prop)

    prop = PropMeta("str", "ownerTag", "ownerTag", 15231, PropCategory.REGULAR)
    prop.label = "None"
    prop.isConfig = True
    prop.isAdmin = True
    prop.range = [(0, 64)]
    prop.regex = ['[a-zA-Z0-9\\!#$%()*,-./:;@ _{|}~?&+]+']
    meta.props.add("ownerTag", prop)

    prop = PropMeta("str", "pcTag", "pcTag", 1695, PropCategory.REGULAR)
    prop.label = "None"
    prop.isImplicit = True
    prop.isAdmin = True
    prop._addConstant("any", "any", 0)
    meta.props.add("pcTag", prop)

    prop = PropMeta("str", "rn", "rn", 2, PropCategory.RN)
    prop.label = "None"
    prop.isRn = True
    prop.isImplicit = True
    prop.isAdmin = True
    prop.isCreateOnly = True
    meta.props.add("rn", prop)

    prop = PropMeta("str", "scope", "scope", 1694, PropCategory.REGULAR)
    prop.label = "None"
    prop.isImplicit = True
    prop.isAdmin = True
    prop.range = [(0, 16777215)]
    prop.defaultValue = 0
    prop.defaultValueStr = "0"
    meta.props.add("scope", prop)

    prop = PropMeta("str", "seg", "seg", 1766, PropCategory.REGULAR)
    prop.label = "None"
    prop.isImplicit = True
    prop.isAdmin = True
    meta.props.add("seg", prop)

    prop = PropMeta("str", "status", "status", 3, PropCategory.STATUS)
    prop.label = "None"
    prop.isImplicit = True
    prop.isAdmin = True
    prop._addConstant("created", "created", 2)
    prop._addConstant("deleted", "deleted", 8)
    prop._addConstant("modified", "modified", 4)
    meta.props.add("status", prop)

    prop = PropMeta("str", "unicastRoute", "unicastRoute", 1692, PropCategory.REGULAR)
    prop.label = "Unicast Routing"
    prop.isConfig = True
    prop.isAdmin = True
    prop.defaultValue = True
    prop.defaultValueStr = "yes"
    prop._addConstant("no", None, False)
    prop._addConstant("yes", None, True)
    meta.props.add("unicastRoute", prop)

    prop = PropMeta("str", "unkMacUcastAct", "unkMacUcastAct", 1696, PropCategory.REGULAR)
    prop.label = "Unknown Mac Unicast Action"
    prop.isConfig = True
    prop.isAdmin = True
    prop.defaultValue = 1
    prop.defaultValueStr = "proxy"
    prop._addConstant("flood", "flood", 0)
    prop._addConstant("proxy", "hardware-proxy", 1)
    meta.props.add("unkMacUcastAct", prop)

    prop = PropMeta("str", "unkMcastAct", "unkMcastAct", 16164, PropCategory.REGULAR)
    prop.label = "Unknown Multicast Destination Action"
    prop.isConfig = True
    prop.isAdmin = True
    prop.defaultValue = 0
    prop.defaultValueStr = "flood"
    prop._addConstant("flood", "flood", 0)
    prop._addConstant("opt-flood", "optimized-flood", 1)
    meta.props.add("unkMcastAct", prop)

    prop = PropMeta("str", "v6unkMcastAct", "v6unkMcastAct", 47702, PropCategory.REGULAR)
    prop.label = "Unknown V6 Multicast Destination Action"
    prop.isConfig = True
    prop.isAdmin = True
    prop.defaultValue = 0
    prop.defaultValueStr = "flood"
    prop._addConstant("flood", "flood", 0)
    prop._addConstant("opt-flood", "optimized-flood", 1)
    meta.props.add("v6unkMcastAct", prop)

    prop = PropMeta("str", "vmac", "vmac", 21204, PropCategory.REGULAR)
    prop.label = "Virtual MAC Address"
    prop.isConfig = True
    prop.isAdmin = True
    prop.defaultValue = 0
    prop.defaultValueStr = "not-applicable"
    prop._addConstant("not-applicable", "not-configured", 0)
    meta.props.add("vmac", prop)

    def __init__(self, parentMoOrDn, markDirty=True, **creationProps):
        # Abstract MO: no naming properties, so namingVals stays empty.
        namingVals = []
        Mo.__init__(self, parentMoOrDn, markDirty, *namingVals, **creationProps)
# End of package file
# ##################################################
| [
"bkhoward@live.com"
] | bkhoward@live.com |
2558eb429a70a1c07d1dc3089478c3b3ab870249 | 656eb6d63cc936f2a80261becd1e3f8b60e249bc | /03_picnic/version_info.py | f32a3e03321e0f0345b7fe91f85beb3d547085be | [
"MIT"
] | permissive | ddijk/tiny_python_projects | 6baa848cd696dfd3243650f97a7f44f4831dca9c | a9416039559b34bf8d36f1c043d6670f86a4583f | refs/heads/master | 2023-01-22T15:55:49.429677 | 2020-11-26T15:19:04 | 2020-11-26T15:19:38 | 279,935,410 | 0 | 0 | MIT | 2020-07-15T17:35:25 | 2020-07-15T17:35:25 | null | UTF-8 | Python | false | false | 202 | py | import sys
print('versie: ' + str(len(sys.version_info)))
print('-----')
for i in range(len(sys.version_info)):
print('versie: ' + str(sys.version_info[i]))
print('-----')
print(*sys.version_info)
| [
"dick@dijkrosoft.nl"
] | dick@dijkrosoft.nl |
aa78c02ad67f591157675a47bfb284c8d0878d46 | 9d7dad57f552f4687352c32fcced4aa49b13d6c0 | /tdl/tests/test_newtaskform.py | 224de889cc8d7298ecd90eadee78e6d2620fe105 | [
"MIT"
] | permissive | chrstsrs/todolist | 3e97f3eaf124c910bfda0f68aa854074a09e6658 | 8e13c5a8f6cabad0cf3f715ba9134dd82a46cc3e | refs/heads/master | 2022-12-14T05:22:48.451494 | 2019-01-23T21:24:15 | 2019-01-23T21:24:15 | 163,313,306 | 1 | 0 | MIT | 2022-12-08T01:20:31 | 2018-12-27T16:05:18 | HTML | UTF-8 | Python | false | false | 2,088 | py | from django.test import TestCase
from ..forms import NewTaskForm
class NewTaskFormTests(TestCase):
    """Unit tests for NewTaskForm: field labels, help texts, lengths and
    widget attributes.

    NOTE: the method names say "edit_task" although they exercise
    NewTaskForm; they are kept unchanged so existing test selection by
    name keeps working.  The "Maximium" typo below mirrors the actual
    help-text strings defined by the form.
    """

    def test_edit_task_field_name(self):
        # Django leaves label as None when the verbose default is used.
        form = NewTaskForm()
        self.assertIn(form.fields['name'].label, (None, 'name'))

    def test_edit_task_field_description(self):
        form = NewTaskForm()
        self.assertIn(form.fields['description'].label, (None, 'description'))

    def test_edit_task_field_name_help_text(self):
        form = NewTaskForm()
        self.assertEqual(form.fields['name'].help_text, 'The Maximium Length is 40')

    def test_edit_task_field_description_help_text(self):
        form = NewTaskForm()
        self.assertEqual(form.fields['description'].help_text, 'The Maximium Length is 4000')

    def test_edit_task_field_name_max_length(self):
        form = NewTaskForm()
        self.assertEqual(form.fields['name'].max_length, 40)

    def test_edit_task_field_description_max_length(self):
        form = NewTaskForm()
        self.assertEqual(form.fields['description'].max_length, 4000)

    def test_edit_task_field_name_placeholder(self):
        form = NewTaskForm()
        self.assertEqual(form.fields['name'].widget.attrs['placeholder'],
                         'Write here a short task name')

    def test_edit_task_field_description_placeholder(self):
        form = NewTaskForm()
        self.assertEqual(form.fields['description'].widget.attrs['placeholder'],
                         'Write here the new task.')

    def test_edit_task_field_name_size(self):
        form = NewTaskForm()
        self.assertEqual(form.fields['name'].widget.attrs['size'], 40)

    def test_edit_task_field_description_size(self):
        form = NewTaskForm()
        self.assertEqual(form.fields['description'].widget.attrs['size'], 4000)

    def test_edit_task_field_name_rows(self):
        form = NewTaskForm()
        self.assertEqual(form.fields['name'].widget.attrs['rows'], 1)
| [
"noreply@github.com"
] | noreply@github.com |
a90e6404551b5912048b4829a5294fbb441ab70e | 93a95c5b9411960b394cfb63e400910d7d1abf50 | /estoque/migrations/0002_auto_20210510_1515.py | ea13d302de4b62141beacc1712e501fe83515d36 | [
"MIT"
] | permissive | jonathan-mothe/estoque | 9377e4ac826fabe9d1a4f66f817204334a59a311 | de8d0ea87e67e93ad4922a2d81b1ba7d68a29845 | refs/heads/master | 2023-04-21T19:08:07.579886 | 2021-05-11T17:46:31 | 2021-05-11T17:46:31 | 364,916,799 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,283 | py | # Generated by Django 3.2.2 on 2021-05-10 18:15
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated Django migration: adds two proxy models over
    # estoque.Estoque and re-points EstoqueItens.estoque with a
    # related_name.  Generated code - only comments added.

    dependencies = [
        ('estoque', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='EstoqueEntrada',
            # Proxy model (no own table/fields), used to split stock entries
            # from exits in the admin.
            fields=[
            ],
            options={
                # NOTE(review): 'estoque estrada' looks like a typo for
                # 'estoque entrada'; left unchanged because editing an
                # applied migration would desync migration state.
                'verbose_name': 'estoque estrada',
                'verbose_name_plural': 'estoque entrada',
                'proxy': True,
                'indexes': [],
                'constraints': [],
            },
            bases=('estoque.estoque',),
        ),
        migrations.CreateModel(
            name='EstoqueSaida',
            # Second proxy over the same concrete model, for stock exits.
            fields=[
            ],
            options={
                'verbose_name': 'estoque saída',
                'verbose_name_plural': 'estoque saída',
                'proxy': True,
                'indexes': [],
                'constraints': [],
            },
            bases=('estoque.estoque',),
        ),
        migrations.AlterField(
            model_name='estoqueitens',
            name='estoque',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='estoques', to='estoque.estoque'),
        ),
    ]
| [
"jonathan.mothe@gmail.com"
] | jonathan.mothe@gmail.com |
83fc41d8dbacc110b36cbeb2ded34e1d9ef46d8b | 1ad65a239d7aa2302bf192f8bff22947ccc9ef70 | /ws_test.py | 15701dbfd9c053cd8ea134ee0898500fa5be35be | [] | no_license | pHearning/WebsocketFighting | 24f3142b9fcdd5aaa2e4fa48a37f686390b595fa | 2f9729dbee11ceeba676b7c7fdffc7f3c8795378 | refs/heads/master | 2021-01-11T16:40:51.686120 | 2017-01-27T10:48:56 | 2017-01-27T10:48:56 | 80,138,125 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,106 | py | import websocket
import thread
import time
import json
class turn_holder():
    # Shared mutable state: updated from server broadcasts in on_message
    # and read by the sender thread started in on_open.
    turn = ''     # last "turn" value received; sent back as "player"
    red_hp = 0    # last "red_hp" value received from the server
    blue_hp = 0   # last "blue_hp" value received from the server
    move = ''     # last "move" value received from the server
def on_message(ws, message):
    """Mirror one JSON state broadcast from the server into turn_holder."""
    print(message)
    state = json.loads(message)
    # Copy the broadcast fields in the same order as before.
    for field in ("turn", "move", "red_hp", "blue_hp"):
        setattr(turn_holder, field, state[field])
def on_error(ws, error):
    """Log any error raised by the websocket connection.

    Uses the function form of print, which behaves identically for a
    single argument on Python 2 and is valid on Python 3.
    """
    print(error)
def on_close(ws):
    """Print a marker when the websocket connection is closed.

    Uses the function form of print so the line is valid on both
    Python 2 and Python 3 (output is identical for a single argument).
    """
    print("### closed ###")
def on_open(ws):
    # Once connected, spawn a background thread that sends this client's
    # move every 3 seconds, echoing the last-seen turn as "player".
    def run(*args):
        while True:
            time.sleep(3)
            ws.send(json.dumps({"player":turn_holder.turn,"move":"punch"}))
        # NOTE(review): everything below is unreachable - the `while True`
        # loop above never breaks, so the socket is never closed here.
        time.sleep(1)
        ws.close()
        print "thread terminating..."
    # Python 2 `thread` module: fire-and-forget worker thread.
    thread.start_new_thread(run, ())
if __name__ == "__main__":
    # Enable frame-level tracing, wire the callbacks defined above into a
    # WebSocketApp against the local game server, and block in its event
    # loop until the connection ends.
    websocket.enableTrace(True)
    ws = websocket.WebSocketApp("ws://127.0.0.1:8888/ws",
                              on_message = on_message,
                              on_error = on_error,
                              on_close = on_close)
    ws.on_open = on_open
    ws.run_forever()
"tedjohanssondeveloper@gmail.com"
] | tedjohanssondeveloper@gmail.com |
542816beffb8b703f3ac06dfc3663090ffee2d00 | b129c9b11e9d2c06114f45ce03a94f4f2a177119 | /hugin/haproxy/filters/userstate.py | c25adced17d38446916ca97be7ca2a70eced1dc0 | [] | no_license | pyfidelity/hugin.haproxy | a9e48e345b03ed9d361c0d6c8617135378f5c311 | 444e30350936883e7749c2371f394fa82c1644fe | refs/heads/master | 2016-09-01T17:29:48.210244 | 2014-11-24T12:34:51 | 2014-11-24T12:34:51 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,529 | py | # Userstate is about detecting when users switch backend node when session affinity is used.
import re
from hugin.haproxy import registerFilter
from collections import deque
# Extracts the cookie value following `="` in a captured request cookie.
# Currently unused: UserState.process() slices the '__ac="' prefix instead
# (the COOKIEMATCH-based code is commented out there).
COOKIEMATCH = re.compile('.*="?(?P<cookie>\S+)')
class UserState(object):
    """Track per-user request history (keyed by the __ac cookie) to detect
    redundant reloads, session-affinity streaks and instance redispatches.
    """

    def __init__(self):
        self.duplicates = 0  # redundant reloads: client abort ('C') then same URL again
        self.redispatch = 0  # user moved instance after a full same-instance history
        self.affinity = 0    # request stayed on the instance of the whole recorded history
        self.status = {}     # uid -> deque of the last 4 requests, most recent first

    def process(self, data):
        """Inspect one parsed log entry (a dict) and update the counters.

        Returns ``data`` unchanged so the filter can be chained.
        Entries without a usable ``reqcookie`` are passed through untouched.
        """
        reqcookie = data.get('reqcookie', None)
        if reqcookie is not None and len(reqcookie) > 1:
            uid = reqcookie[6:]  # strip the leading `__ac="` prefix
            hist = self.status.get(uid, deque(maxlen=4))  # last 4 requests for this uid
            previous = hist and hist[0]
            instance = data['instance']
            if previous:
                # Redundant reload: the client aborted ('C') and immediately
                # re-requested the same URL.
                if previous['terminationevent'] == 'C' and previous['url'] == data['url']:
                    self.duplicates += 1
                if previous['instance'] == instance:
                    # Affinity counts only when the *entire* recorded history
                    # hit this same instance.  BUG FIX: the original placed
                    # the increment after the loop instead of in a for/else,
                    # so the `break` was dead and affinity was incremented
                    # whenever just two consecutive requests matched.
                    for item in hist:
                        if item['instance'] != instance:
                            break  # different instance seen, no affinity
                    else:
                        self.affinity += 1
                # Redispatch is only checked against a full history.
                elif len(hist) == 4:
                    if len(set(h['instance'] for h in hist)) == 1:
                        self.redispatch += 1
            hist.appendleft(dict(url=data['url'],
                                 terminationevent=data['terminationevent'],
                                 instance=instance,))
            self.status[uid] = hist
        return data

    def stats(self, reset=True, count=20):
        """Return the counters as a dict; zero them afterwards unless
        ``reset`` is False.

        ``count`` is unused here but kept for interface compatibility with
        the other filters' ``stats(reset, count)`` signature.
        """
        duplicates, redispatch, affinity = self.duplicates, self.redispatch, self.affinity
        if reset:
            self.duplicates = self.redispatch = self.affinity = 0
        return dict(duplicates=duplicates,
                    redispatch=redispatch,
                    affinity=affinity)
# Register one shared UserState instance under the name used in the
# hugin.haproxy filter pipeline.
registerFilter('userstate', UserState())
| [
"florian.schulze@gmx.net"
] | florian.schulze@gmx.net |
01f2ca3744cd9fbadbed8d343ed1e17851ff73e1 | 403f3702e6e47c0a5439284801bfdc9fae707711 | /Ordenes_ST/migrations/0016_auto_20200401_0139.py | ef776349de9f55271c910a386fdc65a7f4d7dcba | [] | no_license | harold-pinedar/CenterPro | db85d617c83459afa23b027266ac6b774c72e45a | c37b081a49516a9ac14a794ce9f67727b89c3661 | refs/heads/master | 2022-04-15T01:16:51.060297 | 2020-04-01T02:03:34 | 2020-04-01T02:03:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,384 | py | # Generated by Django 3.0.4 on 2020-04-01 01:39
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated Django migration: narrows the 'estado' choices on
    # OrdenesSt and creates the OrdenEntregadaSt delivery-record model.
    # Generated code - only comments added.

    dependencies = [
        ('Colaborador', '0018_auto_20200401_0139'),
        ('Ordenes_ST', '0015_auto_20200401_0119'),
    ]

    operations = [
        migrations.AlterField(
            model_name='ordenesst',
            name='estado',
            field=models.CharField(choices=[('P', 'Pendiente Reparación'), ('D', 'Devolución'), ('E', 'Espera Repuesto'), ('R', 'Reparado')], max_length=4, verbose_name='Estado. '),
        ),
        migrations.CreateModel(
            name='OrdenEntregadaSt',
            fields=[
                ('id_orden_entregada_st', models.BigAutoField(primary_key=True, serialize=False)),
                ('fecha', models.DateField(auto_now_add=True)),
                ('quien_recibe', models.TextField(verbose_name='Nombre de quien recibe.')),
                ('observacion', models.TextField(verbose_name='Observaciones.')),
                # SET_NULL keeps the delivery record if the collaborator or
                # the service order is removed.
                ('id_colaborador', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='Colaborador.Colaboradores', verbose_name='Nombre de quien entrega.')),
                ('id_orden_st', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='Ordenes_ST.OrdenesSt')),
            ],
        ),
    ]
| [
"fredyortegon30@gmail.com"
] | fredyortegon30@gmail.com |
902f82ec1d6408a0d05074ad354a7e9e1de2a428 | 75dbf493c64532514efa1ed5a0a54e6bb59a891b | /annhub_python/ml_lib/__init__.py | 6dc5d97af2f1568464ffc8f3e2fb929b94c72613 | [] | no_license | ans-ari/annhub-python | e73c6664cf3fabcd8bede9764e653d6160f9653d | 83aef254ab8d10059554f2054630a601c2c39863 | refs/heads/master | 2023-08-23T02:30:14.864537 | 2021-10-23T14:35:07 | 2021-10-23T14:35:07 | 410,797,783 | 0 | 1 | null | 2021-10-05T08:01:52 | 2021-09-27T08:12:34 | Python | UTF-8 | Python | false | false | 478 | py | import requests
def download():
    """Fetch the pre-built ``annhub.pyd`` extension from GitHub and save it
    to the current directory as ``annhub.pyd``.

    Raises:
        requests.HTTPError: if the download URL returns an error status.
    """
    URL = "https://github.com/ans-ari/annhub-python/raw/update-library/annhub_python/ml_lib/annhub.pyd"
    response = requests.get(url=URL, allow_redirects=True)
    # Bug fix: without this check, an HTTP error page would be silently
    # written to disk as a (broken) binary extension module.
    response.raise_for_status()
    file_name = "annhub.pyd"
    with open(file_name, 'wb') as f:
        f.write(response.content)
# As the .pyd can not be uploaded into PyPi, we need to download it first before it can be
# injected to our project
# Run at import time: the .pyd cannot be distributed via PyPI, so it must
# be fetched before the relative import below can resolve.
download()
from . import annhub as annhub
| [
"noreply@github.com"
] | noreply@github.com |
48171eaf40c64fcf8748da54f2d65fe4eea701b6 | 5a6cd9da73ad197e6ca29cce0436640797991096 | /bench/app/benchmark/use_cases/listeners/sms_alert_listener.py | cdcd12c842ac62ca9c09a1f3212464711d6df263 | [] | no_license | in-void/flask-ddd-ca | 1c944f040f7001318ac2e73a3bfb8b36271424e6 | 201246cdd003c08b89d8bee08790db2afd9f0b72 | refs/heads/master | 2023-05-27T19:39:06.290935 | 2019-08-27T15:51:08 | 2019-08-27T15:51:08 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,248 | py | # -*- coding: utf-8 -*-
from bench.app.benchmark.domain.config import NotificationsConfig
from bench.app.benchmark.domain.events import ComparativeBenchmarkFinished
from bench.app.benchmark.infrastructure.repositories import ComparativeBenchmarkRepository
from bench.app.core.domain.events_dispatcher import DomainEventsListener
from bench.app.core.domain.specification import Specification
from bench.app.notifications.use_cases.commands import SendSmsCommand
class ComparativeBenchmarkFinishedSmsAlertListener(DomainEventsListener):
    """Domain-event listener that fires an SMS alert when a finished
    comparative benchmark satisfies the configured specification."""

    def __init__(self,
                 config: NotificationsConfig,
                 specification: Specification,
                 benchmark_repository: ComparativeBenchmarkRepository) -> None:
        super().__init__()
        # Collaborators injected by the composition root.
        self.config = config
        self.specification = specification
        self.benchmark_repository = benchmark_repository

    def execute(self, event: ComparativeBenchmarkFinished):
        # Load the finished benchmark; alert only when the spec matches.
        benchmark = self.benchmark_repository.get_by_id(event.benchmark_id)
        if not self.specification.is_satisfied_by(benchmark):
            return None
        return self.send_sms()

    def send_sms(self):
        # Destination phone number comes from the notifications config.
        return SendSmsCommand(self.config.notification_sms_phone_number, 'Your site is very slow')
| [
"barnard.kano@gmail.com"
] | barnard.kano@gmail.com |
7fc8092fd2f9fce44d8b397616ccbd500a075fa8 | 7ed07e6adefeb9fe088121bfd3b2509c5943ec20 | /extra_apps/xadmin/plugins/export.py | 743b3b7f8479686468db64d270fdb63c91c6e602 | [] | no_license | jon110101/untitled1 | 3639a381f4877723e865693167758e85f89138e1 | 0ee271f28580403c7efa86d182366113449b114e | refs/heads/master | 2021-05-09T11:46:09.814064 | 2018-01-26T02:54:27 | 2018-01-26T02:54:27 | 118,997,511 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,560 | py | import io
import datetime
import sys
# from future.utils import iteritems
from django.http import HttpResponse
from django.template import loader
from django.utils import six
from django.utils.encoding import force_text, smart_text
from django.utils.html import escape
from django.utils.translation import ugettext as _
from django.utils.xmlutils import SimplerXMLGenerator
from django.db.models import BooleanField, NullBooleanField
from xadmin.plugins.utils import get_context_dict
from xadmin.sites import site
from xadmin.views import BaseAdminPlugin, ListAdminView
from xadmin.util import json
from xadmin.views.list import ALL_VAR
# Optional spreadsheet backends: which export formats are offered depends
# on whether these writer libraries are importable.
# Bug fix: catch only ImportError — the original bare `except:` also
# swallowed unrelated failures (e.g. KeyboardInterrupt, SystemExit).
try:
    import xlwt
    has_xlwt = True
except ImportError:
    has_xlwt = False

try:
    import xlsxwriter
    has_xlsxwriter = True
except ImportError:
    has_xlsxwriter = False
class ExportMenuPlugin(BaseAdminPlugin):
    """Renders the export drop-down in the change-list top toolbar."""

    list_export = ('xlsx', 'xls', 'csv', 'xml', 'json')
    export_names = {'xlsx': 'Excel 2007', 'xls': 'Excel', 'csv': 'CSV',
                    'xml': 'XML', 'json': 'JSON'}

    def init_request(self, *args, **kwargs):
        # Keep only the formats whose writer library is actually installed.
        available = []
        for fmt in self.list_export:
            if fmt == 'xlsx' and not has_xlsxwriter:
                continue
            if fmt == 'xls' and not has_xlwt:
                continue
            available.append(fmt)
        self.list_export = available

    def block_top_toolbar(self, context, nodes):
        if not self.list_export:
            return
        # "Export all" only makes sense when the list is paginated and the
        # user is not already viewing all rows.
        paginated = self.admin_view.paginator.count > self.admin_view.list_per_page
        context.update({
            'show_export_all': paginated and ALL_VAR not in self.admin_view.request.GET,
            'form_params': self.admin_view.get_form_params({'_do_': 'export'}, ('export_type',)),
            'export_types': [{'type': fmt, 'name': self.export_names[fmt]} for fmt in self.list_export],
        })
        nodes.append(loader.render_to_string(
            'xadmin/blocks/model_list.top_toolbar.exports.html',
            context=get_context_dict(context)))
class ExportPlugin(BaseAdminPlugin):
    """Streams the current change-list out as xlsx/xls/csv/xml/json when
    the request carries ``_do_=export``."""

    export_mimes = {'xlsx': 'application/vnd.ms-excel',
                    'xls': 'application/vnd.ms-excel', 'csv': 'text/csv',
                    'xml': 'application/xhtml+xml', 'json': 'application/json'}

    def init_request(self, *args, **kwargs):
        # Only activate this plugin for explicit export requests.
        return self.request.GET.get('_do_') == 'export'

    def _format_value(self, o):
        # Booleans keep their raw value; "muted" placeholder cells are
        # unwrapped from their <span>; everything else is HTML-escaped text.
        if (o.field is None and getattr(o.attr, 'boolean', False)) or \
                (o.field and isinstance(o.field, (BooleanField, NullBooleanField))):
            value = o.value
        elif str(o.text).startswith("<span class='text-muted'>"):
            value = escape(str(o.text)[25:-7])
        else:
            value = escape(str(o.text))
        return value

    def _get_objects(self, context):
        # Rows as a list of {header text: cell value} dicts,
        # limited to exportable cells.
        headers = [c for c in context['result_headers'].cells if c.export]
        rows = context['results']
        return [dict([
            (force_text(headers[i].text), self._format_value(o)) for i, o in
            enumerate(filter(lambda c: getattr(c, 'export', False), r.cells))]) for r in rows]

    def _get_datas(self, context):
        # Rows as a list of lists, with the header row inserted first.
        rows = context['results']
        new_rows = [[self._format_value(o) for o in
                     filter(lambda c: getattr(c, 'export', False), r.cells)] for r in rows]
        new_rows.insert(0, [force_text(c.text) for c in context['result_headers'].cells if c.export])
        return new_rows

    def get_xlsx_export(self, context):
        """Build an .xlsx workbook in memory and return its bytes."""
        datas = self._get_datas(context)
        output = io.BytesIO()
        export_header = (
            self.request.GET.get('export_xlsx_header', 'off') == 'on')

        model_name = self.opts.verbose_name
        book = xlsxwriter.Workbook(output)
        sheet = book.add_worksheet(
            u"%s %s" % (_(u'Sheet'), force_text(model_name)))
        styles = {'datetime': book.add_format({'num_format': 'yyyy-mm-dd hh:mm:ss'}),
                  'date': book.add_format({'num_format': 'yyyy-mm-dd'}),
                  'time': book.add_format({'num_format': 'hh:mm:ss'}),
                  'header': book.add_format({'font': 'name Times New Roman', 'color': 'red', 'bold': 'on', 'num_format': '#,##0.00'}),
                  'default': book.add_format()}

        if not export_header:
            datas = datas[1:]
        for rowx, row in enumerate(datas):
            for colx, value in enumerate(row):
                if export_header and rowx == 0:
                    cell_style = styles['header']
                else:
                    # Pick a number format matching the Python value's type.
                    if isinstance(value, datetime.datetime):
                        cell_style = styles['datetime']
                    elif isinstance(value, datetime.date):
                        cell_style = styles['date']
                    elif isinstance(value, datetime.time):
                        cell_style = styles['time']
                    else:
                        cell_style = styles['default']
                sheet.write(rowx, colx, value, cell_style)
        book.close()

        output.seek(0)
        return output.getvalue()

    def get_xls_export(self, context):
        """Build a legacy .xls workbook in memory and return its bytes."""
        datas = self._get_datas(context)
        output = io.BytesIO()
        export_header = (
            self.request.GET.get('export_xls_header', 'off') == 'on')

        model_name = self.opts.verbose_name
        book = xlwt.Workbook(encoding='utf8')
        sheet = book.add_sheet(
            u"%s %s" % (_(u'Sheet'), force_text(model_name)))
        styles = {'datetime': xlwt.easyxf(num_format_str='yyyy-mm-dd hh:mm:ss'),
                  'date': xlwt.easyxf(num_format_str='yyyy-mm-dd'),
                  'time': xlwt.easyxf(num_format_str='hh:mm:ss'),
                  'header': xlwt.easyxf('font: name Times New Roman, color-index red, bold on', num_format_str='#,##0.00'),
                  'default': xlwt.Style.default_style}

        if not export_header:
            datas = datas[1:]
        for rowx, row in enumerate(datas):
            for colx, value in enumerate(row):
                if export_header and rowx == 0:
                    cell_style = styles['header']
                else:
                    if isinstance(value, datetime.datetime):
                        cell_style = styles['datetime']
                    elif isinstance(value, datetime.date):
                        cell_style = styles['date']
                    elif isinstance(value, datetime.time):
                        cell_style = styles['time']
                    else:
                        cell_style = styles['default']
                sheet.write(rowx, colx, value, style=cell_style)
        book.save(output)

        output.seek(0)
        return output.getvalue()

    def _format_csv_text(self, t):
        # Booleans become localized Yes/No; strings are quote-escaped and
        # wrapped in double quotes.
        if isinstance(t, bool):
            return _('Yes') if t else _('No')
        # '\\,' spelled explicitly (same runtime value as the original
        # '\,' literal, which triggers an invalid-escape warning).
        t = t.replace('"', '""').replace(',', '\\,')
        cls_str = str if six.PY3 else basestring
        if isinstance(t, cls_str):
            t = '"%s"' % t
        return t

    def get_csv_export(self, context):
        """Serialize rows to CSV text (header optional via query param)."""
        datas = self._get_datas(context)
        stream = []

        if self.request.GET.get('export_csv_header', 'off') != 'on':
            datas = datas[1:]
        for row in datas:
            stream.append(','.join(map(self._format_csv_text, row)))

        return '\r\n'.join(stream)

    def _to_xml(self, xml, data):
        # Recursively serialize lists/dicts/scalars into XML elements.
        if isinstance(data, (list, tuple)):
            for item in data:
                xml.startElement("row", {})
                self._to_xml(xml, item)
                xml.endElement("row")
        elif isinstance(data, dict):
            # Bug fix: dict.iteritems() does not exist on Python 3; use
            # six.iteritems (six is already imported at module level) to
            # stay compatible with both Python 2 and 3.
            for key, value in six.iteritems(data):
                key = key.replace(' ', '_')
                xml.startElement(key, {})
                self._to_xml(xml, value)
                xml.endElement(key)
        else:
            xml.characters(smart_text(data))

    def get_xml_export(self, context):
        """Serialize rows to an XML fragment (first line after the XML
        declaration is returned)."""
        results = self._get_objects(context)
        stream = io.StringIO()

        xml = SimplerXMLGenerator(stream, "utf-8")
        xml.startDocument()
        xml.startElement("objects", {})

        self._to_xml(xml, results)

        xml.endElement("objects")
        xml.endDocument()

        return stream.getvalue().split('\n')[1]

    def get_json_export(self, context):
        """Serialize rows to JSON; pretty-print when requested."""
        results = self._get_objects(context)
        return json.dumps({'objects': results}, ensure_ascii=False,
                          indent=(self.request.GET.get('export_json_format', 'off') == 'on') and 4 or None)

    def get_response(self, response, context, *args, **kwargs):
        # Build a download response for the requested export format;
        # the dispatch target is get_<type>_export.
        file_type = self.request.GET.get('export_type', 'csv')
        response = HttpResponse(
            content_type="%s; charset=UTF-8" % self.export_mimes[file_type])

        file_name = self.opts.verbose_name.replace(' ', '_')
        response['Content-Disposition'] = ('attachment; filename=%s.%s' % (
            file_name, file_type)).encode('utf-8')

        response.write(getattr(self, 'get_%s_export' % file_type)(context))
        return response

    # View Methods
    def get_result_list(self, __):
        # When exporting "all", disable pagination before delegating.
        if self.request.GET.get('all', 'off') == 'on':
            self.admin_view.list_per_page = sys.maxsize
        return __()

    def result_header(self, item, field_name, row):
        # A header is exportable unless its attr explicitly opts out.
        item.export = not item.attr or field_name == '__str__' or getattr(item.attr, 'allow_export', True)
        return item

    def result_item(self, item, obj, field_name, row):
        # A cell is exportable when backed by a field, the __str__ column,
        # or an attr that has not opted out.
        item.export = item.field or field_name == '__str__' or getattr(item.attr, 'allow_export', True)
        return item
# Hook both export plugins into every change-list view.
site.register_plugin(ExportMenuPlugin, ListAdminView)
site.register_plugin(ExportPlugin, ListAdminView)
| [
"hyyy1101@163.com"
] | hyyy1101@163.com |
4f27f3c576f15a3cc19c726ce07b1b97751878de | d839e0558ce7c6701302937dda53759a4357d9e1 | /quiz/urls.py | 72982e32db1ed9862f27a5e9ace381669cbc1d44 | [] | no_license | rachit7399/Online-exam-Django | ce28e81c7626d78129d77a0db95376ba2c10a7bd | 575fb077d9fc443035767b1928d2d6029e47ccc8 | refs/heads/master | 2023-05-04T05:20:51.070098 | 2021-05-04T12:43:36 | 2021-05-04T12:43:36 | 364,252,368 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 900 | py | """user URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.urls import include, path
from rest_framework.routers import DefaultRouter
from . import views
# DRF router generates the list/detail routes for the quiz viewset.
router = DefaultRouter()
# basename is supplied explicitly here; presumably QuizViewSet does not
# declare a .queryset attribute — TODO confirm against the viewset.
router.register(r'', views.QuizViewSet, basename='quiz')

urlpatterns = [
    # Mount all router-generated routes at this app's URL root.
    path('', include(router.urls)),
]
| [
"rachit.rachit.bundela@gmail.com"
] | rachit.rachit.bundela@gmail.com |
182707792e16940dfa836c5a377cfaeab1a95585 | c5ef48fce207b9ecf60144eb3dbbc9f938269d80 | /config/settings/production.py | 2cb6207b3f3c08ee610a917c7fdaf40855ce56c0 | [
"BSD-3-Clause"
] | permissive | wservices/django-cp-email-manager | c8d076a54e2c269402a71424b7e19bc5c9dea631 | f5bc08231d556d97e64facea2bbbb335600f6e3c | refs/heads/master | 2023-01-11T23:13:50.022565 | 2022-01-13T21:04:21 | 2022-01-13T21:04:21 | 140,289,852 | 0 | 0 | BSD-3-Clause | 2022-12-27T15:34:28 | 2018-07-09T13:31:31 | JavaScript | UTF-8 | Python | false | false | 2,697 | py | # -*- coding: utf-8 -*-
"""
Production Configurations
- Use Redis for cache
"""
from __future__ import absolute_import, unicode_literals
from .common import * # noqa
# SECURITY CONFIGURATION
# ------------------------------------------------------------------------------
# See https://docs.djangoproject.com/en/1.9/ref/middleware/#module-django.middleware.security
# and https://docs.djangoproject.com/ja/1.9/howto/deployment/checklist/#run-manage-py-check-deploy
# set this to 60 seconds and then to 518400 when you can prove it works
SECURE_HSTS_SECONDS = 60
SECURE_HSTS_INCLUDE_SUBDOMAINS = env.bool(
'DJANGO_SECURE_HSTS_INCLUDE_SUBDOMAINS', default=True)
SECURE_CONTENT_TYPE_NOSNIFF = env.bool(
'DJANGO_SECURE_CONTENT_TYPE_NOSNIFF', default=True)
SECURE_BROWSER_XSS_FILTER = True
#SESSION_COOKIE_SECURE = True
SESSION_COOKIE_HTTPONLY = True
SECURE_SSL_REDIRECT = env.bool('DJANGO_SECURE_SSL_REDIRECT', default=False)
#CSRF_COOKIE_SECURE = True
CSRF_COOKIE_HTTPONLY = True
#X_FRAME_OPTIONS = 'DENY'
# Disable DEBUG mode
DEBUG = False

# Mirror the DEBUG flag into every template engine that exposes an
# OPTIONS dict, so template debug output stays off in production.
if 'TEMPLATES' in locals():
    for num,t in enumerate(TEMPLATES):
        if type(t.get('OPTIONS')) is dict:
            TEMPLATES[num]['OPTIONS']['debug'] = DEBUG
# SITE CONFIGURATION
# ------------------------------------------------------------------------------
# Hosts/domain names that are valid for this site
# See https://docs.djangoproject.com/en/1.11/ref/settings/#allowed-hosts
ALLOWED_HOSTS = env.list('DJANGO_ALLOWED_HOSTS', default=['*'])
# END SITE CONFIGURATION
INSTALLED_APPS += ('gunicorn', )
# TEMPLATE CONFIGURATION
# ------------------------------------------------------------------------------
# See:
# https://docs.djangoproject.com/en/dev/ref/templates/api/#django.template.loaders.cached.Loader
TEMPLATES[0]['OPTIONS']['loaders'] = [
('django.template.loaders.cached.Loader', [
'django.template.loaders.filesystem.Loader', 'django.template.loaders.app_directories.Loader', ]),
]
# CACHING
# ------------------------------------------------------------------------------
"""
REDIS_LOCATION = '{0}/{1}'.format(env('REDIS_URL'), 0)
CACHES = {
'default': {
'BACKEND': 'django_redis.cache.RedisCache',
'LOCATION': REDIS_LOCATION,
'OPTIONS': {
'CLIENT_CLASS': 'django_redis.client.DefaultClient',
'IGNORE_EXCEPTIONS': True, # mimics memcache behavior.
# http://niwinz.github.io/django-redis/latest/#_memcached_exceptions_behavior
}
}
}
"""
# Your production stuff: Below this line define 3rd party library settings
# ------------------------------------------------------------------------------
| [
"info@wservices.ch"
] | info@wservices.ch |
91fe7b157dda79175abcc8c63072cce47e5a4bfd | 2c8d3e341e813c1b1b88ae824edeaadb366aec0a | /Parser/PY Files/rrt-greedypassive/smo2-14.py | ebaf369a8a3b92a25da1be8c2f941fa5ed65534c | [] | no_license | kiriphorito/MoveAndTag-Manticore | 2e24a958f4941556b2d2714563718069cc5b208f | d07a3d8c0bacf34cf5f433384a6fd45170896b7a | refs/heads/master | 2021-01-20T11:40:49.232449 | 2017-02-26T14:08:48 | 2017-02-26T14:08:48 | 82,548,310 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 87,736 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
u"""
@brief: Path Planning Sample Code with Randamized Rapidly-Exploring Random Trees (RRT)
@author: AtsushiSakai
@license: MIT
"""
import shapely
from shapely.geometry import Polygon, LineString, Point, MultiPoint, GeometryCollection
import matplotlib.pyplot as plt
from ast import literal_eval
import datetime
import random
import math
import copy
def drawRobots(robots):
    """Plot every robot position as a point marker on the current axes."""
    for position in robots:
        rx, ry = position
        plt.plot(rx, ry, "o")
def drawPolygonNoFill(points, color):
    """Add an unfilled polygon outline of the given colour to the axes."""
    outline = plt.Polygon(points, fill=False, color=color)
    plt.gca().add_patch(outline)
def drawPolygon(points):
    """Add a filled polygon patch built from *points* to the axes."""
    patch = plt.Polygon(points)
    plt.gca().add_patch(patch)
def drawPolygons(polygons):
    """Draw every polygon in *polygons* as a filled patch.

    Falls back to a message when no polygon collection was supplied.
    """
    try:
        for pts in polygons:
            drawPolygon(pts)
    except (TypeError, ValueError):
        # Bug fix: iterating None raises TypeError, which the original
        # `except ValueError` never caught, so the intended fallback
        # message was unreachable.
        print ("no polygons specified")
def drawPolygonNoFill(points, color):
    # NOTE(review): this redefines the identical function declared earlier
    # in the module; this later definition wins at import time.
    shape = plt.Polygon(points, fill=False, color=color)
    plt.gca().add_patch(shape)
def drawPolygonsNoFill(polygons):
    """Draw every polygon in *polygons* as a red outline.

    Falls back to a message when no polygon collection was supplied.
    """
    try:
        for pts in polygons:
            drawPolygonNoFill(pts, 'red')
    except (TypeError, ValueError):
        # Bug fix: iterating None raises TypeError, not ValueError, so the
        # original handler could never report the missing collection.
        print ("no polygons specified")
class RRT():
    u"""
    Rapidly-exploring Random Tree (RRT) path planner.

    Grows a tree from `start` by sampling the square [minrand, maxrand]^2,
    stepping `expandDis` toward each sample from the nearest tree node, and
    stopping once a node gets within `expandDis` of `goal` with a
    collision-free final connection.
    """

    def __init__(self, start, goal, obstacleList, randArea, expandDis=1.0, goalSampleRate=5, maxIter=500):
        u"""
        start: start position [x, y]
        goal: goal position [x, y]
        obstacleList: obstacle polygons (lists of (x, y) vertices)
        randArea: random sampling bounds [min, max]
        expandDis: step length for each tree extension
        goalSampleRate: percent chance of sampling the goal directly
        maxIter: kept for interface compatibility (not used by Planning)
        """
        self.start = Node(start[0], start[1])
        self.end = Node(goal[0], goal[1])
        self.minrand = randArea[0]
        self.maxrand = randArea[1]
        self.expandDis = expandDis
        self.goalSampleRate = goalSampleRate
        self.maxIter = maxIter
        # Bug fix: store the caller's obstacles. The original implementation
        # ignored this argument and read the module-level global
        # `obstacleList` inside Planning/DrawGraph instead.
        self.obstacleList = obstacleList

    def Planning(self, animation=True):
        u"""
        Run the planner. Returns the path as [[x, y], ...] ordered from
        goal back to start. `animation` toggles incremental drawing.
        """
        self.nodeList = [self.start]
        while True:
            # Sample a random point, biased toward the goal
            # goalSampleRate% of the time.
            if random.randint(0, 100) > self.goalSampleRate:
                rnd = [random.uniform(self.minrand, self.maxrand),
                       random.uniform(self.minrand, self.maxrand)]
            else:
                rnd = [self.end.x, self.end.y]

            # Extend the nearest tree node one step toward the sample.
            nind = self.GetNearestListIndex(self.nodeList, rnd)
            nearestNode = self.nodeList[nind]
            theta = math.atan2(rnd[1] - nearestNode.y, rnd[0] - nearestNode.x)

            newNode = copy.deepcopy(nearestNode)
            newNode.x += self.expandDis * math.cos(theta)
            newNode.y += self.expandDis * math.sin(theta)
            newNode.parent = nind

            # Reject the extension if the new edge hits an obstacle.
            if not self.__CollisionCheck(newNode, self.obstacleList, nearestNode):
                continue
            self.nodeList.append(newNode)

            # Stop when the new node can reach the goal in a single,
            # collision-free step.
            dx = newNode.x - self.end.x
            dy = newNode.y - self.end.y
            d = math.sqrt(dx * dx + dy * dy)
            if d <= self.expandDis:
                if not self.__CollisionCheck(newNode, self.obstacleList, self.end):
                    continue
                else:
                    break

            if animation:
                self.DrawGraph(rnd)

        # Reconstruct the path by walking parent indices back from the
        # last node added to the tree.
        path = [[self.end.x, self.end.y]]
        lastIndex = len(self.nodeList) - 1
        while self.nodeList[lastIndex].parent is not None:
            node = self.nodeList[lastIndex]
            path.append([node.x, node.y])
            lastIndex = node.parent
        path.append([self.start.x, self.start.y])

        return path

    def DrawGraph(self, rnd=None):
        u"""
        Draw the current tree, the obstacles, and the endpoints
        (debug visualisation; clears the current figure).
        """
        import matplotlib.pyplot as plt
        plt.clf()
        if rnd is not None:
            plt.plot(rnd[0], rnd[1], "^k")
        for node in self.nodeList:
            if node.parent is not None:
                plt.plot([node.x, self.nodeList[node.parent].x],
                         [node.y, self.nodeList[node.parent].y], "-g")
        drawPolygons(self.obstacleList)
        plt.plot(self.start.x, self.start.y, "xr")
        plt.plot(self.end.x, self.end.y, "xr")
        plt.axis()
        plt.grid(True)
        plt.pause(0.01)

    def GetNearestListIndex(self, nodeList, rnd):
        # Index of the node with minimum squared distance to rnd.
        dlist = [(node.x - rnd[0]) ** 2 + (node.y - rnd[1]) ** 2 for node in nodeList]
        minind = dlist.index(min(dlist))
        return minind

    def __CollisionCheck(self, node, obstacleList, nearestNode):
        # The candidate tree edge is the segment nearestNode -> node.
        first = [nearestNode.x, nearestNode.y]
        second = [node.x, node.y]
        return LineCollisionCheck(first, second, obstacleList)
def LineCollisionCheck(first, second, obstacleList):
    """Return True when the segment first -> second clears every polygon
    in obstacleList, False as soon as any polygon is hit.

    first, second: [x, y] endpoints of the candidate edge.
    obstacleList: polygons as lists of (x, y) vertices.
    """
    # Removed a large block of commented-out dead code (an earlier
    # boundary-intersection approach) and the unused `wkt` import.
    from shapely import geometry

    # Tolerance buffer around each obstacle; may need tuning to match
    # the coordinate precision in use.
    EPS = 1.2e-16

    x1 = first[0]
    y1 = first[1]
    x2 = second[0]
    y2 = second[1]

    line = geometry.LineString([(x1, y1), (x2, y2)])

    for poly in obstacleList:
        p1 = Polygon(poly)
        if p1.buffer(EPS).intersects(line):
            return False
    return True
#============ changed here =======
def supersmoothie(smoothie, obstacleList):
    """Greedy shortcut smoothing of an RRT path.

    From each waypoint, jump to the farthest later waypoint reachable by a
    straight collision-free segment and delete the points in between.
    Mutates the given list in place and returns it.
    """
    path = smoothie
    anchor = 0
    while True:
        last = len(path) - 1
        if anchor == last:
            # Reached the end of the (possibly shortened) path.
            break
        start = path[anchor]
        # Scan from the far end backwards for the first visible waypoint.
        for idx in range(last, 0, -1):
            if LineCollisionCheck(start, path[idx], obstacleList):
                del path[(anchor + 1):(idx)]
                break
        anchor += 1
    return path
class Node():
    u"""
    A single RRT tree node: a 2-D position plus the index of its parent
    in the planner's node list (None for the root).
    """

    def __init__(self, x, y):
        self.x = x
        self.y = y
        self.parent = None
def rrtpath(obstacles, startcoord, goalcoord, randAreas):
    """Plan with RRT, shortcut-smooth the result, plot it in red, and
    return the smoothed path ordered from start to goal."""
    planner = RRT(start=startcoord, goal=goalcoord, randArea=randAreas, obstacleList=obstacles)
    rawPath = planner.Planning(animation=False)
    smoothiePath = supersmoothie(rawPath, obstacles)
    # Plot the smoothed path, then flip it so it runs start -> goal.
    plt.plot([x for (x, y) in smoothiePath], [y for (x, y) in smoothiePath], '-r')
    smoothiePath.reverse()
    return smoothiePath
obstacleList = [[(-0.0976483872639774,-0.10564467932051001),(1.1844203300023048,-1.6406690453183885),(2.719444696000184,-0.3586003280521064)],[(0.09344181768081013,-0.0037622982511678633),(1.8909609879261418,0.8731212929971426),(1.4525191923019865,1.7718808781198088),(0.5537596071793208,1.3334390824956532),(-0.3231239840689898,3.130958252740985),(-1.221883569191656,2.6925164571168296),(-0.7834417735675007,1.793756871994164),(-1.6822013586901665,1.3553150763700084),(-2.5590849499384776,3.1528342466153396),(-3.457844535061143,2.7143924509911845),(-3.019402739436988,1.815632865868518),(-3.918162324559653,1.3771910702443635),(-2.5809609438128325,0.9168732807458531),(-2.142519148188677,0.018113695623187376),(-0.3449999779433453,0.8949972868714979)],[(-2.6362836621064423,-2.171447968159453),(-1.9730452317281557,-0.28462132416977814),(-2.9164585537229932,0.04699789101936824),(-3.2480777689121356,-0.896415430975471),(-5.134904412901811,-0.23317700059718405),(-5.4665236280909575,-1.1765903225920222),(-4.523110306096116,-1.5082095377811646),(-4.854729521285263,-2.4516228597760032),(-3.5796969841012793,-1.839828752970309)],[(-0.09326431107488953,-0.7177047580761363),(-2.0642318461590263,-1.0572441160957373),(-1.8944621671492246,-2.042727883637807),(-0.9089783996071571,-1.8729582046280058),(-0.5694390415875523,-3.843925739712143),(0.4160447259545158,-3.674156060702342),(0.2462750469447137,-2.688672293160271),(1.2317588144867808,-2.518902614150472),(0.07650536793491211,-1.703188525618204)],[(1.6912367367553314,3.6472839237915786),(0.3491998675076484,5.130165254707803),(-1.1336814634085783,3.7881283854601153)],[(3.0364639773182462,-1.0866567512476395),(5.036115428750128,-1.123994035379791),(5.054784070816206,-0.12416830966384929),(4.054958345100264,-0.1054996675977744),(4.092295629232416,1.8941517838341084),(3.092469903516481,1.9128204259001838),(3.0738012614504027,0.9129947001842416),(2.0739755357344603,0.9316633422503215),(3.0551326193843242,-0.08683102553169897)],[(2.199925702
5889793,-3.6134905987410137),(0.6692173603048053,-4.900709294558328),(1.3128267082134557,-5.666063465700415),(2.0781808793555427,-5.022454117791759),(3.365399575172855,-6.553162460075942),(4.130753746314942,-5.9095531121672895),(3.4871443984062918,-5.144198941025191),(4.252498569548378,-4.500589593116543),(2.8435350504976347,-4.378844769883103)],[(6.779700591705986,1.1062710772658166),(6.592106549807127,-0.8849116053236128),(7.587697891101842,-0.9787086262730467),(7.6814949120512726,0.016882715021669492),(9.672677594640707,-0.17071132687718227),(9.766474615590134,0.8248800144175337),(8.770883274295418,0.9186770353669614),(8.864680295244845,1.9142683766616826),(7.775291933000701,1.0124740563163892)],[(-3.9038285831817365,-7.261728006470083),(-5.869900578351039,-6.894902557774133),(-6.05331330269902,-7.8779385553587815),(-5.070277305114366,-8.061351279706761),(-5.437102753810313,-10.027423274876067),(-4.45406675622566,-10.210835999224042),(-4.270654031877689,-9.227800001639388),(-3.287618034293038,-9.411212725987365),(-4.0872413075297125,-8.244764004054735)],[(-2.358902014859919,2.972883137052348),(-0.9905650378277391,4.431531112845832),(-1.7198890257244739,5.115699601361923),(-2.404057514240567,4.386375613465183),(-3.8627054900340494,5.754712590497366),(-4.546873978550138,5.02538860260063),(-3.817549990653397,4.3412201140845355),(-4.501718479169493,3.611896126187795),(-3.088226002756658,3.6570516255684407)],[(5.777844918863362,6.48736803615264),(6.294680028738635,4.555301404485862),(7.260713344572029,4.813718959423501),(7.002295789634389,5.779752275256888),(8.934362421301168,6.296587385132165),(8.67594486636353,7.26262070096556),(7.709911550530138,7.004203146027915),(7.451493995592503,7.970236461861306),(6.743878234696751,6.745785591090278)],[(-7.568594026243649,-3.4212150346540535),(-5.7286486667517575,-4.2051800045884535),(-5.336666181784555,-3.285207324842508),(-6.256638861530501,-2.8932248398753084),(-5.472673891596104,-1.0532794803834102),(-6.392646571342048,-0.
6612969954162127),(-6.784629056309253,-1.5812696751621604),(-7.704601736055196,-1.1892871901949604),(-7.1766115412764515,-2.5012423549081078)],[(-6.557909876240082,6.4142762260776),(-4.558815664700304,6.3540904022637585),(-4.528722752793382,7.353637508033651),(-5.52826985856327,7.383730419940568),(-5.468084034749426,9.382824631480347),(-6.467631140519312,9.412917543387268),(-6.497724052426236,8.413370437617377),(-7.497271158196127,8.4434633495243),(-7.4370853343822905,10.442557561064078),(-8.436632440152165,10.472650472971004),(-8.466725352059095,9.473103367201112),(-9.466272457828987,9.50319627910803),(-8.496818263966023,8.473556261431218),(-8.52691117587294,7.474009155661334),(-6.527816964333157,7.413823331847489)],[(-1.4526620703961235,-6.332559581949827),(0.21168906537294796,-5.2235351021189285),(-0.34282317454249966,-4.391359534234392),(-1.1749987424270363,-4.9458717741498415),(-2.284023222257932,-3.2815206383807656),(-3.116198790142473,-3.8360328782962165),(-2.5616865502270216,-4.668208446180755),(-3.39386211811156,-5.222720686096201),(-2.007174310311572,-5.50038401406529)],[(1.8801620971319342,1.9306725504216893),(3.782652313636195,2.5475444788683976),(3.165780385189491,4.450034695372661)],[(8.602172345760605,-5.267843493927633),(8.631976640609798,-3.268065580256085),(7.632087683774018,-3.2531634328314842),(7.617185536349424,-4.253052389667263),(5.617407622677872,-4.223248094818072),(5.602505475253283,-5.223137051653843),(6.602394432089057,-5.238039199078441),(6.587492284664452,-6.237928155914207),(4.587714370992905,-6.208123861065015),(4.572812223568311,-7.208012817900797),(5.572701180404087,-7.222914965325391),(5.557799032979484,-8.222803922161164),(3.558021119307936,-8.19299962731197),(3.5431189718833433,-9.192888584147749),(4.543007928719108,-9.207790731572343),(4.528105781294512,-10.207679688408119),(5.542896885554871,-9.22269287899693),(6.542785842390649,-9.237595026421534),(6.572590137239857,-7.237817112749988),(7.572479094075634,-7.252719260174587),(7
.602283388924832,-5.252941346503036)],[(-0.022442822746903582,5.561475624726316),(1.448949502802951,6.916099050004714),(0.7716377901637573,7.651795212779643),(0.03594162738882873,6.974483500140445),(-1.3186817978895635,8.445875825690308),(-2.0543779606644965,7.768564113051108),(-1.377066248025298,7.032867950276176),(-2.1127624108002308,6.355556237636975),(-0.6997545353861018,6.297171787501244)],[(6.246352061466462,2.5280250527871324),(7.706441543995224,3.8948237588916137),(7.023042190942982,4.624868500155998),(6.292997449678604,3.9414691471037546),(4.926198743574115,5.401558629632515),(4.196154002309736,4.718159276580275),(4.879553355361981,3.9881145353158938),(4.149508614097597,3.304715182263655),(5.5629527084142225,3.258069794051512)]]
rand = (-12,12)  # (min, max) random-sampling bounds forwarded to RRT via rrtpath
content = ""  # accumulates every smoothed path as a ";"-terminated string
starttime = datetime.datetime.now()  # wall-clock start of the whole batch
print "Path 1 of 195"
path = []
start = (9.26880565930645,-2.1349141454065315)
goal = (9.105866833901965,-1.5759461120051803)
print " Node 1 and 2 of 3"
path += rrtpath(obstacleList,start,goal,rand)
start = (9.105866833901965,-1.5759461120051803)
goal = (9.673971470787487,-3.6342182568304926)
print " Node 2 and 3 of 3"
path += rrtpath(obstacleList,start,goal,rand)[1:]
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 2 of 195"
path = []
start = (9.105866833901965,-1.5759461120051803)
goal = (8.568031900497433,-1.4975136184192852)
print " Node 1 and 2 of 3"
path += rrtpath(obstacleList,start,goal,rand)
start = (8.568031900497433,-1.4975136184192852)
goal = (9.704903929823564,-1.422017205901584)
print " Node 2 and 3 of 3"
path += rrtpath(obstacleList,start,goal,rand)[1:]
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 3 of 195"
path = []
start = (8.568031900497433,-1.4975136184192852)
goal = (8.43613291364756,-1.5870007352129445)
print " Node 1 and 2 of 2"
path += rrtpath(obstacleList,start,goal,rand)
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 4 of 195"
path = []
start = (8.43613291364756,-1.5870007352129445)
goal = (7.690826386260094,-0.6909607418889596)
print " Node 1 and 2 of 3"
path += rrtpath(obstacleList,start,goal,rand)
start = (7.690826386260094,-0.6909607418889596)
goal = (7.416143131313904,-2.898620572610456)
print " Node 2 and 3 of 3"
path += rrtpath(obstacleList,start,goal,rand)[1:]
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 5 of 195"
path = []
start = (7.690826386260094,-0.6909607418889596)
goal = (6.66599961963127,-0.9732479865779773)
print " Node 1 and 2 of 2"
path += rrtpath(obstacleList,start,goal,rand)
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 6 of 195"
path = []
start = (6.66599961963127,-0.9732479865779773)
goal = (6.016380776930431,0.14544820224626953)
print " Node 1 and 2 of 2"
path += rrtpath(obstacleList,start,goal,rand)
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 7 of 195"
path = []
start = (6.016380776930431,0.14544820224626953)
goal = (6.072333330061122,0.35119809158608106)
print " Node 1 and 2 of 2"
path += rrtpath(obstacleList,start,goal,rand)
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 8 of 195"
path = []
start = (6.072333330061122,0.35119809158608106)
goal = (5.657777817142408,1.180453066255831)
print " Node 1 and 2 of 3"
path += rrtpath(obstacleList,start,goal,rand)
start = (5.657777817142408,1.180453066255831)
goal = (7.006470706464347,1.2269215794850954)
print " Node 2 and 3 of 3"
path += rrtpath(obstacleList,start,goal,rand)[1:]
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 9 of 195"
path = []
start = (5.657777817142408,1.180453066255831)
goal = (4.647857674603511,1.0627056690079062)
print " Node 1 and 2 of 3"
path += rrtpath(obstacleList,start,goal,rand)
start = (4.647857674603511,1.0627056690079062)
goal = (5.778392154791742,2.252135553707296)
print " Node 2 and 3 of 3"
path += rrtpath(obstacleList,start,goal,rand)[1:]
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 10 of 195"
path = []
start = (4.647857674603511,1.0627056690079062)
goal = (4.191034183770256,2.0198548288461886)
print " Node 1 and 2 of 2"
path += rrtpath(obstacleList,start,goal,rand)
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 11 of 195"
path = []
start = (4.191034183770256,2.0198548288461886)
goal = (3.8777928819999836,1.9587832249480215)
print " Node 1 and 2 of 2"
path += rrtpath(obstacleList,start,goal,rand)
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 12 of 195"
path = []
start = (3.8777928819999836,1.9587832249480215)
goal = (2.6736893594560165,0.98842574214137)
print " Node 1 and 2 of 2"
path += rrtpath(obstacleList,start,goal,rand)
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 13 of 195"
path = []
start = (5.778392154791742,2.252135553707296)
goal = (5.378374116367132,2.3888487417424997)
print " Node 1 and 2 of 2"
path += rrtpath(obstacleList,start,goal,rand)
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 14 of 195"
path = []
start = (5.378374116367132,2.3888487417424997)
goal = (5.251441850334018,3.0149716133418103)
print " Node 1 and 2 of 2"
path += rrtpath(obstacleList,start,goal,rand)
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 15 of 195"
path = []
start = (5.251441850334018,3.0149716133418103)
goal = (4.804349612537733,3.225546704313734)
print " Node 1 and 2 of 3"
path += rrtpath(obstacleList,start,goal,rand)
start = (4.804349612537733,3.225546704313734)
goal = (5.795792947922797,2.9405237937506232)
print " Node 2 and 3 of 3"
path += rrtpath(obstacleList,start,goal,rand)[1:]
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 16 of 195"
path = []
start = (4.804349612537733,3.225546704313734)
goal = (4.335095955303256,3.6089442344155866)
print " Node 1 and 2 of 2"
path += rrtpath(obstacleList,start,goal,rand)
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 17 of 195"
path = []
start = (4.335095955303256,3.6089442344155866)
goal = (3.447799045276305,4.298376655190307)
print " Node 1 and 2 of 2"
path += rrtpath(obstacleList,start,goal,rand)
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 18 of 195"
path = []
start = (3.447799045276305,4.298376655190307)
goal = (3.6602843555807834,4.942235532051567)
print " Node 1 and 2 of 3"
path += rrtpath(obstacleList,start,goal,rand)
start = (3.6602843555807834,4.942235532051567)
goal = (2.3172548956503363,3.3826192347960617)
print " Node 2 and 3 of 3"
path += rrtpath(obstacleList,start,goal,rand)[1:]
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 19 of 195"
path = []
start = (3.6602843555807834,4.942235532051567)
goal = (4.117964920496025,4.7803149147148805)
print " Node 1 and 2 of 3"
path += rrtpath(obstacleList,start,goal,rand)
start = (4.117964920496025,4.7803149147148805)
goal = (3.084544443920045,5.831669107696808)
print " Node 2 and 3 of 3"
path += rrtpath(obstacleList,start,goal,rand)[1:]
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 20 of 195"
path = []
start = (3.084544443920045,5.831669107696808)
goal = (2.978125212356181,6.010927126516542)
print " Node 1 and 2 of 2"
path += rrtpath(obstacleList,start,goal,rand)
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 21 of 195"
path = []
start = (2.978125212356181,6.010927126516542)
goal = (2.412777274608775,5.788603928961395)
print " Node 1 and 2 of 2"
path += rrtpath(obstacleList,start,goal,rand)
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 22 of 195"
path = []
start = (2.412777274608775,5.788603928961395)
goal = (1.5873689666215434,6.194701706082032)
print " Node 1 and 2 of 2"
path += rrtpath(obstacleList,start,goal,rand)
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 23 of 195"
path = []
start = (1.5873689666215434,6.194701706082032)
goal = (1.4563331615344168,6.543864998484281)
print " Node 1 and 2 of 2"
path += rrtpath(obstacleList,start,goal,rand)
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 24 of 195"
path = []
start = (1.4563331615344168,6.543864998484281)
goal = (1.8900906546556282,7.997684739706184)
print " Node 1 and 2 of 2"
path += rrtpath(obstacleList,start,goal,rand)
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 25 of 195"
path = []
start = (7.006470706464347,1.2269215794850954)
goal = (7.250951652339081,1.1259328337981636)
print " Node 1 and 2 of 2"
path += rrtpath(obstacleList,start,goal,rand)
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 26 of 195"
path = []
start = (7.250951652339081,1.1259328337981636)
goal = (7.449427089671383,1.9202939120610711)
print " Node 1 and 2 of 2"
path += rrtpath(obstacleList,start,goal,rand)
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 27 of 195"
path = []
start = (7.449427089671383,1.9202939120610711)
goal = (7.730769132587399,2.114268182442787)
print " Node 1 and 2 of 2"
path += rrtpath(obstacleList,start,goal,rand)
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 28 of 195"
path = []
start = (7.730769132587399,2.114268182442787)
goal = (7.399106396187706,2.4889263633795906)
print " Node 1 and 2 of 3"
path += rrtpath(obstacleList,start,goal,rand)
start = (7.399106396187706,2.4889263633795906)
goal = (9.113826194491194,1.8778238796507516)
print " Node 2 and 3 of 3"
path += rrtpath(obstacleList,start,goal,rand)[1:]
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 29 of 195"
path = []
start = (7.399106396187706,2.4889263633795906)
goal = (7.863332588319929,4.787743967666675)
print " Node 1 and 2 of 2"
path += rrtpath(obstacleList,start,goal,rand)
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 30 of 195"
path = []
start = (9.113826194491194,1.8778238796507516)
goal = (9.36660277250168,2.1304576218310096)
print " Node 1 and 2 of 3"
path += rrtpath(obstacleList,start,goal,rand)
start = (9.36660277250168,2.1304576218310096)
goal = (9.086238543106388,1.4583590451066843)
print " Node 2 and 3 of 3"
path += rrtpath(obstacleList,start,goal,rand)[1:]
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 31 of 195"
path = []
start = (2.3172548956503363,3.3826192347960617)
goal = (1.6722820587472427,2.56988166576264)
print " Node 1 and 2 of 2"
path += rrtpath(obstacleList,start,goal,rand)
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 32 of 195"
path = []
start = (1.6722820587472427,2.56988166576264)
goal = (1.2802884366507303,3.5322224226434784)
print " Node 1 and 2 of 3"
path += rrtpath(obstacleList,start,goal,rand)
start = (1.2802884366507303,3.5322224226434784)
goal = (0.4889492670643083,2.1106291671655306)
print " Node 2 and 3 of 3"
path += rrtpath(obstacleList,start,goal,rand)[1:]
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 33 of 195"
path = []
start = (1.2802884366507303,3.5322224226434784)
goal = (1.0543616206063096,4.374728835373109)
print " Node 1 and 2 of 2"
path += rrtpath(obstacleList,start,goal,rand)
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 34 of 195"
path = []
start = (1.0543616206063096,4.374728835373109)
goal = (-0.15696845582343677,5.040712282265883)
print " Node 1 and 2 of 2"
path += rrtpath(obstacleList,start,goal,rand)
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 35 of 195"
path = []
start = (0.4889492670643083,2.1106291671655306)
goal = (0.3629562758484415,2.5089718182266143)
print " Node 1 and 2 of 2"
path += rrtpath(obstacleList,start,goal,rand)
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 36 of 195"
path = []
start = (0.3629562758484415,2.5089718182266143)
goal = (-0.3254849357884009,3.1684891784931573)
print " Node 1 and 2 of 2"
path += rrtpath(obstacleList,start,goal,rand)
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 37 of 195"
path = []
start = (-0.3254849357884009,3.1684891784931573)
goal = (-1.4887443795680158,2.4417699293062434)
print " Node 1 and 2 of 2"
path += rrtpath(obstacleList,start,goal,rand)
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 38 of 195"
path = []
start = (-1.4887443795680158,2.4417699293062434)
goal = (-1.9229650582698596,2.1765142463680096)
print " Node 1 and 2 of 3"
path += rrtpath(obstacleList,start,goal,rand)
start = (-1.9229650582698596,2.1765142463680096)
goal = (-1.6826722522784685,3.6885346280433424)
print " Node 2 and 3 of 3"
path += rrtpath(obstacleList,start,goal,rand)[1:]
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 39 of 195"
path = []
start = (-1.9229650582698596,2.1765142463680096)
goal = (-3.3180973525734867,2.156065277993383)
print " Node 1 and 2 of 2"
path += rrtpath(obstacleList,start,goal,rand)
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 40 of 195"
path = []
start = (-3.3180973525734867,2.156065277993383)
goal = (-3.6148523433938955,2.4834838350322546)
print " Node 1 and 2 of 3"
path += rrtpath(obstacleList,start,goal,rand)
start = (-3.6148523433938955,2.4834838350322546)
goal = (-2.8343607830817596,0.9818721054290851)
print " Node 2 and 3 of 3"
path += rrtpath(obstacleList,start,goal,rand)[1:]
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 41 of 195"
path = []
start = (-3.6148523433938955,2.4834838350322546)
goal = (-4.144488725678545,2.148382143094313)
print " Node 1 and 2 of 3"
path += rrtpath(obstacleList,start,goal,rand)
start = (-4.144488725678545,2.148382143094313)
goal = (-3.2042220108241786,3.408321234013913)
print " Node 2 and 3 of 3"
path += rrtpath(obstacleList,start,goal,rand)[1:]
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 42 of 195"
path = []
start = (-4.144488725678545,2.148382143094313)
goal = (-4.394951214857922,2.6584100269492517)
print " Node 1 and 2 of 2"
path += rrtpath(obstacleList,start,goal,rand)
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 43 of 195"
path = []
start = (-4.394951214857922,2.6584100269492517)
goal = (-5.230060516874864,2.029046921350533)
print " Node 1 and 2 of 2"
path += rrtpath(obstacleList,start,goal,rand)
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 44 of 195"
path = []
start = (-5.230060516874864,2.029046921350533)
goal = (-6.156928105491783,3.734140638062998)
print " Node 1 and 2 of 2"
path += rrtpath(obstacleList,start,goal,rand)
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 45 of 195"
path = []
start = (-2.8343607830817596,0.9818721054290851)
goal = (-3.864551635916601,0.7485114524043155)
print " Node 1 and 2 of 2"
path += rrtpath(obstacleList,start,goal,rand)
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 46 of 195"
path = []
start = (-3.864551635916601,0.7485114524043155)
goal = (-4.690033191742115,0.513963283287822)
print " Node 1 and 2 of 3"
path += rrtpath(obstacleList,start,goal,rand)
start = (-4.690033191742115,0.513963283287822)
goal = (-3.9113391890089186,-0.43028491450499473)
print " Node 2 and 3 of 3"
path += rrtpath(obstacleList,start,goal,rand)[1:]
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 47 of 195"
path = []
start = (-4.690033191742115,0.513963283287822)
goal = (-5.314971197706688,-0.3304537811655148)
print " Node 1 and 2 of 2"
path += rrtpath(obstacleList,start,goal,rand)
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 48 of 195"
path = []
start = (-5.314971197706688,-0.3304537811655148)
goal = (-5.694102504842835,-0.4385781069096897)
print " Node 1 and 2 of 2"
path += rrtpath(obstacleList,start,goal,rand)
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 49 of 195"
path = []
start = (-5.694102504842835,-0.4385781069096897)
goal = (-6.559020208084309,-0.449744835204978)
print " Node 1 and 2 of 2"
path += rrtpath(obstacleList,start,goal,rand)
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 50 of 195"
path = []
start = (-6.559020208084309,-0.449744835204978)
goal = (-7.166717513973668,-1.1806400614865957)
print " Node 1 and 2 of 3"
path += rrtpath(obstacleList,start,goal,rand)
start = (-7.166717513973668,-1.1806400614865957)
goal = (-6.842959558637849,0.4757393760601438)
print " Node 2 and 3 of 3"
path += rrtpath(obstacleList,start,goal,rand)[1:]
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 51 of 195"
path = []
start = (-7.166717513973668,-1.1806400614865957)
goal = (-7.643360573964589,-0.7903277862773344)
print " Node 1 and 2 of 3"
path += rrtpath(obstacleList,start,goal,rand)
start = (-7.643360573964589,-0.7903277862773344)
goal = (-7.854430883546397,-2.242074896092036)
print " Node 2 and 3 of 3"
path += rrtpath(obstacleList,start,goal,rand)[1:]
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 52 of 195"
path = []
start = (-7.643360573964589,-0.7903277862773344)
goal = (-8.484999350021642,-0.8205544730906489)
print " Node 1 and 2 of 2"
path += rrtpath(obstacleList,start,goal,rand)
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 53 of 195"
path = []
start = (-8.484999350021642,-0.8205544730906489)
goal = (-9.12991548333534,-0.6266836341121227)
print " Node 1 and 2 of 2"
path += rrtpath(obstacleList,start,goal,rand)
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 54 of 195"
path = []
start = (-9.12991548333534,-0.6266836341121227)
goal = (-8.80398122118867,-0.12726243945995286)
print " Node 1 and 2 of 2"
path += rrtpath(obstacleList,start,goal,rand)
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 55 of 195"
path = []
start = (-8.80398122118867,-0.12726243945995286)
goal = (-8.60600724523206,-0.012585179106649136)
print " Node 1 and 2 of 3"
path += rrtpath(obstacleList,start,goal,rand)
start = (-8.60600724523206,-0.012585179106649136)
goal = (-9.229179443618536,0.008196549650168805)
print " Node 2 and 3 of 3"
path += rrtpath(obstacleList,start,goal,rand)[1:]
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 56 of 195"
path = []
start = (-6.842959558637849,0.4757393760601438)
goal = (-6.957076700445645,0.6407031152217932)
print " Node 1 and 2 of 2"
path += rrtpath(obstacleList,start,goal,rand)
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 57 of 195"
path = []
start = (-3.9113391890089186,-0.43028491450499473)
goal = (-3.31250935895741,-0.6502363447640143)
print " Node 1 and 2 of 2"
path += rrtpath(obstacleList,start,goal,rand)
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 58 of 195"
path = []
start = (-3.31250935895741,-0.6502363447640143)
goal = (-2.906585841816592,-2.277617102560206)
print " Node 1 and 2 of 2"
path += rrtpath(obstacleList,start,goal,rand)
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 59 of 195"
path = []
start = (-7.854430883546397,-2.242074896092036)
goal = (-9.334530708768636,-2.2488811330030147)
print " Node 1 and 2 of 2"
path += rrtpath(obstacleList,start,goal,rand)
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 60 of 195"
path = []
start = (1.8900906546556282,7.997684739706184)
goal = (1.1393605829491733,8.088002756821389)
print " Node 1 and 2 of 2"
path += rrtpath(obstacleList,start,goal,rand)
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 61 of 195"
path = []
start = (1.1393605829491733,8.088002756821389)
goal = (1.5362087475130526,8.687318769430508)
print " Node 1 and 2 of 3"
path += rrtpath(obstacleList,start,goal,rand)
start = (1.5362087475130526,8.687318769430508)
goal = (0.3663237524963332,8.46215339784765)
print " Node 2 and 3 of 3"
path += rrtpath(obstacleList,start,goal,rand)[1:]
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 62 of 195"
path = []
start = (1.5362087475130526,8.687318769430508)
goal = (0.8613648219119199,9.344321435806986)
print " Node 1 and 2 of 3"
path += rrtpath(obstacleList,start,goal,rand)
start = (0.8613648219119199,9.344321435806986)
goal = (2.2621151581350123,9.539490756504435)
print " Node 2 and 3 of 3"
path += rrtpath(obstacleList,start,goal,rand)[1:]
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
# ---------------------------------------------------------------------------
# Paths 63-159 of 195.
#
# The generated original repeated the same 10-15 line template 97 times; here
# the per-path data is lifted into a waypoint table and driven by one loop.
# Each table entry is the ordered waypoint list for one path.  Consecutive
# waypoints are planned with rrtpath(); legs after the first drop their
# leading point, which duplicates the previous leg's end point.
# (`rrtpath`, `obstacleList`, `rand` and `content` are defined earlier in
# this file — TODO confirm `rrtpath` returns a list of [x, y] points.)
# ---------------------------------------------------------------------------
_WAYPOINTS_63_159 = [
    [(0.3663237524963332, 8.46215339784765), (-0.022779772729471404, 8.064063956495012)],
    [(-0.022779772729471404, 8.064063956495012), (-0.6696528837622022, 7.8133138118793966), (0.049976115187920556, 7.1183450614739066)],
    [(-0.6696528837622022, 7.8133138118793966), (-1.1269680189987206, 8.345556918296971)],
    [(-1.1269680189987206, 8.345556918296971), (-1.5191788537345268, 9.612436572184603), (-2.3268096679849295, 7.3419683317068625)],
    [(0.8613648219119199, 9.344321435806986), (0.6161330503763178, 9.731405922324758)],
    [(2.2621151581350123, 9.539490756504435), (2.9575373418512303, 9.533540620840885)],
    [(2.9575373418512303, 9.533540620840885), (3.314857382453356, 8.97202817590097)],
    [(3.314857382453356, 8.97202817590097), (3.2305523161972154, 8.403703294556205), (3.954373619845816, 8.832549591887364)],
    [(3.2305523161972154, 8.403703294556205), (3.365393230418226, 7.785079106812827)],
    [(3.954373619845816, 8.832549591887364), (4.176877205574813, 10.049296541168218), (4.71828296021145, 7.780419430225468)],
    [(4.71828296021145, 7.780419430225468), (4.967778489470103, 7.6371345852129835)],
    [(4.967778489470103, 7.6371345852129835), (4.690502274352541, 7.2893996217542), (5.388711257017622, 8.129563411018975)],
    [(5.388711257017622, 8.129563411018975), (6.246665817483819, 7.918038386866499)],
    [(6.246665817483819, 7.918038386866499), (8.19291379321208, 9.58713230308128)],
    [(-1.5191788537345268, 9.612436572184603), (-1.7652516990264289, 9.822726217848757)],
    [(-1.7652516990264289, 9.822726217848757), (-2.1205634072191186, 9.372388625646646)],
    [(-2.1205634072191186, 9.372388625646646), (-2.106507297132149, 9.32642704810819), (-3.641092072350715, 9.50154728107934)],
    [(-3.641092072350715, 9.50154728107934), (-4.148760416570201, 9.23145677699608)],
    [(-4.148760416570201, 9.23145677699608), (-3.4937380808980985, 8.403034647254938), (-5.568162075171804, 9.509628412343694)],
    [(-3.4937380808980985, 8.403034647254938), (-3.9630100698593402, 7.479653189936464)],
    [(-3.9630100698593402, 7.479653189936464), (-4.293490473092685, 7.296968343772651)],
    [(-4.293490473092685, 7.296968343772651), (-4.113031650306021, 6.775052682499307)],
    [(-4.113031650306021, 6.775052682499307), (-4.143850773279473, 6.417659526706654)],
    [(-4.143850773279473, 6.417659526706654), (-3.761627314121272, 6.380397592310693), (-4.218946119744106, 5.912706230180941)],
    [(-5.568162075171804, 9.509628412343694), (-5.6653586541489105, 9.85596356044454)],
    [(-5.6653586541489105, 9.85596356044454), (-5.5430315425896, 9.934104537511029), (-5.805451954860403, 9.95189978891888)],
    [(-5.5430315425896, 9.934104537511029), (-5.603065158318151, 10.094277201283074)],
    [(-5.805451954860403, 9.95189978891888), (-5.992701345109014, 9.568647458452254)],
    [(-5.992701345109014, 9.568647458452254), (-6.419218094678854, 9.607657093543995)],
    [(-6.419218094678854, 9.607657093543995), (-6.483787336917773, 10.026230746635786), (-7.157912286511888, 9.434060367111194)],
    [(-7.157912286511888, 9.434060367111194), (-7.29766477466146, 10.021039806967163)],
    [(-7.29766477466146, 10.021039806967163), (-7.349073199358832, 10.427485584110332)],
    [(-7.349073199358832, 10.427485584110332), (-9.11870364164235, 10.081911813670395)],
    [(2.6736893594560165, 0.98842574214137), (1.7506069005087301, 0.49261557753542107)],
    [(1.7506069005087301, 0.49261557753542107), (1.571622772176747, 0.4254542833534174)],
    [(1.571622772176747, 0.4254542833534174), (0.43729340836385866, -0.8580992478699123)],
    [(9.673971470787487, -3.6342182568304926), (9.519365045048636, -3.8984750049273016)],
    [(9.519365045048636, -3.8984750049273016), (9.439078567128574, -4.036697075188319)],
    [(9.439078567128574, -4.036697075188319), (9.263162903129642, -5.578167579809958)],
    [(9.263162903129642, -5.578167579809958), (8.633437888955283, -5.654933993600254)],
    [(8.633437888955283, -5.654933993600254), (8.89314912083085, -6.565957540535947)],
    [(8.89314912083085, -6.565957540535947), (8.062032635736196, -6.585618972897694)],
    [(8.062032635736196, -6.585618972897694), (7.7790772975787785, -7.224458732048997)],
    [(7.7790772975787785, -7.224458732048997), (7.697557946422078, -7.674120146537135)],
    [(7.697557946422078, -7.674120146537135), (8.420865280915367, -8.300640069911628)],
    [(8.420865280915367, -8.300640069911628), (8.04041984933277, -8.679014838766365)],
    [(8.04041984933277, -8.679014838766365), (7.632196175225465, -9.235192324972834)],
    [(7.632196175225465, -9.235192324972834), (7.930299671096114, -9.569633918114347)],
    [(7.416143131313904, -2.898620572610456), (7.555234600194757, -4.2410144174376985), (6.025498407764836, -2.6227197627891004)],
    [(6.025498407764836, -2.6227197627891004), (5.835020157077738, -2.9457341887721755)],
    [(5.835020157077738, -2.9457341887721755), (6.006384467072163, -3.1397600620254753)],
    [(6.006384467072163, -3.1397600620254753), (5.846667736937718, -3.5408686890599963)],
    [(5.846667736937718, -3.5408686890599963), (5.011079740289752, -3.429092126354078)],
    [(5.011079740289752, -3.429092126354078), (4.870780379376928, -2.69551430815559), (4.224895127678684, -3.298590496358454), (4.762703674073473, -4.629930982756727)],
    [(4.870780379376928, -2.69551430815559), (4.503897197616272, -2.1125346552208377)],
    [(4.503897197616272, -2.1125346552208377), (4.791996181308599, -1.856127003880811)],
    [(4.791996181308599, -1.856127003880811), (5.078385220140683, -1.9082831529665985), (3.9703080167935685, -1.2063903313006001)],
    [(4.224895127678684, -3.298590496358454), (4.0716535636871924, -3.0065788501007678), (3.5846615146550125, -3.5710249028476735)],
    [(3.5846615146550125, -3.5710249028476735), (2.994098162781709, -3.1361293900164355)],
    [(2.994098162781709, -3.1361293900164355), (2.618286683711691, -3.0951331173838756)],
    [(2.618286683711691, -3.0951331173838756), (2.1262698400554108, -2.922854568461668), (2.697858685409278, -2.1812892646660487)],
    [(4.762703674073473, -4.629930982756727), (4.828121574721809, -4.755307269374144)],
    [(4.828121574721809, -4.755307269374144), (4.115667684142432, -5.13366286988697)],
    [(4.115667684142432, -5.13366286988697), (4.277526792271242, -6.07960509219607)],
    [(4.277526792271242, -6.07960509219607), (4.3410294243704985, -6.1917066488007775)],
    [(4.3410294243704985, -6.1917066488007775), (6.201056947211924, -6.1831161874019145), (3.4482804299506835, -7.893393839369679)],
    [(-2.906585841816592, -2.277617102560206), (-2.2905499592809013, -2.3071894244238793), (-4.135852074936734, -2.8263634088116367)],
    [(-4.135852074936734, -2.8263634088116367), (-5.124239938413424, -2.72208777856802)],
    [(-5.124239938413424, -2.72208777856802), (-5.168518111469236, -3.6403585272649783)],
    [(-5.168518111469236, -3.6403585272649783), (-4.67619021796943, -4.077563052502802), (-6.367786946871513, -4.31139680040583)],
    [(-6.367786946871513, -4.31139680040583), (-6.242645901453949, -4.637683431105549)],
    [(-6.242645901453949, -4.637683431105549), (-5.638407126593564, -5.104520730898647)],
    [(-5.638407126593564, -5.104520730898647), (-5.783901280797914, -6.289462367879554)],
    [(-5.783901280797914, -6.289462367879554), (-6.755558120579791, -5.962448490824169), (-6.214992076202863, -7.748097813259757)],
    [(-6.755558120579791, -5.962448490824169), (-7.169863977555668, -6.023125732121636)],
    [(-7.169863977555668, -6.023125732121636), (-7.154125811622611, -5.76104539675132)],
    [(-7.154125811622611, -5.76104539675132), (-7.474728500969665, -4.770567060472976)],
    [(-7.474728500969665, -4.770567060472976), (-8.999967623574893, -5.018614435670993)],
    [(-6.214992076202863, -7.748097813259757), (-6.366879732632414, -8.512132613022992)],
    [(-6.366879732632414, -8.512132613022992), (-5.875209493030557, -10.059740157612156)],
    [(-8.999967623574893, -5.018614435670993), (-9.45604081196029, -4.900886520925096), (-9.01370970705642, -4.176522839517413), (-8.99387185265867, -6.572675926888636)],
    [(-8.99387185265867, -6.572675926888636), (-8.96620291762029, -7.463057956322052)],
    [(-8.96620291762029, -7.463057956322052), (-9.280931943388815, -8.523336425367294)],
    [(-9.280931943388815, -8.523336425367294), (-9.1204392020113, -8.95604194841109)],
    [(-9.1204392020113, -8.95604194841109), (-8.286069577689865, -8.516946985433119)],
    [(-8.286069577689865, -8.516946985433119), (-7.7853607003807035, -9.837059412089193)],
    [(-7.7853607003807035, -9.837059412089193), (-7.553104900129899, -9.74930905677127), (-7.524986036986687, -10.207889842728509)],
    [(0.43729340836385866, -0.8580992478699123), (-0.8194793167139096, 0.029346360123080117)],
    [(3.4482804299506835, -7.893393839369679), (3.152308229395098, -8.014992912278453)],
    [(3.152308229395098, -8.014992912278453), (3.0953748882332306, -8.711534195741658)],
    [(3.0953748882332306, -8.711534195741658), (3.2421428335092326, -9.017768503115628), (2.6139867474173997, -8.831025454346078)],
    [(3.2421428335092326, -9.017768503115628), (3.5463419938744494, -9.227692793550515)],
    [(3.5463419938744494, -9.227692793550515), (5.477228766407922, -9.921532298717883)],
    [(2.6139867474173997, -8.831025454346078), (2.417266157133513, -8.78118473874098)],
    [(2.417266157133513, -8.78118473874098), (2.150734022491358, -8.140733226335755)],
    [(2.150734022491358, -8.140733226335755), (1.8882467510287615, -7.819820248216771)],
    [(1.8882467510287615, -7.819820248216771), (1.4606656517639767, -6.858763490914976)],
]

# Plan every path and serialise it exactly as the original generated code did.
# `path`, `pathStr`, `start` and `goal` deliberately keep their last assigned
# values after the loop, matching the original's final state.
# (Single-argument parenthesised print works identically under Python 2 and 3.)
for _offset, _nodes in enumerate(_WAYPOINTS_63_159):
    print("Path %d of 195" % (63 + _offset))
    path = []
    for _leg in range(len(_nodes) - 1):
        start = _nodes[_leg]
        goal = _nodes[_leg + 1]
        print(" Node %d and %d of %d" % (_leg + 1, _leg + 2, len(_nodes)))
        _segment = rrtpath(obstacleList, start, goal, rand)
        # Legs after the first drop their leading point: it duplicates the
        # previous leg's end point.
        path += _segment if _leg == 0 else _segment[1:]
    # str(path) yields "[[x, y], [x, y], ...]"; strip the outer brackets,
    # terminate with ";" and convert inner square brackets to parentheses.
    pathStr = str(path)[1:-1] + ";"
    pathStr = pathStr.replace("[", "(")
    pathStr = pathStr.replace("]", ")")
    content += pathStr
print "Path 160 of 195"
path = []
start = (1.4606656517639767,-6.858763490914976)
goal = (1.7185494988577936,-6.679764274568765)
print " Node 1 and 2 of 3"
path += rrtpath(obstacleList,start,goal,rand)
start = (1.7185494988577936,-6.679764274568765)
goal = (1.0199391269827505,-6.983375142860706)
print " Node 2 and 3 of 3"
path += rrtpath(obstacleList,start,goal,rand)[1:]
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 161 of 195"
path = []
start = (1.7185494988577936,-6.679764274568765)
goal = (1.8451329773974496,-6.121115573880867)
print " Node 1 and 2 of 2"
path += rrtpath(obstacleList,start,goal,rand)
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 162 of 195"
path = []
start = (1.0199391269827505,-6.983375142860706)
goal = (0.5938519659056283,-6.920019698069153)
print " Node 1 and 2 of 2"
path += rrtpath(obstacleList,start,goal,rand)
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 163 of 195"
path = []
start = (0.5938519659056283,-6.920019698069153)
goal = (0.3963159607528155,-6.511042794782924)
print " Node 1 and 2 of 3"
path += rrtpath(obstacleList,start,goal,rand)
start = (0.3963159607528155,-6.511042794782924)
goal = (-0.5858209285582952,-7.9946272347617775)
print " Node 2 and 3 of 3"
path += rrtpath(obstacleList,start,goal,rand)[1:]
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 164 of 195"
path = []
start = (-0.5858209285582952,-7.9946272347617775)
goal = (-1.0997884215068225,-7.9838711753419584)
print " Node 1 and 2 of 3"
path += rrtpath(obstacleList,start,goal,rand)
start = (-1.0997884215068225,-7.9838711753419584)
goal = (-0.5127077598548695,-9.18434879825937)
print " Node 2 and 3 of 3"
path += rrtpath(obstacleList,start,goal,rand)[1:]
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 165 of 195"
path = []
start = (-1.0997884215068225,-7.9838711753419584)
goal = (-1.0076892651539051,-7.37025854913373)
print " Node 1 and 2 of 3"
path += rrtpath(obstacleList,start,goal,rand)
start = (-1.0076892651539051,-7.37025854913373)
goal = (-2.1815038354549587,-8.733443384618694)
print " Node 2 and 3 of 3"
path += rrtpath(obstacleList,start,goal,rand)[1:]
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 166 of 195"
path = []
start = (-1.0076892651539051,-7.37025854913373)
goal = (-1.9165036563256397,-6.698311660935721)
print " Node 1 and 2 of 2"
path += rrtpath(obstacleList,start,goal,rand)
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 167 of 195"
path = []
start = (-1.9165036563256397,-6.698311660935721)
goal = (-2.0775874807561623,-5.930703140698889)
print " Node 1 and 2 of 2"
path += rrtpath(obstacleList,start,goal,rand)
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 168 of 195"
path = []
start = (-2.0775874807561623,-5.930703140698889)
goal = (-2.365353421721095,-5.733896425915096)
print " Node 1 and 2 of 2"
path += rrtpath(obstacleList,start,goal,rand)
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 169 of 195"
path = []
start = (-2.365353421721095,-5.733896425915096)
goal = (-3.067603465826119,-5.47066491392329)
print " Node 1 and 2 of 2"
path += rrtpath(obstacleList,start,goal,rand)
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 170 of 195"
path = []
start = (-3.067603465826119,-5.47066491392329)
goal = (-3.728428579344647,-6.040348765783154)
print " Node 1 and 2 of 3"
path += rrtpath(obstacleList,start,goal,rand)
start = (-3.728428579344647,-6.040348765783154)
goal = (-2.7290139800084336,-4.525066079806915)
print " Node 2 and 3 of 3"
path += rrtpath(obstacleList,start,goal,rand)[1:]
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 171 of 195"
path = []
start = (-3.728428579344647,-6.040348765783154)
goal = (-3.5054638443059822,-6.956646251299784)
print " Node 1 and 2 of 2"
path += rrtpath(obstacleList,start,goal,rand)
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 172 of 195"
path = []
start = (-3.5054638443059822,-6.956646251299784)
goal = (-2.7877262068549227,-7.346712929225562)
print " Node 1 and 2 of 2"
path += rrtpath(obstacleList,start,goal,rand)
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 173 of 195"
path = []
start = (-2.7877262068549227,-7.346712929225562)
goal = (-3.7419344266448498,-8.369615463292861)
print " Node 1 and 2 of 2"
path += rrtpath(obstacleList,start,goal,rand)
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 174 of 195"
path = []
start = (-2.7290139800084336,-4.525066079806915)
goal = (-1.2951268903831625,-4.0126914115704135)
print " Node 1 and 2 of 2"
path += rrtpath(obstacleList,start,goal,rand)
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 175 of 195"
path = []
start = (-0.5127077598548695,-9.18434879825937)
goal = (-0.8695446887033071,-9.45946903136333)
print " Node 1 and 2 of 3"
path += rrtpath(obstacleList,start,goal,rand)
start = (-0.8695446887033071,-9.45946903136333)
goal = (-0.3541807800094112,-9.744273368227468)
print " Node 2 and 3 of 3"
path += rrtpath(obstacleList,start,goal,rand)[1:]
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 176 of 195"
path = []
start = (-2.1815038354549587,-8.733443384618694)
goal = (-2.4080518785817047,-8.862931658870382)
print " Node 1 and 2 of 2"
path += rrtpath(obstacleList,start,goal,rand)
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 177 of 195"
path = []
start = (-2.4080518785817047,-8.862931658870382)
goal = (-2.6096073252927816,-9.376229526044405)
print " Node 1 and 2 of 2"
path += rrtpath(obstacleList,start,goal,rand)
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 178 of 195"
path = []
start = (-2.6096073252927816,-9.376229526044405)
goal = (-3.3139882581217863,-9.725216932039197)
print " Node 1 and 2 of 2"
path += rrtpath(obstacleList,start,goal,rand)
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 179 of 195"
path = []
start = (-1.2951268903831625,-4.0126914115704135)
goal = (-0.2255810506887137,-3.855492003088247)
print " Node 1 and 2 of 2"
path += rrtpath(obstacleList,start,goal,rand)
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 180 of 195"
path = []
start = (-0.2255810506887137,-3.855492003088247)
goal = (-0.0739021192642646,-4.437171506872378)
print " Node 1 and 2 of 2"
path += rrtpath(obstacleList,start,goal,rand)
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 181 of 195"
path = []
start = (-6.156928105491783,3.734140638062998)
goal = (-6.4049443270243565,3.6677149380760223)
print " Node 1 and 2 of 3"
path += rrtpath(obstacleList,start,goal,rand)
start = (-6.4049443270243565,3.6677149380760223)
goal = (-5.865000957124653,3.9374127155632817)
print " Node 2 and 3 of 3"
path += rrtpath(obstacleList,start,goal,rand)[1:]
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 182 of 195"
path = []
start = (-5.865000957124653,3.9374127155632817)
goal = (-6.2644544780202445,4.489071180532179)
print " Node 1 and 2 of 2"
path += rrtpath(obstacleList,start,goal,rand)
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 183 of 195"
path = []
start = (-6.2644544780202445,4.489071180532179)
goal = (-6.737408807554153,6.588071959629822)
print " Node 1 and 2 of 2"
path += rrtpath(obstacleList,start,goal,rand)
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 184 of 195"
path = []
start = (-6.737408807554153,6.588071959629822)
goal = (-6.673879426100205,7.1468551319439335)
print " Node 1 and 2 of 3"
path += rrtpath(obstacleList,start,goal,rand)
start = (-6.673879426100205,7.1468551319439335)
goal = (-8.499805428244695,5.950006521269653)
print " Node 2 and 3 of 3"
path += rrtpath(obstacleList,start,goal,rand)[1:]
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 185 of 195"
path = []
start = (-8.499805428244695,5.950006521269653)
goal = (-9.064443156320877,5.5046392668698445)
print " Node 1 and 2 of 2"
path += rrtpath(obstacleList,start,goal,rand)
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 186 of 195"
path = []
start = (-9.064443156320877,5.5046392668698445)
goal = (-9.29541265811847,5.2045812815942085)
print " Node 1 and 2 of 2"
path += rrtpath(obstacleList,start,goal,rand)
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 187 of 195"
path = []
start = (-9.29541265811847,5.2045812815942085)
goal = (-9.116101871676769,4.024882354866646)
print " Node 1 and 2 of 2"
path += rrtpath(obstacleList,start,goal,rand)
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 188 of 195"
path = []
start = (-9.116101871676769,4.024882354866646)
goal = (-8.622683698385151,3.939841784708454)
print " Node 1 and 2 of 3"
path += rrtpath(obstacleList,start,goal,rand)
start = (-8.622683698385151,3.939841784708454)
goal = (-9.099620300835987,3.5079419032963504)
print " Node 2 and 3 of 3"
path += rrtpath(obstacleList,start,goal,rand)[1:]
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 189 of 195"
path = []
start = (-9.099620300835987,3.5079419032963504)
goal = (-8.553587810125624,2.4009560231067297)
print " Node 1 and 2 of 2"
path += rrtpath(obstacleList,start,goal,rand)
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 190 of 195"
path = []
start = (7.863332588319929,4.787743967666675)
goal = (9.549608685221854,4.655524550924012)
print " Node 1 and 2 of 2"
path += rrtpath(obstacleList,start,goal,rand)
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 191 of 195"
path = []
start = (8.19291379321208,9.58713230308128)
goal = (8.950935755770873,8.67838274729046)
print " Node 1 and 2 of 2"
path += rrtpath(obstacleList,start,goal,rand)
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 192 of 195"
path = []
start = (8.950935755770873,8.67838274729046)
goal = (9.310539889834326,8.512850197588026)
print " Node 1 and 2 of 2"
path += rrtpath(obstacleList,start,goal,rand)
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 193 of 195"
path = []
start = (9.310539889834326,8.512850197588026)
goal = (9.47230690001575,8.695944536772469)
print " Node 1 and 2 of 2"
path += rrtpath(obstacleList,start,goal,rand)
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 194 of 195"
path = []
start = (9.47230690001575,8.695944536772469)
goal = (9.691035403834414,8.495966687641669)
print " Node 1 and 2 of 3"
path += rrtpath(obstacleList,start,goal,rand)
start = (9.691035403834414,8.495966687641669)
goal = (9.462228402978846,9.833818877270854)
print " Node 2 and 3 of 3"
path += rrtpath(obstacleList,start,goal,rand)[1:]
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
print "Path 195 of 195"
path = []
start = (9.691035403834414,8.495966687641669)
goal = (9.616519833776108,7.457770510416772)
print " Node 1 and 2 of 2"
path += rrtpath(obstacleList,start,goal,rand)
pathStr = str(path)[1:-1] + ";"
pathStr = pathStr.replace("[", "(")
pathStr = pathStr.replace("]", ")")
content += pathStr
# Compute total wall-clock runtime (starttime is set at the top of the
# script -- outside this chunk) and prepend it to the accumulated output.
endtime = datetime.datetime.now()
timeTaken = endtime - starttime
tts = str(timeTaken)
content = "Time Taken: " + tts + "\n" + content
# Drop the trailing separator character appended by the last path block.
content = content[:-1]
# Persist the solution text to disk.
# Fix: the original code ended with ``f.close`` (no parentheses), which
# references the bound method without calling it, so the file was never
# explicitly closed. A context manager guarantees the buffer is flushed
# and the handle released even if the write raises.
with open('smo2sol-14.txt', 'w') as f:
    f.write(content)
#plt.axis('scaled')
#plt.grid(True)
#plt.pause(0.01) # Need for Mac
#plt.show()
| [
"sam.h.pham@gmail.com"
] | sam.h.pham@gmail.com |
301441e999e632c03147751b84aff6d3fd63c0c1 | f0f5ef31ad7057845408a2168b30d7769632ac74 | /stumanageBackend/core/migrations/0001_initial.py | e867fae8a9feadd783a05101f645e8da552c492e | [] | no_license | sharif181/django-react-simple-app | 0429fc3492ada2d1f8dfa6ce14f0765ec10eb89c | c69fc8de15c7af650f59f907628b7033527f160b | refs/heads/main | 2023-03-15T23:24:22.592068 | 2021-03-03T17:07:58 | 2021-03-03T17:07:58 | 344,198,717 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 732 | py | # Generated by Django 3.1.5 on 2021-01-08 15:39
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial migration: creates the ``studentInfo`` table.

    Auto-generated by Django 3.1.5; defines a standalone model with
    basic contact details (auto ``id`` primary key, names, phone,
    age defaulting to 18, and a free-text address).
    """

    # First migration of the app: no dependencies on other migrations.
    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='studentInfo',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('first_name', models.CharField(max_length=100)),
                ('last_name', models.CharField(max_length=100)),
                ('phone_no', models.CharField(max_length=15)),
                ('age', models.IntegerField(default=18)),
                ('address', models.TextField()),
            ],
        ),
    ]
| [
"sharifkhan74647464@gmail.com"
] | sharifkhan74647464@gmail.com |
5f7ab5326e3927d2f99cf006d0e55d5bbd6b7b69 | 203f053a2caa0bea75cf6905a3f0eb4a967c4976 | /django_gplus/django_gplus/urls.py | a2a71b7b914ef433b350b5b1d958c67fd29492f7 | [] | no_license | Alsum/gsocial | 4c663400d68eeb93a2425e612f0b0d3611718550 | ebc3cfdc1844ae16ae9d4af911978ee65310c456 | refs/heads/master | 2021-01-23T02:39:53.540813 | 2017-03-24T01:22:08 | 2017-03-24T01:22:08 | 86,013,573 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 912 | py | """django_gplus URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url,include
from django.contrib import admin
# URL routing table: Django admin, python-social-auth OAuth endpoints,
# then the project's own app. The last pattern matches the empty prefix
# (effectively a catch-all), so it must remain after the other entries.
urlpatterns = [
    url(r'^admin/', admin.site.urls),
    url(r'^oauth/', include('social_django.urls', namespace='social')),
    url(r'^',include('mysocial.urls', namespace='mysocial')),
]
| [
"engsum@gmail.com"
] | engsum@gmail.com |
8a21fb04dd9a94e97bf3626e2d80e17dcf8ceb91 | 40e0ce23528ca0613d5494917362308b54de4ea6 | /lpthw/ex19_sd3.py | e1163339603e519c1b53db523a1c61c12ce7744c | [] | no_license | selimski/Study | bdeb929eeff216156fae1d07bd9a39c969c26284 | 92d2c273b7fbca483a5a4cbbef72cde87aab36bd | refs/heads/master | 2021-01-11T05:37:36.038780 | 2016-11-08T22:08:58 | 2016-11-08T22:08:58 | 71,508,900 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 433 | py | def func1(*args):
arg1, arg2 = args
print "Today you choose: %r as your first argument." % arg1
print "You didn't choose: %r. You did not... \n" % arg2
# Exercise func1 with every kind of argument: string literals,
# arithmetic expressions, plain variables, expressions over variables,
# concatenated strings, and finally values typed in by the user.
func1("Yes","Yaaaauzaaaa")
func1(10 + 2, 9 + 4)
a1 = 1000
a2 = 0.1
func1(a1, a2)
func1(a1 + 1, a2 + 12345)
b1 = "hey"
b2 = "bye"
func1(b1 + "bae", b2 + "BIIITCH")
func1(a1 * 100 + a2, a2)
# Python 2 raw_input returns the typed line as a str.
c1 = raw_input("variable1: ")
c2 = raw_input("variable2: ")
func1(c1, c2)
| [
"selimnowicki@Dr-Evil-MacBook-Pro.local"
] | selimnowicki@Dr-Evil-MacBook-Pro.local |
244972f15e90d08c549f5a61dbfdaa978f98430d | 241434deb3804a2a5253fcb2631b8aa09a906efe | /Prediction/LSTM_model.py | 60e3a6c7318ee9eec20f0b8cbf4be8819e32e87c | [] | no_license | JohnP-1/fpl-analystics-prediction | c3ed2255a203d76a01fafe6c79f8663003b973fe | d71c0743cde8e9bd7c534bc38612e9cdd5ea9b0b | refs/heads/main | 2023-03-17T12:56:22.706445 | 2021-03-04T07:14:54 | 2021-03-04T07:14:54 | 326,921,807 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,876 | py | import pandas as pd
import numpy as np
from preprocess import preprocess
import os.path as path
from models import LSTM
import matplotlib.pyplot as plt
import torch
import torch.nn as nn
import random
# Load the processed FPL database (../Processed/model_database.csv
# relative to this file's parent directory).
path_data = path.join(path.dirname(path.dirname(path.abspath(__file__))), 'Processed', 'model_database.csv')
data = pd.read_csv(path_data)
# NOTE(review): these seeds are defined but never passed to random /
# numpy / torch below, so runs are not actually reproducible -- confirm.
seed = 1000
seed_test = 100
data_preprocesser = preprocess()
# Hold out the 2020 season for out-of-sample evaluation; train on the
# remaining seasons, restricted to rows with minutes played and to
# players flagged element_type_3 (one positional group).
data_2020 = data[data['season']==2020]
data = data[data['season']!=2020]
data = data[data['minutes']!=0]
data = data[data['element_type_3']==1]
data_2020 = data_2020[data_2020['element_type_3']==1]
# Split players (by unique_id) into disjoint train / validation / test sets.
# Fix: the original truncated ``unique_ids`` to its first 80% *before*
# slicing out the test ids, so "test" (the last 20% of that 80%) overlapped
# "validation" (the last 30% of that 80%) -- data leakage -- and the final
# 20% of the shuffled ids was never used at all. Now: the last 20% of the
# shuffled ids is the test set, and the remaining 80% is split 70/30 into
# train / validation.
unique_ids = data['unique_id'].unique()
random.shuffle(unique_ids)
n_holdout = int(len(unique_ids) * 0.8)
unique_ids_test = unique_ids[n_holdout:]
unique_ids_trainval = unique_ids[:n_holdout]
n_train = int(len(unique_ids_trainval) * 0.7)
unique_ids_train = unique_ids_trainval[:n_train]
unique_ids_val = unique_ids_trainval[n_train:]

data_train = data[data['unique_id'].isin(unique_ids_train)]
data_val = data[data['unique_id'].isin(unique_ids_val)]
data_test = data[data['unique_id'].isin(unique_ids_test)]
# Column roles: one input feature (current total_points) and one target
# (points scored in the next gameweek).
unique_id_column = 'unique_id'
feature_columns = ['total_points']
response_columns = ['points_next']
# Raw feature / target / id arrays for each partition.
X_train = data_train[feature_columns].values
yy_train = data_train[response_columns].values
unique_id_train = data_train[unique_id_column]
X_val = data_val[feature_columns].values
yy_val = data_val[response_columns].values
unique_id_val = data_val[unique_id_column]
X_test = data_test[feature_columns].values
yy_test = data_test[response_columns].values
unique_id_test = data_test[unique_id_column]
# Fit the scaler on the training features only, then apply it to the
# validation and test features (no fitting on held-out data).
X_train, scaler = data_preprocesser.scale_features(X_train)
X_val = scaler.transform(X_val)
X_test = scaler.transform(X_test)
# Rebuild X_train as a DataFrame of [unique_id, scaled feature, target]
# so the training loop below can select one player's sequence at a time.
X_train = np.hstack((data_train['unique_id'].values.reshape((-1, 1)), X_train.reshape((-1, 1)), yy_train.reshape((-1, 1))))
columns_names = ['unique_id'] + feature_columns + response_columns
X_train = pd.DataFrame(X_train, columns=columns_names)
# Model hyperparameters.
# NOTE(review): H, D_out, n_layers, lr and batch_size are defined here but
# never used below -- the LSTM is built with hardcoded hidden_dim=50,
# n_layers=1 and lr=0.001. Confirm which set of values is intended.
D_in = 1
H = 50
D_out = 1
n_layers = 3
lr = 2.5e-5
batch_size = 500
# Single-layer LSTM on the GPU; .cuda() assumes a CUDA device is available.
model = LSTM(D_in, output_size=1, hidden_dim=50, n_layers=1, lr=0.001).cuda()
loss_function = nn.MSELoss()
optimizer = torch.optim.Adam(model.parameters(), lr=0.001)
epochs = 150
# Training loop: one optimizer step per player per epoch, treating each
# player's per-gameweek history as one sequence.
unique_ids = data_train['unique_id'].unique()
for i in range(epochs):
    for unique_id in unique_ids:
        # Scaled feature sequence and matching next-gameweek labels
        # for this player.
        seq = X_train[X_train['unique_id']==unique_id][feature_columns].values
        labels = X_train[X_train['unique_id']==unique_id][response_columns].values
        seq = torch.tensor(seq).float()
        seq = seq.to(model.device)
        labels = torch.tensor(labels).float()
        labels = labels.to(model.device)
        optimizer.zero_grad()
        # Reset the (hidden, cell) state before each player's sequence so
        # state does not leak between players.
        # NOTE(review): presumably models.LSTM.forward reads
        # model.hidden_cell -- verify; shape (1, 1, hidden_dim) implies
        # n_layers=1 and batch size 1.
        model.hidden_cell = (torch.zeros(1, 1, model.hidden_dim).to(model.device),
                        torch.zeros(1, 1, model.hidden_dim).to(model.device))
        # model.hidden_cell = model.hidden_cell.to(model.device)
        y_pred = model(seq)
        single_loss = loss_function(y_pred, labels)
        single_loss.backward()
        optimizer.step()
    # Periodic progress report (fires when i % 25 == 1, i.e. epochs
    # 1, 26, 51, ...); logs the loss of the last player processed.
    if i%25 == 1:
        print(f'epoch: {i:3} loss: {single_loss.item():10.8f}')
# Final loss after the last epoch.
print(f'epoch: {i:3} loss: {single_loss.item():10.10f}')
# model = FullyConectedNeuralNetwork(D_in ,H, D_out, n_layers, lr, batch_size, seed)
# model.fit(pc_scores[['PC1', 'PC2', 'PC3', 'PC4', 'PC5', 'PC6']].values, yy_train, pc_scores_val[['PC1', 'PC2', 'PC3', 'PC4', 'PC5', 'PC6']].values, yy_val, n_epochs=50000)
#
# X_2020 = data_2020[feature_columns].values
# X_2020 = scaler.transform(X_2020)
# yy_2020 = data_2020[response_columns].values
# unique_id_2020 = data_2020[unique_id_column].values
# round_2020 = data_2020['round'].values
#
# pc_scores_2020 = pca.transform(X_2020)
# pc_scores_2020 = pd.DataFrame(pc_scores_2020, columns=score_columns)
#
# yy_train_hat = (model.predict(pc_scores[['PC1', 'PC2', 'PC3', 'PC4', 'PC5', 'PC6']].values))
# yy_2020_hat = model.predict(pc_scores_2020[['PC1', 'PC2', 'PC3', 'PC4', 'PC5', 'PC6']].values)
#
# print(unique_id_2020.shape, data_2020['round'].values.reshape((-1, 1)).shape, X_2020.shape, yy_2020.shape, yy_2020_hat.shape)
#
# data_predictions = np.hstack((unique_id_2020.reshape((-1, 1)), round_2020.reshape((-1, 1)), X_2020, yy_2020, yy_2020_hat))
# data_predictions_columns = ['unique_id'] + ['round'] + feature_columns + response_columns + ['points_next_pred']
#
# data_predictions = pd.DataFrame(data_predictions, columns=data_predictions_columns)
#
# path_predictions = path.join(path.dirname(path.dirname(path.abspath(__file__))), 'Processed', 'predictions.csv')
# data_predictions.to_csv(path_predictions, index=False)
#
# RMSE = model.RMSE(yy_2020, yy_2020_hat)
# print(RMSE)
#
# plt.plot(yy_train, yy_train_hat, 'ob')
# plt.plot(yy_2020, yy_2020_hat, 'xr')
# plt.plot([0,30], [0,30], '--k')
# plt.grid()
# plt.xlabel('Actual Points (Next GW)')
# plt.ylabel('Predicted Points (Next GW)')
# plt.show()
| [
"j.e.palmer-1@sms.ed.ac.uk"
] | j.e.palmer-1@sms.ed.ac.uk |
1fd9d339fb8682ef8a6f21a25cc9fe2d23ae8ca3 | 09bcd2a342fc79a4a7c30e24a76788d90df2176d | /galleria/artists/migrations/0001_initial.py | f3a25b6873d47bb0d9d6a0437de97740b0176461 | [
"Apache-2.0"
] | permissive | kamalhg/galleria | 48b2ed5ef1931ee12b7247caf7e50caa167c88ff | 18ee38e99869812e61244d62652514d1c46bf3f3 | refs/heads/master | 2020-12-27T12:15:22.233386 | 2014-06-18T15:53:54 | 2014-06-18T15:53:54 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,628 | py | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Artist'
db.create_table('artists_artist', (
('created', self.gf('model_utils.fields.AutoCreatedField')(default=datetime.datetime.now)),
('modified', self.gf('model_utils.fields.AutoLastModifiedField')(default=datetime.datetime.now)),
('contact', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['contacts.Contact'], primary_key=True, unique=True)),
('gallery_id', self.gf('django.db.models.fields.IntegerField')()),
('biography', self.gf('django.db.models.fields.TextField')()),
('price', self.gf('django.db.models.fields.TextField')()),
('info', self.gf('django.db.models.fields.TextField')()),
('commission', self.gf('django.db.models.fields.DecimalField')(decimal_places=3, max_digits=4)),
))
db.send_create_signal('artists', ['Artist'])
def backwards(self, orm):
# Deleting model 'Artist'
db.delete_table('artists_artist')
models = {
'artists.artist': {
'Meta': {'object_name': 'Artist'},
'biography': ('django.db.models.fields.TextField', [], {}),
'commission': ('django.db.models.fields.DecimalField', [], {'decimal_places': '3', 'max_digits': '4'}),
'contact': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['contacts.Contact']", 'primary_key': 'True', 'unique': 'True'}),
'created': ('model_utils.fields.AutoCreatedField', [], {'default': 'datetime.datetime.now'}),
'gallery_id': ('django.db.models.fields.IntegerField', [], {}),
'info': ('django.db.models.fields.TextField', [], {}),
'modified': ('model_utils.fields.AutoLastModifiedField', [], {'default': 'datetime.datetime.now'}),
'price': ('django.db.models.fields.TextField', [], {})
},
'categories.category': {
'Meta': {'object_name': 'Category'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '40'})
},
'contacts.address': {
'Meta': {'object_name': 'Address'},
'city': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'contact': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contacts.Contact']"}),
'country': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'county': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'postcode': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'street': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'contacts.contact': {
'Meta': {'object_name': 'Contact'},
'addressed_as': ('django.db.models.fields.CharField', [], {'default': "'calculated'", 'max_length': '100'}),
'addressed_as_custom': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255', 'blank': 'True'}),
'categories': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'null': 'True', 'blank': 'True', 'to': "orm['categories.Category']"}),
'company': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100', 'blank': 'True'}),
'company_or_individual': ('django.db.models.fields.CharField', [], {'default': "'individual'", 'max_length': '10'}),
'created': ('model_utils.fields.AutoCreatedField', [], {'default': 'datetime.datetime.now'}),
'department': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'job_title': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100', 'blank': 'True'}),
'main_address': ('django.db.models.fields.related.ForeignKey', [], {'null': 'True', 'to': "orm['contacts.Address']", 'blank': 'True', 'related_name': "'main_address'"}),
'main_phonenumber': ('django.db.models.fields.related.ForeignKey', [], {'null': 'True', 'to': "orm['contacts.PhoneNumber']", 'blank': 'True', 'related_name': "'main_phonenumber'"}),
'migration_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'modified': ('model_utils.fields.AutoLastModifiedField', [], {'default': 'datetime.datetime.now'}),
'name_first': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100', 'blank': 'True'}),
'name_last': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100', 'blank': 'True'}),
'name_middle': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100', 'blank': 'True'}),
'reference': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255', 'blank': 'True'}),
'suffix': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100', 'blank': 'True'}),
'type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contacts.ContactType']"})
},
'contacts.contacttype': {
'Meta': {'object_name': 'ContactType'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '30'})
},
'contacts.phonenumber': {
'Meta': {'object_name': 'PhoneNumber'},
'contact': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contacts.Contact']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'number': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
complete_apps = ['artists']
| [
"hum3@drummond.info"
] | hum3@drummond.info |
a0d0b8457024d2982d5052c463bd38f342cf93d4 | e18f0a32703fbe841d27c8a0e55eca9b9ab39cce | /run.py | c3e6569870cc01a9823fac62025ce182b58ea349 | [
"Apache-2.0"
] | permissive | qybing/tf-pose-estimation | 302550e74d457edea178b8e36a9cd58c1cbe89e8 | 9adc3d4bf1c87fba4df977b83cee8e656882fe15 | refs/heads/master | 2023-04-10T08:59:08.778691 | 2019-06-19T06:11:49 | 2019-06-19T06:11:49 | 189,166,320 | 0 | 0 | Apache-2.0 | 2023-03-25T00:06:18 | 2019-05-29T06:44:17 | PureBasic | UTF-8 | Python | false | false | 3,518 | py | import argparse
import logging
import sys
import time
from tf_pose import common
import cv2
import numpy as np
from tf_pose.estimator import TfPoseEstimator
from tf_pose.networks import get_graph_path, model_wh
# Console logger for this script: DEBUG level with a timestamped format.
# Any handlers inherited from a previous configuration are cleared first.
logger = logging.getLogger('TfPoseEstimatorRun')
logger.handlers.clear()
logger.setLevel(logging.DEBUG)
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
formatter = logging.Formatter('[%(asctime)s] [%(name)s] [%(levelname)s] %(message)s')
ch.setFormatter(formatter)
logger.addHandler(ch)

if __name__ == '__main__':
    # Command-line interface: input image, model variant, and resize options.
    parser = argparse.ArgumentParser(description='tf-pose-estimation run')

    parser.add_argument('--image', type=str, default='./images/p1.jpg')
    parser.add_argument('--model', type=str, default='mobilenet_thin',
                        help='cmu / mobilenet_thin / mobilenet_v2_large / mobilenet_v2_small')
    parser.add_argument('--resize', type=str, default='0x0',
                        help='if provided, resize images before they are processed. '
                             'default=0x0, Recommends : 432x368 or 656x368 or 1312x736 ')
    parser.add_argument('--resize-out-ratio', type=float, default=4.0,
                        help='if provided, resize heatmaps before they are post-processed. default=1.0')

    args = parser.parse_args()

    w, h = model_wh(args.resize)
    # '0x0' (the default) parses to (0, 0); fall back to the model's native
    # 432x368 input size in that case.
    if w == 0 or h == 0:
        e = TfPoseEstimator(get_graph_path(args.model), target_size=(432, 368))
    else:
        e = TfPoseEstimator(get_graph_path(args.model), target_size=(w, h))

    # estimate human poses from a single image !
    image = common.read_imgfile(args.image, None, None)
    if image is None:
        logger.error('Image can not be read, path=%s' % args.image)
        sys.exit(-1)

    # Time the inference pass only.
    t = time.time()
    humans = e.inference(image, resize_to_default=(w > 0 and h > 0), upsample_size=args.resize_out_ratio)
    elapsed = time.time() - t

    logger.info('inference image: %s in %.4f seconds.' % (args.image, elapsed))

    # Draw the detected skeletons on top of the input image (in place).
    image = TfPoseEstimator.draw_humans(image, humans, imgcopy=False)

    # Visualisation is best-effort: if matplotlib is unavailable or fails,
    # we still fall through to the plain OpenCV window below.
    try:
        import matplotlib.pyplot as plt

        fig = plt.figure()
        a = fig.add_subplot(2, 2, 1)
        a.set_title('Result')
        plt.imshow(cv2.cvtColor(image, cv2.COLOR_BGR2RGB))

        bgimg = cv2.cvtColor(image.astype(np.uint8), cv2.COLOR_BGR2RGB)
        bgimg = cv2.resize(bgimg, (e.heatMat.shape[1], e.heatMat.shape[0]), interpolation=cv2.INTER_AREA)

        # show network output
        a = fig.add_subplot(2, 2, 2)
        plt.imshow(bgimg, alpha=0.5)
        # Max over all part channels except the last (background) channel.
        tmp = np.amax(e.heatMat[:, :, :-1], axis=2)
        plt.imshow(tmp, cmap=plt.cm.gray, alpha=0.5)
        plt.colorbar()

        # Part-affinity fields: even channels are x components, odd are y.
        tmp2 = e.pafMat.transpose((2, 0, 1))
        tmp2_odd = np.amax(np.absolute(tmp2[::2, :, :]), axis=0)
        tmp2_even = np.amax(np.absolute(tmp2[1::2, :, :]), axis=0)

        a = fig.add_subplot(2, 2, 3)
        a.set_title('Vectormap-x')
        # plt.imshow(CocoPose.get_bgimg(inp, target_size=(vectmap.shape[1], vectmap.shape[0])), alpha=0.5)
        plt.imshow(tmp2_odd, cmap=plt.cm.gray, alpha=0.5)
        plt.colorbar()

        a = fig.add_subplot(2, 2, 4)
        a.set_title('Vectormap-y')
        # plt.imshow(CocoPose.get_bgimg(inp, target_size=(vectmap.shape[1], vectmap.shape[0])), alpha=0.5)
        plt.imshow(tmp2_even, cmap=plt.cm.gray, alpha=0.5)
        plt.colorbar()
        plt.show()
    except Exception as e:
        logger.warning('matplitlib error, %s' % e)

    # Always show the annotated image in an OpenCV window as well.
    cv2.imshow('result', image)
    cv2.waitKey()
| [
"qiaoyanbing1@163.com"
] | qiaoyanbing1@163.com |
9f5ac57a3446426f8ff756599e9780e245ee3da3 | dd80d6c017cab703fd0cfcab4a8ff745095b42de | /329-longest-increasing-path-in-a-matrix.py | ba3c37c48fab049566ae457ff2ff97351b40ae3e | [] | no_license | dingyaguang117/LeetCode | d2b42e87c35250f0e6bed3af49ec9e077196fff4 | b5684499584a6f5006af907b1225455ea58c1104 | refs/heads/master | 2021-09-12T15:02:56.194575 | 2021-08-20T02:15:35 | 2021-08-20T02:15:35 | 21,963,997 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,086 | py | '''
递归+记忆化, DP
'''
class Solution(object):
    """LeetCode 329: length of the longest strictly increasing path in a matrix.

    Depth-first search from every cell, memoized in ``self.dp`` so each cell
    is solved exactly once -> O(m*n) time and O(m*n) extra space.
    """

    def calc(self, i, j):
        """Return the length of the longest increasing path starting at (i, j).

        ``self.dp[i][j]`` caches the answer; 0 means "not computed yet"
        (every real answer is >= 1, so 0 is a safe sentinel).
        """
        if self.dp[i][j]:
            return self.dp[i][j]
        value = self.matrix[i][j]
        ans = 0
        # Recurse into each 4-neighbour holding a strictly smaller value.
        if i > 0 and self.matrix[i - 1][j] < value:
            ans = max(ans, self.calc(i - 1, j))
        if j > 0 and self.matrix[i][j - 1] < value:
            ans = max(ans, self.calc(i, j - 1))
        if i < self.m - 1 and self.matrix[i + 1][j] < value:
            ans = max(ans, self.calc(i + 1, j))
        if j < self.n - 1 and self.matrix[i][j + 1] < value:
            ans = max(ans, self.calc(i, j + 1))
        self.dp[i][j] = ans + 1
        return self.dp[i][j]

    def longestIncreasingPath(self, matrix):
        """
        :type matrix: List[List[int]]
        :rtype: int
        """
        if len(matrix) == 0 or len(matrix[0]) == 0:
            return 0
        self.matrix = matrix
        self.m, self.n = len(matrix), len(matrix[0])
        # range() instead of the Python-2-only xrange(): xrange raises
        # NameError under Python 3, and iteration semantics are identical.
        self.dp = [[0] * self.n for i in range(self.m)]
        ans = 0
        for i in range(self.m):
            for j in range(self.n):
                ans = max(ans, self.calc(i, j))
        return ans
if __name__ == '__main__':
    # Small self-check when run as a script; expected output: 4 (3->4->5->6).
    s = Solution()
    nums = [
        [3, 4, 5],
        [3, 2, 6],
        [2, 2, 1],
    ]
    # print() call form works on both Python 2 and Python 3; the original
    # bare `print` statement is a SyntaxError under Python 3.
    print(s.longestIncreasingPath(nums))
"dingyaguang117@gmail.com"
] | dingyaguang117@gmail.com |
8384c25f0bd13dd9db7d550b2dcd676408c0460e | d5cc5b93483f3e13b13cb0580a1994583c70ebb8 | /tmuxp/testsuite/test_config_teamocil.py | 184d205016b1244ea37d8ce4677b4cf7cdbec981 | [
"BSD-3-Clause"
] | permissive | GoodDingo/tmuxp | b07293e3090760283f7e733fd538410f36f8bea7 | 23594fdae5473aaa31c33dae64ace59001847f9e | refs/heads/master | 2020-12-11T07:38:36.346464 | 2013-12-25T03:29:06 | 2013-12-25T03:29:06 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 20,985 | py | # -*- coding: utf-8 -*-
"""Test for tmuxp teamocil configuration..
tmuxp.tests.test_config_teamocil
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: Copyright 2013 Tony Narlock.
:license: BSD, see LICENSE for details
"""
from __future__ import absolute_import, division, print_function, \
with_statement, unicode_literals
import os
import logging
import kaptan
from .. import config, exc
from ..util import tmux
from .helpers import TestCase
# Module-level logger for these tests.
logger = logging.getLogger(__name__)

# Path of the '.tmuxp' directory that sits next to this test module.
TMUXP_DIR = os.path.join(os.path.dirname(__file__), '.tmuxp')
class TeamocilTest(TestCase):

    """Round-trip a minimal two-pane teamocil window into tmuxp format."""

    teamocil_yaml = """\
windows:
- name: "sample-two-panes"
  root: "~/Code/sample/www"
  layout: even-horizontal
  panes:
  - cmd: ["pwd", "ls -la"]
  - cmd: "rails server --port 3000"
"""

    # What the YAML above must parse to.
    teamocil_dict = {
        'windows': [{
            'name': 'sample-two-panes',
            'root': '~/Code/sample/www',
            'layout': 'even-horizontal',
            'panes': [
                {'cmd': ['pwd', 'ls -la']},
                {'cmd': 'rails server --port 3000'},
            ],
        }]
    }

    # What config.import_teamocil() must produce from teamocil_dict.
    tmuxp_dict = {
        'session_name': None,
        'windows': [
            {
                'window_name': 'sample-two-panes',
                'layout': 'even-horizontal',
                'start_directory': '~/Code/sample/www',
                'panes': [
                    {'shell_command': ['pwd', 'ls -la']},
                    {'shell_command': 'rails server --port 3000'},
                ],
            }
        ],
    }

    def test_config_to_dict(self):
        """YAML parses to teamocil_dict; import converts it to tmuxp_dict."""
        parser = kaptan.Kaptan(handler='yaml')
        loaded = parser.import_config(self.teamocil_yaml).get()
        self.assertDictEqual(loaded, self.teamocil_dict)

        converted = config.import_teamocil(self.teamocil_dict)
        self.assertDictEqual(converted, self.tmuxp_dict)
        # The converted config must also satisfy tmuxp's schema.
        config.validate_schema(converted)
class Teamocil2Test(TestCase):

    """Round-trip a four-pane tiled teamocil window into tmuxp format."""

    teamocil_yaml = """\
windows:
- name: "sample-four-panes"
  root: "~/Code/sample/www"
  layout: tiled
  panes:
  - cmd: "pwd"
  - cmd: "pwd"
  - cmd: "pwd"
  - cmd: "pwd"
"""

    # What the YAML above must parse to.
    teamocil_dict = {
        'windows': [{
            'name': 'sample-four-panes',
            'root': '~/Code/sample/www',
            'layout': 'tiled',
            'panes': [
                {'cmd': 'pwd'},
                {'cmd': 'pwd'},
                {'cmd': 'pwd'},
                {'cmd': 'pwd'},
            ],
        }]
    }

    # What config.import_teamocil() must produce from teamocil_dict.
    tmuxp_dict = {
        'session_name': None,
        'windows': [
            {
                'window_name': 'sample-four-panes',
                'layout': 'tiled',
                'start_directory': '~/Code/sample/www',
                'panes': [
                    {'shell_command': 'pwd'},
                    {'shell_command': 'pwd'},
                    {'shell_command': 'pwd'},
                    {'shell_command': 'pwd'},
                ],
            }
        ],
    }

    def test_config_to_dict(self):
        """YAML parses to teamocil_dict; import converts it to tmuxp_dict."""
        parser = kaptan.Kaptan(handler='yaml')
        loaded = parser.import_config(self.teamocil_yaml).get()
        self.assertDictEqual(loaded, self.teamocil_dict)

        converted = config.import_teamocil(self.teamocil_dict)
        self.assertDictEqual(converted, self.tmuxp_dict)
        config.validate_schema(converted)
class Teamocil3Test(TestCase):

    """Teamocil window with before/after filters, focus, and a command list."""

    teamocil_yaml = """\
windows:
- name: "my-first-window"
  root: "~/Projects/foo-www"
  layout: even-vertical
  filters:
    before: "rbenv local 2.0.0-p0"
    after: "echo 'I am done initializing this pane.'"
  panes:
  - cmd: "git status"
  - cmd: "bundle exec rails server --port 4000"
    focus: true
  - cmd:
    - "sudo service memcached start"
    - "sudo service mongodb start"
"""

    # What the YAML above must parse to.
    teamocil_dict = {
        'windows': [{
            'name': 'my-first-window',
            'root': '~/Projects/foo-www',
            'layout': 'even-vertical',
            'filters': {
                'before': 'rbenv local 2.0.0-p0',
                'after': "echo 'I am done initializing this pane.'",
            },
            'panes': [
                {'cmd': 'git status'},
                {'cmd': 'bundle exec rails server --port 4000',
                 'focus': True},
                {'cmd': ['sudo service memcached start',
                         'sudo service mongodb start']},
            ],
        }]
    }

    # What config.import_teamocil() must produce: the teamocil filters map
    # onto tmuxp's shell_command_before / shell_command_after.
    tmuxp_dict = {
        'session_name': None,
        'windows': [
            {
                'window_name': 'my-first-window',
                'layout': 'even-vertical',
                'start_directory': '~/Projects/foo-www',
                'shell_command_before': 'rbenv local 2.0.0-p0',
                'shell_command_after': (
                    "echo 'I am done initializing this pane.'"
                ),
                'panes': [
                    {'shell_command': 'git status'},
                    {'shell_command': 'bundle exec rails server --port 4000',
                     'focus': True},
                    {'shell_command': ['sudo service memcached start',
                                       'sudo service mongodb start']},
                ],
            }
        ],
    }

    def test_config_to_dict(self):
        """YAML parses to teamocil_dict; import converts it to tmuxp_dict."""
        self.maxDiff = None
        parser = kaptan.Kaptan(handler='yaml')
        loaded = parser.import_config(self.teamocil_yaml).get()
        self.assertDictEqual(loaded, self.teamocil_dict)

        converted = config.import_teamocil(self.teamocil_dict)
        self.assertDictEqual(converted, self.tmuxp_dict)
        config.validate_schema(converted)
class Teamocil4Test(TestCase):

    """Teamocil window whose root is an (unexpanded) ERB template string."""

    teamocil_yaml = """\
windows:
- name: "erb-example"
  root: <%= ENV['MY_PROJECT_ROOT'] %>
  panes:
  - cmd: "pwd"
"""

    # The ERB placeholder is carried through verbatim as a plain string.
    teamocil_dict = {
        'windows': [{
            'name': 'erb-example',
            'root': "<%= ENV['MY_PROJECT_ROOT'] %>",
            'panes': [
                {'cmd': 'pwd'},
            ],
        }]
    }

    tmuxp_dict = {
        'session_name': None,
        'windows': [
            {
                'window_name': 'erb-example',
                'start_directory': "<%= ENV['MY_PROJECT_ROOT'] %>",
                'panes': [
                    {'shell_command': 'pwd'},
                ],
            }
        ],
    }

    def test_config_to_dict(self):
        """YAML parses to teamocil_dict; import converts it to tmuxp_dict."""
        self.maxDiff = None
        parser = kaptan.Kaptan(handler='yaml')
        loaded = parser.import_config(self.teamocil_yaml).get()
        self.assertDictEqual(loaded, self.teamocil_dict)

        converted = config.import_teamocil(self.teamocil_dict)
        self.assertDictEqual(converted, self.tmuxp_dict)
        config.validate_schema(converted)
class TeamocilLayoutsTest(TestCase):

    """Import configurations from teamocil's <fixtures/layout.yml>.

    https://github.com/remiprev/teamocil/blob/master/spec/fixtures/layouts.yml
    LICENSE: https://github.com/remiprev/teamocil/blob/master/LICENSE
    """

    # NOTE: this is a *non-raw* triple-quoted string, so the "\n" under
    # two-windows-with-custom-command-options renders as a real line break
    # inside a double-quoted YAML scalar; YAML line folding turns that break
    # into a single space, which is why the expected dict below holds
    # 'cmd_separator': ' ' for that window.
    teamocil_yaml = """\
# Simple two windows layout
two-windows:
  windows:
    - name: "foo"
      clear: true
      root: "/foo"
      layout: "tiled"
      panes:
        - cmd: "echo 'foo'"
        - cmd: "echo 'foo again'"
    - name: "bar"
      root: "/bar"
      splits:
        - cmd:
            - "echo 'bar'"
            - "echo 'bar in an array'"
          target: bottom-right
        - cmd: "echo 'bar again'"
          focus: true
          width: 50

# Simple two windows layout with filters
two-windows-with-filters:
  windows:
    - name: "foo"
      root: "/foo"
      filters:
        before:
          - "echo first before filter"
          - "echo second before filter"
        after:
          - "echo first after filter"
          - "echo second after filter"
      panes:
        - cmd: "echo 'foo'"
        - cmd: "echo 'foo again'"
          width: 50

two-windows-with-custom-command-options:
  windows:
    - name: "foo"
      cmd_separator: "\n"
      with_env_var: false
      clear: true
      root: "/foo"
      layout: "tiled"
      panes:
        - cmd: "echo 'foo'"
        - cmd: "echo 'foo again'"
    - name: "bar"
      cmd_separator: " && "
      with_env_var: true
      root: "/bar"
      splits:
        - cmd:
            - "echo 'bar'"
            - "echo 'bar in an array'"
          target: bottom-right
        - cmd: "echo 'bar again'"
          focus: true
          width: 50

three-windows-within-a-session:
  session:
    name: "my awesome session"
    windows:
      - name: "first window"
        panes:
          - cmd: "echo 'foo'"
      - name: "second window"
        panes:
          - cmd: "echo 'foo'"
      - name: "third window"
        panes:
          - cmd: "echo 'foo'"
"""

    # What the YAML above must parse to, keyed by fixture name.
    teamocil_dict = {
        'two-windows': {
            'windows': [
                {
                    'name': 'foo',
                    'clear': True,
                    'root': '/foo',
                    'layout': 'tiled',
                    'panes': [
                        {'cmd': "echo 'foo'"},
                        {'cmd': "echo 'foo again'"},
                    ],
                },
                {
                    'name': 'bar',
                    'root': '/bar',
                    'splits': [
                        {
                            'cmd': ["echo 'bar'", "echo 'bar in an array'"],
                            'target': 'bottom-right',
                        },
                        {
                            'cmd': "echo 'bar again'",
                            'focus': True,
                            'width': 50,
                        },
                    ],
                },
            ]
        },

        'two-windows-with-filters': {
            'windows': [
                {
                    'name': 'foo',
                    'root': '/foo',
                    'filters': {
                        'before': [
                            'echo first before filter',
                            'echo second before filter',
                        ],
                        'after': [
                            'echo first after filter',
                            'echo second after filter',
                        ],
                    },
                    'panes': [
                        {'cmd': "echo 'foo'"},
                        {'cmd': "echo 'foo again'", 'width': 50},
                    ],
                }
            ]
        },

        'two-windows-with-custom-command-options': {
            'windows': [
                {
                    'name': 'foo',
                    # "\n" in the YAML source folds to a single space.
                    'cmd_separator': ' ',
                    'with_env_var': False,
                    'clear': True,
                    'root': '/foo',
                    'layout': 'tiled',
                    'panes': [
                        {'cmd': "echo 'foo'"},
                        {'cmd': "echo 'foo again'"},
                    ],
                },
                {
                    'name': 'bar',
                    'cmd_separator': ' && ',
                    'with_env_var': True,
                    'root': '/bar',
                    'splits': [
                        {
                            'cmd': ["echo 'bar'", "echo 'bar in an array'"],
                            'target': 'bottom-right',
                        },
                        {
                            'cmd': "echo 'bar again'",
                            'focus': True,
                            'width': 50,
                        },
                    ],
                },
            ]
        },

        'three-windows-within-a-session': {
            'session': {
                'name': 'my awesome session',
                'windows': [
                    {'name': 'first window',
                     'panes': [{'cmd': "echo 'foo'"}]},
                    {'name': 'second window',
                     'panes': [{'cmd': "echo 'foo'"}]},
                    {'name': 'third window',
                     'panes': [{'cmd': "echo 'foo'"}]},
                ],
            }
        },
    }

    # Expected tmuxp conversions, one attribute per fixture.
    two_windows = {
        'session_name': None,
        'windows': [
            {
                'window_name': 'foo',
                'start_directory': '/foo',
                'clear': True,
                'layout': 'tiled',
                'panes': [
                    {'shell_command': "echo 'foo'"},
                    {'shell_command': "echo 'foo again'"},
                ],
            },
            {
                'window_name': 'bar',
                'start_directory': '/bar',
                'panes': [
                    {
                        'shell_command': ["echo 'bar'",
                                          "echo 'bar in an array'"],
                        'target': 'bottom-right',
                    },
                    {
                        'shell_command': "echo 'bar again'",
                        'focus': True,
                    },
                ],
            },
        ],
    }

    two_windows_with_filters = {
        'session_name': None,
        'windows': [
            {
                'window_name': 'foo',
                'start_directory': '/foo',
                'shell_command_before': [
                    'echo first before filter',
                    'echo second before filter',
                ],
                'shell_command_after': [
                    'echo first after filter',
                    'echo second after filter',
                ],
                'panes': [
                    {'shell_command': "echo 'foo'"},
                    {'shell_command': "echo 'foo again'"},
                ],
            }
        ],
    }

    two_windows_with_custom_command_options = {
        'session_name': None,
        'windows': [
            {
                'window_name': 'foo',
                'start_directory': '/foo',
                'clear': True,
                'layout': 'tiled',
                'panes': [
                    {'shell_command': "echo 'foo'"},
                    {'shell_command': "echo 'foo again'"},
                ],
            },
            {
                'window_name': 'bar',
                'start_directory': '/bar',
                'panes': [
                    {
                        'shell_command': ["echo 'bar'",
                                          "echo 'bar in an array'"],
                        'target': 'bottom-right',
                    },
                    {
                        'shell_command': "echo 'bar again'",
                        'focus': True,
                    },
                ],
            },
        ],
    }

    three_windows_within_a_session = {
        'session_name': 'my awesome session',
        'windows': [
            {'window_name': 'first window',
             'panes': [{'shell_command': "echo 'foo'"}]},
            {'window_name': 'second window',
             'panes': [{'shell_command': "echo 'foo'"}]},
            {'window_name': 'third window',
             'panes': [{'shell_command': "echo 'foo'"}]},
        ],
    }

    def test_config_to_dict(self):
        """Each fixture parses to its dict and imports to tmuxp's format."""
        self.maxDiff = None
        parser = kaptan.Kaptan(handler='yaml')
        loaded = parser.import_config(self.teamocil_yaml).get()
        self.assertDictEqual(loaded, self.teamocil_dict)

        fixtures = [
            ('two-windows', self.two_windows),
            ('two-windows-with-filters', self.two_windows_with_filters),
            ('two-windows-with-custom-command-options',
             self.two_windows_with_custom_command_options),
            ('three-windows-within-a-session',
             self.three_windows_within_a_session),
        ]
        for key, expected in fixtures:
            converted = config.import_teamocil(self.teamocil_dict[key])
            self.assertDictEqual(converted, expected)
            config.validate_schema(converted)

        # The last fixture holds multiple sessions in a single file; tmuxp
        # could split them into separate files -- proceed?
| [
"tony@git-pull.com"
] | tony@git-pull.com |
fd94d32ab8a50d496a3803833a4fbfee21a3d4a8 | 2ba6ca934f24bb388bccdb7c6c8af0acb0e50a7a | /Gamle scripts/old tests/test10.py | f6757d4dc11c4a4b4522a7e5340648080a3db440 | [
"MIT"
] | permissive | MadsAW/machine-learning-on-materials | 6fdb71baee94a8cb951dc4e2632950218454c4bc | 6101c7e3d12be54b12391c78442294198a39cc9b | refs/heads/master | 2020-03-28T20:40:25.595479 | 2019-01-28T18:58:12 | 2019-01-28T18:58:12 | 149,092,384 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,770 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Sep 26 11:57:35 2018
@author: Simon
"""
import os
from createLargerFeatureMatrix import simpleLargeMatrix
import pickle
from keras.models import Sequential
from keras.layers import Dense, Dropout
from keras import regularizers
import numpy as np
# Location of the pre-split training data produced by the preprocessing step.
path = "Saved matrices/11-10-2018 11.36/sorted_Cutoff25_noSingleElementKrystals/"
featureMatrixFile = "train_featureMatrix.npy"
atomicSymbolsListFile = "train_pickledAtomicSymbolsList.txt"
energiesFile = "train_pickledEnergies.txt"

# Build the per-crystal feature tensor plus the atomic-number mapping.
largeFeatureMatrix, mappedAtomicNumber = simpleLargeMatrix(path,featureMatrixFile, atomicSymbolsListFile)

# Target energies, pickled as a plain Python list.
with open(path+energiesFile, "rb") as pickleFile:
    energies = pickle.load(pickleFile)

# Flatten each sample into a 1-D feature vector for the dense network.
largeFeatureMatrix.shape = (largeFeatureMatrix.shape[0], -1)

X = largeFeatureMatrix
Y = np.array(energies)

# Dropout rate shared by both hidden layers.  (The original comment said
# "Lambda regularization penalty" -- a leftover from an L2-regularized
# variant of this script; `drop` is passed to Dropout below.)
drop = 0.2

#Create model
model = Sequential()
inputShape = np.shape(X)[1:]
model.add(Dense(800, input_shape=inputShape, activation='relu'))
model.add(Dropout(drop))
model.add(Dense(400, activation='relu'))
model.add(Dropout(drop))
model.add(Dense(1))

#Compile model
model.compile(loss='mse', optimizer='adam', metrics=["mse"])
print(model.summary())

#Fit the model. This is where the hard computing happens.
#Number of epochs is number of iterations through dataset
#Batch size is number of iterations before weights are changed.
model.fit(X, Y, epochs=40, batch_size=50)

#Evaluate model efficiency
scores = model.evaluate(X, Y)
print("\n%s: %.2f eV" % (model.metrics_names[1], scores[1]))

#Make predictions
predictions = model.predict(X)

# Save weights next to the script, named after the script file itself.
model.save_weights(os.path.basename(__file__)[:-3]+"weights")

# Root-mean-square error over the training set.
a=0
for i in range(len(predictions)):
    a+=(energies[i]-predictions[i])**2
rmse=np.sqrt(a/len(energies))

print("RMSE on training data "+str(rmse))

#Load validation set
featureMatrixFileValidate = "validate_featureMatrix.npy"
atomicSymbolsListFileValidate = "validate_pickledAtomicSymbolsList.txt"
energiesFileValidate = "validate_pickledEnergies.txt"

largeFeatureMatrixValidate, mappedAtomicNumberValidate = simpleLargeMatrix(path,featureMatrixFileValidate, atomicSymbolsListFileValidate)

with open(path+energiesFileValidate, "rb") as pickleFile:
    energiesValidate = pickle.load(pickleFile)

largeFeatureMatrixValidate.shape = (largeFeatureMatrixValidate.shape[0], -1)

X_v = largeFeatureMatrixValidate
Y_v = np.array(energiesValidate)

#Make predictions on validation set
predictionsValidate = model.predict(X_v)

# Root-mean-square error over the validation set.
a=0
for i in range(len(predictionsValidate)):
    a+=(energiesValidate[i]-predictionsValidate[i])**2
rmseValidate=np.sqrt(a/len(energiesValidate))

print("RMSE on validation data "+str(rmseValidate))
print("DONE")
"simon.kamuk@gmail.com"
] | simon.kamuk@gmail.com |
0d67648812aee24d60a3a8ce41b2b18c527f6a6d | b36d6c2066c7b34386d2575a435ee6ecb5cfbb9b | /live_bug.py | ba07a18512443ab04543e16a5d60ed2f3425c188 | [] | no_license | kingkonghon/bug | 3456418fead893c4c1e75a693cbd9649b98f55b5 | 0a2dd43580162544910861c09529e2cbe41fee6e | refs/heads/master | 2021-01-13T05:25:13.033973 | 2017-03-17T07:29:57 | 2017-03-17T07:29:57 | 81,426,027 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 14,951 | py | import datetime as dt
import urllib2
from bs4 import BeautifulSoup
import pandas as pd
import time
import csv
import MySQLdb
from sqlalchemy import create_engine
import logging
def parsePage(url):
    """Fetch the ForexFactory calendar page and parse it into a DataFrame.

    Retries the download every 10 seconds until it succeeds, then walks the
    calendar table rows (<tr data-eventid=...>) and collects one record per
    event.

    Parameters
    ----------
    url : str
        The calendar page URL.

    Returns
    -------
    pandas.DataFrame
        Columns: webid, time, event, currency, impact, actual, forecast,
        previous.  ``time`` is formatted '%Y.%m.%d %H:%M' and shifted by
        +4 hours (site timezone -> target timezone).
    """
    # Keep retrying until the page downloads and parses without error.
    while True:
        try:
            r = urllib2.Request(url)
            page = urllib2.urlopen(r, data=None, timeout=10)
            soup = BeautifulSoup(page, 'lxml')
            break
        except Exception as e:
            logging.warning('exception(parse):{0}'.format(str(e)))
            time.sleep(10)
    page.close()

    pre_id_str = ''
    time_current = dt.datetime.now()
    # The calendar shows the current week without a year; derive the year
    # from the week's first day so a week spanning New Year starts in the
    # old year (the 'Jan 1' check below then bumps it).
    time_delta = dt.timedelta(days=time_current.weekday() + 1)
    year = (time_current - time_delta).year
    # be aware of the switch between summer and winter time
    time_delta_for_timezone = dt.timedelta(hours=4)

    temp_buffer = {}
    event_num = 0
    df = pd.DataFrame(columns=['webid', 'time', 'event', 'currency', 'impact', 'actual', 'forecast', 'previous'])

    for event_id in soup.find_all('tr'):
        id_str = event_id.get('data-eventid')
        # Consecutive rows can repeat an event id; only the first counts.
        if id_str == pre_id_str:
            continue
        if id_str is not None:
            pre_id_str = id_str
            try:
                temp_buffer['webid'] = int(id_str)
            except ValueError:
                continue
            for child in event_id.children:
                try:
                    # The third CSS class of each cell names the field.
                    if child['class'][2] == 'detail':
                        pass
                    elif child['class'][2] == 'graph':
                        pass
                    elif child['class'][2] == 'date':
                        span = child.span
                        if span is not None:
                            temp_date_str = span.span.string
                            if temp_date_str is not None:
                                # New date cell: remember it for following
                                # rows; bump the year at the New Year rollover.
                                if temp_date_str == 'Jan 1':
                                    year += 1
                                date_str = temp_date_str
                    elif child['class'][2] == 'time':
                        if child.span is None:
                            temp_time_str = child.string
                        else:
                            temp_time_str = child.span.string
                        if temp_time_str is not None:
                            # Entries without ':' (e.g. 'All Day') get midnight.
                            if temp_time_str.find(':') == -1:
                                temp_dt = dt.datetime.strptime('%s %d' % (date_str, year), '%b %d %Y')
                            else:
                                temp_dt = dt.datetime.strptime('%s %d %s' % (date_str, year, temp_time_str), '%b %d %Y %I:%M%p')
                            temp_buffer[child['class'][2]] = dt.datetime.strftime(temp_dt + time_delta_for_timezone, '%Y.%m.%d %H:%M')
                    elif child['class'][2] == 'impact':
                        span = child.span
                        if span is None:
                            temp_buffer[child['class'][2]] = None
                        else:
                            temp_buffer[child['class'][2]] = span['class'][0]
                    elif child['class'][2] == 'event':
                        div = child.div
                        if div is None:
                            temp_buffer[child['class'][2]] = None
                        else:
                            temp_buffer[child['class'][2]] = div.span.string
                    else:
                        temp_buffer[child['class'][2]] = child.string
                # BUG FIX: the original `except TypeError, KeyError:` is
                # Python-2 syntax for `except TypeError as KeyError` -- it
                # caught only TypeError and shadowed the KeyError builtin.
                # Text children raise TypeError (not subscriptable) and
                # class-less tags raise KeyError; skip both.
                except (TypeError, KeyError):
                    continue
            df.loc[event_num] = temp_buffer
            event_num += 1
    return df
def inspectEvent(df):
    """Select the next upcoming calendar events that publish a forecast.

    ``df`` holds site-timezone time strings ('%Y.%m.%d %H:%M'); "now" is
    taken as local time minus 8 hours to match them.

    Returns a tuple ``(local_time, events)`` where ``local_time`` is the
    next event's timestamp converted back to local time (a datetime) and
    ``events`` is the DataFrame slice of every event at that instant.
    When no future event exists (an unknown network error can cause an
    empty calendar) a warning is logged and (1970-01-01, empty frame) is
    returned.
    """
    local_offset = dt.timedelta(hours=8)
    now = dt.datetime.now() - local_offset
    now_str = dt.datetime.strftime(now, '%Y.%m.%d %H:%M')

    # Keep only rows that carry a forecast, then only future timestamps.
    # (The '%Y.%m.%d %H:%M' format makes string comparison chronological.)
    has_forecast = df['forecast'].map(lambda x: x is not None)
    upcoming = df[has_forecast]
    upcoming = upcoming.loc[lambda d: d.time > now_str]

    if upcoming.empty:
        # unknown network error, causing no future event
        logging.warning('calander error: no future event')
        return dt.datetime(1970, 1, 1), upcoming

    nearest = upcoming.iloc[0]['time']
    upcoming = upcoming.loc[lambda d: d.time == nearest]
    local_time = dt.datetime.strptime(nearest, '%Y.%m.%d %H:%M') + local_offset
    return local_time, upcoming
def findActual(df_events, df_result, url, datebase_ip, table_name):
    """Re-fetch the calendar page and harvest 'actual' values for df_events.

    For every row of ``df_events`` found on the page, the scraped actual
    value is written to the database, to the in-memory calendar
    (``df_events``), to ``df_result`` and appended to a CSV log.

    Returns
    -------
    int
        -1 : web content changed (event name no longer matches)
         0 : at least one requested actual value is not published yet
         1 : actual values found for all requested events
    """
    # Keep retrying until the page downloads and parses without error.
    while True:
        try:
            r = urllib2.Request(url)
            page = urllib2.urlopen(r, data=None, timeout=10)
            soup = BeautifulSoup(page, 'lxml')
            break
        except Exception as e:
            logging.warning('exception(find actual):{0}'.format(str(e)))
            # print() call form keeps this line valid on Python 3 as well;
            # with a single argument it prints identically on Python 2.
            print(url)
            time.sleep(10)
    page.close()

    time_delta_for_localtime = dt.timedelta(hours=8)
    pre_id_str = ''
    total_event_num = df_events.index.size
    found_event_num = 0
    is_found_event = False

    for event_id in soup.find_all('tr'):
        id_str = event_id.get('data-eventid')
        if id_str == pre_id_str:
            continue
        if id_str is not None:
            pre_id_str = id_str
            try:
                id_num = int(id_str)
            except ValueError:
                continue
            # search by webid
            search_df = df_events.loc[lambda df: df.webid == id_num]
            if not search_df.empty:
                # found start
                is_found_event = True
                temp_buffer = search_df.iloc[0].to_dict()
                for child in event_id.children:
                    try:
                        # check whether the content of the website has changed
                        if child['class'][2] == 'event':
                            div = child.div
                            if div is not None:
                                if search_df.iloc[0]['event'] != div.span.string:
                                    print('web content changed')
                                    return -1  # web content change
                        elif child['class'][2] == 'actual':
                            value = child.string
                            if value is None:
                                return 0  # cannot find actual
                            else:
                                temp_buffer['actual'] = value
                                updateAcutalValue(datebase_ip, table_name, id_num, value)  # update database
                                df_events.loc[search_df.index[0], 'actual'] = value  # update buffer
                                print(df_events)
                                temp_buffer['record time'] = dt.datetime.strftime(
                                    dt.datetime.now() - time_delta_for_localtime, '%Y.%m.%d %H:%M')
                                df_result.loc[df_result['webid'].size] = temp_buffer
                                print(temp_buffer)
                                # append the record to the CSV log file
                                try:
                                    with open('E:\\forex_factory.csv', 'ab') as csvfile:
                                        writer = csv.DictWriter(csvfile, fieldnames=['webid', 'time', 'record time', 'event', 'currency', 'impact', 'actual', 'forecast', 'previous'])
                                        writer.writerow(temp_buffer)
                                except IOError:
                                    print('write dict error')
                                found_event_num += 1
                                if found_event_num == total_event_num:
                                    return 1  # found all events
                    # BUG FIX: the original `except TypeError, KeyError:` is
                    # Python-2 syntax for `except TypeError as KeyError` --
                    # it caught only TypeError and shadowed the KeyError
                    # builtin.  Catch both, as intended.
                    except (TypeError, KeyError):
                        continue
        else:
            # Rows after the requested events: if we already saw them but
            # fell through, some actual values are still missing.
            if is_found_event:
                return 0
def updateCurrentNewsTable(ip, table_name, df):
    """Replace ``table_name`` in the forex_news database with ``df``'s rows.

    A fresh SQLAlchemy engine is built per call (recycling pooled
    connections after 300 s); the DataFrame index is stored in an ``id``
    column and any existing table is dropped first.
    """
    dsn = 'mysql+mysqldb://root:@%s:3306/forex_news' % ip
    db_engine = create_engine(dsn, pool_recycle=300)
    df.to_sql(name=table_name, con=db_engine, if_exists='replace', index=True, index_label='id')
def updateAcutalValue(ip, table_name, webid, actual_value):
    """Write a scraped 'actual' value into the row identified by ``webid``.

    (The 'Acutal' typo in the name is kept: callers use this exact name.)

    Parameters
    ----------
    ip : str
        MySQL server host.
    table_name : str
        Table to update (comes from our own constants, never from input).
    webid : int
        Event id used as the row key.
    actual_value : str
        Value scraped from the web page.
    """
    try:
        conn = MySQLdb.connect(host=ip, port=3306, user='root', passwd='', db='forex_news')
        cur = conn.cursor()
        # SECURITY FIX: `actual_value` is scraped from an external web page,
        # so interpolating it into the SQL string was an injection risk.
        # Bind the values via DB-API placeholders; only the table name (an
        # identifier, which cannot be a placeholder) stays interpolated.
        sql_line = 'update %s set actual = %%s where webid = %%s;' % table_name
        logging.debug(sql_line)
        num = cur.execute(sql_line, (actual_value, webid))
        logging.debug('sql return:%d' % num)
        conn.commit()
        cur.close()
        conn.close()
    # `except X as e` instead of the Python-2-only `except X, e` syntax
    # (valid on Python 2.6+ and required on Python 3).
    except MySQLdb.Warning as w:
        logging.warning('warning:{0}'.format(str(w)))
if __name__ == '__main__':
    # Log everything to live_bug.log; mirror INFO and above to the console.
    logging.basicConfig(level=logging.DEBUG,
                        format='%(asctime)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s',
                        datefmt='%a, %d %b %Y %H:%M:%S',
                        filename='live_bug.log',
                        filemode='w')
    console = logging.StreamHandler()
    console.setLevel(logging.INFO)
    formatter = logging.Formatter('%(name)-12s: %(levelname)-8s %(message)s')
    console.setFormatter(formatter)
    logging.getLogger('').addHandler(console)

    url = 'http://www.forexfactory.com/calendar.php'
    database_ip = '127.0.0.1'
    current_news_table_name = 'current_news'

    # Initial snapshot of the weekly calendar, pushed to the database.
    calender = parsePage(url)
    updateCurrentNewsTable(database_ip,current_news_table_name,calender)
    #updateAcutalValue(datebase_ip,current_news_table_name,70268,'0.5%')

    max_index = calender.index[-1]
    current_index = calender.index[0]
    df_result = pd.DataFrame(columns=['webid', 'time', 'record time', 'event', 'currency', 'impact', 'actual', 'forecast', 'previous'])

    # Main loop: for each batch of simultaneous upcoming events, sleep until
    # they fire (re-checking the page every 5 minutes for content changes),
    # then poll the page until all their 'actual' values appear.
    while current_index <= max_index:
        #secs_left, events = inspectEvent(calender)
        next_event_time, events = inspectEvent(calender)
        print events
        #next_event_time = dt.datetime.now() + dt.timedelta(seconds=secs_left)
        #print secs_left, next_event_time
        logging.info('next event time:{0}'.format(dt.datetime.strftime(next_event_time,'%Y.%m.%d %H:%M')) )
        if events.empty:
            break
        current_index = events.index[-1] + 1
        logging.debug('next index:%d'%current_index)
        #print current_index, max_index
        logging.info('sleeping...')
        # time.sleep(secs_left)
        #sleep, check whether web content has changed every 5 mins
        while dt.datetime.now() < next_event_time:
            new_calander = parsePage(url)
            # A parse failure yields an empty frame; retry once a minute.
            while new_calander.empty:
                time.sleep(60)
                new_calander = parsePage(url)
            # events num are different, content changed
            if calender.index.size != new_calander.index.size:
                logging.info('detect web content changed')
                next_event_time, events = inspectEvent(new_calander)
                #if web error occurs, may be no future events
                if events.empty:
                    #new_calender = parsePage(url)
                    #next_event_time, events = inspectEvent(new_calender)
                    #time.sleep(5)
                    break
                else:
                    print events
                    calender = new_calander
                    updateCurrentNewsTable(database_ip,current_news_table_name,new_calander)
                    current_index = events.index[-1] + 1
                    logging.debug('next index: %d'%current_index)
            #check web page every 5 mins
            time.sleep(300)
        # awake
        logging.info('fetching actual value...')
        while True:
            find_actual_signal = findActual(events, df_result,url,database_ip,current_news_table_name)
            #found actual value
            if find_actual_signal == 1:
                break
            elif find_actual_signal == 0:
                #in case partially found, but not all found
                criterion = events['actual'].map(lambda x: x is None)
                events = events[criterion]
                time.sleep(5)
            else:
                # web content changed, update database and buffer(dataframe)
                calender = parsePage(url)
                updateCurrentNewsTable(database_ip, current_news_table_name, calender)
                max_index = calender.index[-1]
                # Re-select the events sharing the changed event's timestamp.
                stop_id = events.index[0]
                logging.info('changed index %d: %s'% (stop_id, events.iloc[0]['event']))
                criterion = calender['forecast'].map(lambda x:x is not None)
                events = calender[criterion]
                events = events['time'].loc[lambda df:df.time == events['time'].loc[stop_id]]
                logging.info('new events...')
                print events
    #print secs_left
    #print events
    #print events.loc[lambda df:df.id == int('65743')].empty
| [
"kingkonghon@hotmail.com"
] | kingkonghon@hotmail.com |
5cb291f086a1a6e113110c2af91826068dd57189 | 255e19ddc1bcde0d3d4fe70e01cec9bb724979c9 | /all-gists/4287546/snippet.py | 9124f440cdeccffaef8a4bd8d92dd5e133c999f6 | [
"MIT"
] | permissive | gistable/gistable | 26c1e909928ec463026811f69b61619b62f14721 | 665d39a2bd82543d5196555f0801ef8fd4a3ee48 | refs/heads/master | 2023-02-17T21:33:55.558398 | 2023-02-11T18:20:10 | 2023-02-11T18:20:10 | 119,861,038 | 76 | 19 | null | 2020-07-26T03:14:55 | 2018-02-01T16:19:24 | Python | UTF-8 | Python | false | false | 680 | py | # FROM: http://en.wikipedia.org/wiki/Base_36#Python_implementation
def base36encode(number):
    """Convert an integer into a base-36 string (digits 0-9, then A-Z).

    Negative numbers are prefixed with '-'.  Raises TypeError for
    non-integer input.
    """
    ALPHABET = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ"
    # Python 3 has a single unbounded `int`; the original check against
    # `(int, long)` raised NameError there because `long` no longer exists.
    if not isinstance(number, int):
        raise TypeError('This function must be called on an integer.')

    base36 = ''
    sign = ''

    if number < 0:
        sign = '-'
        number = -number

    # Single-digit fast path (also handles 0, which the loop below would
    # otherwise render as the empty string).
    if 0 <= number < len(ALPHABET):
        return sign + ALPHABET[number]

    while number != 0:
        number, i = divmod(number, len(ALPHABET))
        base36 = ALPHABET[i] + base36

    return sign + base36
def base36decode(number):
    """Decode a base-36 string (as produced by base36encode) to an int."""
    # int() accepts an explicit base and handles a leading '-' sign itself
    return int(number, 36)
"gistshub@gmail.com"
] | gistshub@gmail.com |
ebfd3c5ec16c2733d41e8c7b34b1f6cd09317644 | 7100f3358c9b8dba4b0cf1890bb1334d6833eacf | /Transition_examples_NCL_to_PyNGL/read_data/TRANS_read_ASCII_lat_lon_value_way2.py | 2e40183d8bcd1ea60b23477025039424b1cf5456 | [
"MIT"
] | permissive | KMFleischer/PyEarthScience | c0278329a47660c5dcb5cf5f7e6b9617684b4da0 | 9624ad3d6f6bae89a7ed71763144b56b8ba8b885 | refs/heads/master | 2022-08-12T17:01:18.854579 | 2022-07-28T14:42:11 | 2022-07-28T14:42:11 | 44,372,741 | 60 | 31 | MIT | 2022-07-27T13:19:56 | 2015-10-16T08:35:20 | Jupyter Notebook | UTF-8 | Python | false | false | 2,610 | py | #
# File:
# TRANS_read_ASCII_lat_lon_value_way2.py
#
# Synopsis:
# Illustrates how to read an ASCII file and create a
# contour fill plot on a map
#
# Categories:
# I/O
# contour plot
# map plot
#
# Author:
# Karin Meier-Fleischer, based on NCL example
#
# Date of initial publication:
# September 2018
#
# Description:
# This example shows how to read an ASCII file and
# create a contour fill plot on a map.
#
# Effects illustrated:
# o Read ASCII data
# o Drawing contours
# o Drawing a map
#
# Output:
# -
#
# Notes: The data for this example can be downloaded from
# http://www.ncl.ucar.edu/Document/Manuals/NCL_User_Guide/Data/
#
"""
Transition Guide Python Example: TRANS_read_ASCII_lat_lon_value_way2.py
based on read_asc6.ncl: http://ncl.ucar.edu/Applications/Scripts/read_asc6.ncl
- read ASCII file asc6.txt
- retrieve variable informations
- draw contours on a map
asc6.txt
Lat Lon Temp (C)
33.3 76.5 20.3
33.3 76.6 20.3
33.3 76.7 21.5
33.3 76.8 20.0
.....
2018-08-27 kmf
"""
from __future__ import print_function
import numpy as np
print("")
#-- read the data
f = open("asc6.txt",'r')
data = f.readlines() #-- data: type list
nrows = len(data)
#-- assign lists to append elements
lat0 = []
lon0 = []
vals = []
for i in data[1::]:
line = i.strip()
print(line)
cols = line.split()
lat0.append(cols[0])
lon0.append(cols[1])
vals.append(cols[2])
#-- convert string to float
print(len(lat0))
print(len(lon0))
print(len(vals))
lat0 = np.array(lat0).astype(float)
lon0 = np.array(lon0).astype(float)
temp1d = np.array(vals).astype(float)
indeqlat = np.array(np.where(lat0 == lat0[0]))
print(type(indeqlat))
nlons = indeqlat.shape #-- number of longitudes
nlons = nlons[1] #-- number of longitudes
nlats = nrows / nlons #-- number of latitude
lat = lat0[::nlons]
lon = lon0[0:nlons]
#setattr(lat, 'units', 'degrees_north')
#setattr(lon, 'units', 'degrees_east')
#-- rows by column
print("--> nlats: " + str(len(lat)))
print("--> nlons: " + str(len(lon)))
print("--> rank of vals: " + str(len(temp1d.shape)))
print("--> shape temp1d: " + str(temp1d.shape))
temp2d = np.reshape(temp1d,(nlats,nlons))
#setattr(temp2d, 'units', 'degC')
#setattr(telp2d, 'long_name', 'temperature')
print("--> shape temp2d: " + str(temp2d))
print("--> shape temp2d: " + str(temp2d.shape))
exit()
| [
"meier-fleischer@dkrz.de"
] | meier-fleischer@dkrz.de |
666459957700b84b14bec98f21fbd9d2c6441c2b | 7236d1d4873faa9735fd5e2d4598b211a370f731 | /project/n/projects/projects/ecommapp/ecommerce/migrations/0020_auto_20200928_1630.py | 7ceab26f28c88cea212b50ab80571813c500c591 | [] | no_license | Dreambigxz/my_first_django_app | 05f5a5d330d72084489f9306fca9ca232af13999 | 9e21ebcbe63c7394280558d2977ef8a796960e0d | refs/heads/main | 2023-01-03T18:45:20.712074 | 2020-10-23T09:05:47 | 2020-10-23T09:05:47 | 306,180,592 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 496 | py | # Generated by Django 3.0.8 on 2020-09-28 15:30
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated by Django (see file header).  Re-declares the
    # Products.color field with a fixed choice set.  Do not hand-edit an
    # applied migration: the recorded state must match the database.

    dependencies = [
        ('ecommerce', '0019_products_color'),
    ]

    operations = [
        migrations.AlterField(
            model_name='products',
            name='color',
            # choices are (stored value, human-readable label) pairs
            field=models.CharField(choices=[('YL', 'Yellow'), ('RD', 'Red'), ('OR', 'Orange'), ('GR', 'Green')], default='RD', max_length=200),
        ),
    ]
| [
"onyemordidaniel@gmail.com"
] | onyemordidaniel@gmail.com |
bd94282c6683da22e869c64a0dd76f5ba27d7158 | 35c1dcb0c8a713725a8d9d3062df26b096a4c150 | /setup.py | 295d74e177fe1a2cc941145ea359d2970b74527a | [
"MIT"
] | permissive | ijgarcesc/pybiblio | 84fc78c09866a65bd13945ab36c8841587d74f09 | 02428eba8c4fcef3f9311ca9ba7be6bab661ca9e | refs/heads/master | 2021-07-15T07:40:08.319875 | 2017-10-18T13:57:08 | 2017-10-18T13:57:08 | 105,827,028 | 0 | 0 | null | 2017-10-04T22:48:12 | 2017-10-04T22:48:12 | null | UTF-8 | Python | false | false | 899 | py | from setuptools import setup
def readme():
    """Return the contents of README.md for use as the long description."""
    # explicit encoding: the platform default may not be UTF-8 (e.g. Windows)
    with open('README.md', encoding='utf-8') as f:
        return f.read()
setup(name='pybiblio',
      version='0.0.0',
      description='Analysis of bibliographic information using python',
      # NOTE(review): this long_description describes a different project
      # (a Jupyter/VPL grading tool) and contains typos ("gradding",
      # "plugging") -- it looks copy-pasted; consider using readme() here.
      long_description='A tool for creating and gradding assignments in the Jupyter Notebook using the Virtual Programming Lab plugging and Moodle',
      classifiers=[
          'Development Status :: 3 - Alpha',
          'License :: OSI Approved :: MIT License',
          'Programming Language :: Python :: 3.5',
          'Intended Audience :: Education',
          'License :: Free For Educational Use',
      ],
      keywords='Scopus',
      url='http://github.com/jdvelasq/pybiblio',
      author='Juan D. Velasquez, Johana Garces',
      author_email='jdvelasq@unal.edu.co',
      license='MIT',
      # NOTE(review): 'pybibio' is likely a typo for 'pybiblio' -- verify
      # against the actual package directory name before release.
      packages=['pybibio'],
      include_package_data=True,
      zip_safe=False)
| [
"jdvelasq@unal.edu.co"
] | jdvelasq@unal.edu.co |
16b801019c6fae214098e1bee6e09311a7b48845 | 80e1313c8936f7cda0a4686c2d07d516e974158f | /tests/test_heatmap_smoketest.py | fb786f336f19e649df8ccde5c7d87366b3c9a800 | [
"MIT"
] | permissive | wahtak/develocorder | 1bb00bd9d88cd41a79a37e406732777e9cf55e81 | e8fb5d4e93355c6d28a08f5a21d3e948459da392 | refs/heads/master | 2020-12-20T00:37:42.162675 | 2020-02-10T23:05:41 | 2020-02-10T23:05:41 | 235,900,917 | 10 | 0 | null | null | null | null | UTF-8 | Python | false | false | 835 | py | from unittest.mock import patch
import pytest
import matplotlib
from develocorder import Heatmap, set_recorder, record
@pytest.mark.filterwarnings(
    "ignore:Matplotlib is currently using agg, which is a non-GUI backend, so cannot show the figure"
)
@patch("develocorder.graph._global_container_instance", None)
@patch("develocorder.interface._recorders", {})
def test_heatmap_smoketest():
    # Smoke test only: exercises Heatmap recording end to end without
    # asserting on the produced figures.  The two patches reset develocorder's
    # module-level state so the test does not depend on earlier tests.
    # use backend which does not require a display for CI
    matplotlib.use("Agg")
    # all arguments
    set_recorder(value1=Heatmap(xlabel="Sample", ylabel="Values 1", max_length=3))
    # no arguments
    set_recorder(value2=Heatmap())
    # interleave recordings for both recorders; value1 gets more than
    # max_length=3 recordings -- presumably exercising the length cap
    # (TODO confirm against develocorder's Heatmap docs)
    record(value1=[1, 9])
    record(value2=[42, 43, 44])
    record(value1=[3, 6])
    record(value2=[43, 44, 45])
    record(value1=[6, 3])
    record(value1=[9, 1])
    record(value1=[-1, 0])
| [
"alexander.wt.metz@gmail.com"
] | alexander.wt.metz@gmail.com |
a0e6dacc42f43f83393b7db514c433a1171143a3 | 2a3188301e622f1f6bce3acc4974bf641daf3578 | /driving-behaviour/src/data_utils.py | 7616a149bcc60a00b530218c0b2580b59d6ffef8 | [
"MIT"
] | permissive | cy94/ghack | 884d2116853656eb3101d4bdd158386c9d6944e7 | 75a55cbb3c89517ed60b7885abfb6ba5b53bfcef | refs/heads/master | 2020-03-28T14:38:33.646118 | 2018-09-12T13:25:38 | 2018-09-12T13:25:38 | 148,506,910 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 242 | py | # -*- coding: utf-8 -*-
# @Author: chandan
# @Date: 2017-07-08 00:32:17
# @Last Modified by: chandan
# @Last Modified time: 2018-08-28 17:11:51
import pandas as pd
def read_file(fname):
    """Read a headerless CSV into a DataFrame (columns get integer names)."""
    return pd.read_csv(fname, header=None)
| [
"chandan.yeshwanth@siemens.com"
] | chandan.yeshwanth@siemens.com |
63806dd54f4edeaf3c314607b3876a7b5e12b74c | 81a1507bfb534da45a0aca8f95ee859bdd37e7b5 | /fc/ast.py | f4b14b638a077c07a85edba6d290871864977da7 | [] | no_license | arpit-saxena/flipbook | ec81de9e77ac1ed8161bbdc3f9ccd17b83c5a1be | 1ffbf28c6dcf534c53c55732533dfa13d1b47fac | refs/heads/main | 2023-08-24T15:55:05.008131 | 2021-11-06T12:25:13 | 2021-11-06T12:25:13 | 424,356,868 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,384 | py | from __future__ import annotations
from collections import namedtuple
from typing import List
from .config import GRID_SIZE
from sortedcontainers import SortedDict
class Object:
    """Base class for every named entity in a flipbook program."""

    def __init__(self, name: str) -> None:
        # identifier used to refer to this object elsewhere in the program
        self.name = name
class SceneElement:
    """Placement of a named object at a position over a frame interval."""

    def __init__(self, object_name: str, pos_x: float, pos_y: float, frame_begin: int, frame_end: int) -> None:
        self.object_name = object_name
        # position of the element
        self.pos_x, self.pos_y = pos_x, pos_y
        # frame interval this element covers
        self.frame_begin, self.frame_end = frame_begin, frame_end
class Scene(Object):
    """A named group of SceneElement placements."""

    def __init__(self, name: str, scene_elements: List[SceneElement]) -> None:
        super().__init__(name)
        self.scene_elements = scene_elements

    def get_max_frame_num(self):
        # a scene declaration itself does not extend the timeline
        return 0
class Image(Object):
    """An image asset loaded from *path*, drawn at the given size."""

    def __init__(self, name: str, size_x: float, size_y: float, path: str) -> None:
        super().__init__(name)
        # size of the image
        self.size_x, self.size_y = size_x, size_y
        # location of the image file
        self.path = path

    def get_max_frame_num(self):
        # a static image does not extend the timeline
        return 0
class TweenFrameDesc:
    """A single key frame of a tween: a position at a frame number."""

    def __init__(self, frame_num: int, pos_x: float, pos_y: float) -> None:
        self.frame_num = frame_num
        self.pos_x, self.pos_y = pos_x, pos_y
class Tween(Object):
    """Movement of an object described by a set of key frames."""

    def __init__(self, name: str, object_name: str,
                 frame_desc_list: List[TweenFrameDesc]) -> None:
        super().__init__(name)
        self.object_name = object_name
        # maps frame number -> (pos_x, pos_y); SortedDict keeps the keys
        # ordered by frame number regardless of input order
        self.frame_desc_list = SortedDict(
            (desc.frame_num, (desc.pos_x, desc.pos_y))
            for desc in frame_desc_list
        )

    def get_max_frame_num(self):
        # keys are sorted, so the last key is the highest frame number
        return self.frame_desc_list.keys()[-1]
class Program:
    """Top-level container: a grid plus the objects placed on it."""

    Grid = namedtuple('Grid', ['size_x', 'size_y'])

    def __init__(self) -> None:
        self.objects = []
        self.grid_size = GRID_SIZE    # default until add_grid() is called
        self.grid_explicit = False    # has an explicit grid directive been seen?

    def add_grid(self, size_x: int, size_y: int) -> Program:
        """Set the grid size; only one grid directive is allowed."""
        if self.grid_explicit:
            raise RuntimeError("Can't have more than one grid directives!")
        self.grid_explicit = True
        self.grid_size = (size_x, size_y)
        return self

    def add_object(self, object: Object) -> Program:
        """Register an object; returns self so calls can be chained."""
        self.objects.append(object)
        return self

    def get_max_frame_num(self) -> int:
        """Highest frame number used by any registered object."""
        return max(obj.get_max_frame_num() for obj in self.objects)
| [
"arpit.saxena2000@yahoo.in"
] | arpit.saxena2000@yahoo.in |
92ec561813fa2612c71339d61d9e4a46e94ae5c3 | 0fd0495f194bc22f0322d02ebabe8add9bf5814c | /python_concept_references/strings.py | 219c87dc70935c66bdb73b5f543c1efcaa79c660 | [] | no_license | WorldPierce/Automate_the_Boring_Stuff_With_Python | bd6982bddefa712598dc5e6eb4cf2a2aa87b2c1f | a0bc6ba49d92b25f4fbe8d4fdd9385f294567e4c | refs/heads/master | 2020-12-30T09:58:38.341981 | 2017-08-05T19:51:10 | 2017-08-05T19:51:10 | 99,248,638 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 750 | py | spam = 'hello world!'
print(spam.upper())
print(spam.lower())
print(spam.islower())
print(spam.isupper())
# isalpha - letters only?
# isalnum - alphanumeric (letters or digits) only?
# isspace - whitespace only?
# istitle - every first word capital after space?
print(spam.title())
# startswith('str') | endswith('str')
print('\n\n'.join(['cats','rats','bats'])) # joins the list items, separated by '\n\n'
print(spam.split(' ')) # splits into a list on spaces
print(spam.rjust(15)) # right justifys letters and makes string len 15
# ljust(num)
print(spam.ljust(15, '*')) #adds * to right of str
print(spam.center(20, '+')) # centers text
#spam.strip() removes white space from both ends of the string
#lstrip | rstrip strip white space from the left / right side only
spam.replace('e', 'XSA') # return value discarded -- strings are immutable
print(spam) # still 'hello world!': replace() did not modify spam
print(spam.replace('e', 'XSA')) # replace() returns a new string
| [
"bildo316@gmail.com"
] | bildo316@gmail.com |
07b9f1b6db7bb45b9d2272190d29db9566448bcf | 4e566a1944fdfcf9de007a6c404136c2529830c1 | /imagedataset.py | bee04f3237ce389335c1805488d9e254691ba786 | [] | no_license | Sreenivas1993/Yelp-photo-Classification | 22314788378b33060df73707445d4db0e292f945 | e86dded2f3ea4ad53e7e83a9346390ed2b8a443d | refs/heads/master | 2021-08-23T03:43:43.395428 | 2017-12-03T01:12:51 | 2017-12-03T01:12:51 | 110,392,817 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,444 | py | # -*- coding: utf-8 -*-
"""
Created on Sat Nov 18 21:34:09 2017
@author: Sreenivas
"""
from PIL import Image
import os
import os.path
import pandas as pd
import torch.utils.data as data
def pil_loader(path):
    """Load the image at *path* with PIL and return it converted to RGB."""
    # open the path as a file object first to avoid a ResourceWarning
    # (https://github.com/python-pillow/Pillow/issues/835)
    with open(path, 'rb') as f, Image.open(f) as img:
        return img.convert('RGB')
def accimage_loader(path):
    """Load *path* with accimage, falling back to PIL when decoding fails."""
    import accimage
    try:
        return accimage.Image(path)
    except IOError:
        # accimage could not decode the file -- fall back to the PIL path
        return pil_loader(path)
def default_loader(path):
    """Dispatch to the loader matching torchvision's configured image backend."""
    from torchvision import get_image_backend
    if get_image_backend() == 'accimage':
        return accimage_loader(path)
    return pil_loader(path)
class Imagedataset(data.Dataset):
    """Dataset of images listed in a CSV file.

    The first column of the CSV holds image file names relative to
    *root_dir*.  __getitem__ returns the (optionally transformed) image
    only -- no label is returned.
    """

    # Init function taking a csv file for labels and a root directory for images
    def __init__(self, root_dir, csv_file, transform=None, loader=default_loader):
        self.labelfile = pd.read_csv(csv_file)
        self.root_dir = root_dir
        self.loader = loader
        self.transform = transform

    # Length of dataset: one sample per CSV row
    def __len__(self):
        return len(self.labelfile)

    def __getitem__(self, idx):
        # .iloc replaces the long-deprecated .ix, which was removed in
        # pandas >= 1.0 and made this raise AttributeError there
        img_name = os.path.join(self.root_dir, self.labelfile.iloc[idx, 0])
        image = self.loader(img_name)
        if self.transform:
            image = self.transform(image)
        return image
| [
"sreenivenki@gmail.com"
] | sreenivenki@gmail.com |
3245b49aa803d9fd7eaad75b78856c0dd540cbf0 | 1d4adeca605818749247235dba11a90fbc154748 | /deprecated/services/deployment/deployment_controller.py | 6ccff0b553fb38655e4e8f6737b142ce9a9a71f7 | [
"Apache-2.0"
] | permissive | joshish-iu/cloudmesh-nist | 4598cd884fb9faaef5ea5cc5fa3e3920dbc6ebff | c26952859c2230231420058f6c488c9f6cc73218 | refs/heads/master | 2020-05-26T13:11:51.730994 | 2019-05-21T10:42:28 | 2019-05-21T10:42:28 | 188,243,290 | 0 | 0 | NOASSERTION | 2019-05-23T13:48:49 | 2019-05-23T13:48:48 | null | UTF-8 | Python | false | false | 905 | py | import connexion
import six
#from deployment_controller import *
from swagger_server.models.deployment import Deployment # noqa: E501
from swagger_server import util
from pymongo import MongoClient
client = MongoClient()
db = client['cm']
deployments = db['deployment']
def get_deployment():
    """Return all deployments as a list of dicts, Mongo '_id' excluded."""
    # the {'_id': False} projection drops Mongo's internal ObjectId field
    cursor = deployments.find({}, {'_id': False})
    return list(cursor)
def add_deployment(deployment=None):
    """Persist a deployment document and return it.

    When the incoming request body is JSON, it is first converted through the
    generated Deployment model; otherwise *deployment* is stored as given
    (it must still provide to_dict()).
    """
    if connexion.request.is_json:
        deployment = Deployment.from_dict(deployment)
    # insert_one() replaces Collection.insert(), which was deprecated in
    # PyMongo 3 and removed in PyMongo 4
    deployments.insert_one(deployment.to_dict())
    return deployment
def get_deployment_by_name(name):
    # BUG (inherited): names are not guaranteed unique; like the original
    # loop, this returns the fields of the first matching document only,
    # or None when there is no match.
    element = deployments.find_one({'name': name})
    if element is None:
        return None
    return (element['name'],
            element['description'],
            element['value'],
            element['kind'])
| [
"laszewski@gmail.com"
] | laszewski@gmail.com |
63049adb954204a6d260ac373203abbc430dd063 | 67568ac53039fd633f9017bd368a13258191e1b2 | /thrift_example/file_transport/file/FileService.py | 429826b7eddda8fd50d339c35c87f6a40c1cd0ea | [] | no_license | starryrbs/rpc_shared | 22012bbb5e0916a178e088f1be58acedd589c13d | 6e4feb2929337ccb885ff629c13a0a6d3bc457d2 | refs/heads/master | 2022-12-11T04:27:24.176393 | 2020-09-02T16:01:37 | 2020-09-02T16:01:37 | 291,492,629 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | true | 7,528 | py | #
# Autogenerated by Thrift Compiler (0.12.0)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py
#
from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException
from thrift.protocol.TProtocol import TProtocolException
from thrift.TRecursive import fix_spec
import sys
import logging
from .ttypes import *
from thrift.Thrift import TProcessor
from thrift.transport import TTransport
# registry of generated structs; consumed by fix_spec() at the end of the file
all_structs = []
# Generated by the Thrift compiler (see file header) -- do not edit by hand.
# Service interface: implementations provide uploadFile().
class Iface(object):
    def uploadFile(self, filedata):
        """
        Parameters:
        - filedata
        """
        pass
# Generated synchronous RPC client stub for the uploadFile call.
class Client(Iface):
    def __init__(self, iprot, oprot=None):
        # with a single protocol argument, it is used for both directions
        self._iprot = self._oprot = iprot
        if oprot is not None:
            self._oprot = oprot
        self._seqid = 0
    def uploadFile(self, filedata):
        """
        Parameters:
        - filedata
        """
        self.send_uploadFile(filedata)
        return self.recv_uploadFile()
    def send_uploadFile(self, filedata):
        # serialize the call frame: message header followed by the args struct
        self._oprot.writeMessageBegin('uploadFile', TMessageType.CALL, self._seqid)
        args = uploadFile_args()
        args.filedata = filedata
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()
    def recv_uploadFile(self):
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            # server-side failure: decode the exception and re-raise locally
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = uploadFile_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.success is not None:
            return result.success
        raise TApplicationException(TApplicationException.MISSING_RESULT, "uploadFile failed: unknown result")
# Generated server-side dispatcher: decodes incoming calls and forwards
# them to the user-supplied handler.
class Processor(Iface, TProcessor):
    def __init__(self, handler):
        self._handler = handler
        self._processMap = {}
        self._processMap["uploadFile"] = Processor.process_uploadFile
    def process(self, iprot, oprot):
        (name, type, seqid) = iprot.readMessageBegin()
        if name not in self._processMap:
            # unknown method: drain the arguments and report back an exception
            iprot.skip(TType.STRUCT)
            iprot.readMessageEnd()
            x = TApplicationException(TApplicationException.UNKNOWN_METHOD, 'Unknown function %s' % (name))
            oprot.writeMessageBegin(name, TMessageType.EXCEPTION, seqid)
            x.write(oprot)
            oprot.writeMessageEnd()
            oprot.trans.flush()
            return
        else:
            self._processMap[name](self, seqid, iprot, oprot)
        return True
    def process_uploadFile(self, seqid, iprot, oprot):
        args = uploadFile_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = uploadFile_result()
        try:
            result.success = self._handler.uploadFile(args.filedata)
            msg_type = TMessageType.REPLY
        except TTransport.TTransportException:
            # transport problems must propagate; the connection is unusable
            raise
        except TApplicationException as ex:
            logging.exception('TApplication exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = ex
        except Exception:
            # any other handler failure is reported as an internal error
            logging.exception('Unexpected exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("uploadFile", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
# HELPER FUNCTIONS AND STRUCTURES
# Generated argument struct for uploadFile (Thrift wire format).
class uploadFile_args(object):
    """
    Attributes:
    - filedata
    """
    def __init__(self, filedata=None,):
        self.filedata = filedata
    def read(self, iprot):
        # fast path: protocols with a C-accelerated decoder
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.filedata = File()
                    self.filedata.read(iprot)
                else:
                    # wrong wire type for this field id: skip it
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # fast path: protocols with a C-accelerated encoder
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('uploadFile_args')
        if self.filedata is not None:
            oprot.writeFieldBegin('filedata', TType.STRUCT, 1)
            self.filedata.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        return
    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
# register the struct and attach its wire-format spec
all_structs.append(uploadFile_args)
uploadFile_args.thrift_spec = (
    None,  # 0
    (1, TType.STRUCT, 'filedata', [File, None], None, ),  # 1
)
# Generated result struct for uploadFile (Thrift wire format).
class uploadFile_result(object):
    """
    Attributes:
    - success
    """
    def __init__(self, success=None,):
        self.success = success
    def read(self, iprot):
        # fast path: protocols with a C-accelerated decoder
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.BOOL:
                    self.success = iprot.readBool()
                else:
                    # wrong wire type for this field id: skip it
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # fast path: protocols with a C-accelerated encoder
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('uploadFile_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.BOOL, 0)
            oprot.writeBool(self.success)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        return
    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
# register the struct and attach its wire-format spec
all_structs.append(uploadFile_result)
uploadFile_result.thrift_spec = (
    (0, TType.BOOL, 'success', None, None, ),  # 0
)
# resolve forward references inside the collected thrift_specs, then drop
# the temporary registry
fix_spec(all_structs)
del all_structs
| [
"1322096624@qq.com"
] | 1322096624@qq.com |
fbf8ad242fdce014c3e68374b9493dc4412d6cae | d48219cbb727da7c283260e95afc61c43c5e9006 | /tests/test_main.py | 62832d8fcac385cb6e45e9a494a972a565595bc4 | [
"MIT"
] | permissive | NathanVaughn/actions-cloudflare-purge | a83a2c6cea330dd6c88c11e382176920dc386745 | aefc898270f34a9bbf2078094f8728bcf4043047 | refs/heads/master | 2023-06-27T15:12:12.359346 | 2023-06-08T22:58:14 | 2023-06-08T22:58:14 | 210,984,075 | 23 | 6 | MIT | 2023-08-28T21:04:06 | 2019-09-26T02:40:26 | Python | UTF-8 | Python | false | false | 12,323 | py | import json
import os
import subprocess
import sys
from typing import List, Tuple
import pytest
# weird environment variable to enable testing mode that no one else should ever set
# (presumably makes main.py print the would-be request instead of sending it;
# run_command() below parses that printed output -- confirm in main.py)
os.environ["NATHANVAUGHN_TESTING"] = "True"
def run_command(options: List[str]) -> Tuple[str, dict, dict]:
    # run the command just as the Action does, and return the results.
    # In testing mode main.py's stdout is expected to be three lines:
    #   1: target URL, 2: headers as JSON, 3: request body as JSON
    main = os.path.join(os.path.dirname(__file__), "..", "main.py")
    output = subprocess.check_output([sys.executable, main] + options, text=True)
    output_split = output.split("\n")
    url = output_split[0]
    headers: dict = json.loads(output_split[1])
    data: dict = json.loads(output_split[2])
    return url, headers, data
def test_legacy():
    """Legacy auth: zone and key are read from environment variables."""
    os.environ["CLOUDFLARE_ZONE"] = "zone123"
    os.environ["CLOUDFLARE_AUTH_KEY"] = "key123"
    blank_flags = []
    for flag in ("--cf-zone", "--cf-auth", "--urls", "--files",
                 "--tags", "--hosts", "--prefixes"):
        blank_flags += [flag, ""]
    url, headers, data = run_command(blank_flags)
    assert url == "https://api.cloudflare.com/client/v4/zones/zone123/purge_cache"
    assert headers["Authorization"] == "Bearer key123"
    assert data == {"purge_everything": True}
    del os.environ["CLOUDFLARE_ZONE"]
    del os.environ["CLOUDFLARE_AUTH_KEY"]
def test_new():
    """New-style auth: zone and key are passed on the command line."""
    args = ["--cf-zone", "zone123", "--cf-auth", "key123"]
    for flag in ("--urls", "--files", "--tags", "--hosts", "--prefixes"):
        args += [flag, ""]
    url, headers, data = run_command(args)
    assert url == "https://api.cloudflare.com/client/v4/zones/zone123/purge_cache"
    assert headers["Authorization"] == "Bearer key123"
    assert data == {"purge_everything": True}
def test_new_missing():
    """Missing CLI auth must fail whether zone, key, or both are absent."""
    for zone, key in (("", ""), ("zone123", ""), ("", "key123")):
        args = ["--cf-zone", zone, "--cf-auth", key]
        for flag in ("--urls", "--files", "--tags", "--hosts", "--prefixes"):
            args += [flag, ""]
        with pytest.raises(Exception):
            run_command(args)
def test_legacy_missing():
    """Incomplete env-var auth must fail regardless of which half is set."""
    blank_flags = []
    for flag in ("--cf-zone", "--cf-auth", "--urls", "--files",
                 "--tags", "--hosts", "--prefixes"):
        blank_flags += [flag, ""]
    # key only, then zone only, then neither -- all must raise
    for env in ({"CLOUDFLARE_AUTH_KEY": "key123"},
                {"CLOUDFLARE_ZONE": "zone123"},
                {}):
        for name, value in env.items():
            os.environ[name] = value
        with pytest.raises(Exception):
            run_command(blank_flags)
        for name in env:
            del os.environ[name]
def test_mix():
    """Auth may be split between the environment and the command line."""
    expected_url = "https://api.cloudflare.com/client/v4/zones/zone123/purge_cache"
    blanks = []
    for flag in ("--urls", "--files", "--tags", "--hosts", "--prefixes"):
        blanks += [flag, ""]

    # key from the environment, zone from the command line
    os.environ["CLOUDFLARE_AUTH_KEY"] = "key123"
    url, headers, data = run_command(["--cf-zone", "zone123", "--cf-auth", ""] + blanks)
    assert url == expected_url
    assert headers["Authorization"] == "Bearer key123"
    assert data == {"purge_everything": True}
    del os.environ["CLOUDFLARE_AUTH_KEY"]

    # zone from the environment, key from the command line
    os.environ["CLOUDFLARE_ZONE"] = "zone123"
    url, headers, data = run_command(["--cf-zone", "", "--cf-auth", "key123"] + blanks)
    assert url == expected_url
    assert headers["Authorization"] == "Bearer key123"
    assert data == {"purge_everything": True}
    del os.environ["CLOUDFLARE_ZONE"]
def test_urls():
    """--urls values are forwarded as the "files" purge payload."""
    args = ["--cf-zone", "zone123", "--cf-auth", "key123",
            "--urls", "nathanv.me google.com"]
    for flag in ("--files", "--tags", "--hosts", "--prefixes"):
        args += [flag, ""]
    url, headers, data = run_command(args)
    assert url == "https://api.cloudflare.com/client/v4/zones/zone123/purge_cache"
    assert headers["Authorization"] == "Bearer key123"
    assert data == {"files": ["nathanv.me", "google.com"]}
def test_files():
    """--files values are forwarded as the "files" purge payload."""
    args = ["--cf-zone", "zone123", "--cf-auth", "key123",
            "--urls", "", "--files", "nathanv.me google.com"]
    for flag in ("--tags", "--hosts", "--prefixes"):
        args += [flag, ""]
    url, headers, data = run_command(args)
    assert url == "https://api.cloudflare.com/client/v4/zones/zone123/purge_cache"
    assert headers["Authorization"] == "Bearer key123"
    assert data == {"files": ["nathanv.me", "google.com"]}
def test_tags():
    """--tags values are forwarded as the "tags" purge payload."""
    args = ["--cf-zone", "zone123", "--cf-auth", "key123",
            "--urls", "", "--files", "", "--tags", "tag1 tag-2"]
    for flag in ("--hosts", "--prefixes"):
        args += [flag, ""]
    url, headers, data = run_command(args)
    assert url == "https://api.cloudflare.com/client/v4/zones/zone123/purge_cache"
    assert headers["Authorization"] == "Bearer key123"
    assert data == {"tags": ["tag1", "tag-2"]}
def test_hosts():
    """--hosts values are forwarded as the "hosts" purge payload."""
    args = ["--cf-zone", "zone123", "--cf-auth", "key123",
            "--urls", "", "--files", "", "--tags", "",
            "--hosts", "nathanv.me google.com", "--prefixes", ""]
    url, headers, data = run_command(args)
    assert url == "https://api.cloudflare.com/client/v4/zones/zone123/purge_cache"
    assert headers["Authorization"] == "Bearer key123"
    assert data == {"hosts": ["nathanv.me", "google.com"]}
def test_prefixes():
    """--prefixes values are forwarded as the "prefixes" purge payload."""
    args = ["--cf-zone", "zone123", "--cf-auth", "key123",
            "--urls", "", "--files", "", "--tags", "", "--hosts", "",
            "--prefixes", "nathanv.me/assets https://google.com/images"]
    url, headers, data = run_command(args)
    assert url == "https://api.cloudflare.com/client/v4/zones/zone123/purge_cache"
    assert headers["Authorization"] == "Bearer key123"
    assert data == {"prefixes": ["nathanv.me/assets", "https://google.com/images"]}
def test_purge_everything():
    """With every category left blank, everything is purged."""
    args = ["--cf-zone", "zone123", "--cf-auth", "key123"]
    for flag in ("--urls", "--files", "--tags", "--hosts", "--prefixes"):
        args += [flag, ""]
    url, headers, data = run_command(args)
    assert url == "https://api.cloudflare.com/client/v4/zones/zone123/purge_cache"
    assert headers["Authorization"] == "Bearer key123"
    assert data == {"purge_everything": True}
def test_full():
    """All purge categories may be combined in one invocation."""
    url, headers, data = run_command([
        "--cf-zone", "zone123",
        "--cf-auth", "key123",
        "--urls", "nathanv.me google.com",
        "--files", "blog.nathanv.me",
        "--tags", "tag1 tag-2",
        "--hosts", "nathanv.me google.com",
        "--prefixes", "nathanv.me/assets https://google.com/images",
    ])
    assert url == "https://api.cloudflare.com/client/v4/zones/zone123/purge_cache"
    assert headers["Authorization"] == "Bearer key123"
    # --files and --urls are merged into the single "files" list
    assert data == {
        "files": ["blog.nathanv.me", "nathanv.me", "google.com"],
        "tags": ["tag1", "tag-2"],
        "hosts": ["nathanv.me", "google.com"],
        "prefixes": ["nathanv.me/assets", "https://google.com/images"],
    }
def test_cli():
    """Category flags may be omitted entirely on the command line."""
    base = ["--cf-zone", "zone123", "--cf-auth", "key123"]

    url, headers, data = run_command(base)
    assert url == "https://api.cloudflare.com/client/v4/zones/zone123/purge_cache"
    assert headers["Authorization"] == "Bearer key123"
    assert data == {"purge_everything": True}

    url, headers, data = run_command(base + ["--tags", "tag1", "tag-2"])
    assert url == "https://api.cloudflare.com/client/v4/zones/zone123/purge_cache"
    assert headers["Authorization"] == "Bearer key123"
    assert data == {"tags": ["tag1", "tag-2"]}
def test_whitespace():
    """Values may be separated by any whitespace: tabs, newlines, runs of spaces."""
    url, headers, data = run_command([
        "--cf-zone", "zone123",
        "--cf-auth", "key123",
        "--urls", "nathanv.me\ngoogle.com",
        "--files", "pay.nathanv.me\tblog.nathanv.me",
        "--tags", "tag1 tag-2\t tag3 \n\ntag4",
        "--hosts", "nathanv.me google.com",
        "--prefixes", "nathanv.me/assets https://google.com/images",
    ])
    assert url == "https://api.cloudflare.com/client/v4/zones/zone123/purge_cache"
    assert headers["Authorization"] == "Bearer key123"
    assert data == {
        "files": ["pay.nathanv.me", "blog.nathanv.me", "nathanv.me", "google.com"],
        "tags": ["tag1", "tag-2", "tag3", "tag4"],
        "hosts": ["nathanv.me", "google.com"],
        "prefixes": ["nathanv.me/assets", "https://google.com/images"],
    }
| [
"nvaughn51@gmail.com"
] | nvaughn51@gmail.com |
a105bce6c8161218e02ea347b9d1dae4caa90058 | 12406580078bd7db2902887582186ce544619d57 | /runwise_multiplicity/asp_extraction_method.py | 9dc19dde1b35613bf5b32e4a0f7e387afb162772 | [
"MIT"
] | permissive | fact-project/runwise_multiplicity | 92adfcf38ef88077ef81e9653be7113c652310ea | 57526b6dbade135ee26d0ba04b2ba6f77879628b | refs/heads/master | 2018-07-03T03:28:47.091187 | 2018-05-31T17:42:30 | 2018-05-31T17:42:30 | 126,005,509 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,817 | py | import matplotlib.pyplot as plt
import runwise_multiplicity as rwm
import numpy as np
import pandas as pd
# NOTE(review): pd.read_msgpack() was removed in pandas 1.0 -- this script
# needs pandas < 1.0 (or a migration of the .msg file) to run at all.
run_info = pd.read_msgpack(
    'fact_run_info_table_20180319.msg')
read_file = rwm.read_gzipped_jsonl(
    'run_database_of_photon_multiplicity_20180207.jsonl.gz')
r = rwm.multiplicity_intenity_to_rates(read_file)

# Walk the runs in order and stop at the first run whose median current is
# below 10; after the loop, run_index / only_asp_rates refer to that run.
for run_index in range(len(r)):
    # MultiplicityPhysicsTriggerRate  ~ NSB + ASP
    # MultiplicityPedestalTriggerRate ~ NSB only
    # subtracting NSB from the total rates leaves the air-shower-pulse rates
    only_asp_rates = (
        r[run_index]['MultiplicityPhysicsTriggerRate'] -
        r[run_index]['MultiplicityPedestalTriggerRate'])
    fNight = r[run_index]['fNight']
    fRunID = r[run_index]['fRunID']
    run_mask = (
        (run_info.fNight == fNight) &
        (run_info.fRunID == fRunID))
    # .values replaces DataFrame.as_matrix(), which was removed in pandas 1.0
    fCurrentsMedMean = run_info.fCurrentsMedMean[run_mask].values[0]
    if fCurrentsMedMean < 10:
        break
    print('Night:', fNight, 'Run:', fRunID, 'Current:', fCurrentsMedMean)

# multiplicity axis; computed once instead of per curve
multiplicity = np.linspace(1, 101, 100)
fig, ax = plt.subplots()
# rates are weighted by multiplicity^2.7 to flatten the steep spectrum
ax.loglog(
    multiplicity,
    r[run_index]['MultiplicityPedestalTriggerRate'] * multiplicity**2.7,
    color='r')
ax.loglog(
    multiplicity,
    only_asp_rates * multiplicity**2.7,
    color='b')
ax.loglog(
    multiplicity,
    r[run_index]['MultiplicityPhysicsTriggerRate'] * multiplicity**2.7,
    color='xkcd:lime',
    linestyle='--',
    linewidth=2)
plt.xlabel('Multiplicity/1')
plt.ylabel('Multiplicity$^{2.7}$ Rates(Multiplicity)/s$^{-1}$')
plt.savefig(
    'asp_extraction_method.png',
    dpi='figure',
    bbox_inches='tight'
)
plt.clf()
| [
"amandeepsingh@ads.ethz.ch"
] | amandeepsingh@ads.ethz.ch |
212760228292c0bf7012fb43f8ccebebea45a312 | 66b92a180fb10e2f4dea337022c4ce85cb4f486d | /pis_ofap/main/serializers.py | f05b555c9964907514d8832855a413ef3a01fc6e | [] | no_license | encrypted-fox/django-pis-ofap | f5c7315138a500283432e5d1c6b4c3b943a30075 | b2d9a5c38b7649e9269b33b2de388a87d4d0f922 | refs/heads/master | 2022-11-08T19:13:17.345990 | 2020-06-18T16:05:19 | 2020-06-18T16:05:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 574 | py | from rest_framework import serializers
from .models import *
class EmployeesSerializer(serializers.ModelSerializer):
    """Serialize every field of the Employees model."""
    class Meta:
        model = Employees
        fields = '__all__'
class RequestsSerializer(serializers.ModelSerializer):
    """Serialize every field of the Requests model."""
    class Meta:
        model = Requests
        fields = '__all__'
class RepositoriesSerializer(serializers.ModelSerializer):
    """Serialize every field of the Repositories model."""
    class Meta:
        model = Repositories
        fields = '__all__'
class AgreementsSerializer(serializers.ModelSerializer):
    """Serialize every field of the Agreements model."""
    class Meta:
        model = Agreements
        fields = '__all__'
"yaroslav.souchkov@gmail.com"
] | yaroslav.souchkov@gmail.com |
da783cbc09a1b7c3e9060a93cd82e33e1de0a527 | 43d462e2e9cafabea41768b51231784c381be3ec | /details_from_Wiki/Crawl_car/Crawl_car/spiders/car.py | 993815a003e00cb65bdf90ea08cb5cc08d9359d3 | [] | no_license | DPgg999/- | 899d6b95b3b3ccaac9c540bbe31cc6d6a3b0c0eb | a8966077bb2754ec8d6d2d9a1b32393ac28fc8c9 | refs/heads/master | 2020-05-29T19:53:13.092058 | 2019-06-20T12:40:08 | 2019-06-20T12:40:08 | 189,341,781 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,617 | py | # -*- coding: utf-8 -*-
import scrapy
from Crawl_car.items import CrawlCarItem
class CarSpider(scrapy.Spider):
    """Scrapy spider that crawls baike.com wiki pages for a list of car names.

    The list of names is read at class-definition time from a hard-coded
    local file; one start URL is built per name.
    """
    name = 'car'
    allowed_domains = ["http://www.baike.com"]
    # NOTE(review): absolute Windows path read at import time — the spider
    # cannot even be imported on a machine without this drive; confirm.
    file = open('Z:\项目\Car_\Car_Knowledge_Graph_A\Crawl_car\Crawl_car\spiders\car_name.txt', 'r').read()
    wordList = file.split()
    count = 0
    start_urls = []
    # Class-level loop: runs once when the class is defined, building one
    # wiki URL per car name.
    for i in wordList:
        url = "http://www.baike.com/wiki/"
        url = url + str(i)
        start_urls.append(url)
        count += 1
    # print(start_urls)
    # print(count) #3747

    def parse(self, response):
        """Extract one CrawlCarItem (name, key/value info, detail text,
        image links) from a single wiki page response."""
        deltail_count = 0  # NOTE(review): never used after assignment
        item = CrawlCarItem()
        # Page title (the car's name).
        all_xpath_name = response.xpath(r'//div[@class="content-h1"]')
        for single_xpath in all_xpath_name:
            name = single_xpath.xpath(r'.//h1/text()').extract_first()
            item['name'] = name
        # Base-info table: parallel lists of attribute names and values.
        all_xpath_key_value = response.xpath(r'//*[@id="datamodule"]/div[1]/table')
        for single_xpath in all_xpath_key_value:
            baseInfoKeyList = single_xpath.xpath(r'.//tr/td/strong/text()').extract()
            baseInfoValueList = single_xpath.xpath(r'.//tr/td/span/text()').extract()
            item['baseInfoKeyList'] = baseInfoKeyList
            item['baseInfoValueList'] = baseInfoValueList
        # Main article body and image links.
        all_xpath_detail = response.xpath(r'//*[@id="content"]')
        for single_xpath in all_xpath_detail:
            detail = single_xpath.xpath(r'.//p/text()').extract()
            image_url = single_xpath.xpath(r'.//div[2]/a/@href').extract()
            item['detail'] = detail
            item['image_url'] = image_url
        yield item
| [
"331378255@qq.com"
] | 331378255@qq.com |
497ff7a37a21b8d7b26e76ad4a2070a35baf71cc | bb87afa0fd2f5466f282ba93779293449ae72e9f | /apps/article/tasks.py | 0377213159cd91d4dfd25488e3ce850c0d8f288e | [
"MIT"
] | permissive | xuechuance/website | 14d50e6c66f4315f5829f5a2707fc7bdf3925266 | 91a017ea26806136a89f12d8620a4d99676a7497 | refs/heads/master | 2020-05-27T06:18:36.403271 | 2019-05-21T04:28:02 | 2019-05-21T04:28:02 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,889 | py | from __future__ import absolute_import
import datetime
from configparser import ConfigParser
from time import sleep
import requests
from celery import shared_task
from django.core.mail import send_mail
from random import Random
import random
from apps.article.models import Headlines
from apps.user.models import VerifyCode
from website import settings
from website.celery import app
def random_str(randomlength=8):
    """Return a random alphanumeric string (used as a verification code).

    Characters are drawn uniformly, with replacement, from digits plus
    lower- and upper-case ASCII letters.

    Args:
        randomlength: number of characters to generate (default 8).

    Returns:
        The generated code as a string.
    """
    chars = "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
    # random.choice replaces the original manual index bookkeeping
    # (mis-spelled ``lenght``) and no longer shadows the built-in ``str``.
    code = "".join(random.choice(chars) for _ in range(randomlength))
    print(code)  # kept: the original also echoed the code to stdout
    return code
@app.task()
def send_register_email(email, username=None, token=None, send_type='register'):
    """Celery task: send a registration / password-reset / email-change mail.

    :param email: recipient address.
    :param username: display name, used only for ``register`` mails.
    :param token: activation token, used only for ``register`` mails.
    :param send_type: one of ``register``, ``forget``, ``update_email``;
        any other value sends nothing.
    """
    # A fresh 4-character code is generated for every call; it is only
    # persisted (VerifyCode) for the 'forget' and 'update_email' branches.
    code = random_str(4)
    email_title = ''
    email_body = ''
    if send_type =='register':
        # Account-activation mail: body contains a link built from
        # settings.DOMAIN and the caller-supplied token.
        email_title = '注册用户验证信息'
        email_body = "\n".join([u'{0},欢迎加入我的博客'.format(username), u'请访问该链接,完成用户验证,该链接1个小时内有效',
                                '/'.join([settings.DOMAIN, 'activate', token])])
        print('========发送邮件中')
        send_stutas = send_mail(email_title,email_body,settings.EMAIL_HOST_USER,[email])
        if send_stutas:
            print('========发送成功')
            pass
    elif send_type == 'forget':
        # Password reset: persist the code so a later request can verify it.
        VerifyCode.objects.create(code=code, email=email, send_type=send_type)
        email_title = '密码重置链接'
        email_body = "你的密码重置验证码为:{0}。如非本人操作请忽略,此验证码30分钟后失效。".format(code)
        print('========发送邮件中')
        send_stutas = send_mail(email_title, email_body, settings.EMAIL_HOST_USER, [email])
        if send_stutas:
            print('========发送成功')
            pass
    elif send_type =='update_email':
        # Email change: same persisted-code flow as 'forget'.
        VerifyCode.objects.create(code=code, email=email, send_type=send_type)
        email_title = '修改邮箱链接'
        email_body = "你的修改邮箱验证码为:{0}。如非本人操作请忽略,此验证码30分钟后失效。".format(code)
        print('========发送邮件中')
        send_stutas = send_mail(email_title, email_body, settings.EMAIL_HOST_USER, [email])
        if send_stutas:
            print('========发送成功')
            pass
@app.task()
def error_email(email, title=None, body=None):
    """Celery task: e-mail an error notification.

    Sends *body* under subject *title* from the configured host account
    to the single recipient *email*.
    """
    send_mail(title, body, settings.EMAIL_HOST_USER, [email])
@app.task()
def add():
    """Demo/debug Celery task: log a message, sleep five seconds, report success."""
    print('发送邮件到**************************************************************' )
    sleep(5) # pause five seconds
    print('success')
    return True
# Module-level configuration shared by the API tasks below; config.ini is
# resolved relative to the worker's current working directory.
conf = ConfigParser()
conf.read('config.ini')
@app.task()
def getApi():
    """Celery task: fetch tech headlines from the Juhe news API and bulk-insert
    them as Headlines rows; on any failure, e-mail the administrator."""
    print('正在获取数据...')
    #url = 'http://api01.idataapi.cn:8000/article/idataapi?KwPosition=3&catLabel1=科技&apikey={0}'.format(conf.get('iDataApi','key'))
    url = 'http://v.juhe.cn/toutiao/index?type=keji&key={0}'.format(conf.get('AppKey','key'))
    headers = {
        "Accept-Encoding": "gzip",
        "Connection": "close"
    }
    try:
        # NOTE(review): no timeout= on the request — a hung connection will
        # block this worker indefinitely; confirm whether that is acceptable.
        r = requests.get(url, headers=headers)
        if r.status_code == requests.codes.ok:
            dict_json = r.json()
            list_dict = []
            for item in dict_json['result']['data']:
                # Commented-out mapping for the previous iDataAPI provider.
                # obj = Headlines(
                #     url=item['url'],
                #     title=item['title'],
                #     category=item['catLabel1'],
                #     conent=item['content'],
                #     author_name=item['sourceType'],
                # )
                # NOTE(review): conent is filled with the *title*, not an
                # article body — presumably because this provider returns no
                # body text; confirm this is intentional.
                obj = Headlines(
                    url=item['url'],
                    title=item['title'],
                    category=item['category'],
                    conent=item['title'],
                    author_name=item['author_name'],
                )
                list_dict.append(obj)
            Headlines.objects.bulk_create(list_dict)
            print('数据添加成功')
    except Exception as e:
        # Deliberate best-effort: any failure is swallowed here and reported
        # to the administrator by mail instead of crashing the worker.
        print('数据添加失败===正在发生邮件通知管理员',e)
        error_email.delay('tarena_feng@126.com','抓取数据错误','{0}'.format(e))
        print('邮件发送成功')
@app.task()
def removeApi():
    """Celery task: purge Headlines rows older than one week.

    Deletes every Headlines record whose ``add_time`` is at or before
    seven days ago (``add_time__lte``), logging progress to stdout.
    """
    # Cutoff is exactly seven days before today.  (The original also
    # computed an unused "yesterday" date; that dead code is removed.)
    cur_date = datetime.datetime.now().date()
    day = cur_date - datetime.timedelta(days=7)
    print("=======正在删除7天前数据======")
    Headlines.objects.filter(add_time__lte=day).delete()
    print('======已删除=========')
| [
"tarena_feng@126.com"
] | tarena_feng@126.com |
742ffb8178826cbe1484ba6d6459345859b86065 | 820e321b20f3a6f1063fb28d83bcec55841c1eea | /python_sample/Chapter5/chapter5_5.py | 71b38e4d9965bc0906ecd13b05a6d1b3eed0c1ad | [] | no_license | WadaTakafumi/Sample | cb6a323ea193b2c2fee87b04d0cae20fef1e621f | b012d6330a73d9230e733c5eb06607d27aa9aef7 | refs/heads/master | 2023-01-27T16:01:28.161048 | 2019-08-13T06:10:53 | 2019-08-13T06:10:53 | 193,650,560 | 0 | 0 | null | 2023-01-19T13:41:02 | 2019-06-25T06:45:04 | Ruby | UTF-8 | Python | false | false | 127 | py | #辞書型
tel = {'jack': 4098, 'sape': 4139}
tel['guido'] = 4127
print(tel)
li=[('a',1),('b',2),('c',3)]
s=dict(li)
print(s) | [
"wdtk2525@gmail.com"
] | wdtk2525@gmail.com |
0209945db389ffcf041bf8356b57309837cca01c | bc233c24523f05708dd1e091dca817f9095e6bb5 | /bitmovin_api_sdk/models/profile_h262.py | b8e281c4665dfd372553426648bec8a362d66ae8 | [
"MIT"
] | permissive | bitmovin/bitmovin-api-sdk-python | e3d6cf8eb8bdad62cb83ec77c0fc4950b06b9cdd | b0860c0b1be7747cf22ad060985504da625255eb | refs/heads/main | 2023-09-01T15:41:03.628720 | 2023-08-30T10:52:13 | 2023-08-30T10:52:13 | 175,209,828 | 13 | 14 | MIT | 2021-04-29T12:30:31 | 2019-03-12T12:47:18 | Python | UTF-8 | Python | false | false | 194 | py | # coding: utf-8
from enum import Enum
from six import string_types, iteritems
from bitmovin_api_sdk.common.poscheck import poscheck_model
class ProfileH262(Enum):
    """H.262 (MPEG-2) encoder profile identifiers accepted by the API."""
    MPEG2_422 = "MPEG2_422"
| [
"openapi@bitmovin.com"
] | openapi@bitmovin.com |
27e87424929b5c8237e98b92155346589f22cff5 | f00ad57c98e554470a72511dda7a7bfd160aca19 | /others/test_compress_str.py | 21fe737a9d075590a8f39e8909d0acdd69b93853 | [] | no_license | fanzhangg/algorithm-problems | d60115210aaaffcd094b34b9db5b46dadf93fe9e | 43b111ad625f197ba0905abceab9ee4484284e08 | refs/heads/master | 2021-07-12T20:24:46.265700 | 2020-07-06T17:58:31 | 2020-07-06T17:58:31 | 171,220,135 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 211 | py | from unittest import TestCase
from compress_string import compress_str
class TestCompressStr(TestCase):
    """Unit tests for compress_string.compress_str."""

    def test_compress_str(self):
        # Each run of repeated characters collapses to <char><count>,
        # case-sensitively; single characters still get an explicit 1.
        self.assertEqual(compress_str("AAAAaaBCCCDDe"), "A4a2B1C3D2e1")
| [
"vanadiumzhang@gmail.com"
] | vanadiumzhang@gmail.com |
98f5dd3c6f7bfbe86c72dc575eab1857e19bb214 | d2b99e95f5080bf61bad0c6a4c1f33a9c87cb93c | /music_bg/logging.py | ff19a27d88fec868b3ac419e7c0e891e06e240ed | [
"MIT"
] | permissive | music-bg/music_bg | d5c9f562ed44e743b6ee4ca2e887fbe89d87f0b2 | 5b79eacb32506b6eda5861df4b5f71b611c5dfa3 | refs/heads/master | 2023-08-14T20:23:33.061661 | 2021-09-18T18:51:49 | 2021-09-18T18:51:49 | 405,702,931 | 3 | 0 | MIT | 2021-09-18T10:29:52 | 2021-09-12T17:05:23 | Python | UTF-8 | Python | false | false | 346 | py | from sys import stdout
from loguru import logger
from music_bg.config import LogLevel
def init_logger(level: LogLevel) -> None:
    """Configure music_bg logging.

    Removes loguru's default handlers and installs a single stdout sink at
    the requested level, with diagnostics and backtraces enabled.

    :param level: New log level.
    """
    logger.remove()
    logger.add(
        sink=stdout,
        level=level.value,
        diagnose=True,
        backtrace=True,
    )
| [
"win10@list.ru"
] | win10@list.ru |
1a2c1fd4567315112d871d2a553139e502020022 | a08f51b7e8c648391ebb1e66e74927444895f847 | /Practicas/Practica_19.py | 5d08b7005329db9402ed03d584705da506eea060 | [] | no_license | MarcoVelazquez/Tareas-Estructura-De-Datos | 6598becd1d72b06daace76a48189e6ba5000fa63 | 1b8e5d5404a3afb932e1ecb5571d52b564819ac6 | refs/heads/master | 2021-07-05T08:19:28.821894 | 2019-05-12T22:03:08 | 2019-05-12T22:03:08 | 146,939,530 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,527 | py |
#Listas enlazadas
i = 0
class Nodo(object):
    """A singly-linked-list node: a payload plus a link to the next node."""

    def __init__(self, data):
        # A freshly created node is not linked to anything yet.
        self.data, self.next = data, None
def push(q, i, p):
    """Prompt the user for a value and append it as a new node after *q*.

    Returns the updated (tail, last-node, count) triple.  When the list
    already holds 5 items it refuses the insert and re-enters the menu.
    """
    if i < 5:
        # NOTE(review): both branches of this if/else are identical — the
        # q.next check appears to be dead weight; confirm before removing.
        if q.next != None:
            print("Ingresa el dato")
            data = input()
            p = Nodo(data)
            q.next = p
            q = p
            i += 1
            return q, p, i
        else:
            print("Ingresa el dato")
            data = input()
            p = Nodo(data)
            q.next = p
            q = p
            i += 1
            return q, p, i
    else:
        print("Lista llena")
        # ``r`` and ``menu`` are resolved as module-level globals here, and
        # re-entering the menu from push grows the call stack recursively.
        return menu(r, q, i, p)
def peek_all(r, q, i):
    """Print every node's payload starting at the root *r*; returns 0.

    The *q* argument is overwritten immediately and only used as a cursor.
    """
    # NOTE(review): cont_local is never incremented, so the while condition
    # never terminates the loop by itself — termination relies entirely on
    # reaching a node whose .next is None.  Confirm before "fixing".
    cont_local = 0
    q = r
    while cont_local < i+1:
        print(q.data)
        if q.next != None:
            q = q.next
        else:
            return 0
def peek_last(r, q, p):
    """Print the payload of the most recently appended node *p*; returns 0.

    *r* and *q* are accepted for signature symmetry with the other
    list operations but are not used.
    """
    ultimo = p.data
    print(ultimo)
    return 0
def peek_first(r, q):
    """Print the payload of the first real node (the one right after the
    sentinel root *r*); returns 0.  *q* is unused."""
    primero = r.next
    print(primero.data)
    return 0
def peek_if(r, q, i):
    """Ask the user for a value and report whether it occurs in the list.

    Walks from the root *r*; returns 0 in every case.
    """
    # NOTE(review): as in peek_all, cont_local is never incremented, and the
    # value of the *last* node (the one with .next == None) is never
    # compared, so a match at the tail is silently missed — confirm.
    cont_local = 0
    q = r
    comp = input("Ingresa el dato a comprobar")
    while cont_local < i+1:
        if q.next != None:
            if comp == q.data:
                print("El dato esta en la lista")
                return 0
            else:
                print("-"*25)
                q = q.next
        else:
            return 0
def pop(r, q, i, p):
    """Ask the user for a value and unlink the first node holding it.

    Walks the list keeping three cursors: helpy_last (predecessor), q
    (candidate) and helpy_front (successor).  Returns the updated element
    count on a successful delete, or 0 when the value is absent or the
    user tries to delete the sentinel root.

    NOTE(review): returning 0 for "not found" is indistinguishable from a
    successful delete that empties the list — the caller assigns this
    return value back to its counter; confirm this is intended.
    """
    cont_local = 0
    q = r.next            # first real node
    helpy_last = r        # node before the candidate
    helpy_front = q.next  # node after the candidate
    comp = input("Ingresa el dato a eliminar --->")
    while cont_local < i+1:
        if q.next != None:
            if comp == r.data:
                # The sentinel root itself may never be removed.
                print("No se puede eliminar la Raiz")
                return 0
            else:
                if comp == q.data and comp != r.next.data:
                    # Middle-of-list match: bypass q and detach it.
                    helpy_last.next = helpy_front
                    q.next = None
                    i -=1
                    return i
                elif comp == r.next.data:
                    # Match on the first real node: re-point the root.
                    r.next = helpy_front
                    i -=1
                    print("Dato eliminado")
                    return i
                else:
                    # No match yet: advance all three cursors by one.
                    print("-"*25)
                    helpy_last = helpy_last.next
                    q = helpy_last.next
                    helpy_front = q.next
        else:
            if q.next == None and comp == q.data:
                # Match on the tail node: truncate the list at helpy_last.
                helpy_last.next = None
                q = r
                i -=1
                print("Dato eliminado")
                return i
            else:
                print("El dato no esta en la lista")
                return 0
# start: module state for the interactive session — ``r`` is the sentinel
# root node; ``q`` (tail cursor) and ``p`` (last created node) begin by
# pointing at it.
r = Nodo("Raiz")
q = r
p = r
def menu(r, q, i, p):
    """Interactive console menu driving the linked-list operations.

    Loops reading an option number until the user enters 0.  ``i`` is the
    current element count (the module-level ``i = 0`` defined near the top
    of the file seeds the first call).
    """
    while True:
        print("1.-Agregar un dato")
        print("2.-Imprimir la lista")
        print("3.-Imprimir el ultimo dato agregado")
        print("4.-Imprimir el primer dato agregado")
        print("5.-Comprobar si hay un dato en la lista")
        print("6.-Eliminar un dato")
        print("0.-Salir del programa")
        opcion = int(input())
        if opcion == 1:
            q,p,i = push(q,i,p)
        if opcion == 2:
            peek_all(r,q,i)
        if opcion == 3:
            peek_last(r,q,p)
        if opcion == 4:
            peek_first(r,q)
        if opcion == 5:
            peek_if(r,q,i)
        if opcion == 6:
            # NOTE(review): pop returns 0 when the value is not found, which
            # resets the element count here — confirm before relying on i.
            i = pop(r,q,i,p)
        if opcion == 0:
            print("Adios")
            return 0
# Script entry point: start the interactive menu.
menu(r,q,i,p)
"noreply@github.com"
] | noreply@github.com |
c85205ef73c8d7167609ae35ca227238328d5b73 | 17dcbf3e791c372294590a54aa9305c4b753b877 | /explosionPOS/cms/migrations/0010_auto_20160622_1522.py | 72a73e8656eef0bc4eaee7599f1e148542aae2ab | [] | no_license | ShogoKitajima/explosionPOS | 69aa6cbca9451241acadd86eaceb60856db557af | d70a162e7d90fa39050431974e861d203f81cd58 | refs/heads/master | 2021-01-21T14:44:03.056760 | 2016-09-20T04:31:41 | 2016-09-20T04:31:41 | 56,673,000 | 0 | 1 | null | 2016-04-28T07:24:50 | 2016-04-20T09:15:17 | null | UTF-8 | Python | false | false | 487 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.5 on 2016-06-22 06:22
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: make ``user.student_id`` a unique IntegerField
    (default 0, verbose name 'Student ID')."""

    dependencies = [
        ('cms', '0009_auto_20160511_1445'),
    ]

    operations = [
        migrations.AlterField(
            model_name='user',
            name='student_id',
            field=models.IntegerField(default=0, unique=True, verbose_name='Student ID'),
        ),
    ]
| [
"a3ynxo@gmail.com"
] | a3ynxo@gmail.com |
8aa6b75ec6073c1f06b3f84c8af964bb7fb2e76a | 299372c4931029db3e2a00c97b9730e292e5288e | /test/test_part2.py | 8c445c73d8621b31208f8d80b9b4032f6a610faa | [] | no_license | Yuyu-Ren/ECSE429 | c97c096fa923117326603d49829cb1513b94524a | d7a5c69ef35e5e9602ae31bd30f55299dad9d545 | refs/heads/main | 2023-01-15T07:13:02.304361 | 2020-10-27T01:32:06 | 2020-10-27T01:32:06 | 306,927,287 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 13,158 | py | import requests
import json
from support.assertions import assert_valid_json_schema, assert_valid_dictionary_schema, \
get_valid_dictionary_schema, get_valid_json_schema
def test_get_projects_return_code():
    """GET /projects should answer 200 OK."""
    r = requests.get(url="http://localhost:4567/projects")
    assert r.status_code == 200
def test_get_projects_return_payload():
    """GET /projects payload should match the stored JSON schema."""
    r = requests.get(url="http://localhost:4567/projects")
    # NOTE(review): this asserts the *return value* of the helper — confirm
    # assert_valid_json_schema returns a truthy value on success rather than
    # asserting internally and returning None.
    assert assert_valid_json_schema(r.json(), 'projects.get.json')
def test_head_projects_return_code():
    """HEAD /projects should answer 200 OK.

    Bug fix: the original issued a GET request, so this test merely
    duplicated test_get_projects_return_code and never exercised HEAD.
    """
    r = requests.head(url="http://localhost:4567/projects")
    assert r.status_code == 200
def test_head_projects_return_payload():
r = requests.head(url="http://localhost:4567/projects")
try:
r.json()
except json.decoder.JSONDecodeError:
assert True
return
assert False
def test_head_projects_headers():
r = requests.head(url="http://localhost:4567/projects").headers
assert r['Server'] == get_valid_dictionary_schema('projects.head.json')['Server'] and \
r['Content-Type'] == get_valid_dictionary_schema('projects.head.json')['Content-Type'] and \
r['Transfer-Encoding'] == get_valid_dictionary_schema('projects.head.json')['Transfer-Encoding']
def test_post_projects_return_code_without_id():
body = get_valid_json_schema('projects.post.withoutid.json')
r = requests.post(url="http://localhost:4567/projects", json=body)
assert r.status_code == 201
def test_post_projects_return_code_with_id():
body = get_valid_json_schema('projects.post.withid.json')
r = requests.post(url="http://localhost:4567/projects", json=body)
assert r.status_code == 400
def test_post_projects_return_payload_without_id():
body = get_valid_json_schema('projects.post.withoutid.json')
r = requests.post(url="http://localhost:4567/projects", json=body)
assert r.json() == get_valid_json_schema('projects.post.withoutid.return_payload.json')
def test_post_projects_return_payload_with_id():
body = get_valid_json_schema('projects.post.withid.json')
r = requests.post(url="http://localhost:4567/projects", json=body)
assert r.json() == get_valid_json_schema('projects.post.withid.return_payload.json')
def test_get_projects_id_existing_return_code():
r = requests.get(url="http://localhost:4567/projects/1")
assert r.status_code == 200
def test_get_projects_id_existing_return_payload():
r = requests.get(url="http://localhost:4567/projects/1")
assert r.json() == get_valid_json_schema('projects.id.get.existing.return_payload.json')
def test_get_projects_id_not_found_return_code():
r = requests.get(url="http://localhost:4567/projects/55")
assert r.status_code == 404
def test_get_projects_id_not_found_return_payload():
r = requests.get(url="http://localhost:4567/projects/55")
assert r.json() == get_valid_json_schema('projects.id.get.notfound.return_payload.json')
def test_head_projects_id_return_code():
r = requests.head(url="http://localhost:4567/projects/1")
assert r.status_code == 200
def test_head_projects_id_return_headers():
r = requests.head(url="http://localhost:4567/projects/1").headers
assert r['Server'] == get_valid_dictionary_schema('projects.id.head.json')['Server'] and \
r['Content-Type'] == get_valid_dictionary_schema('projects.id.head.json')['Content-Type'] and \
r['Transfer-Encoding'] == get_valid_dictionary_schema('projects.id.head.json')['Transfer-Encoding']
def test_post_projects_id_existing_return_code():
body = get_valid_json_schema('projects.id.post.existing.json')
r = requests.post(url="http://localhost:4567/projects/1", json=body)
assert r.status_code == 200
def test_post_projects_id_existing_return_payload():
body = get_valid_json_schema('projects.id.post.existing.json')
r = requests.post(url="http://localhost:4567/projects/1", json=body)
assert r.json() == get_valid_json_schema('projects.id.post.existing.return_payload.json')
def test_post_projects_id_notfound_return_code():
body = get_valid_json_schema('projects.id.post.existing.json')
r = requests.post(url="http://localhost:4567/projects/55", json=body)
assert r.status_code == 404
def test_post_projects_id_notfound_return_payload():
body = get_valid_json_schema('projects.id.post.notfound.json')
r = requests.post(url="http://localhost:4567/projects/55", json=body)
assert r.json() == get_valid_json_schema('projects.id.post.notfound.return_payload.json')
def test_put_projects_id_existing_return_code():
body = get_valid_json_schema('projects.id.put.existing.json')
r = requests.put(url="http://localhost:4567/projects/1", json=body)
assert r.status_code == 200
def test_put_projects_id_existing_return_payload():
body = get_valid_json_schema('projects.id.put.existing.json')
r = requests.put(url="http://localhost:4567/projects/1", json=body)
assert r.json() == get_valid_json_schema('projects.id.put.existing.return_payload.json')
def test_put_projects_id_notfound_return_code():
body = get_valid_json_schema('projects.id.put.existing.json')
r = requests.put(url="http://localhost:4567/projects/55", json=body)
assert r.status_code == 404
def test_put_projects_id_notfound_return_payload():
body = get_valid_json_schema('projects.id.put.notfound.json')
r = requests.put(url="http://localhost:4567/projects/55", json=body)
assert r.json() == get_valid_json_schema('projects.id.put.notfound.return_payload.json')
def test_delete_projects_id_existing_return_code():
r = requests.delete(url="http://localhost:4567/projects/1")
assert r.status_code == 200
def test_delete_projects_id_existing_return_payload():
r = requests.delete(url="http://localhost:4567/projects/1")
try:
r.json()
except ValueError:
assert True
return
assert False
def test_delete_projects_id_notfound_return_code():
r = requests.delete(url="http://localhost:4567/projects/55")
assert r.status_code == 404
def test_delete_projects_id_notfound_return_payload():
r = requests.delete(url="http://localhost:4567/projects/55")
assert r.json() == get_valid_json_schema('projects.id.delete.notfound.return_payload.json')
def test_get_projects_id_tasks_return_code():
r = requests.get(url="http://localhost:4567/projects/1/tasks")
assert r.status_code == 200
def test_get_projects_id_tasks_return_payload():
r = requests.get(url="http://localhost:4567/projects/1/tasks")
assert r.json() == get_valid_json_schema('projects.id.tasks.get.return_payload.json')
def test_head_projects_id_tasks_return_code():
r = requests.head(url="http://localhost:4567/projects/1/tasks")
assert r.status_code == 200
def test_head_projects_id_tasks_headers():
r = requests.head(url="http://localhost:4567/projects/1/tasks").headers
assert r['Server'] == get_valid_dictionary_schema('projects.id.tasks.head.json')['Server'] and \
r['Content-Type'] == get_valid_dictionary_schema('projects.id.tasks.head.json')['Content-Type'] and \
r['Transfer-Encoding'] == get_valid_dictionary_schema('projects.id.tasks.head.json')['Transfer-Encoding']
def test_post_projects_id_tasks_existing_return_code():
body = get_valid_json_schema('projects.id.tasks.post.existing.json')
r = requests.post(url="http://localhost:4567/projects/1/tasks", json=body)
assert r.status_code == 201
def test_post_projects_id_tasks_existing_return_payload():
body = get_valid_json_schema('projects.id.tasks.post.existing.json')
r = requests.post(url="http://localhost:4567/projects/1/tasks", json=body)
try:
r.json()
except ValueError:
assert True
return
assert False
def test_post_projects_id_tasks_bodyid_notfound_return_code():
body = get_valid_json_schema('projects.id.tasks.post.bodyid.notfound.json')
r = requests.post(url="http://localhost:4567/projects/1/tasks", json=body)
assert r.status_code == 404
def test_post_projects_id_tasks_bodyid_notfound_return_payload():
body = get_valid_json_schema('projects.id.tasks.post.bodyid.notfound.json')
r = requests.post(url="http://localhost:4567/projects/1/tasks", json=body)
assert r.json() == get_valid_json_schema('projects.id.tasks.post.bodyid.notfound.return_payload.json')
def test_post_projects_id_tasks_id_notfound_return_code():
body = get_valid_json_schema('projects.id.tasks.post.id.notfound.json')
r = requests.post(url="http://localhost:4567/projects/55/tasks", json=body)
assert r.status_code == 404
def test_post_projects_id_tasks_id_notfound_return_payload():
body = get_valid_json_schema('projects.id.tasks.post.id.notfound.json')
r = requests.post(url="http://localhost:4567/projects/55/tasks", json=body)
assert r.json() == get_valid_json_schema('projects.id.tasks.post.id.notfound.return_payload.json')
def test_delete_projects_id_tasks_id_existing_return_code():
r = requests.delete(url="http://localhost:4567/projects/1/tasks/2")
assert r.status_code == 200
def test_delete_projects_id_tasks_id_existing_return_payload():
r = requests.delete(url="http://localhost:4567/projects/1/tasks/2")
try:
r.json()
except ValueError:
assert True
return
assert False
def test_delete_projects_id_tasks_id_notfound_return_code():
r = requests.delete(url="http://localhost:4567/projects/1/tasks/5")
assert r.status_code == 404
def test_delete_projects_id_tasks_id_notfound_return_payload():
r = requests.delete(url="http://localhost:4567/projects/1/tasks/5")
assert r.json() == get_valid_json_schema('projects.id.tasks.id.notfound.json')
def test_delete_projects_id_tasks_id_wrong_projectid_return_code():
r = requests.delete(url="http://localhost:4567/projects/6/tasks/2")
assert r.status_code == 404
def test_delete_projects_id_tasks_id_wrong_projectid_return_payload():
r = requests.delete(url="http://localhost:4567/projects/6/tasks/2")
assert r.json() == get_valid_json_schema('projects.id.tasks.id.wrongprojectid.json')
def test_get_shutdown_return_code():
r = requests.get(url="http://localhost:4567/shutdown")
assert r.status_code == 200
def test_get_shutdown_notfound_return_code():
r = requests.get(url="http://localhost:4567/shutdown/dfgiufdshgusfdh")
assert r.status_code == 404
def test_get_projects_return_code_with_query_match():
r = requests.get(url="http://localhost:4567/projects?active=false")
assert r.status_code == 200
def test_get_projects_return_code_with_noquery_match():
r = requests.get(url="http://localhost:4567/projects?active=true")
assert r.status_code == 200
def test_get_projects_return_payload_with_query_match():
r = requests.get(url="http://localhost:4567/projects?active=false")
print(r.json())
print(get_valid_json_schema('projects.get.matchingquery.json'))
assert assert_valid_json_schema(r.json(), 'projects.get.matchingquery.json')
def test_get_projects_id_existing_return_code_with_query_match():
r = requests.get(url="http://localhost:4567/projects/1?active=false")
assert r.status_code == 200
def test_get_projects_id_existing_return_code_with_noquery_match():
r = requests.get(url="http://localhost:4567/projects/1?active=true")
assert r.status_code == 200
def test_get_projects_id_existing_return_payload_with_query_match():
r = requests.get(url="http://localhost:4567/projects/1?active=false")
assert r.json() == get_valid_json_schema('projects.id.get.existing.return_payload_querymatch.json')
def test_get_projects_id_existing_return_payload_with_noquery_match():
r = requests.get(url="http://localhost:4567/projects/1?active=true")
assert r.json() == get_valid_json_schema('projects.id.get.existing.return_payload_noquery.json')
def test_get_projects_id_not_found_return_code_with_query_match():
r = requests.get(url="http://localhost:4567/projects/55?active=false")
assert r.status_code == 404
def test_get_projects_id_not_found_return_payload_with_noquery_match():
r = requests.get(url="http://localhost:4567/projects/55?active=true")
assert r.json() == get_valid_json_schema('projects.id.get.notfound.return_payload_noquery.json')
def test_get_projects_id_tasks_return_code_with_query_match():
r = requests.get(url="http://localhost:4567/projects/1/tasks?doneStatus=false")
assert r.status_code == 200
def test_get_projects_id_tasks_query_match_return_payload_query_match():
r = requests.get(url="http://localhost:4567/projects/1/tasks?doneStatus=false")
assert r.json() == get_valid_json_schema('projects.id.tasks.get.return_payload_querymatch.json')
def test_get_projects_id_tasks_return_code_with_noquery_match():
r = requests.get(url="http://localhost:4567/projects/1/tasks?doneStatus=true")
assert r.status_code == 200
def test_get_projects_id_tasks_query_match_return_payload_noquery_match():
r = requests.get(url="http://localhost:4567/projects/1/tasks?doneStatus=true")
assert r.json() == get_valid_json_schema('projects.id.tasks.get.return_payload_noquery.json') | [
"8889046+Yuyu-Ren@users.noreply.github.com"
] | 8889046+Yuyu-Ren@users.noreply.github.com |
98621e5bca9df5c5b444a34970c6bbb20aa9d82d | 93261d9aa17d09d6eb4f5b25d8c8f49a915cd453 | /test/test.py | 49c26d482ceaeba4243d7c1a3a067e945c62ba4f | [] | no_license | strpc/upload_download_service | 7e6b80473c816a4e94ed1babdce612bdce5e16cf | 25d3737c3faa7d07e1f9418af0bd610ce9f42923 | refs/heads/master | 2022-07-03T01:46:28.027949 | 2020-05-11T08:57:46 | 2020-05-11T08:57:46 | 261,977,883 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,694 | py | import requests
import os
import unittest
url = f"http://localhost:8000/"
file = 'test.pdf'
class TestServer(unittest.TestCase):
    """Functional tests for the upload/check/download file service.

    Changes from the original: ``test_get_root_url`` now actually issues a
    GET request (it previously duplicated ``test_post_root_url`` by using
    POST), a dead ``response.text`` expression was removed, and the Russian
    docstrings were translated to English.
    """
    headers = {'Content-type': 'pdf', 'Filename': file}

    def test_post_upload(self):
        """Upload a file to the service via POST {url}/upload."""
        with open(os.path.join(os.getcwd(), file), 'rb') as f:
            data = f.read()
        response = requests.post(url + 'upload', data=data, headers=self.headers)
        result = ("{'status': 201, 'file is uploaded': 'test.pdf', "
                  "'link for check': '0.0.0.0:8000/check/test.pdf', "
                  "'link for download': '0.0.0.0:8000/download/test.pdf'}")
        self.assertEqual(201, response.status_code)
        self.assertEqual(result, response.content.decode('utf-8'))
        self.assertEqual(response.headers['Content-Type'], 'application/json')

    def test_post_without_headers(self):
        """POST request without the file-describing headers is rejected."""
        response = requests.post(url + 'upload')
        result = "{'status': 403, 'message': 'headers not found'}"
        self.assertEqual(403, response.status_code)
        self.assertEqual(result, response.content.decode('utf-8'))
        self.assertEqual(response.headers['Content-Type'], 'application/json')

    def test_post_without_attach(self):
        """POST request without an attached file body is rejected."""
        response = requests.post(url + 'upload', headers=self.headers)
        result = "{'status': 409, 'error': 'file not attached'}"
        self.assertEqual(409, response.status_code)
        self.assertEqual(response.headers['Content-Type'], 'application/json')
        self.assertEqual(result, response.content.decode('utf-8'))

    def test_post_bad_url(self):
        """POST request to a misspelled path returns 404."""
        response = requests.post(url + 'uplodad', headers=self.headers)
        result = "{'status': 404, 'message': 'page not found'}"
        self.assertEqual(result, response.content.decode('utf-8'))
        self.assertEqual(404, response.status_code)
        self.assertEqual(response.headers['Content-Type'], 'application/json')

    def test_post_root_url(self):
        """POST request to the root path returns 404."""
        response = requests.post(url, headers=self.headers)
        result = "{'status': 404, 'message': 'page not found'}"
        self.assertEqual(result, response.content.decode('utf-8'))
        self.assertEqual(404, response.status_code)
        self.assertEqual(response.headers['Content-Type'], 'application/json')

    def test_get_check_file(self):
        """GET {url}/check/<file> confirms an uploaded file is present."""
        response = requests.get(url + 'check/' + file, headers=self.headers)
        result = "{'status': 200, 'file': 'test.pdf', 'message': 'is ready to download'}"
        self.assertEqual(200, response.status_code)
        self.assertEqual(result, response.content.decode('utf-8'))
        self.assertEqual(response.headers['Content-Type'], 'application/json')

    def test_get_check_file_not_found(self):
        """GET {url}/check/<missing file> reports the file is absent."""
        response = requests.get(url + 'check/' + 'not_file', headers=self.headers)
        result = "{'status': 400, 'file': 'not_file', 'message': 'is not found'}"
        self.assertEqual(400, response.status_code)
        self.assertEqual(result, response.content.decode('utf-8'))
        self.assertEqual(response.headers['Content-Type'], 'application/json')

    def test_get_check_url(self):
        """GET {url}/check (no file name) returns 404."""
        response = requests.get(url + 'check/', headers=self.headers)
        result = "{'status': 404, 'message': 'page not found'}"
        self.assertEqual(404, response.status_code)
        self.assertEqual(result, response.content.decode('utf-8'))
        self.assertEqual(response.headers['Content-Type'], 'application/json')

    def test_get_root_url(self):
        """GET request to the root path returns 404.

        Bug fix: the original used requests.post here, duplicating
        test_post_root_url instead of testing GET.
        """
        response = requests.get(url, headers=self.headers)
        result = "{'status': 404, 'message': 'page not found'}"
        self.assertEqual(result, response.content.decode('utf-8'))
        self.assertEqual(404, response.status_code)
        self.assertEqual(response.headers['Content-Type'], 'application/json')

    def test_get_file_not_found_download(self):
        """GET {url}/download/<missing file> reports the file is absent."""
        response = requests.get(url + 'download/' + 'not_file', headers=self.headers)
        result = "{'status': 400, 'file': 'not_file', 'message': 'is not found'}"
        self.assertEqual(400, response.status_code)
        self.assertEqual(result, response.content.decode('utf-8'))
        self.assertEqual(response.headers['Content-Type'], 'application/json')

    def test_get_file(self):
        """Download an uploaded file as an octet stream."""
        response = requests.get(url + 'download/' + file, headers=self.headers)
        self.assertEqual(200, response.status_code)
        self.assertEqual(response.headers['Content-Type'], 'application/octet-stream')
        self.assertNotEqual('', response.content)
if __name__ == "__main__":
unittest.main() | [
"renat.kruchinskij@gmail.com"
] | renat.kruchinskij@gmail.com |
61720acbc0b351390bba5bd82dedafd6d721e1af | 0eff560cadef67ad747fdc4e7c3d91a163df4571 | /pyarduino.py | 98e859eb0ae4cb9de2cb03b0c2b1890289abd46c | [] | no_license | AlexanderMelian/ProjectPAM | d87dfff06e7bcc945735677142bbcddfc28995a9 | efd5211fc824398958db54dcc87acec4bf4f543f | refs/heads/master | 2022-12-04T08:40:14.957234 | 2020-08-25T16:39:26 | 2020-08-25T16:39:26 | 281,609,288 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 201 | py | from pyfirmata import Arduino, util
import user_password
board = Arduino(user_password.port())#change the port
def ledOn():
board.digital[13].write(1)
def ledOff():
board.digital[13].write(0) | [
"alexandermelian@outlook.com"
] | alexandermelian@outlook.com |
9673a3a41a73f9b41461528237414504f9ca82fa | f0971b5d82ecb9d801957a823845cf6ce0321fe9 | /wpm_api/monitor.py | f4fdb6e14d3caf2ac0b8fc4c17c49d1396c4d3af | [
"Apache-2.0"
] | permissive | nikhilkulkarnisr/wpm_api_client | 200295bec40d308b2b0d1dac1b99952d01cdacb3 | 83809e9d101b5d6f6f3313329d1a9fc9a770f816 | refs/heads/master | 2020-12-13T22:35:30.700336 | 2015-05-20T18:29:28 | 2015-05-20T18:29:28 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,984 | py | # Copyright 2000 - 2015 NeuStar, Inc.All rights reserved.
# NeuStar, the Neustar logo and related names and logos are registered
# trademarks, service marks or tradenames of NeuStar, Inc. All other
# product names, company names, marks, logos and symbols may be trademarks
# of their respective owners.
import json
class Monitor:
def __init__(self, connection, id):
self.connection = connection
self.id = id
self.service = "/monitor/1.0"
def create(self, name, interval, locations, **kwargs):
"""Creates a new monitor and returns the monitor id of the newly created monitor. Name,
interval, testScript and locations are required. Use the Get Monitoring Locations api to
retrieve a list of monitoring locations.
Arguments:
name -- The name of the monitor.
interval -- How often the monitoring script will run for each of the locations.
locations -- A CSV list of locations that this monitor should run from.
Keyword Arguments:
test_script -- The id of the test script that this monitor should run.
description -- A description of what this monitor is for.
alert_policy -- The id of the alert policy that this monitor should run.
browser -- Specifies the browser type that this monitor should use (IE, CHROME or FF).
Note: IE is available for Enterprise customers only.
active -- True or False. Enables or disables this monitor from taking samples.
type -- Set to network monitor type such as 'dns'. See related settings below. Leave this
blank for script-based monitors. Note: this interface will not allow you to test network
monitor creation. Please use your API client.
DNS
PING
SMTP
POP
PORT
dns_settings -- A JSON object containing all DNS-related settings:
{
"timeout": int,
"lookups": array
}
The "lookups" array contains JSON objects with this format:
{
"lookupType": string ("A" or "AAAA"),
"authoritative": boolean,
"hostname": string,
"dnsServer": string,
"expectedIps": string of comma-separated IP addresses
}
ping_settings -- A JSON object containing all PING-related settings:
{
"timeout": int,
"host": string
}
pop_settings -- A JSON object containing all POP-related settings:
{
"timeout": int,
"server": string,
"username": string,
"password": string
}
port_settings -- A JSON object containing all PORT-related settings:
{
"timeout": int,
"server": string,
"port": int,
"protocol": string ("tcp" or "udp"),
"command": string,
"expected_response": string,
"data_format": string ("ascii" or "raw")
}
smtp_settings -- A JSON object containing all SMTP-related settings:
{
"timeout": int,
"server": string,
"email": string
}
"""
new_monitor = {"name": name, "interval": interval, "locations": locations}
if "test_script" in kwargs:
new_monitor.update({"testScript": kwargs['test_script']})
if "description" in kwargs:
new_monitor.update({"description": kwargs['description']})
if "alert_policy" in kwargs:
new_monitor.update({"alertPolicy": kwargs['alert_policy']})
if "browser" in kwargs:
new_monitor.update({"browser": kwargs['browser']})
if "active" in kwargs:
new_monitor.update({"active": kwargs['active']})
if "type" in kwargs:
new_monitor.update({"type": kwargs['type']})
if "dns_settings" in kwargs:
new_monitor.update({"dnsSettings": kwargs['dns_settings']})
if "ping_settings" in kwargs:
new_monitor.update({"pingSettings": kwargs['ping_settings']})
if "pop_settings" in kwargs:
new_monitor.update({"popSettings": kwargs['pop_settings']})
if "port_settings" in kwargs:
new_monitor.update({"portSettings": kwargs['port_settings']})
if "smtp_settings" in kwargs:
new_monitor.update({"smtpSettings": kwargs['smtp_settings']})
return self.connection.post(self.service, json.dumps(new_monitor))
def list(self):
"""Retrieves a list of all monitors associated with your account, along with information about
each. The monitor id that is returned is used to make other api calls."""
return self.connection.get(self.service)
def get(self):
"""Retrieves information for a specific monitor associated with your account. The monitor id
that is returned is used to make other api calls."""
if self.id is None:
raise Exception("Missing id: This API requires a monitor ID be supplied.")
return self.connection.get(self.service + "/" + self.id)
def update(self, **kwargs):
"""Change some or all of the parameters of an existing monitor. Requires the monitor ID
retrieved from the List Monitors api.
Keyword Arguments:
name -- The name of the monitor.
description -- A description of what this monitor is for
interval -- How often the monitoring script will run for each of the locations.
test_script -- The id of the test script that this monitor should run.
locations -- A CSV list of locations that this monitor should run from.
alert_policy -- The id of the alert policy that this monitor should run.
browser -- Specifies the browser type that this monitor should use. Note: IE is available for
Enterprise customers only.
active -- Enables or disables this monitor from taking samples.
"""
if self.id is None:
raise Exception("Missing id: This API requires a monitor ID be supplied.")
update_monitor = {}
if "name" in kwargs:
update_monitor.update({"name": kwargs['name']})
if "description" in kwargs:
update_monitor.update({"description": kwargs['description']})
if "interval" in kwargs:
update_monitor.update({"interval": kwargs['interval']})
if "test_script" in kwargs:
update_monitor.update({"testScript": kwargs['test_script']})
if "locations" in kwargs:
update_monitor.update({"locations": kwargs['locations']})
if "alert_policy" in kwargs:
update_monitor.update({"alertPolicy": kwargs['alert_policy']})
if "browser" in kwargs:
update_monitor.update({"browser": kwargs['browser']})
if "active" in kwargs:
update_monitor.update({"active": kwargs['active']})
return self.connection.put(self.service + "/" + self.id, json.dumps(update_monitor))
def delete(self):
"""Deletes the given monitor, stopping it from monitoring and removing all its monitoring
data."""
if self.id is None:
raise Exception("Missing id: This API requires a monitor ID be supplied.")
return self.connection.delete(self.service + "/" + self.id)
def samples(self):
"""Returns all samples associated to this monitor for a given time period. This data is
returned at a high level, which timing for the overall sample. To get the details for the
specific sample, call the get raw sample data api. At a maximum, this api will return 2000
samples. If there are more than 2000 results returned, the 'more' field will be set to true
and you can make another api call specifying an offset which would be equal to the
number of results returned in the first api call plus the offset of that call."""
if self.id is None:
raise Exception("Missing id: This API requires a monitor ID be supplied.")
return self.connection.get(self.service + "/" + self.id + "/sample")
def raw_sample_data(self, sample_id):
"""Retrieve the raw, HTTP Archive (HAR) data for a particular sample"""
if self.id is None:
raise Exception("Missing id: This API requires a monitor ID be supplied.")
return self.connection.get(self.service + "/" + self.id + "/sample/" + sample_id)
def aggregate_sample_data(self):
"""Retrieves the aggregated sample information for a given period of time. You can
choose to aggregate the data for each hour or each day. This is more effecient than
getting all the individual samples for a period of time and performing the aggregation
yourself."""
if self.id is None:
raise Exception("Missing id: This API requires a monitor ID be supplied.")
return self.connection.get(self.service + "/" + self.id + "/aggregate")
def summary(self):
"""The monitor summary api returns all of the data that is found when looking at your list of
monitors in the web portal. This includes things such as the average load time, sample
count and uptime for the day, week, month or year, the last time an error occurred, and
the last error message."""
if self.id is None:
raise Exception("Missing id: This API requires a monitor ID be supplied.")
return self.connection.get(self.service + "/" + self.id + "/summary")
def locations(self):
"""Get a list of all monitoring locations available."""
return self.connection.get(self.service + "/locations") | [
"shane.barbetta@neustar.biz"
] | shane.barbetta@neustar.biz |
a77d8dd80a8d0d6ac64a82fb0f81156f195acefd | 86e23cea4bee8580a804d4436f291f695753b5ec | /custom_chars.py | 9dbeb5c9cf65a413619d2a6b23a42ce632d99f5d | [] | no_license | kac-kon/rpi-lib | 844f503ecd8039f784e0316d9f5429f4b8f106f6 | daae5032320e3b08882df81f94b66514ed582234 | refs/heads/master | 2023-07-03T22:10:50.863450 | 2021-05-14T18:27:07 | 2021-05-14T18:27:07 | 352,186,523 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,085 | py | arrows = [
# arrow_up
[0x00,
0x04,
0x0E,
0x15,
0x04,
0x04,
0x00,
0x00],
# arrow_up_right
[0x00,
0x0F,
0x03,
0x05,
0x09,
0x10,
0x00,
0x00],
# arrow_right
[0x00,
0x04,
0x02,
0x1F,
0x02,
0x04,
0x00,
0x00],
# arrow_down_right
[0x00,
0x10,
0x09,
0x05,
0x03,
0x0F,
0x00,
0x00],
# arrow_down
[0x00,
0x04,
0x04,
0x15,
0x0E,
0x04,
0x00,
0x00],
# arrow_down_left
[0x00,
0x01,
0x12,
0x14,
0x18,
0x1E,
0x00,
0x00],
# arrow_left
[0x00,
0x04,
0x08,
0x1F,
0x08,
0x04,
0x00,
0x00],
# arrow_up_left
[0x00,
0x1E,
0x18,
0x14,
0x12,
0x01,
0x00,
0x00]]
celc = [0x18,
0x1B,
0x04,
0x04,
0x04,
0x04,
0x03,
0x00]
| [
"kacper.kontny@gmail.com"
] | kacper.kontny@gmail.com |
681f7da9bfce9e9c71273cf5a965b0c1703c1575 | ad9286e8f45aceffa8f8a52c2a671a744444b43b | /lib.py | fd814d2409280963a58d6e3595b5ac873871407d | [] | no_license | jocalte/wumpus | 36e9f4fa2a51990cb7e024c67a6acecf60a38c5f | ebeb9394efedcb6e8c4f879c8740a884e6046b43 | refs/heads/master | 2021-01-05T07:53:30.956330 | 2020-02-16T18:33:38 | 2020-02-16T18:33:38 | 240,941,952 | 0 | 0 | null | 2020-02-16T18:20:31 | 2020-02-16T18:20:30 | null | UTF-8 | Python | false | false | 7,062 | py | """
José Miguel Calderón Terol
15/02/2020
version 1.0
"""
import numpy as np
import copy
class Mapa(object):
"""
Clase que crea el mapa, recoje los movimientos de avance del jugador y comprueba el disparo.
Los objetos se almacenan en listas que contienen sus coordenadas.
"""
def __init__(self, size, n_pool):
self.posicion_salida = []
self.posicion_pozo = []
self.posicion_brisa = []
self.posicion_wumpus = []
self.posicion_hedor = []
self.posicion_jugador = []
self.posicion_tesoro = []
self.size = size
self.n_pool = n_pool
# casillas libres, según se crea un objeto se saca su casilla para no crear dos objetos en el mismo lugar
self.casillas = [i for i in np.arange(size**2)]
if self.size < 3:
raise Exception("valor mínimo 3 de lado")
# calculo índice
def indice(elemento):
if elemento < self.size:
y, x = 0, elemento
else:
y = int(elemento // self.size)
x = elemento % self.size
return [x, y]
# posición salida
self.posicion_salida = [0, 0]
self.casillas.remove(0)
self.casillas.remove(1)
self.casillas.remove(self.size)
# posición inicial
self.posicion_jugador = [0, 0]
# posición pozos
for _ in range(self.n_pool):
if len(self.casillas) < 3:
raise Exception("demasiados pozos")
_posicion = self.casillas[np.random.randint(len(self.casillas))]
self.posicion_pozo.append(indice(_posicion))
self.casillas.remove(_posicion)
# posición brisa
for ind in self.posicion_pozo:
self.posicion_brisa = self.posicion_brisa + FuncionesAuxiliares.generador_posiciones_alrededor(ind,
self.size)
# posición wumpus
posicion = self.casillas[np.random.randint(len(self.casillas))]
self.posicion_wumpus = indice(posicion)
self.casillas.remove(posicion)
# posición hedor
self.posicion_hedor = FuncionesAuxiliares.generador_posiciones_alrededor(self.posicion_wumpus, self.size)
# posición tesoro
posicion = self.casillas[np.random.randint(len(self.casillas))]
self.posicion_tesoro = indice(posicion)
self.casillas.remove(posicion)
print()
def movimiento_pj(self, ori):
"""
avanza una casilla en la orientación indicado por ori, siempre que no nos salgamos del tablero
:param ori: orientacion
:return:
"""
if ori == 0 and self.posicion_jugador[1] < self.size - 1:
self.posicion_jugador[1] = self.posicion_jugador[1] + 1
elif ori == 1 and self.posicion_jugador[0] < self.size - 1:
self.posicion_jugador[0] = self.posicion_jugador[0] + 1
elif ori == 2 and self.posicion_jugador[1] > 0:
self.posicion_jugador[1] = self.posicion_jugador[1] - 1
elif ori == 3 and self.posicion_jugador[0] > 0:
self.posicion_jugador[0] = self.posicion_jugador[0] - 1
else:
print("pared alcanzada, orden no realizada")
def check_disparo(self, ori, wumpus):
acierto = False
if wumpus.vivo:
if ori == 0: # dispara hacia arriba
if self.posicion_wumpus[0] == self.posicion_jugador[0] and \
self.posicion_wumpus[1] > self.posicion_jugador[1]:
acierto = True
if ori == 1: # dispara hacia la derecha
if self.posicion_wumpus[1] == self.posicion_jugador[1] and \
self.posicion_wumpus[0] > self.posicion_jugador[0]:
acierto = True
if ori == 2: # dispara hacia abajo
if self.posicion_wumpus[0] == self.posicion_jugador[0] and \
self.posicion_wumpus[1] < self.posicion_jugador[1]:
acierto = True
if ori == 3: # dispara hacia la izquierda
if self.posicion_wumpus[1] == self.posicion_jugador[1] and \
self.posicion_wumpus[0] < self.posicion_jugador[0]:
acierto = True
if acierto:
wumpus.mata_wumpus()
return True
else:
return False
class Wumpus(object):
def __init__(self):
self.vivo = True
self.orientacion = 0 # 0 arriba, 1 derecha, 2 abajo, 3 izquierda
def mata_wumpus(self):
self.vivo = False
def movimiento(self, mapa):
if self.vivo:
if np.random.randint(2) == 0: # aleatoriamente decide si girar o avanzar
self.girar()
else:
self.avanza(mapa)
def girar(self):
if np.random.randint(2) == 0: # aleatoriamente decide si girar a la izquierda o a la derecha
self.orientacion = (self.orientacion - 1) % 4
else:
self.orientacion = (self.orientacion + 1) % 4
def avanza(self, mapa):
posicion = copy.deepcopy(mapa.posicion_wumpus)
if self.orientacion == 0 and mapa.posicion_wumpus[1] < mapa.size - 1:
posicion[1] = posicion[1] + 1 # me guardo la nueva posicion actualizada
if self.orientacion == 1 and mapa.posicion_wumpus[0] < mapa.size - 1:
posicion[0] = posicion[0] + 1 # me guardo la nueva posicion actualizada
if self.orientacion == 2 and mapa.posicion_wumpus[1] > 0:
posicion[1] = posicion[1] - 1 # me guardo la nueva posicion actualizada
if self.orientacion == 3 and mapa.posicion_wumpus[0] > 0:
posicion[0] = posicion[0] - 1 # me guardo la nueva posicion actualizada
if posicion not in mapa.posicion_pozo: # checkeo la validez
mapa.posicion_wumpus = posicion # actualizo si no tropieza con nada
mapa.posicion_hedor = FuncionesAuxiliares.generador_posiciones_alrededor(posicion, mapa.size)
class Personaje(object):
def __init__(self, ammo):
self.ammo = ammo
self.orientacion = 0 # 0 arriba, 1 derecha, 2 abajo, 3 izquierda
self.tesoro = False # True cuando encuentre al tesoro
def disparo(self):
self.ammo -= 1
def giro_derecha(self):
self.orientacion = (self.orientacion + 1) % 4
def giro_izquierda(self):
self.orientacion = (self.orientacion - 1) % 4
def coger_tesoro(self):
self.tesoro = True
class FuncionesAuxiliares:
@staticmethod
def generador_posiciones_alrededor(ind, size):
salida = []
x, y = ind[0], ind[1]
if x > 0:
salida.append([x - 1, y])
if y > 0:
salida.append([x, y - 1])
if x < size - 1:
salida.append([x + 1, y])
if y < size - 1:
salida.append([x, y + 1])
return salida
| [
"jocalte85@gmail.com"
] | jocalte85@gmail.com |
62194023eeccb98aa40ee194047fb1ed24d608be | 2dc09b8eded668359714b0aa15ef60089c8886c4 | /gunclub/gunclub/wsgi.py | 68bb642eb37a7193bb87e01e75498a4af71f15fa | [
"Apache-2.0"
] | permissive | wiliamsouza/gunclub | 9bc3af70f9bda0f6abdeab76a67f88bf000ec357 | b5a64e380d08b1400eecf8eeab62db1704c1e94a | refs/heads/master | 2020-06-08T00:06:39.366403 | 2012-10-25T06:32:59 | 2012-10-25T06:32:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,136 | py | """
WSGI config for gunclub project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "gunclub.settings")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
| [
"wiliamsouza83@gmail.com"
] | wiliamsouza83@gmail.com |
4c95a2d345027b537dac2498ebacc36cb7aaf94c | 60991c0f938dbcf761d5268de03b80a4e0648c30 | /config.py | 5ed6c57508c440f953c88a8c5894bf3bf3afed26 | [] | no_license | dannysim01/web_apps_bert_sentiment | e77a52b70074b8c53862275a458e469867aaa19c | ac403acfff159429d02cb790ea11fa1170e59807 | refs/heads/master | 2023-04-02T21:55:59.165530 | 2020-07-14T21:18:29 | 2020-07-14T21:18:29 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 331 | py | import transformers
MAX_LEN = 512
TRAIN_BATCH_SIZE = 8
VALID_BATCH_SIZE = 4
EPOCHS = 10
# ACCUMULATION = 2
BERT_PATH = "../input/bert_base_uncased/"
MODEL_PATH = "bert_sentiment_model.bin"
TRAINING_FILE = "../input/IMDB Dataset.csv"
TOKENIZER = transformers.BertTokenizer.from_pretrained(BERT_PATH, do_lower_case=True)
| [
"dannysimon11@gmail.com"
] | dannysimon11@gmail.com |
21ee44b6155df6c86db7afac320d841dd0a6eea7 | 1d928c3f90d4a0a9a3919a804597aa0a4aab19a3 | /python/zulip/2016/4/realm_filters.py | 3ebd74a423ccd5d544621ba6011b7915cf5d5e90 | [] | no_license | rosoareslv/SED99 | d8b2ff5811e7f0ffc59be066a5a0349a92cbb845 | a062c118f12b93172e31e8ca115ce3f871b64461 | refs/heads/main | 2023-02-22T21:59:02.703005 | 2021-01-28T19:40:51 | 2021-01-28T19:40:51 | 306,497,459 | 1 | 1 | null | 2020-11-24T20:56:18 | 2020-10-23T01:18:07 | null | UTF-8 | Python | false | false | 2,844 | py | from __future__ import absolute_import
from __future__ import print_function
from optparse import make_option
from django.core.management.base import BaseCommand
from zerver.models import RealmFilter, all_realm_filters, get_realm
from zerver.lib.actions import do_add_realm_filter, do_remove_realm_filter
import sys
class Command(BaseCommand):
help = """Create a link filter rule for the specified domain.
NOTE: Regexes must be simple enough that they can be easily translated to JavaScript
RegExp syntax. In addition to JS-compatible syntax, the following features are available:
* Named groups will be converted to numbered groups automatically
* Inline-regex flags will be stripped, and where possible translated to RegExp-wide flags
Example: python2.7 manage.py realm_filters --realm=zulip.com --op=add '#(?P<id>[0-9]{2,8})' 'https://trac.humbughq.com/ticket/%(id)s'
Example: python2.7 manage.py realm_filters --realm=zulip.com --op=remove '#(?P<id>[0-9]{2,8})'
Example: python2.7 manage.py realm_filters --realm=zulip.com --op=show
"""
def add_arguments(self, parser):
parser.add_argument('-r', '--realm',
dest='domain',
type=str,
required=True,
help='The name of the realm to adjust filters for.')
parser.add_argument('--op',
dest='op',
type=str,
default="show",
help='What operation to do (add, show, remove).')
parser.add_argument('pattern', metavar='<pattern>', type=str, nargs='?', default=None,
help="regular expression to match")
parser.add_argument('url_format_string', metavar='<url pattern>', type=str, nargs='?',
help="format string to substitute")
def handle(self, *args, **options):
realm = get_realm(options["domain"])
if options["op"] == "show":
print("%s: %s" % (realm.domain, all_realm_filters().get(realm.domain, [])))
sys.exit(0)
pattern = options['pattern']
if not pattern:
self.print_help("python2.7 manage.py", "realm_filters")
sys.exit(1)
if options["op"] == "add":
url_format_string = options['url_format_string']
if not url_format_string:
self.print_help("python2.7 manage.py", "realm_filters")
sys.exit(1)
do_add_realm_filter(realm, pattern, url_format_string)
sys.exit(0)
elif options["op"] == "remove":
do_remove_realm_filter(realm, pattern)
sys.exit(0)
else:
self.print_help("python2.7 manage.py", "realm_filters")
sys.exit(1)
| [
"rodrigosoaresilva@gmail.com"
] | rodrigosoaresilva@gmail.com |
80c24746c6073d583a09c6dabbafb81559a1f716 | 04f4b5c2e6b283d9ef7198425c3bf926623554d2 | /One_Chicken_Per_Person/one_chicken.py | 81ae99333e88a83795c3f19f3e9580f2d219131b | [] | no_license | gabisala/Kattis | a00e96aab4dbe8033e0e110f5224170b8ad473a3 | 686817fb90b39e0126b4c8b0280f8a1f10c294ee | refs/heads/master | 2021-07-07T11:53:30.931347 | 2017-10-03T15:40:07 | 2017-10-03T15:40:07 | 105,471,152 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 590 | py |
# -*- coding:utf-8 -*-
import sys
# Read data
data = map(int, sys.stdin.readline().split())
people, chicken = data
# Output message with the ratio(people, chicken)
if people > chicken:
if people - chicken == 1:
print 'Dr. Chaz needs 1 more piece of chicken!'
else:
print 'Dr. Chaz needs {} more pieces of chicken!'.format(people - chicken)
elif people < chicken:
if chicken - people == 1:
print 'Dr. Chaz will have 1 piece of chicken left over!'
else:
print 'Dr. Chaz will have {} pieces of chicken left over!'.format(chicken - people)
| [
"noreply@github.com"
] | noreply@github.com |
8809b90d42ff815f5a5ac1384caeecf159676a44 | 127e9753d54399770ebe9b8124770be305bf8128 | /azure-devops/azext_devops/test/team/test_security_group.py | 58cc04dca06d7e0be16f61b1a9d7a5aeedf02bd6 | [
"MIT"
] | permissive | sandeepchadda/azure-devops-cli-extension | 223111e7ca62414b89dcadbbf724d49b40f19476 | d87336b848e07d4ac7e25c454c35304e4abf2e10 | refs/heads/master | 2020-05-27T22:39:16.937583 | 2019-05-27T08:59:54 | 2019-05-27T08:59:54 | 188,807,782 | 0 | 0 | null | 2019-05-27T08:55:32 | 2019-05-27T08:55:32 | null | UTF-8 | Python | false | false | 10,083 | py | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
import unittest
try:
# Attempt to load mock (works on Python 3.3 and above)
from unittest.mock import patch
except ImportError:
# Attempt to load mock (works on Python version below 3.3)
from mock import patch
from azext_devops.devops_sdk.v5_0.graph.graph_client import (GraphClient)
from azext_devops.dev.team.security_group import (list_groups,
get_group,
create_group,
delete_group,
update_group,
add_membership,
remove_membership,
list_memberships)
from azext_devops.dev.common.services import clear_connection_cache
from azext_devops.test.utils.authentication import AuthenticatedTests
from azext_devops.test.utils.helper import get_client_mock_helper
class TestSecurityGroupMethods(AuthenticatedTests):
_TEST_DEVOPS_ORGANIZATION = 'https://someorganization.visualstudio.com'
_TEST_PROJECT_DESCRIPTOR = 'scp.someRandomDescriptorForProject'
_OFF = 'Off'
_GROUP_MGMT_CLIENT_LOCATION = 'azext_devops.devops_sdk.v5_0.graph.graph_client.GraphClient.'
_TEST_GROUP_DESCRIPTOR = 'vssgp.someRandomDescriptorForGroup'
_TEST_GROUP_NAME = 'New test security group'
_TEST_GROUP_DESCRIPTION = 'Some test description'
_TEST_USER_DESCRIPTOR = 'aad.someRandomDescriptorForUser'
def setUp(self):
self.authentication_setup()
self.authenticate()
self.get_patch_op_patcher = patch('azext_devops.dev.team.security_group._create_patch_operation')
self.list_groups_patcher = patch(self._GROUP_MGMT_CLIENT_LOCATION + 'list_groups')
self.get_group_patcher = patch(self._GROUP_MGMT_CLIENT_LOCATION + 'get_group')
self.create_group_patcher = patch(self._GROUP_MGMT_CLIENT_LOCATION + 'create_group')
self.get_descriptor_patcher = patch(self._GROUP_MGMT_CLIENT_LOCATION + 'get_descriptor')
self.delete_group_patcher = patch(self._GROUP_MGMT_CLIENT_LOCATION + 'delete_group')
self.update_group_patcher = patch(self._GROUP_MGMT_CLIENT_LOCATION + 'update_group')
self.list_memberships_patcher = patch(self._GROUP_MGMT_CLIENT_LOCATION + 'list_memberships')
self.lookup_subjects_patcher = patch(self._GROUP_MGMT_CLIENT_LOCATION + 'lookup_subjects')
self.get_project_patcher = patch('azext_devops.devops_sdk.v5_0.core.core_client.CoreClient.get_project')
self.add_membership_patcher = patch(self._GROUP_MGMT_CLIENT_LOCATION + 'add_membership')
self.remove_membership_patcher = patch(self._GROUP_MGMT_CLIENT_LOCATION + 'remove_membership')
self.check_membership_patcher = patch(self._GROUP_MGMT_CLIENT_LOCATION + 'check_membership_existence')
self.get_client = patch('azext_devops.devops_sdk.connection.Connection.get_client', new=get_client_mock_helper)
self.mock_get_client = self.get_client.start()
self.mock_list_groups = self.list_groups_patcher.start()
self.mock_get_group = self.get_group_patcher.start()
self.mock_get_descriptor= self.get_descriptor_patcher.start()
self.mock_create_group = self.create_group_patcher.start()
self.mock_delete_group = self.delete_group_patcher.start()
self.mock_update_group = self.update_group_patcher.start()
self.mock_list_memberships = self.list_memberships_patcher.start()
self.mock_add_membership = self.add_membership_patcher.start()
self.mock_remove_membership = self.remove_membership_patcher.start()
self.mock_lookup_subjects = self.lookup_subjects_patcher.start()
self.mock_get_project = self.get_project_patcher.start()
self.mock_check_membership = self.check_membership_patcher.start()
#clear connection cache before running each test
clear_connection_cache()
def tearDown(self):
patch.stopall()
def test_list_groups_with_org_filter(self):
response = list_groups(scope='organization', organization=self._TEST_DEVOPS_ORGANIZATION,detect=self._OFF)
#assert
self.mock_list_groups.assert_called_once()
def test_list_groups(self):
self.mock_get_descriptor.return_value = self._TEST_PROJECT_DESCRIPTOR
response = list_groups(project=self._TEST_PROJECT_DESCRIPTOR,organization=self._TEST_DEVOPS_ORGANIZATION,detect=self._OFF)
#assert
self.mock_list_groups.assert_called_once()
list_groups_param = self.mock_list_groups.call_args_list[0][1]
self.assertEqual(self._TEST_PROJECT_DESCRIPTOR, list_groups_param['scope_descriptor'], str(list_groups_param))
def test_show_group(self):
response = get_group(id=self._TEST_GROUP_DESCRIPTOR, organization=self._TEST_DEVOPS_ORGANIZATION,detect=self._OFF)
#assert
self.mock_get_group.assert_called_once()
get_group_param = self.mock_get_group.call_args_list[0][1]
self.assertEqual(self._TEST_GROUP_DESCRIPTOR, get_group_param['group_descriptor'], str(get_group_param))
def test_delete_group(self):
response = delete_group(id=self._TEST_GROUP_DESCRIPTOR, organization=self._TEST_DEVOPS_ORGANIZATION,detect=self._OFF)
#assert
self.mock_delete_group.assert_called_once()
def test_update_group(self):
response = update_group(id=self._TEST_GROUP_DESCRIPTOR, description='Updated description for the test group', organization=self._TEST_DEVOPS_ORGANIZATION,detect=self._OFF)
#assert
self.mock_update_group.assert_called_once()
def test_create_group_in_org(self):
response = create_group(scope='organization', name=self._TEST_GROUP_NAME, description= self._TEST_GROUP_DESCRIPTION, organization=self._TEST_DEVOPS_ORGANIZATION,detect=self._OFF)
#assert
self.mock_create_group.assert_called_once()
create_group_param = self.mock_create_group.call_args_list[0][1]
self.assertEqual(self._TEST_GROUP_NAME, create_group_param['creation_context'].display_name, str(create_group_param))
self.assertEqual(self._TEST_GROUP_DESCRIPTION, create_group_param['creation_context'].description, str(create_group_param))
def test_create_group(self):
self.mock_get_descriptor.return_value = self._TEST_PROJECT_DESCRIPTOR
response = create_group(name=self._TEST_GROUP_NAME, description= self._TEST_GROUP_DESCRIPTION, project=self._TEST_PROJECT_DESCRIPTOR, organization=self._TEST_DEVOPS_ORGANIZATION,detect=self._OFF)
#assert
self.mock_create_group.assert_called_once()
create_group_param = self.mock_create_group.call_args_list[0][1]
self.assertEqual(self._TEST_GROUP_NAME, create_group_param['creation_context'].display_name, str(create_group_param))
self.assertEqual(self._TEST_GROUP_DESCRIPTION, create_group_param['creation_context'].description, str(create_group_param))
self.assertEqual(self._TEST_PROJECT_DESCRIPTOR, create_group_param['scope_descriptor'], str(create_group_param))
def test_list_memberships(self):
response = list_memberships(id=self._TEST_GROUP_DESCRIPTOR, organization=self._TEST_DEVOPS_ORGANIZATION,detect=self._OFF)
#assert
self.mock_list_memberships.assert_called_once()
self.mock_lookup_subjects.assert_called_once()
list_memberships_param = self.mock_list_memberships.call_args_list[0][1]
self.assertEqual(self._TEST_GROUP_DESCRIPTOR, list_memberships_param['subject_descriptor'], str(list_memberships_param))
self.assertEqual('down', list_memberships_param['direction'], str(list_memberships_param))
def test_list_memberships_member_of(self):
response = list_memberships(id=self._TEST_GROUP_DESCRIPTOR, relationship='memberof', organization=self._TEST_DEVOPS_ORGANIZATION,detect=self._OFF)
#assert
self.mock_list_memberships.assert_called_once()
self.mock_lookup_subjects.assert_called_once()
list_memberships_param = self.mock_list_memberships.call_args_list[0][1]
self.assertEqual(self._TEST_GROUP_DESCRIPTOR, list_memberships_param['subject_descriptor'], str(list_memberships_param))
self.assertEqual('up', list_memberships_param['direction'], str(list_memberships_param))
def test_add_membership(self):
response = add_membership(group_id=self._TEST_GROUP_DESCRIPTOR, member_id=self._TEST_USER_DESCRIPTOR, organization=self._TEST_DEVOPS_ORGANIZATION,detect=self._OFF)
#assert
self.mock_add_membership.assert_called_once()
self.mock_lookup_subjects.assert_called_once()
add_membership_param = self.mock_add_membership.call_args_list[0][1]
self.assertEqual(self._TEST_GROUP_DESCRIPTOR, add_membership_param['container_descriptor'], str(add_membership_param))
self.assertEqual(self._TEST_USER_DESCRIPTOR, add_membership_param['subject_descriptor'], str(add_membership_param))
def test_remove_membership(self):
response = remove_membership(group_id=self._TEST_GROUP_DESCRIPTOR, member_id=self._TEST_USER_DESCRIPTOR, organization=self._TEST_DEVOPS_ORGANIZATION,detect=self._OFF)
#assert
self.mock_remove_membership.assert_called_once()
remove_membership_param = self.mock_remove_membership.call_args_list[0][1]
self.assertEqual(self._TEST_GROUP_DESCRIPTOR, remove_membership_param['container_descriptor'], str(remove_membership_param))
self.assertEqual(self._TEST_USER_DESCRIPTOR, remove_membership_param['subject_descriptor'], str(remove_membership_param))
| [
"noreply@github.com"
] | noreply@github.com |
1bcd0f6d900720243d017fbdeb811745cab1f329 | f3dedd86822718a0dc9da90da704cc6ed907bd22 | /Codes/ReadData/Read_hdf5_Data.py | ea400e1242e49437c0d800485b60e22775e6ef34 | [] | no_license | BlenderCN-Org/MeshOpt | 80d59e8631eb1eeff33e30b4130ababa02b1337b | 4878bb3bfb0e9c3d880e41754719c4257b8c09b8 | refs/heads/master | 2020-05-23T22:15:19.145510 | 2019-01-09T00:56:40 | 2019-01-09T00:56:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,528 | py | import h5py
import meshio
import matplotlib.pyplot as plt
import json
import t2listing
from mulgrids import *
from mpl_toolkits.mplot3d import Axes3D
import numpy
from collections import OrderedDict
from itertools import permutations
def plot_Tcell_vs_time(x_pos, y_pos, z_pos):
    """Plot the temperature history of the cell nearest (x_pos, y_pos, z_pos).

    Relies on the module-level globals ``points``, ``elements``, ``T`` and
    ``times`` loaded by the script below.  NOTE(review): both search loops
    run off the end of ``points``/``elements`` (IndexError) if no match is
    found within the 10 km tolerance -- confirm the target always lies
    inside the mesh.
    """
    # Find the first mesh vertex within 10 000 m of the target on every axis
    target = None
    i = 0
    while target is None:
        if abs(points[i][0]-x_pos)<10000. and abs(points[i][1]-y_pos)<10000. and abs(points[i][2]-z_pos)<10000.:
            target = i
        i += 1
    # Find the first element (cell) that uses that vertex
    i = 0
    cell_index = None
    while cell_index is None:
        if target in elements[i]:
            cell_index = i
        i += 1
    # Temperature of that cell at every output time step
    T_cell = [T[time][cell_index] for time in xrange(len(times))]
    plt.figure()
    plt.plot(times, T_cell)
    plt.show()
def build_isoT(T_target, time):
    """Return [xs, ys, zs] of the centroids of every cell whose temperature
    at the given time step equals T_target (within 1e-4).

    Uses the module globals ``elements``, ``points`` and ``T``.  The centroid
    averages 4 vertices, so this assumes tetrahedral elements --
    NOTE(review): confirm it is never used with the hexahedral mesh path.
    """
    data_x = []
    data_y = []
    data_z = []
    test =0
    for index in xrange(len(elements)):
        # Exact-value match rather than an interpolated isosurface
        if abs(T[time][index] - T_target) < 0.0001:
            test += 1
            current_points = [points[vert] for vert in elements[index]]
            # Centroid = mean of the four vertex coordinates
            data_x.append(sum(current_points[j][0] for j in xrange(4))/4)
            data_y.append(sum(current_points[j][1] for j in xrange(4))/4)
            data_z.append(sum(current_points[j][2] for j in xrange(4))/4)
    return [data_x,data_y,data_z]
def compute_surfaces():
    """Return, per element, the area of its boundary face on the bottom
    plane (z = -4000) lying within ``upflow_radius`` of ``origin`` (x-y
    distance); elements without such a face get 0.

    Uses the module globals ``tets``, ``neighbours``, ``el_neighbours``,
    ``list_points``, ``origin`` and ``upflow_radius``.  NOTE(review): the
    active script path defines ``elements``/``el_neighbours`` but not
    ``tets``/``list_points`` -- this function appears to belong to the
    commented-out heatflow workflow at the bottom of the file; confirm
    before calling it from the active path.
    """
    surfs = numpy.zeros(len(tets))
    for con in el_neighbours:
        # A face referenced by exactly one element is a boundary face
        if len(el_neighbours[con]) == 1:
            # The face = the element's three vertices other than the one
            # recorded as "opposite" in the neighbours mapping
            face = [vert for vert in tets[el_neighbours[con][0]] if vert != neighbours[con][0]]
            current_z = [list_points[face[0]][2],list_points[face[1]][2],list_points[face[2]][2]]
            # Keep only faces lying on the z = -4000 plane...
            if all(abs(z+4000)<0.0001 for z in current_z):
                centroid = [sum(list_points[face[i]][j] for i in xrange(3))/3 for j in xrange(3)]
                # ...whose centroid falls inside the upflow disc (x-y only)
                if numpy.linalg.norm([centroid[i]-origin[i] for i in xrange(2)]) <= upflow_radius:
                    face_area = compute_surface_triangle(face)
                    surfs[el_neighbours[con][0]] = face_area
    return surfs
def connection_mapping(tet_list):
    """Map every triangular face of the tetrahedra in *tet_list* to its
    neighbourhood information.

    Returns a pair of OrderedDicts keyed by frozensets of three vertex
    indices: the first maps each face to the list of "opposite" vertices of
    the element(s) sharing it, the second maps each face to the list of
    element indices sharing it (one entry for boundary faces, two for
    interior faces).
    """
    opposite_vertices = OrderedDict()
    sharing_elements = OrderedDict()
    for elem_index, tet in enumerate(tet_list):
        seen_faces = []
        for perm in permutations(tet, 3):
            face = frozenset(perm)
            # Each face appears in several permutations; handle it once
            if face in seen_faces:
                continue
            seen_faces.append(face)
            missing = [vertex for vertex in tet if vertex not in face]
            if face in opposite_vertices:
                opposite_vertices[face] += missing
                sharing_elements[face] += [elem_index]
            else:
                opposite_vertices[face] = missing
                sharing_elements[face] = [elem_index]
    return opposite_vertices, sharing_elements
def compute_surface_triangle(triangle):
    """Return the area of the triangle whose three entries are vertex
    indices into the module-level ``list_points`` array (Heron's formula)."""
    # Edge lengths of the triangle
    a = numpy.linalg.norm([list_points[triangle[0]][i] - list_points[triangle[1]][i] for i in xrange(3)])
    b = numpy.linalg.norm([list_points[triangle[0]][i] - list_points[triangle[2]][i] for i in xrange(3)])
    c = numpy.linalg.norm([list_points[triangle[1]][i] - list_points[triangle[2]][i] for i in xrange(3)])
    # Heron's formula: area = sqrt(s(s-a)(s-b)(s-c)), s = semi-perimeter
    s = (a + b + c) / 2
    return (s*(s-a)*(s-b)*(s-c)) ** 0.5
# ---------------------------------------------------------------------------
# Script configuration: input/output paths and which reader branch to run.
# ---------------------------------------------------------------------------
PATH = '/home/lmar626/Documents/Meshes/Basicmodels/Model1/Tet/cyl/'
geofile = 'Basic_Model_Geo.dat'
filename = 'otet_m1.h5'
output = filename[:-3]
meshname = 'cyl_m1_opti.msh'
H5 = True        # read results from the HDF5 output file
LISTING = False  # read results from a TOUGH2 listing file instead

if H5:
    # Load the HDF5 results and the mesh, attach selected fields to the mesh
    # cells, then export everything as VTK for visualisation.
    file = h5py.File(PATH + filename, 'r')
    mesh = meshio.read(PATH + meshname)
    cells = mesh.cells
    points = mesh.points
    # NOTE(review): 'type' shadows the builtin; kept as-is.
    if 'hexahedron' in mesh.cells:
        elements = mesh.cells['hexahedron']
        type = 'hex'
    elif 'tetra' in mesh.cells:
        elements = mesh.cells['tetra']
        neighbours, el_neighbours = connection_mapping(elements)
        type = 'tet'
    cell_fields = file['cell_fields']
    cell_indexes = file['cell_index']
    cell_int_index = file['cell_interior_index']
    fields = file['fields']
    bulk_times = file['time']
    # Flatten the (n, 1) time array into a plain list of floats
    times = []
    for t in bulk_times:
        times.append(t[0])
    print len(times)
    T = cell_fields['fluid_temperature']
    Sg = cell_fields['fluid_vapour_saturation']
    P = cell_fields['fluid_pressure']
    if type == 'tet':
        mesh.cells['tetra'] = elements
        print len(elements), len(T[1])
        # Attach the first two time steps of each field as cell data
        mesh.cell_data['tetra'] = {}
        mesh.cell_data['tetra']['T_0'] = T[0]
        mesh.cell_data['tetra']['T_1'] = T[1]
        mesh.cell_data['tetra']['P_0'] = P[0]
        mesh.cell_data['tetra']['P_1'] = P[1]
        mesh.cell_data['tetra']['Sg_0'] = Sg[0]
        mesh.cell_data['tetra']['Sg_1'] = Sg[1]
    if type == 'hex':
        # Same fields, keyed under the hexahedral cell block
        mesh.cell_data['hexahedron'] = {}
        mesh.cell_data['hexahedron']['T_0'] = T[0]
        mesh.cell_data['hexahedron']['T_1'] = T[1]
        mesh.cell_data['hexahedron']['P_0'] = P[0]
        mesh.cell_data['hexahedron']['P_1'] = P[1]
        mesh.cell_data['hexahedron']['Sg_0'] = Sg[0]
        mesh.cell_data['hexahedron']['Sg_1'] = Sg[1]
    ####
    meshio.write(PATH + output + '.vtk', mesh)
elif LISTING:
    # file = t2listing.t2listing(PATH+filename, skip_tables=['generation', 'connection'])
    # geo = mulgrid(PATH + geofile)
    # file.write_vtk(geo, PATH + 'model1_hex')
    # mesh = meshio.read(PATH + meshname)
    # cells = mesh.cells
    # points = mesh.points
    # elements = cells['hexahedron']
    # NOTE(review): 'file' is only assigned in the commented lines above, so
    # this branch raises NameError if enabled -- confirm before use.
    times = file.time
    print times

# Alternative (currently disabled) workflow: compute upflow heatflow surfaces
# from a JSON source description and export them as VTK.  This is the code
# path that compute_surfaces()/compute_surface_triangle() above rely on.
# PATH = '/home/lmar626/Documents/Meshes/Basicmodels/Model1/Tet/'
# geofile = 'Basic_Model_Geo.dat'
# filename = 'm1_tet.json'
# output = 'heatflows.vtk'
# meshname = 'model1.msh'
# H5 = True
# LISTING = False
#
# origin = [5000.,5000.]
# upflow_radius = 1200.
#
#
# if H5:
#     with open(PATH + filename) as f:
#         data = json.load(f)
#
#     sources = data['source']
#     mesh = meshio.read(PATH + meshname)
#     list_points = mesh.points
#     tets = mesh.cells['tetra']
#     neighbours, el_neighbours = connection_mapping(tets)
#     surfaces = compute_surfaces()
#     hf = numpy.zeros(len(mesh.cells['tetra']))
#     for source in sources:
#         hf[source['cell']] = source['rate']
#     mesh.cell_data = {'tetra':{'heatflow': hf, 'surface': surfaces}}
#     meshio.write(PATH + output, mesh)
| [
"noreply@github.com"
] | noreply@github.com |
d8482c372930f5b396b0f84fea8ef886b2b2b545 | 948205e4d3bbe2200ca41ffc4f450ee96948b50f | /picamera/mmalobj.py | ec167f2f6d6e24dfa2b1bba876addbc6c496c0e7 | [
"BSD-3-Clause"
] | permissive | TheSpaghettiDetective/picamera | 1a875dec7f616db059034317dee7b38060149253 | f7b9dcc66224d12ff5a22ece61d76cace2376749 | refs/heads/master | 2022-11-24T12:58:12.932558 | 2020-07-28T18:39:26 | 2020-07-28T18:39:26 | 283,290,128 | 2 | 1 | BSD-3-Clause | 2020-07-28T18:05:32 | 2020-07-28T18:05:32 | null | UTF-8 | Python | false | false | 150,486 | py | # vim: set et sw=4 sts=4 fileencoding=utf-8:
#
# Python header conversion
# Copyright (c) 2013-2017 Dave Jones <dave@waveform.org.uk>
#
# Original headers
# Copyright (c) 2012, Broadcom Europe Ltd
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from __future__ import (
unicode_literals,
print_function,
division,
absolute_import,
)
# Make Py2's str equivalent to Py3's
str = type('')
import io
import ctypes as ct
import warnings
import weakref
from threading import Thread, Event
from collections import namedtuple
from fractions import Fraction
from itertools import cycle
from functools import reduce
from operator import mul
from . import bcm_host, mmal
from .streams import BufferIO
from .exc import (
mmal_check,
PiCameraValueError,
PiCameraRuntimeError,
PiCameraMMALError,
PiCameraPortDisabled,
PiCameraDeprecated,
)
# Old firmwares confuse the RGB24 and BGR24 encodings. This flag tracks whether
# the order needs fixing (it is set during MMALCamera.__init__).
FIX_RGB_BGR_ORDER = None

# Mapping of parameters to the C-structure they expect / return. If a parameter
# does not appear in this mapping, it cannot be queried / set with the
# MMALControlPort.params attribute. Entries whose value is None are filled in
# at runtime (see the MMAL_PARAMETER_ANNOTATE / MMAL_PARAMETER_CAMERA_INFO
# notes below) because the structure revision depends on the firmware.
PARAM_TYPES = {
    mmal.MMAL_PARAMETER_ALGORITHM_CONTROL:              mmal.MMAL_PARAMETER_ALGORITHM_CONTROL_T,
    mmal.MMAL_PARAMETER_ANNOTATE:                       None, # adjusted by MMALCamera.annotate_rev
    mmal.MMAL_PARAMETER_ANTISHAKE:                      mmal.MMAL_PARAMETER_BOOLEAN_T,
    mmal.MMAL_PARAMETER_AUDIO_LATENCY_TARGET:           mmal.MMAL_PARAMETER_AUDIO_LATENCY_TARGET_T,
    mmal.MMAL_PARAMETER_AWB_MODE:                       mmal.MMAL_PARAMETER_AWBMODE_T,
    mmal.MMAL_PARAMETER_BRIGHTNESS:                     mmal.MMAL_PARAMETER_RATIONAL_T,
    mmal.MMAL_PARAMETER_BUFFER_FLAG_FILTER:             mmal.MMAL_PARAMETER_UINT32_T,
    mmal.MMAL_PARAMETER_BUFFER_REQUIREMENTS:            mmal.MMAL_PARAMETER_BUFFER_REQUIREMENTS_T,
    mmal.MMAL_PARAMETER_CAMERA_BURST_CAPTURE:           mmal.MMAL_PARAMETER_BOOLEAN_T,
    mmal.MMAL_PARAMETER_CAMERA_CLOCKING_MODE:           mmal.MMAL_PARAMETER_CAMERA_CLOCKING_MODE_T,
    mmal.MMAL_PARAMETER_CAMERA_CONFIG:                  mmal.MMAL_PARAMETER_CAMERA_CONFIG_T,
    mmal.MMAL_PARAMETER_CAMERA_CUSTOM_SENSOR_CONFIG:    mmal.MMAL_PARAMETER_UINT32_T,
    mmal.MMAL_PARAMETER_CAMERA_INFO:                    None, # adjusted by MMALCameraInfo.info_rev
    mmal.MMAL_PARAMETER_CAMERA_INTERFACE:               mmal.MMAL_PARAMETER_CAMERA_INTERFACE_T,
    mmal.MMAL_PARAMETER_CAMERA_ISP_BLOCK_OVERRIDE:      mmal.MMAL_PARAMETER_UINT32_T,
    mmal.MMAL_PARAMETER_CAMERA_MIN_ISO:                 mmal.MMAL_PARAMETER_UINT32_T,
    mmal.MMAL_PARAMETER_CAMERA_NUM:                     mmal.MMAL_PARAMETER_INT32_T,
    mmal.MMAL_PARAMETER_CAMERA_RX_CONFIG:               mmal.MMAL_PARAMETER_CAMERA_RX_CONFIG_T,
    mmal.MMAL_PARAMETER_CAMERA_RX_TIMING:               mmal.MMAL_PARAMETER_CAMERA_RX_TIMING_T,
    mmal.MMAL_PARAMETER_CAMERA_SETTINGS:                mmal.MMAL_PARAMETER_CAMERA_SETTINGS_T,
    mmal.MMAL_PARAMETER_CAMERA_USE_CASE:                mmal.MMAL_PARAMETER_CAMERA_USE_CASE_T,
    mmal.MMAL_PARAMETER_CAPTURE_EXPOSURE_COMP:          mmal.MMAL_PARAMETER_INT32_T,
    mmal.MMAL_PARAMETER_CAPTURE:                        mmal.MMAL_PARAMETER_BOOLEAN_T,
    mmal.MMAL_PARAMETER_CAPTURE_MODE:                   mmal.MMAL_PARAMETER_CAPTUREMODE_T,
    mmal.MMAL_PARAMETER_CAPTURE_STATS_PASS:             mmal.MMAL_PARAMETER_BOOLEAN_T,
    mmal.MMAL_PARAMETER_CAPTURE_STATUS:                 mmal.MMAL_PARAMETER_CAPTURE_STATUS_T,
    mmal.MMAL_PARAMETER_CHANGE_EVENT_REQUEST:           mmal.MMAL_PARAMETER_CHANGE_EVENT_REQUEST_T,
    mmal.MMAL_PARAMETER_CLOCK_ACTIVE:                   mmal.MMAL_PARAMETER_BOOLEAN_T,
    mmal.MMAL_PARAMETER_CLOCK_DISCONT_THRESHOLD:        mmal.MMAL_PARAMETER_CLOCK_DISCONT_THRESHOLD_T,
    mmal.MMAL_PARAMETER_CLOCK_ENABLE_BUFFER_INFO:       mmal.MMAL_PARAMETER_BOOLEAN_T,
    mmal.MMAL_PARAMETER_CLOCK_FRAME_RATE:               mmal.MMAL_PARAMETER_RATIONAL_T,
    mmal.MMAL_PARAMETER_CLOCK_LATENCY:                  mmal.MMAL_PARAMETER_CLOCK_LATENCY_T,
    mmal.MMAL_PARAMETER_CLOCK_REQUEST_THRESHOLD:        mmal.MMAL_PARAMETER_CLOCK_REQUEST_THRESHOLD_T,
    mmal.MMAL_PARAMETER_CLOCK_SCALE:                    mmal.MMAL_PARAMETER_RATIONAL_T,
    mmal.MMAL_PARAMETER_CLOCK_TIME:                     mmal.MMAL_PARAMETER_INT64_T,
    mmal.MMAL_PARAMETER_CLOCK_UPDATE_THRESHOLD:         mmal.MMAL_PARAMETER_CLOCK_UPDATE_THRESHOLD_T,
    mmal.MMAL_PARAMETER_COLOUR_EFFECT:                  mmal.MMAL_PARAMETER_COLOURFX_T,
    mmal.MMAL_PARAMETER_CONTRAST:                       mmal.MMAL_PARAMETER_RATIONAL_T,
    mmal.MMAL_PARAMETER_CORE_STATISTICS:                mmal.MMAL_PARAMETER_CORE_STATISTICS_T,
    mmal.MMAL_PARAMETER_CUSTOM_AWB_GAINS:               mmal.MMAL_PARAMETER_AWB_GAINS_T,
    mmal.MMAL_PARAMETER_DISPLAYREGION:                  mmal.MMAL_DISPLAYREGION_T,
    mmal.MMAL_PARAMETER_DPF_CONFIG:                     mmal.MMAL_PARAMETER_UINT32_T,
    mmal.MMAL_PARAMETER_DYNAMIC_RANGE_COMPRESSION:      mmal.MMAL_PARAMETER_DRC_T,
    mmal.MMAL_PARAMETER_ENABLE_RAW_CAPTURE:             mmal.MMAL_PARAMETER_BOOLEAN_T,
    mmal.MMAL_PARAMETER_EXIF_DISABLE:                   mmal.MMAL_PARAMETER_BOOLEAN_T,
    mmal.MMAL_PARAMETER_EXIF:                           mmal.MMAL_PARAMETER_EXIF_T,
    mmal.MMAL_PARAMETER_EXP_METERING_MODE:              mmal.MMAL_PARAMETER_EXPOSUREMETERINGMODE_T,
    mmal.MMAL_PARAMETER_EXPOSURE_COMP:                  mmal.MMAL_PARAMETER_INT32_T,
    mmal.MMAL_PARAMETER_EXPOSURE_MODE:                  mmal.MMAL_PARAMETER_EXPOSUREMODE_T,
    mmal.MMAL_PARAMETER_EXTRA_BUFFERS:                  mmal.MMAL_PARAMETER_UINT32_T,
    mmal.MMAL_PARAMETER_FIELD_OF_VIEW:                  mmal.MMAL_PARAMETER_FIELD_OF_VIEW_T,
    mmal.MMAL_PARAMETER_FLASH:                          mmal.MMAL_PARAMETER_FLASH_T,
    mmal.MMAL_PARAMETER_FLASH_REQUIRED:                 mmal.MMAL_PARAMETER_BOOLEAN_T,
    mmal.MMAL_PARAMETER_FLASH_SELECT:                   mmal.MMAL_PARAMETER_FLASH_SELECT_T,
    mmal.MMAL_PARAMETER_FLICKER_AVOID:                  mmal.MMAL_PARAMETER_FLICKERAVOID_T,
    mmal.MMAL_PARAMETER_FOCUS:                          mmal.MMAL_PARAMETER_FOCUS_T,
    mmal.MMAL_PARAMETER_FOCUS_REGIONS:                  mmal.MMAL_PARAMETER_FOCUS_REGIONS_T,
    mmal.MMAL_PARAMETER_FOCUS_STATUS:                   mmal.MMAL_PARAMETER_FOCUS_STATUS_T,
    mmal.MMAL_PARAMETER_FPS_RANGE:                      mmal.MMAL_PARAMETER_FPS_RANGE_T,
    mmal.MMAL_PARAMETER_FRAME_RATE:                     mmal.MMAL_PARAMETER_RATIONAL_T, # actually mmal.MMAL_PARAMETER_FRAME_RATE_T but this only contains a rational anyway...
    mmal.MMAL_PARAMETER_IMAGE_EFFECT:                   mmal.MMAL_PARAMETER_IMAGEFX_T,
    mmal.MMAL_PARAMETER_IMAGE_EFFECT_PARAMETERS:        mmal.MMAL_PARAMETER_IMAGEFX_PARAMETERS_T,
    mmal.MMAL_PARAMETER_INPUT_CROP:                     mmal.MMAL_PARAMETER_INPUT_CROP_T,
    mmal.MMAL_PARAMETER_INTRAPERIOD:                    mmal.MMAL_PARAMETER_UINT32_T,
    mmal.MMAL_PARAMETER_ISO:                            mmal.MMAL_PARAMETER_UINT32_T,
    mmal.MMAL_PARAMETER_JPEG_ATTACH_LOG:                mmal.MMAL_PARAMETER_BOOLEAN_T,
    mmal.MMAL_PARAMETER_JPEG_Q_FACTOR:                  mmal.MMAL_PARAMETER_UINT32_T,
    mmal.MMAL_PARAMETER_JPEG_RESTART_INTERVAL:          mmal.MMAL_PARAMETER_UINT32_T,
    mmal.MMAL_PARAMETER_LOCKSTEP_ENABLE:                mmal.MMAL_PARAMETER_BOOLEAN_T,
    mmal.MMAL_PARAMETER_LOGGING:                        mmal.MMAL_PARAMETER_LOGGING_T,
    mmal.MMAL_PARAMETER_MB_ROWS_PER_SLICE:              mmal.MMAL_PARAMETER_UINT32_T,
    mmal.MMAL_PARAMETER_MEM_USAGE:                      mmal.MMAL_PARAMETER_MEM_USAGE_T,
    mmal.MMAL_PARAMETER_MINIMISE_FRAGMENTATION:         mmal.MMAL_PARAMETER_BOOLEAN_T,
    mmal.MMAL_PARAMETER_MIRROR:                         mmal.MMAL_PARAMETER_UINT32_T, # actually mmal.MMAL_PARAMETER_MIRROR_T but this just contains a uint32
    mmal.MMAL_PARAMETER_NALUNITFORMAT:                  mmal.MMAL_PARAMETER_VIDEO_NALUNITFORMAT_T,
    mmal.MMAL_PARAMETER_NO_IMAGE_PADDING:               mmal.MMAL_PARAMETER_BOOLEAN_T,
    mmal.MMAL_PARAMETER_POWERMON_ENABLE:                mmal.MMAL_PARAMETER_BOOLEAN_T,
    mmal.MMAL_PARAMETER_PRIVACY_INDICATOR:              mmal.MMAL_PARAMETER_PRIVACY_INDICATOR_T,
    mmal.MMAL_PARAMETER_PROFILE:                        mmal.MMAL_PARAMETER_VIDEO_PROFILE_T,
    mmal.MMAL_PARAMETER_RATECONTROL:                    mmal.MMAL_PARAMETER_VIDEO_RATECONTROL_T,
    mmal.MMAL_PARAMETER_REDEYE:                         mmal.MMAL_PARAMETER_REDEYE_T,
    mmal.MMAL_PARAMETER_ROTATION:                       mmal.MMAL_PARAMETER_INT32_T,
    mmal.MMAL_PARAMETER_SATURATION:                     mmal.MMAL_PARAMETER_RATIONAL_T,
    mmal.MMAL_PARAMETER_SEEK:                           mmal.MMAL_PARAMETER_SEEK_T,
    mmal.MMAL_PARAMETER_SENSOR_INFORMATION:             mmal.MMAL_PARAMETER_SENSOR_INFORMATION_T,
    mmal.MMAL_PARAMETER_SHARPNESS:                      mmal.MMAL_PARAMETER_RATIONAL_T,
    mmal.MMAL_PARAMETER_SHUTTER_SPEED:                  mmal.MMAL_PARAMETER_UINT32_T,
    mmal.MMAL_PARAMETER_STATISTICS:                     mmal.MMAL_PARAMETER_STATISTICS_T,
    mmal.MMAL_PARAMETER_STEREOSCOPIC_MODE:              mmal.MMAL_PARAMETER_STEREOSCOPIC_MODE_T,
    mmal.MMAL_PARAMETER_STILLS_DENOISE:                 mmal.MMAL_PARAMETER_BOOLEAN_T,
    mmal.MMAL_PARAMETER_SUPPORTED_ENCODINGS:            mmal.MMAL_PARAMETER_ENCODING_T,
    mmal.MMAL_PARAMETER_SUPPORTED_PROFILES:             mmal.MMAL_PARAMETER_VIDEO_PROFILE_T,
    mmal.MMAL_PARAMETER_SW_SATURATION_DISABLE:          mmal.MMAL_PARAMETER_BOOLEAN_T,
    mmal.MMAL_PARAMETER_SW_SHARPEN_DISABLE:             mmal.MMAL_PARAMETER_BOOLEAN_T,
    mmal.MMAL_PARAMETER_SYSTEM_TIME:                    mmal.MMAL_PARAMETER_UINT64_T,
    mmal.MMAL_PARAMETER_THUMBNAIL_CONFIGURATION:        mmal.MMAL_PARAMETER_THUMBNAIL_CONFIG_T,
    mmal.MMAL_PARAMETER_URI:                            mmal.MMAL_PARAMETER_URI_T,
    mmal.MMAL_PARAMETER_USE_STC:                        mmal.MMAL_PARAMETER_CAMERA_STC_MODE_T,
    mmal.MMAL_PARAMETER_VIDEO_ALIGN_HORIZ:              mmal.MMAL_PARAMETER_UINT32_T,
    mmal.MMAL_PARAMETER_VIDEO_ALIGN_VERT:               mmal.MMAL_PARAMETER_UINT32_T,
    mmal.MMAL_PARAMETER_VIDEO_BIT_RATE:                 mmal.MMAL_PARAMETER_UINT32_T,
    mmal.MMAL_PARAMETER_VIDEO_DENOISE:                  mmal.MMAL_PARAMETER_BOOLEAN_T,
    mmal.MMAL_PARAMETER_VIDEO_DROPPABLE_PFRAMES:        mmal.MMAL_PARAMETER_BOOLEAN_T,
    mmal.MMAL_PARAMETER_VIDEO_EEDE_ENABLE:              mmal.MMAL_PARAMETER_VIDEO_EEDE_ENABLE_T,
    mmal.MMAL_PARAMETER_VIDEO_EEDE_LOSSRATE:            mmal.MMAL_PARAMETER_VIDEO_EEDE_LOSSRATE_T,
    mmal.MMAL_PARAMETER_VIDEO_ENCODE_FRAME_LIMIT_BITS:  mmal.MMAL_PARAMETER_UINT32_T,
    mmal.MMAL_PARAMETER_VIDEO_ENCODE_INITIAL_QUANT:     mmal.MMAL_PARAMETER_UINT32_T,
    mmal.MMAL_PARAMETER_VIDEO_ENCODE_INLINE_HEADER:     mmal.MMAL_PARAMETER_BOOLEAN_T,
    mmal.MMAL_PARAMETER_VIDEO_ENCODE_INLINE_VECTORS:    mmal.MMAL_PARAMETER_BOOLEAN_T,
    mmal.MMAL_PARAMETER_VIDEO_ENCODE_MAX_QUANT:         mmal.MMAL_PARAMETER_UINT32_T,
    mmal.MMAL_PARAMETER_VIDEO_ENCODE_MIN_QUANT:         mmal.MMAL_PARAMETER_UINT32_T,
    mmal.MMAL_PARAMETER_VIDEO_ENCODE_PEAK_RATE:         mmal.MMAL_PARAMETER_UINT32_T,
    mmal.MMAL_PARAMETER_VIDEO_ENCODE_QP_P:              mmal.MMAL_PARAMETER_UINT32_T,
    mmal.MMAL_PARAMETER_VIDEO_ENCODE_RC_MODEL:          mmal.MMAL_PARAMETER_VIDEO_ENCODE_RC_MODEL_T,
    mmal.MMAL_PARAMETER_VIDEO_ENCODE_RC_SLICE_DQUANT:   mmal.MMAL_PARAMETER_UINT32_T,
    mmal.MMAL_PARAMETER_VIDEO_ENCODE_SEI_ENABLE:        mmal.MMAL_PARAMETER_BOOLEAN_T,
    mmal.MMAL_PARAMETER_VIDEO_ENCODE_SPS_TIMING:        mmal.MMAL_PARAMETER_BOOLEAN_T,
    mmal.MMAL_PARAMETER_VIDEO_FRAME_RATE:               mmal.MMAL_PARAMETER_RATIONAL_T, # actually mmal.MMAL_PARAMETER_FRAME_RATE_T but this only contains a rational anyway...
    mmal.MMAL_PARAMETER_VIDEO_IMMUTABLE_INPUT:          mmal.MMAL_PARAMETER_BOOLEAN_T,
    mmal.MMAL_PARAMETER_VIDEO_INTERLACE_TYPE:           mmal.MMAL_PARAMETER_VIDEO_INTERLACE_TYPE_T,
    mmal.MMAL_PARAMETER_VIDEO_INTERPOLATE_TIMESTAMPS:   mmal.MMAL_PARAMETER_BOOLEAN_T,
    mmal.MMAL_PARAMETER_VIDEO_INTRA_REFRESH:            mmal.MMAL_PARAMETER_VIDEO_INTRA_REFRESH_T,
    mmal.MMAL_PARAMETER_VIDEO_LEVEL_EXTENSION:          mmal.MMAL_PARAMETER_VIDEO_LEVEL_EXTENSION_T,
    mmal.MMAL_PARAMETER_VIDEO_MAX_NUM_CALLBACKS:        mmal.MMAL_PARAMETER_UINT32_T,
    mmal.MMAL_PARAMETER_VIDEO_RENDER_STATS:             mmal.MMAL_PARAMETER_VIDEO_RENDER_STATS_T,
    mmal.MMAL_PARAMETER_VIDEO_REQUEST_I_FRAME:          mmal.MMAL_PARAMETER_BOOLEAN_T,
    mmal.MMAL_PARAMETER_VIDEO_STABILISATION:            mmal.MMAL_PARAMETER_BOOLEAN_T,
    mmal.MMAL_PARAMETER_ZERO_COPY:                      mmal.MMAL_PARAMETER_BOOLEAN_T,
    mmal.MMAL_PARAMETER_ZERO_SHUTTER_LAG:               mmal.MMAL_PARAMETER_ZEROSHUTTERLAG_T,
    mmal.MMAL_PARAMETER_ZOOM:                           mmal.MMAL_PARAMETER_SCALEFACTOR_T,
    }
class PiCameraFraction(Fraction):
    """
    Extends :class:`~fractions.Fraction` so that instances can also be
    treated as a two-element (numerator, denominator) tuple, for backward
    compatibility with APIs that used to return framerates as tuples.
    Tuple-style access raises a deprecation warning.
    """

    # Single copy of the deprecation message shared by __len__/__getitem__
    _TUPLE_DEPRECATION = (
        'Accessing framerate as a tuple is deprecated; this value is '
        'now a Fraction, so you can query the numerator and '
        'denominator properties directly, convert to an int or float, '
        'or perform arithmetic operations and comparisons directly')

    def __len__(self):
        # Pretend to be a 2-tuple, but warn callers off this usage
        warnings.warn(PiCameraDeprecated(self._TUPLE_DEPRECATION))
        return 2

    def __getitem__(self, index):
        # Index 0 is the numerator, index 1 the denominator; anything else
        # (including negative indexes) is rejected
        warnings.warn(PiCameraDeprecated(self._TUPLE_DEPRECATION))
        if index == 0:
            return self.numerator
        if index == 1:
            return self.denominator
        raise IndexError('invalid index %d' % index)

    def __contains__(self, value):
        # A value is "in" the fraction if it equals either component
        return value in (self.numerator, self.denominator)
class PiResolution(namedtuple('PiResolution', ('width', 'height'))):
    """
    A :func:`~collections.namedtuple` derivative which represents a resolution
    with a :attr:`width` and :attr:`height`.

    .. attribute:: width

        The width of the resolution in pixels

    .. attribute:: height

        The height of the resolution in pixels

    .. versionadded:: 1.11
    """

    __slots__ = () # workaround python issue #24931

    def pad(self, width=32, height=16):
        """
        Returns the resolution padded up to the nearest multiple of *width*
        and *height* which default to 32 and 16 respectively (the camera's
        native block size for most operations). For example:

        .. code-block:: pycon

            >>> PiResolution(1920, 1080).pad()
            PiResolution(width=1920, height=1088)
            >>> PiResolution(100, 100).pad(16, 16)
            PiResolution(width=112, height=112)
        """
        # NOTE: the original docstring contained a duplicated example that
        # wrongly claimed pad(16, 16) of 100 gave 128; rounding 100 up to a
        # multiple of 16 yields 112 as shown above.
        return PiResolution(
            width=((self.width + (width - 1)) // width) * width,
            height=((self.height + (height - 1)) // height) * height,
            )

    def transpose(self):
        """
        Returns the resolution with the width and height transposed. For
        example:

        .. code-block:: pycon

            >>> PiResolution(1920, 1080).transpose()
            PiResolution(width=1080, height=1920)
        """
        return PiResolution(self.height, self.width)

    def __str__(self):
        # Render as "WIDTHxHEIGHT", e.g. "1920x1080"
        return '%dx%d' % (self.width, self.height)
class PiFramerateRange(namedtuple('PiFramerateRange', ('low', 'high'))):
    """
    A :func:`~collections.namedtuple` derivative holding the low and high
    limits (both inclusive) of a permitted framerate range. Prefer attribute
    access (``camera.framerate_range.low``) over positional indexing. Both
    bounds are normalized to :class:`~fractions.Fraction` on construction.

    .. attribute:: low

        The lowest framerate the camera may use, as a
        :class:`~fractions.Fraction`.

    .. attribute:: high

        The highest framerate the camera may use, as a
        :class:`~fractions.Fraction`.

    .. versionadded:: 1.13
    """

    __slots__ = () # workaround python issue #24931

    def __new__(cls, low, high):
        # Normalize both limits to Fractions before the tuple is built
        low, high = to_fraction(low), to_fraction(high)
        return super(PiFramerateRange, cls).__new__(cls, low, high)

    def __str__(self):
        return '%s..%s' % (self.low, self.high)
class PiSensorMode(namedtuple('PiSensorMode', ('resolution', 'framerates',
        'video', 'still', 'full_fov'))):
    """
    A :func:`~collections.namedtuple` derivative describing a camera sensor
    mode.

    .. attribute:: resolution

        A :class:`PiResolution` specifying the size of frames output by the
        camera in this mode.

    .. attribute:: framerates

        A :class:`PiFramerateRange` specifying the minimum and maximum
        framerates supported by this sensor mode. Typically the low value is
        exclusive and high value inclusive.

    .. attribute:: video

        A :class:`bool` indicating whether or not the mode is capable of
        recording video. Currently this is always ``True``.

    .. attribute:: still

        A :class:`bool` indicating whether the mode can be used for still
        captures (cases where a capture method is called with
        ``use_video_port`` set to ``False``).

    .. attribute:: full_fov

        A :class:`bool` indicating whether the full width of the sensor
        area is used to capture frames. This can be ``True`` even when the
        resolution is less than the camera's maximum resolution due to binning
        and skipping. See :ref:`camera_modes` for a diagram of the available
        fields of view.
    """

    __slots__ = () # workaround python issue #24931

    def __new__(cls, resolution, framerates, video=True, still=False,
            full_fov=True):
        # Coerce plain tuples / strings into the richer picamera types
        if not isinstance(resolution, PiResolution):
            resolution = to_resolution(resolution)
        if not isinstance(framerates, PiFramerateRange):
            framerates = PiFramerateRange(*framerates)
        return super(PiSensorMode, cls).__new__(
            cls, resolution, framerates, video, still, full_fov)
def open_stream(stream, output=True, buffering=65536):
    """
    The core of picamera's IO-semantics: normalize *stream* into a file-like
    object. Returns a ``(stream, opened)`` tuple where *opened* tells the
    caller whether it is responsible for closing the stream.

    * A string (or ASCII ``bytes``) is treated as a filename and opened in
      ``'wb'`` mode when *output* is ``True``, ``'rb'`` otherwise, with the
      given *buffering*; returns ``(file, True)``.
    * An object with a ``write`` (or ``read`` when *output* is ``False``)
      method is returned as ``(stream, False)``.
    * Anything else is assumed to be a writeable buffer and is wrapped in
      :class:`BufferIO`; returns ``(wrapper, True)``.
    """
    if isinstance(stream, bytes):
        stream = stream.decode('ascii')
    if isinstance(stream, str):
        # Filename: we open it, so the caller must close it
        mode = 'wb' if output else 'rb'
        return (io.open(stream, mode, buffering), True)
    try:
        # Duck-type check: does it look like a stream for this direction?
        stream.write if output else stream.read
    except AttributeError:
        # Not a stream; assume it's a buffer and wrap it
        stream = BufferIO(stream)
        if output and not stream.writable:
            raise IOError('writeable buffer required for output')
        return (stream, True)
    else:
        return (stream, False)
def close_stream(stream, opened):
    """
    Finalize a stream returned by :func:`open_stream`: close it when this
    module opened it (*opened* is ``True``); otherwise just attempt to flush
    it, silently ignoring objects that have no ``flush`` method.
    """
    if opened:
        stream.close()
        return
    # We don't own the stream -- flush it if the object supports flushing
    try:
        stream.flush()
    except AttributeError:
        pass
def to_resolution(value):
    """
    Convert *value* to a :class:`PiResolution`. *value* may be a
    (width, height) tuple, or a string holding either a "WIDTHxHEIGHT"
    specification or a well-known resolution name (e.g. "1024x768",
    "1080p", "VGA").
    """
    if isinstance(value, bytes):
        value = value.decode('utf-8')
    if isinstance(value, str):
        # A selection from https://en.wikipedia.org/wiki/Graphics_display_resolution
        # Feel free to suggest additions
        named = {
            'VGA':   (640, 480),
            'SVGA':  (800, 600),
            'XGA':   (1024, 768),
            'SXGA':  (1280, 1024),
            'UXGA':  (1600, 1200),
            'HD':    (1280, 720),
            'FHD':   (1920, 1080),
            '1080P': (1920, 1080),
            '720P':  (1280, 720),
            }
        key = value.strip().upper()
        if key in named:
            w, h = named[key]
        else:
            # Fall back to parsing "WIDTHxHEIGHT"
            w, h = (int(part.strip()) for part in value.upper().split('X', 1))
    else:
        try:
            w, h = value
        except (TypeError, ValueError):
            raise PiCameraValueError("Invalid resolution tuple: %r" % value)
    return PiResolution(w, h)
def to_fraction(value, den_limit=65536):
    """
    Convert *value* to a :class:`~fractions.Fraction` whose denominator is
    limited to the range 0 < d <= *den_limit* (default 65536). *value* may
    be any numeric type, an MMAL_RATIONAL_T, or (deprecated) a
    (numerator, denominator) tuple.

    Raises :exc:`PiCameraValueError` when the denominator is zero.
    """
    try:
        # int, long, or Fraction (anything with numerator/denominator)
        num, den = value.numerator, value.denominator
    except AttributeError:
        try:
            # float
            num, den = value.as_integer_ratio()
        except AttributeError:
            try:
                # MMAL_RATIONAL_T structure
                num, den = value.num, value.den
            except AttributeError:
                try:
                    # (numerator, denominator) tuple -- deprecated
                    num, den = value
                    warnings.warn(
                        PiCameraDeprecated(
                            "Setting framerate or gains as a tuple is "
                            "deprecated; please use one of Python's many "
                            "numeric classes like int, float, Decimal, or "
                            "Fraction instead"))
                except (TypeError, ValueError):
                    # Last resort: let Fraction itself do the parsing
                    value = Fraction(value)
                    num, den = value.numerator, value.denominator
    # Ensure the denominator is reasonable
    if den == 0:
        raise PiCameraValueError("Denominator cannot be 0")
    result = Fraction(num, den)
    if den > den_limit:
        result = result.limit_denominator(den_limit)
    return result
def to_rational(value):
    """
    Converts *value* (which can be anything accepted by :func:`to_fraction`) to
    an MMAL_RATIONAL_T structure.
    """
    # Normalize via to_fraction first (clamping the denominator to MMAL's
    # expected range), then copy the pair into the ctypes structure
    value = to_fraction(value)
    return mmal.MMAL_RATIONAL_T(value.numerator, value.denominator)
def buffer_bytes(buf):
    """
    Return the size, in bytes, of *buf*, an object implementing the
    :ref:`buffer protocol <bufferobjects>`. Handles multi-dimensional
    buffers and buffers whose items are larger than a single byte.
    """
    view = buf if isinstance(buf, memoryview) else memoryview(buf)
    # Total bytes = bytes-per-item times the product of all dimensions
    return view.itemsize * reduce(mul, view.shape)
def debug_pipeline(port):
    """
    Given an :class:`MMALVideoPort` *port*, this traces all objects in the
    pipeline feeding it (including components and connections) and yields each
    object in turn. Hence the generator typically yields something like:

    * :class:`MMALVideoPort` (the specified output port)
    * :class:`MMALEncoder` (the encoder which owns the output port)
    * :class:`MMALVideoPort` (the encoder's input port)
    * :class:`MMALConnection` (the connection between the splitter and encoder)
    * :class:`MMALVideoPort` (the splitter's output port)
    * :class:`MMALSplitter` (the splitter on the camera's video port)
    * :class:`MMALVideoPort` (the splitter's input port)
    * :class:`MMALConnection` (the connection between the splitter and camera)
    * :class:`MMALVideoPort` (the camera's video port)
    * :class:`MMALCamera` (the camera component)
    """
    def find_port(addr):
        # Reverse-lookup: map the raw MMAL_PORT_T address back to the Python
        # wrapper object registered in MMALObject.REGISTRY
        for obj in MMALObject.REGISTRY:
            if isinstance(obj, MMALControlPort):
                if ct.addressof(obj._port[0]) == addr:
                    return obj
        raise IndexError('unable to locate port with address %x' % addr)
    def find_component(addr):
        # Reverse-lookup for MMAL_COMPONENT_T addresses, skipping wrappers
        # whose component has already been destroyed
        for obj in MMALObject.REGISTRY:
            if isinstance(obj, MMALBaseComponent) and obj._component is not None:
                if ct.addressof(obj._component[0]) == addr:
                    return obj
        raise IndexError('unable to locate component with address %x' % addr)
    assert isinstance(port, (MMALControlPort, MMALPythonPort))
    # Walk upstream from the given port: port -> owning component ->
    # upstream connection -> its source port, until we run out of
    # connected components
    while True:
        if port.type == mmal.MMAL_PORT_TYPE_OUTPUT:
            yield port
        if isinstance(port, MMALPythonPort):
            comp = port._owner()
        else:
            comp = find_component(ct.addressof(port._port[0].component[0]))
        yield comp
        if not isinstance(comp, (MMALComponent, MMALPythonComponent)):
            break
        if comp.connection is None:
            break
        # Yield the component's input port, then the connection feeding it
        if isinstance(comp.connection, MMALPythonConnection):
            port = comp.connection._target
        else:
            port = find_port(ct.addressof(comp.connection._connection[0].in_[0]))
        yield port
        yield comp.connection
        # Continue the walk from the connection's source (upstream) port
        if isinstance(comp.connection, MMALPythonConnection):
            port = comp.connection._source
        else:
            port = find_port(ct.addressof(comp.connection._connection[0].out[0]))
def print_pipeline(port):
    """
    Prints a human readable representation of the pipeline feeding the
    specified :class:`MMALVideoPort` *port*.

    The output is a table with six rows: component/port names, encoding,
    buffer count x size, bitrate, frame geometry+rate, and color space,
    with one column group per object yielded by :func:`debug_pipeline`.
    """
    # rows[0]..rows[5]: name, encoding, buffers, bitrate, frame, colorspace
    rows = [[], [], [], [], [], []]
    # under_comp is True while emitting the first port after a component
    # header; it triggers the row labels in the left-hand column
    under_comp = False
    for obj in reversed(list(debug_pipeline(port))):
        if isinstance(obj, (MMALBaseComponent, MMALPythonBaseComponent)):
            rows[0].append(obj.name)
            under_comp = True
        elif isinstance(obj, MMALVideoPort):
            rows[0].append('[%d]' % obj._port[0].index)
            if under_comp:
                rows[1].append('encoding')
            if obj.format == mmal.MMAL_ENCODING_OPAQUE:
                rows[1].append(obj.opaque_subformat)
            else:
                rows[1].append(mmal.FOURCC_str(obj._port[0].format[0].encoding))
            if under_comp:
                rows[2].append('buf')
            rows[2].append('%dx%d' % (obj._port[0].buffer_num, obj._port[0].buffer_size))
            if under_comp:
                rows[3].append('bitrate')
            rows[3].append('%dbps' % (obj._port[0].format[0].bitrate,))
            if under_comp:
                rows[4].append('frame')
            rows[4].append('%dx%d@%sfps' % (
                obj._port[0].format[0].es[0].video.width,
                obj._port[0].format[0].es[0].video.height,
                obj.framerate))
            if under_comp:
                rows[5].append('colorspc')
                under_comp = False
            rows[5].append(mmal.FOURCC_str(obj._port[0].format[0].es[0].video.color_space))
        elif isinstance(obj, MMALPythonPort):
            # Python-implemented ports store their format directly rather
            # than behind an MMAL_PORT_T pointer
            rows[0].append('[%d]' % obj._index)
            if under_comp:
                rows[1].append('encoding')
            if obj.format == mmal.MMAL_ENCODING_OPAQUE:
                rows[1].append(obj.opaque_subformat)
            else:
                rows[1].append(mmal.FOURCC_str(obj._format[0].encoding))
            if under_comp:
                rows[2].append('buf')
            rows[2].append('%dx%d' % (obj.buffer_count, obj.buffer_size))
            if under_comp:
                rows[3].append('bitrate')
            rows[3].append('%dbps' % (obj._format[0].bitrate,))
            if under_comp:
                rows[4].append('frame')
                under_comp = False
            rows[4].append('%dx%d@%sfps' % (
                obj._format[0].es[0].video.width,
                obj._format[0].es[0].video.height,
                obj.framerate))
            if under_comp:
                rows[5].append('colorspc')
            rows[5].append('???')
        elif isinstance(obj, (MMALConnection, MMALPythonConnection)):
            # Connections render as a simple arrow in the buffers row
            rows[0].append('')
            rows[1].append('')
            rows[2].append('-->')
            rows[3].append('')
            rows[4].append('')
            rows[5].append('')
    if under_comp:
        # The last component had no ports; emit the labels anyway
        rows[1].append('encoding')
        rows[2].append('buf')
        rows[3].append('bitrate')
        rows[4].append('frame')
        rows[5].append('colorspc')
    # Pad each column to its widest cell and print with alternating alignment
    cols = list(zip(*rows))
    max_lens = [max(len(s) for s in col) + 2 for col in cols]
    rows = [
        ''.join('{0:{align}{width}s}'.format(s, align=align, width=max_len)
            for s, max_len, align in zip(row, max_lens, cycle('^<^>')))
        for row in rows
        ]
    for row in rows:
        print(row)
class MMALObject(object):
    """
    Base class for wrappers around MMAL entities (components, ports,
    connections, etc). Maintains a registry of every live wrapper (held via
    weak references) so objects can be enumerated or looked up by other
    debugging helpers.
    """

    __slots__ = ('__weakref__',)
    REGISTRY = weakref.WeakSet()

    def __init__(self):
        super(MMALObject, self).__init__()
        # Register weakly so the registry never keeps an object alive
        self.REGISTRY.add(self)
class MMALBaseComponent(MMALObject):
    """
    Represents a generic MMAL component. Class attributes are read to determine
    the component type, and the OPAQUE sub-formats of each connectable port.
    """
    __slots__ = ('_component', '_control', '_inputs', '_outputs')
    # Sub-classes override these class attributes: component_type names the
    # firmware component to instantiate, while the two tuples give the OPAQUE
    # sub-format spoken by each input/output port (their lengths also define
    # how many ports the component is expected to have; see __init__)
    component_type = b'none'
    opaque_input_subformats = ()
    opaque_output_subformats = ()
    def __init__(self):
        """
        Create the underlying MMAL component and wrap its control, input,
        and output ports.

        Raises :exc:`PiCameraRuntimeError` if the firmware reports a
        different number of input or output ports than the class's
        sub-format tuples declare.
        """
        super(MMALBaseComponent, self).__init__()
        self._component = ct.POINTER(mmal.MMAL_COMPONENT_T)()
        mmal_check(
            mmal.mmal_component_create(self.component_type, self._component),
            prefix="Failed to create MMAL component %s" % self.component_type)
        # Sanity check: the firmware must report exactly as many ports as
        # the class declared sub-formats for
        if self._component[0].input_num != len(self.opaque_input_subformats):
            raise PiCameraRuntimeError(
                'Expected %d inputs but found %d on component %s' % (
                    len(self.opaque_input_subformats),
                    self._component[0].input_num,
                    self.component_type))
        if self._component[0].output_num != len(self.opaque_output_subformats):
            raise PiCameraRuntimeError(
                'Expected %d outputs but found %d on component %s' % (
                    len(self.opaque_output_subformats),
                    self._component[0].output_num,
                    self.component_type))
        self._control = MMALControlPort(self._component[0].control)
        # Pick the port wrapper class according to the elementary stream
        # type the firmware reports for each port
        port_class = {
            mmal.MMAL_ES_TYPE_UNKNOWN:    MMALPort,
            mmal.MMAL_ES_TYPE_CONTROL:    MMALControlPort,
            mmal.MMAL_ES_TYPE_VIDEO:      MMALVideoPort,
            mmal.MMAL_ES_TYPE_AUDIO:      MMALAudioPort,
            mmal.MMAL_ES_TYPE_SUBPICTURE: MMALSubPicturePort,
            }
        self._inputs = tuple(
            port_class[self._component[0].input[n][0].format[0].type](
                self._component[0].input[n], opaque_subformat)
            for n, opaque_subformat in enumerate(self.opaque_input_subformats))
        self._outputs = tuple(
            port_class[self._component[0].output[n][0].format[0].type](
                self._component[0].output[n], opaque_subformat)
            for n, opaque_subformat in enumerate(self.opaque_output_subformats))
    def close(self):
        """
        Close the component and release all its resources. After this is
        called, most methods will raise exceptions if called.
        """
        if self._component is not None:
            # ensure we free any pools associated with input/output ports
            for output in self.outputs:
                output.disable()
            for input in self.inputs:
                input.disable()
            mmal.mmal_component_destroy(self._component)
            self._component = None
            self._inputs = ()
            self._outputs = ()
            self._control = None
    @property
    def name(self):
        # The firmware-assigned name of the component (a bytes value,
        # decoded here to a native str)
        return self._component[0].name.decode('ascii')
    @property
    def control(self):
        """
        The :class:`MMALControlPort` control port of the component which can be
        used to configure most aspects of the component's behaviour.
        """
        return self._control
    @property
    def inputs(self):
        """
        A sequence of :class:`MMALPort` objects representing the inputs
        of the component.
        """
        return self._inputs
    @property
    def outputs(self):
        """
        A sequence of :class:`MMALPort` objects representing the outputs
        of the component.
        """
        return self._outputs
    @property
    def enabled(self):
        """
        Returns ``True`` if the component is currently enabled. Use
        :meth:`enable` and :meth:`disable` to control the component's state.
        """
        return bool(self._component[0].is_enabled)
    def enable(self):
        """
        Enable the component. When a component is enabled it will process data
        sent to its input port(s), sending the results to buffers on its output
        port(s). Components may be implicitly enabled by connections.
        """
        mmal_check(
            mmal.mmal_component_enable(self._component),
            prefix="Failed to enable component")
    def disable(self):
        """
        Disables the component.
        """
        mmal_check(
            mmal.mmal_component_disable(self._component),
            prefix="Failed to disable component")
    # Context manager protocol: closing the component on exit guarantees
    # firmware resources are released even if an exception occurs
    def __enter__(self):
        return self
    def __exit__(self, exc_type, exc_value, exc_tb):
        self.close()
    def __repr__(self):
        if self._component is not None:
            return '<%s "%s": %d inputs %d outputs>' % (
                self.__class__.__name__, self.name,
                len(self.inputs), len(self.outputs))
        else:
            return '<%s closed>' % self.__class__.__name__
class MMALControlPort(MMALObject):
    """
    Represents an MMAL port with properties to configure the port's parameters.
    """
    __slots__ = ('_port', '_params', '_wrapper')
    def __init__(self, port):
        """
        Wrap the MMAL port pointer *port*, exposing its parameters through
        :attr:`params`.
        """
        super(MMALControlPort, self).__init__()
        self._port = port
        self._params = MMALPortParams(port)
        # Holds the ctypes callback object while the port is enabled; see
        # the note in enable() about keeping it referenced
        self._wrapper = None
    @property
    def index(self):
        """
        Returns an integer indicating the port's position within its owning
        list (inputs, outputs, etc.)
        """
        return self._port[0].index
    @property
    def enabled(self):
        """
        Returns a :class:`bool` indicating whether the port is currently
        enabled. Unlike other classes, this is a read-only property. Use
        :meth:`enable` and :meth:`disable` to modify the value.
        """
        return bool(self._port[0].is_enabled)
    def enable(self, callback=None):
        """
        Enable the port with the specified callback function (this must be
        ``None`` for connected ports, and a callable for disconnected ports).
        The callback function must accept two parameters which will be this
        :class:`MMALControlPort` (or descendent) and an :class:`MMALBuffer`
        instance. Any return value will be ignored.
        """
        def wrapper(port, buf):
            # Wrap the raw buffer header and guarantee it is released back
            # to MMAL even if the user callback raises
            buf = MMALBuffer(buf)
            try:
                callback(self, buf)
            finally:
                buf.release()
        # The ctypes callback object is stored on the instance so it stays
        # referenced for as long as the port is enabled (the C library only
        # holds the raw function pointer)
        if callback:
            self._wrapper = mmal.MMAL_PORT_BH_CB_T(wrapper)
        else:
            self._wrapper = ct.cast(None, mmal.MMAL_PORT_BH_CB_T)
        mmal_check(
            mmal.mmal_port_enable(self._port, self._wrapper),
            prefix="Unable to enable port %s" % self.name)
    def disable(self):
        """
        Disable the port.
        """
        # NOTE: The test here only exists to avoid spamming the console; when
        # disabling an already disabled port MMAL dumps errors to stderr. If
        # this test isn't here closing a camera results in half a dozen lines
        # of ignored errors
        if self.enabled:
            try:
                mmal_check(
                    mmal.mmal_port_disable(self._port),
                    prefix="Unable to disable port %s" % self.name)
            except PiCameraMMALError as e:
                # Ignore the error if we're disabling an already disabled port
                if not (e.status == mmal.MMAL_EINVAL and not self.enabled):
                    raise e
        self._wrapper = None
    @property
    def name(self):
        # Firmware port names may carry a "(FORMAT)" suffix; see below
        result = self._port[0].name.decode('ascii')
        if result.endswith(')'):
            try:
                # strip (format) from port names as it doesn't really belong
                # there (it doesn't identify the port in any way) and makes
                # matching some of the correctional cases a pain
                return result[:result.rindex('(')]
            except ValueError:
                return result
        else:
            return result
    @property
    def type(self):
        """
        The type of the port. One of:
        * MMAL_PORT_TYPE_OUTPUT
        * MMAL_PORT_TYPE_INPUT
        * MMAL_PORT_TYPE_CONTROL
        * MMAL_PORT_TYPE_CLOCK
        """
        return self._port[0].type
    @property
    def capabilities(self):
        """
        The capabilities of the port. A bitfield of the following:
        * MMAL_PORT_CAPABILITY_PASSTHROUGH
        * MMAL_PORT_CAPABILITY_ALLOCATION
        * MMAL_PORT_CAPABILITY_SUPPORTS_EVENT_FORMAT_CHANGE
        """
        return self._port[0].capabilities
    @property
    def params(self):
        """
        The configurable parameters for the port. This is presented as a
        mutable mapping of parameter numbers to values, implemented by the
        :class:`MMALPortParams` class.
        """
        return self._params
    def __repr__(self):
        if self._port is not None:
            return '<MMALControlPort "%s">' % self.name
        else:
            return '<MMALControlPort closed>'
class MMALPort(MMALControlPort):
    """
    Represents an MMAL port with properties to configure and update the port's
    format. This is the base class of :class:`MMALVideoPort`,
    :class:`MMALAudioPort`, and :class:`MMALSubPicturePort`.
    """
    __slots__ = ('_opaque_subformat', '_pool', '_stopped', '_connection')
    # A mapping of corrected definitions of supported_formats for ports with
    # particular names. Older firmwares either raised EINVAL, ENOSYS, or just
    # reported the wrong things for various ports; these lists are derived from
    # querying newer firmwares or in some cases guessing sensible defaults
    # (for ports where even the newer firmwares get stuff wrong).
    _supported_formats_patch = {
        'vc.ril.camera:out:2': [
            mmal.MMAL_ENCODING_I420,
            mmal.MMAL_ENCODING_NV12,
            mmal.MMAL_ENCODING_I422,
            mmal.MMAL_ENCODING_YUYV,
            mmal.MMAL_ENCODING_YVYU,
            mmal.MMAL_ENCODING_VYUY,
            mmal.MMAL_ENCODING_UYVY,
            mmal.MMAL_ENCODING_BGR24,
            mmal.MMAL_ENCODING_BGRA,
            mmal.MMAL_ENCODING_RGB16,
            mmal.MMAL_ENCODING_YV12,
            mmal.MMAL_ENCODING_NV21,
            mmal.MMAL_ENCODING_RGB24,
            mmal.MMAL_ENCODING_RGBA,
        ],
        'vc.ril.image_encode:in:0': [
            mmal.MMAL_ENCODING_RGB16,
            mmal.MMAL_ENCODING_RGB24,
            mmal.MMAL_ENCODING_RGBA,
            mmal.MMAL_ENCODING_BGRA,
            mmal.MMAL_ENCODING_I420,
            mmal.MMAL_ENCODING_I422,
            mmal.MMAL_ENCODING_NV12,
            mmal.MMAL_ENCODING_YUYV,
            mmal.MMAL_ENCODING_YVYU,
            mmal.MMAL_ENCODING_VYUY,
        ],
        'vc.ril.image_encode:out:0': [
            mmal.MMAL_ENCODING_JPEG,
            mmal.MMAL_ENCODING_GIF,
            mmal.MMAL_ENCODING_PNG,
            mmal.MMAL_ENCODING_BMP,
            mmal.MMAL_ENCODING_PPM,
            mmal.MMAL_ENCODING_TGA,
        ],
        'vc.ril.resize:in:0': [
            mmal.MMAL_ENCODING_RGBA,
            mmal.MMAL_ENCODING_BGRA,
            mmal.MMAL_ENCODING_RGB16,
            mmal.MMAL_ENCODING_I420,
            # several invalid encodings (lowercase versions of the priors)
            # appear here in modern firmwares but since they don't map to any
            # constants they're excluded
            mmal.MMAL_ENCODING_I420_SLICE,
        ],
        'vc.ril.resize:out:0': [
            mmal.MMAL_ENCODING_RGBA,
            mmal.MMAL_ENCODING_BGRA,
            mmal.MMAL_ENCODING_RGB16,
            mmal.MMAL_ENCODING_I420,
            # same invalid encodings as above here
            mmal.MMAL_ENCODING_I420_SLICE,
        ],
        'vc.ril.isp:in:0': [
            mmal.MMAL_ENCODING_BAYER_SBGGR8,
            mmal.MMAL_ENCODING_BAYER_SBGGR10DPCM8,
            mmal.MMAL_ENCODING_BAYER_SBGGR10P,
            mmal.MMAL_ENCODING_BAYER_SBGGR12P,
            mmal.MMAL_ENCODING_YUYV,
            mmal.MMAL_ENCODING_YVYU,
            mmal.MMAL_ENCODING_VYUY,
            mmal.MMAL_ENCODING_UYVY,
            mmal.MMAL_ENCODING_I420,
            mmal.MMAL_ENCODING_YV12,
            mmal.MMAL_ENCODING_I422,
            mmal.MMAL_ENCODING_RGB24,
            mmal.MMAL_ENCODING_BGR24,
            mmal.MMAL_ENCODING_RGBA,
            mmal.MMAL_ENCODING_BGRA,
            mmal.MMAL_ENCODING_RGB16,
            mmal.MMAL_ENCODING_YUVUV128,
            mmal.MMAL_ENCODING_NV12,
            mmal.MMAL_ENCODING_NV21,
        ],
        'vc.ril.isp:out:0': [
            mmal.MMAL_ENCODING_YUYV,
            mmal.MMAL_ENCODING_YVYU,
            mmal.MMAL_ENCODING_VYUY,
            mmal.MMAL_ENCODING_UYVY,
            mmal.MMAL_ENCODING_I420,
            mmal.MMAL_ENCODING_YV12,
            mmal.MMAL_ENCODING_I422,
            mmal.MMAL_ENCODING_RGB24,
            mmal.MMAL_ENCODING_BGR24,
            mmal.MMAL_ENCODING_RGBA,
            mmal.MMAL_ENCODING_BGRA,
            mmal.MMAL_ENCODING_RGB16,
            mmal.MMAL_ENCODING_YUVUV128,
            mmal.MMAL_ENCODING_NV12,
            mmal.MMAL_ENCODING_NV21,
        ],
        'vc.null_sink:in:0': [
            mmal.MMAL_ENCODING_I420,
            mmal.MMAL_ENCODING_RGB24,
            mmal.MMAL_ENCODING_BGR24,
            mmal.MMAL_ENCODING_RGBA,
            mmal.MMAL_ENCODING_BGRA,
        ],
    }
    def __init__(self, port, opaque_subformat='OPQV'):
        """
        Wrap the MMAL port pointer *port*; *opaque_subformat* identifies what
        the port means when configured for OPAQUE format (see
        :attr:`opaque_subformat`).
        """
        super(MMALPort, self).__init__(port)
        self.opaque_subformat = opaque_subformat
        # Buffer pool allocated when the port is enabled with a callback
        self._pool = None
        # Set once the user callback signals completion; stops buffer
        # recycling (see enable)
        self._stopped = True
        # The MMALConnection / MMALPythonConnection attached to this port,
        # if any
        self._connection = None
    def __repr__(self):
        if self._port is not None:
            return '<MMALPort "%s": format=MMAL_FOURCC(%r) buffers=%dx%d>' % (
                self.name, mmal.FOURCC_str(self.format),
                self.buffer_count, self.buffer_size)
        else:
            return '<MMALPort closed>'
    def _get_opaque_subformat(self):
        return self._opaque_subformat
    def _set_opaque_subformat(self, value):
        self._opaque_subformat = value
    opaque_subformat = property(
        _get_opaque_subformat, _set_opaque_subformat, doc="""\
        Retrieves or sets the opaque sub-format that the port speaks. While
        most formats (I420, RGBA, etc.) mean one thing, the opaque format is
        special; different ports produce different sorts of data when
        configured for OPQV format. This property stores a string which
        uniquely identifies what the associated port means for OPQV format.
        If the port does not support opaque format at all, set this property to
        ``None``.
        :class:`MMALConnection` uses this information when negotiating formats
        for a connection between two ports.
        """)
    def _get_format(self):
        result = self._port[0].format[0].encoding
        # Translate RGB24<->BGR24 in both directions when the
        # FIX_RGB_BGR_ORDER flag is set (firmware-dependent workaround)
        if FIX_RGB_BGR_ORDER:
            return {
                mmal.MMAL_ENCODING_RGB24: mmal.MMAL_ENCODING_BGR24,
                mmal.MMAL_ENCODING_BGR24: mmal.MMAL_ENCODING_RGB24,
                }.get(result, result)
        else:
            return result
    def _set_format(self, value):
        if FIX_RGB_BGR_ORDER:
            value = {
                mmal.MMAL_ENCODING_RGB24: mmal.MMAL_ENCODING_BGR24,
                mmal.MMAL_ENCODING_BGR24: mmal.MMAL_ENCODING_RGB24,
                }.get(value, value)
        self._port[0].format[0].encoding = value
        # OPAQUE requires an explicit encoding variant; I420 is the
        # sensible default
        if value == mmal.MMAL_ENCODING_OPAQUE:
            self._port[0].format[0].encoding_variant = mmal.MMAL_ENCODING_I420
    format = property(_get_format, _set_format, doc="""\
        Retrieves or sets the encoding format of the port. Setting this
        attribute implicitly sets the encoding variant to a sensible value
        (I420 in the case of OPAQUE).
        After setting this attribute, call :meth:`commit` to make the changes
        effective.
        """)
    @property
    def supported_formats(self):
        """
        Retrieves a sequence of supported encodings on this port.
        """
        try:
            mp = self.params[mmal.MMAL_PARAMETER_SUPPORTED_ENCODINGS]
        except PiCameraMMALError as e:
            if e.status in (mmal.MMAL_EINVAL, mmal.MMAL_ENOSYS):
                # Workaround: old firmwares raise EINVAL or ENOSYS when various
                # ports are queried for supported formats. The following is the
                # correct sequence for old firmwares (note: swapped RGB24 and
                # BGR24 order in still port) ... probably (vc.ril.camera:out:2
                # is definitely right, the rest are largely guessed based on
                # queries of later firmwares)
                try:
                    return MMALPort._supported_formats_patch[self.name]
                except KeyError:
                    raise e
            else:
                raise
        else:
            # Trim trailing zero entries from the fixed-size encoding array,
            # and never return more entries than the header size permits
            result = [
                v for v in mp.encoding if v != 0
                ][:mp.hdr.size // ct.sizeof(ct.c_uint32)]
            # Workaround: Fix incorrect result on MMALImageEncoder.outputs[0]
            # from modern firmwares
            if self.name == 'vc.ril.image_encode:out:0' and result == [
                    mmal.MMAL_ENCODING_MP2V, mmal.MMAL_ENCODING_MP2V,
                    mmal.MMAL_ENCODING_H264, mmal.MMAL_ENCODING_H264,
                    mmal.MMAL_ENCODING_VP7, mmal.MMAL_ENCODING_VP7,
                    mmal.MMAL_ENCODING_VP6, mmal.MMAL_ENCODING_VP6]:
                return MMALPort._supported_formats_patch[self.name]
            else:
                return result
    def _get_bitrate(self):
        return self._port[0].format[0].bitrate
    def _set_bitrate(self, value):
        self._port[0].format[0].bitrate = value
    bitrate = property(_get_bitrate, _set_bitrate, doc="""\
        Retrieves or sets the bitrate limit for the port's format.
        """)
    def copy_from(self, source):
        """
        Copies the port's :attr:`format` from the *source*
        :class:`MMALControlPort`.
        """
        # Python-emulated ports store their format directly rather than
        # behind an MMAL port structure
        if isinstance(source, MMALPythonPort):
            mmal.mmal_format_copy(self._port[0].format, source._format)
        else:
            mmal.mmal_format_copy(self._port[0].format, source._port[0].format)
    def commit(self):
        """
        Commits the port's configuration and automatically updates the number
        and size of associated buffers according to the recommendations of the
        MMAL library. This is typically called after adjusting the port's
        format and/or associated settings (like width and height for video
        ports).
        """
        mmal_check(
            mmal.mmal_port_format_commit(self._port),
            prefix="Format couldn't be set on port %s" % self.name)
        # Workaround: Unfortunately, there is an upstream issue with the
        # buffer_num_recommended which means it can't currently be used (see
        # discussion in raspberrypi/userland#167). There's another upstream
        # issue with buffer_num_min which means we need to guard against 0
        # values...
        self._port[0].buffer_num = max(1, self._port[0].buffer_num_min)
        self._port[0].buffer_size = (
            self._port[0].buffer_size_recommended
            if self._port[0].buffer_size_recommended > 0 else
            self._port[0].buffer_size_min)
    @property
    def pool(self):
        """
        Returns the :class:`MMALPool` associated with the buffer, if any.
        """
        return self._pool
    def get_buffer(self, block=True, timeout=None):
        """
        Returns a :class:`MMALBuffer` from the associated :attr:`pool`. *block*
        and *timeout* act as they do in the corresponding
        :meth:`MMALPool.get_buffer`.
        """
        if not self.enabled:
            raise PiCameraPortDisabled(
                'cannot get buffer from disabled port %s' % self.name)
        return self.pool.get_buffer(block, timeout)
    def send_buffer(self, buf):
        """
        Send :class:`MMALBuffer` *buf* to the port.
        """
        # For emulated (Python) connections feeding this input port, give
        # the connection's callback a chance to modify or drop the buffer
        # before it reaches the firmware
        if (
                self.type == mmal.MMAL_PORT_TYPE_INPUT and
                isinstance(self._connection, MMALPythonConnection) and
                self._connection._callback is not None):
            try:
                modified_buf = self._connection._callback(self._connection, buf)
            except:
                buf.release()
                raise
            else:
                # A None result means the callback consumed/dropped the
                # buffer; recycle it and skip the send
                if modified_buf is None:
                    buf.release()
                    return
                else:
                    buf = modified_buf
        try:
            mmal_check(
                mmal.mmal_port_send_buffer(self._port, buf._buf),
                prefix="cannot send buffer to port %s" % self.name)
        except PiCameraMMALError as e:
            # If port is disabled, convert exception for convenience
            if e.status == mmal.MMAL_EINVAL and not self.enabled:
                raise PiCameraPortDisabled(
                    'cannot send buffer to disabled port %s' % self.name)
            else:
                raise
    def flush(self):
        """
        Flush the port.
        """
        mmal_check(
            mmal.mmal_port_flush(self._port),
            prefix="Unable to flush port %s" % self.name)
    def _get_buffer_count(self):
        return self._port[0].buffer_num
    def _set_buffer_count(self, value):
        if value < 1:
            raise PiCameraMMALError(mmal.MMAL_EINVAL, 'buffer count <1')
        self._port[0].buffer_num = value
    buffer_count = property(_get_buffer_count, _set_buffer_count, doc="""\
        The number of buffers allocated (or to be allocated) to the port.
        The ``mmalobj`` layer automatically configures this based on
        recommendations from the MMAL library.
        """)
    def _get_buffer_size(self):
        return self._port[0].buffer_size
    def _set_buffer_size(self, value):
        if value < 0:
            raise PiCameraMMALError(mmal.MMAL_EINVAL, 'buffer size <0')
        self._port[0].buffer_size = value
    buffer_size = property(_get_buffer_size, _set_buffer_size, doc="""\
        The size of buffers allocated (or to be allocated) to the port. The
        size of buffers is typically dictated by the port's format. The
        ``mmalobj`` layer automatically configures this based on
        recommendations from the MMAL library.
        """)
    def enable(self, callback=None):
        """
        Enable the port with the specified callback function (this must be
        ``None`` for connected ports, and a callable for disconnected ports).
        The callback function must accept two parameters which will be this
        :class:`MMALControlPort` (or descendent) and an :class:`MMALBuffer`
        instance. The callback should return ``True`` when processing is
        complete and no further calls are expected (e.g. at frame-end for an
        image encoder), and ``False`` otherwise.
        """
        def wrapper(port, buf):
            buf = MMALBuffer(buf)
            try:
                # Once the callback returns True, set _stopped so no further
                # user callbacks are made for this port
                if not self._stopped and callback(self, buf):
                    self._stopped = True
            finally:
                # Always recycle the buffer, and hand a fresh one back to
                # the port so output keeps flowing
                buf.release()
                try:
                    self._pool.send_buffer(block=False)
                except PiCameraPortDisabled:
                    # The port was disabled, no point trying again
                    pass
        # Workaround: There is a bug in the MJPEG encoder that causes a
        # deadlock if the FIFO is full on shutdown. Increasing the encoder
        # buffer size makes this less likely to happen. See
        # raspberrypi/userland#208. Connecting the encoder component resets the
        # output port's buffer size, hence why we correct this here, just
        # before enabling the port.
        if self._port[0].format[0].encoding == mmal.MMAL_ENCODING_MJPEG:
            self._port[0].buffer_size = max(512 * 1024, self._port[0].buffer_size_recommended)
        if callback:
            assert self._stopped
            assert self._pool is None
            self._stopped = False
            self._pool = MMALPortPool(self)
            try:
                self._wrapper = mmal.MMAL_PORT_BH_CB_T(wrapper)
                mmal_check(
                    mmal.mmal_port_enable(self._port, self._wrapper),
                    prefix="Unable to enable port %s" % self.name)
                # If this port is an output port, send it all the buffers
                # in the pool. If it's an input port, don't bother: the user
                # will presumably want to feed buffers to it manually
                if self._port[0].type == mmal.MMAL_PORT_TYPE_OUTPUT:
                    self._pool.send_all_buffers(block=False)
            except:
                # Roll back pool allocation on any failure during enabling
                self._pool.close()
                self._pool = None
                self._stopped = True
                raise
        else:
            super(MMALPort, self).enable()
    def disable(self):
        """
        Disable the port.
        """
        self._stopped = True
        super(MMALPort, self).disable()
        # Release the buffer pool allocated by enable(), if any
        if self._pool is not None:
            self._pool.close()
            self._pool = None
    @property
    def connection(self):
        """
        If this port is connected to another, this property holds the
        :class:`MMALConnection` or :class:`MMALPythonConnection` object which
        represents that connection. If this port is not connected, this
        property is ``None``.
        """
        return self._connection
    def connect(self, other, **options):
        """
        Connect this port to the *other* :class:`MMALPort` (or
        :class:`MMALPythonPort`). The type and configuration of the connection
        will be automatically selected.
        Various connection *options* can be specified as keyword arguments.
        These will be passed onto the :class:`MMALConnection` or
        :class:`MMALPythonConnection` constructor that is called (see those
        classes for an explanation of the available options).
        """
        # Always construct connections from the output end
        if self.type != mmal.MMAL_PORT_TYPE_OUTPUT:
            return other.connect(self, **options)
        if other.type != mmal.MMAL_PORT_TYPE_INPUT:
            raise PiCameraValueError(
                'A connection can only be established between an output and '
                'an input port')
        # A Python-emulated input port needs a Python-emulated connection
        if isinstance(other, MMALPythonPort):
            return MMALPythonConnection(self, other, **options)
        else:
            return MMALConnection(self, other, **options)
    def disconnect(self):
        """
        Destroy the connection between this port and another port.
        """
        if self.connection is not None:
            self.connection.close()
class MMALVideoPort(MMALPort):
    """
    An :class:`MMALPort` specialization for ports that carry video frames,
    adding frame size, framerate, and colorspace accessors.
    """

    __slots__ = ()

    def __repr__(self):
        # A closed port has had its underlying pointer released
        if self._port is None:
            return '<MMALVideoPort closed>'
        return (
            '<MMALVideoPort "%s": format=MMAL_FOURCC("%s") buffers=%dx%d '
            'frames=%s@%sfps colorspace=MMAL_FOURCC("%s")>' % (
                self.name, mmal.FOURCC_str(self.format),
                self._port[0].buffer_num, self._port[0].buffer_size,
                self.framesize, self.framerate,
                mmal.FOURCC_str(self.colorspace)))

    def _get_framesize(self):
        # The crop rectangle holds the size actually requested (the full
        # width/height fields are padded up to the block size; see setter)
        crop = self._port[0].format[0].es[0].video.crop
        return PiResolution(crop.width, crop.height)

    def _set_framesize(self, value):
        value = to_resolution(value)
        video = self._port[0].format[0].es[0].video
        # Pad the stored frame up to the camera's 32x16 block size while the
        # crop rectangle records the exact size the caller asked for
        video.width = bcm_host.VCOS_ALIGN_UP(value.width, 32)
        video.height = bcm_host.VCOS_ALIGN_UP(value.height, 16)
        video.crop.width = value.width
        video.crop.height = value.height

    framesize = property(_get_framesize, _set_framesize, doc="""\
        The size of the port's video frames as a (width, height) tuple.
        Assigning a value implicitly rounds the stored size up to the
        camera's block size (32x16) while preserving the requested size in
        the crop rectangle.
        Call :meth:`~MMALPort.commit` afterward to apply the change.
        """)

    def _get_framerate(self):
        rate = self._port[0].format[0].es[0].video.frame_rate
        try:
            return Fraction(rate.num, rate.den)
        except ZeroDivisionError:
            # A zero denominator is only valid when the rate is entirely
            # unset; report that as 0 fps
            assert rate.num == 0
            return Fraction(0, 1)

    def _set_framerate(self, value):
        value = to_fraction(value)
        rate = self._port[0].format[0].es[0].video.frame_rate
        rate.num = value.numerator
        rate.den = value.denominator

    framerate = property(_get_framerate, _set_framerate, doc="""\
        The framerate of the port's video frames in fps, as a
        :class:`~fractions.Fraction`.
        Call :meth:`~MMALPort.commit` afterward to apply the change.
        """)

    def _get_colorspace(self):
        return self._port[0].format[0].es[0].video.color_space

    def _set_colorspace(self, value):
        self._port[0].format[0].es[0].video.color_space = value

    colorspace = property(_get_colorspace, _set_colorspace, doc="""\
        The color-space of the port's frames.
        Call :meth:`~MMALPort.commit` afterward to apply the change.
        """)
class MMALAudioPort(MMALPort):
    """
    An :class:`MMALPort` specialization for ports that carry audio data.
    """

    __slots__ = ()

    def __repr__(self):
        # A closed port has had its underlying pointer released
        if self._port is None:
            return '<MMALAudioPort closed>'
        port = self._port[0]
        return '<MMALAudioPort "%s": format=MMAL_FOURCC(%r) buffers=%dx%d>' % (
            self.name, mmal.FOURCC_str(self.format),
            port.buffer_num, port.buffer_size)
class MMALSubPicturePort(MMALPort):
    """
    An :class:`MMALPort` specialization for ports that carry sub-picture
    (caption) data.
    """

    __slots__ = ()

    def __repr__(self):
        # A closed port has had its underlying pointer released
        if self._port is None:
            return '<MMALSubPicturePort closed>'
        port = self._port[0]
        return '<MMALSubPicturePort "%s": format=MMAL_FOURCC(%r) buffers=%dx%d>' % (
            self.name, mmal.FOURCC_str(self.format),
            port.buffer_num, port.buffer_size)
class MMALPortParams(object):
    """
    Represents the parameters of an MMAL port. This class implements the
    :attr:`MMALControlPort.params` attribute.
    Internally, the class understands how to convert certain structures to more
    common Python data-types. For example, parameters that expect an
    MMAL_RATIONAL_T type will return and accept Python's
    :class:`~fractions.Fraction` class (or any other numeric types), while
    parameters that expect an MMAL_BOOL_T type will treat anything as a truthy
    value. Parameters that expect the MMAL_PARAMETER_STRING_T structure will be
    treated as plain strings, and likewise MMAL_PARAMETER_INT32_T and similar
    structures will be treated as plain ints.
    Parameters that expect more complex structures will return and expect
    those structures verbatim.
    """
    __slots__ = ('_port',)
    def __init__(self, port):
        """
        Wrap the parameters of the MMAL port pointer *port*.
        """
        super(MMALPortParams, self).__init__()
        self._port = port
    def __getitem__(self, key):
        # Look up the parameter structure type registered for this
        # parameter id (raises KeyError for unknown parameters)
        dtype = PARAM_TYPES[key]
        # Use the short-cut functions where possible (teeny bit faster if we
        # get some C to do the structure wrapping for us)
        func = {
            mmal.MMAL_PARAMETER_RATIONAL_T: mmal.mmal_port_parameter_get_rational,
            mmal.MMAL_PARAMETER_BOOLEAN_T:  mmal.mmal_port_parameter_get_boolean,
            mmal.MMAL_PARAMETER_INT32_T:    mmal.mmal_port_parameter_get_int32,
            mmal.MMAL_PARAMETER_INT64_T:    mmal.mmal_port_parameter_get_int64,
            mmal.MMAL_PARAMETER_UINT32_T:   mmal.mmal_port_parameter_get_uint32,
            mmal.MMAL_PARAMETER_UINT64_T:   mmal.mmal_port_parameter_get_uint64,
            }.get(dtype, mmal.mmal_port_parameter_get)
        # conv translates the raw ctypes result into a friendly Python value
        # (complex structures are returned verbatim via the identity default)
        conv = {
            mmal.MMAL_PARAMETER_RATIONAL_T: lambda v: Fraction(v.num, v.den),
            mmal.MMAL_PARAMETER_BOOLEAN_T:  lambda v: v.value != mmal.MMAL_FALSE,
            mmal.MMAL_PARAMETER_INT32_T:    lambda v: v.value,
            mmal.MMAL_PARAMETER_INT64_T:    lambda v: v.value,
            mmal.MMAL_PARAMETER_UINT32_T:   lambda v: v.value,
            mmal.MMAL_PARAMETER_UINT64_T:   lambda v: v.value,
            mmal.MMAL_PARAMETER_STRING_T:   lambda v: v.str.decode('ascii'),
            }.get(dtype, lambda v: v)
        if func == mmal.mmal_port_parameter_get:
            # Generic getter: construct the full parameter structure with
            # its header (id and size) filled in
            result = dtype(
                mmal.MMAL_PARAMETER_HEADER_T(key, ct.sizeof(dtype))
                )
            mmal_check(
                func(self._port, result.hdr),
                prefix="Failed to get parameter %d" % key)
        else:
            # Short-cut getter: it expects a plain ctypes value rather than
            # the full parameter structure, so swap dtype accordingly
            dtype = {
                mmal.MMAL_PARAMETER_RATIONAL_T: mmal.MMAL_RATIONAL_T,
                mmal.MMAL_PARAMETER_BOOLEAN_T:  mmal.MMAL_BOOL_T,
                mmal.MMAL_PARAMETER_INT32_T:    ct.c_int32,
                mmal.MMAL_PARAMETER_INT64_T:    ct.c_int64,
                mmal.MMAL_PARAMETER_UINT32_T:   ct.c_uint32,
                mmal.MMAL_PARAMETER_UINT64_T:   ct.c_uint64,
                }[dtype]
            result = dtype()
            mmal_check(
                func(self._port, key, result),
                prefix="Failed to get parameter %d" % key)
        return conv(result)
    def __setitem__(self, key, value):
        # Look up the parameter structure type registered for this
        # parameter id (raises KeyError for unknown parameters)
        dtype = PARAM_TYPES[key]
        func = {
            mmal.MMAL_PARAMETER_RATIONAL_T: mmal.mmal_port_parameter_set_rational,
            mmal.MMAL_PARAMETER_BOOLEAN_T:  mmal.mmal_port_parameter_set_boolean,
            mmal.MMAL_PARAMETER_INT32_T:    mmal.mmal_port_parameter_set_int32,
            mmal.MMAL_PARAMETER_INT64_T:    mmal.mmal_port_parameter_set_int64,
            mmal.MMAL_PARAMETER_UINT32_T:   mmal.mmal_port_parameter_set_uint32,
            mmal.MMAL_PARAMETER_UINT64_T:   mmal.mmal_port_parameter_set_uint64,
            mmal.MMAL_PARAMETER_STRING_T:   mmal.mmal_port_parameter_set_string,
            }.get(dtype, mmal.mmal_port_parameter_set)
        # conv translates the friendly Python value into what the setter
        # expects (complex structures pass through unchanged)
        conv = {
            mmal.MMAL_PARAMETER_RATIONAL_T: lambda v: to_rational(v),
            mmal.MMAL_PARAMETER_BOOLEAN_T:  lambda v: mmal.MMAL_TRUE if v else mmal.MMAL_FALSE,
            mmal.MMAL_PARAMETER_STRING_T:   lambda v: v.encode('ascii'),
            }.get(dtype, lambda v: v)
        if func == mmal.mmal_port_parameter_set:
            # Generic setter: the caller must supply the complete parameter
            # structure with a correctly filled header
            mp = conv(value)
            assert mp.hdr.id == key
            assert mp.hdr.size >= ct.sizeof(dtype)
            mmal_check(
                func(self._port, mp.hdr),
                prefix="Failed to set parameter %d to %r" % (key, value))
        else:
            mmal_check(
                func(self._port, key, conv(value)),
                prefix="Failed to set parameter %d to %r" % (key, value))
class MMALBuffer(object):
"""
Represents an MMAL buffer header. This is usually constructed from the
buffer header pointer and is largely supplied to make working with
the buffer's data a bit simpler. Using the buffer as a context manager
implicitly locks the buffer's memory and returns the :mod:`ctypes`
buffer object itself::
def callback(port, buf):
with buf as data:
# data is a ctypes uint8 array with size entries
print(len(data))
Alternatively you can use the :attr:`data` property directly, which returns
and modifies the buffer's data as a :class:`bytes` object (note this is
generally slower than using the buffer object unless you are simply
replacing the entire buffer)::
def callback(port, buf):
# the buffer contents as a byte-string
print(buf.data)
"""
__slots__ = ('_buf',)
def __init__(self, buf):
super(MMALBuffer, self).__init__()
self._buf = buf
def _get_command(self):
return self._buf[0].cmd
def _set_command(self, value):
self._buf[0].cmd = value
command = property(_get_command, _set_command, doc="""\
The command set in the buffer's meta-data. This is usually 0 for
buffers returned by an encoder; typically this is only used by buffers
sent to the callback of a control port.
""")
def _get_flags(self):
return self._buf[0].flags
def _set_flags(self, value):
self._buf[0].flags = value
flags = property(_get_flags, _set_flags, doc="""\
The flags set in the buffer's meta-data, returned as a bitmapped
integer. Typical flags include:
* ``MMAL_BUFFER_HEADER_FLAG_EOS`` -- end of stream
* ``MMAL_BUFFER_HEADER_FLAG_FRAME_START`` -- start of frame data
* ``MMAL_BUFFER_HEADER_FLAG_FRAME_END`` -- end of frame data
* ``MMAL_BUFFER_HEADER_FLAG_KEYFRAME`` -- frame is a key-frame
* ``MMAL_BUFFER_HEADER_FLAG_FRAME`` -- frame data
* ``MMAL_BUFFER_HEADER_FLAG_CODECSIDEINFO`` -- motion estimatation data
""")
def _get_pts(self):
return self._buf[0].pts
def _set_pts(self, value):
self._buf[0].pts = value
pts = property(_get_pts, _set_pts, doc="""\
The presentation timestamp (PTS) of the buffer, as an integer number
of microseconds or ``MMAL_TIME_UNKNOWN``.
""")
def _get_dts(self):
return self._buf[0].dts
def _set_dts(self, value):
self._buf[0].dts = value
dts = property(_get_dts, _set_dts, doc="""\
The decoding timestamp (DTS) of the buffer, as an integer number of
microseconds or ``MMAL_TIME_UNKNOWN``.
""")
@property
def size(self):
"""
Returns the length of the buffer's data area in bytes. This will be
greater than or equal to :attr:`length` and is fixed in value.
"""
return self._buf[0].alloc_size
def _get_offset(self):
return self._buf[0].offset
def _set_offset(self, value):
assert 0 <= value <= self.size
self._buf[0].offset = value
self.length = min(self.size - self.offset, self.length)
offset = property(_get_offset, _set_offset, doc="""\
The offset from the start of the buffer at which the data actually
begins. Defaults to 0. If this is set to a value which would force the
current :attr:`length` off the end of the buffer's :attr:`size`, then
:attr:`length` will be decreased automatically.
""")
def _get_length(self):
return self._buf[0].length
def _set_length(self, value):
assert 0 <= value <= self.size - self.offset
self._buf[0].length = value
length = property(_get_length, _set_length, doc="""\
The length of data held in the buffer. Must be less than or equal to
the allocated size of data held in :attr:`size` minus the data
:attr:`offset`. This attribute can be used to effectively blank the
buffer by setting it to zero.
""")
def _get_data(self):
with self as buf:
return ct.string_at(
ct.byref(buf, self._buf[0].offset),
self._buf[0].length)
def _set_data(self, value):
value_len = buffer_bytes(value)
if value_len:
if value_len > self.size:
raise PiCameraValueError(
'data is too large for buffer (%d > %d)' % (
value_len, self.size))
bp = ct.c_uint8 * value_len
try:
sp = bp.from_buffer(value)
except TypeError:
sp = bp.from_buffer_copy(value)
with self as buf:
ct.memmove(buf, sp, value_len)
self._buf[0].offset = 0
self._buf[0].length = value_len
data = property(_get_data, _set_data, doc="""\
The data held in the buffer as a :class:`bytes` string. You can set
this attribute to modify the data in the buffer. Acceptable values
are anything that supports the buffer protocol, and which contains
:attr:`size` bytes or less. Setting this attribute implicitly modifies
the :attr:`length` attribute to the length of the specified value and
sets :attr:`offset` to zero.
.. note::
Accessing a buffer's data via this attribute is relatively slow
(as it copies the buffer's data to/from Python objects). See the
:class:`MMALBuffer` documentation for details of a faster (but
more complex) method.
""")
    def replicate(self, source):
        """
        Replicates the *source* :class:`MMALBuffer`. This copies all fields
        from the *source* buffer, including the internal :attr:`data` pointer.
        In other words, after replication this buffer and the *source* buffer
        will share the same block of memory for *data*.
        The *source* buffer will also be referenced internally by this buffer
        and will only be recycled once this buffer is released.
        .. note::
            This is fundamentally different to the operation of the
            :meth:`copy_from` method. It is much faster, but imposes the burden
            that two buffers now share data (the *source* cannot be released
            until the replicant has been released).
        """
        # Delegate to MMAL; mmal_check raises on a non-success status code
        mmal_check(
            mmal.mmal_buffer_header_replicate(self._buf, source._buf),
            prefix='unable to replicate buffer')
    def copy_from(self, source):
        """
        Copies all fields (including data) from the *source*
        :class:`MMALBuffer`. This buffer must have sufficient :attr:`size` to
        store :attr:`length` bytes from the *source* buffer. This method
        implicitly sets :attr:`offset` to zero, and :attr:`length` to the
        number of bytes copied.
        .. note::
            This is fundamentally different to the operation of the
            :meth:`replicate` method. It is much slower, but afterward the
            copied buffer is entirely independent of the *source*.
        """
        assert self.size >= source.length
        source_len = source._buf[0].length
        if source_len:
            # Lock both buffers' memory while copying the payload bytes
            with self as target_buf, source as source_buf:
                ct.memmove(target_buf, ct.byref(source_buf, source.offset), source_len)
        # Update the header (unconditionally) to describe the copied payload
        self._buf[0].offset = 0
        self._buf[0].length = source_len
        self.copy_meta(source)
def copy_meta(self, source):
"""
Copy meta-data from the *source* :class:`MMALBuffer`; specifically this
copies all buffer fields with the exception of :attr:`data`,
:attr:`length` and :attr:`offset`.
"""
self._buf[0].cmd = source._buf[0].cmd
self._buf[0].flags = source._buf[0].flags
self._buf[0].dts = source._buf[0].dts
self._buf[0].pts = source._buf[0].pts
self._buf[0].type[0] = source._buf[0].type[0]
    def acquire(self):
        """
        Acquire a reference to the buffer. This will prevent the buffer from
        being recycled until :meth:`release` is called. This method can be
        called multiple times in which case an equivalent number of calls
        to :meth:`release` must be made before the buffer will actually be
        released.
        """
        # Increments the MMAL-side reference count on the header
        mmal.mmal_buffer_header_acquire(self._buf)
    def release(self):
        """
        Release a reference to the buffer. This is the opposing call to
        :meth:`acquire`. Once all references have been released, the buffer
        will be recycled.
        """
        # Decrements the MMAL-side reference count; recycled at zero
        mmal.mmal_buffer_header_release(self._buf)
    def reset(self):
        """
        Resets all buffer header fields to default values.
        """
        mmal.mmal_buffer_header_reset(self._buf)
    def __enter__(self):
        # Lock the buffer header's memory for the duration of the with-block
        # and expose the full allocation as a ctypes uint8 array
        mmal_check(
            mmal.mmal_buffer_header_mem_lock(self._buf),
            prefix='unable to lock buffer header memory')
        return ct.cast(
            self._buf[0].data,
            ct.POINTER(ct.c_uint8 * self._buf[0].alloc_size)).contents
    def __exit__(self, *exc):
        # Always unlock; returning False propagates any exception
        mmal.mmal_buffer_header_mem_unlock(self._buf)
        return False
def __repr__(self):
if self._buf is not None:
return '<MMALBuffer object: flags=%s command=%s length=%d>' % (
''.join((
'S' if self.flags & mmal.MMAL_BUFFER_HEADER_FLAG_FRAME_START else '_',
'E' if self.flags & mmal.MMAL_BUFFER_HEADER_FLAG_FRAME_END else '_',
'K' if self.flags & mmal.MMAL_BUFFER_HEADER_FLAG_KEYFRAME else '_',
'C' if self.flags & mmal.MMAL_BUFFER_HEADER_FLAG_CONFIG else '_',
'M' if self.flags & mmal.MMAL_BUFFER_HEADER_FLAG_CODECSIDEINFO else '_',
'X' if self.flags & mmal.MMAL_BUFFER_HEADER_FLAG_EOS else '_',
)), {
0: 'none',
mmal.MMAL_EVENT_ERROR: 'error',
mmal.MMAL_EVENT_FORMAT_CHANGED: 'format-change',
mmal.MMAL_EVENT_PARAMETER_CHANGED: 'param-change',
mmal.MMAL_EVENT_EOS: 'end-of-stream',
}[self.command], self.length)
else:
return '<MMALBuffer object: ???>'
class MMALQueue(object):
    """
    Represents an MMAL buffer queue. Buffers can be added to the queue with the
    :meth:`put` method, and retrieved from the queue (with optional wait
    timeout) with the :meth:`get` method.
    """
    __slots__ = ('_queue', '_created')

    def __init__(self, queue):
        # _created records whether this instance owns the underlying MMAL
        # queue (and must destroy it on close) or merely wraps a pointer
        # owned by something else (e.g. a pool's queue)
        self._created = False
        self._queue = queue

    @classmethod
    def create(cls):
        """
        Create a new MMAL queue and return an :class:`MMALQueue` that owns it
        (the underlying queue is destroyed when :meth:`close` is called).
        """
        self = cls(mmal.mmal_queue_create())
        self._created = True
        return self

    def close(self):
        if self._created:
            # BUG FIX: this previously called the unqualified name
            # mmal_queue_destroy which does not exist at module level,
            # raising NameError whenever a created queue was closed
            mmal.mmal_queue_destroy(self._queue)
        self._queue = None

    def __len__(self):
        return mmal.mmal_queue_length(self._queue)

    def get(self, block=True, timeout=None):
        """
        Get the next buffer from the queue. If *block* is ``True`` (the default)
        and *timeout* is ``None`` (the default) then the method will block
        until a buffer is available. Otherwise *timeout* is the maximum time to
        wait (in seconds) for a buffer to become available. If a buffer is not
        available before the timeout expires, the method returns ``None``.
        Likewise, if *block* is ``False`` and no buffer is immediately
        available then ``None`` is returned.
        """
        if block and timeout is None:
            buf = mmal.mmal_queue_wait(self._queue)
        elif block and timeout is not None:
            # mmal_queue_timedwait expects milliseconds
            buf = mmal.mmal_queue_timedwait(self._queue, int(timeout * 1000))
        else:
            buf = mmal.mmal_queue_get(self._queue)
        if buf:
            return MMALBuffer(buf)

    def put(self, buf):
        """
        Place :class:`MMALBuffer` *buf* at the back of the queue.
        """
        mmal.mmal_queue_put(self._queue, buf._buf)

    def put_back(self, buf):
        """
        Place :class:`MMALBuffer` *buf* at the front of the queue. This is
        used when a buffer was removed from the queue but needs to be put
        back at the front where it was originally taken from.
        """
        mmal.mmal_queue_put_back(self._queue, buf._buf)
class MMALPool(object):
    """
    Represents an MMAL pool containing :class:`MMALBuffer` objects. All active
    ports are associated with a pool of buffers, and a queue. Although
    instances can be indexed like a sequence of :class:`MMALBuffer` objects,
    that access style is only intended for debugging; normal code should use
    the :meth:`get_buffer`, :meth:`send_buffer`, and :meth:`send_all_buffers`
    methods, which operate via the encapsulated :class:`MMALQueue`.
    """
    __slots__ = ('_pool', '_queue')

    def __init__(self, pool):
        self._pool = pool
        super(MMALPool, self).__init__()
        self._queue = MMALQueue(pool[0].queue)

    def __len__(self):
        return self._pool[0].headers_num

    def __getitem__(self, index):
        return MMALBuffer(self._pool[0].header[index])

    @property
    def queue(self):
        """
        The :class:`MMALQueue` associated with the pool.
        """
        return self._queue

    def close(self):
        if self._pool is None:
            return
        mmal.mmal_pool_destroy(self._pool)
        self._pool = None

    def resize(self, new_count, new_size):
        """
        Resizes the pool to contain *new_count* buffers with *new_size* bytes
        allocated to each buffer.
        *new_count* must be 1 or more (you cannot resize a pool to contain
        no headers). However, *new_size* can be 0 which causes all payload
        buffers to be released.
        .. warning::
            If the pool is associated with a port, the port must be disabled
            when resizing the pool.
        """
        mmal_check(
            mmal.mmal_pool_resize(self._pool, new_count, new_size),
            prefix='unable to resize pool')

    def get_buffer(self, block=True, timeout=None):
        """
        Get the next buffer from the pool's queue. See :meth:`MMALQueue.get`
        for the meaning of the parameters.
        """
        return self._queue.get(block, timeout)

    def send_buffer(self, port, block=True, timeout=None):
        """
        Get a buffer from the pool's queue and send it to *port*. *block* and
        *timeout* act as they do in :meth:`get_buffer`. If no buffer is
        available (for the given *block* and *timeout* values),
        :exc:`~picamera.PiCameraMMALError` is raised.
        """
        candidate = self.get_buffer(block, timeout)
        if candidate is None:
            raise PiCameraMMALError(mmal.MMAL_EAGAIN, 'no buffers available')
        port.send_buffer(candidate)

    def send_all_buffers(self, port, block=True, timeout=None):
        """
        Send all buffers from the queue to *port*. *block* and *timeout* act
        as they do in :meth:`get_buffer`. If no buffer is available (for the
        given *block* and *timeout* values),
        :exc:`~picamera.PiCameraMMALError` is raised.
        """
        for _ in range(len(self._queue)):
            self.send_buffer(port, block, timeout)
class MMALPortPool(MMALPool):
    """
    Construct an MMAL pool for the number and size of buffers required by
    the :class:`MMALPort` *port*.
    """
    __slots__ = ('_port',)
    def __init__(self, port):
        # Size the pool to the port's current buffer_num/buffer_size
        pool = mmal.mmal_port_pool_create(
            port._port, port._port[0].buffer_num, port._port[0].buffer_size)
        if not pool:
            raise PiCameraMMALError(
                mmal.MMAL_ENOSPC,
                'failed to create buffer header pool for port %s' % port.name)
        super(MMALPortPool, self).__init__(pool)
        self._port = port
    def close(self):
        if self._pool is not None:
            # Port-owned pools must be destroyed with the port-specific API
            # (not the plain mmal_pool_destroy used by the base class)
            mmal.mmal_port_pool_destroy(self._port._port, self._pool)
            self._port = None
            self._pool = None
        # Base close() is a no-op once _pool is None, but keeps subclasses
        # of this class working
        super(MMALPortPool, self).close()
    @property
    def port(self):
        # The MMALPort this pool was created for
        return self._port
    def send_buffer(self, port=None, block=True, timeout=None):
        """
        Get a buffer from the pool and send it to *port* (or the port the pool
        is associated with by default). *block* and *timeout* act as they do in
        :meth:`MMALPool.get_buffer`.
        """
        if port is None:
            port = self._port
        super(MMALPortPool, self).send_buffer(port, block, timeout)
    def send_all_buffers(self, port=None, block=True, timeout=None):
        """
        Send all buffers from the pool to *port* (or the port the pool is
        associated with by default). *block* and *timeout* act as they do in
        :meth:`MMALPool.get_buffer`.
        """
        if port is None:
            port = self._port
        super(MMALPortPool, self).send_all_buffers(port, block, timeout)
class MMALBaseConnection(MMALObject):
    """
    Abstract base class for :class:`MMALConnection` and
    :class:`MMALPythonConnection`. Handles weakrefs to the source and
    target ports, and format negotiation. All other connection details are
    handled by the descendent classes.
    """
    __slots__ = ('_source', '_target')
    default_formats = ()
    # (source, target) OPAQUE sub-format pairs that may be connected directly
    compatible_opaque_formats = {
        ('OPQV-single', 'OPQV-single'),
        ('OPQV-dual', 'OPQV-dual'),
        ('OPQV-strips', 'OPQV-strips'),
        ('OPQV-dual', 'OPQV-single'),
        ('OPQV-single', 'OPQV-dual'),  # recent firmwares permit this
        }
    def __init__(
            self, source, target, formats=default_formats):
        super(MMALBaseConnection, self).__init__()
        # Validate port types and ensure neither port is already connected
        if not isinstance(source, (MMALPort, MMALPythonPort)):
            raise PiCameraValueError('source is not a port')
        if not isinstance(target, (MMALPort, MMALPythonPort)):
            raise PiCameraValueError('target is not a port')
        if source.type != mmal.MMAL_PORT_TYPE_OUTPUT:
            raise PiCameraValueError('source is not an output port')
        if target.type != mmal.MMAL_PORT_TYPE_INPUT:
            raise PiCameraValueError('target is not an input port')
        if source.connection is not None:
            raise PiCameraValueError('source port is already connected')
        if target.connection is not None:
            raise PiCameraValueError('target port is already connected')
        if formats is None:
            formats = ()
        self._source = source
        self._target = target
        # A single (non-iterable) format is permitted; wrap it in a tuple
        try:
            iter(formats)
        except TypeError:
            formats = (formats,)
        self._negotiate_format(formats)
        # Register this connection on both ports only after negotiation
        # succeeds
        source._connection = self
        target._connection = self
        # Descendents continue with connection implementation...
    def close(self):
        # Break both port back-references; safe to call multiple times
        if self._source is not None:
            self._source._connection = None
        self._source = None
        if self._target is not None:
            self._target._connection = None
        self._target = None
    def _negotiate_format(self, formats):
        # Try each candidate format in order until the source format can be
        # committed and successfully copied to the target port
        def copy_format():
            self._source.commit()
            self._target.copy_from(self._source)
            self._target.commit()
        def max_buffers():
            # Harmonize buffer counts/sizes to the larger of the two ports
            self._source.buffer_count = self._target.buffer_count = max(
                self._source.buffer_count, self._target.buffer_count)
            self._source.buffer_size = self._target.buffer_size = max(
                self._source.buffer_size, self._target.buffer_size)
        # Filter out formats that aren't supported on both source and target
        # ports. This is a little tricky as ports that support OPAQUE never
        # claim they do (so we have to assume it's mutually supported)
        mutually_supported = (
            set(self._source.supported_formats) &
            set(self._target.supported_formats)
            ) | {mmal.MMAL_ENCODING_OPAQUE}
        formats = [f for f in formats if f in mutually_supported]
        if formats:
            # If there are any formats left to try, perform the negotiation
            # with the filtered list. Again, there's some special casing to
            # deal with the incompatible OPAQUE sub-formats
            for f in formats:
                if f == mmal.MMAL_ENCODING_OPAQUE:
                    if (self._source.opaque_subformat,
                            self._target.opaque_subformat) in self.compatible_opaque_formats:
                        self._source.format = mmal.MMAL_ENCODING_OPAQUE
                    else:
                        continue
                else:
                    self._source.format = f
                try:
                    copy_format()
                except PiCameraMMALError as e:
                    # MMAL_EINVAL means "format rejected"; try the next one
                    if e.status != mmal.MMAL_EINVAL:
                        raise
                    continue
                else:
                    max_buffers()
                    return
            raise PiCameraMMALError(
                mmal.MMAL_EINVAL, 'failed to negotiate port format')
        else:
            # If no formats are available to try (either from filtering or
            # because none were given), assume the source port is set up
            # properly. Just copy the format to the target and hope the caller
            # knows what they're doing
            try:
                copy_format()
            except PiCameraMMALError as e:
                if e.status != mmal.MMAL_EINVAL:
                    raise
                raise PiCameraMMALError(
                    mmal.MMAL_EINVAL, 'failed to copy source format to target port')
            else:
                max_buffers()
    def __enter__(self):
        return self
    def __exit__(self, exc_type, exc_value, exc_tb):
        self.close()
    @property
    def source(self):
        """
        The source :class:`MMALPort` or :class:`MMALPythonPort` of the
        connection.
        """
        return self._source
    @property
    def target(self):
        """
        The target :class:`MMALPort` or :class:`MMALPythonPort` of the
        connection.
        """
        return self._target
class MMALConnection(MMALBaseConnection):
    """
    Represents an MMAL internal connection between two components. The
    constructor accepts arguments providing the *source* :class:`MMALPort` and
    *target* :class:`MMALPort`.
    The *formats* parameter specifies an iterable of formats (in preference
    order) that the connection may attempt when negotiating formats between
    the two ports. If this is ``None``, or an empty iterable, no negotiation
    will take place and the source port's format will simply be copied to the
    target port. Otherwise, the iterable will be worked through in order until
    a format acceptable to both ports is discovered.
    .. note::
        The default *formats* list starts with OPAQUE; the class understands
        the different OPAQUE sub-formats (see :ref:`mmal` for more information)
        and will only select OPAQUE if compatible sub-formats can be used on
        both ports.
    The *callback* parameter can optionally specify a callable which will be
    executed for each buffer that traverses the connection (providing an
    opportunity to manipulate or drop that buffer). If specified, it must be a
    callable which accepts two parameters: the :class:`MMALConnection` object
    sending the data, and the :class:`MMALBuffer` object containing data. The
    callable may optionally manipulate the :class:`MMALBuffer` and return it
    to permit it to continue traversing the connection, or return ``None``
    in which case the buffer will be released.
    .. note::
        There is a significant performance penalty for specifying a
        callback between MMAL components as it requires buffers to be
        copied from the GPU's memory to the CPU's memory and back again.
    .. data:: default_formats
        :annotation: = (MMAL_ENCODING_OPAQUE, MMAL_ENCODING_I420, MMAL_ENCODING_RGB24, MMAL_ENCODING_BGR24, MMAL_ENCODING_RGBA, MMAL_ENCODING_BGRA)
        Class attribute defining the default formats used to negotiate
        connections between MMAL components.
    """
    __slots__ = ('_connection', '_callback', '_wrapper')
    default_formats = (
        mmal.MMAL_ENCODING_OPAQUE,
        mmal.MMAL_ENCODING_I420,
        mmal.MMAL_ENCODING_RGB24,
        mmal.MMAL_ENCODING_BGR24,
        mmal.MMAL_ENCODING_RGBA,
        mmal.MMAL_ENCODING_BGRA,
        )
    def __init__(
            self, source, target, formats=default_formats, callback=None):
        if not isinstance(source, MMALPort):
            raise PiCameraValueError('source is not an MMAL port')
        if not isinstance(target, MMALPort):
            raise PiCameraValueError('target is not an MMAL port')
        super(MMALConnection, self).__init__(source, target, formats)
        self._connection = ct.POINTER(mmal.MMAL_CONNECTION_T)()
        self._callback = callback
        # Without a callback we can use tunnelling (buffers never surface
        # to Python; see the performance note in the class docstring)
        flags = mmal.MMAL_CONNECTION_FLAG_ALLOCATION_ON_INPUT
        if callback is None:
            flags |= mmal.MMAL_CONNECTION_FLAG_TUNNELLING
        try:
            mmal_check(
                mmal.mmal_connection_create(
                    self._connection, source._port, target._port, flags),
                prefix="Failed to create connection")
        except:
            # Ensure close() never tries to destroy a half-made connection
            self._connection = None
            raise
    def close(self):
        if self._connection is not None:
            mmal.mmal_connection_destroy(self._connection)
            self._connection = None
        # Drop the ctypes callback wrapper (kept alive in _wrapper while
        # the connection is enabled)
        self._wrapper = None
        super(MMALConnection, self).close()
    @property
    def enabled(self):
        """
        Returns ``True`` if the connection is enabled. Use :meth:`enable`
        and :meth:`disable` to control the state of the connection.
        """
        return bool(self._connection[0].is_enabled)
    def enable(self):
        """
        Enable the connection. When a connection is enabled, data is
        continually transferred from the output port of the source to the input
        port of the target component.
        """
        def wrapper(connection):
            # Runs on an MMAL thread: pass buffers from the connection queue
            # through the user callback, then forward (or release) them
            buf = mmal.mmal_queue_get(connection[0].queue)
            if buf:
                buf = MMALBuffer(buf)
                try:
                    modified_buf = self._callback(self, buf)
                except:
                    buf.release()
                    raise
                else:
                    if modified_buf is not None:
                        try:
                            self._target.send_buffer(modified_buf)
                        except PiCameraPortDisabled:
                            # Target port disabled; ignore the error
                            pass
                    else:
                        # Callback returned None: drop the buffer
                        buf.release()
                    return
            # Recycle any free buffers from the connection's pool back to
            # the source port
            buf = mmal.mmal_queue_get(connection[0].pool[0].queue)
            if buf:
                buf = MMALBuffer(buf)
                try:
                    self._source.send_buffer(buf)
                except PiCameraPortDisabled:
                    # Source port has been disabled; ignore the error
                    pass
        if self._callback is not None:
            # Keep a reference to the ctypes wrapper alive for the lifetime
            # of the enabled connection (otherwise it may be collected while
            # MMAL still holds the function pointer)
            self._wrapper = mmal.MMAL_CONNECTION_CALLBACK_T(wrapper)
            self._connection[0].callback = self._wrapper
            self._source.params[mmal.MMAL_PARAMETER_ZERO_COPY] = True
            self._target.params[mmal.MMAL_PARAMETER_ZERO_COPY] = True
        mmal_check(
            mmal.mmal_connection_enable(self._connection),
            prefix="Failed to enable connection")
        if self._callback is not None:
            # Prime the source port with the connection's buffers
            MMALPool(self._connection[0].pool).send_all_buffers(self._source)
    def disable(self):
        """
        Disables the connection.
        """
        mmal_check(
            mmal.mmal_connection_disable(self._connection),
            prefix="Failed to disable connection")
        self._wrapper = None
    @property
    def name(self):
        return self._connection[0].name.decode('ascii')
    def __repr__(self):
        if self._connection is not None:
            return '<MMALConnection "%s">' % self.name
        else:
            return '<MMALConnection closed>'
class MMALRawCamera(MMALBaseComponent):
    """
    The MMAL "raw camera" component.
    Don't use this! If you insist on using this anyway, read the forum post
    about `raw sensor access`_ first.
    .. _raw sensor access: https://www.raspberrypi.org/forums/viewtopic.php?f=43&t=109137
    """
    __slots__ = ()
    component_type = mmal.MMAL_COMPONENT_RAW_CAMERA
    # No input ports; a single OPAQUE output
    opaque_input_subformats = ()
    opaque_output_subformats = ('OPQV-single',)
class MMALCamera(MMALBaseComponent):
    """
    Represents the MMAL camera component. This component has 0 input ports and
    3 output ports. The intended use of the output ports (which in turn
    determines the behaviour of those ports) is as follows:
    * Port 0 is intended for preview renderers
    * Port 1 is intended for video recording
    * Port 2 is intended for still image capture
    Use the ``MMAL_PARAMETER_CAMERA_CONFIG`` parameter on the control port to
    obtain and manipulate the camera's configuration.
    """
    __slots__ = ()
    component_type = mmal.MMAL_COMPONENT_DEFAULT_CAMERA
    opaque_output_subformats = ('OPQV-single', 'OPQV-dual', 'OPQV-strips')

    # Annotation structure revisions in ascending order; the revision number
    # exposed by annotate_rev is index + 1
    annotate_structs = (
        mmal.MMAL_PARAMETER_CAMERA_ANNOTATE_T,
        mmal.MMAL_PARAMETER_CAMERA_ANNOTATE_V2_T,
        mmal.MMAL_PARAMETER_CAMERA_ANNOTATE_V3_T,
        )

    def __init__(self):
        global FIX_RGB_BGR_ORDER
        super(MMALCamera, self).__init__()
        if PARAM_TYPES[mmal.MMAL_PARAMETER_ANNOTATE] is None:
            found = False
            # try largest struct to smallest as later firmwares still happily
            # accept earlier revision structures
            # XXX do old firmwares reject too-large structs?
            for struct in reversed(MMALCamera.annotate_structs):
                try:
                    PARAM_TYPES[mmal.MMAL_PARAMETER_ANNOTATE] = struct
                    self.control.params[mmal.MMAL_PARAMETER_ANNOTATE]
                except PiCameraMMALError:
                    pass
                else:
                    found = True
                    break
            if not found:
                PARAM_TYPES[mmal.MMAL_PARAMETER_ANNOTATE] = None
                raise PiCameraMMALError(
                    mmal.MMAL_EINVAL, "unknown camera annotation structure revision")
        if FIX_RGB_BGR_ORDER is None:
            # old firmware lists BGR24 before RGB24 in supported_formats
            for f in self.outputs[1].supported_formats:
                if f == mmal.MMAL_ENCODING_BGR24:
                    FIX_RGB_BGR_ORDER = True
                    break
                elif f == mmal.MMAL_ENCODING_RGB24:
                    FIX_RGB_BGR_ORDER = False
                    break

    def _get_annotate_rev(self):
        try:
            return MMALCamera.annotate_structs.index(
                PARAM_TYPES[mmal.MMAL_PARAMETER_ANNOTATE]) + 1
        except ValueError:
            # BUG FIX: tuple.index raises ValueError (not IndexError) when
            # the current struct is not a known revision; the previous
            # "except IndexError" could never fire
            raise PiCameraMMALError(
                mmal.MMAL_EINVAL, "unknown camera annotation structure revision")

    def _set_annotate_rev(self, value):
        try:
            if not 1 <= value <= len(MMALCamera.annotate_structs):
                # BUG FIX: reject 0/negative revisions explicitly; previously
                # value=0 silently selected the *last* revision via negative
                # indexing
                raise IndexError(value)
            PARAM_TYPES[mmal.MMAL_PARAMETER_ANNOTATE] = MMALCamera.annotate_structs[value - 1]
        except IndexError:
            raise PiCameraMMALError(
                mmal.MMAL_EINVAL, "invalid camera annotation structure revision")

    annotate_rev = property(_get_annotate_rev, _set_annotate_rev, doc="""\
        The annotation capabilities of the firmware have evolved over time and
        several structures are available for querying and setting video
        annotations. By default the :class:`MMALCamera` class will pick the
        latest annotation structure supported by the current firmware but you
        can select older revisions with :attr:`annotate_rev` for other purposes
        (e.g. testing).
        """)
class MMALCameraInfo(MMALBaseComponent):
    """
    Represents the MMAL camera-info component. Query the
    ``MMAL_PARAMETER_CAMERA_INFO`` parameter on the control port to obtain
    information about the connected camera module.
    """
    __slots__ = ()
    component_type = mmal.MMAL_COMPONENT_DEFAULT_CAMERA_INFO

    # Camera-info structure revisions in ascending order; the revision
    # number exposed by info_rev is index + 1
    info_structs = (
        mmal.MMAL_PARAMETER_CAMERA_INFO_T,
        mmal.MMAL_PARAMETER_CAMERA_INFO_V2_T,
        )

    def __init__(self):
        super(MMALCameraInfo, self).__init__()
        if PARAM_TYPES[mmal.MMAL_PARAMETER_CAMERA_INFO] is None:
            found = False
            # try smallest structure to largest as later firmwares reject
            # older structures
            for struct in MMALCameraInfo.info_structs:
                try:
                    PARAM_TYPES[mmal.MMAL_PARAMETER_CAMERA_INFO] = struct
                    self.control.params[mmal.MMAL_PARAMETER_CAMERA_INFO]
                except PiCameraMMALError:
                    pass
                else:
                    found = True
                    break
            if not found:
                PARAM_TYPES[mmal.MMAL_PARAMETER_CAMERA_INFO] = None
                raise PiCameraMMALError(
                    mmal.MMAL_EINVAL, "unknown camera info structure revision")

    def _get_info_rev(self):
        try:
            return MMALCameraInfo.info_structs.index(
                PARAM_TYPES[mmal.MMAL_PARAMETER_CAMERA_INFO]) + 1
        except ValueError:
            # BUG FIX: tuple.index raises ValueError (not IndexError) when
            # the current struct is not a known revision; the previous
            # "except IndexError" could never fire
            raise PiCameraMMALError(
                mmal.MMAL_EINVAL, "unknown camera info structure revision")

    def _set_info_rev(self, value):
        try:
            if not 1 <= value <= len(MMALCameraInfo.info_structs):
                # BUG FIX: reject 0/negative revisions explicitly; previously
                # value=0 silently selected the *last* revision via negative
                # indexing
                raise IndexError(value)
            PARAM_TYPES[mmal.MMAL_PARAMETER_CAMERA_INFO] = MMALCameraInfo.info_structs[value - 1]
        except IndexError:
            raise PiCameraMMALError(
                mmal.MMAL_EINVAL, "invalid camera info structure revision")

    info_rev = property(_get_info_rev, _set_info_rev, doc="""\
        The camera information capabilities of the firmware have evolved over
        time and several structures are available for querying camera
        information. When initialized, :class:`MMALCameraInfo` will attempt
        to discover which structure is in use by the extant firmware. This
        property can be used to discover the structure version and to modify
        the version in use for other purposes (e.g. testing).
        """)
class MMALComponent(MMALBaseComponent):
    """
    Represents an MMAL component that acts as a filter of some sort, with a
    single input that connects to an upstream source port. This is an abstract
    base class.
    """
    __slots__ = ()

    def __init__(self):
        super(MMALComponent, self).__init__()
        # Filters by definition have exactly one input
        assert len(self.opaque_input_subformats) == 1

    def close(self):
        self.disconnect()
        super(MMALComponent, self).close()

    def enable(self):
        super(MMALComponent, self).enable()
        if self.connection is not None:
            self.connection.enable()

    def disable(self):
        # Disable the upstream connection before the component itself
        if self.connection is not None:
            self.connection.disable()
        super(MMALComponent, self).disable()

    def connect(self, source, **options):
        """
        Connects the input port of this component to the specified *source*
        :class:`MMALPort` or :class:`MMALPythonPort`. Alternatively, as a
        convenience (primarily intended for command line experimentation; don't
        use this in scripts), *source* can be another component in which case
        the first unconnected output port will be selected as *source*.
        Keyword arguments will be passed along to the connection constructor.
        See :class:`MMALConnection` and :class:`MMALPythonConnection` for
        further information.
        """
        if isinstance(source, (MMALPort, MMALPythonPort)):
            # BUG FIX: options were previously dropped in this branch,
            # contradicting the docstring (the component branch below has
            # always forwarded them)
            return self.inputs[0].connect(source, **options)
        else:
            for port in source.outputs:
                if not port.connection:
                    return self.inputs[0].connect(port, **options)
            raise PiCameraMMALError(
                mmal.MMAL_EINVAL, 'no free output ports on %r' % source)

    def disconnect(self):
        """
        Destroy the connection between this component's input port and the
        upstream component.
        """
        self.inputs[0].disconnect()

    @property
    def connection(self):
        """
        The :class:`MMALConnection` or :class:`MMALPythonConnection` object
        linking this component to the upstream component.
        """
        return self.inputs[0].connection
class MMALSplitter(MMALComponent):
    """
    Represents the MMAL splitter component. This component has 1 input port
    and 4 output ports which all generate duplicates of buffers passed to the
    input port.
    """
    __slots__ = ()
    component_type = mmal.MMAL_COMPONENT_DEFAULT_VIDEO_SPLITTER
    opaque_input_subformats = ('OPQV-single',)
    # All four outputs mirror the input's OPAQUE sub-format
    opaque_output_subformats = ('OPQV-single',) * 4
class MMALISPResizer(MMALComponent):
    """
    Represents the MMAL ISP resizer component. This component has 1 input port
    and 1 output port, and supports resizing via the VideoCore ISP, along with
    conversion of numerous formats into numerous other formats (e.g. OPAQUE to
    RGB, etc). This is more efficient than :class:`MMALResizer` but is only
    available on later firmware versions.
    """
    __slots__ = ()
    component_type = mmal.MMAL_COMPONENT_DEFAULT_ISP
    opaque_input_subformats = ('OPQV-single',)
    # None: the output never uses an OPAQUE sub-format
    opaque_output_subformats = (None,)
class MMALResizer(MMALComponent):
    """
    Represents the MMAL VPU resizer component. This component has 1 input port
    and 1 output port. This supports resizing via the VPU. This is not as
    efficient as :class:`MMALISPResizer` but is available on all firmware
    versions. The output port can (and usually should) have a different frame
    size to the input port.
    """
    __slots__ = ()
    component_type = mmal.MMAL_COMPONENT_DEFAULT_RESIZER
    # Neither port uses an OPAQUE sub-format
    opaque_input_subformats = (None,)
    opaque_output_subformats = (None,)
class MMALEncoder(MMALComponent):
    """
    Represents a generic MMAL encoder. This is an abstract base class.
    """
    # Concrete encoders define component_type and the OPAQUE sub-formats
    __slots__ = ()
class MMALVideoEncoder(MMALEncoder):
    """
    Represents the MMAL video encoder component. This component has 1 input
    port and 1 output port. The output port is usually configured with
    ``MMAL_ENCODING_H264`` or ``MMAL_ENCODING_MJPEG``.
    """
    __slots__ = ()
    component_type = mmal.MMAL_COMPONENT_DEFAULT_VIDEO_ENCODER
    # The video encoder expects the dual-image OPAQUE sub-format on input
    opaque_input_subformats = ('OPQV-dual',)
    opaque_output_subformats = (None,)
class MMALImageEncoder(MMALEncoder):
    """
    Represents the MMAL image encoder component. This component has 1 input
    port and 1 output port. The output port is typically configured with
    ``MMAL_ENCODING_JPEG`` but can also use ``MMAL_ENCODING_PNG``,
    ``MMAL_ENCODING_GIF``, etc.
    """
    __slots__ = ()
    component_type = mmal.MMAL_COMPONENT_DEFAULT_IMAGE_ENCODER
    # The image encoder expects the strips OPAQUE sub-format on input
    opaque_input_subformats = ('OPQV-strips',)
    opaque_output_subformats = (None,)
class MMALDecoder(MMALComponent):
    """
    Represents a generic MMAL decoder. This is an abstract base class.
    """
    # Concrete decoders define component_type and the OPAQUE sub-formats
    __slots__ = ()
class MMALVideoDecoder(MMALDecoder):
    """
    Represents the MMAL video decoder component. This component has 1 input
    port and 1 output port. The input port is usually configured with
    ``MMAL_ENCODING_H264`` or ``MMAL_ENCODING_MJPEG``.
    """
    __slots__ = ()
    component_type = mmal.MMAL_COMPONENT_DEFAULT_VIDEO_DECODER
    opaque_input_subformats = (None,)
    # Decoded frames emerge in the single-image OPAQUE sub-format
    opaque_output_subformats = ('OPQV-single',)
class MMALImageDecoder(MMALDecoder):
    """
    Represents the MMAL image decoder component. This component has 1 input
    port and 1 output port. The input port is usually configured with
    ``MMAL_ENCODING_JPEG``.
    """
    __slots__ = ()
    component_type = mmal.MMAL_COMPONENT_DEFAULT_IMAGE_DECODER
    opaque_input_subformats = (None,)
    # Decoded images emerge in the single-image OPAQUE sub-format
    opaque_output_subformats = ('OPQV-single',)
class MMALRenderer(MMALComponent):
    """
    Represents the MMAL renderer component. This component has 1 input port and
    0 output ports. It is used to implement the camera preview and overlays.
    """
    __slots__ = ()
    component_type = mmal.MMAL_COMPONENT_DEFAULT_VIDEO_RENDERER
    # Pure sink: input only, no outputs
    opaque_input_subformats = ('OPQV-single',)
class MMALNullSink(MMALComponent):
    """
    Represents the MMAL null-sink component. This component has 1 input port
    and 0 output ports. It is used to keep the preview port "alive" (and thus
    calculating white-balance and exposure) when the camera preview is not
    required.
    """
    __slots__ = ()
    component_type = mmal.MMAL_COMPONENT_DEFAULT_NULL_SINK
    # Pure sink: input only, no outputs
    opaque_input_subformats = ('OPQV-single',)
class MMALPythonPort(MMALObject):
"""
Implements ports for Python-based MMAL components.
"""
__slots__ = (
'_buffer_count',
'_buffer_size',
'_connection',
'_enabled',
'_owner',
'_pool',
'_type',
'_index',
'_supported_formats',
'_format',
'_callback',
)
_FORMAT_BPP = {
'I420': 1.5,
'RGB3': 3,
'RGBA': 4,
'BGR3': 3,
'BGRA': 4,
}
    def __init__(self, owner, port_type, index):
        """
        Construct a Python-implemented port of *port_type* at *index* on the
        *owner* component (held via weakref to avoid a reference cycle).
        """
        self._buffer_count = 2
        self._buffer_size = 0
        self._connection = None
        self._enabled = False
        # weakref so the port does not keep its owning component alive
        self._owner = weakref.ref(owner)
        self._pool = None
        self._callback = None
        self._type = port_type
        self._index = index
        self._supported_formats = {
            mmal.MMAL_ENCODING_I420,
            mmal.MMAL_ENCODING_RGB24,
            mmal.MMAL_ENCODING_BGR24,
            mmal.MMAL_ENCODING_RGBA,
            mmal.MMAL_ENCODING_BGRA,
        }
        # Mirror the layout of a real MMAL port's format structure so that
        # code paths shared with MMALPort can treat both uniformly
        self._format = ct.pointer(mmal.MMAL_ES_FORMAT_T(
            type=mmal.MMAL_ES_TYPE_VIDEO,
            encoding=mmal.MMAL_ENCODING_I420,
            es=ct.pointer(mmal.MMAL_ES_SPECIFIC_FORMAT_T())))
    def close(self):
        # Tear down any connection first, then disable, then drop the format
        self.disconnect()
        self.disable()
        self._format = None
    def __repr__(self):
        return '<MMALPythonPort "%s": format=MMAL_FOURCC(%r) buffers=%dx%d frames=%s@%sfps>' % (
            self.name, mmal.FOURCC_str(self.format), self.buffer_count,
            self.buffer_size, self.framesize, self.framerate)
    def _get_bitrate(self):
        # Simple pass-through to the underlying format structure
        return self._format[0].bitrate
    def _set_bitrate(self, value):
        self._format[0].bitrate = value
    bitrate = property(_get_bitrate, _set_bitrate, doc="""\
        Retrieves or sets the bitrate limit for the port's format.
        """)
def _get_supported_formats(self):
return self._supported_formats
def _set_supported_formats(self, value):
try:
value = {f for f in value}
except TypeError:
value = {value}
if not value:
raise PiCameraMMALError(
mmal.MMAL_EINVAL, "port must have at least one valid format")
self._supported_formats = value
supported_formats = property(_get_supported_formats, _set_supported_formats, doc="""\
Retrieves or sets the set of valid formats for this port. The set must
always contain at least one valid format. A single format can be
specified; it will be converted implicitly to a singleton set.
If the current port :attr:`format` is not a member of the new set, no
error is raised. An error will be raised when :meth:`commit` is next
called if :attr:`format` is still not a member of the set.
""")
    def _get_format(self):
        return self._format[0].encoding
    def _set_format(self, value):
        self._format[0].encoding = value
    format = property(_get_format, _set_format, doc="""\
        Retrieves or sets the encoding format of the port. Setting this
        attribute implicitly sets the encoding variant to a sensible value
        (I420 in the case of OPAQUE).
        """)
    def _get_framesize(self):
        # Report the *crop* (requested) size, not the block-aligned buffer
        # dimensions stored in video.width/height
        return PiResolution(
            self._format[0].es[0].video.crop.width,
            self._format[0].es[0].video.crop.height,
        )
    def _set_framesize(self, value):
        value = to_resolution(value)
        video = self._format[0].es[0].video
        # Buffer dimensions are rounded up to the camera block size (32x16);
        # the crop rectangle retains the exact requested size
        video.width = bcm_host.VCOS_ALIGN_UP(value.width, 32)
        video.height = bcm_host.VCOS_ALIGN_UP(value.height, 16)
        video.crop.width = value.width
        video.crop.height = value.height
    framesize = property(_get_framesize, _set_framesize, doc="""\
        Retrieves or sets the size of the source's video frames as a (width,
        height) tuple. This attribute implicitly handles scaling the given
        size up to the block size of the camera (32x16).
        """)
    def _get_framerate(self):
        video = self._format[0].es[0].video
        try:
            return Fraction(
                video.frame_rate.num,
                video.frame_rate.den)
        except ZeroDivisionError:
            # A zero denominator means the framerate is unset; report 0 fps
            return Fraction(0, 1)
    def _set_framerate(self, value):
        value = to_fraction(value)
        video = self._format[0].es[0].video
        video.frame_rate.num = value.numerator
        video.frame_rate.den = value.denominator
    framerate = property(_get_framerate, _set_framerate, doc="""\
        Retrieves or sets the framerate of the port's video frames in fps.
        """)
    @property
    def pool(self):
        """
        Returns the :class:`MMALPool` associated with the buffer, if any.
        """
        return self._pool
    @property
    def opaque_subformat(self):
        # Python-implemented ports have no OPAQUE subformat; always None.
        return None
    def _get_buffer_count(self):
        return self._buffer_count
    def _set_buffer_count(self, value):
        # At least one buffer is required for the port to operate.
        if value < 1:
            raise PiCameraMMALError(mmal.MMAL_EINVAL, 'buffer count <1')
        self._buffer_count = int(value)
    buffer_count = property(_get_buffer_count, _set_buffer_count, doc="""\
        The number of buffers allocated (or to be allocated) to the port. The
        default is 2 but more may be required in the case of long pipelines
        with replicated buffers.
        """)
    def _get_buffer_size(self):
        return self._buffer_size
    def _set_buffer_size(self, value):
        if value < 0:
            raise PiCameraMMALError(mmal.MMAL_EINVAL, 'buffer size <0')
        self._buffer_size = value
    buffer_size = property(_get_buffer_size, _set_buffer_size, doc="""\
        The size of buffers allocated (or to be allocated) to the port. The
        size of buffers defaults to a value dictated by the port's format.
        """)
def copy_from(self, source):
"""
Copies the port's :attr:`format` from the *source*
:class:`MMALControlPort`.
"""
if isinstance(source, MMALPythonPort):
mmal.mmal_format_copy(self._format, source._format)
else:
mmal.mmal_format_copy(self._format, source._port[0].format)
    def commit(self):
        """
        Commits the port's configuration and automatically updates the number
        and size of associated buffers. This is typically called after
        adjusting the port's format and/or associated settings (like width and
        height for video ports).

        Raises :exc:`PiCameraMMALError` if the current :attr:`format` is not
        one of :attr:`supported_formats`.
        """
        if self.format not in self.supported_formats:
            raise PiCameraMMALError(
                mmal.MMAL_EINVAL, 'invalid format for port %r' % self)
        # Reset to the default buffer count; the owning component may adjust
        # this later (e.g. in response to a format-change event).
        self._buffer_count = 2
        video = self._format[0].es[0].video
        try:
            # For known unencoded formats one buffer must hold a full frame.
            self._buffer_size = int(
                MMALPythonPort._FORMAT_BPP[str(self.format)]
                * video.width
                * video.height)
        except KeyError:
            # If it's an unknown / encoded format just leave the buffer size
            # alone and hope the owning component knows what to set
            pass
        # Give the owning component a chance to react to the new format.
        self._owner()._commit_port(self)
    @property
    def enabled(self):
        """
        Returns a :class:`bool` indicating whether the port is currently
        enabled. Unlike other classes, this is a read-only property. Use
        :meth:`enable` and :meth:`disable` to modify the value.
        """
        return self._enabled
    def enable(self, callback=None):
        """
        Enable the port with the specified callback function (this must be
        ``None`` for connected ports, and a callable for disconnected ports).
        The callback function must accept two parameters which will be this
        :class:`MMALControlPort` (or descendent) and an :class:`MMALBuffer`
        instance. Any return value will be ignored.

        Raises :exc:`PiCameraMMALError` when the callback requirement above
        is violated.
        """
        if self._connection is not None:
            if callback is not None:
                raise PiCameraMMALError(
                    mmal.MMAL_EINVAL,
                    'connected ports must be enabled without callback')
        else:
            if callback is None:
                raise PiCameraMMALError(
                    mmal.MMAL_EINVAL,
                    'unconnected ports must be enabled with callback')
        # Input ports and unconnected ports draw buffers from a local pool;
        # connected output ports proxy the target input port instead.
        if self.type == mmal.MMAL_PORT_TYPE_INPUT or self._connection is None:
            self._pool = MMALPythonPortPool(self)
        self._callback = callback
        self._enabled = True
    def disable(self):
        """
        Disable the port.

        Any buffers still waiting in the owning component's queue are
        released, and the port's buffer pool (if any) is destroyed.
        """
        self._enabled = False
        if self._pool is not None:
            # Release any unprocessed buffers from the owner's queue before
            # we destroy them all
            while True:
                buf = self._owner()._queue.get(False)
                if buf:
                    buf.release()
                else:
                    break
            self._pool.close()
            self._pool = None
        self._callback = None
    def get_buffer(self, block=True, timeout=None):
        """
        Returns a :class:`MMALBuffer` from the associated :attr:`pool`. *block*
        and *timeout* act as they do in the corresponding
        :meth:`MMALPool.get_buffer`.

        Raises :exc:`PiCameraPortDisabled` if the port is not enabled.
        """
        if not self._enabled:
            raise PiCameraPortDisabled(
                'cannot get buffer from disabled port %s' % self.name)
        if self._pool is not None:
            # Unconnected port or input port case; retrieve buffer from the
            # allocated pool
            return self._pool.get_buffer(block, timeout)
        else:
            # Connected output port case; get a buffer from the target input
            # port (in this case the port is just a thin proxy for the
            # corresponding input port)
            assert self.type == mmal.MMAL_PORT_TYPE_OUTPUT
            return self._connection.target.get_buffer(block, timeout)
    def send_buffer(self, buf):
        """
        Send :class:`MMALBuffer` *buf* to the port.

        Raises :exc:`PiCameraPortDisabled` if the port is disabled (checked
        *after* any connection callback has run).
        """
        # NOTE: The MMALPythonConnection callback must occur *before* the test
        # for the port being enabled; it's meant to be the connection making
        # the callback prior to the buffer getting to the port after all
        if (
            self.type == mmal.MMAL_PORT_TYPE_INPUT and
            self._connection._callback is not None):
            try:
                modified_buf = self._connection._callback(self._connection, buf)
            except:
                buf.release()
                raise
            else:
                if modified_buf is None:
                    # Connection callback dropped the buffer.
                    # NOTE(review): execution continues below with the
                    # released buffer -- confirm a ``return`` isn't missing
                    # here.
                    buf.release()
                else:
                    buf = modified_buf
        if not self._enabled:
            raise PiCameraPortDisabled(
                'cannot send buffer to disabled port %s' % self.name)
        if self._callback is not None:
            try:
                # XXX Return value? If it's an input port we should ignore it,
                # but what about output ports?
                self._callback(self, buf)
            except:
                buf.release()
                raise
        if self._type == mmal.MMAL_PORT_TYPE_INPUT:
            # Input port case; queue the buffer for processing on the
            # owning component
            self._owner()._queue.put(buf)
        elif self._connection is None:
            # Unconnected output port case; release the buffer back to the
            # pool
            buf.release()
        else:
            # Connected output port case; forward the buffer to the
            # connected component's input port
            # XXX If it's a format-change event?
            self._connection.target.send_buffer(buf)
    @property
    def name(self):
        # "owner:direction:index", e.g. "py.component:in:0".
        return '%s:%s:%d' % (self._owner().name, {
            mmal.MMAL_PORT_TYPE_OUTPUT: 'out',
            mmal.MMAL_PORT_TYPE_INPUT: 'in',
            mmal.MMAL_PORT_TYPE_CONTROL: 'control',
            mmal.MMAL_PORT_TYPE_CLOCK: 'clock',
            }[self.type], self._index)
    @property
    def type(self):
        """
        The type of the port. One of:
        * MMAL_PORT_TYPE_OUTPUT
        * MMAL_PORT_TYPE_INPUT
        * MMAL_PORT_TYPE_CONTROL
        * MMAL_PORT_TYPE_CLOCK
        """
        return self._type
    @property
    def capabilities(self):
        """
        The capabilities of the port. A bitfield of the following:
        * MMAL_PORT_CAPABILITY_PASSTHROUGH
        * MMAL_PORT_CAPABILITY_ALLOCATION
        * MMAL_PORT_CAPABILITY_SUPPORTS_EVENT_FORMAT_CHANGE
        """
        # Python ports only ever advertise support for format-change events.
        return mmal.MMAL_PORT_CAPABILITY_SUPPORTS_EVENT_FORMAT_CHANGE
    @property
    def index(self):
        """
        Returns an integer indicating the port's position within its owning
        list (inputs, outputs, etc.)
        """
        return self._index
    @property
    def connection(self):
        """
        If this port is connected to another, this property holds the
        :class:`MMALConnection` or :class:`MMALPythonConnection` object which
        represents that connection. If this port is not connected, this
        property is ``None``.
        """
        return self._connection
    def connect(self, other, **options):
        """
        Connect this port to the *other* :class:`MMALPort` (or
        :class:`MMALPythonPort`). The type and configuration of the connection
        will be automatically selected.
        Various connection options can be specified as keyword arguments. These
        will be passed onto the :class:`MMALConnection` or
        :class:`MMALPythonConnection` constructor that is called (see those
        classes for an explanation of the available options).

        Raises :exc:`PiCameraValueError` unless the pairing is exactly one
        output port and one input port.
        """
        # Always construct connections from the output end
        if self.type != mmal.MMAL_PORT_TYPE_OUTPUT:
            return other.connect(self, **options)
        if other.type != mmal.MMAL_PORT_TYPE_INPUT:
            raise PiCameraValueError(
                'A connection can only be established between an output and '
                'an input port')
        return MMALPythonConnection(self, other, **options)
    def disconnect(self):
        """
        Destroy the connection between this port and another port.
        """
        # No-op when the port is not connected.
        if self.connection is not None:
            self.connection.close()
class MMALPythonPortPool(MMALPool):
    """
    A pool of buffer headers serving a single :class:`MMALPythonPort`. Only
    used when a fake (Python) port operates without an accompanying
    :class:`MMALPythonConnection`.
    """
    __slots__ = ('_port',)
    def __init__(self, port):
        # Size the underlying MMAL pool from the port's current settings.
        raw_pool = mmal.mmal_pool_create(port.buffer_count, port.buffer_size)
        super(MMALPythonPortPool, self).__init__(raw_pool)
        self._port = port
    @property
    def port(self):
        # The port this pool was constructed for.
        return self._port
    def send_buffer(self, port=None, block=True, timeout=None):
        """
        Fetch a buffer from the pool and send it to *port*, defaulting to
        the pool's own port. *block* and *timeout* behave as they do in
        :meth:`MMALPool.get_buffer`.
        """
        destination = self._port if port is None else port
        super(MMALPythonPortPool, self).send_buffer(destination, block, timeout)
    def send_all_buffers(self, port=None, block=True, timeout=None):
        """
        Send every buffer from the pool to *port*, defaulting to the pool's
        own port. *block* and *timeout* behave as they do in
        :meth:`MMALPool.get_buffer`.
        """
        destination = self._port if port is None else port
        super(MMALPythonPortPool, self).send_all_buffers(destination, block, timeout)
class MMALPythonBaseComponent(MMALObject):
    """
    Abstract base for MMAL components implemented in Python. It provides the
    :attr:`enabled` state machinery and the :meth:`_commit_port` hook that
    descendents override to validate or propagate port configuration changes.
    Few users will want to sub-class this directly; see
    :class:`MMALPythonComponent` for a more practical starting point.
    """
    __slots__ = ('_inputs', '_outputs', '_enabled',)
    def __init__(self):
        super(MMALPythonBaseComponent, self).__init__()
        self._inputs = ()
        self._outputs = ()
        self._enabled = False
        # TODO Control port?
    def close(self):
        """
        Release the component's resources. Once closed, most methods will
        raise exceptions when called.
        """
        self.disable()
    @property
    def enabled(self):
        """
        ``True`` while the component is enabled; use :meth:`enable` and
        :meth:`disable` to change the state.
        """
        return self._enabled
    def enable(self):
        """
        Enable the component. An enabled component processes data arriving on
        its input port(s) and emits results via buffers on its output
        port(s). Establishing a connection may enable a component implicitly.
        """
        self._enabled = True
    def disable(self):
        """
        Disable the component.
        """
        self._enabled = False
    @property
    def control(self):
        """
        The :class:`MMALControlPort` control port of the component, normally
        used to configure the component's behaviour. Python components have
        no control port, so this is always ``None``.
        """
        return None
    @property
    def inputs(self):
        """
        The component's input ports, as a sequence of :class:`MMALPort`
        objects.
        """
        return self._inputs
    @property
    def outputs(self):
        """
        The component's output ports, as a sequence of :class:`MMALPort`
        objects.
        """
        return self._outputs
    def _commit_port(self, port):
        """
        Hook invoked by ports when their format is committed. Descendents may
        override this to reconfigure *output* ports when an input port is
        committed, or to reject an unacceptable configuration by raising.
        .. warning::
            Implementations must never reconfigure input ports from here;
            reconfiguring output ports in response to an input commit is fine.
        """
        pass
    def __repr__(self):
        if not self._outputs:
            return '<%s closed>' % self.__class__.__name__
        return '<%s "%s": %d inputs %d outputs>' % (
            self.__class__.__name__, self.name,
            len(self.inputs), len(self.outputs))
class MMALPythonSource(MMALPythonBaseComponent):
    """
    Provides a source for other :class:`MMALComponent` instances. The
    specified *input* is read in chunks the size of the configured output
    buffer(s) until the input is exhausted. The :meth:`wait` method can be
    used to block until this occurs. If the output buffer is configured to
    use a full-frame unencoded format (like I420 or RGB), frame-end flags will
    be automatically generated by the source. When the input is exhausted an
    empty buffer with the End Of Stream (EOS) flag will be sent.
    The component provides all picamera's usual IO-handling characteristics; if
    *input* is a string, a file with that name will be opened as the input and
    closed implicitly when the component is closed. Otherwise, the input will
    not be closed implicitly (the component did not open it, so the assumption
    is that closing *input* is the caller's responsibility). If *input* is an
    object with a ``read`` method it is assumed to be a file-like object and is
    used as is. Otherwise, *input* is assumed to be a readable object
    supporting the buffer protocol (which is wrapped in a :class:`BufferIO`
    stream).
    """
    __slots__ = ('_stream', '_opened', '_thread')
    def __init__(self, input):
        super(MMALPythonSource, self).__init__()
        # A source has no inputs and exactly one output port.
        self._inputs = ()
        self._outputs = (MMALPythonPort(self, mmal.MMAL_PORT_TYPE_OUTPUT, 0),)
        self._stream, self._opened = open_stream(input, output=False)
        self._thread = None
    def close(self):
        # Tear down the output port, then close the stream (close_stream only
        # really closes it if we were the ones that opened it).
        super(MMALPythonSource, self).close()
        if self._outputs:
            self._outputs[0].close()
            self._outputs = ()
        if self._stream:
            close_stream(self._stream, self._opened)
            self._stream = None
    def enable(self):
        # Pump data from the stream on a background daemon thread.
        super(MMALPythonSource, self).enable()
        self._thread = Thread(target=self._send_run)
        self._thread.daemon = True
        self._thread.start()
    def disable(self):
        # Stop the pump thread and wait for it to finish.
        super(MMALPythonSource, self).disable()
        if self._thread:
            self._thread.join()
            self._thread = None
    def wait(self, timeout=None):
        """
        Wait for the source to send all bytes from the specified input. If
        *timeout* is specified, it is the number of seconds to wait for
        completion. The method returns ``True`` if the source completed within
        the specified timeout and ``False`` otherwise.

        Raises :exc:`PiCameraMMALError` if the component is not enabled.
        """
        if not self.enabled:
            raise PiCameraMMALError(
                mmal.MMAL_EINVAL, 'cannot wait on disabled component')
        self._thread.join(timeout)
        return not self._thread.is_alive()
    def _send_run(self):
        # Calculate the size of a frame if possible (i.e. when the output
        # format is an unencoded full frame format). If it's an unknown /
        # encoded format, we've no idea what the framesize is (this would
        # presumably require decoding the stream) so leave framesize as None.
        video = self._outputs[0]._format[0].es[0].video
        try:
            framesize = (
                MMALPythonPort._FORMAT_BPP[str(self._outputs[0].format)]
                * video.width
                * video.height)
        except KeyError:
            framesize = None
        frameleft = framesize
        # Keep filling and sending output buffers until disabled or EOS.
        while self.enabled:
            buf = self._outputs[0].get_buffer(timeout=0.1)
            if buf:
                try:
                    if frameleft is None:
                        send = buf.size
                    else:
                        send = min(frameleft, buf.size)
                    with buf as data:
                        if send == buf.size:
                            try:
                                # readinto() is by far the fastest method of
                                # getting data into the buffer
                                buf.length = self._stream.readinto(data)
                            except AttributeError:
                                # if there's no readinto() method, fallback on
                                # read() and the data setter (memmove)
                                buf.data = self._stream.read(buf.size)
                        else:
                            buf.data = self._stream.read(send)
                    if frameleft is not None:
                        frameleft -= buf.length
                        if not frameleft:
                            # Completed one full frame; flag it and reset.
                            buf.flags |= mmal.MMAL_BUFFER_HEADER_FLAG_FRAME_END
                            frameleft = framesize
                    if not buf.length:
                        # Input exhausted: signal End Of Stream and stop.
                        buf.flags |= mmal.MMAL_BUFFER_HEADER_FLAG_EOS
                        break
                finally:
                    self._outputs[0].send_buffer(buf)
    @property
    def name(self):
        return 'py.source'
class MMALPythonComponent(MMALPythonBaseComponent):
    """
    Provides a Python-based MMAL component with a *name*, a single input and
    the specified number of *outputs* (default 1). The :meth:`connect` and
    :meth:`disconnect` methods can be used to establish or break a connection
    from the input port to an upstream component.
    Typically descendents will override the :meth:`_handle_frame` method to
    respond to buffers sent to the input port, and will set
    :attr:`MMALPythonPort.supported_formats` in the constructor to define the
    formats that the component will work with.
    """
    __slots__ = ('_name', '_thread', '_queue', '_error')
    def __init__(self, name='py.component', outputs=1):
        super(MMALPythonComponent, self).__init__()
        self._name = name
        self._thread = None
        # Exception raised on the background thread, re-raised by disable().
        self._error = None
        # Queue fed by the input port; drained by _thread_run below.
        self._queue = MMALQueue.create()
        self._inputs = (MMALPythonPort(self, mmal.MMAL_PORT_TYPE_INPUT, 0),)
        self._outputs = tuple(
            MMALPythonPort(self, mmal.MMAL_PORT_TYPE_OUTPUT, n)
            for n in range(outputs)
            )
    def close(self):
        # Break the upstream connection, then release all ports and the queue.
        super(MMALPythonComponent, self).close()
        self.disconnect()
        if self._inputs:
            self._inputs[0].close()
            self._inputs = ()
        for output in self._outputs:
            output.disable()
        self._outputs = ()
        self._queue.close()
        self._queue = None
    def connect(self, source, **options):
        """
        Connects the input port of this component to the specified *source*
        :class:`MMALPort` or :class:`MMALPythonPort`. Alternatively, as a
        convenience (primarily intended for command line experimentation; don't
        use this in scripts), *source* can be another component in which case
        the first unconnected output port will be selected as *source*.
        Keyword arguments will be passed along to the connection constructor.
        See :class:`MMALConnection` and :class:`MMALPythonConnection` for
        further information.
        """
        if isinstance(source, (MMALPort, MMALPythonPort)):
            # NOTE(review): *options* are not forwarded on this branch,
            # unlike the component branch below -- confirm intended.
            return self.inputs[0].connect(source)
        else:
            for port in source.outputs:
                if not port.connection:
                    return self.inputs[0].connect(port, **options)
            raise PiCameraMMALError(
                mmal.MMAL_EINVAL, 'no free output ports on %r' % source)
    def disconnect(self):
        """
        Destroy the connection between this component's input port and the
        upstream component.
        """
        self.inputs[0].disconnect()
    @property
    def connection(self):
        """
        The :class:`MMALConnection` or :class:`MMALPythonConnection` object
        linking this component to the upstream component.
        """
        return self.inputs[0].connection
    @property
    def name(self):
        return self._name
    def _commit_port(self, port):
        """
        Overridden to copy the input port's configuration to the output
        port(s), and to ensure that the output port(s)' format(s) match
        the input port's format.
        """
        super(MMALPythonComponent, self)._commit_port(port)
        if port.type == mmal.MMAL_PORT_TYPE_INPUT:
            # Input committed: mirror its configuration onto all outputs.
            for output in self.outputs:
                output.copy_from(port)
        elif port.type == mmal.MMAL_PORT_TYPE_OUTPUT:
            # Outputs may not diverge from the input format.
            if port.format != self.inputs[0].format:
                raise PiCameraMMALError(mmal.MMAL_EINVAL, 'output format mismatch')
    def enable(self):
        super(MMALPythonComponent, self).enable()
        if not self._thread:
            # Process queued input buffers on a background daemon thread.
            self._thread = Thread(target=self._thread_run)
            self._thread.daemon = True
            self._thread.start()
    def disable(self):
        super(MMALPythonComponent, self).disable()
        if self._thread:
            self._thread.join()
            self._thread = None
            # Re-raise any exception that killed the background thread.
            if self._error:
                raise self._error
    def _thread_run(self):
        try:
            while self._enabled:
                buf = self._queue.get(timeout=0.1)
                if buf:
                    try:
                        # Dispatch on the buffer's command: 0 means plain
                        # frame data; anything else is an MMAL event.
                        handler = {
                            0: self._handle_frame,
                            mmal.MMAL_EVENT_PARAMETER_CHANGED: self._handle_parameter_changed,
                            mmal.MMAL_EVENT_FORMAT_CHANGED: self._handle_format_changed,
                            mmal.MMAL_EVENT_ERROR: self._handle_error,
                            mmal.MMAL_EVENT_EOS: self._handle_end_of_stream,
                            }[buf.command]
                        # A truthy handler result means "no more buffers
                        # expected": stop the processing loop.
                        if handler(self.inputs[0], buf):
                            self._enabled = False
                    finally:
                        buf.release()
        except Exception as e:
            # Store the error for disable() to re-raise on the caller's
            # thread, and stop processing.
            self._error = e
            self._enabled = False
    def _handle_frame(self, port, buf):
        """
        Handles frame data buffers (where :attr:`MMALBuffer.command` is set to
        0).
        Typically, if the component has output ports, the method is expected to
        fetch a buffer from the output port(s), write data into them, and send
        them back to their respective ports.
        Return values are as for normal event handlers (``True`` when no more
        buffers are expected, ``False`` otherwise).
        """
        return False
    def _handle_format_changed(self, port, buf):
        """
        Handles format change events passed to the component (where
        :attr:`MMALBuffer.command` is set to MMAL_EVENT_FORMAT_CHANGED).
        The default implementation re-configures the input port of the
        component and emits the event on all output ports for downstream
        processing. Override this method if you wish to do something else in
        response to format change events.
        The *port* parameter is the port into which the event arrived, and
        *buf* contains the event itself (a MMAL_EVENT_FORMAT_CHANGED_T
        structure). Use ``mmal_event_format_changed_get`` on the buffer's data
        to extract the event.
        """
        with buf as data:
            event = mmal.mmal_event_format_changed_get(buf._buf)
            if port.connection:
                # Handle format change on the source output port, if any. We
                # don't check the output port capabilities because it was the
                # port that emitted the format change in the first case so it'd
                # be odd if it didn't support them (or the format requested)!
                output = port.connection._source
                output.disable()
                if isinstance(output, MMALPythonPort):
                    mmal.mmal_format_copy(output._format, event[0].format)
                else:
                    mmal.mmal_format_copy(output._port[0].format, event[0].format)
                output.commit()
                output.buffer_count = (
                    event[0].buffer_num_recommended
                    if event[0].buffer_num_recommended > 0 else
                    event[0].buffer_num_min)
                output.buffer_size = (
                    event[0].buffer_size_recommended
                    if event[0].buffer_size_recommended > 0 else
                    event[0].buffer_size_min)
                if isinstance(output, MMALPythonPort):
                    output.enable()
                else:
                    output.enable(port.connection._transfer)
            # Now deal with the format change on this input port (this is only
            # called from _thread_run so port must be an input port)
            try:
                if not (port.capabilities & mmal.MMAL_PORT_CAPABILITY_SUPPORTS_EVENT_FORMAT_CHANGE):
                    raise PiCameraMMALError(
                        mmal.MMAL_EINVAL,
                        'port %s does not support event change' % self.name)
                mmal.mmal_format_copy(port._format, event[0].format)
                self._commit_port(port)
                port.pool.resize(
                    event[0].buffer_num_recommended
                    if event[0].buffer_num_recommended > 0 else
                    event[0].buffer_num_min,
                    event[0].buffer_size_recommended
                    if event[0].buffer_size_recommended > 0 else
                    event[0].buffer_size_min)
                port.buffer_count = len(port.pool)
                port.buffer_size = port.pool[0].size
            except:
                # If this port can't handle the format change, or if anything goes
                # wrong (like the owning component doesn't like the new format)
                # stop the pipeline (from here at least)
                if port.connection:
                    port.connection.disable()
                raise
        # Chain the format-change onward so everything downstream sees it.
        # NOTE: the callback isn't given the format-change because there's no
        # image data in it
        for output in self.outputs:
            out_buf = output.get_buffer()
            out_buf.copy_from(buf)
            output.send_buffer(out_buf)
        return False
    def _handle_parameter_changed(self, port, buf):
        """
        Handles parameter change events passed to the component (where
        :attr:`MMALBuffer.command` is set to MMAL_EVENT_PARAMETER_CHANGED).
        The default implementation does nothing but return ``False``
        (indicating that processing should continue). Override this in
        descendents to respond to parameter changes.
        The *port* parameter is the port into which the event arrived, and
        *buf* contains the event itself (a MMAL_EVENT_PARAMETER_CHANGED_T
        structure).
        """
        return False
    def _handle_error(self, port, buf):
        """
        Handles error notifications passed to the component (where
        :attr:`MMALBuffer.command` is set to MMAL_EVENT_ERROR).
        The default implementation does nothing but return ``True`` (indicating
        that processing should halt). Override this in descendents to respond
        to error events.
        The *port* parameter is the port into which the event arrived.
        """
        return True
    def _handle_end_of_stream(self, port, buf):
        """
        Handles end-of-stream notifications passed to the component (where
        :attr:`MMALBuffer.command` is set to MMAL_EVENT_EOS).
        The default implementation does nothing but return ``True`` (indicating
        that processing should halt). Override this in descendents to respond
        to the end of stream.
        The *port* parameter is the port into which the event arrived.
        """
        return True
class MMALPythonTarget(MMALPythonComponent):
    """
    A simple sink component: every buffer received on its input is written to
    the specified *output* until a buffer carrying the *done* flag arrives
    (by default MMAL_BUFFER_HEADER_FLAG_EOS, i.e. End Of Stream).
    The usual picamera IO-handling applies: when *output* is a string, a file
    with that name is opened here and closed implicitly along with the
    component. Any other output is left open on close (the component did not
    open it, so closing it remains the caller's responsibility). An object
    with a ``write`` method is treated as a file-like stream and used as-is;
    anything else must support the buffer protocol and is wrapped in a
    :class:`BufferIO` stream.
    """
    __slots__ = ('_opened', '_stream', '_done', '_event')
    def __init__(self, output, done=mmal.MMAL_BUFFER_HEADER_FLAG_EOS):
        super(MMALPythonTarget, self).__init__(name='py.target', outputs=0)
        self._stream, self._opened = open_stream(output)
        self._done = done
        self._event = Event()
        # Accept every format picamera typically produces (users can extend
        # this set with more esoteric encodings if required)
        self.inputs[0].supported_formats = {
            mmal.MMAL_ENCODING_I420,
            mmal.MMAL_ENCODING_RGB24,
            mmal.MMAL_ENCODING_BGR24,
            mmal.MMAL_ENCODING_RGBA,
            mmal.MMAL_ENCODING_BGRA,
            mmal.MMAL_ENCODING_MJPEG,
            mmal.MMAL_ENCODING_H264,
            mmal.MMAL_ENCODING_JPEG,
            mmal.MMAL_ENCODING_GIF,
            mmal.MMAL_ENCODING_PNG,
            mmal.MMAL_ENCODING_BMP,
            }
    def close(self):
        # Close the component first, then the stream (only really closed if
        # we opened it).
        super(MMALPythonTarget, self).close()
        close_stream(self._stream, self._opened)
    def enable(self):
        # Reset the completion flag each time the component is (re-)enabled.
        self._event.clear()
        super(MMALPythonTarget, self).enable()
    def wait(self, timeout=None):
        """
        Block until a buffer with the "done" flag (see the constructor) has
        been written. When *timeout* is given, wait at most that many
        seconds; returns ``True`` if completion occurred within the timeout
        and ``False`` otherwise.
        """
        return self._event.wait(timeout)
    def _handle_frame(self, port, buf):
        # Dump the payload, and halt (return True) once the done flag is seen.
        self._stream.write(buf.data)
        finished = bool(buf.flags & self._done)
        if finished:
            self._event.set()
        return finished
class MMALPythonConnection(MMALBaseConnection):
    """
    Represents a connection between an :class:`MMALPythonBaseComponent` and a
    :class:`MMALBaseComponent` or another :class:`MMALPythonBaseComponent`.
    The constructor accepts arguments providing the *source* :class:`MMALPort`
    (or :class:`MMALPythonPort`) and *target* :class:`MMALPort` (or
    :class:`MMALPythonPort`).
    The *formats* parameter specifies an iterable of formats (in preference
    order) that the connection may attempt when negotiating formats between
    the two ports. If this is ``None``, or an empty iterable, no negotiation
    will take place and the source port's format will simply be copied to the
    target port. Otherwise, the iterable will be worked through in order until
    a format acceptable to both ports is discovered.
    The *callback* parameter can optionally specify a callable which will be
    executed for each buffer that traverses the connection (providing an
    opportunity to manipulate or drop that buffer). If specified, it must be a
    callable which accepts two parameters: the :class:`MMALPythonConnection`
    object sending the data, and the :class:`MMALBuffer` object containing
    data. The callable may optionally manipulate the :class:`MMALBuffer` and
    return it to permit it to continue traversing the connection, or return
    ``None`` in which case the buffer will be released.
    .. data:: default_formats
        :annotation: = (MMAL_ENCODING_I420, MMAL_ENCODING_RGB24, MMAL_ENCODING_BGR24, MMAL_ENCODING_RGBA, MMAL_ENCODING_BGRA)
        Class attribute defining the default formats used to negotiate
        connections between Python and and MMAL components, in preference
        order. Note that OPAQUE is not present in contrast with the default
        formats in :class:`MMALConnection`.
    """
    __slots__ = ('_enabled', '_callback')
    default_formats = (
        mmal.MMAL_ENCODING_I420,
        mmal.MMAL_ENCODING_RGB24,
        mmal.MMAL_ENCODING_BGR24,
        mmal.MMAL_ENCODING_RGBA,
        mmal.MMAL_ENCODING_BGRA,
        )
    def __init__(
            self, source, target, formats=default_formats, callback=None):
        # At least one end must be a Python port; two real MMAL ports should
        # use the native MMALConnection instead.
        if not (
                isinstance(source, MMALPythonPort) or
                isinstance(target, MMALPythonPort)
            ):
            raise PiCameraValueError('use a real MMAL connection')
        super(MMALPythonConnection, self).__init__(source, target, formats)
        self._enabled = False
        self._callback = callback
    def close(self):
        self.disable()
        super(MMALPythonConnection, self).close()
    @property
    def enabled(self):
        """
        Returns ``True`` if the connection is enabled. Use :meth:`enable`
        and :meth:`disable` to control the state of the connection.
        """
        return self._enabled
    def enable(self):
        """
        Enable the connection. When a connection is enabled, data is
        continually transferred from the output port of the source to the input
        port of the target component.
        """
        if not self._enabled:
            self._enabled = True
            if isinstance(self._target, MMALPythonPort):
                # Connected python input ports require no callback
                self._target.enable()
            else:
                # Connected MMAL input ports don't know they're connected so
                # provide a dummy callback
                self._target.params[mmal.MMAL_PARAMETER_ZERO_COPY] = True
                self._target.enable(lambda port, buf: True)
            if isinstance(self._source, MMALPythonPort):
                # Connected python output ports are nothing more than thin
                # proxies for the target input port; no callback required
                self._source.enable()
            else:
                # Connected MMAL output ports are made to transfer their
                # data to the Python input port
                self._source.params[mmal.MMAL_PARAMETER_ZERO_COPY] = True
                self._source.enable(self._transfer)
    def disable(self):
        """
        Disables the connection.
        """
        self._enabled = False
        self._source.disable()
        self._target.disable()
    def _transfer(self, port, buf):
        # Callback attached to a real MMAL source port: copy *buf* into a
        # buffer obtained from the target input port and forward it.
        # NOTE(review): there is no break/return after a successful send, so
        # this loop appears to keep re-sending *buf* until the connection is
        # disabled -- confirm against upstream (a ``return`` after
        # send_buffer looks intended).
        while self._enabled:
            try:
                dest = self._target.get_buffer(timeout=0.01)
            except PiCameraPortDisabled:
                dest = None
            if dest:
                dest.copy_from(buf)
                try:
                    self._target.send_buffer(dest)
                except PiCameraPortDisabled:
                    pass
        return False
    @property
    def name(self):
        # "source-port-name/target-port-name"
        return '%s/%s' % (self._source.name, self._target.name)
    def __repr__(self):
        try:
            return '<MMALPythonConnection "%s">' % self.name
        except NameError:
            # NOTE(review): a closed connection would more plausibly raise
            # AttributeError than NameError here -- confirm.
            return '<MMALPythonConnection closed>'
| [
"dave@waveform.org.uk"
] | dave@waveform.org.uk |
0bed982517ec2d1db37c3a013aeab72f14937675 | e832ec980dfb5cb52fb3116ca1ac79052cb02fae | /guo_py1811code/first/day_0325/code/requests_07_https.py | d38a5e8c6e7d7af4f5e858619f1ace95123202f2 | [] | no_license | Liu-Zhijuan-0313/Spider | e6b10ebd5f9b5c70803494e1b894ac4556dfc544 | bf04e9615e23350f7c0b9321ac3b7cbd4450dd3e | refs/heads/master | 2020-05-03T18:36:12.102519 | 2019-04-19T02:48:13 | 2019-04-19T02:48:13 | 178,766,396 | 0 | 0 | null | 2019-04-18T07:11:23 | 2019-04-01T01:50:37 | HTML | UTF-8 | Python | false | false | 106 | py | import requests
# Fetch the page over HTTPS with certificate verification (verify=True is
# also the requests default).
# A timeout is essential: requests.get() without one can block forever if
# the server never responds.
response = requests.get('https://kennethreitz.com', verify=True, timeout=10)
print(response.text)
| [
"1602176692@qq.com"
] | 1602176692@qq.com |
3e5a0ee492a36fc877f8a707c76bfff088af3935 | c57ae1b94b8bd6260e80d380892042dcf05872ac | /www/models.py | df7ce4ff258fcba256bcc51b88e3657dd1cdfc24 | [] | no_license | fjzhangZzzzzz/WebApp-Test | 46e52ba3aebfb00665b280ecbba82f1f73be3f5b | cb772f50fa8c44892b2e543ebb7d68391e2c9602 | refs/heads/master | 2021-01-13T11:18:50.681142 | 2017-03-02T15:31:35 | 2017-03-02T15:31:35 | 81,418,952 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,543 | py | # -*- coding: utf-8 -*-
"""
Models for user, blog, comment.
"""
import time, uuid
from www.orm import Model, StringField, BooleanField, FloatField, TextField
__author__ = 'fjzhang'
def next_id():
    """
    Generate a unique, roughly time-ordered 50-character id.

    Layout: a 15-digit zero-padded millisecond timestamp, a 32-character
    uuid4 hex string, and a literal '000' suffix.
    """
    millis = int(time.time() * 1000)
    return '{:015d}{}000'.format(millis, uuid.uuid4().hex)
class User(Model):
    """User data model (maps to the ``users`` table)."""
    __table__ = 'users'
    # Primary key: 50-char id generated by next_id() when not supplied.
    id = StringField(primary_key=True, default=next_id, ddl='varchar(50)')
    email = StringField(ddl='varchar(50)')
    passwd = StringField(ddl='varchar(50)')
    # True when the user has administrator rights.
    admin = BooleanField()
    name = StringField(ddl='varchar(50)')
    # Avatar image reference (presumably a URL) -- varchar(500).
    image = StringField(ddl='varchar(500)')
    # Creation time as a Unix timestamp (float), set when the row is created.
    created_at = FloatField(default=time.time)
class Blog(Model):
    """Blog post data model (maps to the ``blogs`` table)."""
    __table__ = 'blogs'
    # Primary key: 50-char id generated by next_id() when not supplied.
    id = StringField(primary_key=True, default=next_id, ddl='varchar(50)')
    # Denormalized author details (id, name, image) copied from User.
    user_id = StringField(ddl='varchar(50)')
    user_name = StringField(ddl='varchar(50)')
    user_image = StringField(ddl='varchar(500)')
    title = StringField(ddl='varchar(50)')
    # Short abstract shown in listings; full body lives in ``content``.
    summary = StringField(ddl='varchar(200)')
    content = TextField()
    # Creation time as a Unix timestamp (float).
    created_at = FloatField(default=time.time)
class Comment(Model):
    """Comment data model (maps to the ``comments`` table)."""
    __table__ = 'comments'
    # Primary key: 50-char id generated by next_id() when not supplied.
    id = StringField(primary_key=True, default=next_id, ddl='varchar(50)')
    # Id of the blog post this comment belongs to.
    blog_id = StringField(ddl='varchar(50)')
    # Denormalized commenter details (id, name, image) copied from User.
    user_id = StringField(ddl='varchar(50)')
    user_name = StringField(ddl='varchar(50)')
    user_image = StringField(ddl='varchar(500)')
    content = TextField()
    # Creation time as a Unix timestamp (float).
    created_at = FloatField(default=time.time)
| [
"fjzhang_@outlook.com"
] | fjzhang_@outlook.com |
6129595c29face62f25579c8350e0b7adf164974 | c464737ba510915e9414e38cfe4e218aaf5ed96f | /program.py | f6851d1649e88ef36ae6114ce30bc94eef2ab145 | [] | no_license | NehaMahindrakar/Sorting-Images | 26155bc18b8f5076f2ac7eb4b191805f87d76586 | fe4d4e93034f8073a7a1723bdeff3ac07ae4b8d1 | refs/heads/master | 2023-01-22T20:58:58.607097 | 2020-11-25T19:02:22 | 2020-11-25T19:02:22 | 316,028,210 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,556 | py | #importing the necessary libraries
import glob
import cv2
import os
import shutil
import time
# In-place quicksort over [name, size] records, ordered by size.
def QuickSort(arr, left, right):
    """Sort arr[left..right] in place by each record's second field (size).

    Hoare-style partition around the middle element's size, then recurse
    into both halves. Returns arr (the same list object) for convenience.
    """
    lo, hi = left, right
    pivot = arr[left + (right - left) // 2][1]
    while lo <= hi:
        # advance past records already on the correct side of the pivot
        while arr[lo][1] < pivot:
            lo += 1
        while arr[hi][1] > pivot:
            hi -= 1
        if lo <= hi:
            arr[lo], arr[hi] = arr[hi], arr[lo]
            lo += 1
            hi -= 1
    if left < hi:
        QuickSort(arr, left, hi)
    if lo < right:
        QuickSort(arr, lo, right)
    return arr
# Sort the catalog and materialize the result as renamed copies on disk.
def Sort(arr, n):
    """Sort the n images by size, report the sort time, and populate dir/Sorted.

    Rebuilds the Sorted folder from scratch; each file is copied from
    dir/Images and renamed with its rank prefix ("<rank> <name>").
    Relies on the module-level global `dir` for the base path.
    """
    begin = time.perf_counter()
    ordered = QuickSort(arr, 0, n - 1)
    end = time.perf_counter()
    print(f"\nTotal runtime of the program is {(end - begin)*1000000000} ns")
    sorted_dir = dir + '/Sorted'
    # start from an empty Sorted folder every run
    if os.path.exists(sorted_dir):
        shutil.rmtree(sorted_dir)
    os.makedirs(sorted_dir)
    for rank in range(n):
        name = ordered[rank][0]
        shutil.copy(dir + '/Images/' + name, sorted_dir)
        os.rename(sorted_dir + '/' + name,
                  sorted_dir + '/{0} '.format(rank) + name)
    return
# Binary search function: find a record by image byte size.
def BinarySearch(arr, img_size):
    """Binary-search arr for a record whose size equals img_size.

    Sorts arr by size first (via QuickSort), then searches. Returns a tuple
    (index, sorted_arr); index is -1 when no record has a matching size.

    NOTE(review): matching is by byte size only — two different images with
    equal size are indistinguishable here (LinearSearch also compares pixels).
    """
    # Use the actual array length instead of the module-level global `n`,
    # so the function works for arrays of any size (bug fix: the original
    # read the global and broke on arrays whose length differed from it).
    h = len(arr) - 1
    l = 0
    arr = QuickSort(arr, 0, len(arr) - 1)
    begin = time.perf_counter()
    while l <= h:
        m = (l + h) // 2
        if arr[m][1] < img_size:
            l = m + 1
        elif arr[m][1] > img_size:
            h = m - 1
        else:
            end = time.perf_counter()
            print(f"\nTotal runtime of the program is {(end - begin)*1000000000} ns")
            return m, arr
    end = time.perf_counter()
    print(f"\nTotal runtime of the program is {(end - begin)*1000000000} ns")
    return -1, arr
# Linear search function: size prefilter, then exact pixel comparison.
def LinearSearch(arr, img_size, img):
    """Scan arr for an image identical to img.

    Byte size is used as a cheap prefilter; candidates are then loaded from
    dir/Images (module-level global `dir`) and compared pixel-by-pixel.
    Returns the index of the matching record, or -1 when none matches.
    """
    begin = time.perf_counter()
    # Iterate over the actual array length rather than the module-level
    # global `n` (bug fix: the original broke on arrays of a different length).
    for i in range(len(arr)):
        if arr[i][1] == img_size:
            img2 = cv2.imread(dir + "/Images/" + arr[i][0])
            # Guard against equal byte size but different dimensions: a raw
            # elementwise compare on mismatched shapes fails, so require a
            # successful load and identical shape before comparing pixels.
            if img2 is not None and img2.shape == img.shape and (img == img2).all():
                end = time.perf_counter()
                print(f"\nTotal runtime of the program is {(end - begin)*1000000000} ns")
                return i
    end = time.perf_counter()
    print(f"\nTotal runtime of the program is {(end - begin)*1000000000} ns")
    return -1
# Interactive search: match the image in dir/Finder against the catalog.
def Search(arr, dir):
    """Search the catalog for the single image placed in dir/Finder.

    Prompts the user to pick linear or binary search, then shows the query
    image and the matched catalog image side by side on a hit.
    """
    finder_files = glob.glob(dir + "/Finder/*.jpg")
    if not finder_files:
        print("No images in the finder folder")
        return
    img = cv2.imread(finder_files[0])
    img_size = img.size
    out = 0
    choice = int(input("\n\n1) Linear. \n2) Binary\nEnter : "))
    if choice == 1:
        out = LinearSearch(arr, img_size, img)
    elif choice == 2:
        out, arr = BinarySearch(arr, img_size)
    else:
        print("\n\nInvalid Choice")
        return
    if out == -1:
        print("\nImage Not Found")
        return
    # display query and match side by side, scaled to a fixed window size
    match = cv2.imread(dir + "/Images/" + arr[out][0])
    img = cv2.resize(img, (960, 960))
    match = cv2.resize(match, (960, 960))
    cv2.imshow("Image 1", img)
    cv2.imshow("Image 2", match)
    cv2.waitKey(0)
    return
# Build the in-memory catalog from the jpg files under dir/Images.
def Read(dir):
    """Load every jpg under dir/Images.

    Returns ([[basename, pixel_count], ...], file_count), where pixel_count
    is the loaded image's total element count (cv2 image .size).
    """
    paths = glob.glob(dir + '/Images/*.jpg')
    records = []
    for path in paths:
        image = cv2.imread(path)
        records.append([os.path.basename(path), image.size])
    return records, len(paths)
# Main Program: load the image catalog, then dispatch on the user's choice.
# NOTE(review): `dir` shadows the builtin and, along with x_data/n, is read
# as a global by Sort/Search/BinarySearch/LinearSearch — do not rename.
dir = os.path.dirname(__file__)
x_data,n = Read(dir)
print(x_data)
sw = int(input("\n\n1) Sorting. \n2) Searching\nEnter : "))
if sw == 1 :
    Sort(x_data,n)
    print("\nSorting Completed check the Sorted Folder")
elif sw ==2 :
    Search(x_data,dir)
else:
    print("\n\nInvalid Choice")
| [
"noreply@github.com"
] | noreply@github.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.