id stringlengths 1 265 | text stringlengths 6 5.19M | dataset_id stringclasses 7
values |
|---|---|---|
79794 | # Generated by Django 3.2.9 on 2021-11-14 08:06
from decimal import Decimal
from django.db import migrations, models
import djmoney.models.fields
class Migration(migrations.Migration):
    """Django migration: tighten the ``customer`` model's reward fields.

    Alters ``gifts`` to a non-editable integer counter and ``wallet`` to a
    non-editable Iraqi-dinar money field.
    """

    dependencies = [
        ('accounts', '0016_alter_order_location'),
    ]

    operations = [
        # Gift counter: managed by application code only
        # (editable=False hides it from forms and the admin).
        migrations.AlterField(
            model_name='customer',
            name='gifts',
            field=models.IntegerField(default=0, editable=False),
        ),
        # Wallet balance: decimal_places=0 because IQD has no minor unit in
        # practice; max_digits=14 allows very large balances.
        migrations.AlterField(
            model_name='customer',
            name='wallet',
            field=djmoney.models.fields.MoneyField(decimal_places=0, default=Decimal('0'), default_currency='IQD', editable=False, max_digits=14),
        ),
    ]
| StarcoderdataPython |
58235 | from typing import List
from pydantic import BaseModel
from watchmen_model.common import ConnectedSpaceId, DashboardId, LastVisit, UserBasedTuple
class Favorite(UserBasedTuple, LastVisit, BaseModel):
    """A user's favourites: connected-space and dashboard ids pinned by them.

    Mixes in UserBasedTuple (owning user) and LastVisit (last-access
    timestamp) from watchmen_model.common; pydantic performs validation.
    """
    # NOTE(review): mutable list defaults are safe here because pydantic
    # copies field defaults per instance, unlike plain class attributes.
    connectedSpaceIds: List[ConnectedSpaceId] = []
    dashboardIds: List[DashboardId] = []
| StarcoderdataPython |
3378936 | <reponame>bfournie/lldp-decode
#!/usr/bin/env python
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import argparse
import binascii
import json
import logging
import os
import six
import sys
import subprocess
import lldp_parsers
def parse_opts(argv):
    """Parse command-line options for the LLDP decoder.

    :param argv: full argument vector (argv[0] is the program name).
    :returns: argparse.Namespace with an ``output_dir`` attribute.
    """
    arg_parser = argparse.ArgumentParser(
        description='Decode raw LLDP introspection data')
    arg_parser.add_argument(
        '-o', '--output-dir',
        metavar='OUTPUT_DIR',
        default='',
        help="""Output dir for all the templates""")
    return arg_parser.parse_args(argv[1:])
def get_introspection_data(node):
    """Fetch Ironic introspection data for *node* via the openstack CLI.

    Runs ``openstack baremetal introspection data save`` with its output
    redirected to a temporary file, parses that file as JSON, deletes it
    and returns the parsed data.

    :param node: node name or UUID understood by the openstack client.
    :returns: dict with the node's introspection data.
    """
    filename = "tmp-" + node
    cmd = "/bin/openstack baremetal introspection data save " + \
        node + " > " + filename
    try:
        # shell=True is required for the output redirection in cmd.
        p = subprocess.Popen(cmd, shell=True,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE)
    except OSError as e:
        print("Error running introspection data save, Error: %s" % e)
        # Fix: was a bare exit(); sys.exit is the supported non-interactive form.
        sys.exit(1)
    stdout, stderr = p.communicate()
    if p.returncode != 0:
        print(('Error running introspection data save.'
               'Stdout: "%(stdout)s". Stderr: %(stderr)s') %
              {'stdout': stdout, 'stderr': stderr})
    # Fix: the original also called f.close() inside the with block — the
    # context manager already closes the file.
    with open(filename, 'r') as f:
        data = json.loads(f.read())
    os.remove(filename)
    return data
def _parse_lldp_tlvs(tlvs, node):
    """Parse LLDP TLVs into dictionary of name/value pairs

    :param tlvs: list of raw (tlv_type, tlv_value) pairs; values hex-encoded
    :param node: node being introspected
    :returns nv: dictionary of name/value pairs. The
                 LLDP user-friendly names, e.g.
                 "switch_port_id" are the keys
    """
    # Generate name/value pairs for each TLV
    parser = lldp_parsers.LLDPBasicMgmtParser(node)
    for tlv_type, tlv_value in tlvs:
        try:
            data = bytearray(binascii.a2b_hex(tlv_value))
        except TypeError as e:
            # Fix: the original passed the mapping as a second positional
            # argument to print() (logging style), so the placeholders were
            # never interpolated; apply the % operator instead.
            print(
                "TLV value for TLV type %(tlv_type)d not in correct "
                "format, value must be in hexadecimal: %(msg)s" %
                {'tlv_type': tlv_type, 'msg': e})
            continue
        if not parser.parse_tlv(tlv_type, data):
            # Fix: same interpolation bug as above.
            print("LLDP TLV type %d not handled" % tlv_type)
    return parser.nv_dict
def _lldp_decode():
    """Entry point: print decoded LLDP data for the node named in argv[1]."""
    root_logger = logging.getLogger(None)
    if not root_logger.handlers:
        root_logger.addHandler(logging.StreamHandler())
    #opts = parse_opts(sys.argv)
    if len(sys.argv) > 1:
        node = sys.argv[1]
    else:
        # Fix: the original fell through with 'node' unbound, raising a
        # confusing NameError further down.
        sys.exit("Usage: lldp-decode <node-name-or-uuid>")
    data = get_introspection_data(node)
    interfaces = data['inventory']['interfaces']
    if not interfaces:
        # Fix: the original raised undefined names Error and _() here.
        raise RuntimeError('Hardware inventory is empty or missing')
    print("Interface Data for Node %s" % node)
    print("============================================================")
    for iface in interfaces:
        if_name = iface['name']
        tlvs = iface.get('lldp')
        if tlvs is None:
            # Fix: interpolate if_name instead of passing it to print()
            # as a second positional argument.
            print("No LLDP Data found for interface %s" % if_name)
            continue
        print("Interface: %s" % if_name)
        print("++++++++++++++++")
        print("MAC Address: %s" % iface.get('mac_address'))
        nv = _parse_lldp_tlvs(tlvs, node)
        for name, value in nv.items():
            print("%(name)s: %(value)s" % {'name': name, 'value': value})
        print('')
_lldp_decode() | StarcoderdataPython |
4830104 | a = 1
# some comment
b = 2
| StarcoderdataPython |
35772 | import argparse
import json
parser = argparse.ArgumentParser()
parser.add_argument("--text", type=str, help="path to original text file")
parser.add_argument("--train", type=str, help="path to original training data file")
parser.add_argument("--valid", type=str, help="path to original validation data file")
parser.add_argument("--converted_text", type=str, default="Qdesc.txt", help="path to converted text file")
parser.add_argument("--converted_train", type=str, default="train.txt", help="path to converted training file")
parser.add_argument("--converted_valid", type=str, default="valid.txt", help="path to converted validation file")
if __name__=='__main__':
    args = parser.parse_args()
    Qid={} #Entity to id (line number in the description file)
    Pid={} #Relation to id
    def getNum(s):
        # Numeric part of an entity/relation id such as "Q42" -> 42.
        return int(s[1:])
    # Filter the description file down to entities with numeric id <= 1000
    # and record each kept entity's line number in the converted file.
    with open(args.text, "r") as fin:
        with open(args.converted_text, "w") as fout:
            lines = fin.readlines()
            Cnt=0
            for idx, line in enumerate(lines):
                data = line.split('\t')
                assert len(data) >= 2
                assert data[0].startswith('Q')
                desc = '\t'.join(data[1:]).strip()
                if getNum(data[0])>1000:
                    continue
                fout.write(desc+"\n")
                # Map entity id to its line number in the converted file
                # (Cnt counts only kept entities; idx counted all lines).
                Qid[data[0]] = Cnt#idx
                Cnt+=1
    def convert_triples(inFile, outFile):
        # Rewrite (head, relation, tail) triples as numeric ids, skipping
        # triples whose head or tail entity was filtered out above.
        with open(inFile, "r") as fin:
            with open(outFile, "w") as fout:
                lines = fin.readlines()
                for line in lines:
                    data = line.strip().split('\t')
                    assert len(data) == 3
                    if getNum(data[0])>1000 or getNum(data[2]) > 1000:
                        continue
                    if data[1] not in Pid:
                        # Assign relation ids on first sight, in file order.
                        Pid[data[1]] = len(Pid)
                    fout.write("%d %d %d\n"%(Qid[data[0]], Pid[data[1]], Qid[data[2]]))
    convert_triples(args.train, args.converted_train)
    convert_triples(args.valid, args.converted_valid)
| StarcoderdataPython |
126751 | <reponame>Llamasaiko/Track1_Primitive<filename>mysite/reviews/migrations/0001_initial.py
# -*- coding: utf-8 -*-
# Generated by Django 1.10.3 on 2016-11-15 11:46
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial migration for the reviews app: creates the Review_Entity table."""

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Review_Entity',
            fields=[
                # rid: primary key of the review.
                # hid: presumably the id of the reviewed hotel — TODO confirm.
                ('rid', models.IntegerField(primary_key=True, serialize=False)),
                ('hid', models.IntegerField(blank=False, null=False)),
                ('Review_Date', models.CharField(max_length=15)),
                ('Reviewer_Nationality', models.CharField(max_length=50)),
                ('Positive_Review', models.TextField(max_length=2000)),
                ('Negative_Review', models.TextField(max_length=2000)),
                # Derived statistics; nullable because they may be missing
                # from the source data.
                ('Review_Total_Positive_Word_Counts', models.IntegerField(blank=True, null=True)),
                ('Review_Total_Negative_Word_Counts', models.IntegerField(blank=True, null=True)),
                ('Total_Number_of_Reviews_Reviewer_Has_Given', models.IntegerField(blank=True, null=True)),
                ('Reviewer_Score', models.FloatField(blank=True, null=True)),
                ('Tags', models.TextField(blank=True, null=True)),
            ],
        ),
    ]
| StarcoderdataPython |
53067 | # genRandInput.py - Generate ranodom input
def random_inputs(N = 5, maxvarn = 4, maxs = 3):
    """Print random test inputs for affine types $X_n^{(r)}$.

    :param N: number of samples printed per kind
    :param maxvarn: variability range for the subscript n
    :param maxs: maximum value of each specialization s_i

    Each printed line is ``X n r S`` where X is the type letter (A..G),
    n the subscript, r the superscript (twist) and S a list of k random
    specializations drawn uniformly from [0, maxs]; k is the node count
    of the generalized Cartan matrix for that type.
    """
    import random
    sfrom = range(maxs + 1)

    def emit(X, n, r, k):
        # Print one sample: k specializations drawn uniformly from sfrom.
        S = [random.choice(sfrom) for i in range(k)]
        print(X, n, r, S)

    #
    # Aff-1: r = 1; k = n + 1 for every type
    #
    r = 1
    # (type letter, candidate values for n) — ranges mirror the allowed
    # subscripts of each untwisted affine family.
    aff1 = [
        ("A", range(1, maxvarn + 1)),
        ("B", range(3, maxvarn + 3)),
        ("C", range(2, maxvarn + 2)),
        ("D", range(4, maxvarn + 4)),
        ("E", [6, 7, 8]),
        ("F", [4]),
        ("G", [2]),
    ]
    for X, nfrom in aff1:
        for _ in range(N):
            n = random.choice(nfrom)
            emit(X, n, r, n + 1)
    #
    # Aff-2
    #
    r = 2
    # Type $A_n^{(2)}$, n even: k = n//2 + 1.
    # Fix: the original used true division (n/2 + 1), which yields a float
    # on Python 3 and makes range(k) raise TypeError.
    nfrom = range(2, 2 + 2*maxvarn, 2)
    for _ in range(N):
        n = random.choice(nfrom)
        emit("A", n, r, n // 2 + 1)
    # Type $A_n^{(2)}$, n odd: k = (n + 1)//2 + 1 (same division fix).
    nfrom = range(5, 5 + 2*maxvarn, 2)
    for _ in range(N):
        n = random.choice(nfrom)
        emit("A", n, r, (n + 1) // 2 + 1)
    # Type $D_n^{(2)}$: k = n
    nfrom = range(3, 3 + maxvarn)
    for _ in range(N):
        n = random.choice(nfrom)
        emit("D", n, r, n)
    # Type $E_6^{(2)}$: k = n - 1 = 5
    for _ in range(N):
        emit("E", 6, r, 5)
    #
    # Aff-3
    #
    r = 3
    # Type $D_4^{(3)}$: k = n - 1 = 3
    for _ in range(N):
        emit("D", 4, r, 3)
# End of random_inputs(...)
if __name__ == "__main__":
    import argparse
    parser = argparse.ArgumentParser(
        description='Generate random inputs.'
    )
    parser.add_argument(
        '-N',
        metavar='NRAND',
        help='the number of inputs per test cases (default 5)',
        action='store',
        #dest='N',
        type=int,
        default=5,
    )
    parser.add_argument(
        '-n', '--varn',
        metavar='VARN',
        help="the variability range for the parameter 'n' (default 4)",
        action='store',
        type=int,
        default=4,
    )
    parser.add_argument(
        '-s', '--maxs',
        metavar='MAXS',
        help="the max value for each 's_i's (default 3)",
        action='store',
        type=int,
        default=3,
    )
    parser.add_argument(
        '-m', '--message',
        metavar='HDR_MSG',
        help='the header message at the top',
        action='store',
        type=str,
        default=None,
    )
    args = parser.parse_args()  # stray trailing semicolon removed
    # print args
    if args.message:
        # Fix: was a Python 2 print statement ("print '# {}'..."), which is
        # a SyntaxError under Python 3; the parenthesized call is valid on
        # both interpreters and produces identical output.
        print('# {}'.format(args.message))
    random_inputs(args.N, args.varn, args.maxs)
| StarcoderdataPython |
1665377 | <filename>src/main.py
from interface import initialize_interface
from app import App
# Function main creates an instance of the App class and calls its contructor (reads the data from csv file
# and learns from them). After that the user interface is initialized.
def main():
    """Application entry point: build the App model, then start the UI."""
    # Initialization of the application.
    # NOTE(review): App() is constructed with no arguments here even though
    # the module comment mentions settings passed as parameter — confirm.
    app = App()
    # Creating the main user interface of the application.
    initialize_interface(app)


if __name__ == "__main__":
    main()
1764046 | <gh_stars>0
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from model_predict import read_xmls, get_target_image_paths, InputData, EvalSummary
# Ground-truth fixtures for ModelPredictTest below.
# Maps each annotated image file name to its index in correct_input_datas.
correct_index_map = {
    "763a8z0136b6aae04_20181218_042832_-329078_380494.jpg": 0,
    "763a8z0136b6aal04_20181218_042832_-348927_-78318.jpg": 1,
    "763a8z0136b6aaq02_20181218_042832_-619803_-353217.jpg": 2
}

# Expected annotations per image, ordered by the indices above.
# segmentation is a flat polygon point list and bbox a 4-int box —
# presumably COCO-style coordinates; TODO confirm against read_xmls.
correct_input_datas = [{
    'annotations': [{
        "bbox": [526, 482, 539, 498],
        "segmentation": [
            [538, 491, 536, 488, 532, 485, 529, 484, 528, 484, 527, 484, 527, 496, 531, 497, 534, 497, 536, 495, 537,
             494, 537, 493]
        ],
        "category_id": 5
    }]
}, {
    'annotations': [{
        "bbox": [516, 449, 539, 477],
        "segmentation": [
            [534, 460, 528, 455, 524, 452, 521, 454, 521, 460, 522, 464, 523, 468, 525, 473, 526, 474, 528, 473, 530,
             469, 531, 470, 532, 472, 534, 471, 535, 470, 535, 467, 535, 466, 535, 464, 535, 462]
        ],
        "category_id": 5
    }]
}, {
    'annotations': [{
        "bbox": [450, 477, 492, 516],
        "segmentation": [
            [469, 481, 468, 483, 467, 487, 467, 491, 467, 494, 467, 495, 466, 497, 464, 498, 460, 500, 456, 502, 455,
             502, 453, 505, 454, 507, 455, 509, 456, 511, 460, 511, 462, 510, 466, 509, 467, 508, 469, 507, 471, 507,
             481, 506, 485, 506, 487, 505, 488, 501, 488, 499, 487, 496, 486, 493, 485, 492, 484, 492, 482, 492, 481,
             492, 479, 490, 478, 488, 477, 486, 475, 483, 473, 482, 471, 481]
        ],
        "category_id": 5
    }]
}]
class ModelPredictTest(tf.test.TestCase):
    """Unit tests for model_predict: XML fixture parsing and path resolution."""

    def setUp(self):
        # Fixture XMLs live under the test data directory; parsed per test.
        xmls_root_path = "./test_data/mask_annotation_xmls"
        self.input_datas = read_xmls(xmls_root_path)

    def test_read_xmls(self):
        """read_xmls returns one InputData per fixture, matching the maps above."""
        # The fixture set contains exactly three annotated images.
        self.assertEqual(len(self.input_datas), 3, "length of input datas read from xmls must be 3")
        for input_data in self.input_datas:
            self.assertTrue(isinstance(input_data, InputData))
            self.assertTrue(input_data.file_name in correct_index_map)
            correct_input_data = correct_input_datas[correct_index_map[input_data.file_name]]
            self.assertEqual(len(input_data.annotations), len(correct_input_data['annotations']), "Length of Annotations")
            for i, annotation in enumerate(input_data.annotations):
                correct_annotation = correct_input_data['annotations'][i]
                self.assertEqual(annotation.bbox, correct_annotation['bbox'])
                self.assertEqual(annotation.segmentation[0], correct_annotation['segmentation'][0])
                self.assertEqual(annotation.category_id, correct_annotation['category_id'])

    def test_predict_bbox_and_segmentation(self):
        """get_target_image_paths maps each fixture name to its full path."""
        # NOTE(review): hard-coded absolute user path makes this test
        # machine-specific; consider an env var or relative fixture path.
        image_root_dir = "/Users/rangkim/projects/datasets/winforsys/origin/v2/sample/1th_images"
        target_image_paths = get_target_image_paths(self.input_datas, image_root_dir)
        correct_full_paths = {
            '763a8z0136b6aaq02_20181218_042832_-619803_-353217.jpg': '/Users/rangkim/projects/datasets/winforsys/origin/v2/sample/1th_images/ACTR2D/763a8z0136b6aaq02_20181218_042832_-619803_-353217.jpg',
            '763a8z0136b6aae04_20181218_042832_-329078_380494.jpg': '/Users/rangkim/projects/datasets/winforsys/origin/v2/sample/1th_images/ACTR2D/763a8z0136b6aae04_20181218_042832_-329078_380494.jpg',
            '763a8z0136b6aal04_20181218_042832_-348927_-78318.jpg': '/Users/rangkim/projects/datasets/winforsys/origin/v2/sample/1th_images/ACTR2D/763a8z0136b6aal04_20181218_042832_-348927_-78318.jpg'
        }
        self.assertEqual(target_image_paths, correct_full_paths)
if __name__ == "__main__":
tf.test.main() | StarcoderdataPython |
1641843 | <filename>tslib/stats.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon Sep 2 19:24:59 2019
@author: mehrdad
"""
from scipy import stats
import numpy as np
# ----- Stats and Analysis ------------------------- #
def get_pdf_and_cdf(vals, bins):
    """Histogram *vals* into *bins* and return (pdf, cdf, ccdf, bin_edges).

    pdf holds the fraction of samples per bin, cdf its cumulative sum and
    ccdf the complementary distribution (1 - cdf).
    """
    hist_counts, bin_edges = np.histogram(vals, bins)
    probabilities = hist_counts / np.sum(hist_counts)
    cumulative = np.cumsum(probabilities)
    return probabilities, cumulative, 1 - cumulative, bin_edges
def get_outliers_range(vals):
    """Return the (lower, upper) Tukey whisker bounds for *vals*.

    Whiskers extend 1.5 * IQR beyond the quartiles but are clamped to the
    observed minimum and maximum of the data.
    """
    quartile3, quartile1 = np.percentile(vals, [75, 25])
    reach = stats.iqr(vals) * 1.5
    lower_whisker = np.max([quartile1 - reach, np.min(vals)])
    upper_whisker = np.min([quartile3 + reach, np.max(vals)])
    return lower_whisker, upper_whisker
def get_outliers(df, vals, lower, upper):
    """Rows of *df* whose paired *vals* fall outside [lower, upper]."""
    outside = (vals < lower) | (vals > upper)
    return df[outside]
def get_non_outliers(df, column_name, lower, upper):
    """Rows of *df* whose *column_name* value lies within [lower, upper]."""
    col = df[column_name]
    inside = (col >= lower) & (col <= upper)
    return df[inside]
def get_cdf_point_value(cdf, edges, target_proability):
    """First bin upper edge at which the CDF reaches *target_proability*."""
    upper_edges = edges[1:]
    reached = upper_edges[cdf >= target_proability]
    return reached[0]
def get_cumulative_shares(sample_values, smaller_shares_first=False):
    """Lorenz-style cumulative shares of *sample_values* (a pandas Series).

    Returns (sample_share_list, value_share_list): for the k largest (or
    smallest, when smaller_shares_first) samples, the percentage of samples
    covered and the percentage of total value they account for.  The two
    lists can be plotted against each other, e.g.
    plt.plot(sample_shares, sample_value_shares).
    """
    # Percentage contribution of every sample, ordered for accumulation.
    percentages = 100 * sample_values / np.sum(sample_values)
    percentages = percentages.sort_values(ascending=smaller_shares_first)
    n_samples = len(percentages)
    sample_share_list = []
    value_share_list = []
    running_total = 0
    for rank, pct in enumerate(percentages, start=1):
        running_total += pct
        value_share_list.append(running_total)
        sample_share_list.append(100 * rank / n_samples)
    return sample_share_list, value_share_list
def get_cumulative_shares_of_samples(vals, smaller_shares_first=False):
    """Cumulative value share of each sample, largest contributors first.

    Returns (sample_shares, sample_value_shares), both numpy arrays of
    length len(vals), suitable for plotting against each other or passing
    to plt.hist.

    NOTE(review): smaller_shares_first is accepted but currently unused;
    the shares are always accumulated in descending order.
    """
    step = 1 / len(vals)
    # Cumulative share of each sample out of the total sample count.
    sample_shares = np.arange(step, 1 + step, step)
    # Each sample's fraction of the total, sorted descending (double
    # negation turns np.sort into a descending sort).
    fractions = vals / vals.sum()
    fractions = -np.sort(-fractions)
    return sample_shares, np.cumsum(fractions)
1723265 | <gh_stars>1-10
from digital_tv import DigitalTv
from digital_tv_3d import DigitalTv3d
def main():
    """Demo: drive a 2D digital TV, then a 3D one with 3D mode off and on."""
    # Base class: plain digital TV.
    tv = DigitalTv('新潟')
    tv.display(8)
    # Derived class: 3D-capable TV; toggle is3d between display() calls.
    tv3d = DigitalTv3d('新潟')
    tv3d.is3d = False
    tv3d.display(8)
    tv3d.is3d = True
    tv3d.display(8)


if __name__ == '__main__':
    main()
| StarcoderdataPython |
86552 | <gh_stars>0
import mido
import tensorflow as tf
import numpy as np
import random
"""
Implementation of a converter of MIDI files to and from the event-based
vocabulary representation of MIDI files according to Oore et al., 2018
Also some heloer fuctions to be able to use the transformer model properly
Possible MIDI events being considered:
128 note_on events
128 note_off events #includes handling pedal_on and pedal_off events
125 time_shift events #time_shift = 1: 8 ms
32 velocity events
Total number of midi events = 413
The indices of the vocab corresponding to the events will be,
v[ 0] = '<pad>'
v[ 0..128] = note_on
v[129..256] = note_off
v[257..381] = time_shift
v[382..413] = velocity
v[414..415] = '<start>', '<end>'
A list of tokens will be generated from the midi file, and the indices of these
need to be passed into the Embedding
"""
"""MIDI TOKENIZER"""
# Number of distinct MIDI event tokens per family.
note_on_events = 128
note_off_events = note_on_events
time_shift_events = 125  # time_shift = time_shift_events corresponds to 1 second
velocity_events = 32
LTH = 1000  # maximum number of milliseconds handled by one run of time shifts
DIV = 1000 // time_shift_events  # one time_shift step covers DIV (= 8) milliseconds
# Total vocabulary size: four event families plus <pad>, <start> and <end>.
total_midi_events = note_on_events + note_off_events + time_shift_events + velocity_events + 1 + 2
# Vocabulary for the representation where the sustain pedal is modelled as
# longer notes instead of explicit pedal events (easier for the network).
note_on_vocab = [f"note_on_{i}" for i in range(note_on_events)]
note_off_vocab = [f"note_off_{i}" for i in range(note_off_events)]
time_shift_vocab = [f"time_shift_{i}" for i in range(time_shift_events)]
velocity_vocab = [f"set_velocity_{i}" for i in range(velocity_events)]
# Full token vocabulary; index 0 is reserved for padding.
vocab = ['<pad>'] + note_on_vocab + note_off_vocab + time_shift_vocab + velocity_vocab + ['<start>', '<end>']
vocab_size = len(vocab)
# Indices of the special tokens.
pad_token = vocab.index("<pad>")
start_token = vocab.index("<start>")
end_token = vocab.index("<end>")
def Midiparser(fname=None, mid=None):
    """Convert a midi file into parallel lists of vocab events and indices.

    :param fname: path to a midi file (exclusive with mid)
    :param mid: an already-loaded mido.MidiFile (exclusive with fname)
    :returns: (index_list, event_list) where index_list is an int32 numpy
              array of vocabulary indices and event_list the matching list
              of event-name strings.

    Sustain-pedal handling: while the pedal is down, note_off messages are
    not emitted immediately; they are counted in pedal_events and flushed
    later, so the model sees pedal-sustained notes as longer notes rather
    than explicit pedal events.
    """
    assert (fname == None) ^ (mid == None) == True, "Define only one of mid (a loaded midi file) or fname (the path from which to load a midi file)"
    if fname is not None:
        mid = mido.MidiFile(fname)
    # Conversion state.
    delta_time = 0  # accumulates msg.time across skipped messages
    event_list = []  # events as vocab strings
    index_list = []  # indices of the events of event_list in the vocab
    pedal_events = {}  # note -> count of note_offs deferred while pedal down
    pedal_flag = False
    # Create the event list as a list of elements of the vocab.
    for track in mid.tracks:
        for msg in track:
            delta_time += msg.time
            if msg.is_meta:
                continue
            # Flush the accumulated time before any note event is emitted.
            t = msg.type
            if t == "note_on" or (t == "note_off" and not pedal_flag):
                time_to_events(delta_time, event_list=event_list, index_list=index_list)
                delta_time = 0
            if t == "note_on":
                # Get the note and quantized velocity.
                note = msg.note
                vel = velocity_to_bin(msg.velocity)
                # Append the set_velocity and note_on events; +1 accounts
                # for the <pad> token occupying vocab index 0.
                event_list.append(vocab[note_on_events + note_off_events + time_shift_events + vel + 1])
                event_list.append(vocab[note + 1])
                index_list.append(note_on_events + note_off_events + time_shift_events + vel + 1)
                index_list.append(note + 1)
            elif t == "note_off" and not pedal_flag:
                # Pedal up: emit the note_off event directly.
                note = msg.note
                event_list.append(vocab[note_on_events + note + 1])
                index_list.append(note_on_events + note + 1)
            elif t == "note_off" and pedal_flag:
                # Pedal down: defer the note_off until the pedal lifts.
                note = msg.note
                if note not in pedal_events:
                    pedal_events[note] = 0
                pedal_events[note] += 1
            elif msg.type == "control_change":
                if msg.control == 64:
                    # Controller 64 is the sustain pedal; >= 64 means down.
                    if msg.value >= 64:
                        pedal_flag = True
                    elif msg.value <= 63:
                        pedal_flag = False
    # NOTE(review): placement reconstructed — the deferred note_offs are
    # flushed once after all tracks are scanned; confirm they are not meant
    # to flush as soon as the pedal is released instead.
    # Perform note offs that occurred when pedal was on, after the pedal is lifted.
    for note in pedal_events:
        for i in range(pedal_events[note]):
            # Add the pending time events, then the note_off itself.
            time_to_events(delta_time, event_list=event_list, index_list=index_list)
            delta_time = 0
            event_list.append(vocab[note_on_events + note + 1])
            index_list.append(note_on_events + note + 1)
    # Restart the pedal events list.
    pedal_events = {}
    # Return the lists of events.
    return np.array(index_list, dtype=np.int32), event_list
def Listparser(index_list=None, event_list=None, fname="test", tempo=512820):
    """Convert an event list or index list back into a mido MidiFile.

    :param index_list: sequence of vocabulary indices (exclusive with
                       event_list); tf tensors are unwrapped per element
    :param event_list: sequence of event-name strings (exclusive with
                       index_list)
    :param fname: track name written into the midi meta track
    :param tempo: constant tempo in microseconds per beat
    :returns: a mido.MidiFile with one meta track and one piano track.
    """
    assert (event_list == None) ^ (index_list == None) == True, "Input either the event_list or index_list but not both"
    # Convert event_list to index_list.
    if event_list is not None:
        assert type(event_list[0]) == str, "All events in event_list must be str"
        index_list = events_to_indices(event_list)
    # Set up the midi file and tracks to be added to it.
    mid = mido.MidiFile()  # ticks_per_beat should be 480
    meta_track = mido.MidiTrack()
    track = mido.MidiTrack()
    # Set up the config (meta) track.
    meta_track.append(mido.MetaMessage("track_name").copy(name=fname))
    meta_track.append(mido.MetaMessage("smpte_offset"))  # open track
    # Time signature (assumed 4/4).
    time_sig = mido.MetaMessage("time_signature")
    time_sig = time_sig.copy(numerator=4, denominator=4)
    meta_track.append(time_sig)
    # Key signature (assumed C).
    key_sig = mido.MetaMessage("key_signature")
    meta_track.append(key_sig)
    # Tempo (assumed constant for the whole file).
    set_tempo = mido.MetaMessage("set_tempo")
    set_tempo = set_tempo.copy(tempo=tempo)
    meta_track.append(set_tempo)
    # End of meta track; time is delta time.
    end = mido.MetaMessage("end_of_track")
    end = end.copy(time=0)
    meta_track.append(end)
    # Set up the piano track: program 0 is piano.
    program = mido.Message("program_change")
    track.append(program)
    # Control message header for the track.
    cc = mido.Message("control_change")
    track.append(cc)
    # Initialize the running time and velocity state.
    delta_time = 0
    vel = 0
    # Iterate over the events to reconstruct the midi messages.
    for idx in index_list:
        if tf.is_tensor(idx):
            idx = idx.numpy().item()
        if idx == 0:  # <pad> token: skip
            continue
        idx = idx - 1  # subtract 1 to undo the <pad> offset in the vocab
        if 0 <= idx < note_on_events + note_off_events:
            if 0 <= idx < note_on_events:
                # note_on event: use the most recent set_velocity value.
                note = idx
                t = "note_on"
                v = vel
            else:
                # note_off event: velocity fixed at 127.
                note = idx - note_on_events
                t = "note_off"
                v = 127
            # Build the message carrying the accumulated delta time.
            msg = mido.Message(t)
            msg = msg.copy(note=note, velocity=v, time=delta_time)
            # Reset the running time and velocity.
            delta_time = 0
            vel = 0
            track.append(msg)
        elif note_on_events + note_off_events <= idx < note_on_events + note_off_events + time_shift_events:
            # time_shift event: cut_time runs from 1 to time_shift_events.
            cut_time = idx - (note_on_events + note_off_events - 1)
            delta_time += cut_time * DIV  # DIV converts bins to milliseconds
        elif note_on_events + note_off_events + time_shift_events <= idx < total_midi_events - 3:  # exclude <start>/<end>
            # set_velocity event: remember for the next note_on.
            vel = bin_to_velocity(idx - (note_on_events + note_off_events + time_shift_events))
    # End the piano track.
    end2 = mido.MetaMessage("end_of_track").copy(time=0)
    track.append(end2)
    # Assemble and return the midi file.
    mid.tracks.append(meta_track)
    mid.tracks.append(track)
    return mid
def events_to_indices(event_list, vocab=vocab):
    """Map event-name strings to their vocabulary indices (as a tf tensor)."""
    positions = [vocab.index(name) for name in event_list]
    return tf.convert_to_tensor(positions)
def indices_to_events(index_list, vocab=vocab):
    """Map vocabulary indices back to their event-name strings."""
    return [vocab[position] for position in index_list]
def velocity_to_bin(velocity, step=4):
    """Quantize a MIDI velocity (0-127) to a bin index of width *step*.

    Flooring each velocity to the previous multiple of *step* keeps the
    vocabulary small without a meaningful loss of dynamics resolution.
    """
    assert (128 % step == 0), "128 possible midi velocities must be divisible into the number of bins"
    assert 0 <= velocity <= 127, f"velocity must be between 0 and 127, not {velocity}"
    return velocity // step
def bin_to_velocity(_bin, step=4):
    """Expand a quantized velocity bin (0..127//step) back to a MIDI velocity."""
    velocity = _bin * step
    assert (0 <= velocity <= 127), f"bin * step must be between 0 and 127 to be a midi velocity not {_bin*step}"
    return int(velocity)
def time_to_events(delta_time, event_list=None, index_list=None):
    """Append time-shift tokens covering *delta_time* ms to the given lists.

    Because a MIDI message's time is the delta since the previous message,
    the resulting time-shift tokens must be emitted before the next note
    tokens.  Either or both of event_list / index_list are extended in
    place; a None argument is simply skipped.
    """
    base = note_on_events + note_off_events
    for chunk in time_cutter(delta_time):
        if event_list is not None:
            event_list.append(vocab[base + chunk])
        if index_list is not None:
            index_list.append(base + chunk)
def time_cutter(time, lth=LTH, div=DIV):
    """Split *time* (in ticks/ms) into time-shift bins of at most lth // div.

    At the project's fixed tempo (480 ticks per beat, 125 bpm) one tick is
    one millisecond, so *time* is effectively milliseconds.  The result is
    a list of integers in [1, lth // div]: one full-size entry for every
    complete lth-millisecond span, plus the rounded remainder when nonzero.
    Zero-length shifts are never emitted.
    """
    assert (lth % div == 0), "lth must be divisible by div"
    full_spans, remainder_ms = divmod(time, lth)
    shifts = [real_round(lth / div)] * full_spans
    leftover = real_round(remainder_ms / div)
    if leftover > 0:
        shifts.append(leftover)
    return shifts
def check_note_pairs(fname=None, mid=None, return_notes=False):
    """Check that every note_on in a midi file is paired with a note_off.

    :param fname: path to a midi file (exclusive with mid)
    :param mid: an already-loaded midi file object (exclusive with fname)
    :param return_notes: if True, return the balance dict instead of a bool
    :returns: True when all notes balance, else False; or the dict of
              note -> (note_ons - note_offs) when return_notes is True.
    """
    assert (fname == None) ^ (mid == None) == True, "Define only one of mid (a loaded midi file) or fname (the path from which to load a midi file)"
    if fname is not None:
        mid = mido.MidiFile(fname)
    notes = {}
    for track in mid.tracks:
        for msg in track:
            if msg.is_meta or (msg.type != "note_on" and msg.type != "note_off"):
                continue
            note = msg.note
            t = msg.type
            if note not in notes:
                notes[note] = 0
            if t == "note_on":
                notes[note] += 1
            elif t == "note_off":
                notes[note] -= 1
    flag = True  # all note pairs exist
    for n in notes:
        # Fix: the original tested notes[note] — the last note seen in the
        # message loop — on every iteration instead of the loop variable,
        # so imbalances in other notes were missed whenever the final note
        # happened to be balanced.
        if notes[n] != 0:
            flag = False
            break
    if return_notes:
        return notes
    return flag
def real_round(a):
    """Round *a* to the nearest integer, halves going up (toward +infinity).

    The builtin round() rounds halves to the nearest even integer; this
    helper always bumps a fractional part of .5 or more up by one.
    """
    whole, frac = divmod(a, 1)
    bump = 1 if frac >= 0.5 else 0
    return int(whole) + bump
"""TRANSFORMER UTIL"""
MAX_LENGTH = 2048
#stuff to make the data augmentation easier
noe = note_on_events
nfe = note_off_events
ne = noe + nfe #note events
tse = time_shift_events
def skew(t: tf.Tensor):
    """
    Implements skewing procedure outlined in Huang et. al 2018 to reshape the
    dot(Q, RelativePositionEmbeddings) matrix into the correct shape for which
    Tij = compatibility of ith query in Q with relative position (j - i)
    this implementation accounts for tensors of rank n
    Algorithm:
    1. Pad T
    2. Reshape
    3. Slice
    Assumes T is of shape (..., L, L)
    """
    # pad T: one zero column on the left of the last axis; the [0, 0] pairs
    # leave every leading (batch/head) axis untouched.
    middle_dims = [[0, 0] for _ in range(tf.rank(t) - 1)]  # allows padding to be generalized to rank n
    padded = tf.pad(t, [*middle_dims, [1, 0]])
    # reshape to (..., L+1, L): the inserted column shifts row i by i slots.
    srel = tf.reshape(padded, (*padded.shape[:-2], t.shape[-1] + 1, t.shape[-2]))
    # final touches: flatten the leading dims, drop the first (garbage) row,
    # and restore the original shape.
    srel = tf.reshape(srel, (-1, *srel.shape[-2:]))  # flatten prior dims
    srel = srel[:, 1:]  # slice
    return tf.reshape(srel, t.shape)  # prior shape
def data_cutter(data, lth=MAX_LENGTH):
    """
    takes a set of long input sequences, data, and cuts each sequence into
    smaller sequences of length lth + 2
    the start and end tokens are also added to the data

    Returns a single tensor of shape (num_chunks, lth + 2); short leftovers
    are padded on the right with pad_token.
    """
    # Make sure data is iterable (a single sequence becomes a 1-list).
    if type(data) != list:
        data = [data]
    # Initialize the cut data list and the short sequences to pad later.
    cdata = []
    seqs_to_pad = []
    for seq in data:
        # Find the highest multiple of lth not exceeding len(seq) so that up
        # to that point the data can be cut into even multiples of lth.
        seq_len = len(seq)
        if lth > seq_len:
            seqs_to_pad.append(seq)
            continue
        mult = seq_len // lth
        # Iterate over parts of the sequence of length lth and add to cdata.
        for i in range(0, lth * (mult), lth):
            _slice = seq[i:i + lth]
            cdata.append(_slice)
        # Take the last < lth elements of the sequence and add to seqs_to_pad.
        # NOTE(review): .size implies seq is expected to be a numpy array or
        # tensor here, not a plain list — confirm the caller contract.
        idx = mult * lth
        final_elems = seq[idx:]
        seqs_to_pad.append(final_elems) if final_elems.size > 0 else None
    # Add the start and end tokens around every rank-1 chunk.
    for i, vec in enumerate(cdata):
        # assume vec is of rank 1
        cdata[i] = tf.pad(tf.pad(vec, [[1, 0]], constant_values=start_token), \
            [[0, 1]], constant_values=end_token)
    for i, vec in enumerate(seqs_to_pad):
        seqs_to_pad[i] = tf.pad(tf.pad(vec, [[1, 0]], constant_values=start_token), \
            [[0, 1]], constant_values=end_token)
    # Right-pad the short sequences to lth + 2 and stack everything.
    if seqs_to_pad:
        padded_data = tf.keras.preprocessing.sequence.pad_sequences(seqs_to_pad, maxlen=lth + 2,
                                                                    padding='post', value=pad_token)
        final_data = tf.concat([tf.expand_dims(cd, 0) for cd in cdata] + \
            [tf.expand_dims(pd, 0) for pd in padded_data], 0)
    else:
        final_data = tf.concat([tf.expand_dims(cd, 0) for cd in cdata], 0)
    return final_data
def start_end_separator(data, lth=MAX_LENGTH):
    """
    function to return only the first and last lth tokens of the index lists in data
    as numpy arrays
    input index lists are assumed to be numpy arrays
    also pads the input data with start and end tokens
    """
    if type(data) != list:
        data = [data]
    sep_data = []
    seqs_to_pad = []
    # Separate the data and append to the correct lists.
    for arr in data:
        if len(arr) == lth:
            sep_data.append(arr)
        elif len(arr) < lth:
            seqs_to_pad.append(arr)
        else:
            # Longer sequences contribute both their first and last lth
            # tokens; when len(arr) < 2 * lth the two windows overlap but
            # both are still emitted.
            first = arr[:lth]
            last = arr[-lth:]
            sep_data.append(first)
            sep_data.append(last)
    # Add the start and end tokens around every rank-1 sequence.
    for i, vec in enumerate(sep_data):
        # assume vec is of rank 1
        sep_data[i] = tf.pad(tf.pad(vec, [[1, 0]], constant_values=start_token), \
            [[0, 1]], constant_values=end_token)
    for i, vec in enumerate(seqs_to_pad):
        seqs_to_pad[i] = tf.pad(tf.pad(vec, [[1, 0]], constant_values=start_token), \
            [[0, 1]], constant_values=end_token)
    # Right-pad the short sequences to a uniform lth + 2 length.
    padded_data = tf.keras.preprocessing.sequence.pad_sequences(seqs_to_pad, maxlen=lth + 2,
                                                                padding='post', value=pad_token)
    # Concatenate the full-length and padded sequences into one tensor.
    return tf.concat([tf.expand_dims(sd, 0) for sd in sep_data] + \
        [tf.expand_dims(pd, 0) for pd in padded_data], 0)
def stretch_time(seq, time_stretch):
    """
    Stretch the time-shift events of an index list by factor ``time_stretch``.

    Non-time indices are copied through unchanged; consecutive time-shift
    indices (vocab range ``ne < idx <= ne + tse``) are converted to
    milliseconds, scaled, accumulated, and re-emitted via ``time_to_events``.
    Returns an ``np.int32`` ndarray (kept as numpy rather than a tf tensor
    so the result can be saved cheaply).
    """
    time_shifted_seq = []
    delta_time = 0
    # Identity stretch: nothing to recompute, just normalise the type.
    if time_stretch == 1:
        if type(seq) == np.ndarray:
            return seq
        else:
            return np.array(seq)
    for idx in seq:
        idx = idx.item()  # assumes numpy scalar elements -- TODO confirm
        # If idx is a time_shift event, accumulate the stretched duration.
        if ne < idx <= ne + tse:
            time = idx - (ne - 1)  # index within the time-shift vocab region
            delta_time += real_round(time * DIV * time_stretch)
        else:
            # Emit the accumulated stretched time, then the event itself.
            time_to_events(delta_time, index_list=time_shifted_seq)
            delta_time = 0
            time_shifted_seq.append(idx)
    # BUG FIX: flush any time shifts accumulated at the tail of the
    # sequence; the original silently dropped trailing time-shift events.
    # (time_to_events is already called with delta_time == 0 inside the
    # loop, so a zero flush here is harmless.)
    time_to_events(delta_time, index_list=time_shifted_seq)
    return np.array(time_shifted_seq, dtype=np.int32)
def aug(data, note_shifts=np.arange(-2, 3), time_stretches=[1, 1.05, 1.1],
        sample_random_time=False, sample_size=None):
    """
    Augment a dataset of event-index sequences (as produced by
    miditokenizer3) by pitch shifting and time stretching.

    Parameters
    ----------
    data : list of np.ndarray (or a single array)
        Sequences of event indices.
    note_shifts : list or np.ndarray of int
        Semitone offsets applied to note-on/note-off indices.
    time_stretches : list of float
        Stretch coefficients; reciprocals are added automatically and 1 is
        always included. Must be positive.
    sample_random_time : bool
        If True, draw ``sample_size`` random stretches per sequence instead
        of applying all of them.
    sample_size : int or None
        Number of random stretches; must be set iff ``sample_random_time``.

    Returns
    -------
    list of np.ndarray : the augmented sequences.
    """
    assert type(note_shifts) == list or type(note_shifts) == np.ndarray, \
        "note_shifts must be a list of integers(number of semitones) to shift pitch by"
    assert type(time_stretches) == list, "time_stretches must be a list of coefficients"
    assert (sample_random_time == True) ^ (sample_size is None), "Define none or both of sample_random_time and sample_size"
    assert (sample_size is None) or type(sample_size) == int, "sample_size must be an int"
    # Make sure data is in a list.
    if type(data) != list:
        data = [data]
    # BUG FIX: work on a copy so the caller's list (or the shared mutable
    # default argument) is never modified in place -- the original appended
    # to ``time_stretches`` directly.
    stretch_coeffs = list(time_stretches)
    if 1 not in stretch_coeffs:
        stretch_coeffs.append(1)
    ts = []
    for t in stretch_coeffs:
        ts.append(t)
        if t != 1:
            ts.append(1 / t)
    ts.sort()  # make it ascending
    predicted_len = len(data) * len(note_shifts) * sample_size if sample_random_time else len(data) * len(note_shifts) * len(ts)
    print(f'Predicted number of augmented data samples: {predicted_len}')
    # Pitch-shift every sequence by every shift in note_shifts.
    note_shifted_data = []
    count = 0
    for seq in data:
        for shift in note_shifts:
            # BUG FIX: int() accepts both numpy scalars and plain Python
            # ints; the original ``shift.item()`` crashed on the plain
            # integer lists the assert above explicitly allows.
            _shift = int(shift)
            note_shifted_seq = []
            if _shift == 0:
                note_shifted_seq = seq
            else:
                for idx in seq:
                    _idx = idx + _shift  # shift the index
                    # Keep the shifted index only if it stays inside the
                    # same vocab region (note-on or note-off); otherwise
                    # fall back to the unshifted index.
                    if (0 < idx <= noe and 0 < _idx <= noe) or (noe < idx <= ne and noe < _idx <= ne):
                        note_shifted_seq.append(_idx)
                    else:
                        note_shifted_seq.append(idx)
            note_shifted_data.append(np.array(note_shifted_seq, dtype=np.int32))
            count += 1
            if not sample_random_time:
                print(f'Augmented data sample {count} created')
            else:
                print(f'Note shifted sample {count} created')
    # Time-stretch the note-shifted data.
    time_shifted_data = []
    if sample_random_time:
        count = 0
    for seq in note_shifted_data:
        # Stretch by every coefficient, or by a random sample of them.
        if sample_random_time:
            time_stretches_ = random.sample(ts, sample_size)
            for _ in time_stretches_:
                time_shifted_seq = stretch_time(seq, _)
                time_shifted_data.append(time_shifted_seq)
                count += 1
                print(f"Augmented data sample {count} created")
            continue
        for time_stretch in ts:
            time_shifted_seq = stretch_time(seq, time_stretch)
            time_shifted_data.append(time_shifted_seq)
            if time_stretch != 1:
                # stretch == 1 was already counted in the note-shift pass
                count += 1
                print(f"Augmented data sample {count} created")
    return time_shifted_data
"""TEST MODEL ACCURACY"""
def generate_scale(note=60, delta_times=[500], velocities=list(np.arange(9, 24)),
                   mode='ionian', octaves=1):
    """
    Generate an event-index list that plays a scale up and back down.

    Parameters
    ----------
    note : int
        Root MIDI note (shifted by one into the 1-based note-on vocab).
    delta_times : list of int
        Inter-note durations in ms, cycled over the scale.
    velocities : list of int
        Velocities, cycled over the scale; values above ``velocity_events``
        are re-binned with ``velocity_to_bin``.
    mode : str
        One of the modes listed below (church modes plus major/harmonic/
        melodic minor variants).
    octaves : int
        Number of octaves to span.

    Returns
    -------
    np.ndarray of int32 event indices.
    """
    # BUG FIX: copy so the mutable default argument list is never modified
    # in place (the re-binning loop below assigns into ``velocities``).
    velocities = list(velocities)
    note = note + 1
    iter_times = iter([time_cutter(dt) for dt in delta_times])
    for i, velocity in enumerate(velocities):
        if velocity > velocity_events:
            velocities[i] = velocity_to_bin(velocity)
    iter_vel = iter(velocities)
    modes = ['ionian', 'dorian', 'phrygian', 'lydian', 'mixolydian', 'aeolian',
             'locrian', 'major', 'harmonic', 'melodic']
    # Semitone steps up then back down, one row per mode above.
    mode_steps = np.array([[0, 2, 4, 5, 7, 9, 11, 12, 11, 9, 7, 5, 4, 2, 0],
                           [0, 2, 3, 5, 7, 9, 10, 12, 10, 9, 7, 5, 3, 2, 0],
                           [0, 1, 3, 5, 7, 8, 10, 12, 10, 8, 7, 5, 3, 1, 0],
                           [0, 2, 4, 6, 7, 9, 11, 12, 11, 9, 7, 6, 4, 2, 0],
                           [0, 2, 4, 5, 7, 9, 10, 12, 10, 9, 7, 5, 4, 2, 0],
                           [0, 2, 3, 5, 7, 8, 10, 12, 10, 8, 7, 5, 3, 2, 0],
                           [0, 1, 3, 5, 6, 8, 10, 12, 10, 8, 6, 5, 3, 1, 0],
                           [0, 2, 4, 5, 7, 9, 11, 12, 11, 9, 7, 5, 4, 2, 0],
                           [0, 2, 3, 5, 7, 8, 11, 12, 11, 8, 7, 5, 3, 2, 0],
                           [0, 2, 3, 5, 7, 9, 11, 12, 10, 8, 7, 5, 3, 2, 0]])
    mode_steps = mode_steps[modes.index(mode)]
    # Expand the single-octave pattern over the requested octaves.
    middle = mode_steps.max() + 12 * (octaves - 1)
    ascend_ = mode_steps[:len(mode_steps) // 2]
    ascend = ascend_[:]
    descend_ = mode_steps[1 + len(mode_steps) // 2:] + 12 * (octaves - 1)
    descend = descend_[:]
    for i in range(octaves - 1):
        ascend_ = ascend_ + 12
        ascend = np.concatenate((ascend, ascend_))
        descend_ = descend_ - 12
        descend = np.concatenate((descend, descend_))
    mode_steps = np.concatenate((ascend, np.expand_dims(middle, 0), descend))
    scale_ons = np.add(note, mode_steps)
    # BUG FIX: the original line ended with a stray line-continuation
    # backslash, fusing this statement with ``idx_list = []`` below and
    # producing a SyntaxError.
    scale_offs = np.add(scale_ons, note_on_events)
    idx_list = []
    for x, y in zip(scale_ons, scale_offs):
        # Get velocity, cycling back to the start when exhausted.
        try:
            velocity = next(iter_vel)
        except StopIteration:
            iter_vel = iter(velocities)
            velocity = next(iter_vel)
        velocity = vocab.index(f"set_velocity_{velocity}")
        # Get delta time, cycling likewise.
        try:
            delta_time = next(iter_times)
        except StopIteration:
            iter_times = iter([time_cutter(dt) for dt in delta_times])
            delta_time = next(iter_times)
        # Emit velocity, note-on, the time shifts, then note-off.
        idx_list.append(velocity)
        idx_list.append(x)
        for time in delta_time:
            idx_list.append(vocab.index(f"time_shift_{time - 1}"))
        idx_list.append(y)
    return np.array(idx_list, dtype=np.int32)
| StarcoderdataPython |
139510 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import scarlet.scheduling.fields
class Migration(migrations.Migration):
    """Create the ``Schedule`` model for scarlet's scheduling app."""

    # Requires the content-types framework: Schedule points at an arbitrary
    # model through its ``content_type`` foreign key.
    dependencies = [
        ('contenttypes', '0002_remove_content_type_name'),
    ]

    operations = [
        migrations.CreateModel(
            name='Schedule',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                # JSON-encoded args identifying/constructing the target object.
                ('object_args', scarlet.scheduling.fields.JSONField()),
                # When the scheduled action should run.
                ('when', models.DateTimeField()),
                ('action', models.CharField(max_length=255, null=True)),
                # JSON-encoded keyword arguments for the action.
                ('json_args', scarlet.scheduling.fields.JSONField()),
                ('content_type', models.ForeignKey(to='contenttypes.ContentType')),
            ],
        ),
    ]
| StarcoderdataPython |
1783774 | <reponame>spacetelescope/exovetter
import exovetter.centroid.fastpsffit as fpf
import exovetter.centroid.covar as covar
import exovetter.centroid.disp as disp
import exovetter.utils as utils
import matplotlib.pyplot as plt
import numpy as np
def compute_diff_image_centroids(
    time,
    cube,
    period_days,
    epoch,
    duration_days,
    max_oot_shift_pix=1.5,
    plot=False
):
    """Compute difference image centroid shifts for every transit in a dataset.

    Given a data cube containing a time-series of images, and a transit
    defined by a period, epoch and duration, compute the centroid shift
    between in- and out-of-transit images for each transit covered by
    the time-series.

    Inputs
    -----------
    time
        (1d np array) Times of each slice of the data cube. Units of days.
    cube
        (3d np array) Shape is (numCadences, numRows, numCols): numCadence
        images, each of shape (numRows, numCols).
    period_days
        (float) Orbital period of transit.
    epoch
        (float) Epoch of transit centre in the same time system as `time`.
    duration_days
        (float) Duration of transit.
    max_oot_shift_pix
        (float) Passed to `fastpsffit.fastGaussianPrfFit()`.
    plot
        (bool) Produce a diagnostic figure per transit.

    Returns
    ---------------
    A 2d numpy array (one row per transit event) with columns:

    * Out of transit (OOT) centroid column
    * OOT row
    * In Transit (ITR) column
    * ITR row
    * Difference image centroid (DIC) column
    * DIC row
    * DIC flag. A non-zero value means the centroid is untrustworthy.

    and a list of figure handles (None entries when plot is False).

    ITR images are co-adds of all in-transit cadences; OOT images co-add
    one transit-duration worth of cadences from both before and after the
    transit. DIC images are OOT minus in-transit.
    """
    # Drop cadences with undefined timestamps before searching for transits.
    isnan = np.isnan(time)
    time = time[~isnan]
    cube = cube[~isnan]
    transits = getIngressEgressCadences(
        time, period_days, epoch, duration_days)
    figs = []
    centroids = []
    # Iterate directly over the (start, end) cadence pairs.
    for cin in transits:
        cents, fig = measure_centroids(
            cube,
            cin,
            max_oot_shift_pix=max_oot_shift_pix,
            plot=plot
        )
        centroids.append(cents)
        figs.append(fig)
    centroids = np.array(centroids)
    return centroids, figs
def measure_centroid_shift(centroids, plot=False):
    """Measure the average offset of the DIC centroids from the OOT centroids.

    Inputs
    ----------
    centroids
        (2d np array) Output of :func:`compute_diff_image_centroids`.
    plot
        (bool) Produce a diagnostic plot.

    Returns
    -----------
    offset
        (float) Size of offset in pixels (or whatever unit `centroids`
        is in).
    signif
        (float) The statistical significance of the transit. Values
        close to 1 mean the transit is likely on the target star.
        Values less than ~1e-3 suggest the target is not the
        source of the transit.
    fig
        A figure handle. Is **None** if plot is **False**.
    """
    # DIC minus OOT, per transit (columns 4/5 are DIC col/row, 0/1 OOT).
    dcol = centroids[:, 4] - centroids[:, 0]
    drow = centroids[:, 5] - centroids[:, 1]
    flags = centroids[:, -1].astype(bool)
    # Only unflagged (trustworthy) transits contribute to the statistic.
    offset_pix, signif = covar.compute_offset_and_signif(
        dcol[~flags], drow[~flags])
    fig = None
    if plot:
        fig = covar.diagnostic_plot(dcol, drow, flags)
    return offset_pix, signif, fig
def getIngressEgressCadences(time, period_days, epoch_btjd, duration_days):
    """Return (start, end) cadence index pairs, one row per transit event."""
    assert np.all(np.isfinite(time))
    # Boolean mask of in-transit cadences, then collapse contiguous runs
    # into (ingress, egress) index pairs.
    in_transit = utils.mark_transit_cadences(
        time, period_days, epoch_btjd, duration_days)
    return np.array(utils.plateau(in_transit, 0.5))
def measure_centroids(cube, cin, max_oot_shift_pix=0.5, plot=False):
    """Private function of :func:`compute_diff_image_centroids`.

    Computes OOT, ITR and diff images for a single transit event,
    and computes each image centroid by fitting a Gaussian.

    Inputs
    ---------
    cube
        (3d numpy array) Timeseries of images.
    cin
        (2-tuple) Cadences of start and end of transit.
    max_oot_shift_pix
        (float) OOT/ITR centroids are constrained in the fit to be within
        this distance of the centre of the postage stamp image.
    plot
        (bool) True if a plot should be produced.

    Returns
    ---------
    A 7-element list (OOT col/row, ITR col/row, DIC col/row, flag) and the
    figure handle (None when plot is False).
    """
    oot, intrans, diff, ax = generateDiffImg(cube, cin, plot=plot)
    # Constrain fit to within +- max_oot_shift_pix of the stamp centre.
    nr, nc = oot.shape
    # Silently pin max shift to size of postage stamp
    max_oot_shift_pix = min(max_oot_shift_pix, nc/2, nr/2)
    # Short names for easier reading
    c2 = nc/2
    r2 = nr/2
    ms = max_oot_shift_pix
    bounds = [
        (c2-ms, c2+ms),
        (r2-ms, r2+ms),
        (0.2, 1),
        (None, None),
        (None, None),
    ]
    guess = pickInitialGuess(oot)
    ootSoln = fpf.fastGaussianPrfFit(oot, guess, bounds=bounds)
    guess = pickInitialGuess(diff)
    diffSoln = fpf.fastGaussianPrfFit(diff, guess)
    guess = pickInitialGuess(intrans)
    intransSoln = fpf.fastGaussianPrfFit(intrans, guess, bounds=bounds)
    # BUG FIX: the original ``np.all(map(...))`` tested the truthiness of
    # the map object itself (always True), so this warning could never
    # fire. Test each solution's success flag explicitly.
    if not all(soln.success for soln in (ootSoln, diffSoln, intransSoln)):
        print("WARN: Not all fits converged for [%i, %i]" % (cin[0], cin[1]))
    if plot:
        clr = "orange"
        if diffSoln.success:
            clr = "green"
        res = diffSoln.x
        disp.plotCentroidLocation(res[0], res[1], marker="^", color=clr,
                                  label="diff")
        res = ootSoln.x
        disp.plotCentroidLocation(res[0], res[1], marker="o", color=clr,
                                  label="OOT")
        res = intransSoln.x
        disp.plotCentroidLocation(res[0], res[1], marker="+", color=clr,
                                  label="InT")
        plt.legend(fontsize=12, framealpha=0.7, facecolor='silver')
    out = []
    out.extend(ootSoln.x[:2])
    out.extend(intransSoln.x[:2])
    out.extend(diffSoln.x[:2])
    # Flag untrustworthy difference-image centroids: fit failure (1) or a
    # fitted amplitude barely above the image background (2).
    flag = 0
    if not diffSoln.success:
        flag = 1
    if diffSoln.x[3] < 4 * np.median(diff):
        flag = 2
    out.append(flag)
    return out, ax
def generateDiffImg(cube, transits, plot=False):
    """Build out-of-transit, in-transit, and difference images for one transit.

    One transit-duration worth of cadences immediately before and after the
    event are each co-added; their average forms the out-of-transit (OOT)
    image. The in-transit cadences are co-added separately, and the
    difference image is OOT minus in-transit.

    Inputs
    ------------
    cube
        (np 3d array) Datacube of postage stamps.
    transits
        (2-tuple) Indices of the first and last in-transit cadence.
    plot
        (bool) If true, generate a diagnostic plot.

    Returns
    -------------
    (oot, during, diff, fig): three 2d images and a figure handle
    (None when plot is False).

    Notes
    ---------
    When there is image motion, the before and after images won't be
    identical, and the difference image will show distinct departures
    from the ideal prf.
    NOTE(review): no bounds check -- a transit too close to either end of
    the cube silently wraps/truncates the before/after slices.
    """
    t0, t1 = transits
    width = t1 - t0
    before = cube[t0 - width:t0].sum(axis=0)
    during = cube[t0:t1].sum(axis=0)
    after = cube[t1:t1 + width].sum(axis=0)
    oot = 0.5 * (before + after)
    diff = oot - during

    fig = None
    if plot:
        fig = plt.figure()
        fig.set_size_inches(16, 4)
        disp.plotTransit(fig, oot, during, diff)
    return oot, during, diff, fig
def pickInitialGuess(img):
    """Pick an initial parameter vector for `fastGaussianPrfFit`.

    Inputs
    ---------
    img
        (2d np array) Image to be fit.

    Returns
    ---------
    A list [col, row, sigma, amplitude, sky]: the guess is centred on the
    brightest pixel, with sigma 0.5 px, amplitude eight times the peak
    value, and the image median as the sky level.
    """
    peak_flat = int(np.argmax(img))
    row, col = divmod(peak_flat, img.shape[1])
    return [col + 0.5, row + 0.5, 0.5, 8 * np.max(img), np.median(img)]
| StarcoderdataPython |
3231171 | # encoding: utf-8
# 该文件,为无界面调度文件,以vtClient为基准,
# 通过vt_setting访问远程RPC server端口,进行指令调度
# runDispatchClient.py 指令 + 参数/参数文件
# 华富资产, 李来佳
import sys
import os
import json
from pick import pick
from vnpy.rpc import RpcClient
from vnpy.trader.utility import load_json
class DispatchClient(RpcClient):
    """RpcClient subclass that remembers the name of the configured server
    it is connected to and exposes a simple status string."""

    def __init__(self, name):
        super().__init__()
        self.name = name

    def callback(self, topic, data):
        """
        Realize callable function
        """
        # Published updates are intentionally ignored by this console client.
        pass

    def status(self):
        """Return a human-readable connection status string."""
        if self.connected:
            return u'connected:[{}]'.format(self.name)
        else:
            return u'No connect'

    @property
    def connected(self):
        """Whether the underlying RPC connection is active.

        BUG FIX: the original read ``self.__active``, which name-mangles to
        ``_DispatchClient__active`` and never matches the flag the
        ``RpcClient`` base class sets on itself (``_RpcClient__active``),
        raising AttributeError. Read the base-class flag defensively
        instead. NOTE(review): depends on vnpy's RpcClient keeping its
        private ``__active`` attribute -- confirm against the installed
        vnpy version.
        """
        return getattr(self, "_RpcClient__active", False)
def run_dispatch_client():
    """
    Run the remote dispatch client (interactive console menu).
    :return:
    """
    # Create the client object (None until the user connects).
    ds_client: DispatchClient = None
    # Configuration info for all known server connections.
    config = load_json('dispatch_client.json', auto_save=False)
    # Main menu.
    main_title = u'Rpc Client'
    main_options = ['Connect', 'Disconnect',
                    '[GW] Status',
                    '[GW] Ticks',
                    '[Strategy] Get Running',
                    '[Strategy] Compare Pos',
                    '[Strategy] Balance Pos',
                    '[Strategy] Add',
                    '[Strategy] Init',
                    '[Strategy] Start',
                    '[Strategy] Stop',
                    '[Strategy] Reload', '[Strategy] Remove',
                    '[Strategy] Save Data', '[Strategy] Save Snapshot', 'Exit']
    option = None
    strategies = ['back']
    gateways = []

    def get_strategies():
        """Fetch and print the strategy list/status from the server."""
        if ds_client is None:
            return []
        if not ds_client.connected:
            return []
        all_strategy_status = ds_client.get_strategy_status()
        if isinstance(all_strategy_status, dict):
            strategy_names = sorted(all_strategy_status.keys())
            for name in strategy_names:
                print(u'{}:{}'.format(name, all_strategy_status.get(name)))
            # 'back' is appended as the menu escape entry.
            return strategy_names + ['back']
        else:
            return []

    def get_all_gateway_status():
        """Fetch the gateway list/status from the server."""
        if ds_client is None:
            return []
        if not ds_client.connected:
            return []
        all_gateway_status = ds_client.get_all_gateway_status()
        return all_gateway_status

    def get_all_ticks():
        """Fetch the latest ticks currently held by the server."""
        if ds_client is None:
            return []
        if not ds_client.connected:
            return []
        all_ticks = ds_client.get_all_ticks()
        ticks_info_list = []
        for tick in all_ticks:
            ticks_info_list.append(
                f'{tick.symbol}.{tick.exchange.value}, price:{tick.last_price}, datetime:{tick.datetime}')
        return ticks_info_list

    def get_status():
        """Return the remote connection status string."""
        if ds_client is None:
            return ''
        else:
            return ds_client.status()

    def get_local_json_file_names():
        """List the *.json files below the current working directory."""
        # NOTE(review): the format string has no '{}' placeholder, so
        # os.getcwd() is never shown -- likely meant 'files:{}'.format(...).
        print('get local json files:'.format(os.getcwd()))
        file_names = []
        for dirpath, dirnames, filenames in os.walk(os.getcwd()):
            for filename in filenames:
                if filename.endswith(".json"):
                    file_names.append(filename)
                else:
                    continue
        return sorted(file_names)

    def add_strategies(file_name):
        """Add strategies defined in a local JSON settings file."""
        print(f'start add strategies from :{file_name}')
        settings = load_json(file_name, auto_save=False)
        if not isinstance(settings, dict):
            print(f'{file_name} is not a dict format')
            return
        print(f'total strategies:{len(settings.keys())}')
        for strategy_name, strategy_conf in settings.items():
            # Skip entries already running or missing required keys.
            if strategy_name in strategies:
                print(f'{strategy_name} already exist in runing list, can not add', file=sys.stderr)
                continue
            if 'class_name' not in strategy_conf:
                continue
            if not ('vt_symbol' in strategy_conf or 'vt_symbols' in strategy_conf):
                continue
            if 'setting' not in strategy_conf:
                continue
            print(f'add strategy:{strategy_conf}')
            # Single-symbol and multi-symbol strategies use different kwargs.
            if 'vt_symbol' in strategy_conf:
                ret, msg = ds_client.add_strategy(
                    class_name=strategy_conf.get('class_name'),
                    strategy_name=strategy_name,
                    vt_symbol=strategy_conf.get('vt_symbol'),
                    setting=strategy_conf.get('setting'),
                    auto_init=strategy_conf.get('auto_init', True),
                    auto_start=strategy_conf.get('auto_start', True)
                )
            else:
                ret, msg = ds_client.add_strategy(
                    class_name=strategy_conf.get('class_name'),
                    strategy_name=strategy_name,
                    vt_symbols=strategy_conf.get('vt_symbols'),
                    setting=strategy_conf.get('setting'),
                    auto_init=strategy_conf.get('auto_init', True),
                    auto_start=strategy_conf.get('auto_start', True)
                )
            if ret:
                print(msg)
            else:
                print(msg, file=sys.stderr)

    # Main interactive loop: show the menu, run one action, repeat.
    while (1):
        if option is None:
            option, index = pick(main_options, main_title + get_status())
        # Exit the client.
        if option == 'Exit':
            if ds_client and ds_client.connected:
                ds_client.close()
            print(u'Good bye\n')
            # NOTE(review): os._exit(0) terminates here; the break below is
            # unreachable.
            os._exit(0)
            break
        # Connect to a remote server.
        elif option == 'Connect':
            if ds_client:
                # NOTE(review): 'Disconnect' below never resets ds_client to
                # None, so reconnecting after a disconnect always lands here.
                print(u'{},please disconnect first'.format(ds_client.status()))
            else:
                title = u'Select the Server to connect'
                server_list = sorted(config.keys())
                server, index = pick(server_list, title)
                if server:
                    conf = config.get(server)
                    ds_client = DispatchClient(server)
                    print(conf)
                    ds_client.start(
                        req_address=conf.get('req_address'),
                        sub_address=conf.get('pub_address')
                    )
                    strategies = get_strategies()
                    if len(strategies) == 0:
                        print('no strategies running')
        # Disconnect from the remote server.
        elif option == 'Disconnect':
            if ds_client.connected:
                ds_client.close()
        # Query gateway status.
        elif option == '[GW] Status':
            gateways = get_all_gateway_status()
            s = json.dumps(gateways, indent=-1)
            print(s)
        elif option == '[GW] Ticks':
            ticks_list = get_all_ticks()
            s = json.dumps(ticks_list, indent=-1)
            print(s)
        elif option == '[GW] Connect':
            # NOTE(review): dead branch -- '[GW] Connect' is not in
            # main_options. The comprehension also iterates the mapping
            # without .items() (would raise ValueError), and the pick()
            # result is never used.
            gateways = get_all_gateway_status()
            un_connect_gateway_name = [k for k, v in gateways if not v.get('con', False)]
            selected = pick(options=sorted(un_connect_gateway_name), title='Please select gateway to connect Enter',
                            multi_select=False)
        # Query running strategies.
        elif option == '[Strategy] Get Running':
            if not ds_client or not ds_client.connected:
                print(u'{},please connect first'.format(get_status()))
            else:
                strategies = get_strategies()
        # Add strategies from local JSON files.
        elif option == '[Strategy] Add':
            if not ds_client or not ds_client.connected:
                print(u'{},please connect first'.format(get_status()))
            else:
                json_files = get_local_json_file_names()
                if len(json_files) == 0:
                    print('no json file to load')
                else:
                    json_files.append('back')
                    selected = pick(
                        options=sorted(json_files),
                        title='Please select settings to add,press Enter',
                        multi_select=True)
                    for file_name, index in selected:
                        if file_name != 'back':
                            add_strategies(file_name)
                    strategies = get_strategies()
        # Stop strategies.
        elif option == '[Strategy] Stop':
            if not ds_client or not ds_client.connected:
                print(u'{},please connect first'.format(get_status()))
            else:
                selected = pick(options=sorted(strategies), title='Please select strategy to stop,press Enter',
                                multi_select=True)
                for strategy_name, index in selected:
                    if strategy_name != 'back':
                        print('start stopping :{}\n'.format(strategy_name))
                        ret = ds_client.stop_strategy(strategy_name)
                        if ret:
                            print('stop success')
                        else:
                            print('stop fail')
        # Initialize strategies.
        elif option == '[Strategy] Init':
            if not ds_client or not ds_client.connected:
                print(u'{},please connect first'.format(get_status()))
            else:
                selected = pick(options=sorted(strategies),
                                title='Please use Space select multi strategies to init.press Enter',
                                multi_select=True)
                for strategy_name, index in selected:
                    if strategy_name != 'back':
                        print('start init :{}\n'.format(strategy_name))
                        # NOTE(review): ds_client IS the RPC client; it has
                        # no .rpc_client attribute, so this raises
                        # AttributeError -- probably should be
                        # ds_client.init_strategy(...).
                        ret, msg = ds_client.rpc_client.init_strategy(strategy_name)
                        if ret:
                            print(f'init success:{msg}')
                        else:
                            print(f'init fail:{msg}')
        # Start strategies.
        elif option == '[Strategy] Start':
            if not ds_client or not ds_client.connected:
                print(u'{},please connect first'.format(get_status()))
            else:
                selected = pick(options=sorted(strategies),
                                title='Please use Space select multi strategies to start.press Enter',
                                multi_select=True)
                for strategy_name, index in selected:
                    if strategy_name != 'back':
                        print('start starting :{}\n'.format(strategy_name))
                        # NOTE(review): same .rpc_client bug as in Init.
                        ret, msg = ds_client.rpc_client.start_strategy(strategy_name)
                        if ret:
                            print(f'start success:{msg}')
                        else:
                            print(f'start fail:{msg}')
        # Reload strategies.
        elif option == '[Strategy] Reload':
            if not ds_client or not ds_client.connected:
                print(u'{},please connect first'.format(get_status()))
            else:
                selected = pick(options=sorted(strategies),
                                title='Please use Space select multi strategies to reload.press Enter',
                                multi_select=True)
                for strategy_name, index in selected:
                    if strategy_name != 'back':
                        print('start reloading :{}\n'.format(strategy_name))
                        ret, msg = ds_client.reload_strategy(strategy_name)
                        if ret:
                            print(msg)
                        else:
                            print(msg, file=sys.stderr)
        # Remove strategies.
        elif option == '[Strategy] Remove':
            if not ds_client or not ds_client.connected:
                print(u'{},please connect first'.format(get_status()))
            else:
                has_changed = False
                selected = pick(options=sorted(strategies),
                                title='Please use Space select strategy to dispatch out.press Enter',
                                multi_select=True)
                for strategy_name, index in selected:
                    if strategy_name != 'back':
                        print('start dispatching out :{}\n'.format(strategy_name))
                        ret, msg = ds_client.remove_strategy(strategy_name)
                        if ret:
                            print(msg)
                            has_changed = True
                        else:
                            print(msg, file=sys.stderr)
                if has_changed:
                    strategies = get_strategies()
        # Save strategy data.
        elif option == '[Strategy] Save Data':
            if not ds_client or not ds_client.connected:
                print(u'{},please connect first'.format(get_status()))
            else:
                has_changed = False
                selected = pick(options=['ALL'] + sorted(strategies),
                                title='Please use Space select strategy to save data.press Enter',
                                multi_select=True)
                # NOTE(review): selected holds (name, index) tuples, so
                # 'ALL' in selected is never True; and iterating ['ALL']
                # below would try to unpack the 3-char string. The ALL
                # shortcut therefore never works as intended.
                if 'ALL' in selected and len(selected) > 1:
                    selected = ['ALL']
                for strategy_name, index in selected:
                    if strategy_name != 'back':
                        print('start save :{}\n'.format(strategy_name))
                        ret, msg = ds_client.save_strategy_data(strategy_name)
                        if ret:
                            print(msg)
                            has_changed = True
                        else:
                            print(msg, file=sys.stderr)
                if has_changed:
                    strategies = get_strategies()
        # Save strategy bar-slice snapshots.
        elif option == '[Strategy] Save Snapshot':
            if not ds_client or not ds_client.connected:
                print(u'{},please connect first'.format(get_status()))
            else:
                has_changed = False
                selected = pick(options=['ALL'] + sorted(strategies),
                                title='Please use Space select strategy to save bars snapshot.press Enter',
                                multi_select=True)
                # NOTE(review): same broken ALL shortcut as in Save Data.
                if 'ALL' in selected and len(selected) > 1:
                    selected = ['ALL']
                for strategy_name, index in selected:
                    if strategy_name != 'back':
                        print('start save bars snapshot :{}\n'.format(strategy_name))
                        ret, msg = ds_client.save_strategy_snapshot(strategy_name)
                        if ret:
                            print(msg)
                            has_changed = True
                        else:
                            print(msg, file=sys.stderr)
                if has_changed:
                    strategies = get_strategies()
        # Compare positions.
        elif option == '[Strategy] Compare Pos':
            if not ds_client or not ds_client.connected:
                print(u'{},please connect first'.format(get_status()))
            else:
                ret, msg = ds_client.compare_pos()
                if ret:
                    print(msg)
                else:
                    print(msg, file=sys.stderr)
        # Compare positions, then immediately auto-balance them.
        elif option == '[Strategy] Balance Pos':
            if not ds_client or not ds_client.connected:
                print(u'{},please connect first'.format(get_status()))
            else:
                ret, msg = ds_client.compare_pos(strategy_pos_list=[], auto_balance=True)
                if ret:
                    print(msg)
                else:
                    print(msg, file=sys.stderr)
        # Wait for a keypress, then redisplay the menu.
        _input = input('press any key')
        option = None
        continue
| StarcoderdataPython |
3285663 | '''
Unittests/Chemical/Proteins/peptide_list
________________________________________
Test suite for peptide containers.
:copyright: (c) 2015 The Regents of the University of California.
:license: GNU GPL, see licenses/GNU GPLv3.txt for more details.
'''
# load modules/submodules
import random
import unittest
from xldlib.chemical.proteins import peptide, peptide_list
# ITEMS
# -----
# One peptide expressed in each accepted input form: Peptide instance,
# mapping, list, and tuple of (sequence, start, id).
PEPTIDES = [
    peptide.Peptide('EIETEEK', 1, 0),
    {'sequence': 'EIETEEK', 'start': 1, 'id': 0},
    ['EIETEEK', 1, 0],
    ('EIETEEK', 1, 0),
]

# Peptides listed in ascending protein-position order; shuffled and
# re-sorted by the sorting test.
SORTING = [
    peptide.Peptide('EIETEEK', 1, 0),
    peptide.Peptide('NLHLEEIFCSIK', 8, 0),
    peptide.Peptide('VQLDAYEPADCELYR', 20, 0),
    peptide.Peptide('DK', 35, 0),
    peptide.Peptide('AELK', 37, 0),
    peptide.Peptide('CAFK', 41, 0),
]

# Expected `start` values of SORTING after sorting.
POSITIONS = [1, 8, 20, 35, 37, 41]
# CASES
# -----
class TestPeptides(unittest.TestCase):
    '''Test list additions and sorting for the `Peptides` container'''

    def test_items(self):
        '''Test `Peptide` item recognition from heterogeneous data'''
        peptides = peptide_list.Peptides()
        for item in PEPTIDES:
            peptides.append(item)
        self.assertIsInstance(peptides[0], peptide.Peptide)
        # assertEquals is a deprecated alias of assertEqual
        self.assertEqual(peptides[0], PEPTIDES[0])
        peptides.clear()

    def test_sorting(self):
        '''Test peptides are sorted sequentially by protein position'''
        peptides = peptide_list.Peptides()
        # Shuffle a copy so the module-level SORTING fixture is not
        # mutated as shared state between test runs.
        items = list(SORTING)
        random.shuffle(items)
        for item in items:
            peptides.append(item)
        peptides.sort()
        self.assertEqual([i.start for i in peptides], POSITIONS)
# TESTS
# -----
def add_tests(suite):
    '''Register the `TestPeptides` cases on the given unittest suite'''
    for case_name in ('test_items', 'test_sorting'):
        suite.addTest(TestPeptides(case_name))
| StarcoderdataPython |
114977 | <reponame>joaopfonseca/research-learn
"""
=============================
Model search cross-validation
=============================
This example illustrates the usage of model
search cross-validation class.
"""
# Author: <NAME> <<EMAIL>>
# Licence: MIT
import matplotlib.pyplot as plt
import pandas as pd
from sklearn.base import clone
from sklearn.datasets import make_classification
from sklearn.preprocessing import MinMaxScaler
from sklearn.ensemble import GradientBoostingClassifier
from sklearn.neighbors import KNeighborsClassifier
from sklearn.pipeline import make_pipeline
from rlearn.model_selection import ModelSearchCV
print(__doc__)
RND_SEED = 0

###############################################################################
# Generate classification data
###############################################################################

###############################################################################
# We are generating a binary class data set, using
# ``make_classification`` from scikit-learn.

# BUG FIX: the original redundant chained assignment
# ``X, y = X, y = make_classification(...)`` is simplified.
X, y = make_classification(n_classes=2, random_state=RND_SEED)

###############################################################################
# Using model search
###############################################################################

###############################################################################
# Model search cross-validation class allows to search the model and
# hyper-parameters in a unified way.

classifiers = [
    ('knn', make_pipeline(MinMaxScaler(), KNeighborsClassifier())),
    ('gbc', GradientBoostingClassifier(random_state=RND_SEED)),
]
param_grids = [
    {
        'knn__minmaxscaler__feature_range': [(0, 1), (0, 0.5)],
        # BUG FIX: KNeighborsClassifier's parameter is ``n_neighbors``;
        # the original ``k_neighbors`` would raise an invalid-parameter
        # error during the search.
        'knn__kneighborsclassifier__n_neighbors': [2, 4, 5],
    },
    {'gbc__gradientboostingclassifier__max_depth': [3, 4, 5]},
]
model_search_cv = ModelSearchCV(classifiers, param_grids, cv=5, scoring='accuracy')
| StarcoderdataPython |
1792257 | #!/usr/bin/env python3
import os
import html
import signal
from chwall.gui.shared import ChwallGui
from chwall.wallpaper import current_wallpaper_info
from chwall.utils import get_binary_path, reset_pending_list
import gi
gi.require_version("Gtk", "3.0")
from gi.repository import Gdk, GdkPixbuf, GLib, Gtk # noqa: E402
import gettext # noqa: E402
# Uncomment the following line during development.
# Please, be cautious to NOT commit the following line uncommented.
# gettext.bindtextdomain("chwall", "./locale")
# Bind the "chwall" gettext domain so _() resolves the app's translations.
gettext.textdomain("chwall")
_ = gettext.gettext
class ChwallApp(ChwallGui):
    def __init__(self):
        """Build the main Chwall window: header bar, reset notification
        banner, wallpaper preview, and the playback/favorite action bar."""
        super().__init__()
        self.app = Gtk.Window(title="Chwall")
        self.app.set_icon_name("chwall")
        self.app.set_position(Gtk.WindowPosition.CENTER_ALWAYS)
        self.app.set_resizable(False)
        self.app.connect("destroy", self.kthxbye)

        # Header bar with a preferences toggle on the right.
        hb = Gtk.HeaderBar()
        hb.set_show_close_button(True)
        hb.props.title = "Chwall"
        button = Gtk.ToggleButton()
        button.set_image(Gtk.Image.new_from_icon_name(
            "open-menu-symbolic", Gtk.IconSize.BUTTON))
        button.set_tooltip_text(_("Preferences"))
        button.connect("toggled", self.show_main_menu)
        hb.pack_end(button)
        self.app.set_titlebar(hb)

        app_box = Gtk.Box(orientation=Gtk.Orientation.VERTICAL)

        # Warning banner shown while the pending wallpaper list rebuilds.
        self.notif_reset = Gtk.InfoBar()
        self.notif_reset.set_message_type(Gtk.MessageType.WARNING)
        notif_box = self.notif_reset.get_content_area()
        notif_box.add(
            Gtk.Label(label=_("Wallpapers list may be built again. It "
                              "may take a long time if you have a lot "
                              "of sources enabled. Please be patient.")))
        app_box.pack_start(self.notif_reset, False, False, 0)

        # Central wallpaper preview.
        self.wallpaper = Gtk.Image()
        app_box.pack_start(self.wallpaper, True, True, 0)

        # Bottom action bar: previous / play-pause / next / stop controls.
        control_box = Gtk.ActionBar()
        button = Gtk.Button.new_from_icon_name(
            "media-skip-backward-symbolic", Gtk.IconSize.LARGE_TOOLBAR)
        button.set_tooltip_text(_("Previous wallpaper"))
        button.connect("clicked", self.on_change_wallpaper, True)
        control_box.pack_start(button)
        self.daemon_play_pause_button = Gtk.Button.new()
        self.decorate_play_pause_button(True)
        self.daemon_play_pause_button.connect(
            "clicked", self.on_play_pause_clicked)
        control_box.pack_start(self.daemon_play_pause_button)
        button = Gtk.Button.new_from_icon_name(
            "media-skip-forward-symbolic", Gtk.IconSize.LARGE_TOOLBAR)
        button.set_tooltip_text(_("Next wallpaper"))
        button.connect("clicked", self.on_change_wallpaper)
        control_box.pack_start(button)
        button = Gtk.Separator()
        control_box.pack_start(button)
        button = Gtk.Button.new_from_icon_name(
            "media-playback-stop-symbolic", Gtk.IconSize.LARGE_TOOLBAR)
        button.set_tooltip_text(_("Stop daemon and erase pending list"))
        button.connect("clicked", self.on_stop_clicked)
        control_box.pack_start(button)
        button = Gtk.Separator()
        control_box.pack_start(button)

        # Favorite button; sensitivity and handler are set in
        # update_wall_box() depending on the current wallpaper.
        self.favorite_button = Gtk.Button.new_from_icon_name(
            "bookmark-new", Gtk.IconSize.LARGE_TOOLBAR)
        control_box.pack_start(self.favorite_button)

        # Centered label describing (and linking to) the current wallpaper.
        self.walldesc = Gtk.Label(
            hexpand=True, halign=Gtk.Align.CENTER,
            justify=Gtk.Justification.CENTER,
            wrap=True, single_line_mode=True
        )
        self.walldesc.set_markup(
            "<a href=\"https://git.umaneti.net/chwall/\">Chwall</a>"
        )
        control_box.set_center_widget(self.walldesc)

        # Blacklist button on the right side of the action bar.
        button = Gtk.Button.new_from_icon_name(
            "edit-delete", Gtk.IconSize.LARGE_TOOLBAR)
        button.set_tooltip_text(_("Blacklist"))
        button.connect("clicked", self.on_blacklist_wallpaper)
        control_box.pack_end(button)

        app_box.pack_end(control_box, False, False, 0)
        self.app.add(app_box)
        self.app.show_all()
        self.update_wall_box()
        # Refresh the preview when the daemon signals a wallpaper change.
        signal.signal(signal.SIGUSR1, self.update_wall_box)
def update_wall_box(self, _signo=None, _stack_frame=None):
    """Refresh the window with the current wallpaper's preview and metadata.

    Also installed as the SIGUSR1 handler (see the end of the constructor),
    so other chwall components can poke the window after a wallpaper
    change; the extra arguments swallow the (signo, stack_frame) pair a
    signal handler receives.
    """
    self.notif_reset.set_revealed(False)
    self.notif_reset.hide()
    wallinfo = current_wallpaper_info()
    if wallinfo["type"] is None:
        # Wallpaper was set outside of chwall: show a placeholder icon
        # and disable the favorite button.
        self.walldesc.set_markup("<i>{}</i>".format(
            _("Current wallpaper is not managed by Chwall")))
        self.wallpaper.set_from_icon_name(
            "preferences-desktop-wallpaper-symbolic", Gtk.IconSize.DIALOG)
        self.favorite_button.set_sensitive(False)
        self.favorite_button.set_tooltip_text(
            _("Current wallpaper is not managed by Chwall"))
        return
    try:
        if self.is_current_wall_favorite(wallinfo):
            self.favorite_button.set_sensitive(False)
            self.favorite_button.set_tooltip_text(_("Already a favorite"))
        else:
            self.favorite_button.set_sensitive(True)
            self.favorite_button.set_tooltip_text(_("Save as favorite"))
            # NOTE(review): connect() runs on every refresh, so the
            # handler may end up attached several times — confirm intended.
            self.favorite_button.connect(
                "clicked", self.on_favorite_wallpaper)
    except PermissionError:
        self.favorite_button.set_sensitive(False)
        self.favorite_button.set_tooltip_text(
            _("Error accessing the favorites folder"))
    # NOTE(review): the replace() below is a no-op as written; it was
    # probably meant to escape ampersands for Pango markup — verify.
    label_str = "<a href=\"{link}\">{text}</a>".format(
        link=html.escape(wallinfo["remote-uri"]),
        text=wallinfo["description"].replace("&", "&"))
    self.walldesc.set_markup(label_str)
    self.walldesc.grab_focus()
    # Show it now to reserve correct size
    self.walldesc.show()
    # Now we can use this width to display the wallpaper itself
    size_data = self.app.get_preferred_size()
    # Get `natural_size`
    width = size_data[1].width
    if width < 800:
        width = 800
    try:
        pixbuf = GdkPixbuf.Pixbuf.new_from_file_at_scale(
            wallinfo["local-picture-path"], width, 600, True)
        self.wallpaper.set_from_pixbuf(pixbuf)
    except gi.repository.GLib.Error:
        # Picture file missing or unreadable: fall back to a stock icon.
        self.wallpaper.set_from_icon_name(
            "image-missing", Gtk.IconSize.DIALOG)
    self.wallpaper.show()
    self.app.resize(width, size_data[1].height)
def show_main_menu(self, widget):
    """Build and pop up the main menu anchored under `widget` (a toggle)."""
    if not widget.get_active():
        # Only react when the toggle switches to the active state.
        return
    menu = Gtk.Menu()
    dinfo = self.daemon_info()
    if dinfo["next-change"] != -1:
        # Informational, non-clickable entry showing the next change time.
        item = Gtk.MenuItem.new_with_label(
            dinfo["next-change-label"])
        item.set_sensitive(False)
        menu.append(item)
        item = Gtk.SeparatorMenuItem()
        menu.append(item)
    item = Gtk.MenuItem.new_with_label(
        _("Display notification icon"))
    if self.is_chwall_component_started("icon"):
        # Icon component already running: keep the entry but grey it out.
        item.set_sensitive(False)
    else:
        item.connect("activate", self.run_chwall_component, "icon")
    menu.append(item)
    item = Gtk.MenuItem.new_with_label(_("Preferences"))
    item.connect("activate", self.show_preferences_dialog)
    menu.append(item)
    item = Gtk.MenuItem.new_with_label(_("About Chwall"))
    item.connect("activate", self.show_about_dialog)
    menu.append(item)
    menu.show_all()
    # Un-press the toggle button whenever the menu is dismissed.
    menu.connect("hide", lambda _w, b: b.set_active(False), widget)
    menu.popup_at_widget(widget, Gdk.Gravity.SOUTH_WEST,
                         Gdk.Gravity.NORTH_WEST, None)
def decorate_play_pause_button(self, startup=False):
    """Sync the play/pause button icon+tooltip with the daemon state.

    Returns the state string the button now represents ("started" or
    "stopped").
    """
    dinfo = self.daemon_info()
    # At startup we need to draw the real state of the daemon, but later,
    # this function is called *before* the state change, thus it must
    # reflect the future state of the daemon
    if startup:
        current_state = dinfo["daemon-state"]
    elif dinfo["daemon-state"] == "started":
        current_state = "stopped"
    else:
        current_state = "started"
    if current_state == "started":
        self.daemon_play_pause_button.set_image(
            Gtk.Image.new_from_icon_name("media-playback-pause-symbolic",
                                         Gtk.IconSize.LARGE_TOOLBAR))
        self.daemon_play_pause_button.set_tooltip_text(_("Stop daemon"))
    else:
        self.daemon_play_pause_button.set_image(
            Gtk.Image.new_from_icon_name("media-playback-start-symbolic",
                                         Gtk.IconSize.LARGE_TOOLBAR))
        self.daemon_play_pause_button.set_tooltip_text(_("Start daemon"))
    return current_state
def on_play_pause_clicked(self, widget):
    """Toggle the daemon: stop it if running, start it otherwise."""
    # When called after a click, this method return the future state. Then
    # we should actually kill the daemon if the *current_state* is
    # *stopped*.
    if self.decorate_play_pause_button() == "stopped":
        self.stop_daemon()
        return
    # Else we should start the server
    self.notif_reset.show()
    self.notif_reset.set_revealed(True)
    self.run_chwall_component(widget, "daemon")
def on_stop_clicked(self, widget):
    """Stop the daemon, clear the pending wallpaper list, redraw button."""
    self.stop_daemon()
    reset_pending_list()
    # startup=True forces drawing the *actual* (now stopped) daemon state.
    self.decorate_play_pause_button(True)
def _build_translations_for_desktop_file(localedir):
    """Collect localized desktop-entry lines for every available locale.

    Scans `localedir` for compiled `chwall.mo` catalogs and returns a dict
    of ready-to-write `Key[lang]=value` lines, keyed by desktop-entry
    field ("gname", "comment", and the three action names).
    """
    translations = {
        "gname": [],
        "comment": [],
        "next_name": [],
        "previous_name": [],
        "blacklist_name": [],
    }
    for lang in sorted(os.listdir(localedir)):
        # Skip the template and the source language.
        if lang in ("chwall.pot", "en"):
            continue
        mo_path = os.path.join(localedir, lang, "LC_MESSAGES", "chwall.mo")
        if not os.path.exists(mo_path):
            continue
        catalog = gettext.translation(
            "chwall", localedir=localedir,
            languages=[lang])
        # NOTE: install() mutates builtins `_` as in the original code.
        catalog.install()
        _ = catalog.gettext
        translations["gname"].append(
            "GenericName[{lang}]={key}".format(
                lang=lang, key=_("Wallpaper Changer")))
        translations["comment"].append(
            "Comment[{lang}]={key}".format(
                lang=lang,
                key=_("Main window of the Chwall wallpaper changer")))
        translations["next_name"].append(
            "Name[{lang}]={key}".format(
                lang=lang,
                key=_("Next wallpaper")))
        translations["previous_name"].append(
            "Name[{lang}]={key}".format(
                lang=lang,
                key=_("Previous wallpaper")))
        translations["blacklist_name"].append(
            "Name[{lang}]={key}".format(
                lang=lang,
                key=_("Blacklist")))
    return translations
def _build_action_block(name, lng_attrs):
    """Return the `[Desktop Action ...]` lines for one chwall action.

    `name` is the lowercase action ("next", "previous", "blacklist");
    localized Name lines come from `lng_attrs[name + "_name"]`.
    """
    label = name.capitalize()
    command = get_binary_path("client", "xdg", name)
    lines = [
        "",
        "[Desktop Action {}]".format(label),
        "Exec={}".format(command),
        "Name={} wallpaper".format(label),
    ]
    lines.extend(lng_attrs[name + "_name"])
    return lines
def generate_desktop_file(localedir="./locale", out="chwall-app.desktop"):
    """Generate the XDG desktop entry for the chwall app.

    Writes the file to `out`, or prints it to stdout when out == "print".
    """
    lng_attrs = _build_translations_for_desktop_file(localedir)
    df_content = ["[Desktop Entry]"]
    df_content.append("Name=Chwall")
    df_content.append("GenericName=Wallpaper Changer")
    for line in lng_attrs["gname"]:
        df_content.append(line)
    df_content.append("Comment=Main window of the Chwall wallpaper changer")
    for line in lng_attrs["comment"]:
        df_content.append(line)
    df_content = "\n".join(df_content)
    # Desktop-file keys must start at column 0, hence the unindented literal.
    df_content += """
Exec={app_exec}
Icon=chwall
Terminal=false
Type=Application
Categories=GTK;GNOME;Utility;
StartupNotify=false
Actions=Next;Previous;Blacklist;
""".format(app_exec=get_binary_path("app", "xdg"))
    actions = _build_action_block("next", lng_attrs) \
        + _build_action_block("previous", lng_attrs) \
        + _build_action_block("blacklist", lng_attrs)
    df_content += "\n".join(actions)
    if out == "print":
        print(df_content)
    else:
        with open(out, "w") as f:
            f.write(df_content)
def start_app():
    """Wire SIGTERM/SIGINT to the GTK main loop quit, then run the app."""
    # Install signal handlers
    GLib.unix_signal_add(GLib.PRIORITY_DEFAULT, signal.SIGTERM,
                         Gtk.main_quit, None)
    GLib.unix_signal_add(GLib.PRIORITY_DEFAULT, signal.SIGINT,
                         Gtk.main_quit, None)
    ChwallApp()
    Gtk.main()


if __name__ == "__main__":
    start_app()
| StarcoderdataPython |
1603891 | <reponame>ahrvoje/colorio
# -*- coding: utf-8 -*-
#
import matplotlib.pyplot as plt
import numpy
import pytest
import colorio
@pytest.mark.parametrize('illuminant,decimals,values', [
    (colorio.illuminants.a(5e-9), 5, [0.93048, 1.12821, 1.35769]),
    (colorio.illuminants.d50(), 3, [0.019, 2.051, 7.778]),
    (colorio.illuminants.d55(), 3, [0.024, 2.072, 11.224]),
    (colorio.illuminants.d65(), 4, [0.03410, 3.2945, 20.2360]),
    # 5.132 is different from the standard; 5.133 is listed there. This is a
    # false rounding.
    (colorio.illuminants.d75(), 3, [0.043, 5.132, 29.808]),
    ])
def test_values(illuminant, decimals, values):
    """Spot-check the first three tabulated values of each illuminant."""
    _, data = illuminant
    rounded = numpy.around(data, decimals=decimals)
    for position in range(3):
        assert rounded[position] == values[position]
    return
@pytest.mark.parametrize('illuminant,ref,tol', [
    (colorio.illuminants.d65(), [0.95048974, 1.0, 1.08892197], 1.0e-8),
    (colorio.illuminants.e(), [1.00015018, 1.0, 1.00066598], 1.0e-8),
    (colorio.illuminants.f2(), [0.99146841, 1.0, 0.67318498], 1.0e-8),
    ])
def test_white_point(illuminant, ref, tol):
    """White point must match the reference (scaled by 100) within `tol`."""
    values = colorio.illuminants.white_point(illuminant)
    expected = 100 * numpy.array(ref)
    assert numpy.all(abs(values - expected) < tol * abs(expected))
    return
def test_show():
    """Plot the D65 illuminant spectrum (interactive sanity check)."""
    lmbda, data = colorio.illuminants.d65()
    plt.plot(lmbda, data)
    # for T in [1000, 2000, 3000, 4000, 5000, 1000]:
    #     lmbda, data = colorio.illuminants.planckian_radiator(T)
    #     plt.plot(lmbda, data)
    # The `ymin` keyword was removed in matplotlib 3.0; `bottom` is the
    # supported spelling.
    plt.ylim(bottom=0)
    plt.show()
    return
def test_spectrum_to_xyz100():
    # Smoke test: the D65 spectrum integrates against the CIE 1931 2-degree
    # observer without raising; the numeric result is not checked here.
    spectrum = colorio.illuminants.d65()
    observer = colorio.observers.cie_1931_2()
    colorio.illuminants.spectrum_to_xyz100(spectrum, observer)
    return


if __name__ == '__main__':
    # test_white_point()
    test_show()
| StarcoderdataPython |
1779773 | <reponame>walterjgsp/meaning
class EvaluationResult:
    """Test-set evaluation metrics for a single individual.

    `accuracy_test` and `f1_test` default to -1.0, which appears to act as
    a "not evaluated yet" sentinel — confirm with callers.
    """

    def __init__(self, individual_id: str, accuracy_test: float = -1.0, f1_test: float = -1.0):
        self.individual_id = individual_id
        self.accuracy_test = accuracy_test
        self.f1_test = f1_test

    def __repr__(self) -> str:
        # Debug-friendly representation mirroring the constructor call.
        return "EvaluationResult(individual_id={!r}, accuracy_test={!r}, f1_test={!r})".format(
            self.individual_id, self.accuracy_test, self.f1_test)
| StarcoderdataPython |
def part1(startTime, buses) -> int:
    """Advent of Code 2020 day 13 part 1.

    Find the bus (smallest wait after `startTime`) among the int entries of
    `buses` ("x" placeholders are ignored) and return bus_id * wait_minutes.

    Rewritten to use the `min` builtin instead of manual first-iteration
    tracking, which also avoids conditionally-unbound locals when the list
    is empty-checked upstream.
    """
    from math import ceil

    active_buses = [bus for bus in buses if bus != "x"]

    def departure(bus):
        # First multiple of `bus` at or after startTime.
        return ceil(startTime / bus) * bus

    best_bus = min(active_buses, key=departure)
    return best_bus * (departure(best_bus) - startTime)
def part2(buses) -> int:
    """Advent of Code 2020 day 13 part 2.

    Return the earliest timestamp t such that each bus at index i of
    `buses` departs at t + i (entries equal to "x" are unconstrained).

    This is a Chinese-Remainder-Theorem problem; solved here by sieving
    with a pure-stdlib implementation, replacing the previous sympy
    dependency while returning the same (smallest non-negative) solution.
    """
    from math import gcd

    timestamp = 0
    step = 1  # lcm of all bus periods processed so far
    for offset, bus in enumerate(buses):
        if bus == "x":
            continue
        # Advance in lcm-sized steps until this bus's constraint holds;
        # earlier constraints stay satisfied because step is their lcm.
        while (timestamp + offset) % bus != 0:
            timestamp += step
        step = step * bus // gcd(step, bus)
    return timestamp
# Driver: read the puzzle input (first line: earliest departure time,
# second line: comma-separated bus ids with "x" placeholders).
# NOTE(review): the file handle is never closed and `input` shadows the
# builtin of the same name.
f = open("input.txt", "r")
input = f.read().splitlines()
startTime = int(input[0])
buses = input[1].split(",")
# Convert numeric entries to int; leave the "x" placeholders as strings.
for i in range(len(buses)):
    try:
        buses[i] = int(buses[i])
    except ValueError:
        pass
print(part1(startTime, buses))  # 3789
print(part2(buses))  # 667437230788118
3318553 | from PIL import Image
from django import forms
from django.core.files import File
from .models import Photo
class PhotoForm(forms.ModelForm):
    """Photo upload form that crops and resizes the image on save.

    The hidden x/y/width/height fields are presumably filled by a
    client-side cropping widget — confirm against the template JS.
    """
    x = forms.FloatField(widget=forms.HiddenInput())
    y = forms.FloatField(widget=forms.HiddenInput())
    image_width = forms.FloatField(widget=forms.HiddenInput())
    image_height = forms.FloatField(widget=forms.HiddenInput())

    class Meta:
        model = Photo
        fields = ('title', 'image', 'x', 'y', 'image_height', 'image_width')

    def save(self, commit=True):
        """Crop the uploaded image to the selected box, resize to 200x200,
        overwrite the stored file, then save the model instance.

        Accepts Django's conventional ``commit`` argument (previously the
        override silently ignored it and always committed).
        """
        photo = super().save(commit=False)
        x = self.cleaned_data.get('x')
        y = self.cleaned_data.get('y')
        w = self.cleaned_data.get('image_width')
        h = self.cleaned_data.get('image_height')
        image = Image.open(photo.image)
        cropped_image = image.crop((x, y, w + x, h + y))
        # Image.ANTIALIAS was removed in Pillow 10; LANCZOS is the same filter.
        resized_image = cropped_image.resize((200, 200), Image.LANCZOS)
        resized_image.save(photo.image.path)
        return super().save(commit=commit)
| StarcoderdataPython |
1761644 | <reponame>pygeonburger/server-stats
# Server stats monitor by Pigeonburger
# https://github.com/pigeonburger/server-stats
import psutil, json
from typing import List, Union
# THIS IS JUST A THING THAT ACCURATELY CONVERTS BYTES TO MB, GB, TB ETC - THE ACTUAL SERVER STATS PART IS BELOW THIS CLASS
# This method of accurately converting bytes to a human-readable string was taken from https://stackoverflow.com/a/63839503
class RenderBytes:
    """Format a raw byte count as a human-readable string.

    Binary units (KiB, MiB, ...) by default, metric (kB, MB, ...) when
    ``metric=True``; ``precision`` selects 0-3 decimal places.
    """

    METRIC_LABELS: List[str] = ["B", "kB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB"]
    BINARY_LABELS: List[str] = ["B", "KiB", "MiB", "GiB", "TiB", "PiB", "EiB", "ZiB", "YiB"]
    PRECISION_OFFSETS: List[float] = [0.5, 0.05, 0.005, 0.0005]
    PRECISION_FORMATS: List[str] = ["{}{:.0f} {}", "{}{:.1f} {}", "{}{:.2f} {}", "{}{:.3f} {}"]

    @staticmethod
    def format(num: Union[int, float], metric: bool = False, precision: int = 1) -> str:
        assert isinstance(num, (int, float)), "num must be an int or float"
        assert isinstance(metric, bool), "metric must be a bool"
        assert isinstance(precision, int) and precision >= 0 and precision <= 3, "precision must be an int (range 0-3)"

        labels = RenderBytes.METRIC_LABELS if metric else RenderBytes.BINARY_LABELS
        step = 1000 if metric else 1024
        # The offset makes a value that would *round up* to the next unit
        # (e.g. 1023.96 KiB at one decimal) switch units instead.
        threshold = step - RenderBytes.PRECISION_OFFSETS[precision]

        sign = "-" if num < 0 else ""
        value = abs(num)
        for unit in labels[:-1]:
            if value < threshold:
                break
            value /= step
        else:
            # Larger than the biggest unit: stop scaling at the last label.
            unit = labels[-1]
        return RenderBytes.PRECISION_FORMATS[precision].format(sign, value, unit)
## THIS IS THE ACTUAL SERVER STATS PART ##
# Main loop: sample the machine roughly every 2 seconds (the CPU
# measurement interval provides the pacing) and publish to server.json.
while True:
    with open("server.json", 'r+') as server_stats:  # The output of this program is written to server.json by default.
        # Check CPU usage by analyzing for 2 seconds
        cpuper = psutil.cpu_percent(interval=2)
        # Get percentage of RAM in use
        totalmem = str(psutil.virtual_memory().percent)
        # Check how much total storage you have, and how much is taken up. For Windows, change it from '/' to 'C:\\'
        store = psutil.disk_usage('/')
        totalstore = RenderBytes.format(store.total, metric=True, precision=2)
        usedstore = RenderBytes.format(store.used, metric=True, precision=2)
        # Get CPU temperature (this does not work universally, you will need to find the name of your CPU temp sensor and replace it here)
        temperature = psutil.sensors_temperatures()['k10temp'][0].current
        cputemp = str(round(temperature, 1))+"°C"
        cputemp_butforamericans = str(round((temperature * 1.8) + 32, 1))+"°F"
        # Get CPU cooler fan speed (again, this needs to be adjusted like CPU temperature)
        fanspeed = str(psutil.sensors_fans()['nct6779'][1].current)+"RPM"
        cpu = f"{cpuper}%"
        ram = f'{totalmem}%'
        storage = f'{usedstore} of {totalstore}'
        # Write the data to a JSON file
        data = json.load(server_stats)
        data['cpu'] = cpu
        data['ram'] = ram
        data['storage'] = storage
        data['temperature'] = cputemp
        data['temperature_but_its_for_the_americans_instead'] = cputemp_butforamericans
        data['fan'] = fanspeed
        # Rewind, overwrite, and truncate so a shrinking JSON payload
        # doesn't leave stale trailing bytes in the file.
        server_stats.seek(0)
        json.dump(data, server_stats)
        server_stats.truncate()
| StarcoderdataPython |
2279 | <gh_stars>100-1000
import numpy as np
import unittest
from pydlm.modeler.trends import trend
from pydlm.modeler.seasonality import seasonality
from pydlm.modeler.builder import builder
from pydlm.base.kalmanFilter import kalmanFilter
class testKalmanFilter(unittest.TestCase):
    """Unit tests for pydlm's kalmanFilter, checked against hand-computed
    filtering/smoothing values for trivially small trend/seasonality models."""

    def setUp(self):
        # Filters at the discount extremes: 1 (no forgetting), ~0 (forget
        # everything), and a two-component variant with discount [1, 1].
        self.kf1 = kalmanFilter(discount=[1])
        self.kf0 = kalmanFilter(discount=[1e-10])
        self.kf11 = kalmanFilter(discount=[1, 1])
        self.trend0 = trend(degree=0, discount=1, w=1.0)
        self.trend0_90 = trend(degree=0, discount=0.9, w=1.0)
        self.trend0_98 = trend(degree=0, discount=0.98, w=1.0, name='a')
        self.trend1 = trend(degree=1, discount=1, w=1.0)

    def testForwardFilter(self):
        """One-step filtering on a constant-trend model at both discount extremes."""
        dlm = builder()
        dlm.add(self.trend0)
        dlm.initialize()
        self.kf1.predict(dlm.model)
        self.assertAlmostEqual(dlm.model.prediction.obs, 0)
        # the prior on the mean is zero, but observe 1, with
        # discount = 1, one should expect the filterd mean to be 0.5
        self.kf1.forwardFilter(dlm.model, 1)
        self.assertAlmostEqual(dlm.model.obs, 0.5)
        self.assertAlmostEqual(dlm.model.prediction.obs, 0)
        self.assertAlmostEqual(dlm.model.sysVar, 0.375)
        self.kf1.predict(dlm.model)
        self.assertAlmostEqual(dlm.model.obs, 0.5)
        self.assertAlmostEqual(dlm.model.prediction.obs, 0.5)
        dlm.initialize()
        self.kf0.predict(dlm.model)
        self.assertAlmostEqual(dlm.model.prediction.obs, 0)
        # the prior on the mean is zero, but observe 1, with discount = 0
        # one should expect the filtered mean close to 1
        self.kf0.forwardFilter(dlm.model, 1)
        self.assertAlmostEqual(dlm.model.obs[0, 0], 1)
        self.assertAlmostEqual(dlm.model.prediction.obs[0, 0], 0)
        self.assertAlmostEqual(dlm.model.sysVar[0, 0], 0.5)
        self.kf0.predict(dlm.model)
        self.assertAlmostEqual(dlm.model.obs[0, 0], 1)
        self.assertAlmostEqual(dlm.model.prediction.obs[0, 0], 1)

    def testForwardFilterMultiDim(self):
        """Filtering with a period-2 seasonality: states mirror each other."""
        dlm = builder()
        dlm.add(seasonality(period=2, discount=1, w=1.0))
        dlm.initialize()
        self.kf11.forwardFilter(dlm.model, 1)
        self.assertAlmostEqual(dlm.model.state[0][0, 0], 0.33333333333)
        self.assertAlmostEqual(dlm.model.state[1][0, 0], -0.33333333333)
        self.kf11.forwardFilter(dlm.model, -1)
        self.assertAlmostEqual(dlm.model.state[0][0, 0], -0.5)
        self.assertAlmostEqual(dlm.model.state[1][0, 0], 0.5)

    def testBackwardSmoother(self):
        """Smoothing after observing 1 then 0 on a constant trend."""
        dlm = builder()
        dlm.add(self.trend0)
        dlm.initialize()
        # with mean being 0 and observe 1 and 0 consectively, one shall
        # expect the smoothed mean at 1 will be 1/3, for discount = 1
        self.kf1.forwardFilter(dlm.model, 1)
        self.kf1.forwardFilter(dlm.model, 0)
        self.kf1.backwardSmoother(dlm.model, \
                                  np.matrix([[0.5]]), \
                                  np.matrix([[0.375]]))
        self.assertAlmostEqual(dlm.model.obs[0, 0], 1.0/3)
        self.assertAlmostEqual(dlm.model.sysVar[0, 0], 0.18518519)

    # second order trend with discount = 1. The smoothed result should be
    # equal to a direct fit on the three data points, 0, 1, -1. Thus, the
    # smoothed observation should be 0.0
    def testBackwardSmootherMultiDim(self):
        dlm = builder()
        dlm.add(self.trend1)
        dlm.initialize()
        self.kf11.forwardFilter(dlm.model, 1)
        state1 = dlm.model.state
        cov1 = dlm.model.sysVar
        self.kf11.forwardFilter(dlm.model, -1)
        self.kf11.backwardSmoother(dlm.model, \
                                   rawState = state1, \
                                   rawSysVar = cov1)
        self.assertAlmostEqual(dlm.model.obs[0, 0], 0.0)

    def testMissingData(self):
        """None observations leave the mean unchanged and inflate variance."""
        dlm = builder()
        dlm.add(self.trend0)
        dlm.initialize()
        self.kf0.forwardFilter(dlm.model, 1)
        self.assertAlmostEqual(dlm.model.obs[0, 0], 1.0)
        self.assertAlmostEqual(dlm.model.obsVar[0, 0], 1.0)
        self.kf0.forwardFilter(dlm.model, None)
        self.assertAlmostEqual(dlm.model.obs[0, 0], 1.0)
        self.assertAlmostEqual(dlm.model.obsVar[0, 0]/1e10, 0.5)
        self.kf0.forwardFilter(dlm.model, None)
        self.assertAlmostEqual(dlm.model.obs[0, 0], 1.0)
        self.assertAlmostEqual(dlm.model.obsVar[0, 0]/1e10, 0.5)
        self.kf0.forwardFilter(dlm.model, 0)
        self.assertAlmostEqual(dlm.model.obs[0, 0], 0.0)

    def testMissingEvaluation(self):
        """A None evaluation matrix is handled when the flag is set."""
        dlm = builder()
        dlm.add(self.trend0)
        dlm.initialize()
        dlm.model.evaluation = np.matrix([[None]])
        self.kf1.forwardFilter(dlm.model, 1.0, dealWithMissingEvaluation = True)
        self.assertAlmostEqual(dlm.model.obs, 0.0)
        self.assertAlmostEqual(dlm.model.transition, 1.0)

    def testEvolveMode(self):
        """Component-wise innovation: cross-component terms must stay zero."""
        dlm = builder()
        dlm.add(self.trend0_90)
        dlm.add(self.trend0_98)
        dlm.initialize()
        kf2 = kalmanFilter(discount=[0.9, 0.98],
                           updateInnovation='component',
                           index=dlm.componentIndex)
        kf2.forwardFilter(dlm.model, 1.0)
        self.assertAlmostEqual(dlm.model.innovation[0, 1], 0.0)
        self.assertAlmostEqual(dlm.model.innovation[1, 0], 0.0)


if __name__ == '__main__':
    unittest.main()
| StarcoderdataPython |
3348268 | <reponame>powplowdevs/2021-2022-Projects
import random
#unsorted numbers
numbers = [5,3,1,7,4,6,2]
#swaps numbers in a list
def swapPositions(list, pos1, pos2):
    """Swap the elements at indices pos1 and pos2 in place; return the list.

    Tuple assignment replaces the original pop/insert sequence, which cost
    O(n) per pop and only worked when pos1 < pos2; this version handles any
    index order (including pos1 == pos2).
    """
    # NOTE: the parameter name shadows the builtin `list`; kept unchanged
    # for backward compatibility with keyword callers.
    list[pos1], list[pos2] = list[pos2], list[pos1]
    return list
def sort(numbers):
    """Sort `numbers` in place, ascending, using bubble sort.

    The original function was unfinished: it looped over the list and
    described the algorithm in comments but never compared or swapped
    anything. This implements the bubble sort those comments describe.
    Returns None, matching the stdlib convention for in-place sorts (and
    the original's implicit return).
    """
    n = len(numbers)
    for unsorted_end in range(n - 1, 0, -1):
        swapped = False
        for i in range(unsorted_end):
            if numbers[i] > numbers[i + 1]:
                # Adjacent out-of-order pair: swap in place.
                numbers[i], numbers[i + 1] = numbers[i + 1], numbers[i]
                swapped = True
        if not swapped:
            # A full pass without swaps means the list is sorted.
            break
#sort numbers by calling sorting functions
sort(numbers) | StarcoderdataPython |
44253 | <reponame>jaemyungkim/hls4ml
import numpy as np
from hls4ml.model.types import CompressedType, NamedType, ExponentType, FixedPrecisionType, IntegerPrecisionType, XnorPrecisionType, ExponentPrecisionType, TensorVariable, PackedType, WeightVariable
#region Precision types
class PrecisionDefinition(object):
    """Mixin interface: render a precision type as a C++ type string."""

    def definition_cpp(self):
        raise NotImplementedError
class APIntegerPrecisionDefinition(PrecisionDefinition):
    """Renders an integer precision as a Vivado HLS ap_(u)int type."""

    def definition_cpp(self):
        # `signed` and `width` come from the precision type this mixin is
        # grafted onto (see FixedPrecisionConverter.convert).
        sign_prefix = '' if self.signed else 'u'
        return 'ap_{}int<{}>'.format(sign_prefix, self.width)
class APFixedPrecisionDefinition(PrecisionDefinition):
    """Renders a fixed-point precision as a Vivado HLS ap_(u)fixed type."""

    def _rounding_mode_cpp(self, mode):
        # Returns None for None, which drops the argument below.
        if mode is not None:
            return 'AP_' + str(mode)

    def _saturation_mode_cpp(self, mode):
        if mode is not None:
            return 'AP_' + str(mode)

    def definition_cpp(self):
        raw_args = [
            self.width,
            self.integer,
            self._rounding_mode_cpp(self.rounding_mode),
            self._saturation_mode_cpp(self.saturation_mode),
            self.saturation_bits,
        ]
        # Trailing optional arguments that are None are simply omitted.
        rendered = ','.join(str(arg) for arg in raw_args if arg is not None)
        sign_prefix = '' if self.signed else 'u'
        return 'ap_{}fixed<{}>'.format(sign_prefix, rendered)
class ACIntegerPrecisionDefinition(PrecisionDefinition):
    """Renders an integer precision as a Quartus/AC ac_int type."""

    def definition_cpp(self):
        # C++ wants a lowercase boolean literal for the signedness flag.
        return 'ac_int<{}, {}>'.format(self.width, str(self.signed).lower())
class ACFixedPrecisionDefinition(PrecisionDefinition):
    """Renders a fixed-point precision as a Quartus/AC ac_fixed type."""

    def _rounding_mode_cpp(self, mode):
        # Returns None for None, which drops the argument below.
        if mode is not None:
            return 'AC_' + str(mode)

    def _saturation_mode_cpp(self, mode):
        if mode is not None:
            return 'AC_' + str(mode)

    def definition_cpp(self):
        raw_args = [
            self.width,
            self.integer,
            str(self.signed).lower(),
            self._rounding_mode_cpp(self.rounding_mode),
            self._saturation_mode_cpp(self.saturation_mode),
            self.saturation_bits,
        ]
        rendered = ','.join(str(arg) for arg in raw_args if arg is not None)
        return 'ac_fixed<{}>'.format(rendered)
class PrecisionConverter(object):
    """Interface: attach a backend-specific definition to a precision type."""

    def convert(self, precision_type):
        raise NotImplementedError


class FixedPrecisionConverter(PrecisionConverter):
    """Converts precision types by dynamically re-classing the instance.

    `type_map` maps a precision class to the mixin class that provides
    definition_cpp(); `prefix` tags already-converted classes so the
    conversion is idempotent.
    """

    def __init__(self, type_map, prefix):
        self.type_map = type_map
        self.prefix = prefix

    def convert(self, precision_type):
        type_cls = type(precision_type)
        type_cls_name = type_cls.__name__
        # If the type is already converted, do nothing
        if type_cls_name.startswith(self.prefix):
            return precision_type
        definition_cls = self.type_map.get(type_cls, None)
        if definition_cls is not None:
            # Rebuild the instance's class as (original, definition mixin)
            # so definition_cpp() becomes available without copying state.
            precision_type.__class__ = type(self.prefix + type_cls_name, (type_cls, definition_cls), {})
            return precision_type
        else:
            raise Exception('Cannot convert precision type to {}: {}'.format(self.prefix, precision_type.__class__.__name__))
class APTypeConverter(FixedPrecisionConverter):
    """Maps hls4ml precision types to Vivado `ap_*` definition mixins."""

    def __init__(self):
        super().__init__(
            type_map={
                FixedPrecisionType: APFixedPrecisionDefinition,
                IntegerPrecisionType: APIntegerPrecisionDefinition,
                ExponentPrecisionType: APIntegerPrecisionDefinition,
                XnorPrecisionType: APIntegerPrecisionDefinition,
            },
            prefix='AP'
        )


class ACTypeConverter(FixedPrecisionConverter):
    """Maps hls4ml precision types to Quartus `ac_*` definition mixins."""

    def __init__(self):
        super().__init__(
            type_map={
                FixedPrecisionType: ACFixedPrecisionDefinition,
                IntegerPrecisionType: ACIntegerPrecisionDefinition,
                ExponentPrecisionType: ACIntegerPrecisionDefinition,
                XnorPrecisionType: ACIntegerPrecisionDefinition,
            },
            prefix='AC'
        )
#endregion
#region Data types
class TypeDefinition(object):
    """Mixin interface: render a data type as a C++ typedef/struct."""

    def definition_cpp(self):
        raise NotImplementedError


class TypePrecisionConverter(object):
    """Mixin: converts the wrapped precision via a precision converter."""

    def convert_precision(self, precision_converter):
        self.precision = precision_converter.convert(self.precision)


class NamedTypeConverter(TypeDefinition, TypePrecisionConverter):
    """Renders a NamedType as a plain C++ typedef."""

    def definition_cpp(self):
        return 'typedef {precision} {name};\n'.format(name=self.name, precision=self.precision.definition_cpp())


class CompressedTypeConverter(TypeDefinition, TypePrecisionConverter):
    """Renders a CompressedType as a (row, col, weight) C++ struct.

    Also converts the separate index precision alongside the weight
    precision.
    """

    def definition_cpp(self):
        cpp_fmt = (
            'typedef struct {name} {{'
            '{index} row_index;'
            '{index} col_index;'
            '{precision} weight; }} {name};\n'
        )
        return cpp_fmt.format(name=self.name, index=self.index_precision, precision=self.precision.definition_cpp())

    def convert_precision(self, precision_converter):
        super().convert_precision(precision_converter)
        self.index_precision = precision_converter.convert(self.index_precision)


class ExponentTypeConverter(TypeDefinition, TypePrecisionConverter):
    """Renders an ExponentType as a (sign, weight) C++ struct."""

    def definition_cpp(self):
        cpp_fmt = (
            'typedef struct {name} {{'
            '{sign} sign;'
            '{precision} weight; }} {name};\n'
        )
        return cpp_fmt.format(name=self.name, precision=self.precision.definition_cpp(), sign=self.sign.definition_cpp())

    def convert_precision(self, precision_converter):
        super().convert_precision(precision_converter)
        self.sign = precision_converter.convert(self.sign)


class PackedTypeConverter(TypeDefinition, TypePrecisionConverter):
    """Renders a PackedType as an nnet::array typedef.

    The element count is written as `n_elem / n_pack` or `n_elem * n_pack`
    depending on the type's `unpack` flag.
    """

    def definition_cpp(self):
        n_elem_expr = '/' if self.unpack else '*'
        return 'typedef nnet::array<{precision}, {n_elem}> {name};\n'.format(name=self.name, precision=self.precision.definition_cpp(), n_elem=str(self.n_elem) + n_elem_expr + str(self.n_pack))
class HLSTypeConverter(object):
    """Converts hls4ml data types by grafting HLS definition mixins onto them.

    Mirrors FixedPrecisionConverter but for the composite type classes;
    also recursively converts the contained precision(s).
    """

    def __init__(self, precision_converter):
        self.precision_converter = precision_converter
        self.type_map = {
            NamedType: NamedTypeConverter,
            CompressedType: CompressedTypeConverter,
            ExponentType: ExponentTypeConverter,
            PackedType: PackedTypeConverter,
        }

    def convert(self, atype):
        type_cls = type(atype)
        type_cls_name = type_cls.__name__
        # If the type is already converted, do nothing
        if type_cls_name.startswith('HLS'):
            return atype
        conversion_cls = self.type_map.get(type_cls, None)
        if conversion_cls is not None:
            # Re-class the instance so definition_cpp() becomes available.
            atype.__class__ = type('HLS' + type_cls_name, (type_cls, conversion_cls), {})
            atype.convert_precision(self.precision_converter)
            return atype
        else:
            raise Exception('Cannot convert type: {}'.format(atype.__class__.__name__))
#endregion
#region Variables
class VariableDefinition(object):
    """Mixin interface: render a variable declaration as C++."""

    def definition_cpp(self, name_suffix='', as_reference=False):
        raise NotImplementedError

#region ArrayVariable

class VivadoArrayVariableDefinition(VariableDefinition):
    """C++ array declaration for the Vivado backend."""

    def definition_cpp(self, name_suffix='', as_reference=False):
        return '{type} {name}{suffix}[{shape}]'.format(type=self.type.name, name=self.cppname, suffix=name_suffix, shape=self.size_cpp())


class QuartusArrayVariableDefinition(VariableDefinition):
    """C++ array declaration for the Quartus backend (carries a pragma)."""

    def definition_cpp(self, name_suffix='', as_reference=False):
        return '{type} {name}{suffix}[{shape}] {pragma}'.format(type=self.type.name, name=self.cppname, suffix=name_suffix, shape=self.size_cpp(), pragma=self.pragma)


class ArrayVariableConverter(object):
    """Re-classes a TensorVariable so it can render itself as a C++ array."""

    def __init__(self, type_converter, prefix, definition_cls):
        self.type_converter = type_converter
        self.prefix = prefix
        self.definition_cls = definition_cls

    def convert(self, tensor_var, pragma='partition'):
        if isinstance(tensor_var, self.definition_cls): # Already converted
            return tensor_var
        tensor_var.pragma = pragma
        tensor_var.type = self.type_converter.convert(tensor_var.type)
        tensor_var.__class__ = type(self.prefix + 'ArrayVariable', (type(tensor_var), self.definition_cls), {})
        return tensor_var


class VivadoArrayVariableConverter(ArrayVariableConverter):
    def __init__(self, type_converter):
        super().__init__(type_converter=type_converter, prefix='Vivado', definition_cls=VivadoArrayVariableDefinition)


class QuartusArrayVariableConverter(ArrayVariableConverter):
    def __init__(self, type_converter):
        super().__init__(type_converter=type_converter, prefix='Quartus', definition_cls=QuartusArrayVariableDefinition)
#endregion
#region StructMemberVariable
class QuartusStructMemberVariableDefinition(VariableDefinition):
    """C++ declaration for an array that lives inside a struct (Quartus)."""

    def definition_cpp(self, name_suffix='', as_reference=False):
        return '{type} {name}{suffix}[{shape}]'.format(type=self.type.name, name=self.member_name, suffix=name_suffix, shape=self.size_cpp())


class StructMemberVariableConverter(object):
    """Re-classes a TensorVariable as a struct member.

    Renames the variable to `struct_name.member_name` so generated code
    accesses it through the struct.
    """

    def __init__(self, type_converter, prefix, definition_cls):
        self.type_converter = type_converter
        self.prefix = prefix
        self.definition_cls = definition_cls

    def convert(self, tensor_var, pragma='partition', struct_name=None):
        if isinstance(tensor_var, self.definition_cls): # Already converted
            return tensor_var
        tensor_var.pragma = pragma
        tensor_var.type = self.type_converter.convert(tensor_var.type)
        assert struct_name is not None, 'struct_name must be provided when creating a StructMemberVariable'
        tensor_var.struct_name = str(struct_name)
        tensor_var.member_name = tensor_var.name
        tensor_var.name = tensor_var.struct_name + '.' + tensor_var.member_name
        tensor_var.__class__ = type(self.prefix + 'StructMemberVariable', (type(tensor_var), self.definition_cls), {})
        return tensor_var


class QuartusStructMemberVariableConverter(StructMemberVariableConverter):
    def __init__(self, type_converter):
        super().__init__(type_converter=type_converter, prefix='Quartus', definition_cls=QuartusStructMemberVariableDefinition)
#endregion
#region StreamVariable
class VivadoStreamVariableDefinition(VariableDefinition):
    """hls::stream declaration for the Vivado backend."""

    def definition_cpp(self, name_suffix='', as_reference=False):
        if as_reference: # Function parameter
            return 'hls::stream<{type}> &{name}{suffix}'.format(type=self.type.name, name=self.cppname, suffix=name_suffix)
        else: # Declaration
            return 'hls::stream<{type}> {name}{suffix}("{name}")'.format(type=self.type.name, name=self.cppname, suffix=name_suffix)


class StreamVariableConverter(object):
    """Re-classes a TensorVariable as a streamed (io_stream) variable.

    Wraps the element type into a PackedType of the last-dimension size,
    and records a ('stream', depth) pragma.
    """

    def __init__(self, type_converter, prefix, definition_cls):
        self.type_converter = type_converter
        self.prefix = prefix
        self.definition_cls = definition_cls

    def convert(self, tensor_var, n_pack=1, depth=0):
        if isinstance(tensor_var, self.definition_cls): # Already converted
            return tensor_var
        if depth == 0:
            # Default stream depth: number of "rows" (all dims but the last).
            depth = np.prod(tensor_var.shape) // tensor_var.shape[-1]
        tensor_var.pragma = ('stream', depth)
        tensor_var.type = self.type_converter.convert(PackedType(tensor_var.type.name, tensor_var.type.precision, tensor_var.shape[-1], n_pack))
        tensor_var.__class__ = type(self.prefix + 'StreamVariable', (type(tensor_var), self.definition_cls), {})
        return tensor_var


class VivadoStreamVariableConverter(StreamVariableConverter):
    def __init__(self, type_converter):
        super().__init__(type_converter=type_converter, prefix='Vivado', definition_cls=VivadoStreamVariableDefinition)
#endregion
#region InplaceVariable
class InplaceVariableConverter(object):
    """Converts the type of an in-place variable without adding a definition.

    For io_stream the type is wrapped in a PackedType (n_pack=1); for other
    io types the type is converted directly.
    """

    def __init__(self, type_converter, prefix):
        self.type_converter = type_converter
        self.prefix = prefix

    def convert(self, tensor_var, io_type):
        if tensor_var.__class__.__name__.startswith(self.prefix): # Already converted
            return tensor_var
        if io_type == 'io_stream':
            tensor_var.type = self.type_converter.convert(PackedType(tensor_var.type.name, tensor_var.type.precision, tensor_var.shape[-1], n_pack=1))
        else:
            tensor_var.type = self.type_converter.convert(tensor_var.type)
        tensor_var.__class__ = type(self.prefix + 'InplaceVariable', (type(tensor_var),), {})
        return tensor_var


class VivadoInplaceVariableConverter(InplaceVariableConverter):
    def __init__(self, type_converter):
        super().__init__(type_converter=type_converter, prefix='Vivado')


class QuartusInplaceVariableConverter(InplaceVariableConverter):
    def __init__(self, type_converter):
        super().__init__(type_converter=type_converter, prefix='Quartus')
#endregion
#region WeightsVariable
class StaticWeightVariableDefinition(VariableDefinition):
    """C++ declaration for a weights array held in registers."""

    def definition_cpp(self, name_suffix='', as_reference=False):
        return '{type} {name}[{size}]'.format(type=self.type.name, name=self.cppname, size=self.data_length)


class StaticWeightVariableConverter(object):
    """Re-classes a WeightVariable for register storage.

    Records the original class name in `weight_class` before re-classing.
    """

    def __init__(self, type_converter):
        self.type_converter = type_converter

    def convert(self, weight_var):
        if isinstance(weight_var, StaticWeightVariableDefinition): # Already converted
            return weight_var
        weight_var.weight_class = weight_var.__class__.__name__
        weight_var.storage = 'register'
        weight_var.type = self.type_converter.convert(weight_var.type)
        weight_var.__class__ = type('StaticWeightVariable', (type(weight_var), StaticWeightVariableDefinition), {})
        return weight_var


class BramWeightVariableConverter(object):
    """Marks a weight variable for BRAM storage (no re-classing needed)."""

    @classmethod
    def convert(cls, weight_var):
        weight_var.storage = 'bram'
        return weight_var
#endregion
#endregion | StarcoderdataPython |
83165 | <filename>misc/calc_absorption_coeff.py
import numpy as np
import matplotlib.pyplot as plt
import scipy.constants as const
from nanowire.optics.utils.utils import get_nk
import os
import glob
# Locate the NK data files relative to this script and plot an absorption
# coefficient for each one.
this_file = os.path.realpath(__file__)
nk_dir = os.path.normpath(os.path.join(this_file, '../../nanowire/NK/'))
nk_files = glob.glob(os.path.join(nk_dir, '00*.txt'))
for f in nk_files:
    # NOTE(review): column 0 is used as an angular frequency in the alpha
    # formula below but as an ordinary frequency in the wavelength
    # conversion (wvs = c/f) — confirm which one the data files contain.
    data = np.loadtxt(f)
    # alpha = 2* omega * k / c
    alpha = 2*data[:,0]*data[:, 2]/const.c
    # Convert 1/m to 1/um for S4's unit system (per the variable name).
    s4_alpha = alpha*1e-6
    wvs = 1e9*const.c/data[:, 0]
    # Print the coefficient at the wavelength closest to 367 nm.
    diff = np.abs(wvs - 367)
    index = np.argmin(diff)
    print(s4_alpha[index])
    plt.figure()
    plt.plot(wvs, s4_alpha)
    # Material name is the second underscore-separated token of the filename.
    title = os.path.basename(f).split('_')[1]
    plt.title(title)
    plt.xlabel('Wavelength (nm)')
    plt.ylabel('Absorption Coefficient')
    plt.show()
| StarcoderdataPython |
115762 | import cv2
# Load OpenCV's bundled Haar cascade for frontal faces.
faceCascade = cv2.CascadeClassifier(cv2.data.haarcascades + "haarcascade_frontalface_default.xml")
# NOTE(review): absolute Windows path hard-coded; breaks on other machines.
img = cv2.imread(r'C:\Users\Jeevan\PycharmProjects\CartoonMaking\Resources\Sample.jpeg')
img = cv2.resize(img, (800, 600))
imgGray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY) # converting image into grayscale image
# scaleFactor=1.1, minNeighbors=2 — detection runs on the grayscale image.
faces = faceCascade.detectMultiScale(imgGray, 1.1, 2)
# Draw a blue rectangle around every detected face.
for (x, y, w, h) in faces:
    cv2.rectangle(img, (x, y), (x+w, y+h), (255, 0, 0), 2)
cv2.imshow("Result", img)
cv2.waitKey(0)
| StarcoderdataPython |
4830356 | <reponame>tdilauro/library-registry
"""Implement logic common to more than one of the Simplified applications."""
from psycopg2 import DatabaseError
import flask
import json
import sys
from lxml import etree
from functools import wraps
from flask import make_response
from flask_babel import lazy_gettext as _
from util.flask_util import problem
from util.problem_detail import ProblemDetail
import traceback
import logging
from opds import OPDSCatalog
from sqlalchemy.orm.session import Session
from sqlalchemy.orm.exc import (
NoResultFound,
)
def catalog_response(catalog, cache_for=OPDSCatalog.CACHE_TIME):
    """Turn an OPDS catalog into a cacheable response with the OPDS media type."""
    return _make_response(catalog, OPDSCatalog.OPDS_TYPE, cache_for)
def _make_response(content, content_type, cache_for):
    """Build a 200 Flask response with Content-Type and Cache-Control set.

    :param content: response body -- an lxml element (serialized to bytes),
        a string, or anything else (coerced with ``str()``).
    :param content_type: value for the Content-Type header.
    :param cache_for: number of seconds the client may cache the
        representation; any non-int value means "do not cache".
    """
    if isinstance(content, etree._Element):
        content = etree.tostring(content)
    elif not isinstance(content, str):
        content = str(content)
    if isinstance(cache_for, int):
        # A CDN should hold on to the cached representation only half
        # as long as the end-user.
        client_cache = cache_for
        cdn_cache = cache_for // 2
        # Cache-Control directives use '=' (RFC 7234), not ':'; the old
        # "max-age: %d" form was invalid and ignored by caches.
        cache_control = "public, no-transform, max-age=%d, s-maxage=%d" % (
            client_cache, cdn_cache)
    else:
        cache_control = "private, no-cache"
    return make_response(content, 200, {"Content-Type": content_type,
                                        "Cache-Control": cache_control})
def returns_problem_detail(f):
    """Decorator: if the wrapped view returns a ProblemDetail, unwrap it
    into its HTTP response; otherwise pass the return value through."""
    @wraps(f)
    def decorated(*args, **kwargs):
        result = f(*args, **kwargs)
        return result.response if isinstance(result, ProblemDetail) else result
    return decorated
def returns_json_or_response_or_problem_detail(f):
    """Decorator: unwrap ProblemDetails, pass Responses through unchanged,
    and jsonify any other (dict-like) return value."""
    @wraps(f)
    def decorated(*args, **kwargs):
        outcome = f(*args, **kwargs)
        if isinstance(outcome, ProblemDetail):
            outcome = outcome.response
        elif not isinstance(outcome, flask.Response):
            outcome = flask.jsonify(**outcome)
        return outcome
    return decorated
class ErrorHandler(object):
    """Convert uncaught exceptions into HTTP responses for a Flask app."""

    def __init__(self, app, debug):
        """
        :param app: the Flask application object.
        :param debug: if True, include tracebacks in error responses.
        """
        self.app = app
        self.debug = debug

    def handle(self, exception):
        """Turn *exception* into a Flask response (or kill the process).

        Database errors are treated as fatal; exceptions that can describe
        themselves as problem detail documents become proper problem
        responses; anything else becomes a plain-text 500.
        """
        if hasattr(self.app, 'manager') and hasattr(self.app.manager, '_db'):
            # There is an active database session. Roll it back.
            self.app.manager._db.rollback()
        tb = traceback.format_exc()
        if isinstance(exception, DatabaseError):
            # The database session may have become tainted. For now
            # the simplest thing to do is to kill the entire process
            # and let uwsgi restart it.
            logging.error(
                "Database error: %s Treating as fatal to avoid holding on to a tainted session!",
                exception, exc_info=exception
            )
            # Prefer werkzeug's graceful shutdown hook when available
            # (development server); otherwise exit outright.
            shutdown = flask.request.environ.get('werkzeug.server.shutdown')
            if shutdown:
                shutdown()
            else:
                sys.exit()
        # By default, the error will be logged at log level ERROR.
        log_method = logging.error
        # Okay, it's not a database error. Turn it into a useful HTTP error
        # response.
        if hasattr(exception, 'as_problem_detail_document'):
            # This exception can be turned directly into a problem
            # detail document.
            document = exception.as_problem_detail_document(self.debug)
            if not self.debug:
                document.debug_message = None
            else:
                if document.debug_message:
                    document.debug_message += "\n\n" + tb
                else:
                    document.debug_message = tb
            if document.status_code == 502:
                # This is an error in integrating with some upstream
                # service. It's a serious problem, but probably not
                # indicative of a bug in our software. Log it at log level
                # WARN.
                # NOTE(review): logging.warn is a deprecated alias of
                # logging.warning.
                log_method = logging.warn
            response = make_response(document.response)
        else:
            # There's no way to turn this exception into a problem
            # document. This is probably indicative of a bug in our
            # software.
            if self.debug:
                body = tb
            else:
                # NOTE(review): "occured" typo is part of the translated
                # string key; fixing it requires updating the catalogs too.
                body = _('An internal error occured')
            response = make_response(str(body), 500, {"Content-Type": "text/plain"})
        log_method("Exception in web app: %s", exception, exc_info=exception)
        return response
class HeartbeatController(object):
    """Minimal controller used to check that the application is alive."""

    def heartbeat(self):
        """Return an empty 200 response advertised as JSON."""
        headers = {"Content-Type": "application/json"}
        return make_response("", 200, headers)
| StarcoderdataPython |
1640152 | <gh_stars>0
##========================================================================================
##========================================================================================
import numpy as np
# For each number of hidden units nh = 0..8, compute an averaged observed
# cost from cost/cost_NN.dat and scale it by (1 + nh/n); write all three
# values per nh to cost_av.dat.
n=25
cost_out=open('cost_av.dat','w')
for nh in range(0,9):
    cost = np.loadtxt('cost/cost_%02d.dat'%(nh))
    if nh == 0:
        # The nh == 0 case reads a single value directly.
        cost_obs = cost[0]
    else:
        # Average column 0 from row 100 onward (presumably skipping an
        # initial burn-in -- TODO confirm).
        cost_obs = np.mean(cost[100:,0])
    # Penalize cost by model size: scale by (1 + nh/n).
    cost_all = cost_obs*(1.+float(nh)/n)
    cost_out.write("% i % f % f \n"%(nh,cost_obs,cost_all))
    print(nh,cost_obs,cost_all)
cost_out.close()
| StarcoderdataPython |
1653537 | <reponame>GuerrillaAnalytics/made-cli<filename>tests/commands/project_grp/test_project_functions.py
"""Tests for all project functions."""
import os
import shutil
import tempfile
from made.commands.project_grp.project_functions \
import project_audit_name
from made.commands.project_grp.project_functions \
import project_create_folder, validate_project_name
def test_project_create_folder():
    """Test that the project folder is created in the right format."""
    # The folder is created relative to the CWD, so work in a throwaway dir.
    location = tempfile.mkdtemp()
    os.chdir(location)
    new_folder = project_create_folder(id="ds134", label="project")
    # Expected layout: <id>_<label> under the current directory.
    expected_path = os.path.join(location, "ds134" + "_" + "project")
    # Clean up before asserting so a failing assert doesn't leak temp dirs.
    shutil.rmtree(location)
    assert expected_path == new_folder
def test_project_audit_name_spaces():
    """Test that auditing project name returns correct results."""
    # A space in the folder name must fail the audit.
    test_name = "ds045_ rest of name"
    result = project_audit_name(project_folder=test_name)
    assert result is False
def test_project_audit_name_case():
    """Test that auditing project name returns correct results."""
    # Upper-case characters must fail the audit.
    test_name = "ds045_withUpperCase"
    result = project_audit_name(project_folder=test_name)
    assert result is False
def test_project_audit_name_correct():
    """Test that auditing project name returns correct results."""
    # A lower-case, space-free name passes the audit.
    test_name = "ds056_corectname"
    result = project_audit_name(project_folder=test_name)
    assert result is True
def test_validate_project_name():
    """Test that the validate project name function works."""
    # A space anywhere in the name should fail validation.
    assert validate_project_name('ds 045') is False
    # Each special character should fail validation.
    for special in ('#', '!', '$', '£', '&', '*', '(', ')'):
        assert validate_project_name('ds' + special + '234') is False
| StarcoderdataPython |
4807859 | import json
import os
import logging
_LOGGER = logging.getLogger(__name__)
def get_instance_id():
    """Return a short identifier for the current container or host.

    Under ECS (``ECS_CONTAINER_METADATA_FILE`` set), the last 12 characters
    of the metadata's ``ContainerID`` are used; otherwise the hostname with
    '.' and '-' stripped. Never raises: any failure falls back to the
    hostname form.
    """
    try:
        filename = os.environ.get("ECS_CONTAINER_METADATA_FILE")
        if filename is not None:
            _LOGGER.info("ECS_CONTAINER_METADATA_FILE %s", filename)
            with open(filename, encoding="utf-8") as file:
                metadata = json.load(file)
            _LOGGER.info("metadata %s", metadata)
            _id = metadata.get("ContainerID")
            # A missing/empty ContainerID used to raise TypeError on
            # len(None) and be logged as an error; treat it as a plain
            # fall-through to the hostname fallback instead.
            if _id:
                return _id[-12:] if len(_id) > 12 else _id
    except Exception:  # pylint: disable=broad-except
        _LOGGER.error("Failed to get instance id", exc_info=True)
    return os.uname()[1].replace(".", "").replace("-", "")
| StarcoderdataPython |
3257943 | <reponame>pmay/tifinity<gh_stars>1-10
import hashlib
class Checksum():
    """Helpers for computing digests over a parsed TIFF structure."""

    @staticmethod
    def checksum(tiff, alg="md5", justimage=False):
        """Digest the whole file, each IFD's image data, and each IFD.

        When *justimage* is true only the per-image digests are computed;
        the 'full' and 'ifds' entries are left as 'Unknown'.
        """
        hashes = {'full': 'Unknown', 'images': 'Unknown', 'ifds': 'Unknown'}
        if not justimage:
            hashes['full'] = Checksum._hash_data(tiff.raw_data(), alg)
        image_digests, ifd_digests = [], []
        for ifd in tiff.ifds:
            image_digests.append(Checksum._hash_data(ifd.img_data, alg))
            if not justimage:
                ifd_digests.append(Checksum._hash_data(ifd.ifd_data, alg))
        hashes['images'] = image_digests
        if not justimage:
            hashes['ifds'] = ifd_digests
        return hashes

    @staticmethod
    def _hash_data(data, alg="sha256"):
        """Return the hex digest of *data* under hashlib algorithm *alg*."""
        digest = hashlib.new(alg)
        digest.update(data)
        return digest.hexdigest()
3208873 | <filename>exercises/0020-ValidParentheses/valid_parentheses.py
class Solution:
    """LeetCode 20 -- Valid Parentheses."""

    def isValid(self, s: str) -> bool:
        """Return True if every bracket in *s* closes in the right order.

        *s* is assumed to contain only the characters '()[]{}'.
        """
        pairs = {")": "(", "]": "[", "}": "{"}
        stack = []
        for ch in s:
            if ch in "([{":
                stack.append(ch)
            # Closing bracket: must match the most recently opened one.
            elif not stack or stack.pop() != pairs[ch]:
                return False
        # Valid only if every opener was consumed.
        return not stack
| StarcoderdataPython |
3214900 | <filename>tests/rlc/test_rlc.py
# general imports
from math import exp, cos, sin, sqrt
from pathlib import Path
# AHA imports
import magma as m
# msdsl imports
from ..common import *
from msdsl import MixedSignalModel, VerilogGenerator, AnalogSignal, Deriv
NAME = Path(__file__).stem.split('_')[1]
BUILD_DIR = Path(__file__).resolve().parent / 'build'
def pytest_generate_tests(metafunc):
    """Pytest hook: parametrize the module's tests with the available
    simulators and real-number types (helpers come from ..common)."""
    pytest_sim_params(metafunc)
    pytest_real_type_params(metafunc)
def gen_model(cap=0.16e-6, ind=0.16e-6, res=0.1, dt=0.01e-6,
              real_type=RealType.FixedPoint):
    """Build a series-RLC mixed-signal model and compile it to Verilog.

    :param cap: capacitance (F)
    :param ind: inductance (H)
    :param res: series resistance (ohms)
    :param dt: simulation timestep (s)
    :param real_type: numeric representation for analog signals
    :return: path to the generated model file (BUILD_DIR / 'model.sv')
    """
    # declare model
    m = MixedSignalModel('model', dt=dt, real_type=real_type)
    m.add_analog_input('v_in')
    m.add_analog_output('v_out')
    m.add_digital_input('clk')
    m.add_digital_input('rst')
    # declare system of equations
    m.add_analog_state('i_ind', 10) # TODO: can this be tightened down a bit?
    v_l = AnalogSignal('v_l')
    v_r = AnalogSignal('v_r')
    # Inductor law, capacitor law, Ohm's law, and KVL around the loop.
    eqns = [
        Deriv(m.i_ind) == v_l / ind,
        Deriv(m.v_out) == m.i_ind / cap,
        v_r == m.i_ind * res,
        m.v_in == m.v_out + v_l + v_r
    ]
    m.add_eqn_sys(eqns, clk=m.clk, rst=m.rst)
    BUILD_DIR.mkdir(parents=True, exist_ok=True)
    model_file = BUILD_DIR / 'model.sv'
    m.compile_to_file(VerilogGenerator(), filename=model_file)
    return model_file
def test_rlc(simulator, real_type, cap=0.16e-6, ind=0.16e-6, res=0.1, dt=0.01e-6):
    """Simulate the generated RLC model and compare its step response to
    the closed-form underdamped solution."""
    model_file = gen_model(cap=cap, ind=ind, res=res, dt=dt,
                           real_type=real_type)
    # declare circuit
    class dut(m.Circuit):
        name=f'test_{NAME}'
        io=m.IO(
            v_in=fault.RealIn,
            v_out=fault.RealOut,
            clk=m.ClockIn,
            rst=m.BitIn
        )
    # create the tester
    tester = MsdslTester(dut, dut.clk)
    # initialize
    v_in = 1.0
    tester.poke(dut.clk, 0)
    tester.poke(dut.rst, 1)
    tester.poke(dut.v_in, v_in)
    tester.eval()
    # reset
    tester.step(2)
    # model for circuit behavior
    # see slide 15 here: http://tuttle.merc.iastate.edu/ee201/topics/capacitors_inductors/RLC_transients.pdf
    # o is the damping coefficient -R/(2L); wd the damped natural frequency.
    vf = v_in
    vi = 0.0
    o = -res/(2*ind)
    wd = sqrt(1/(ind*cap)-((res/(2*ind))**2))
    def model(t):
        # Underdamped step response from vi toward vf.
        return vf - (vf-vi)*(exp(o*t)*(cos(wd*t)-(o/wd)*sin(wd*t)))
    # print the first few outputs
    tester.poke(dut.rst, 0)
    for k in range(20):
        tester.expect(dut.v_out, model(k*dt), abs_tol=0.025)
        tester.print("v_out: %0f\n", dut.v_out)
        tester.step(2)
    # run the simulation
    tester.compile_and_run(
        directory=BUILD_DIR,
        simulator=simulator,
        ext_srcs=[model_file, get_file(f'{NAME}/test_{NAME}.sv')],
        real_type=real_type
    )
| StarcoderdataPython |
from baxter_controller import BaxterController
| StarcoderdataPython |
4825539 | <gh_stars>1000+
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import subprocess
import sys
import unittest
from . import common
class TestCommon(unittest.TestCase):
    """Tests for the common helpers (Android SDK discovery, adb path)."""

    def setUp(self):
        # Resolve the SDK before clearing the env vars, and stash the
        # original environment so tearDown can restore it untouched.
        self.android_sdk = common.get_android_sdk()
        self._environ = dict(os.environ)
        os.environ.pop("ANDROID_SDK", None)
        os.environ.pop("ANDROID_HOME", None)
    def tearDown(self):
        # Restore the pre-test environment exactly.
        os.environ.clear()
        os.environ.update(self._environ)
    def test_get_android_sdk_happy_path(self):
        os.environ["ANDROID_SDK"] = "/tmp/foo"
        self.assertEqual("/tmp/foo", common.get_android_sdk())
    def test_tilde_is_expanded(self):
        # Only meaningful on Python 2; bail out early on Python 3.
        if sys.version_info >= (3,):
            return
        os.environ["ANDROID_SDK"] = "~/foobar"
        home = os.environ["HOME"]
        self.assertEqual(os.path.join(home, "foobar"), common.get_android_sdk())
    def test_get_adb_can_run_in_subprocess(self):
        # Smoke test: the resolved adb binary must be runnable.
        os.environ["ANDROID_SDK"] = self.android_sdk
        subprocess.check_call([common.get_adb(), "devices"])
| StarcoderdataPython |
#Crie um algoritmo que leia um número e mostre o seu dobro, triplo e raíz quadrada.
# Read an integer and report its double, triple and square root.
n = int(input('Digite um número: '))
dobro, triplo = n * 2, n * 3
raiz = n ** 0.5
print(f'O dobro de {n} vale {dobro}.')
print(f'O triplo de {n} vale {triplo}. \nE a sua raíz quadrada vale {raiz:.3f}.')
| StarcoderdataPython |
3387658 | <filename>venv/lib/python3.9/site-packages/pyarrow/_generated_version.py
# coding: utf-8
# file generated by setuptools_scm
# don't change, don't track in version control
version = '7.0.0'
version_tuple = (7, 0, 0)
| StarcoderdataPython |
3318246 | <filename>2012_03_27/test_fatorial.py
import unittest
from fatorial import fatorial
class FatorialTestCase(unittest.TestCase):
    """Unit tests for the project's fatorial(n, k) helper."""

    def test_fatorial_1_1(self):
        self.assertEqual(1, fatorial(1, 1))
    def test_fatorial_2_1(self):
        self.assertEqual(2, fatorial(2, 1))
    def test_fatorial_3_1(self):
        self.assertEqual(6, fatorial(3, 1))
    def test_fatorial_4_1(self):
        self.assertEqual(24, fatorial(4, 1))
    def test_fatorial_10_3(self):
        self.assertEqual(280, fatorial(10, 3))
    def test_fatorial_21_19(self):
        self.assertEqual(42, fatorial(21, 19))
# Run the suite only when executed as a script; the previous unconditional
# unittest.main() call ran (and sys.exit'ed) as soon as the module was
# imported, e.g. by a test collector.
if __name__ == '__main__':
    unittest.main()
| StarcoderdataPython |
import dataclasses
import typing
from dataclasses import dataclass
from typing import List
from typing import Optional
from profiles.settings import DENOM_DKEY, VALUE_DKEY, GEOG_DKEY, TIME_DKEY
if typing.TYPE_CHECKING:
from indicators.models import CensusVariable, CKANVariable
@dataclass
class Datum:
    """One observation: a variable's value for a geography at a time.

    Optionally carries a margin of error, a denominator and the derived
    percent (value / denom).
    """
    variable: str
    geog: str
    time: str
    value: Optional[float] = None
    moe: Optional[float] = None
    percent: Optional[float] = None
    denom: Optional[float] = None

    @staticmethod
    def from_census_response_datum(variable: 'CensusVariable', census_datum) -> 'Datum':
        """Build a Datum from one census API response record."""
        return Datum(
            variable=variable.slug,
            geog=census_datum.get('geog'),
            time=census_datum.get('time'),
            value=census_datum.get('value'),
            moe=census_datum.get('moe'),
            denom=census_datum.get('denom'),
            percent=census_datum.get('percent'), )

    @staticmethod
    def from_census_response_data(variable: 'CensusVariable', census_data: list[dict]) -> List['Datum']:
        """Build a list of Datums from a census API response."""
        return [Datum.from_census_response_datum(variable, census_datum) for census_datum in census_data]

    @staticmethod
    def from_ckan_response_datum(variable: 'CKANVariable', ckan_datum) -> 'Datum':
        """Build a Datum from one CKAN datastore record, deriving percent
        when a denominator column is present."""
        denom, percent = None, None
        if DENOM_DKEY in ckan_datum:
            denom = ckan_datum[DENOM_DKEY]
            percent = (ckan_datum[VALUE_DKEY] / ckan_datum[DENOM_DKEY])
        return Datum(variable=variable.slug,
                     geog=ckan_datum[GEOG_DKEY],
                     time=ckan_datum[TIME_DKEY],
                     value=ckan_datum[VALUE_DKEY],
                     denom=denom,
                     percent=percent)

    @staticmethod
    def from_ckan_response_data(variable: 'CKANVariable', ckan_data: list[dict]) -> List['Datum']:
        """Build a list of Datums from a CKAN datastore response."""
        return [Datum.from_ckan_response_datum(variable, ckan_datum) for ckan_datum in ckan_data]

    def update(self, **kwargs):
        """ Creates new Datum similar to the instance with new values from kwargs """
        return Datum(**{**self.as_dict(), **kwargs})

    def with_denom_val(self, denom_val: Optional[float]):
        """ Merge the denom value and generate the percent """
        # NOTE(review): denom_val of None or 0 will raise here (division).
        return dataclasses.replace(self, denom=denom_val, percent=(self.value / denom_val))

    def as_dict(self):
        """Full dict form, including the identifying fields."""
        return {'variable': self.variable, 'geog': self.geog, 'time': self.time,
                'value': self.value, 'moe': self.moe, 'percent': self.percent, 'denom': self.denom}

    def as_value_dict(self):
        """Value-only dict form (no variable/geog/time keys)."""
        return {'value': self.value, 'moe': self.moe, 'percent': self.percent, 'denom': self.denom}
| StarcoderdataPython |
1717129 | <filename>docker/zarr-to-loom/ss2_plate_zarr_to_loom.py
#!/usr/bin/env python3
import os
import sys
import zarr
import loompy
import scipy as sc
from scipy.sparse import coo_matrix
import argparse
import numpy as np
def main():
    """Convert an SS2 pipeline zarr store into a loom file.

    Reads the expression matrix (cells x genes) plus gene and cell
    metadata from the zarr store and writes a genes x cells loom file.
    """
    description = """This script converts SS2 pipeline zarr output into loom"""
    parser = argparse.ArgumentParser(description=description)
    parser.add_argument('--input-zarr', dest='input_zarr_path', required=True, help="Path to input zarr file")
    parser.add_argument('--output-loom', dest='output_loom_path', required=True, help="Path to output loom file")
    parser.add_argument('--sample-id', dest='sample_id', required=True, help="Sample identifier")
    args = parser.parse_args()
    # Do input checks
    if not os.path.isdir(args.input_zarr_path):
        # (fixed "directoyr" typo in the user-facing message)
        sys.exit("Error: the input zarr path is not a directory")
    if os.path.exists(args.output_loom_path):
        sys.exit("Error: the output loom file exists")
    store = zarr.DirectoryStore(args.input_zarr_path)
    root = zarr.open(store)
    nrows = root["/output_zarr/expression"].shape[0]
    ncols = root["/output_zarr/expression"].shape[1]
    # Empty matrix used only for its .shape below. Pass dtype by keyword:
    # positionally the second argument is `shape`, so the old call never
    # actually set float32.
    expr_sp = sc.sparse.coo_matrix((nrows, ncols), dtype=np.float32)
    expr_coo = sc.sparse.coo_matrix(root["/output_zarr/expression"][:])
    # The COO members are already ndarrays; the previous element-by-element
    # copy through Python lists was needless O(nnz) interpreter work.
    xcoord = expr_coo.row
    ycoord = expr_coo.col
    value = expr_coo.data
    # Build the transposed (genes x cells) matrix expected by loompy.
    expr_sp_t = sc.sparse.coo_matrix((value, (ycoord, xcoord)), shape=(expr_sp.shape[1], expr_sp.shape[0]))
    del xcoord
    del ycoord
    del value
    # Save the gene metadata (just names)
    row_attrs = {
        "Gene": root.output_zarr.gene_id[:]
    }
    # Save the cell metadata
    col_attrs = dict()
    col_attrs["CellID"] = root.output_zarr.cell_id[:]
    # Copy each named numeric metadata column for the cells.
    numeric_field_names = root.output_zarr.cell_metadata_numeric_name[:]
    for i in range(0, numeric_field_names.shape[0]):
        name = numeric_field_names[i]
        data = root.output_zarr.cell_metadata_numeric[:,i]
        col_attrs[name] = data
    # Copy each named string metadata column for the cells.
    string_field_names = root.output_zarr.cell_metadata_string_name[:]
    for i in range(0, string_field_names.shape[0]):
        name = string_field_names[i]
        data = root.output_zarr.cell_metadata_string[:,i]
        col_attrs[name] = data
    # Create the loom file
    loompy.create(args.output_loom_path, expr_sp_t, row_attrs, col_attrs)
if __name__ == '__main__':
    main()
| StarcoderdataPython |
import cv2 as cv
import numpy as np
import image
class ColorImage(image.Image):
    """BGR image wrapper with drawing helpers (OpenCV conventions)."""

    def __init__(self, img):
        """Wrap *img*; a 2-D (grayscale) array is promoted to 3-channel BGR."""
        if len(img.shape) == 2:
            img = cv.cvtColor(img, cv.COLOR_GRAY2BGR)
        super().__init__(img)
    def gray(self):
        """Convert to a GrayImage.

        NOTE(review): the * 255 rescale assumes the stored pixels are float
        in [0, 1] -- confirm against image.Image's contract.
        """
        gray = cv.cvtColor(self.img, cv.COLOR_BGR2GRAY)
        gray = np.round(gray * 255).astype(np.uint8)
        return image.GrayImage(gray)
    def draw_point_groups(self, point_groups):
        """Return a copy with each point group circled in its own random color."""
        img = np.copy(self.img)
        for point_group in point_groups:
            r = np.random.randint(255)
            g = np.random.randint(255)
            b = np.random.randint(255)
            for point in point_group:
                cv.circle(img, (point.x, point.y), 1, (r, g, b))
        return ColorImage(img)
    def draw_lines(self, lines):
        """Return a copy with each line drawn in its own random color."""
        img = np.copy(self.img)
        for line in lines:
            r = np.random.randint(255)
            g = np.random.randint(255)
            b = np.random.randint(255)
            cv.line(img, line.p1.raw, line.p2.raw, (r, g, b), 20)
        return ColorImage(img)
    def draw_quadrilateral(self, quadrilateral):
        """Return a copy with the quadrilateral's four edges drawn in one
        random color (vertices taken pairwise, wrapping back to the first)."""
        img = np.copy(self.img)
        r = np.random.randint(255)
        g = np.random.randint(255)
        b = np.random.randint(255)
        for i in range(4):
            start = quadrilateral.ordered_points[i]
            stop = quadrilateral.ordered_points[(i + 1) % 4]
            cv.line(img, start.raw, stop.raw, (r, g, b), 25)
        return ColorImage(img)
| StarcoderdataPython |
138308 | <gh_stars>1000+
#!/usr/bin/python3
# Copyright 2020 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Wrapper around xcrun adding support for --developer-dir parameter to set
the DEVELOPER_DIR environment variable, and for converting paths relative
to absolute (since this is required by most of the tool run via xcrun).
"""
import argparse
import os
import subprocess
import sys
def xcrun(command, developer_dir):
    """Run /usr/bin/xcrun with *command*, forwarding its stdout.

    :param command: argument list passed through to xcrun; any argument
        that names an existing path is made absolute first (the tools run
        via xcrun generally require absolute paths).
    :param developer_dir: if set, exported (absolute) as DEVELOPER_DIR.

    On a non-zero return code, prints xcrun's stderr and exits the process
    with the same code.
    """
    environ = dict(os.environ)
    if developer_dir:
        environ['DEVELOPER_DIR'] = os.path.abspath(developer_dir)
    processed_args = ['/usr/bin/xcrun']
    for arg in command:
        if os.path.exists(arg):
            arg = os.path.abspath(arg)
        processed_args.append(arg)
    process = subprocess.Popen(processed_args,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE,
                               universal_newlines=True,
                               env=environ)
    stdout, stderr = process.communicate()
    sys.stdout.write(stdout)
    if process.returncode:
        sys.stderr.write(stderr)
        sys.exit(process.returncode)
def main(args):
    """Split off --developer-dir and pass every other argument to xcrun."""
    parser = argparse.ArgumentParser(add_help=False)
    parser.add_argument(
        '--developer-dir',
        help='path to developer dir to use for the invocation of xcrun')
    # parse_known_args keeps the unrecognized arguments for xcrun itself.
    parsed, remaining_args = parser.parse_known_args(args)
    xcrun(remaining_args, parsed.developer_dir)
if __name__ == '__main__':
    main(sys.argv[1:])
| StarcoderdataPython |
from typing import TypeVar
from networkx import nx
_G = TypeVar("_G", nx.DiGraph, nx.Graph)
| StarcoderdataPython |
# problem link: https://leetcode.com/problems/word-search/
class Solution:
    """LeetCode 79 -- Word Search (DFS with backtracking)."""

    def exist(self, board: List[List[str]], word: str) -> bool:
        """Return True if *word* can be traced through orthogonally
        adjacent, non-repeating cells of *board*."""
        rows, cols = len(board), len(board[0])
        seen = [[False] * cols for _ in range(rows)]

        def search(r, c, remaining):
            # Last character: the match decides the whole search.
            if len(remaining) == 1:
                return board[r][c] == remaining[0]
            if board[r][c] != remaining[0]:
                return False
            seen[r][c] = True
            try:
                for nr, nc in ((r - 1, c), (r + 1, c), (r, c - 1), (r, c + 1)):
                    if 0 <= nr < rows and 0 <= nc < cols and not seen[nr][nc]:
                        if search(nr, nc, remaining[1:]):
                            return True
            finally:
                # Backtrack: free the cell for other paths.
                seen[r][c] = False
            return False

        for r in range(rows):
            for c in range(cols):
                if board[r][c] == word[0] and search(r, c, word):
                    return True
        return False
3244679 | <filename>DatabaseNormalizer/db_normalizer/data_loader/api_external.py
# -*- coding: utf-8 -*-
"""
db_normalizer.data_loader.api_external
--------------------------------------
Toolbox to load missing data from several table structures
:authors: <NAME>, <NAME>.
:licence: MIT, see LICENSE for more details.
"""
import re
import urllib.parse
from http import HTTPStatus
import requests
from db_normalizer.data_loader.enum.loading_strategy import LoadingStrategy
from db_normalizer.data_loader.utils.table_objects import City, Country, \
NOT_SET, EXTERNAL_DATA
from db_normalizer.data_loader.utils.utils import ExternalSources
from db_normalizer.exceptions.api_external_exceptions import \
UnableToReachCountryApiException, ResourceNotFoundException, \
UnableToReachCityApiException
def fill_city(
        city: City,
        strategy: LoadingStrategy = LoadingStrategy.DEFAULT
) -> None:
    """Fill the missing values of a city depending on its name

    :param city: city object to complete
    :param strategy: on several records fetched, adopt the specified strategy
    :raise UnableToReachCityApiException: on request failure
    :raise ResourceNotFoundException: on missing or unusable data
    """
    # don't query the API if not needed
    if city.population != EXTERNAL_DATA:
        return
    # build the request
    target = ExternalSources.wikipedia_api
    target += f'?action=parse&page={urllib.parse.quote(city.name)}&format=json'
    # fetch the city's information
    try:
        r = requests.get(target)
    except requests.exceptions.RequestException as err:
        # The old `except ConnectionError` only caught the *builtin*
        # ConnectionError; requests raises its own hierarchy rooted at
        # RequestException, so network failures escaped unhandled.
        raise UnableToReachCityApiException from err
    # test the API status
    if r.status_code != HTTPStatus.OK:
        raise ResourceNotFoundException
    # selecting the appropriate result
    data = r.json()
    # try to fetch the data, raise an error if no data were found
    try:
        data = data['parse']['text']['*']
    except KeyError:
        raise ResourceNotFoundException
    # extract every population figure mentioned in the page
    results = re.findall(
        pattern=ExternalSources.city_population_regex,
        string=data,
        flags=re.IGNORECASE
    )
    results = [int(result.replace(',', '')) for result in results]
    if not results:
        raise ResourceNotFoundException
    # updating value
    if strategy == LoadingStrategy.DEFAULT:
        city.population = results[0]
    elif strategy == LoadingStrategy.LEAST_POPULATED:
        city.population = min(results)
    elif strategy == LoadingStrategy.MOST_POPULATED:
        city.population = max(results)
def fill_country(
        country: Country,
        strategy: LoadingStrategy = LoadingStrategy.DEFAULT
) -> None:
    """Fill the missing values of a country depending on its name

    :param country: country object to complete
    :param strategy: on several records fetched, adopt the specified strategy
    :raise UnableToReachCountryApiException: on request failure
    :raise ResourceNotFoundException: on missing resource
    """
    # don't query the API if not needed
    if country.area != EXTERNAL_DATA \
            and country.population != EXTERNAL_DATA:
        return
    # build the request
    target = ExternalSources.country_api
    target += 'name/'
    target += urllib.parse.quote(country.name)
    target += '?fields=population;area'
    # fetch the country's information
    try:
        r = requests.get(target)
    except requests.exceptions.RequestException as err:
        # The old `except ConnectionError` only caught the *builtin*
        # ConnectionError; requests raises its own hierarchy rooted at
        # RequestException, so network failures escaped unhandled.
        raise UnableToReachCountryApiException from err
    # test the API status
    if r.status_code != HTTPStatus.OK:
        raise ResourceNotFoundException
    # selecting the appropriate result
    results = r.json()
    if strategy == LoadingStrategy.DEFAULT:
        results = results[0]
    elif strategy == LoadingStrategy.LEAST_POPULATED:
        results = min(results, key=lambda row: row['population'])
    elif strategy == LoadingStrategy.MOST_POPULATED:
        results = max(results, key=lambda row: row['population'])
    # updating values if possible (missing fields fall back to NOT_SET)
    country.area = results.get('area', NOT_SET)
    country.population = results.get('population', NOT_SET)
def url_encode(to_sanitize: str) -> str:
    """Percent-encode the special characters met so far for URL purposes.

    :see: https://www.degraeve.com/reference/urlencoding.php
    """
    # NOTE: ',' must map to %2C; the previous '%B4' is the acute-accent
    # byte, not a comma, and produced broken URLs.
    return to_sanitize.replace(
        ' ', '%20'
    ).replace(
        ',', '%2C'
    ).replace(
        '\'', '%27'
    ).replace(
        '(', '%28'
    ).replace(
        ')', '%29'
    )
| StarcoderdataPython |
3292829 | <gh_stars>1-10
import uuid
import pytest
from sqlalchemy.exc import SQLAlchemyError
from app import encryption
from app.dao.service_callback_api_dao import (
save_service_callback_api,
reset_service_callback_api,
get_service_callback,
get_service_delivery_status_callback_api_for_service)
from app.models import ServiceCallback, NOTIFICATION_FAILED, NOTIFICATION_TEMPORARY_FAILURE, \
NOTIFICATION_PERMANENT_FAILURE, NOTIFICATION_STATUS_TYPES_COMPLETED, NOTIFICATION_SENT, NOTIFICATION_DELIVERED, \
WEBHOOK_CHANNEL_TYPE
from app.schemas import service_callback_api_schema
from tests.app.db import create_service_callback_api
def test_save_service_callback_api(sample_service):
notification_statuses = [NOTIFICATION_FAILED]
service_callback_api = ServiceCallback( # nosec
service_id=sample_service.id,
url="https://some_service/callback_endpoint",
bearer_token="<PASSWORD>",
updated_by_id=sample_service.users[0].id,
notification_statuses=notification_statuses,
callback_channel=WEBHOOK_CHANNEL_TYPE
)
save_service_callback_api(service_callback_api)
results = ServiceCallback.query.all()
assert len(results) == 1
callback_api = results[0]
assert callback_api.id is not None
assert callback_api.service_id == sample_service.id
assert callback_api.updated_by_id == sample_service.users[0].id
assert callback_api.url == "https://some_service/callback_endpoint"
assert callback_api.bearer_token == "<PASSWORD>"
assert callback_api._bearer_token != "some_unique_string"
assert callback_api.updated_at is None
assert callback_api.notification_statuses == notification_statuses
versioned = ServiceCallback.get_history_model().query.filter_by(id=callback_api.id).one()
assert versioned.id == callback_api.id
assert versioned.service_id == sample_service.id
assert versioned.updated_by_id == sample_service.users[0].id
assert versioned.url == "https://some_service/callback_endpoint"
assert encryption.decrypt(versioned._bearer_token) == "some_unique_string"
assert versioned.updated_at is None
assert versioned.version == 1
def test_save_service_callback_api_fails_if_service_does_not_exist(notify_db, notify_db_session):
notification_statuses = [NOTIFICATION_FAILED]
service_callback_api = ServiceCallback( # nosec
service_id=uuid.uuid4(),
url="https://some_service/callback_endpoint",
bearer_token="<PASSWORD>",
updated_by_id=uuid.uuid4(),
notification_statuses=str(notification_statuses),
callback_channel=WEBHOOK_CHANNEL_TYPE
)
with pytest.raises(SQLAlchemyError):
save_service_callback_api(service_callback_api)
def test_update_service_callback_api_unique_constraint(sample_service):
notification_statuses = [NOTIFICATION_FAILED]
service_callback_api = ServiceCallback( # nosec
service_id=sample_service.id,
url="https://some_service/callback_endpoint",
bearer_token="<PASSWORD>",
updated_by_id=sample_service.users[0].id,
callback_type='delivery_status',
notification_statuses=str(notification_statuses),
callback_channel=WEBHOOK_CHANNEL_TYPE
)
save_service_callback_api(service_callback_api)
another = ServiceCallback( # nosec
service_id=sample_service.id,
url="https://some_service/another_callback_endpoint",
bearer_token="<PASSWORD>",
updated_by_id=sample_service.users[0].id,
callback_type='delivery_status',
notification_statuses=str(notification_statuses),
callback_channel=WEBHOOK_CHANNEL_TYPE
)
with pytest.raises(expected_exception=SQLAlchemyError):
save_service_callback_api(another)
def test_update_service_callback_can_add_two_api_of_different_types(sample_service):
notification_statuses = [NOTIFICATION_FAILED]
delivery_status = ServiceCallback( # nosec
service_id=sample_service.id,
url="https://some_service/callback_endpoint",
bearer_token="<PASSWORD>",
updated_by_id=sample_service.users[0].id,
callback_type='delivery_status',
notification_statuses=str(notification_statuses),
callback_channel=WEBHOOK_CHANNEL_TYPE
)
save_service_callback_api(delivery_status)
complaint = ServiceCallback( # nosec
service_id=sample_service.id,
url="https://some_service/another_callback_endpoint",
bearer_token="<PASSWORD>",
updated_by_id=sample_service.users[0].id,
callback_type='complaint',
callback_channel=WEBHOOK_CHANNEL_TYPE
)
save_service_callback_api(complaint)
results = ServiceCallback.query.order_by(ServiceCallback.callback_type).all()
assert len(results) == 2
results0_dump = service_callback_api_schema.dump(results[0]).data
results1_dump = service_callback_api_schema.dump(results[1]).data
assert results0_dump == service_callback_api_schema.dump(complaint).data
assert results1_dump == service_callback_api_schema.dump(delivery_status).data
def test_update_service_callback_api(sample_service):
notification_statuses = [NOTIFICATION_FAILED]
service_callback_api = ServiceCallback( # nosec
service_id=sample_service.id,
url="https://some_service/callback_endpoint",
bearer_token="<PASSWORD>",
updated_by_id=sample_service.users[0].id,
notification_statuses=str(notification_statuses),
callback_channel=WEBHOOK_CHANNEL_TYPE
)
save_service_callback_api(service_callback_api)
results = ServiceCallback.query.all()
assert len(results) == 1
saved_callback_api = results[0]
reset_service_callback_api(saved_callback_api, updated_by_id=sample_service.users[0].id,
url="https://some_service/changed_url")
updated_results = ServiceCallback.query.all()
assert len(updated_results) == 1
updated = updated_results[0]
assert updated.id is not None
assert updated.service_id == sample_service.id
assert updated.updated_by_id == sample_service.users[0].id
assert updated.url == "https://some_service/changed_url"
assert updated.bearer_token == "<PASSWORD>"
assert updated._bearer_token != "<PASSWORD>"
assert updated.updated_at is not None
versioned_results = ServiceCallback.get_history_model().query.filter_by(id=saved_callback_api.id).all()
assert len(versioned_results) == 2
for x in versioned_results:
if x.version == 1:
assert x.url == "https://some_service/callback_endpoint"
assert not x.updated_at
elif x.version == 2:
assert x.url == "https://some_service/changed_url"
assert x.updated_at
else:
pytest.fail("version should not exist")
assert x.id is not None
assert x.service_id == sample_service.id
assert x.updated_by_id == sample_service.users[0].id
assert encryption.decrypt(x._bearer_token) == "some_unique_string"
def test_get_service_callback_api(sample_service):
notification_statuses = [NOTIFICATION_FAILED]
service_callback_api = ServiceCallback( # nosec
service_id=sample_service.id,
url="https://some_service/callback_endpoint",
bearer_token="some_unique_string",
updated_by_id=sample_service.users[0].id,
notification_statuses=notification_statuses,
callback_channel=WEBHOOK_CHANNEL_TYPE
)
save_service_callback_api(service_callback_api)
callback_api = get_service_callback(service_callback_api.id)
assert callback_api.id is not None
assert callback_api.service_id == sample_service.id
assert callback_api.updated_by_id == sample_service.users[0].id
assert callback_api.url == "https://some_service/callback_endpoint"
assert callback_api.bearer_token == "<PASSWORD>"
assert callback_api._bearer_token != "some_<PASSWORD>"
assert callback_api.updated_at is None
def test_get_service_delivery_status_callback_api_for_service(sample_service):
    """Fetching the delivery-status callback for a service returns the saved row unchanged."""
    service_callback_api = create_service_callback_api(service=sample_service)
    result = get_service_delivery_status_callback_api_for_service(sample_service.id, 'delivered')
    assert result.id == service_callback_api.id
    assert result.url == service_callback_api.url
    assert result.bearer_token == service_callback_api.bearer_token
    assert result.created_at == service_callback_api.created_at
    assert result.updated_at == service_callback_api.updated_at
    assert result.updated_by_id == service_callback_api.updated_by_id
@pytest.mark.parametrize('notification_statuses', [
    [NOTIFICATION_FAILED],
    [NOTIFICATION_PERMANENT_FAILURE, NOTIFICATION_FAILED, NOTIFICATION_TEMPORARY_FAILURE],
    [NOTIFICATION_PERMANENT_FAILURE, NOTIFICATION_FAILED],
])
def test_existing_service_delivery_status_callback_api_by_status(sample_service, notification_statuses):
    """The saved callback is returned for every status it was registered with."""
    service_callback_api = create_service_callback_api(
        service=sample_service, notification_statuses=notification_statuses
    )
    # Each registered status must resolve to the same callback row.
    for notification_status in notification_statuses:
        result = get_service_delivery_status_callback_api_for_service(
            sample_service.id,
            notification_status=notification_status
        )
        assert result.id == service_callback_api.id
        assert result.url == service_callback_api.url
        assert result.bearer_token == service_callback_api.bearer_token
        assert result.created_at == service_callback_api.created_at
        assert result.updated_at == service_callback_api.updated_at
        assert result.updated_by_id == service_callback_api.updated_by_id
@pytest.mark.parametrize('saved_notification_statuses, query_notification_statuses', [
    (
        [NOTIFICATION_FAILED],
        list(filter(lambda status: status != NOTIFICATION_FAILED, NOTIFICATION_STATUS_TYPES_COMPLETED))
    ),
    (
        [NOTIFICATION_SENT, NOTIFICATION_DELIVERED],
        [NOTIFICATION_PERMANENT_FAILURE, NOTIFICATION_TEMPORARY_FAILURE, NOTIFICATION_FAILED]
    ),
    (
        [NOTIFICATION_PERMANENT_FAILURE, NOTIFICATION_FAILED],
        [NOTIFICATION_SENT, NOTIFICATION_DELIVERED]
    ),
])
def test_no_service_delivery_status_callback_api_by_status(
    sample_service, saved_notification_statuses, query_notification_statuses
):
    """No callback is returned for statuses the callback was NOT registered with."""
    create_service_callback_api(
        service=sample_service, notification_statuses=saved_notification_statuses
    )
    for notification_status in query_notification_statuses:
        result = get_service_delivery_status_callback_api_for_service(
            sample_service.id,
            notification_status=notification_status
        )
        assert result is None
| StarcoderdataPython |
3364377 | from django import forms
from .models import Venda
class VendaForm(forms.ModelForm):
    """ModelForm for creating/editing a Venda (sale) record.

    Overrides ``data_venda`` to render an HTML5 date picker and
    ``preco`` to constrain the price precision.
    """

    class Meta:
        model = Venda
        fields = (
            'cliente',
            'funcionario',
            'animal',
            'data_venda',
            'preco'
        )

    class DateInput(forms.DateInput):
        # Use the browser-native date widget instead of a plain text input.
        input_type = "date"

    # Sale date, rendered with the native date picker defined above.
    data_venda = forms.DateField(
        label="Data da Venda",
        widget=DateInput()
    )
    # Price: up to 6 digits total with 2 decimal places (max 9999.99).
    preco = forms.DecimalField(
        label="Preço",
        max_digits=6,
        decimal_places=2
    )
82509 | <reponame>luluci/util7z
import re
import sys
from unpacker import unpacker
from cli import cli
class unpacker_hoge(unpacker):
    """Concrete unpacker that derives candidate passwords from the archive
    file name and from the newest creation timestamp seen in the archive."""

    def make_pw(self) -> None:
        """Build the candidate decryption passwords.

        (Original docstring: 復号用パスワードを設定する — "set the
        decryption password".)
        """
        pw: str = ""
        # Candidate 1: a 6-digit group from the archive base name.
        # The loop keeps only the LAST match; if nothing matches, an
        # empty string is appended as a candidate.
        re_results = re.findall("\d{0,2}(\d{6})", self._archive_basename)
        for result in re_results:
            pw = result
        self._pw.append(pw)
        # Candidate 2: the latest creation time formatted as YYMMDD.
        pw = self._latest_create_time.strftime("%y%m%d")
        self._pw.append(pw)

    def make_pw_manual(self) -> bool:
        """Prompt the user for a password on stdin.

        Returns False on empty input (caller should stop retrying),
        True after replacing the candidate list with the typed password.
        """
        pw_str = input()
        if pw_str == "":
            return False
        else:
            self._pw = [pw_str]
            return True

    def make_instance(self, path: str):
        # NOTE(review): presumably a factory hook the base unpacker uses to
        # recurse into nested archives — confirm against the base class.
        return unpacker_hoge(path)
def main():
    """CLI entry point: unpack the archive named on the command line.

    Always returns 0; any error is printed rather than propagated.
    """
    try:
        command = cli("unpack")
        if command.enable():
            unpacker_hoge(command.archive_file).exec()
        return 0
    except Exception as error:
        print(error)
    return 0
if __name__ == "__main__":
    # Propagate main()'s status code as the process exit code.
    sys.exit(main())
| StarcoderdataPython |
1769794 | from django.urls import path
from .views import groups, students, enums, reference
# URL namespace for this app (used when reversing, e.g. "system:...").
app_name = "system"

urlpatterns = [
    # Groups (original comment: Группы)
    path("groups/", groups.AcademicGroupList.as_view()),
    path("groups/<int:pk>/", groups.AcademicGroupDetail.as_view()),
    # Students (original comment: Студенты)
    path("students/<int:pk>/", students.StudentDetail.as_view()),
    # Reference data (original comment: Справочники)
    path("nationality/", reference.NationalityList.as_view()),
    path("nationality/<int:pk>/", reference.NationalityDetail.as_view()),
    path("native-language/", reference.NativeLanguageList.as_view()),
    path(
        "native-language/<int:pk>/", reference.NativeLanguageDetail.as_view()
    ),
    path("citizenship/", reference.CitizenshipList.as_view()),
    path("citizenship/<int:pk>/", reference.CitizenshipDetail.as_view()),
    path("training-direction/", reference.TrainingDirectionList.as_view()),
    path(
        "training-direction/<int:pk>/",
        reference.TrainingDirectionDetail.as_view(),
    ),
    # Enumerations (original comment: Перечесления)
    path("form-training/", enums.FormTrainingListView.as_view()),
    path("payment-training/", enums.PaymentTrainingListView.as_view()),
    path("type-training/", enums.TypeTrainingListView.as_view()),
    path("active-group/", enums.ActiveGroupListView.as_view()),
    path("disability-group/", enums.DisabilityGroupListView.as_view()),
    path("gender/", enums.GenderListView.as_view()),
]
# Initial code in JAVA has copyright © 2002–2015, <NAME> and <NAME>.
# A symbol table implemented using a left-leaning red-black BST.
# This is the 2-3 version.

# Link colors: a RED link binds two nodes into a conceptual 2-3 tree 3-node.
RED = True
BLACK = False


class Node(object):
    """A single BST node holding one key/value pair.

    ``color`` is the color of the link from this node's parent and
    ``count`` is the number of nodes in the subtree rooted here.
    """

    def __init__(self, key, value, color, count):
        self.key = key
        self.val = value          # associated data (read back via get())
        self.left = None          # left subtree (keys < self.key)
        self.right = None         # right subtree (keys > self.key)
        self.color = color        # color of parent link
        self.count = count        # subtree node count


# noinspection PyPep8Naming
class RedBlackBST(object):
    """Ordered symbol table backed by a left-leaning red-black BST.

    Supports put/get/contains/delete/delete_min/deleteMax with
    guaranteed O(log n) tree height.
    """

    def __init__(self):
        self.root = None

    def size(self):
        """Number of key/value pairs in the table."""
        return self._size(self.root)

    def is_empty(self):
        return self.root is None

    # ------------------------------------------------------------------
    # Node helper methods
    # ------------------------------------------------------------------
    @staticmethod
    def isRed(node):
        """True if the link to *node* is red; None links count as black."""
        if node is None:
            return False
        return node.color == RED

    @staticmethod
    def _size(node):
        """Number of nodes in the subtree rooted at *node*; 0 if None."""
        if node is None:
            return 0
        return node.count

    @staticmethod
    def _min(node):
        """Leftmost (minimum-key) node of the subtree rooted at *node*."""
        while node.left is not None:
            node = node.left
        return node

    # ------------------------------------------------------------------
    # Lookup
    # ------------------------------------------------------------------
    def get(self, key):
        """Return the value associated with *key*, or None if absent."""
        if key is None:
            return None
        return self._get(self.root, key)

    @staticmethod
    def _get(node, key):
        # Plain BST search; link colors are irrelevant for lookups.
        while node is not None:
            if key < node.key:
                node = node.left
            elif key > node.key:
                node = node.right
            else:
                return node.val
        return None

    def contains(self, key):
        return self.get(key) is not None

    # ------------------------------------------------------------------
    # Insertion
    # ------------------------------------------------------------------
    def put(self, key, val):
        """Insert *key*/*val*, overwriting any existing value.

        A value of None deletes the key (mirrors the Java original).
        Raises ValueError for a None key.
        """
        if key is None:
            raise ValueError("first argument to put() is null")
        if val is None:
            self.delete(key)
            return
        self.root = self._put(self.root, key, val)
        # BUG FIX: was `root.color = BLACK` (NameError on bare `root`).
        # The root link must always be black.
        self.root.color = BLACK

    def _put(self, node, key, val):
        # Insert the key-value pair in the subtree rooted at *node*.
        if node is None:
            return Node(key, val, RED, 1)
        if key < node.key:
            node.left = self._put(node.left, key, val)
        elif key > node.key:
            node.right = self._put(node.right, key, val)
        else:
            node.val = val
        # Fix up any right-leaning links on the way back up.
        if self.isRed(node.right) and not self.isRed(node.left):
            node = self.rotateLeft(node)
        if self.isRed(node.left) and self.isRed(node.left.left):
            node = self.rotateRight(node)
        if self.isRed(node.left) and self.isRed(node.right):
            self.flipColors(node)
        node.count = self._size(node.left) + self._size(node.right) + 1
        return node

    # ------------------------------------------------------------------
    # Deletion
    # ------------------------------------------------------------------
    def delete_min(self):
        """Remove the smallest key and its associated value."""
        if self.is_empty():
            raise Exception("BST is empty")
        # If both children of root are black, set root to red so the
        # recursive helper can always push a red link downward.
        if not self.isRed(self.root.left) and not self.isRed(self.root.right):
            self.root.color = RED
        self.root = self._delete_min(self.root)
        if not self.is_empty():
            self.root.color = BLACK

    def _delete_min(self, node):
        # Delete the key-value pair with the minimum key rooted at *node*.
        if node.left is None:
            return None
        if not self.isRed(node.left) and not self.isRed(node.left.left):
            node = self.moveRedLeft(node)
        node.left = self._delete_min(node.left)
        return self.balance(node)

    def deleteMax(self):
        """Remove the largest key and its associated value."""
        if self.is_empty():
            raise Exception("BST is empty")
        if not self.isRed(self.root.left) and not self.isRed(self.root.right):
            self.root.color = RED
        self.root = self._deleteMax(self.root)
        if not self.is_empty():
            self.root.color = BLACK

    def _deleteMax(self, node):
        # Delete the key-value pair with the maximum key rooted at *node*.
        # BUG FIX: was `self._is_red(node.left)` — no such method exists.
        if self.isRed(node.left):
            node = self.rotateRight(node)
        if node.right is None:
            return None
        if not self.isRed(node.right) and not self.isRed(node.right.left):
            node = self.moveRedRight(node)
        node.right = self._deleteMax(node.right)
        return self.balance(node)

    def delete(self, key):
        """Remove *key* and its associated value if present."""
        if key is None:
            raise ValueError("argument to delete() is null")
        if not self.contains(key):
            return
        if not self.isRed(self.root.left) and not self.isRed(self.root.right):
            self.root.color = RED
        self.root = self._delete(self.root, key)
        # BUG FIX: was `self.isEmpty()` and bare `root` (wrong names).
        if not self.is_empty():
            self.root.color = BLACK

    def _delete(self, node, key):
        # Delete the key-value pair with the given key rooted at *node*.
        # Precondition (guaranteed by the contains() guard): key is present.
        if key < node.key:
            if not self.isRed(node.left) and not self.isRed(node.left.left):
                # BUG FIX: result was assigned to `h` and then dropped.
                node = self.moveRedLeft(node)
            node.left = self._delete(node.left, key)
        else:
            if self.isRed(node.left):
                node = self.rotateRight(node)
            if key == node.key and node.right is None:
                return None
            if not self.isRed(node.right) and not self.isRed(node.right.left):
                node = self.moveRedRight(node)
            if key == node.key:
                # Replace this node's entry with its in-order successor,
                # then delete the successor from the right subtree.
                # BUG FIX: was builtin `min(node.right)` applied to a Node
                # and a call to nonexistent `self._deleteMin`.
                successor = self._min(node.right)
                node.key = successor.key
                node.val = successor.val
                node.right = self._delete_min(node.right)
            else:
                node.right = self._delete(node.right, key)
        return self.balance(node)

    # ------------------------------------------------------------------
    # Red-black tree restructuring helpers
    # ------------------------------------------------------------------
    def rotateRight(self, node):
        # Make a left-leaning link lean to the right.
        x = node.left
        node.left = x.right
        x.right = node
        x.color = x.right.color
        x.right.color = RED
        x.count = node.count
        # BUG FIX: was `self.size(node.left)` — size() takes no node argument.
        node.count = self._size(node.left) + self._size(node.right) + 1
        return x

    def rotateLeft(self, node):
        # Make a right-leaning link lean to the left.
        x = node.right
        node.right = x.left
        x.left = node
        x.color = x.left.color
        x.left.color = RED
        x.count = node.count
        node.count = self._size(node.left) + self._size(node.right) + 1
        return x

    def flipColors(self, node):
        # Flip the colors of a node and its two children.
        node.color = not node.color
        node.left.color = not node.left.color
        node.right.color = not node.right.color

    def moveRedLeft(self, node):
        # Assuming node is red and both node.left and node.left.left are
        # black, make node.left or one of its children red.
        self.flipColors(node)
        if self.isRed(node.right.left):
            node.right = self.rotateRight(node.right)
            node = self.rotateLeft(node)
            self.flipColors(node)
        return node

    def moveRedRight(self, node):
        # Assuming node is red and both node.right and node.right.left are
        # black, make node.right or one of its children red.
        self.flipColors(node)
        if self.isRed(node.left.left):
            node = self.rotateRight(node)
            self.flipColors(node)
        return node

    def balance(self, node):
        # Restore the red-black tree invariant on the way back up.
        if self.isRed(node.right):
            node = self.rotateLeft(node)
        if self.isRed(node.left) and self.isRed(node.left.left):
            node = self.rotateRight(node)
        if self.isRed(node.left) and self.isRed(node.right):
            self.flipColors(node)
        node.count = self._size(node.left) + self._size(node.right) + 1
        return node
#
# public class RedBlackBST<Key extends Comparable<Key>, Value> {
#
# /***************************************************************************
# * Utility functions.
# ***************************************************************************/
#
# /**
# * Returns the height of the BST (for debugging).
# * @return the height of the BST (a 1-node tree has height 0)
# */
# public int height() {
# return height(root);
# }
# private int height(Node x) {
# if (x == null) return -1;
# return 1 + Math.max(height(x.left), height(x.right));
# }
#
# /***************************************************************************
# * Ordered symbol table methods.
# ***************************************************************************/
#
# /**
# * Returns the smallest key in the symbol table.
# * @return the smallest key in the symbol table
# * @throws NoSuchElementException if the symbol table is empty
# */
# public Key min() {
# if (isEmpty()) throw new NoSuchElementException("called min() with empty symbol table");
# return min(root).key;
# }
#
# // the smallest key in subtree rooted at x; null if no such key
# private Node min(Node x) {
# // assert x != null;
# if (x.left == null) return x;
# else return min(x.left);
# }
#
# /**
# * Returns the largest key in the symbol table.
# * @return the largest key in the symbol table
# * @throws NoSuchElementException if the symbol table is empty
# */
# public Key max() {
# if (isEmpty()) throw new NoSuchElementException("called max() with empty symbol table");
# return max(root).key;
# }
#
# // the largest key in the subtree rooted at x; null if no such key
# private Node max(Node x) {
# // assert x != null;
# if (x.right == null) return x;
# else return max(x.right);
# }
#
#
# /**
# * Returns the largest key in the symbol table less than or equal to <tt>key</tt>.
# * @param key the key
# * @return the largest key in the symbol table less than or equal to <tt>key</tt>
# * @throws NoSuchElementException if there is no such key
# * @throws NullPointerException if <tt>key</tt> is <tt>null</tt>
# */
# public Key floor(Key key) {
# if (key == null) throw new NullPointerException("argument to floor() is null");
# if (isEmpty()) throw new NoSuchElementException("called floor() with empty symbol table");
# Node x = floor(root, key);
# if (x == null) return null;
# else return x.key;
# }
#
# // the largest key in the subtree rooted at x less than or equal to the given key
# private Node floor(Node x, Key key) {
# if (x == null) return null;
# int cmp = key.compareTo(x.key);
# if (cmp == 0) return x;
# if (cmp < 0) return floor(x.left, key);
# Node t = floor(x.right, key);
# if (t != null) return t;
# else return x;
# }
#
# /**
# * Returns the smallest key in the symbol table greater than or equal to <tt>key</tt>.
# * @param key the key
# * @return the smallest key in the symbol table greater than or equal to <tt>key</tt>
# * @throws NoSuchElementException if there is no such key
# * @throws NullPointerException if <tt>key</tt> is <tt>null</tt>
# */
# public Key ceiling(Key key) {
# if (key == null) throw new NullPointerException("argument to ceiling() is null");
# if (isEmpty()) throw new NoSuchElementException("called ceiling() with empty symbol table");
# Node x = ceiling(root, key);
# if (x == null) return null;
# else return x.key;
# }
#
# // the smallest key in the subtree rooted at x greater than or equal to the given key
# private Node ceiling(Node x, Key key) {
# if (x == null) return null;
# int cmp = key.compareTo(x.key);
# if (cmp == 0) return x;
# if (cmp > 0) return ceiling(x.right, key);
# Node t = ceiling(x.left, key);
# if (t != null) return t;
# else return x;
# }
#
# /**
# * Return the kth smallest key in the symbol table.
# * @param k the order statistic
# * @return the kth smallest key in the symbol table
# * @throws IllegalArgumentException unless <tt>k</tt> is between 0 and
# * <em>N</em> − 1
# */
# public Key select(int k) {
# if (k < 0 || k >= size()) throw new IllegalArgumentException();
# Node x = select(root, k);
# return x.key;
# }
#
# // the key of rank k in the subtree rooted at x
# private Node select(Node x, int k) {
# // assert x != null;
# // assert k >= 0 && k < size(x);
# int t = size(x.left);
# if (t > k) return select(x.left, k);
# else if (t < k) return select(x.right, k-t-1);
# else return x;
# }
#
# /**
# * Return the number of keys in the symbol table strictly less than <tt>key</tt>.
# * @param key the key
# * @return the number of keys in the symbol table strictly less than <tt>key</tt>
# * @throws NullPointerException if <tt>key</tt> is <tt>null</tt>
# */
# public int rank(Key key) {
# if (key == null) throw new NullPointerException("argument to rank() is null");
# return rank(key, root);
# }
#
# // number of keys less than key in the subtree rooted at x
# private int rank(Key key, Node x) {
# if (x == null) return 0;
# int cmp = key.compareTo(x.key);
# if (cmp < 0) return rank(key, x.left);
# else if (cmp > 0) return 1 + size(x.left) + rank(key, x.right);
# else return size(x.left);
# }
#
# /***************************************************************************
# * Range count and range search.
# ***************************************************************************/
#
# /**
# * Returns all keys in the symbol table as an <tt>Iterable</tt>.
# * To iterate over all of the keys in the symbol table named <tt>st</tt>,
# * use the foreach notation: <tt>for (Key key : st.keys())</tt>.
# * @return all keys in the sybol table as an <tt>Iterable</tt>
# */
# public Iterable<Key> keys() {
# if (isEmpty()) return new Queue<Key>();
# return keys(min(), max());
# }
#
# /**
# * Returns all keys in the symbol table in the given range,
# * as an <tt>Iterable</tt>.
# * @return all keys in the sybol table between <tt>lo</tt>
# * (inclusive) and <tt>hi</tt> (exclusive) as an <tt>Iterable</tt>
# * @throws NullPointerException if either <tt>lo</tt> or <tt>hi</tt>
# * is <tt>null</tt>
# */
# public Iterable<Key> keys(Key lo, Key hi) {
# if (lo == null) throw new NullPointerException("first argument to keys() is null");
# if (hi == null) throw new NullPointerException("second argument to keys() is null");
#
# Queue<Key> queue = new Queue<Key>();
# // if (isEmpty() || lo.compareTo(hi) > 0) return queue;
# keys(root, queue, lo, hi);
# return queue;
# }
#
# // add the keys between lo and hi in the subtree rooted at x
# // to the queue
# private void keys(Node x, Queue<Key> queue, Key lo, Key hi) {
# if (x == null) return;
# int cmplo = lo.compareTo(x.key);
# int cmphi = hi.compareTo(x.key);
# if (cmplo < 0) keys(x.left, queue, lo, hi);
# if (cmplo <= 0 && cmphi >= 0) queue.enqueue(x.key);
# if (cmphi > 0) keys(x.right, queue, lo, hi);
# }
#
# /**
# * Returns the number of keys in the symbol table in the given range.
# * @return the number of keys in the sybol table between <tt>lo</tt>
# * (inclusive) and <tt>hi</tt> (exclusive)
# * @throws NullPointerException if either <tt>lo</tt> or <tt>hi</tt>
# * is <tt>null</tt>
# */
# public int size(Key lo, Key hi) {
# if (lo == null) throw new NullPointerException("first argument to size() is null");
# if (hi == null) throw new NullPointerException("second argument to size() is null");
#
# if (lo.compareTo(hi) > 0) return 0;
# if (contains(hi)) return rank(hi) - rank(lo) + 1;
# else return rank(hi) - rank(lo);
# }
#
#
# /***************************************************************************
# * Check integrity of red-black tree data structure.
# ***************************************************************************/
# private boolean check() {
# if (!isBST()) StdOut.println("Not in symmetric order");
# if (!isSizeConsistent()) StdOut.println("Subtree counts not consistent");
# if (!isRankConsistent()) StdOut.println("Ranks not consistent");
# if (!is23()) StdOut.println("Not a 2-3 tree");
# if (!isBalanced()) StdOut.println("Not balanced");
# return isBST() && isSizeConsistent() && isRankConsistent() && is23() && isBalanced();
# }
#
# // does this binary tree satisfy symmetric order?
# // Note: this test also ensures that data structure is a binary tree since order is strict
# private boolean isBST() {
# return isBST(root, null, null);
# }
#
# // is the tree rooted at x a BST with all keys strictly between min and max
# // (if min or max is null, treat as empty constraint)
# // Credit: <NAME>'s elegant solution
# private boolean isBST(Node x, Key min, Key max) {
# if (x == null) return true;
# if (min != null && x.key.compareTo(min) <= 0) return false;
# if (max != null && x.key.compareTo(max) >= 0) return false;
# return isBST(x.left, min, x.key) && isBST(x.right, x.key, max);
# }
#
# // are the size fields correct?
# private boolean isSizeConsistent() { return isSizeConsistent(root); }
# private boolean isSizeConsistent(Node x) {
# if (x == null) return true;
# if (x.N != size(x.left) + size(x.right) + 1) return false;
# return isSizeConsistent(x.left) && isSizeConsistent(x.right);
# }
#
# // check that ranks are consistent
# private boolean isRankConsistent() {
# for (int i = 0; i < size(); i++)
# if (i != rank(select(i))) return false;
# for (Key key : keys())
# if (key.compareTo(select(rank(key))) != 0) return false;
# return true;
# }
#
# // Does the tree have no red right links, and at most one (left)
# // red links in a row on any path?
# private boolean is23() { return is23(root); }
# private boolean is23(Node x) {
# if (x == null) return true;
# if (isRed(x.right)) return false;
# if (x != root && isRed(x) && isRed(x.left))
# return false;
# return is23(x.left) && is23(x.right);
# }
#
# // do all paths from root to leaf have same number of black edges?
# private boolean isBalanced() {
# int black = 0; // number of black links on path from root to min
# Node x = root;
# while (x != null) {
# if (!isRed(x)) black++;
# x = x.left;
# }
# return isBalanced(root, black);
# }
#
# // does every path from the root to a leaf have the given number of black links?
# private boolean isBalanced(Node x, int black) {
# if (x == null) return black == 0;
# if (!isRed(x)) black--;
# return isBalanced(x.left, black) && isBalanced(x.right, black);
# }
#
#
# /**
# * Unit tests the <tt>RedBlackBST</tt> data type.
# */
# public static void main(String[] args) {
# RedBlackBST<String, Integer> st = new RedBlackBST<String, Integer>();
# for (int i = 0; !StdIn.isEmpty(); i++) {
# String key = StdIn.readString();
# st.put(key, i);
# }
# for (String s : st.keys())
# StdOut.println(s + " " + st.get(s));
# StdOut.println();
# }
# }
#
#
| StarcoderdataPython |
4814376 | import os
import tweepy
import openai
from pytz import timezone
from datetime import datetime, timedelta
class TweepyClient:
    """Thin wrapper around tweepy.Client using OAuth 1.0a user credentials."""

    def __init__(self, consumer_key, consumer_secret, access_token, access_token_secret):
        # Earlier bearer-token (app-only) attempts, kept for reference:
        # auth = tweepy.OAuth2BearerHandler(BEARER_TOKEN)
        # self.api = tweepy.API(auth)
        # self.client = tweepy.Client(bearer_token=BEARER_TOKEN)
        self.client = tweepy.Client(
            consumer_key=consumer_key, consumer_secret=consumer_secret,
            access_token=access_token, access_token_secret=access_token_secret
        )

    def make_a_tweet(self, msg):
        """Post *msg* as a tweet and print the resulting status URL."""
        # response = self.client.get_me()
        # print(response)
        response = self.client.create_tweet(text=msg)
        print(f"https://twitter.com/user/status/{response.data['id']}")
class OpenaiClient:
    """Wrapper around the OpenAI completion API that asks a few-shot Q&A
    prompt about tomorrow's events, seeded with today's date in US Pacific
    time."""

    def __init__(self, openai_key):
        openai.organization = "org-Ap7v6umG9OoBxoHFmUONp6ie"
        openai.api_key = openai_key
        # print( openai.Engine.list() )

    def QnA(self):
        """Return the model's free-text answer (first completion choice).

        NOTE(review): the exact leading whitespace inside the prompt string
        below is part of the prompt sent to the API — confirm it matches the
        original source.
        """
        today = datetime.now(timezone("America/Los_Angeles")).strftime('%B %d %Y')
        tomorrow = (datetime.now(timezone("America/Los_Angeles")) + timedelta(1)).strftime('%B %d %Y')
        response = openai.Completion.create(
            engine="text-davinci-001",
            prompt=f"""
            Q: Who is Batman?
            A: Batman is a fictional comic book character.
            Q: What is torsalplexity?
            A: ?
            Q: What is Devz9?
            A: ?
            Q: Who is <NAME>?\nA: <NAME> is American film director and producer famous for creating Star Wars.
            Q: What is the capital of California?
            A: Sacramento.
            Q: What orbits the Earth?
            A: The Moon.
            Q: Who is <NAME>?
            A: ?
            Q: Any major events happening in November to December 2022?
            A: The 2022 FIFA World Cup is scheduled to be the 22nd running of the FIFA World Cup competition, the quadrennial international men's football championship contested by the national teams of the member associations of FIFA. It is scheduled to take place in Qatar from 21 November to 18 December 2022.
            Q: Whats happening on febuary 14th 2022?
            A: Valentine's Day is celebrated on February 14, and we are ready to shower our significant others with love and tokens of our affection. Unlike National Boyfriend Day, this day isn't just for the boyfriends — anyone and everyone can be shown some love today.
            Q: What happened on september 11 2001?
            A: The September 11 attacks, also commonly referred to as 9/11, were a series of four coordinated terrorist attacks by the militant Islamist terrorist group al-Qaeda against the United States on Tuesday, September 11, 2001.
            Q: How many moons does Mars have?
            A: Two, Phobos and Deimos.
            Q:Today is {today} and tomrrow is {tomorrow}. Tell me what is happening tomorrow? Any predictions for cryptocurrency?
            A:""",
            temperature=0,
            max_tokens=60,
            top_p=1.0,
            frequency_penalty=0.0,
            presence_penalty=0.0
        )
        print(response)
        return response['choices'][0]['text']
| StarcoderdataPython |
1690267 | <reponame>searayeah/sublime-snippets
from sys import stdin
class MatrixError(Exception):
    """Signals a size mismatch between two matrices in a binary operation."""

    def __init__(self, matrix1, matrix2):
        # Keep both operands available for the handler.
        self.matrix1, self.matrix2 = matrix1, matrix2
class Matrix:
    """Simple numeric matrix stored as a list of row lists.

    ``lines`` and ``columns`` cache the dimensions; ``size()`` reports
    them as a (rows, columns) tuple.
    """

    def __init__(self, lst):
        # Deep-copy the rows so later mutation of the input does not
        # leak into (or out of) this matrix.
        self.lst = [line.copy() for line in lst]
        self.lines = len(lst)
        self.columns = len(lst[0])

    def __str__(self):
        # Tab-separated cells, newline-separated rows.
        return '\n'.join(['\t'.join(map(str, line)) for line in self.lst])

    def size(self):
        """Return the dimensions as a (rows, columns) tuple."""
        return (self.lines, self.columns)

    def __add__(self, other):
        """Element-wise sum; raises MatrixError on a size mismatch."""
        if self.size() != other.size():
            raise MatrixError(self, other)
        return Matrix([[x + y for x, y in zip(row1, row2)]
                       for row1, row2 in zip(self.lst, other.lst)])

    def __mul__(self, other):
        """Scalar multiplication (matrix * scalar)."""
        return Matrix([[x * other for x in row] for row in self.lst])

    # Scalar multiplication commutes, so scalar * matrix reuses __mul__.
    __rmul__ = __mul__

    def transpose(self):
        """Transpose this matrix in place and return self."""
        self.lst = list(map(list, zip(*self.lst)))
        # BUG FIX: the cached dimensions were not updated, so size()
        # reported stale values after an in-place transpose.
        self.lines, self.columns = self.columns, self.lines
        return self

    @staticmethod
    def transposed(matrix):
        """Return a new transposed copy without mutating *matrix*."""
        return Matrix(list(map(list, zip(*matrix.lst))))
# SECURITY: executes arbitrary code read from stdin (online-judge harness
# pattern). Never use with untrusted input.
exec(stdin.read())
| StarcoderdataPython |
1642242 | import pytest
from django.test import RequestFactory
from bmh_lims.database.api.views import SampleViewSet
from bmh_lims.database.models import Sample
pytestmark = pytest.mark.django_db
class TestSampleViewSet:
    """Unit tests for SampleViewSet."""

    def test_get_queryset(self, sample: Sample, rf: RequestFactory):
        """An existing Sample must appear in the viewset's queryset."""
        view = SampleViewSet()
        request = rf.get("/fake-url/")
        request.sample = sample
        # Attach the request manually instead of going through the router.
        view.request = request
        assert sample in view.get_queryset()
| StarcoderdataPython |
1737687 | <gh_stars>10-100
# --------------------------------------------------------------------------
#
# Copyright (c) Microsoft Corporation. All rights reserved.
#
# The MIT License (MIT)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the ""Software""), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
# --------------------------------------------------------------------------
from async_generator import yield_, async_generator
import unittest
import subprocess
import sys
import isodate
import tempfile
import json
from decimal import Decimal
from datetime import date, datetime, timedelta
import os
from os.path import dirname, pardir, join, realpath
from azure.core.exceptions import DecodeError
from bodynumber.aio import AutoRestNumberTestService
import pytest
@pytest.fixture
@async_generator
async def client():
    """Async fixture yielding an AutoRestNumberTestService bound to the
    local AutoRest test server, closed automatically after the test."""
    async with AutoRestNumberTestService(base_url="http://localhost:3000") as client:
        await yield_(client)
class TestNumber(object):
    """Round-trip tests for float/double/decimal serialization against the
    local AutoRest number test server: each test PUTs a value (the server
    validates it) and asserts the GET returns the same value."""

    @pytest.mark.asyncio
    async def test_big_float(self, client):
        await client.number.put_big_float(3.402823e+20)
        assert (await client.number.get_big_float()) == 3.402823e+20

    @pytest.mark.asyncio
    async def test_small_float(self, client):
        await client.number.put_small_float(3.402823e-20)
        assert (await client.number.get_small_float()) == 3.402823e-20

    @pytest.mark.asyncio
    async def test_big_double(self, client):
        await client.number.put_big_double(2.5976931e+101)
        assert (await client.number.get_big_double()) == 2.5976931e+101

    @pytest.mark.asyncio
    async def test_small_double(self, client):
        await client.number.put_small_double(2.5976931e-101)
        assert (await client.number.get_small_double()) == 2.5976931e-101

    @pytest.mark.asyncio
    async def test_big_double_negative_decimal(self, client):
        await client.number.put_big_double_negative_decimal()
        assert (await client.number.get_big_double_negative_decimal()) == -99999999.99

    @pytest.mark.asyncio
    async def test_big_double_positive_decimal(self, client):
        await client.number.put_big_double_positive_decimal()
        assert (await client.number.get_big_double_positive_decimal()) == 99999999.99

    @pytest.mark.asyncio
    async def test_big_decimal(self, client):
        await client.number.put_big_decimal(Decimal(2.5976931e+101))
        assert (await client.number.get_big_decimal()) == 2.5976931e+101

    @pytest.mark.asyncio
    async def test_small_decimal(self, client):
        await client.number.put_small_decimal(Decimal(2.5976931e-101))
        assert (await client.number.get_small_decimal()) == 2.5976931e-101

    @pytest.mark.asyncio
    async def test_get_big_decimal_negative_decimal(self, client):
        # NOTE(review): this test PUTs the *positive* decimal but GETs the
        # negative one (and the next test does the reverse) — looks swapped;
        # confirm against the test-server fixtures.
        await client.number.put_big_decimal_positive_decimal()
        assert (await client.number.get_big_decimal_negative_decimal()) == -99999999.99

    @pytest.mark.asyncio
    async def test_get_big_decimal_positive_decimal(self, client):
        await client.number.put_big_decimal_negative_decimal()
        assert (await client.number.get_big_decimal_positive_decimal()) == 99999999.99

    @pytest.mark.asyncio
    async def test_get_null(self, client):
        # Only checks that a null payload deserializes without raising.
        await client.number.get_null()

    @pytest.mark.asyncio
    async def test_get_invalid_decimal(self, client):
        # The server returns a malformed payload; deserialization must raise.
        with pytest.raises(DecodeError):
            await client.number.get_invalid_decimal()

    @pytest.mark.asyncio
    async def test_get_invalid_double(self, client):
        with pytest.raises(DecodeError):
            await client.number.get_invalid_double()

    @pytest.mark.asyncio
    async def test_get_invalid_float(self, client):
        with pytest.raises(DecodeError):
            await client.number.get_invalid_float()
| StarcoderdataPython |
1661685 | # -*- coding: utf-8 -*-
import sys
import webbrowser
import datetime
import os.path
from pathlib import Path
from . import utils
# -------------------------------------------
# Get today's date and the schedule file path (markdown file in $HOME).
now = datetime.datetime.now()
schedule_file = str(Path.home())+'/projectsSchedule.md'

# Expected command-line arguments:
# ARGS
# 1 OPTION
# 2 PROJECT
# 3 HOURS
# 4 DATE (OPTIONAL)
num_args = len(sys.argv)
# -------------------------------------------
def check_horas_file():
    """Create the schedule file if it does not exist yet."""
    if os.path.exists(schedule_file):
        return
    reset_horas()
# -------------------------------------------
def reset_horas():
    """Recreate the schedule file from scratch, localised to the OS language.

    Writes a markdown table header in Spanish when the OS language contains
    "es", otherwise in English.
    """
    with open(schedule_file, 'w', encoding='UTF-8') as file:
        if "es" in utils.get_OS().get('lang'):
            file.write('# Registro de Proyectos\n\n')
            file.write('|Proyecto|Horas|Dia|\n')
            file.write('|:------:|:---:|:-:|\n')
        else:
            file.write('# Project Schedule\n\n')
            file.write('|Project|Hours|Day|\n')
            file.write('|:-----:|:---:|:-:|\n')
    # Fix: dropped the redundant file.close() — the with-statement already
    # closes the file on exit.
# -------------------------------------------
def show_horas():
    """Open the schedule file in Chrome if available, else dump it to stdout.

    Falls back to printing the file when the registered Chrome browser
    cannot be used (``webbrowser.Error``).
    """
    chrome = utils.get_OS().get('chrome')
    try:
        # Fix: the old ``Path(chrome).is_dir()`` call only returned a bool
        # and never raised, so it was a no-op "existence check" — removed.
        # ``webbrowser.get`` raising ``webbrowser.Error`` is the real
        # fallback trigger.
        webbrowser.get(chrome).open('file:///' + schedule_file)
    except webbrowser.Error:
        print(utils.get_errors().get('chrome_error'))
        with open(schedule_file, 'r', encoding='UTF-8') as file:
            for line in file.readlines():
                print(line)
    # Fix: dropped the trailing ``except: raise Exception()`` — it discarded
    # the original exception type/traceback; main() already converts any
    # failure into the help message.
# -------------------------------------------
def new_horas():
    """Append a schedule row built from argv: PROJECT HOURS [DD/MM].

    With five CLI args the given date is used; with four, today's date.
    Any other arity raises so main() shows the help message.
    """
    if num_args == 5:
        nueva_fila = f'|{sys.argv[2]}|{sys.argv[3]}h|{sys.argv[4]}|\n'
    elif num_args == 4:
        nueva_fila = f'|{sys.argv[2]}|{sys.argv[3]}h|{now.strftime("%d/%m")}|\n'
    else:
        raise Exception()
    with open(schedule_file, 'a', encoding='UTF-8') as file:
        file.write(nueva_fila)
    # Fix: dropped the redundant file.close(); the with-statement closes it.
# -------------------------------------------
def delete_last_horas():
    """Remove the last row of the schedule file."""
    with open(schedule_file, 'r', encoding='UTF-8') as file:
        lines = file.readlines()
    # Rewrite everything but the final line; writelines replaces the manual
    # loop and the with-statement makes both explicit close() calls redundant.
    with open(schedule_file, 'w', encoding='UTF-8') as file:
        file.writelines(lines[:-1])
# -------------------------------------------
def file_directory():
    """*ONLY IN WINDOWS* Open the directory that holds the schedule file."""
    if not utils.get_OS().get('explorer'):
        raise Exception()
    os.startfile(Path.home())
# -------------------------------------------
def open_with_code():
    """*ONLY IN WINDOWS* Open the schedule file with VSCode."""
    if not utils.get_OS().get('explorer'):
        raise Exception()
    # NOTE(review): a home path containing spaces would break this shell
    # command — confirm whether quoting is needed for target users.
    os.system('code '+str(Path.home())+'\\projectsSchedule.md')
# -------------------------------------------
def main():
    """Ensure the schedule file exists, then dispatch on the CLI flag."""
    check_horas_file()
    actions = {
        '-s': show_horas, '--show': show_horas,
        '-r': reset_horas, '--reset': reset_horas,
        '-n': new_horas, '--new': new_horas,
        '-d': delete_last_horas, '--delete': delete_last_horas,
        '-f': file_directory, '--file': file_directory,
        '-c': open_with_code, '--code': open_with_code,
    }
    try:
        # Missing flag (IndexError), unknown flag (KeyError) and any action
        # failure all fall through to the help message, matching the
        # original if/elif chain that raised on an unknown option.
        actions[sys.argv[1]]()
    except:
        print(utils.get_errors().get('help_msg'))
# -------------------------------------------
if __name__ == '__main__':
    main()  # run the CLI only when executed directly
| StarcoderdataPython |
4827651 | import src.tnet as tnet
import src.CARS as cars
import numpy as np
import copy
from src.utils import *
import matplotlib as mpl
from matplotlib import rc
import matplotlib.pyplot as plt
rc('font',**{'family':'sans-serif','sans-serif':['Helvetica']})
#rc('text', usetex=True)
# Load the EMA benchmark network; alternative networks are kept for reference.
netFile, gFile, fcoeffs, tstamp, dir_out = tnet.get_network_parameters('EMA', experiment_name='EMA_penRate_comparison-'+'REB')
#netFile, gFile, fcoeffs, tstamp, dir_out = tnet.get_network_parameters('NYC_Uber_small', experiment_name='NYC_Uber_small_penRate_comparison')
#netFile, gFile, fcoeffs, tstamp, dir_out = tnet.get_network_parameters('NYC_Uber_small_1', experiment_name='NYC_Uber_small_1_penRate_comparison-REB')
#netFile, gFile, fcoeffs, tstamp, dir_out = tnet.get_network_parameters('Anaheim', experiment_name='Anaheim_test_CARSn')
#netFile, gFile, flowFile, fcoeffs, tstamp, dir_out = tnet.get_network_parameters('Barcelons', experiment_name='Barcelona_buildNet')
#netFile, gFile, flowFile, fcoeffs, tstamp, dir_out = tnet.get_network_parameters('ChicagoSketch', experiment_name='ChicagoSketch')
#netFile, gFile, flowFile, fcoeffs, tstamp, dir_out = tnet.get_network_parameters('Sydeny', experiment_name='Sydeny')
# NOTE(review): the linspace sweep is immediately overwritten — only the
# unit demand multiplier is actually run.
demand_multiplier = list(np.linspace(0.8,1.8,2))
demand_multiplier = [1]
'''
print('---- solving NLP problem to set up a base ---')
real_obj = []
for g_multi in demand_multiplier:
    tNet = tnet.tNet(netFile=netFile, gFile=gFile, fcoeffs=fcoeffs)
    tNet.build_supergraph(walk_multiplier=1)
    pedestrian = [(u, v) for (u, v, d) in tNet.G_supergraph.edges(data=True) if d['type'] == 'p']
    connector = [(u, v) for (u, v, d) in tNet.G_supergraph.edges(data=True) if d['type'] == 'f']
    g_per = tnet.perturbDemandConstant(tNet.g, g_multi)
    tNet.set_g(g_per)
    cars.solve_social_Julia(tNet, exogenous_G=False)
    print('\t solve for g_multiplier = ' + str(round(g_multi,2)))
    socialObj = tnet.get_totalTravelTime(tNet.G_supergraph, fcoeffs)
    real_obj.append(socialObj)
    print(socialObj)
'''
# CARS approximation orders to sweep (n = 2..5), solving each in both
# the linear (LP) and quadratic (QP) formulation.
n = [2+i for i in range(4)]
print("\ntestCars progressBar:")
progBar = progressBar(len(n)*2*len(demand_multiplier))
progBar.set()
CARS = {}
for i in n:
    CARS[i] = {}
    for g_multi in demand_multiplier:
        for linear in [True, False]:
            tNet = tnet.tNet(netFile=netFile, gFile=gFile, fcoeffs=fcoeffs)
            tNet.build_supergraph(walk_multiplier=1)
            pedestrian = [(u, v) for (u, v, d) in tNet.G_supergraph.edges(data=True) if d['type'] == 'p']
            connector = [(u, v) for (u, v, d) in tNet.G_supergraph.edges(data=True) if d['type'] == 'f']
            g_per = tnet.perturbDemandConstant(tNet.g, g_multi)
            tNet.set_g(g_per)
            tNet, runtime, od_flows = cars.solve_CARSn(tNet, fcoeffs=fcoeffs, n=i, exogenous_G=False, rebalancing=False, linear=linear, method=1)
            CARS2obj = tnet.get_totalTravelTime(tNet.G_supergraph, fcoeffs)
            # Percent deviation from a hard-coded baseline.  NOTE(review):
            # 1630.138... appears to be the NLP objective produced by the
            # commented-out run above — confirm before reusing on another net.
            CARS[i][linear] = (CARS2obj-1630.1380990494615)/1630.1380990494615*100
            progBar.tic()
            del tNet
# Plot LP vs QP deviation as a function of the approximation order n.
fig, ax = plt.subplots(figsize=(5,2))
ax.plot(n, [v[True] for k,v in CARS.items()], label = 'LP')
ax.plot(n, [v[False] for k,v in CARS.items()], label = 'QP')
ax.set_xlabel('n')
ax.set_ylabel('% deviation from NLP')
ax.set_xlim([n[0], n[-1]])
ax.legend(framealpha=1)
ax.grid(True)
#plt.tight_layout()
plt.show()
| StarcoderdataPython |
1687527 | """
Copyright 2020 The Magma Authors.
This source code is licensed under the BSD-style license found in the
LICENSE file in the root directory of this source tree.
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import logging
import os
from bcc import BPF # pylint:disable=import-error
from jinja2 import Template
from magma.common.job import Job
from magma.kernsnoopd.handlers import ebpf_handlers
# Prefer the deployed eBPF sources; fall back to the in-tree copies when the
# deployment directory is absent (e.g. during development or tests).
EBPF_SRC_DIR = "/var/opt/magma/ebpf/kernsnoopd/"
if not os.path.isdir(EBPF_SRC_DIR):
    EBPF_SRC_DIR = os.path.join(
        os.path.dirname(os.path.abspath(__file__)),
        'ebpf',
    )
# Shared header prepended to the concatenated eBPF sources before compilation.
EBPF_COMMON_FILE = 'common.bpf.h'
def _get_ebpf_source(filename, context) -> str:
    """Read a Jinja template from *filename* and render it with *context*.

    Args:
        filename: absolute path of the template file
        context: mapping of template parameter values

    Returns:
        The rendered source text.
    """
    with open(filename, 'r', encoding="utf-8") as src_f:
        template = Template(src_f.read())
    return template.render(context)
class NoSourcesFoundError(Exception):
    """
    Raised when Snooper finds no eBPF programs or source files to load
    into the kernel.
    """
class Snooper(Job):
    """
    Snooper is a Job that compiles and loads eBPF programs, registered relevant
    front-end programs as handlers, and periodically calls their handle methods
    """
    def __init__(
        self, programs: list, collect_interval: int,
        service_registry, service_loop,
    ):
        super().__init__(interval=collect_interval, loop=service_loop)
        self._bpf = None  # compiled BPF object once sources are loaded
        self._handlers = []  # front-end handlers, parallel to loaded programs
        self._loop = service_loop
        self._ebpf_programs = programs
        self._service_registry = service_registry
        # Template parameters substituted into the eBPF sources at render time.
        self._context = {
            'PROXY_PORT': service_registry.get_proxy_config().get(
                'local_port',
            ),
        }
        try:
            self._load_ebpf_programs()
            self.start()
        except NoSourcesFoundError:
            # Loading nothing is fatal for the job, but must not crash the
            # service: log and leave the job unstarted.
            logging.error('Fatal: no eBPF sources loaded')
    def _load_ebpf_programs(self) -> None:
        """
        _load_ebpf_programs reads eBPF templates from _ebpf_programs, renders
        them with context, compiles and loads them into kernel, and registers
        corresponding front-end handlers
        Raises:
            NoSourcesFoundError: self._ebpf_programs was empty or no source in
                self._ebpf_programs could be loaded
        """
        if not self._ebpf_programs:
            raise NoSourcesFoundError()
        sources = []
        for basename in self._ebpf_programs:
            filename = os.path.join(EBPF_SRC_DIR, f'{basename}.bpf.c')
            try:
                # A program is only registered when both its source renders
                # and a matching front-end handler exists.
                sources.append(_get_ebpf_source(filename, self._context))
                handler = ebpf_handlers[basename](self._service_registry)
                self._handlers.append(handler)
            except FileNotFoundError:
                logging.error('Could not open eBPF source file %s', filename)
            except KeyError:
                logging.error('Fatal: did not find handler for %s', basename)
        # found eBPF sources to load into kernel
        if sources:
            # find and prepend header
            header = os.path.join(EBPF_SRC_DIR, EBPF_COMMON_FILE)
            try:
                sources.insert(0, _get_ebpf_source(header, self._context))
                # All programs are compiled together as one concatenated blob.
                self._bpf = BPF(text='\n'.join(sources))
                logging.info('Loaded sources into kernel')
            except FileNotFoundError:
                logging.error('Fatal: Could not open header file %s', header)
        else:
            raise NoSourcesFoundError()
    async def _run(self) -> None:
        # Periodic tick: let every registered handler read its program's data.
        if self._bpf is not None:
            for handler in self._handlers:
                handler.handle(self._bpf)
| StarcoderdataPython |
122458 | # Learn about functions
from random import randint
from objects.mario import Mario
from objects.goomba import Goomba
from objects.bowser import Bowser
# Remember functions? We have so many print statements now, we're going to have the user press enter for each print!
def user_print(string):
    """Show *string* and pause until the user presses enter."""
    input(string)
# Start Mario with 100 coins, then roll a random coin cost for each door.
mario = Mario(100)
door1 = randint(0, 50)
door2 = randint(0, 75)
door3 = randint(0, 100)
choice = int(input("Door 1, door 2, door 3? "))
print("Entering door", choice)
if choice == 1:
    mario.coins = mario.coins - door1
    print("Took away", door1, "coins. You now have", mario.coins, "coins.")
    door4 = randint(0, 20)
    door5 = randint(0, 50)
    door6 = randint(0, 30)
    choice = int(input("Door 4, door 5, door 6? "))
    if choice == 4:
        user_print("A Goomba appeared behind the door!")
        goomba = Goomba()
        # A while loop allows fights to happen!
        while goomba.hp > 0 and mario.hp > 0:
            damage = goomba.attack()
            mario.hp = mario.hp - damage
            # The str function turns an int into a string. That way we can pass in one string into our function
            user_print("The Goomba took away " + str(damage) + " hp. You now have " + str(mario.hp) + " health!")
            if mario.hp > 0:
                damage = mario.attack()
                goomba.hp = goomba.hp - damage
                user_print("Mario attacked the goomba! Mario took away " + str(damage) + "hp. The Goomba now has " + str(goomba.hp) + " health!")
        # NOTE(review): the loop exits when either hp reaches 0 or below, but
        # this check only catches hp < 0 — if Mario ends at exactly 0 hp the
        # victory branch still runs. Presumably `<= 0` was intended; confirm.
        if mario.hp < 0:
            print("You lose! Try again!")
        else:
            print("The Goomba was defeated!")
            door7 = "bowser"
            door8 = "princess"
            user_print("You see one door ahead at the end of a long hallway. You slowly enter.")
            user_print("It's bowser! Go get 'em!")
            bowser = Bowser()
            while bowser.hp > 0 and mario.hp > 0:
                damage = bowser.attack()
                mario.hp = mario.hp - damage
                user_print("Bowser took away " + str(damage) + " hp. You now have " + str(mario.hp) + " health!")
                if mario.hp > 0:
                    damage = mario.attack()
                    bowser.hp = bowser.hp - damage
                    user_print("Mario attacked Bowser! Mario took away " + str(damage) + "hp. Bowser now has " + str(bowser.hp) + " health!")
            # NOTE(review): same hp < 0 vs <= 0 edge case as the Goomba fight.
            if mario.hp < 0:
                print("You lose! Try again!")
            else:
                print("You jumped over Bowser and cut the cable to the bridge he was standing on!")
                print("Bowser fell down a pit, never to be seen again!")
                print("Congratulations! You saved the princess!")
                print("Score:", mario.coins + mario.hp)
    # NOTE(review): door 5 subtracts door6's cost — looks like a typo for door5.
    if choice == 5:
        mario.coins = mario.coins - door6
        print("You fall through a trap door and get put in prison.")
    if choice == 6:
        mario.coins = mario.coins - door6
        print("You fall through a trap door and get put in prison.")
if choice == 2:
    print("Entered door 2")
    mario.coins = mario.coins - door2
    mario.hp = 0
    print("You trip and break your arm. You can't continue, and so you exit the castle until you heal")
if choice == 3:
    mario.coins = mario.coins - door3
    print("Took away", door3, "coins. You now have", mario.coins, "coins.")
    print("You step on a trap and get burned alive")
    mario.hp = 0
| StarcoderdataPython |
3234384 | import sys
from functools import reduce
import pandas as pd
import pymysql
from data_pipeline.dtype import dtype
from query_builder.core import InsertBuilder
from utils.logging import init_logger
from utils.s3_manager.manage import S3Manager
class Processor:
    """
    load public data from s3 origin bucket and check valid
    save to staging schema in rds and return pd DataFrame
    """
    def __init__(self, key):
        # `key` names both the S3 dataset directory and the RDS table.
        self.key = key
        self.logger = init_logger()
        # s3
        self.bucket_name = "production-bobsim"
        self.s3_key = "public_data/{dir}/origin".format(dir=self.key)
        # valid check
        self.dtypes = dtype[key]
        self.columns = list(self.dtypes.keys())
        # rdb
        self.schema_name = "public_data"
        self.table_name = self.key
        # return
        self.df = None
    def load(self):
        """
        init S3Manager instances and fetch objects
        :return: list of pd DataFrame (origin)
        """
        manager = S3Manager(bucket_name=self.bucket_name)
        df_list = manager.fetch_df_from_csv(key=self.s3_key)
        self.logger.info("{num} files is loaded".format(num=len(df_list)))
        self.logger.info("load df from origin bucket")
        return df_list
    def validate(self):
        # load the list of DataFrames
        df_list = self.load()
        # combine the list and check DataFrame type.
        def combine(accum, ele):
            """
            :return: pd DataFrame combined with df_list
            """
            tmp = ele[self.columns]
            return accum[self.columns].append(tmp)
        # NOTE(review): reduce() has no initializer, so with a single-element
        # df_list the lone frame is never column-subset by combine(); its
        # extra columns would survive into self.df — confirm if intended.
        tmp_df = reduce(combine, df_list)
        self.df = tmp_df.astype(dtype=self.dtypes)
    def save(self):
        """
        # TODO: catch error that query_builder raise
        ask how to handle pymysql.err.OperationalError by input_value size
        save validated data to RDS
        :return: success or fail (bool)
        """
        # replace pd.NA to None in order to save NULL to rds.
        tmp_df = self.df.replace({pd.NA: None}, inplace=False)
        # temporary head(2)
        input_value = tmp_df.apply(lambda x: tuple(x.values), axis=1)
        # Single-row frames pass a bare tuple instead of a 1-tuple of tuples.
        qb = InsertBuilder(
            schema_name=self.schema_name,
            table_name=self.table_name,
            value=tuple(input_value) if len(input_value) > 1 else input_value[0]
        )
        qb.execute()
    def execute(self):
        """
        :return: pandas DataFrame that load from s3 origin bucket
        """
        self.logger.info("start processing {key}".format(key=self.key))
        try:
            self.validate()
            # self.save()
        except KeyError:
            # Missing expected columns: abort the whole run.
            self.logger.critical("columns are not matched", exc_info=True)
            sys.exit()
        except pymysql.err.IntegrityError as e:
            # TODO: specify error
            self.logger.critical(e, exc_info=True)
            # sys.exit()
        self.logger.info("success processing {key}".format(key=self.key))
        return self.df
| StarcoderdataPython |
4833120 | """
WSGI config for website project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/howto/deployment/wsgi/
"""
import os
from socket import gethostbyname, gethostname
from django.core.wsgi import get_wsgi_application
# Pick the settings module from the host's resolved address: a loopback
# address means a local development run, anything else is production.
ipaddress = gethostbyname(gethostname())
settings_module = (
    'website.settings.development'
    if ipaddress.startswith('127.0')
    else 'website.settings.production'
)
os.environ.setdefault('DJANGO_SETTINGS_MODULE', settings_module)

application = get_wsgi_application()
| StarcoderdataPython |
123757 | <reponame>adgalad/Cash4Home
"""
Django settings for C4H project.
Generated by 'django-admin startproject' using Django 1.11.4.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
DEFAULT_DOMAIN = 'https://www.cash4home.io'
#DEFAULT_DOMAIN = 'http://0.0.0.0:8000/'
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = os.environ['SECRET_KEY']
USER_DB = os.environ['USER_DB']
PASSWORD_DB = os.environ['PASSWORD_DB']
NAME_DB = os.environ.get('NAME_DB','cashhom2_db')
# SECURITY WARNING: don't run with debug turned on in production!
# NOTE(review): DEBUG is True together with ALLOWED_HOSTS = ['*'] — both
# should be tightened before a production deployment.
DEBUG = True
ALLOWED_HOSTS = ['*']
LOGIN_URL = "/v1/login/"
OPERATION_TIMEOUT = 90 # minutes
EMAIL_VALIDATION_EXPIRATION = 60*3 # 3 hours
# Application definition
# Redirect to HTTPS
SECURE_SSL_REDIRECT = True
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'mathfilters',
    'app',
]
CRON_CLASSES = [
    "app.cron.UpdateBTCPrice",
    # ...
]
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
    'app.middleware.AutoLogout'
]
ROOT_URLCONF = 'C4H.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [os.path.join(BASE_DIR, 'templates')],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
WSGI_APPLICATION = 'C4H.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
# NOTE(review): this sqlite config is immediately overwritten by the
# Postgres config below — dead assignment kept for reference only.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql',
        'NAME': NAME_DB,
        'USER': USER_DB,
        # NOTE(review): "<PASSWORD>" is an anonymisation placeholder and is
        # not valid Python — presumably this should read PASSWORD_DB.
        'PASSWORD': <PASSWORD>,
        'PORT': '5432',
    }
}
# User substitution
# https://docs.djangoproject.com/en/1.11/topics/auth/customizing/#auth-custom-user
AUTH_USER_MODEL = 'app.User'
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    # {
    #     'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    # },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    # {
    #     'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    # },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Session timeout
SESSION_SERIALIZER = 'django.contrib.sessions.serializers.PickleSerializer'
AUTO_LOGOUT_DELAY = 5 # minutes
# SESSION_EXPIRE_AT_BROWSER_CLOSE = True
# SESSION_COOKIE_AGE = 10
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'es-ES'
TIME_ZONE = 'UTC'
USE_I18N = True
# USE_L10N = True
USE_TZ = True
DATE_FORMAT = 'Y/m/d'
DATETIME_FORMAT = 'Y/m/d H:i'
TIME_FORMAT = 'H:i'
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
# Static asset configuration
#BASE_DIR = os.path.dirname(os.path.abspath(__file__))
PROJECT_ROOT = os.path.dirname(os.path.abspath(__file__))
#STATIC_ROOT = os.path.join(PROJECT_ROOT, 'staticfiles')
STATIC_ROOT = 'static'
STATIC_URL = '/static/'
# NOTE(review): STATICFILES_DIRS is assigned twice; the list form below
# overrides this tuple, so only BASE_DIR/static is effective.
STATICFILES_DIRS = (
    os.path.join(PROJECT_ROOT, 'static'),
)
# Directory for templates and statics.
STATICFILES_DIRS = [os.path.join(BASE_DIR, 'static')]
MEDIA_ROOT = os.path.join(BASE_DIR, 'media/')
MEDIA_URL = '/media/'
# Email
# EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
EMAIL_HOST = os.environ['EMAIL_HOST']
EMAIL_HOST_USER = os.environ['EMAIL_HOST_USER']
EMAIL_HOST_PASSWORD = os.environ['EMAIL_HOST_PASSWORD']
EMAIL_PORT = 465
# EMAIL_USE_TLS = True
EMAIL_USE_SSL = True
DEFAULT_FROM_EMAIL = "Soporte de Cash4Home <%s>"%os.environ['EMAIL_HOST_USER']
SERVER_EMAIL = os.environ['EMAIL_HOST_USER']
ADMINS = [('Carlos', '<EMAIL>'), ('Vicky', '<EMAIL>')]
| StarcoderdataPython |
1707189 | """Integration test: run experiments with some small & fast configs.
Only cursory 'smoke' checks -- there are plenty of errors this won't catch."""
import os
import shutil
import tempfile
import numpy as np
import pytest
import ray
from ray import tune
from aprl.activations.density.pipeline import density_ex
from aprl.activations.tsne.pipeline import tsne_ex
from aprl.multi.score import multi_score_ex
from aprl.multi.train import multi_train_ex
from aprl.policies.loader import AGENT_LOADERS
from aprl.score_agent import score_ex
from aprl.train import NO_VECENV, RL_ALGOS, train_ex
# Experiments exercised by the generic smoke test below.
EXPERIMENTS = [score_ex, train_ex]
@pytest.mark.parametrize("experiment", EXPERIMENTS)
def test_experiment(experiment):
    """Smoke test to check the experiment runs with its default config."""
    run = experiment.run()
    assert run.status == "COMPLETED"
BASE_DIR = os.path.dirname(os.path.realpath(__file__))
# Config variations for the score experiment: different opponents, envs,
# trajectory recording, and observation masking/noise options.
SCORE_AGENT_CONFIGS = [
    {"agent_b_type": "zoo", "agent_b_path": "2", "videos": True, "episodes": 2},
    {"env_name": "multicomp/KickAndDefend-v0", "episodes": 1},
    {"record_traj": True, "record_traj_params": {"save_dir": "test_dir"}},
    {"noisy_agent_index": 0},
    {"mask_agent_index": 0},
    {"mask_agent_index": 0, "mask_agent_masking_type": "additive_noise", "mask_agent_noise": 1.0},
]
# One config per non-zoo agent loader, using the bundled dummy checkpoints.
SCORE_AGENT_CONFIGS += [
    {
        "agent_b_type": rl_algo,
        "agent_b_path": os.path.join(BASE_DIR, "dummy_sumo_ants", rl_algo),
        "episodes": 1,
    }
    for rl_algo in AGENT_LOADERS.keys()
    if rl_algo != "zoo"
]
@pytest.mark.parametrize("config", SCORE_AGENT_CONFIGS)
def test_score_agent(config):
    """Smoke test for score agent to check it runs with some different configs."""
    config = dict(config)
    if "episodes" not in config:
        config["episodes"] = 1  # speed up tests
    config["render"] = False  # faster without, test_experiment already tests with render
    run = score_ex.run(config_updates=config)
    assert run.status == "COMPLETED"
    # Every episode must end in exactly one of tie/win0/win1.
    outcomes = [run.result[k] for k in ["ties", "win0", "win1"]]
    assert sum(outcomes) == run.config["episodes"]
    if config.get("record_traj", False):
        # Verify the recorded per-agent trajectory files, then clean them up
        # (the rmdir in finally requires the directory to be emptied first).
        try:
            for i in range(2):
                traj_file_path = os.path.join(
                    config["record_traj_params"]["save_dir"], f"agent_{i}.npz"
                )
                traj_data = np.load(traj_file_path)
                assert set(traj_data.keys()).issuperset(["observations", "actions", "rewards"])
                for k, ep_data in traj_data.items():
                    assert len(ep_data) == config["episodes"], f"unexpected array length at '{k}'"
                os.remove(traj_file_path)
        finally:
            os.rmdir(config["record_traj_params"]["save_dir"])
# Video-saving variants: default (None) save dir vs an explicit directory.
SCORE_AGENT_VIDEO_CONFIGS = {
    "none_dir": {
        "videos": True,
        "video_params": {"save_dir": None},
        "episodes": 1,
        "render": False,
    },
    "specified_dir": {
        "videos": True,
        "video_params": {"save_dir": "specific_video_dir"},
        "episodes": 1,
        "render": False,
    },
}
def test_score_agent_video():
    """Check video saving to a temp dir works and a reused dir is rejected."""
    # Confirm that experiment runs properly saving videos to a temp dir
    none_dir_run = score_ex.run(config_updates=SCORE_AGENT_VIDEO_CONFIGS["none_dir"])
    assert none_dir_run.status == "COMPLETED"
    try:
        # Confirm that the first time you try to save videos to a specified dir, it works properly
        specified_dir_run = score_ex.run(config_updates=SCORE_AGENT_VIDEO_CONFIGS["specified_dir"])
        assert specified_dir_run.status == "COMPLETED"
        # Confirm that the second time you try to save videos to the same specified dir, it fails
        with pytest.raises(AssertionError):
            _ = score_ex.run(config_updates=SCORE_AGENT_VIDEO_CONFIGS["specified_dir"])
    finally:
        shutil.rmtree(SCORE_AGENT_VIDEO_CONFIGS["specified_dir"]["video_params"]["save_dir"])
# Training config variations: env choices, reward shaping, embedded-agent
# noise, transparency options, and lookback policies.
TRAIN_CONFIGS = [
    {"num_env": 1},
    {"env_name": "multicomp/YouShallNotPassHumans-v0"},
    {"normalize": False},
    {"embed_type": "ppo2", "embed_path": os.path.join(BASE_DIR, "dummy_sumo_ants", "ppo2")},
    {
        "env_name": "multicomp/SumoHumans-v0",
        "rew_shape": True,
        "rew_shape_params": {"anneal_frac": 0.1},
    },
    {"env_name": "multicomp/SumoHumans-v0", "embed_noise": True},
    {"env_name": "Humanoid-v3", "embed_types": [], "embed_paths": []},
    {
        "env_name": "multicomp/SumoHumansAutoContact-v0",
        "rew_shape": True,
        "rew_shape_params": {"metric": "length", "min_wait": 100, "window_size": 100},
    },
    {
        "env_name": "multicomp/SumoHumans-v0",
        "rew_shape": True,
        "embed_noise": True,
        "embed_noise_params": {"metric": "sparse", "min_wait": 100, "window_size": 100},
    },
    {"env_name": "multicomp/SumoHumansAutoContact-v0", "adv_noise_params": {"noise_val": 0.1}},
    {
        # test TransparentLSTMPolicy
        "transparent_params": ["ff_policy", "hid"],
    },
    {
        # test TransparentMLPPolicyValue
        "env_name": "multicomp/YouShallNotPassHumans-v0",
        "transparent_params": ["ff_policy"],
        "batch_size": 32,
    },
    {
        "env_name": "multicomp/SumoHumans-v0",
        "lookback_params": {"lb_num": 2, "lb_path": 1, "lb_type": "zoo"},
        "adv_noise_params": {"noise_val": 0.1},
        "transparent_params": ["ff_policy"],
    },
]
# GAIL is optional in stable_baselines; only add its config when importable.
try:
    from stable_baselines import GAIL
    del GAIL
    TRAIN_CONFIGS.append(
        {
            "rl_algo": "gail",
            "num_env": 1,
            "expert_dataset_path": os.path.join(BASE_DIR, "SumoAnts_traj/agent_0.npz"),
        }
    )
except ImportError:  # pragma: no cover
    # skip GAIL test if algorithm not available
    pass
# One config per remaining RL algorithm; algorithms that cannot run in a
# vectorized env get a single environment.
TRAIN_CONFIGS += [
    {"rl_algo": algo, "num_env": 1 if algo in NO_VECENV else 2}
    for algo in RL_ALGOS.keys()
    if algo != "gail"
]
# Choose hyperparameters to minimize resource consumption in tests
TRAIN_SMALL_RESOURCES = {
    "batch_size": 64,
    "total_timesteps": 128,
    "num_env": 2,
}
@pytest.mark.parametrize("config", TRAIN_CONFIGS)
def test_train(config):
    """Train with each config and check the run saved its model weights."""
    # Small-resource defaults, with explicit config values taking precedence
    # (same effect as copying the config and setdefault-ing each key).
    merged = {**TRAIN_SMALL_RESOURCES, **config}
    run = train_ex.run(config_updates=merged)
    assert run.status == "COMPLETED"
    final_dir = run.result
    assert os.path.isdir(final_dir), "final result not saved"
    assert os.path.isfile(os.path.join(final_dir, "model.pkl")), "model weights not saved"
def _test_multi(ex, config_updates=None):
    """Run a multi experiment with CI-friendly ray settings and return the run."""
    multi_config = {
        "spec": {
            "run_kwargs": {
                "resources_per_trial": {"cpu": 2},  # CI build only has 2 cores
                "upload_dir": None,  # do not upload test results anywhere
                "sync_to_cloud": None,  # as above
            },
        },
        "init_kwargs": {"num_cpus": 2},  # CI build only has 2 cores
    }
    if config_updates:
        multi_config.update(config_updates)
    run = ex.run(config_updates=multi_config, named_configs=("debug_config",))
    assert run.status == "COMPLETED"
    # The experiment is responsible for shutting ray down after itself.
    assert ray.state.state.redis_client is None, "ray has not been shutdown"
    return run
def test_multi_score():
    """Run the multi-score experiment and check the shape of its result."""
    result = _test_multi(multi_score_ex).result
    for key in ("scores", "exp_id"):
        assert key in result
    assert isinstance(result["scores"], dict)
def test_multi_train():
    """Run the multi-train experiment and sanity-check its (analysis, id) result."""
    run = _test_multi(
        multi_train_ex,
        config_updates={"train": TRAIN_SMALL_RESOURCES},
    )
    analysis, exp_id = run.result
    assert isinstance(analysis, tune.analysis.ExperimentAnalysis)
    assert isinstance(exp_id, str)
# (experiment, name of its inner model-fitting sub-config) pairs.
ACTIVATION_EXPERIMENTS = [
    (density_ex, "fit_density_model"),
    (tsne_ex, "tsne_fit_model"),
]
@pytest.mark.parametrize("test_cfg", ACTIVATION_EXPERIMENTS)
def test_activation_pipeline(test_cfg):
    """Run an activation-analysis pipeline end-to-end inside a temp dir."""
    ex, inner_exp_name = test_cfg
    with tempfile.TemporaryDirectory(prefix="test_activation_pipeline") as tmpdir:
        config_updates = {
            "generate_activations": {
                "score_update": {
                    "spec": {
                        "run_kwargs": {
                            "resources_per_trial": {"cpu": 2},  # CI build only has 2 cores
                            "upload_dir": os.path.join(tmpdir, "ray"),
                            # "Upload" locally via rsync so results land in tmpdir.
                            "sync_to_cloud": (
                                "mkdir -p {target} && " "rsync -rlptv {source}/ {target}"
                            ),
                        },
                    },
                    "init_kwargs": {"num_cpus": 2},  # CI build only has 2 cores
                },
                "ray_upload_dir": os.path.join(tmpdir, "ray"),
            },
            inner_exp_name: {"init_kwargs": {"num_cpus": 2}},  # CI build only has 2 cores
            "output_root": os.path.join(tmpdir, "main"),
        }
        run = ex.run(config_updates=config_updates, named_configs=("debug_config",))
        assert run.status == "COMPLETED"
        os.stat(run.result)  # check output path exists
| StarcoderdataPython |
1799222 |
class Transformer:
    """Pair a human-readable name with a one-argument transform callable."""
    __slots__ = '_name', '_func'
    def __init__(self, name, func):
        self._name = name
        self._func = func
    def __repr__(self):
        return f'<{type(self).__name__} {self._name}>'
    @property
    def name(self):
        """Display name given at construction time."""
        return self._name
    def transform(self, data):
        """Return the result of applying the wrapped callable to *data*."""
        return self._func(data)
class Pipeline:
    """An ordered chain of transformers applied one after another.

    Each element only needs a ``transform(data)`` method (plus a ``name``
    attribute if ``__str__`` is used).  ``then``/``extended`` return new
    pipelines instead of mutating in place.
    """
    __slots__ = '_transformers',
    def __init__(self, transformers=None):
        # Materialise the empty list here rather than as a (shared) mutable
        # default argument.
        if transformers is None:
            transformers = []
        self._transformers = transformers
    def transform(self, data):
        """Feed *data* through every transformer in order; return the result."""
        for transformer in self._transformers:
            data = transformer.transform(data)
        return data
    @property
    def transformers(self):
        """The underlying transformer list (not copied; treat as read-only)."""
        return self._transformers
    def then(self, transformer):
        """Return a new pipeline with *transformer* appended."""
        return type(self)(self._transformers + [transformer])
    def extended(self, other):
        """Return a new pipeline that runs ``self`` then ``other``."""
        return type(self)(self._transformers + other._transformers)
    def is_empty(self):
        """True when the pipeline holds no transformers."""
        # Idiom fix: truthiness instead of len(...) == 0.
        return not self._transformers
    def __len__(self):
        return len(self._transformers)
    def __repr__(self):
        return f'<{type(self).__name__} {self._transformers!r}>'
    def __str__(self):
        return '\n'.join(f' -> {trfmr.name}' for trfmr in self._transformers)
| StarcoderdataPython |
3393518 | import numpy as np
from scipy.ndimage import gaussian_filter
import sys
from scipy.spatial import Delaunay
import os
import subprocess
#from matplotlib import image as mpimg
from PIL import Image, ImageDraw
import shutil
GAUSS = 2  # sigma for Gaussian pre-smoothing of input tiles (<= 0 disables)
OUTDIR = 'dm/temp/'
PERSISTENCE_THRESHOLD = 512
# Default tile dimensions.  NOTE(review): single-letter module globals `l`
# and `w` are easy to shadow (make_png reuses them as parameter names).
l = 512
w = 512
def build_vert_by_th(im_cube, nx, ny):
    """Return an (nx*ny, 3) array of [i, j, im_cube[i, j]] rows.

    Rows are ordered j-major (all i for j=0, then j=1, ...), matching the
    vertex indexing the downstream triangulation relies on.
    """
    rows = [[i, j, im_cube[i, j]] for j in range(ny) for i in range(nx)]
    return np.asarray(rows)
def buildTriFromTetra(tetra):
    """Enumerate the unique triangular faces of a set of tetrahedra.

    Args:
        tetra: (nTe, 4) integer array of tetra vertex indices.  Rows are
            assumed sorted (the caller sorts Delaunay simplices) so that
            duplicate faces produce identical tuples.

    Returns:
        (nTri, 3) float array of the unique faces, in first-seen order.
    """
    # Fix: removed the unconditional per-tetra print — it dominated runtime
    # on large meshes and added no information.
    tri = {}
    for row in tetra:
        for j in range(4):
            # Dropping vertex j yields one of the four faces.
            face = tuple(row[k] for k in range(4) if k != j)
            if face not in tri:
                tri[face] = len(tri)
    tri_array = np.zeros([len(tri), 3])
    for face, idx in tri.items():
        tri_array[idx, :] = list(face)
    return tri_array
def builEdgeFromTri(tri):
    """Enumerate the unique edges of a set of triangles.

    Args:
        tri: (nTri, 3) array of triangle vertex indices (each row assumed
            sorted so duplicate edges hash identically).

    Returns:
        (nEdge, 2) float array of the unique edges, in first-seen order.
    """
    # Fix: removed the unconditional per-triangle print (debug spam that
    # dominated runtime on large meshes).
    edge = {}
    for row in tri:
        for j in range(3):
            # Dropping vertex j yields one of the three edges.
            pair = tuple(row[k] for k in range(3) if k != j)
            if pair not in edge:
                edge[pair] = len(edge)
    edge_array = np.zeros([len(edge), 2])
    for pair, idx in edge.items():
        edge_array[idx, :] = list(pair)
    return edge_array
def outBinary(vert, edge, triangle, nV, nE, nT, file_name):
    """Serialise the complex as [nV, verts, nE, edges, nT, tris] binary.

    Counts are written as int32, vertices as float64 ('d'), and edge /
    triangle index arrays as int32, all in this fixed order.
    """
    # Fix: a single 'wb' open already truncates the file, so the extra
    # open(file_name, 'wb').close() was redundant, as was the explicit
    # f.close() inside the with-block.
    with open(file_name, 'wb') as f:
        nV.astype(np.int32).tofile(f)
        vert.astype('d').tofile(f)
        nE.astype(np.int32).tofile(f)
        edge.astype(np.int32).tofile(f)
        nT.astype(np.int32).tofile(f)
        triangle.astype(np.int32).tofile(f)
def cmp_dm_img_tri2D(i_file_name, i_th):
    """Run the external discrete-Morse solver and load its 1-skeleton output.

    `i_file_name` is the working directory containing SC.bin; `i_th` is the
    persistence threshold forwarded to the binary.  Returns (verts, edges),
    or ([], []) when no edges survive the threshold.
    """
    print(i_file_name)
    # spt_cpp reads <dir>/SC.bin and writes vert.txt / edge.txt into <dir>/;
    # the trailing "2" selects 2-D mode.  Requires the spt_cpp binary to be
    # present relative to the current working directory.
    subprocess.check_call([r"spt_cpp/spt_cpp", i_file_name + '/SC.bin', i_file_name + "/", str(i_th), str(2)])
    print('process done')
    o_vert = np.loadtxt(i_file_name + "/vert.txt")
    o_edge = np.loadtxt(i_file_name + "/edge.txt")
    if len(o_edge) == 0:
        return [], []
    o_vert = o_vert[:, :2]
    # visualize result
    # Swap the two vertex columns — presumably converting (row, col) output
    # into (x, y) for drawing; TODO confirm against spt_cpp's convention.
    stable_vert = o_vert.copy()
    stable_vert[:, 0] = o_vert[:, 1]
    stable_vert[:, 1] = o_vert[:, 0]
    o_vert = stable_vert
    return o_vert, o_edge
def make_png(verts, edges, path, l, w, linestroke=1):
    """Rasterise an edge list onto a blank l x w greyscale canvas.

    Each edge row indexes two rows of ``verts`` (x, y); the drawn image is
    returned as a float array scaled to [0, 1].  ``path`` is accepted for
    interface compatibility but not used here.
    """
    canvas = Image.new('L', (l, w), color=0)
    pen = ImageDraw.Draw(canvas)
    for edge in edges:
        u = verts[int(edge[0])]
        v = verts[int(edge[1])]
        pen.line((u[0], u[1], v[0], v[1]), fill=255, width=linestroke)
    return np.asarray(canvas) / 255
def dm_cal(tile, id):
    """Run the discrete-Morse pipeline on one 2-D image tile.

    Triangulates a single base grid square with Delaunay, translates the
    resulting edges/triangles across one row and then down the plane to
    cover the whole grid, writes the complex to OUTDIR/<id>/SC.bin, runs
    the external persistence extractor, renders the resulting 1-skeleton
    to an image, and deletes the temporary directory.

    :param tile: 2-D grayscale array; its shape supplies (nx, ny).
    :param id: identifier used for the temporary output subdirectory.
    :return: the rendered Morse-skeleton image (values in [0, 1]).

    NOTE(review): depends on module globals GAUSS, OUTDIR,
    PERSISTENCE_THRESHOLD, l and w, and on build_vert_by_th defined
    elsewhere in this file — confirm they are set before calling.
    """
    img = tile
    nx, ny = img.shape
    if GAUSS > 0:
        img = gaussian_filter(img, GAUSS)
    vert = build_vert_by_th(img, nx, ny)
    print('verts:', len(vert))
    sys.stdout.flush()
    # Triangulate only the first grid square; the rest of the mesh is
    # produced by translating its edges/triangles.
    base_square_vert = np.asarray([vert[0], vert[1], vert[nx], vert[nx + 1]])
    tri_vert_to_og_vert = {}
    tri_vert_to_og_vert[0] = 0
    tri_vert_to_og_vert[1] = 1
    tri_vert_to_og_vert[2] = nx
    tri_vert_to_og_vert[3] = nx + 1
    # print(base_cube)
    tri = Delaunay(base_square_vert[:, :2])
    tri.simplices.sort()
    # np.savetxt("simpliecs.txt",tri.simplices)
    print("Build tri from tetra.")
    sys.stdout.flush()
    base_square_tri = tri.simplices
    print("Build edge from tri.")
    sys.stdout.flush()
    base_square_edge = builEdgeFromTri(base_square_tri)
    # print(base_cube_edge)
    # Remap local (0..3) triangulation indices back to grid vertex ids.
    square_edge = []
    for e in base_square_edge:
        new_e = [tri_vert_to_og_vert[e[0]], tri_vert_to_og_vert[e[1]]]
        square_edge.append(new_e)
    square_tri = []
    for t in base_square_tri:
        new_t = [tri_vert_to_og_vert[t[0]], tri_vert_to_og_vert[t[1]], tri_vert_to_og_vert[t[2]]]
        square_tri.append(new_t)
    # print(cube_edge)
    print('creating dups for row...')
    sys.stdout.flush()
    # Skip edges lying on the shared left boundary so translated copies
    # do not duplicate them.
    edges_to_dup = []
    for e in square_edge:
        if (e[0] == 0 or e[0] == nx) and (e[1] == 0 or e[1] == nx):
            continue
        edges_to_dup.append(e)
    tris_to_dup = []
    for t in square_tri:
        tris_to_dup.append(t)
    # print(edges_to_dup)
    print('creating row...')
    sys.stdout.flush()
    row_edge = []
    for e in square_edge:
        row_edge.append(e)
    row_tri = []
    for t in square_tri:
        row_tri.append(t)
    for i in range(1, nx - 1):
        # print('working on', i)
        for e in edges_to_dup:
            new_e = [e[0] + i, e[1] + i]
            row_edge.append(new_e)
        for t in tris_to_dup:
            new_t = [t[0] + i, t[1] + i, t[2] + i]
            row_tri.append(new_t)
    print('creating plane...')
    sys.stdout.flush()
    # Same idea vertically: drop edges on the shared top row before
    # translating the whole row down the plane.
    edges_to_dup = []
    for e in row_edge:
        v0_y = vert[e[0]][1]
        v1_y = vert[e[1]][1]
        if v0_y == 0 and v1_y == 0:
            continue
        edges_to_dup.append(e)
    tris_to_dup = []
    for t in row_tri:
        tris_to_dup.append(t)
    plane_edge = []
    for e in row_edge:
        plane_edge.append(e)
    plane_tri = []
    for t in row_tri:
        plane_tri.append(t)
    for i in range(1, ny - 1):
        # for i in range(1, 2):
        # print('working on', i)
        shift = i * nx
        for e in edges_to_dup:
            new_e = [e[0] + shift, e[1] + shift]
            plane_edge.append(new_e)
        for t in tris_to_dup:
            new_t = [t[0] + shift, t[1] + shift, t[2] + shift]
            plane_tri.append(new_t)
    edge = np.asarray(plane_edge)
    tri = np.asarray(plane_tri)
    nV = vert.shape[0] * np.ones(1)
    nE = edge.shape[0] * np.ones(1)
    nT = tri.shape[0] * np.ones(1)
    if not os.path.exists(OUTDIR):
        os.makedirs(OUTDIR)
    file_name = OUTDIR + str(id) + '/'
    if not os.path.exists(file_name):
        os.makedirs(file_name)
    print('writing binary...')
    bi_file_name = OUTDIR + str(id) + '/SC.bin'
    outBinary(vert, edge, tri, nV, nE, nT, bi_file_name)
    threshold = PERSISTENCE_THRESHOLD
    verts, edges = cmp_dm_img_tri2D(file_name, threshold)
    # verts = np.asarray([[v[1], v[0]] for v in verts])
    path = os.path.join(file_name, 'dimo.png')
    morse_tile = make_png(verts, edges, path, l, w)
    # morse_tile = mpimg.imread(path)
    shutil.rmtree(file_name)
    return morse_tile
| StarcoderdataPython |
4838087 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright 2020-2021 by <NAME>. All rights reserved. This file is part
# of the Robot Operating System project, released under the MIT License. Please
# see the LICENSE file included as part of this package.
#
# author: <NAME>
# created: 2020-10-27
# modified: 2020-10-27
#
import pytest
import time, itertools, traceback
from datetime import datetime as dt
from colorama import init, Fore, Style
init()
from lib.logger import Logger, Level
from lib.config_loader import ConfigLoader
from lib.event import Event
from lib.message_factory import MessageFactory
from lib.clock import Clock
# Test-loop configuration: INFINITE keeps test_clock() running until
# interrupted; SHOW_STATS enables per-tick timing output in the mock bus.
INFINITE = True
SHOW_STATS = False
# ..............................................................................
class MockMessageBus():
    '''
    A stand-in message bus for the clock test: it numbers incoming
    messages and, when SHOW_STATS is set, logs timing statistics for
    clock tick/tock events as they arrive. Every CLOCK_TOCK increments
    the module-global tock_count so test_clock() can terminate.
    '''
    def __init__(self, level):
        # NOTE(review): the `level` argument is currently unused; the
        # logger level is hard-coded to INFO below.
        super().__init__()
        # Monotonically increasing message number, assigned in add().
        self._counter = itertools.count()
        self._log = Logger("queue", Level.INFO)
        # Timestamps used to derive per-message latency statistics.
        self._start_time = dt.now()
        self._last_time = self._start_time
        self._log.info('ready.')

    # ......................................................
    def set_clock(self, clock):
        # Injected after construction; add() reads self._clock.trim when
        # logging tock statistics.
        self._clock = clock

    # ......................................................
    def add(self, message):
        '''
        Number the message, compute elapsed/processing times, and log
        them for CLOCK_TICK / CLOCK_TOCK events when SHOW_STATS is set.
        '''
        global tock_count
        message.number = next(self._counter)
        self._log.debug('added message {:06d}: priority {}: {}'.format(message.number, message.priority, message.description))
        _event = message.event
        _value = message.value
        _now = dt.now()
        _delta = _now - self._start_time
        # Milliseconds since the previous message of any kind.
        _elapsed_ms = (_now - self._last_time).total_seconds() * 1000.0
        _process_delta = _now - message.timestamp
        _elapsed_loop_ms = _delta.total_seconds() * 1000.0
        _process_ms = _process_delta.total_seconds() * 1000.0
        _total_ms = _elapsed_loop_ms + _process_ms
        if SHOW_STATS:
            if _event is Event.CLOCK_TICK:
                # self._log.info(Fore.YELLOW + Style.NORMAL + 'CLOCK_TICK: {}; value: {}'.format(_event.description, _value))
                self._log.info(Fore.BLACK + Style.DIM + 'event: {}; proc time: {:8.5f}ms; tick loop: {:5.2f}ms; total: {:5.2f}ms; '.format(\
                        _event.description, _process_ms, _elapsed_loop_ms, _total_ms) + Fore.WHITE + Style.NORMAL \
                        + ' elapsed: {:6.3f}ms; trim: {:7.4f}'.format(_elapsed_ms, self._clock.trim))
            elif _event is Event.CLOCK_TOCK:
                self._log.info(Fore.YELLOW + Style.DIM + 'event: {}; proc time: {:8.5f}ms; tock loop: {:5.2f}ms; total: {:6.3f}ms; '.format(\
                        _event.description, _process_ms, _elapsed_loop_ms, _total_ms) + Fore.WHITE + Style.NORMAL \
                        + ' elapsed: {:6.3f}ms; trim: {:7.4f}'.format(_elapsed_ms, self._clock.trim))
                # Tocks reset the loop timer and drive test termination.
                self._start_time = _now
                tock_count += 1
            else:
                self._log.info(Fore.BLACK + Style.BRIGHT + 'other event: {}'.format(_event.description))
        self._last_time = _now
# ..............................................................................
@pytest.mark.unit
def test_clock():
    '''
    Wire a Clock to the MockMessageBus and let it run. Terminates after
    _loops tocks — but only when INFINITE is False; with INFINITE set the
    loop runs until interrupted.
    '''
    global tock_count
    _log = Logger('clock-test', Level.INFO)
    _loader = ConfigLoader(Level.INFO)
    filename = 'config.yaml'
    _config = _loader.configure(filename)
    # Counted up by MockMessageBus.add() on every CLOCK_TOCK.
    tock_count = 0
    _message_factory = MessageFactory(Level.INFO)
    _message_bus = MockMessageBus(Level.INFO)
    _clock = Clock(_config, _message_bus, _message_factory, Level.INFO)
    _message_bus.set_clock(_clock)
    _clock.enable()
    _log.info('ready; begin test.')
    _loops = 3
    while INFINITE or tock_count < _loops:
        time.sleep(1.0)
    _log.info('test complete.')
# ..............................................................................
def main():
    '''
    Script entry point: run test_clock(), translating Ctrl-C into a
    clean exit message and reporting any other exception with traceback.
    '''
    try:
        test_clock()
    except KeyboardInterrupt:
        print(Fore.RED + 'Ctrl-C caught; exiting...' + Style.RESET_ALL)
    except Exception as e:
        print(Fore.RED + Style.BRIGHT + 'error starting ifs: {}\n{}'.format(e, traceback.format_exc()) + Style.RESET_ALL)
    finally:
        pass
#EOF
| StarcoderdataPython |
1734581 | <filename>lib/dataset/augmentor/augmentation.py
import numbers
import os
import warnings
import numpy as np
import cv2
import random
import math
from train_config import config as cfg
######May wrong, when use it check it
def Rotate_aug(src,angle,label=None,center=None,scale=1.0):
    '''
    Rotate an image (and optionally its landmark points) about a centre.

    :param src: source image (h, w, c)
    :param label: optional landmark array [[x1, y1], [x2, y2], ...]
    :param angle: rotation angle in degrees
    :param center: rotation centre; defaults to the image centre
    :param scale: isotropic scale applied with the rotation
    :return: (rotated image, rotated points or None)
    '''
    image=src
    (h, w) = image.shape[:2]
    # If no rotation centre was given, use the image centre.
    if center is None:
        center = (w / 2, h / 2)
    # Perform the rotation.
    M = cv2.getRotationMatrix2D(center, angle, scale)
    if label is None:
        # No labels: rotate each channel in place with zero border fill.
        for i in range(image.shape[2]):
            image[:,:,i] = cv2.warpAffine(image[:,:,i], M, (w, h),
                                          flags=cv2.INTER_CUBIC,
                                          borderMode=cv2.BORDER_CONSTANT,
                                          borderValue=0.)
        return image,None
    else:
        label=label.T
        ####make it as a 3x3 RT matrix
        full_M=np.row_stack((M,np.asarray([0,0,1])))
        # NOTE(review): with labels the border is filled with the dataset
        # mean colour rather than zero, unlike the branch above.
        img_rotated = cv2.warpAffine(image, M, (w, h), flags=cv2.INTER_CUBIC,
                                     borderMode=cv2.BORDER_CONSTANT, borderValue=cfg.DATA.PIXEL_MEAN)
        ###make the label as 3xN matrix
        full_label = np.row_stack((label, np.ones(shape=(1,label.shape[1]))))
        label_rotated=np.dot(full_M,full_label)
        label_rotated=label_rotated[0:2,:]
        #label_rotated = label_rotated.astype(np.int32)
        label_rotated=label_rotated.T
        return img_rotated,label_rotated
def Rotate_coordinate(label, rt_matrix):
    """Apply a 2x3 or 3x3 affine transform matrix to 2xN coordinates.

    :param label: (2, N) array, one point per column.
    :param rt_matrix: 2x3 affine matrix (promoted to 3x3) or 3x3 matrix.
    :return: (2, N) array of transformed coordinates.
    """
    if rt_matrix.shape[0] == 2:
        # Promote the 2x3 affine matrix to homogeneous 3x3 form.
        # np.vstack replaces np.row_stack, which was removed in NumPy 2.0.
        rt_matrix = np.vstack((rt_matrix, np.asarray([0, 0, 1])))
    full_label = np.vstack((label, np.ones(shape=(1, label.shape[1]))))
    label_rotated = np.dot(rt_matrix, full_label)
    return label_rotated[0:2, :]
def box_to_point(boxes):
    """Expand axis-aligned boxes into their four corner points.

    :param boxes: (n, 4) array; per the original caution note the order
        is (ymin, xmin, ymax, xmax) — TODO confirm against callers.
    :return: (4n, 2) array; each box contributes corners in the order
        (c0, c1), (c0, c3), (c2, c3), (c2, c1) of its four coordinates.
    """
    corners = np.zeros(shape=[4 * boxes.shape[0], 2])
    for idx in range(boxes.shape[0]):
        box = boxes[idx]
        base = 4 * idx
        corners[base] = np.array([box[0], box[1]])
        corners[base + 1] = np.array([box[0], box[3]])
        corners[base + 2] = np.array([box[2], box[3]])
        corners[base + 3] = np.array([box[2], box[1]])
    return corners
def point_to_box(points):
    """Collapse groups of four corner points back into bounding boxes.

    Inverse of box_to_point: every consecutive quadruple of points is
    reduced to its axis-aligned bounding box.

    :param points: (4n, 2) array of corner coordinates.
    :return: (n, 4) array [min0, min1, max0, max1] per quadruple.
    """
    quads = points.reshape([-1, 4, 2])
    boxes = [
        [np.min(q[:, 0]), np.min(q[:, 1]), np.max(q[:, 0]), np.max(q[:, 1])]
        for q in quads
    ]
    return np.array(boxes)
def Rotate_with_box(src,angle,boxes=None,center=None,scale=1.0):
    '''
    Rotate an image together with its bounding boxes.

    The rotation scale is reduced so the rotated image corners still fit
    inside the original (w, h) canvas; boxes are converted to corner
    points, rotated, and re-boxed axis-aligned afterwards.

    :param src: source image (h, w, c)
    :param angle: rotation angle in degrees
    :param boxes: (n, 4) box array
    :param center: rotation centre; defaults to the image centre
    :param scale: initial scale used when estimating the fitted extent
    :return: (rotated image, rotated boxes)
    '''
    label=box_to_point(boxes)
    image=src
    (h, w) = image.shape[:2]
    # If no rotation centre was given, use the image centre.
    if center is None:
        center = (w / 2, h / 2)
    # First pass: rotate the image corners to measure the rotated extent,
    # then rebuild the matrix with a scale that keeps it inside the canvas.
    M = cv2.getRotationMatrix2D(center, angle, scale)
    new_size=Rotate_coordinate(np.array([[0,w,w,0],
                                         [0,0,h,h]]), M)
    new_h,new_w=np.max(new_size[1])-np.min(new_size[1]),np.max(new_size[0])-np.min(new_size[0])
    scale=min(h/new_h,w/new_w)
    M = cv2.getRotationMatrix2D(center, angle, scale)
    if boxes is None:
        for i in range(image.shape[2]):
            image[:,:,i] = cv2.warpAffine(image[:,:,i], M, (w, h), flags=cv2.INTER_CUBIC, borderMode=cv2.BORDER_CONSTANT)
        return image,None
    else:
        label=label.T
        ####make it as a 3x3 RT matrix
        full_M=np.row_stack((M,np.asarray([0,0,1])))
        img_rotated = cv2.warpAffine(image, M, (w, h), flags=cv2.INTER_CUBIC, borderMode=cv2.BORDER_CONSTANT)
        ###make the label as 3xN matrix
        full_label = np.row_stack((label, np.ones(shape=(1,label.shape[1]))))
        label_rotated=np.dot(full_M,full_label)
        label_rotated=label_rotated[0:2,:]
        #label_rotated = label_rotated.astype(np.int32)
        label_rotated=label_rotated.T
        boxes_rotated = point_to_box(label_rotated)
        return img_rotated,boxes_rotated
def Perspective_aug(src, strength, label=None):
    """Random perspective warp of an image (and optionally its landmarks).

    Four fixed reference points are jittered by up to ``strength`` pixels
    and the homography mapping the jittered points onto the reference
    square is applied to the image.

    :param src: input image (h, w, c).
    :param strength: max absolute jitter, in pixels, of the control points.
    :param label: optional (N, 2) landmark array in (x, y) order.
    :return: (warped image, warped (N, 2) int32 labels or None).
    """
    image = src
    pts_base = np.float32([[0, 0], [300, 0], [0, 300], [300, 300]])
    pts1 = np.random.rand(4, 2) * random.uniform(-strength, strength) + pts_base
    pts1 = pts1.astype(np.float32)
    # pts1 = np.float32([[56, 65], [368, 52], [28, 387], [389, 398]])
    M = cv2.getPerspectiveTransform(pts1, pts_base)
    trans_img = cv2.warpPerspective(image, M, (src.shape[1], src.shape[0]))
    label_rotated = None
    if label is not None:
        label = label.T
        full_label = np.vstack((label, np.ones(shape=(1, label.shape[1]))))
        label_rotated = np.dot(M, full_label)
        # Fix: a homography yields homogeneous coordinates, so divide by
        # the third row before use — the original skipped this, which is
        # why it carried a "not ok for labels" caution — and return only
        # the (x, y) rows.
        label_rotated = label_rotated[0:2, :] / label_rotated[2, :]
        label_rotated = label_rotated.astype(np.int32)
        label_rotated = label_rotated.T
    return trans_img, label_rotated
def Affine_aug(src,strength,label=None):
    '''
    Random affine warp of an image (and optionally its landmarks).

    Three fixed reference points are jittered by up to ``strength``
    pixels and the resulting affine map is applied to the image, with
    the dataset mean colour as border fill.

    :param src: input image (h, w, c)
    :param strength: max absolute jitter, in pixels, of the control points
    :param label: optional (n, 2) landmark array in (x, y) order
    :return: (warped image, warped labels or None)
    '''
    image = src
    pts_base = np.float32([[10,100],[200,50],[100,250]])
    pts1 = np.random.rand(3, 2) * random.uniform(-strength, strength) + pts_base
    pts1 = pts1.astype(np.float32)
    M = cv2.getAffineTransform(pts1, pts_base)
    trans_img = cv2.warpAffine(image, M, (image.shape[1], image.shape[0]) ,
                               borderMode=cv2.BORDER_CONSTANT,
                               borderValue=cfg.DATA.PIXEL_MEAN)
    label_rotated=None
    if label is not None:
        label=label.T
        # Apply the affine matrix to homogeneous (3xN) landmark coords.
        full_label = np.row_stack((label, np.ones(shape=(1, label.shape[1]))))
        label_rotated = np.dot(M, full_label)
        label_rotated=label_rotated.astype(np.int32)
        label_rotated=label_rotated.T
    return trans_img,label_rotated
def Padding_aug(src,max_pattern_ratio=0.05):
    """Blank out a random border strip of the image with the mean colour.

    One edge (top/bottom/left/right) is chosen by two coin flips and a
    strip up to ``max_pattern_ratio`` of that side's length is replaced
    by the configured dataset mean pixel value.

    :param src: input image (h, w, c)
    :param max_pattern_ratio: max strip size as a fraction of the side
    :return: uint8 image with one edge strip replaced
    """
    src=src.astype(np.float32)
    pattern=np.ones_like(src)
    ratio = random.uniform(0, max_pattern_ratio)
    height,width,_=src.shape
    # First flip picks the axis, second flip picks the side.
    if random.uniform(0,1)>0.5:
        if random.uniform(0, 1) > 0.5:
            pattern[0:int(ratio*height),:,:]=0
        else:
            pattern[height-int(ratio * height):, :, :] = 0
    else:
        if random.uniform(0, 1) > 0.5:
            pattern[:,0:int(ratio * width), :] = 0
        else:
            pattern[:,width-int(ratio * width):, :] = 0
    # Fill the zeroed strip with the configured mean pixel value.
    bias_pattern=(1-pattern)*cfg.DATA.PIXEL_MEAN
    img=src*pattern+bias_pattern
    img=img.astype(np.uint8)
    return img
def Blur_heatmaps(src, ksize=(3, 3)):
    """Gaussian-blur each heatmap channel and min-max normalize it to [0, 1].

    :param src: (h, w, c) heatmap stack, modified in place.
    :param ksize: Gaussian kernel size.
    :return: the blurred, per-channel normalized stack.
    """
    for i in range(src.shape[2]):
        src[:, :, i] = cv2.GaussianBlur(src[:, :, i], ksize, 0)
        amin, amax = src[:, :, i].min(), src[:, :, i].max()  # channel min / max
        if amax>0:
            src[:, :, i] = (src[:, :, i] - amin) / (amax - amin)  # (value - min) / (max - min)
    return src
def Blur_aug(src,ksize=(3,3)):
    """Gaussian-blur each channel of the image (sigma fixed at 1.5), in place.

    :param src: (h, w, c) image, modified in place.
    :param ksize: Gaussian kernel size.
    :return: the blurred image.
    """
    for i in range(src.shape[2]):
        src[:, :, i]=cv2.GaussianBlur(src[:, :, i],ksize,1.5)
    return src
def Img_dropout(src, max_pattern_ratio=0.05):
    """Zero out one random rectangular block of the image, in place.

    The block's width and height are each a random fraction (up to
    ``max_pattern_ratio``) of the image's width/height, placed at a
    random position fully inside the image.

    :param src: (H, W, C) image, modified in place.
    :param max_pattern_ratio: upper bound of the per-axis block ratio.
    :return: the same array, with the chosen block set to 0.
    """
    # Draw the two size ratios first, then the two positions, keeping the
    # RNG call order identical to previous revisions.
    ratio_w = random.uniform(0, max_pattern_ratio)
    ratio_h = random.uniform(0, max_pattern_ratio)
    h, w = src.shape[0], src.shape[1]
    block_w = w * ratio_w
    block_h = h * ratio_h
    x0 = int(random.uniform(0, w - block_w))
    x1 = int(x0 + block_w)
    y0 = int(random.uniform(0, h - block_h))
    y1 = int(y0 + block_h)
    src[y0:y1, x0:x1, :] = 0.
    return src
def Fill_img(img_raw,target_height,target_width,label=None):
    """Letterbox an image to the target aspect ratio with mean-pixel padding.

    The image is centred on a canvas whose aspect ratio matches
    (target_height, target_width); no resizing is performed here.

    :param img_raw: input image (h, w, c)
    :param target_height: target aspect-ratio height
    :param target_width: target aspect-ratio width
    :param label: optional (n, 2) points in (x, y) order; shifted IN
        PLACE into padded coordinates when given
    :return: (padded image, shift_x, shift_y) or (padded image, label)
    """
    ###sometimes use in objs detects
    channel=img_raw.shape[2]
    raw_height = img_raw.shape[0]
    raw_width = img_raw.shape[1]
    if raw_width / raw_height >= target_width / target_height:
        # Wider than the target ratio: grow the canvas vertically.
        shape_need = [int(target_height / target_width * raw_width), raw_width, channel]
        img_fill = np.zeros(shape_need, dtype=img_raw.dtype)+np.array(cfg.DATA.PIXEL_MEAN ,dtype=img_raw.dtype)
        shift_x=(img_fill.shape[1]-raw_width)//2
        shift_y=(img_fill.shape[0]-raw_height)//2
        for i in range(channel):
            img_fill[shift_y:raw_height+shift_y, shift_x:raw_width+shift_x, i] = img_raw[:,:,i]
    else:
        # Taller than the target ratio: grow the canvas horizontally.
        shape_need = [raw_height, int(target_width / target_height * raw_height), channel]
        img_fill = np.zeros(shape_need, dtype=img_raw.dtype)+np.array(cfg.DATA.PIXEL_MEAN ,dtype=img_raw.dtype)
        shift_x = (img_fill.shape[1] - raw_width) // 2
        shift_y = (img_fill.shape[0] - raw_height) // 2
        for i in range(channel):
            img_fill[shift_y:raw_height + shift_y, shift_x:raw_width + shift_x, i] = img_raw[:, :, i]
    if label is None:
        return img_fill,shift_x,shift_y
    else:
        label[:,0]+=shift_x
        label[:, 1]+=shift_y
        return img_fill,label
class RandomResizedCrop(object):
    ### torch_convert codes
    """Crop the given PIL Image to random size and aspect ratio.
    A crop of random size (default: of 0.08 to 1.0) of the original size and a random
    aspect ratio (default: of 3/4 to 4/3) of the original aspect ratio is made. This crop
    is finally resized to given size.
    This is popularly used to train the Inception networks.
    Args:
        size: expected output size of each edge
        scale: range of size of the origin size cropped
        ratio: range of aspect ratio of the origin aspect ratio cropped
        interpolation: Default: PIL.Image.BILINEAR
    """
    # NOTE(review): despite the docstring (adapted from torchvision) this
    # implementation operates on numpy/OpenCV images, not PIL Images.
    def __init__(self, size, scale=(0.08, 1.0), ratio=(3. / 4., 4. / 3.)):
        if isinstance(size, (tuple, list)):
            self.size = size
        else:
            self.size = (size, size)
        if (scale[0] > scale[1]) or (ratio[0] > ratio[1]):
            warnings.warn("range should be of kind (min, max)")
        self.scale = scale
        self.ratio = ratio
    @staticmethod
    def get_params(img, scale, ratio):
        """Get parameters for ``crop`` for a random sized crop.
        Args:
            img (ndarray): Image to be cropped, shape (h, w, c).
            scale (tuple): range of size of the origin size cropped
            ratio (tuple): range of aspect ratio of the origin aspect ratio cropped
        Returns:
            tuple: params (i, j, h, w) to be passed to ``crop`` for a random
                sized crop.
        """
        width, height = img.shape[1],img.shape[0]
        area = height * width
        # Rejection-sample (up to 10 tries) a crop whose area and aspect
        # ratio both fall inside the requested ranges.
        for _ in range(10):
            target_area = random.uniform(*scale) * area
            log_ratio = (math.log(ratio[0]), math.log(ratio[1]))
            aspect_ratio = math.exp(random.uniform(*log_ratio))
            w = int(round(math.sqrt(target_area * aspect_ratio)))
            h = int(round(math.sqrt(target_area / aspect_ratio)))
            if 0 < w <= width and 0 < h <= height:
                i = random.randint(0, height - h)
                j = random.randint(0, width - w)
                return i, j, h, w
        # Fallback to central crop
        in_ratio = float(width) / float(height)
        if (in_ratio < min(ratio)):
            w = width
            h = int(round(w / min(ratio)))
        elif (in_ratio > max(ratio)):
            h = height
            w = int(round(h * max(ratio)))
        else: # whole image
            w = width
            h = height
        i = (height - h) // 2
        j = (width - w) // 2
        return i, j, h, w
    def __call__(self, img):
        """
        Args:
            img (ndarray): Image to be cropped and resized.
        Returns:
            ndarray: Randomly cropped and resized image of self.size.
        """
        i, j, h, w = self.get_params(img, self.scale, self.ratio)
        target_img = img[i:i + h, j:j + w, :]
        # A random interpolation method adds a little extra augmentation.
        interp_methods = [cv2.INTER_LINEAR, cv2.INTER_CUBIC, cv2.INTER_AREA, cv2.INTER_NEAREST,
                          cv2.INTER_LANCZOS4]
        interp_method = random.choice(interp_methods)
        target_img = cv2.resize(target_img, (self.size[1], self.size[0]), interpolation=interp_method)
        return target_img
class CenterCrop(object):
    """Crops the given image at the center.

    The image is first resized (shorter side -> ``resize_size`` via
    OpencvResize) and then centre-cropped to ``target_size``.

    Args:
        size (sequence or int): Desired output size of the crop. If size is an
            int instead of sequence like (h, w), a square crop (size, size) is
            made.
    """
    def __init__(self, target_size,resize_size=256):
        if isinstance(target_size, numbers.Number):
            self.size = (int(target_size), int(target_size))
        else:
            self.size = target_size
        self.resizer=OpencvResize(resize_size)
    def __call__(self, img):
        """
        Args:
            img (ndarray): Image to be cropped, shape (h, w, c).
        Returns:
            ndarray: Cropped image.
        """
        img=self.resizer(img)
        image_width, image_height = img.shape[1],img.shape[0]
        crop_height, crop_width = self.size[0],self.size[1]
        # Round the crop origin so the crop window is centred.
        crop_top = int(round((image_height - crop_height) / 2.))
        crop_left = int(round((image_width - crop_width) / 2.))
        center_croped_img=img[crop_top:crop_top+crop_height,crop_left:crop_left+crop_width,:]
        return center_croped_img
    def __repr__(self):
        return self.__class__.__name__ + '(size={0})'.format(self.size)
class OpencvResize(object):
    """Resize so the SHORTER image side equals ``size``, keeping aspect ratio."""
    def __init__(self, size=256):
        self.size = size
    def __call__(self, img):
        img = np.ascontiguousarray(img)
        H, W, _ = img.shape
        # cv2.resize takes (width, height); scale the shorter side to `size`
        # and round the other side to the nearest pixel.
        target_size = (int(self.size/H * W + 0.5), self.size) if H < W else (self.size, int(self.size/W * H + 0.5))
        img = cv2.resize(img, target_size, interpolation=cv2.INTER_LINEAR)
        img = np.ascontiguousarray(img)
        return img
def box_in_img(img, boxes, min_overlap=0.5):
    """Clip boxes to the image and keep those still mostly visible.

    :param img: image whose shape defines the clipping region.
    :param boxes: (n, 4) array in (xmin, ymin, xmax, ymax) order,
        clipped to the image bounds IN PLACE.
    :param min_overlap: minimum fraction of a box's original area that
        must remain after clipping for the box to be kept.
    :return: array of the surviving (clipped) boxes; may be empty.
    """
    # Original (pre-clip) areas, used as the overlap denominator.
    # (Removed the unused `raw_bboxes` copy the original computed here.)
    face_area = (boxes[:, 3] - boxes[:, 1]) * (boxes[:, 2] - boxes[:, 0])
    h, w, _ = img.shape
    boxes[:, 0][boxes[:, 0] <= 0] = 0
    boxes[:, 0][boxes[:, 0] >= w] = w
    boxes[:, 2][boxes[:, 2] <= 0] = 0
    boxes[:, 2][boxes[:, 2] >= w] = w
    boxes[:, 1][boxes[:, 1] <= 0] = 0
    boxes[:, 1][boxes[:, 1] >= h] = h
    boxes[:, 3][boxes[:, 3] <= 0] = 0
    boxes[:, 3][boxes[:, 3] >= h] = h
    boxes_in = []
    for i in range(boxes.shape[0]):
        box = boxes[i]
        # Keep the box only if enough of its original area survived clipping.
        if ((box[3] - box[1]) * (box[2] - box[0])) / face_area[i] > min_overlap:
            boxes_in.append(boxes[i])
    return np.array(boxes_in)
def Random_scale_withbbox(image,bboxes,target_shape,jitter=0.5):
    """Random crop + rescale + random placement, keeping boxes consistent.

    Repeatedly samples a random crop until at least one box survives
    (box_in_img's overlap test), then rescales the crop with a jittered
    aspect ratio and pastes it at a random offset onto a black
    ``target_shape`` canvas, remapping the surviving boxes.

    :param image: input image (h, w, c)
    :param bboxes: (n, 4) box array in pixel coordinates
    :param target_shape: (h, w) of the output canvas
    :param jitter: NOTE(review): unused — the aspect jitter below is
        hard-coded to uniform(0.7, 1.3); confirm intent
    :return: (new image, remapped boxes)
    """
    ###the boxes is in ymin,xmin,ymax,xmax mode
    hi, wi, _ = image.shape
    while 1:
        if len(bboxes)==0:
            print('errrrrrr')
        bboxes_=np.array(bboxes)
        crop_h = int(hi * random.uniform(0.2, 1))
        crop_w = int(wi * random.uniform(0.2, 1))
        start_h = random.randint(0, hi - crop_h)
        start_w = random.randint(0, wi - crop_w)
        croped = image[start_h:start_h + crop_h, start_w:start_w + crop_w, :]
        # Shift boxes into the crop's coordinate frame.
        bboxes_[:, 0] = bboxes_[:, 0] - start_w
        bboxes_[:, 1] = bboxes_[:, 1] - start_h
        bboxes_[:, 2] = bboxes_[:, 2] - start_w
        bboxes_[:, 3] = bboxes_[:, 3] - start_h
        bboxes_fix=box_in_img(croped,bboxes_)
        if len(bboxes_fix)>0:
            break
    ###use box
    h,w=target_shape
    croped_h,croped_w,_=croped.shape
    croped_h_w_ratio=croped_h/croped_w
    # Rescale with a jittered aspect ratio, clipping width to the canvas.
    rescale_h=int(h * random.uniform(0.5, 1))
    rescale_w = int(rescale_h/(random.uniform(0.7, 1.3)*croped_h_w_ratio))
    rescale_w=np.clip(rescale_w,0,w)
    image=cv2.resize(croped,(rescale_w,rescale_h))
    new_image=np.zeros(shape=[h,w,3],dtype=np.uint8)
    dx = int(random.randint(0, w - rescale_w))
    dy = int(random.randint(0, h - rescale_h))
    new_image[dy:dy+rescale_h,dx:dx+rescale_w,:]=image
    # Map boxes from crop coordinates to the pasted location on the canvas.
    bboxes_fix[:, 0] = bboxes_fix[:, 0] * rescale_w/ croped_w+dx
    bboxes_fix[:, 1] = bboxes_fix[:, 1] * rescale_h / croped_h+dy
    bboxes_fix[:, 2] = bboxes_fix[:, 2] * rescale_w / croped_w+dx
    bboxes_fix[:, 3] = bboxes_fix[:, 3] * rescale_h / croped_h+dy
    return new_image,bboxes_fix
def Random_flip(im, boxes):
    """Mirror an image horizontally and remap its boxes to match.

    :param im: (H, W, C) image; a flipped copy is returned, the input is
        left untouched.
    :param boxes: (n, 4) boxes in (xmin, ymin, xmax, ymax); their x
        coordinates are remapped IN PLACE.
    :return: (flipped image, remapped boxes).
    """
    flipped = np.fliplr(im).copy()
    width = im.shape[1]
    # A horizontal flip maps x -> width - x, swapping the roles of the
    # left and right box edges.
    new_xmin = width - boxes[:, 2]
    new_xmax = width - boxes[:, 0]
    boxes[:, 0] = new_xmin
    boxes[:, 2] = new_xmax
    return flipped, boxes
def Mirror(src,label=None,symmetry=None):
    """Horizontally flip an image and remap left/right-symmetric landmarks.

    :param src: input image
    :param label: optional (n, 2) landmarks in (x, y); entries with x < 0
        are treated as missing and not mirrored
    :param symmetry: iterable of index pairs (q, w) of landmarks that swap
        roles under a flip. NOTE(review): required whenever label is
        given — passing label without symmetry would iterate over None.
    :return: (flipped image, remapped label or None)
    """
    img = cv2.flip(src, 1)
    if label is None:
        return img,None
    width=img.shape[1]
    cod = []
    allc = []
    for i in range(label.shape[0]):
        x, y = label[i][0], label[i][1]
        if x >= 0:
            # Mirror the x coordinate across the image's vertical midline.
            x = width - 1 - x
        cod.append((x, y))
    # **** the joint index depends on the dataset ****
    for (q, w) in symmetry:
        cod[q], cod[w] = cod[w], cod[q]
    for i in range(label.shape[0]):
        allc.append(cod[i][0])
        allc.append(cod[i][1])
    label = np.array(allc).reshape(label.shape[0], 2)
    return img,label
def produce_heat_maps(label, map_size, stride, sigma):
    """Render one Gaussian heatmap per keypoint.

    Each keypoint (x, y) becomes a 2-D Gaussian centred on it, with its
    peak scaled to 255, sampled on a grid downsampled by ``stride``.

    :param label: (N, 2) keypoint array in (x, y) order, in input pixels.
    :param map_size: (height, width) of the input image.
    :param stride: downsampling factor between image and heatmap.
    :param sigma: Gaussian standard deviation, in input pixels.
    :return: (map_size[0]//stride, map_size[1]//stride, N) array.
    """

    def _single_map(center):
        rows = map_size[0] // stride
        cols = map_size[1] // stride
        # Sample at the centre of each stride-sized cell.
        offset = stride / 2.0 - 0.5
        xx, yy = np.meshgrid([c for c in range(cols)], [r for r in range(rows)])
        xx = xx * stride + offset
        yy = yy * stride + offset
        sq_dist = (xx - center[0]) ** 2 + (yy - center[1]) ** 2
        gauss = np.exp(-(sq_dist / 2.0 / sigma / sigma))
        peak = np.amax(gauss)
        if peak > 0:
            # Rescale so the maximum equals 255.
            gauss /= peak / 255.
        return gauss

    n_points = label.shape[0]
    maps = np.zeros([map_size[0] // stride, map_size[1] // stride, n_points])
    for k in range(n_points):
        maps[:, :, k] = _single_map([label[k][0], label[k][1]])
    return maps
def visualize_heatmap_target(heatmap):
    """Debug helper: sum all heatmap channels and show them in a cv2 window.

    Blocks on cv2.waitKey until a key is pressed; display-only, returns None.

    :param heatmap: (h, w, c) heatmap stack.
    """
    map_size=heatmap.shape[0:2]
    frame_num = heatmap.shape[2]
    heat_ = np.zeros([map_size[0], map_size[1]])
    for i in range(frame_num):
        heat_ = heat_ + heatmap[:, :, i]
    cv2.namedWindow('heat_map', 0)
    cv2.imshow('heat_map', heat_)
    cv2.waitKey(0)
def produce_heatmaps_with_bbox(image,label,h_out,w_out,num_klass,ksize=9,sigma=0):
    """Rasterize box centres into per-class heatmaps, blurred and peak-scaled.

    :param image: source image; only its shape is read (for normalization).
    :param label: iterable of boxes [x0, y0, x1, y1, klass]; klass < 0 skipped.
    :param h_out: output heatmap height.
    :param w_out: output heatmap width.
    :param num_klass: number of class channels.
    :param ksize: Gaussian blur kernel size.
    :param sigma: Gaussian blur sigma.
    :return: heatmap with peak scaled to 255 and a trailing singleton axis.
    """
    heatmap=np.zeros(shape=[h_out,w_out,num_klass])
    h,w,_=image.shape
    for single_box in label:
        if single_box[4]>=0:
            ####box center (x,y)
            center=[(single_box[0]+single_box[2])/2/w,(single_box[1]+single_box[3])/2/h] ###0-1
            heatmap[round(center[1]*h_out),round(center[0]*w_out),int(single_box[4]) ]=1.
    heatmap = cv2.GaussianBlur(heatmap, (ksize,ksize), sigma)
    am = np.amax(heatmap)
    if am>0:
        heatmap /= am / 255.
    heatmap=np.expand_dims(heatmap,-1)
    return heatmap
def produce_heatmaps_with_keypoint(image,label,h_out,w_out,num_klass,ksize=7,sigma=0):
    """Rasterize normalized keypoints into per-point heatmaps.

    :param image: source image; shape is read but not otherwise used here.
    :param label: (num_klass, 2) keypoints normalized to [0, 1] in (x, y);
        non-positive coordinates are treated as missing and skipped.
    :param h_out: output heatmap height.
    :param w_out: output heatmap width.
    :param num_klass: number of keypoint channels.
    :param ksize: Gaussian blur kernel size.
    :param sigma: Gaussian blur sigma.
    :return: blurred heatmaps with peak scaled to 255.
    """
    heatmap=np.zeros(shape=[h_out,w_out,num_klass])
    # NOTE(review): h and w are unpacked but unused — label coordinates
    # are already normalized, so the image shape is not needed.
    h,w,_=image.shape
    for i in range(label.shape[0]):
        single_point=label[i]
        if single_point[0]>0 and single_point[1]>0:
            heatmap[int(single_point[1]*(h_out-1)),int(single_point[0]*(w_out-1)),i ]=1.
    heatmap = cv2.GaussianBlur(heatmap, (ksize,ksize), sigma)
    am = np.amax(heatmap)
    if am>0:
        heatmap /= am / 255.
    return heatmap
# Module is import-only; nothing runs when executed directly.
if __name__=='__main__':
    pass
| StarcoderdataPython |
3349757 | <filename>statsmodels/tsa/tests/results/arima112nc_results.py
import numpy as np
# Machine-generated reference results; names follow the statsmodels ARIMA
# results convention (llf = log-likelihood, nobs = observation count,
# k = parameter count, k_ar / k_ma = AR / MA orders, etc. — presumably,
# given the file name; verify against the generating script).
llf = np.array([-240.79351748413])
nobs = np.array([ 202])
k = np.array([ 4])
k_exog = np.array([ 1])
sigma = np.array([ .79349479007541])
chi2 = np.array([ 31643.977146904])
df_model = np.array([ 3])
k_ar = np.array([ 1])
k_ma = np.array([ 2])
# Estimated parameter vector (AR, MA terms, then sigma).
params = np.array([ .99502750401315,
                    -.68630179255403,
                    -.19840894739396,
                    .79349479007541])
# 4x4 parameter covariance matrix, stored row-major.
cov_params = np.array([ .00010016992219,
                        -.00021444523598,
                        -.00023305572854,
                        -6.768123591e-06,
                        -.00021444523598,
                        .00104186449549,
                        .00023669747281,
                        3.902897504e-06,
                        -.00023305572854,
                        .00023669747281,
                        .0010810935718,
                        .00020764808165,
                        -6.768123591e-06,
                        3.902897504e-06,
                        .00020764808165,
                        .0002668504612]).reshape(4,4)
xb = np.array([ 0,
0,
.11361486464739,
.14230862259865,
.07256115227938,
.13274206221104,
.06747215241194,
.13822889328003,
.09585004299879,
.06099047139287,
.10120190680027,
.07761032879353,
.07942545413971,
.11177492141724,
.06088993698359,
.11208334565163,
.0755797252059,
.07422959059477,
.12350624799728,
.07623053342104,
.12391770631075,
.06531815230846,
.08897981792688,
.09103457629681,
.10980048775673,
.09255626052618,
.12732291221619,
.08764986693859,
.14262486994267,
.19330270588398,
.13410428166389,
.2202503234148,
.1138079687953,
.17364008724689,
.19471970200539,
.20120698213577,
.24072274565697,
.21839858591557,
.29251739382744,
.26838713884354,
.28637075424194,
.3556769490242,
.33624804019928,
.35650280117989,
.3974232673645,
.40968126058578,
.36446389555931,
.40641778707504,
.40639826655388,
.32208651304245,
.41636437177658,
.34307014942169,
.3511538207531,
.34216031432152,
.33759200572968,
.36354607343674,
.39148297905922,
.55032896995544,
.4113195836544,
.7244918346405,
.67152947187424,
.76787060499191,
.77276849746704,
.96944856643677,
.88270664215088,
.7563271522522,
.86404490470886,
.82250237464905,
.83520317077637,
.65044301748276,
.83044308423996,
.79827356338501,
.78103590011597,
.93702721595764,
.78709679841995,
.81435388326645,
.89593154191971,
.92867535352707,
1.0709822177887,
1.0957812070847,
1.0792914628983,
1.3286831378937,
1.4503024816513,
1.4619816541672,
1.7190475463867,
1.8096150159836,
1.5324629545212,
1.721804857254,
1.8408879041672,
1.6955831050873,
1.8928952217102,
1.7459137439728,
1.5055395364761,
1.3664853572845,
1.8893030881882,
1.256967663765,
1.0567245483398,
1.2921603918076,
1.2266329526901,
1.2085332870483,
1.275726556778,
1.2278587818146,
1.1046848297119,
1.1517647504807,
.99646359682083,
1.194694519043,
.97580307722092,
1.0148292779922,
1.1635760068893,
.35167038440704,
.95728904008865,
.78414303064346,
.95968008041382,
.97746151685715,
.94291216135025,
.99327826499939,
.93940645456314,
1.013852596283,
1.0454497337341,
1.0929356813431,
1.0810794830322,
1.2874436378479,
1.1533098220825,
1.0470397472382,
1.4171674251556,
1.1959022283554,
1.3181202411652,
1.7197531461716,
1.2677561044693,
1.0768386125565,
1.2508004903793,
1.1625586748123,
1.1872273683548,
1.1668027639389,
1.1576874256134,
1.1782459020615,
1.1398378610611,
1.1065219640732,
1.0032601356506,
1.1087976694107,
.95788156986237,
1.0163568258286,
1.079482793808,
1.0131409168243,
1.0506906509399,
1.1052004098892,
.95601671934128,
.99452114105225,
1.0641269683838,
1.1217628717422,
.98107707500458,
1.0877858400345,
1.0735836029053,
.86890149116516,
.86449563503265,
1.0060983896255,
.79812264442444,
.70991164445877,
.91461282968521,
.78625136613846,
.8291689157486,
.93680161237717,
.81633454561234,
1.0196126699448,
.95442569255829,
1.1131925582886,
1.1916073560715,
1.05200278759,
1.2451642751694,
.97296446561813,
1.0647999048233,
.78715896606445,
.74267995357513,
1.1400059461594,
.82839399576187,
1.0262999534607,
1.0628409385681,
.84051495790482,
.82304340600967,
1.0028872489929,
1.0457111597061,
.97847640514374,
1.1855980157852,
1.195351600647,
1.0270363092422,
1.3610677719116,
1.0189098119736,
2.1800265312195,
.86722087860107,
1.3893752098083,
1.4851142168045,
.65110164880753,
1.417050242424,
1.4938380718231,
1.2786860466003,
1.446773648262,
1.9284181594849,
1.4071846008301,
2.4745123386383,
.53088372945786,
-.25887301564217,
1.0166070461273,
1.1028108596802])
y = np.array([np.nan,
28.979999542236,
29.263614654541,
29.492309570313,
29.442562103271,
29.672742843628,
29.617471694946,
29.888229370117,
29.935850143433,
29.870990753174,
30.021202087402,
30.057609558105,
30.119426727295,
30.321773529053,
30.280889511108,
30.492082595825,
30.515581130981,
30.554229736328,
30.813507080078,
30.826231002808,
31.063919067383,
31.015319824219,
31.108980178833,
31.21103477478,
31.389801025391,
31.472555160522,
31.707323074341,
31.737649917603,
32.022624969482,
32.473300933838,
32.584106445313,
33.070247650146,
33.013809204102,
33.273639678955,
33.594722747803,
33.901206970215,
34.340721130371,
34.61840057373,
35.192520141602,
35.568386077881,
35.98637008667,
36.65567779541,
37.136245727539,
37.65650177002,
38.297424316406,
38.909679412842,
39.264465332031,
39.806419372559,
40.306400299072,
40.42208480835,
41.016361236572,
41.243072509766,
41.551155090332,
41.84215927124,
42.137592315674,
42.563545227051,
43.091484069824,
44.250328063965,
44.611320495605,
46.324489593506,
47.471527099609,
48.867870330811,
50.072769165039,
51.9694480896,
53.182704925537,
53.756328582764,
54.864044189453,
55.722503662109,
56.635204315186,
56.750442504883,
57.830444335938,
58.698276519775,
59.481037139893,
60.937026977539,
61.587097167969,
62.414352416992,
63.595932006836,
64.828674316406,
66.570983886719,
68.195777893066,
69.579292297363,
71.928680419922,
74.450302124023,
76.661979675293,
79.719047546387,
82.709617614746,
84.132461547852,
86.421798706055,
89.040885925293,
90.79557800293,
93.39289855957,
95.14591217041,
95.905540466309,
96.366485595703,
99.389305114746,
99.356964111328,
98.956726074219,
100.09216308594,
101.02663421631,
102.00853729248,
103.37572479248,
104.52786254883,
105.20468139648,
106.25176239014,
106.69645690918,
108.19469451904,
108.67579650879,
109.51483154297,
111.06357574463,
109.05166625977,
110.45729064941,
110.98413848877,
112.35968017578,
113.6774597168,
114.74291229248,
115.99327850342,
116.93940734863,
118.21385192871,
119.54544830322,
120.99293518066,
122.28107452393,
124.38744354248,
125.65331268311,
126.44704437256,
128.91716003418,
130.09590148926,
131.81811523438,
135.11975097656,
135.96775817871,
136.17684936523,
137.45079040527,
138.36254882813,
139.48722839355,
140.56680297852,
141.65768432617,
142.87825012207,
143.93983459473,
144.9065246582,
145.50326538086,
146.70880126953,
147.25788879395,
148.21635437012,
149.47947692871,
150.41313171387,
151.55068969727,
152.90519714355,
153.55603027344,
154.49452209473,
155.76412963867,
157.22177124023,
157.98107910156,
159.28778076172,
160.47357177734,
160.76889038086,
161.26449584961,
162.50610351563,
162.7981262207,
162.90991210938,
164.11460876465,
164.6862487793,
165.5291595459,
166.83679199219,
167.5163269043,
169.11961364746,
170.25442504883,
172.01318359375,
173.8916015625,
174.95199584961,
176.84516906738,
177.37295532227,
178.46479797363,
178.38716125488,
178.44267272949,
180.44000244141,
180.8283996582,
182.22630310059,
183.66284179688,
184.04051208496,
184.52304077148,
185.90287780762,
187.34571838379,
188.37846374512,
190.28559875488,
191.99536132813,
192.82704162598,
195.16107177734,
195.71890258789,
201.3800201416,
200.26721191406,
202.08937072754,
204.18510437012,
202.55110168457,
204.99105834961,
207.41383361816,
208.61668395996,
210.57977294922,
214.4234161377,
215.40417480469,
221.08451843262,
217.41989135742,
211.91511535645,
213.68760681152,
215.57180786133])
resid = np.array([np.nan,
.17000007629395,
.08638589829206,
-.12230817228556,
.09743892401457,
-.12274374067783,
.13252860307693,
-.04822873324156,
-.12585072219372,
.04901013895869,
-.04120244085789,
-.01760895550251,
.0905727148056,
-.1017746925354,
.09910991042852,
-.05208196863532,
-.03558072075248,
.13577139377594,
-.0635067820549,
.11377000063658,
-.11391747742891,
.00468154205009,
.01102056354284,
.0689652711153,
-.00980201456696,
.10744450241327,
-.05732321739197,
.14234967529774,
.25737473368645,
-.02330071851611,
.26589342951775,
-.17024727165699,
.08618897944689,
.12636296451092,
.10527954250574,
.19879072904587,
.05928030610085,
.28160139918327,
.10748030990362,
.13161440193653,
.31362771987915,
.14432306587696,
.16375194489956,
.24349948763847,
.20257519185543,
-.00967973750085,
.13553610444069,
.09358221292496,
-.20640131831169,
.17791347205639,
-.11636133491993,
-.04307091236115,
-.05115458369255,
-.04216108098626,
.06240950897336,
.13645392656326,
.60851699113846,
-.05032894387841,
.98867815732956,
.47550892829895,
.62846976518631,
.43213015794754,
.92723226547241,
.33055067062378,
-.18270587921143,
.24367287755013,
.0359565988183,
.07749536633492,
-.53520393371582,
.24955849349499,
.0695584192872,
.00172565307003,
.51896333694458,
-.13702796399593,
.01290242280811,
.28564843535423,
.30406919121742,
.67132312059402,
.5290162563324,
.30422034859657,
1.0207070112228,
1.0713183879852,
.74969446659088,
1.3380213975906,
1.1809539794922,
-.10961806029081,
.56753551959991,
.77819520235062,
.05911364778876,
.70441842079163,
.0071063125506,
-.74591380357742,
-.90554106235504,
1.1335146427155,
-1.2893046140671,
-1.4569646120071,
-.15672297775745,
-.29216033220291,
-.22663290798664,
.09146212786436,
-.0757219940424,
-.42786338925362,
-.10468477010727,
-.55176627635956,
.3035394847393,
-.49469754099846,
-.1758000254631,
.38517218828201,
-2.3635804653168,
.44833266735077,
-.25729209184647,
.41586154699326,
.34031534194946,
.1225445792079,
.25708478689194,
.00672172475606,
.26059049367905,
.28615045547485,
.35455179214478,
.2070597410202,
.81892204284668,
.11255792528391,
-.25330832600594,
1.0529587268829,
-.0171735137701,
.40410390496254,
1.5818736553192,
-.41975006461143,
-.86774694919586,
.02315225638449,
-.25080046057701,
-.06255260109901,
-.08723650127649,
-.06679660826921,
.04230948910117,
-.07823982834816,
-.13983787596226,
-.40652501583099,
.09674594551325,
-.40880066156387,
-.05788768827915,
.1836401373148,
-.07948285341263,
.08686522394419,
.24931237101555,
-.30519738793373,
-.05602284148335,
.20547580718994,
.33588215708733,
-.22176894545555,
.21891987323761,
.11221113055944,
-.57358360290527,
-.36890152096748,
.23551045358181,
-.50609844923019,
-.59812569618225,
.29008835554123,
-.21461589634418,
.013751687482,
.37082800269127,
-.13679853081703,
.5836746096611,
.18038421869278,
.64556515216827,
.68681055307388,
.00838960427791,
.64800941944122,
-.44517654180527,
.02703555487096,
-.8647877573967,
-.68716812133789,
.85732614994049,
-.44000896811485,
.37160298228264,
.37370926141739,
-.46285009384155,
-.34051498770714,
.37695357203484,
.3971218764782,
.05427964404225,
.72153580188751,
.51439893245697,
-.19535164535046,
.97296363115311,
-.46107393503189,
3.4810900688171,
-1.9800295829773,
.43278217315674,
.61062479019165,
-2.2851173877716,
1.0229095220566,
.92894279956818,
-.07583882659674,
.51631212234497,
1.9152258634567,
-.42641922831535,
3.2058219909668,
-4.1955056190491,
-5.2458953857422,
.75588232278824,
.7813817858696,
.81318950653076])
yr = np.array([np.nan,
.17000007629395,
.08638589829206,
-.12230817228556,
.09743892401457,
-.12274374067783,
.13252860307693,
-.04822873324156,
-.12585072219372,
.04901013895869,
-.04120244085789,
-.01760895550251,
.0905727148056,
-.1017746925354,
.09910991042852,
-.05208196863532,
-.03558072075248,
.13577139377594,
-.0635067820549,
.11377000063658,
-.11391747742891,
.00468154205009,
.01102056354284,
.0689652711153,
-.00980201456696,
.10744450241327,
-.05732321739197,
.14234967529774,
.25737473368645,
-.02330071851611,
.26589342951775,
-.17024727165699,
.08618897944689,
.12636296451092,
.10527954250574,
.19879072904587,
.05928030610085,
.28160139918327,
.10748030990362,
.13161440193653,
.31362771987915,
.14432306587696,
.16375194489956,
.24349948763847,
.20257519185543,
-.00967973750085,
.13553610444069,
.09358221292496,
-.20640131831169,
.17791347205639,
-.11636133491993,
-.04307091236115,
-.05115458369255,
-.04216108098626,
.06240950897336,
.13645392656326,
.60851699113846,
-.05032894387841,
.98867815732956,
.47550892829895,
.62846976518631,
.43213015794754,
.92723226547241,
.33055067062378,
-.18270587921143,
.24367287755013,
.0359565988183,
.07749536633492,
-.53520393371582,
.24955849349499,
.0695584192872,
.00172565307003,
.51896333694458,
-.13702796399593,
.01290242280811,
.28564843535423,
.30406919121742,
.67132312059402,
.5290162563324,
.30422034859657,
1.0207070112228,
1.0713183879852,
.74969446659088,
1.3380213975906,
1.1809539794922,
-.10961806029081,
.56753551959991,
.77819520235062,
.05911364778876,
.70441842079163,
.0071063125506,
-.74591380357742,
-.90554106235504,
1.1335146427155,
-1.2893046140671,
-1.4569646120071,
-.15672297775745,
-.29216033220291,
-.22663290798664,
.09146212786436,
-.0757219940424,
-.42786338925362,
-.10468477010727,
-.55176627635956,
.3035394847393,
-.49469754099846,
-.1758000254631,
.38517218828201,
-2.3635804653168,
.44833266735077,
-.25729209184647,
.41586154699326,
.34031534194946,
.1225445792079,
.25708478689194,
.00672172475606,
.26059049367905,
.28615045547485,
.35455179214478,
.2070597410202,
.81892204284668,
.11255792528391,
-.25330832600594,
1.0529587268829,
-.0171735137701,
.40410390496254,
1.5818736553192,
-.41975006461143,
-.86774694919586,
.02315225638449,
-.25080046057701,
-.06255260109901,
-.08723650127649,
-.06679660826921,
.04230948910117,
-.07823982834816,
-.13983787596226,
-.40652501583099,
.09674594551325,
-.40880066156387,
-.05788768827915,
.1836401373148,
-.07948285341263,
.08686522394419,
.24931237101555,
-.30519738793373,
-.05602284148335,
.20547580718994,
.33588215708733,
-.22176894545555,
.21891987323761,
.11221113055944,
-.57358360290527,
-.36890152096748,
.23551045358181,
-.50609844923019,
-.59812569618225,
.29008835554123,
-.21461589634418,
.013751687482,
.37082800269127,
-.13679853081703,
.5836746096611,
.18038421869278,
.64556515216827,
.68681055307388,
.00838960427791,
.64800941944122,
-.44517654180527,
.02703555487096,
-.8647877573967,
-.68716812133789,
.85732614994049,
-.44000896811485,
.37160298228264,
.37370926141739,
-.46285009384155,
-.34051498770714,
.37695357203484,
.3971218764782,
.05427964404225,
.72153580188751,
.51439893245697,
-.19535164535046,
.97296363115311,
-.46107393503189,
3.4810900688171,
-1.9800295829773,
.43278217315674,
.61062479019165,
-2.2851173877716,
1.0229095220566,
.92894279956818,
-.07583882659674,
.51631212234497,
1.9152258634567,
-.42641922831535,
3.2058219909668,
-4.1955056190491,
-5.2458953857422,
.75588232278824,
.7813817858696,
.81318950653076])
mse = np.array([ 1.4407075643539,
1.4407075643539,
.79720854759216,
.75209444761276,
.71109557151794,
.68920749425888,
.67417079210281,
.66374856233597,
.65616118907928,
.65050256252289,
.64619332551956,
.64286160469055,
.64025497436523,
.63819670677185,
.63655960559845,
.6352499127388,
.63419729471207,
.63334810733795,
.63266098499298,
.63210368156433,
.63165074586868,
.63128209114075,
.63098156452179,
.63073641061783,
.63053613901138,
.63037252426147,
.6302387714386,
.63012927770615,
.63003975152969,
.62996637821198,
.62990635633469,
.62985718250275,
.62981688976288,
.62978386878967,
.62975686788559,
.62973469495773,
.62971651554108,
.62970167398453,
.62968945503235,
.62967944145203,
.62967127561569,
.62966454029083,
.62965905666351,
.62965452671051,
.62965083122253,
.62964779138565,
.62964528799057,
.62964326143265,
.62964159250259,
.62964022159576,
.62963908910751,
.62963819503784,
.62963742017746,
.62963682413101,
.62963628768921,
.6296358704567,
.62963551282883,
.62963527441025,
.62963503599167,
.62963485717773,
.6296346783638,
.62963455915451,
.62963443994522,
.62963438034058,
.62963432073593,
.62963426113129,
.62963420152664,
.629634141922,
.629634141922,
.62963408231735,
.62963408231735,
.62963408231735,
.62963402271271,
.62963402271271,
.62963402271271,
.62963402271271,
.62963402271271,
.62963402271271,
.62963402271271,
.62963402271271,
.62963402271271,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806,
.62963396310806])
# Information-criteria fixture; appears to be
# [nobs, ?, log-likelihood, df, AIC, BIC] — TODO confirm against the
# results-file generator.
icstats = np.array([ 202,
                    np.nan,
                    -240.79351748413,
                    4,
                    489.58703496826,
                    502.82010575786])
class Bunch(dict):
    """A dict whose items are also reachable as attributes.

    Aliasing ``__dict__`` to the mapping itself keeps attribute access and
    key access permanently in sync: ``b.x`` and ``b['x']`` are the same slot.
    """

    def __init__(self, **kw):
        super(Bunch, self).__init__(kw)
        self.__dict__ = self
results = Bunch(llf=llf, nobs=nobs, k=k, k_exog=k_exog, sigma=sigma, chi2=chi2, df_model=df_model, k_ar=k_ar, k_ma=k_ma, params=params, cov_params=cov_params, xb=xb, y=y, resid=resid, yr=yr, mse=mse, icstats=icstats, )
| StarcoderdataPython |
3347652 | # -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import os
import subprocess
import boto3
import psycopg2
from jinja2 import Template
from sqlalchemy.engine.url import make_url
from airflow.hooks.postgres_hook import PostgresHook
from airflow.plugins_manager import AirflowPlugin
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
from airflow import AirflowException
# Module-level logger for this plugin.
logger = logging.getLogger(__name__)
# NOTE(review): these deployment-specific values are blank/hard-coded —
# confirm they are populated per environment (e.g. via Airflow Variables).
# Local staging directory root for the CSV dumps ('' = relative paths).
AIRFLOW_CACHE_PATH = ''
# S3 bucket used to stage the CSV files before the Redshift COPY.
AIRFLOW_CACHE_BUCKET = ''
# Sub-folder (local) / key prefix (S3) for the intermediate CSV files.
AIRFLOW_CACHE_FOLDER = 'psql_copy_cache'
# IAM role ARN that Redshift assumes for the COPY-from-S3 step.
REDSHIFT_IAM_ROLE = ''
def run_commands(statement: str, error_message: str = "") -> None:
    """Run a shell command, raising AirflowException if it fails.

    :param statement: full shell command line (it is built by callers from
        trusted configuration; never pass untrusted input here, because it
        is executed with ``shell=True``).
    :param error_message: extra text appended to the raised message.
    :raises AirflowException: when the command exits with a non-zero status.
    """
    # shell=True is required: callers pass a complete psql pipeline string.
    process = subprocess.Popen(statement, shell=True, stderr=subprocess.PIPE)
    _, error = process.communicate()
    # Bug fix: failure is signalled by the exit status, not by the mere
    # presence of stderr output — psql prints NOTICE/WARNING lines to
    # stderr even on success, and a failing command can emit nothing.
    if process.returncode != 0:
        stderr_text = error.decode('utf-8') if error else ''
        raise AirflowException(stderr_text + error_message)
class PostgresToRedshiftOperator(BaseOperator):
    """
    Load data from Postgres tables into Redshift.

    For every ``source_table -> dest_table`` pair in ``table_mapping`` the
    operator:

    1. dumps the source table to a local CSV via ``psql \\copy``
       (optionally applying a transformation / incremental SELECT first),
    2. uploads the CSV to the S3 cache bucket,
    3. loads it into Redshift with a ``COPY`` statement.

    It supports full replication (``truncate=True``) and partial
    replication using a SELECT query as incremental source.

    :param source_conn_id: Airflow connection id of the source Postgres.
    :param dest_conn_id: Airflow connection id of the target Redshift.
    :param table_mapping: maps a fully qualified source table name to a
        dict with key ``table_name`` (destination table) and optional
        ``transformation_sql`` (path of a SELECT applied while dumping).
    :param truncate: truncate each destination table before loading.
    """

    # Shared across task instances; credentials come from the standard
    # boto3 resolution chain (env vars / instance profile).
    s3_client = boto3.client('s3')
    # Let Airflow render Jinja templates inside the mapping (e.g. {{ ds }}).
    template_fields = ('table_mapping',)

    @apply_defaults
    def __init__(
        self,
        source_conn_id: str,
        dest_conn_id: str,
        table_mapping: dict,
        truncate: bool = False,
        *args, **kwargs
    ):
        super().__init__(*args, **kwargs)
        self.source_hook = PostgresHook(source_conn_id)
        self.dest_hook = PostgresHook(dest_conn_id)
        self.table_mapping = table_mapping
        self.truncate = truncate
        # Task context; stored by execute() so read_sql_from_file() can
        # render Jinja templates embedded in transformation SQL files.
        self.context = None

    def execute(self, context):
        """Replicate every table in ``table_mapping`` into Redshift."""
        self.context = context
        for source_table, dest_table in self.table_mapping.items():
            dest_table_name = dest_table.get("table_name")
            transformation_sql = dest_table.get('transformation_sql')
            # One CSV per destination table; dots are flattened so
            # "schema.table" yields a valid flat file/key name.
            csv_name = f'{dest_table_name.replace(".", "_")}.csv'
            local_file_path = os.path.join(
                AIRFLOW_CACHE_PATH, AIRFLOW_CACHE_FOLDER, csv_name
            )
            s3_key = os.path.join(AIRFLOW_CACHE_FOLDER, csv_name)
            s3_full_path = f"s3://{AIRFLOW_CACHE_BUCKET}/{s3_key}"
            # Dump source data to the local host, applying the optional
            # transformation SELECT while dumping.
            self.dump_from_postgres(
                table=source_table,
                hook=self.source_hook,
                file_path=local_file_path,
                transformation_sql_path=transformation_sql
            )
            logger.warning(f"Table {source_table} dumped to local file {local_file_path}")
            # Stage the CSV on S3 so Redshift can COPY it.
            self.s3_client.upload_file(local_file_path, AIRFLOW_CACHE_BUCKET, s3_key)
            logger.warning(f"Local file {local_file_path} uploaded to {s3_full_path}")
            self.dump_from_s3_to_redshift(local_file_path, s3_key, s3_full_path, dest_table_name)

    def dump_from_s3_to_redshift(self, local_file_path, s3_key, s3_full_path, dest_table) -> None:
        """
        Load the staged S3 CSV into Redshift with COPY.

        :param local_file_path: absolute local path of the dumped CSV
            (removed when this method finishes, even on failure)
        :param s3_key: S3 key of the staged CSV
        :param s3_full_path: full s3:// path (used to look up load errors)
        :param dest_table: fully qualified destination table
        :return: None
        """
        logger.warning(f"Loading table {dest_table} from {s3_full_path}")
        truncate_clause = ''
        if self.truncate:
            truncate_clause = f"TRUNCATE TABLE {dest_table};"
        copy_to_redshift_sql = f"""
        BEGIN;
        {truncate_clause}
        COPY {dest_table}
        FROM 's3://{AIRFLOW_CACHE_BUCKET}/{s3_key}'
        IAM_ROLE '{REDSHIFT_IAM_ROLE}'
        DELIMITER '|'
        IGNOREHEADER 1
        ESCAPE
        REMOVEQUOTES
        ;
        COMMIT;
        """
        try:
            self.dest_hook.run(copy_to_redshift_sql)
        except psycopg2.InternalError as e:
            if "stl_load_errors" in str(e):
                logger.warning("Loading error, checking reason in Redshift.")
                # Surface the real failure reason recorded by Redshift.
                check_sql = f"""
                SELECT err_reason FROM stl_load_errors
                WHERE filename = '{s3_full_path}'
                ORDER BY starttime DESC
                LIMIT 1;
                """
                error_reason = self.dest_hook.get_records(check_sql)[0][0].strip()
                logger.warning(f"Error reason: {error_reason}")
                # NOTE(review): retrying the identical COPY preserves the
                # original behaviour, but a data-format error will very
                # likely fail again — consider raising with `error_reason`.
                self.dest_hook.run(copy_to_redshift_sql)
            else:
                # Bug fix: previously any other InternalError was silently
                # swallowed, marking the task successful with no data loaded.
                raise
        finally:
            # Always clean up the local staging file, even when the load
            # fails, so retries do not accumulate stale CSVs on disk.
            os.remove(local_file_path)

    def dump_from_postgres(self, table, hook, file_path: str, transformation_sql_path: str = None) -> None:
        """
        Dump data from Postgres to a local CSV using ``psql \\copy``.

        :param table: fully qualified source table name
        :param hook: PostgresHook for the source database
        :param file_path: local path the CSV is written to
        :param transformation_sql_path: optional path of a SELECT applied
            to the source data instead of a full-table dump
        :return: None
        :raises AirflowException: on a full-table dump without truncate
            mode (which would duplicate rows on every run), or when psql
            exits with an error.
        """
        if transformation_sql_path is None:
            sql = f"SELECT * FROM {table}"
            if not self.truncate:
                raise AirflowException("The destination is not set to truncated, "
                                       "while copying the entire source table.")
        else:
            sql = self.read_sql_from_file(transformation_sql_path)
        dburi = make_url(hook.get_uri())
        # Bug fix: the password placeholder was broken; pass the connection
        # password from the parsed URI to psql through PGPASSWORD.
        stmt = f"""PGPASSWORD='{dburi.password}' psql -X -U {dburi.username} -h {dburi.host} \
        -d {dburi.database} -c "\copy (
        {sql}
        ) TO '{file_path}' DELIMITER '|' \
        CSV HEADER;" """
        run_commands(stmt)

    def read_sql_from_file(self, file_path: str) -> str:
        """
        Read a SQL file used for incremental replication or
        pre-replication transformation, rendering Jinja templates with the
        current task context.

        :param file_path: absolute path of the SQL file
        :return: rendered SQL (semicolons stripped so it can be embedded
            inside the psql \\copy statement)
        """
        with open(file_path, 'r') as sql_file:
            sql_str = sql_file.read()
        final_sql = Template(sql_str).render(self.context).replace(";", "")
        logger.info(f"Psql Copy SQL: {final_sql}")
        return final_sql
# Defining the plugin class
class AirflowRedshiftPlugin(AirflowPlugin):
    """Airflow plugin registration: exposes PostgresToRedshiftOperator."""
    name = "postgres_to_redshift_operator"
    operators = [PostgresToRedshiftOperator]
    # No custom hooks are shipped; the operator uses PostgresHook directly.
    hooks = []
    # A list of class(es) derived from BaseExecutor
    executors = []
    # A list of references to inject into the macros namespace
    macros = []
    # A list of objects created from a class derived
    # from flask_admin.BaseView
    admin_views = []
    # A list of Blueprint object created from flask.Blueprint
    flask_blueprints = []
    # A list of menu links (flask_admin.base.MenuLink)
    menu_links = []
| StarcoderdataPython |
121854 | <gh_stars>0
"""Component for the Somfy MyLink device supporting the Synergy API."""
# Configuration option keys.
CONF_ENTITY_CONFIG = "entity_config"
CONF_SYSTEM_ID = "system_id"
CONF_REVERSE = "reverse"
CONF_DEFAULT_REVERSE = "default_reverse"
# Covers are treated as non-reversed unless configured otherwise.
DEFAULT_CONF_DEFAULT_REVERSE = False
# Keys used to store integration state (e.g. under hass.data).
DATA_SOMFY_MYLINK = "somfy_mylink_data"
MYLINK_STATUS = "mylink_status"
MYLINK_ENTITY_IDS = "mylink_entity_ids"
DOMAIN = "somfy_mylink"
# Platforms this integration forwards setup to.
SOMFY_MYLINK_COMPONENTS = ["cover"]
MANUFACTURER = "Somfy"
# Default network port used to reach the MyLink hub's Synergy API.
DEFAULT_PORT = 44100
| StarcoderdataPython |
1797198 | <filename>Dice Roller 1.2 Eng/Funcs.py
import sys
import os
from random import *
# Helper functions for the dice roller. A class-based design would also
# work, but this straightforward function-based approach is deliberately
# simple and easy to follow.
def toInt(In):
    """Normalize the die-count field of a parsed roll.

    An empty count (e.g. the user typed just 'd20') means a single die.

    Parameters
    ----------
    In : list
        Parsed input whose first element is the die count as a string.

    Returns
    -------
    str
        '1' when the count was omitted, otherwise the count unchanged.
    """
    return '1' if In[0] == '' else In[0]
def help(In):
    '''
    Called when In is 'h' or 'H'.
    ----------------
    Prints information on what the program expects as input,
    then asks for a new input.
    ----------------
    Returns the newly entered input string.
    '''
    # NOTE(review): shadows the builtin help(); 'cls' is a Windows-only
    # clear-screen command — confirm target platform.
    os.system('cls')
    print ('''
    Help
    To roll a die you have to write the # of die
    to roll followed by die denomination, for example,
    if you want to roll 3 six sided die, you would
    write '3d6' (No air quotes), '3D6' would also work,
    for single dice, 1d6 or d6 would both work.
    Supported denominations: d4, d6, d8, d10, d12, d20
    ''')
    In = input("\n Type 'h' for help, 'c' to close or your dice roll. \n > ")
    return In
def invIn(In):
    '''
    Called when '' (empty input) is entered.
    Takes 1 argument: In (the previous, invalid input).
    --------------------
    Prints an error message, then prompts again.
    ---------------------
    Returns the newly entered input string.
    '''
    print ("\n You have to input something...")
    # Re-prompt with the same message the main loop uses.
    In = input("\n Type 'h' for help, 'c' to close or your dice roll. \n > ")
    return In
#This function is called when you get the right format, to initiate the roll.
def dieRoll(roll):
    """Roll the dice described by `roll` and return the total.

    Parameters
    ----------
    roll : list
        [count, denomination] as produced by checkSyntax(): count may be
        '' (meaning one die) or a number; denomination is one of
        d4/d6/d8/d10/d12/d20 in either letter case.

    Returns
    -------
    int or None
        The summed roll, or None for an unsupported denomination
        (checkSyntax() normally filters those out before this is called).
    """
    numberOfDie = 1 if roll[0] == '' else int(roll[0])
    # Dict dispatch replaces the original if/elif chain, which also
    # depended on helpers whose multi-die recursion wrongly added d6
    # rolls for d8/d10/d12/d20.
    sides_by_name = {
        'd4': 4, 'd6': 6, 'd8': 8, 'd10': 10, 'd12': 12, 'd20': 20,
    }
    sides = sides_by_name.get(roll[1].lower())
    if sides is None:
        return None
    return sum(randint(1, sides) for _ in range(numberOfDie))
# These are the roll helpers, one per die denomination.
def rollD4(numberOfDie):
    """Roll `numberOfDie` four-sided dice and return their sum.

    Parameters
    ----------
    numberOfDie : int
        How many d4 dice to roll (must be >= 1).

    Returns
    -------
    int
        Sum of the individual rolls, each uniform in [1, 4].
    """
    return sum(randint(1, 4) for _ in range(numberOfDie))
def rollD6(numberOfDie):
    """Roll `numberOfDie` six-sided dice and return their sum.

    Parameters
    ----------
    numberOfDie : int
        How many d6 dice to roll (must be >= 1).

    Returns
    -------
    int
        Sum of the individual rolls, each uniform in [1, 6].
    """
    return sum(randint(1, 6) for _ in range(numberOfDie))
def rollD8(numberOfDie):
    """Roll `numberOfDie` eight-sided dice and return their sum.

    Bug fix: the original recursive step called rollD6 for every die
    after the first, so e.g. 3d8 was really 1d8 + 2d6.

    Parameters
    ----------
    numberOfDie : int
        How many d8 dice to roll (must be >= 1).

    Returns
    -------
    int
        Sum of the individual rolls, each uniform in [1, 8].
    """
    return sum(randint(1, 8) for _ in range(numberOfDie))
def rollD10(numberOfDie):
    """Roll `numberOfDie` ten-sided dice and return their sum.

    Bug fix: the original recursive step called rollD6 for every die
    after the first, so e.g. 3d10 was really 1d10 + 2d6.

    Parameters
    ----------
    numberOfDie : int
        How many d10 dice to roll (must be >= 1).

    Returns
    -------
    int
        Sum of the individual rolls, each uniform in [1, 10].
    """
    return sum(randint(1, 10) for _ in range(numberOfDie))
def rollD12(numberOfDie):
    """Roll `numberOfDie` twelve-sided dice and return their sum.

    Bug fix: the original recursive step called rollD6 for every die
    after the first, so e.g. 3d12 was really 1d12 + 2d6.

    Parameters
    ----------
    numberOfDie : int
        How many d12 dice to roll (must be >= 1).

    Returns
    -------
    int
        Sum of the individual rolls, each uniform in [1, 12].
    """
    return sum(randint(1, 12) for _ in range(numberOfDie))
def rollD20(numberOfDie):
    """Roll `numberOfDie` twenty-sided dice and return their sum.

    Bug fix: the original recursive step called rollD6 for every die
    after the first, so e.g. 3d20 was really 1d20 + 2d6.

    Parameters
    ----------
    numberOfDie : int
        How many d20 dice to roll (must be >= 1).

    Returns
    -------
    int
        Sum of the individual rolls, each uniform in [1, 20].
    """
    return sum(randint(1, 20) for _ in range(numberOfDie))
def checkSyntax(In):
    """Validate and split a raw roll string such as '3d6'.

    Parameters
    ----------
    In : str
        Raw user input, e.g. '3d6', 'D20' or 'd4'.

    Returns
    -------
    list or str
        [number_of_dice, denomination] on success, where number_of_dice
        is an int (an omitted count defaults to 1) and denomination is
        the original 'd'/'D'-prefixed suffix; the string 'error' when the
        input is not a valid roll.
    """
    valid_denominations = ('d4', 'd6', 'd8', 'd10', 'd12', 'd20',
                           'D4', 'D6', 'D8', 'D10', 'D12', 'D20')
    count_part = ''
    dice_part = ''
    # Consume leading ASCII digits; the first 'd'/'D' starts the
    # denomination, and anything else aborts the scan.
    for position, symbol in enumerate(In):
        if symbol in ('d', 'D'):
            dice_part = In[position:]
            break
        if symbol not in '0123456789':
            break
        count_part += symbol
    parsed = [count_part or '1', dice_part]
    try:
        parsed[0] = int(parsed[0])
    except ValueError:
        return 'error'
    if parsed[1] not in valid_denominations:
        return 'error'
    return parsed
| StarcoderdataPython |
97487 | <filename>wgltrace.py
##########################################################################
#
# Copyright 2008-2009 VMware, Inc.
# All Rights Reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
##########################################################################/
"""WGL tracing code generator."""
from specs.stdapi import API
from specs.glapi import glapi
from specs.wglapi import wglapi
from dispatch import function_pointer_type, function_pointer_value
from gltrace import GlTracer
from codegen import *
class WglTracer(GlTracer):
    # Code generator that emits the WGL tracing shims (Python 2 script).
    def wrap_ret(self, function, instance):
        # Emit the generic GL return-value wrapping first.
        GlTracer.wrap_ret(self, function, instance)
        if function.name == "wglGetProcAddress":
            # wglGetProcAddress must hand back our wrapper, not the real
            # driver entry point: store the real pointer, then substitute
            # the traced function's address as the return value.
            print '    if (%s) {' % instance
            func_dict = dict([(f.name, f) for f in glapi.functions + wglapi.functions])
            def handle_case(function_name):
                # Emitted C code: save the real pointer, return the shim.
                f = func_dict[function_name]
                ptype = function_pointer_type(f)
                pvalue = function_pointer_value(f)
                print '        %s = (%s)%s;' % (pvalue, ptype, instance)
                print '        %s = (%s)&%s;' % (instance, function.type, f.name);
            def handle_default():
                # Emitted C code: warn about names we don't know how to trace.
                print '        os::log("apitrace: warning: unknown function \\"%s\\"\\n", lpszProc);'
            # Generate a string switch over every known GL/WGL entry point.
            string_switch('lpszProc', func_dict.keys(), handle_case, handle_default)
            print '    }'
print ' }'
if __name__ == '__main__':
    # Emit the prologue of the generated C++ tracer: preprocessor defines
    # and includes required by the tracing shims (Python 2 print syntax).
    print
    print '#define _GDI32_'
    print
    print '#include <string.h>'
    print '#include <windows.h>'
    print
    print '#include "trace_writer.hpp"'
    print '#include "os.hpp"'
    print
    print '// To validate our prototypes'
    print '#define GL_GLEXT_PROTOTYPES'
    print '#define WGL_GLXEXT_PROTOTYPES'
    print
    print '#include "glproc.hpp"'
    print '#include "glsize.hpp"'
    print
    # Trace the combined GL + WGL API surface.
    api = API()
    api.add_api(glapi)
    api.add_api(wglapi)
    tracer = WglTracer()
    tracer.trace_api(api)
| StarcoderdataPython |
1713791 | import cv2
import numpy as np
from ...src.move import horizontally, vertically
from ..mocks.image_mock import image
# Compare move.horizontally / move.vertically against cv2's own
# warpAffine output built from the equivalent affine translation matrix.
# NOTE(review): the local name typo "tranlated_image" is kept as-is.
tranlated_image = horizontally(image, -100)
(height, width) = image.shape[:2]
# [[1, 0, tx], [0, 1, ty]]: shift 100 px left; ty of 1 (not 0) mirrors
# whatever offset the implementation applies — TODO confirm.
translation_matrix = np.float32([[1, 0, -100], [0, 1, 1]])
translated_image_mock = cv2.warpAffine(image, translation_matrix, (width, height))
assert np.allclose(translated_image_mock, tranlated_image), "Should translate image horizontally with success"
tranlated_image = vertically(image, 30)
(height, width) = image.shape[:2]
# NOTE(review): this reference matrix shifts along x ([1, 0, 30]) while
# the call above is `vertically` — confirm the mock matches the
# implementation's axis convention.
translation_matrix = np.float32([[1, 0, 30], [0, 1, 1]])
translated_image_mock = cv2.warpAffine(image, translation_matrix, (width, height))
assert np.allclose(translated_image_mock, tranlated_image), "Should translate image vertically with success"
| StarcoderdataPython |
1616662 | <reponame>goFrendiAsgard/kokoropy<gh_stars>1-10
from kokoropy.model import DB_Model, auto_migrate, base_url, Column, or_, and_, create_engine, MetaData, ForeignKey, func, \
Integer, String, Date, DateTime, Boolean, Upload, Text, scoped_session, sessionmaker, relationship, backref, association_proxy
from ..configs.db import connection_string
from kokoropy import save_uploaded_asset, request
import os
# Engine/session are module level so every model in this file shares one
# thread-local SQLAlchemy session bound to the configured database.
engine = create_engine(connection_string, echo=False)
session = scoped_session(sessionmaker(bind=engine))
DB_Model.metadata = MetaData()
class Pokemon(DB_Model):
    # kokoropy model configuration: session binding, id prefix, and the
    # columns the framework assigns automatically.
    __session__ = session
    __prefix_of_id__ = 'pokemon-'
    __automatic_assigned_column__ = ['id','name']
    # Fields Declarations
    name = Column(String(50))
    image = Column(Upload(50, is_image = True))
    def quick_preview(self):
        # In list views, show the image representation above the name;
        # elsewhere just the name.
        if self.is_list_state():
            return self.build_representation('image') + '<br />' + self.name
        return self.name
'''
By using auto_migrate, kokoropy will automatically adjust your database schema
based on DB_Model changes. However this is not always works. This method is merely
there for the sake of easyness and not recommended for production environment.
'''
auto_migrate(engine)
# Seed the table with demo rows on first run only (module import time).
if Pokemon.count() == 0:
    for pokemon_name in ['bubasaur', 'caterpie', 'charmender', 'pikachu', 'squirtle']:
        pokemon = Pokemon()
        pokemon.name = pokemon_name
        pokemon.image = pokemon_name + '.png'
        pokemon.save()
1669909 | import numpy as np
from keras import backend as K
from keras.models import Model
from keras.losses import binary_crossentropy
from keras.layers import Input, Dense, Dropout
from keras.regularizers import l2
from sklearn.model_selection import train_test_split
class DropoutVAE:
    """Variational-autoencoder-style model whose decoder keeps Dropout
    active so repeated predictions can be sampled (MC-dropout style).

    The encoder maps inputs to (z_mean, z_log_var); unlike a standard VAE
    there is no sampling layer — the decoder consumes z_mean directly.
    """
    def __init__(self, original_dim, input_shape,
                 intermediate_dim=32, latent_dim=3, dropout=0.05,
                 summary=False):
        # All graph construction is delegated to _build_model.
        self._build_model(original_dim, input_shape,
                          intermediate_dim,
                          latent_dim, summary,
                          dropout)
    def _build_model(self, original_dim, input_shape, intermediate_dim, latent_dim,
                     summary=False, dropout=0.05):
        """Build encoder, decoder and the combined VAE with its loss."""
        # --- Encoder: input -> dense funnel -> (z_mean, z_log_var) ---
        inputs = Input(shape=input_shape, name='encoder_input')
        x = inputs
        x = Dense(intermediate_dim, activation='relu')(x)
        x = Dense(intermediate_dim//2, activation='relu')(x)
        z_mean = Dense(latent_dim, name='z_mean')(x)
        z_log_var = Dense(latent_dim, name='z_log_var')(x)
        # We remove the z layer ( z layer is used in VAE but not here)
        self.encoder = Model(inputs, [z_mean, z_log_var],
                             name='encoder')
        # --- Decoder: latent -> dense expansion with L2 + Dropout ---
        latent_inputs = Input(shape=(latent_dim,),
                              name='z_sampling')
        x = latent_inputs
        x = Dense(intermediate_dim//2, activation='relu',
                  kernel_regularizer=l2(1e-4),
                  bias_regularizer=l2(1e-4))(x)
        x = Dropout(dropout)(x)
        x = Dense(intermediate_dim, activation='relu',
                  kernel_regularizer=l2(1e-4),
                  bias_regularizer=l2(1e-4))(x)
        x = Dropout(dropout)(x)
        outputs = Dense(original_dim, activation='sigmoid',
                        kernel_regularizer=l2(1e-4),
                        bias_regularizer=l2(1e-4))(x)
        self.decoder = Model(latent_inputs,
                             outputs,
                             name='decoder')
        # Here we take the mean (not the z-layer)
        outputs = self.decoder(self.encoder(inputs)[0])
        self.vae = Model(inputs, outputs,
                         name='vae_mlp')
        # Loss = reconstruction (binary cross-entropy scaled by dim)
        #      + KL divergence of (z_mean, z_log_var) from a unit Gaussian.
        reconstruction_loss = binary_crossentropy(inputs, outputs)
        reconstruction_loss *= original_dim
        kl_loss = 1 + z_log_var - K.square(z_mean) - K.exp(z_log_var)
        kl_loss = K.sum(kl_loss, axis=-1)
        kl_loss *= -0.5
        vae_loss = K.mean(reconstruction_loss + kl_loss)
        self.vae.add_loss(vae_loss)
        self.vae.compile(optimizer='adam')
        if summary:
            print(self.vae.summary())
    def fit(self, x_train, x_test, epochs=100, batch_size=100,
            verbose=1):
        """Train the VAE; x_test is used only as validation data."""
        self.vae.fit(x_train,
                     shuffle=True,
                     epochs=epochs,
                     batch_size=batch_size,
                     verbose=verbose,
                     validation_data=(x_test, None))
    def fit_unsplit(self, X, epochs=100, batch_size=100, verbose=1):
        """Split X 50/50 into train/validation, then train."""
        x_train, x_test = train_test_split(X, test_size = 0.5)
        self.fit(x_train, x_test, epochs, batch_size, verbose)
    def encoder_predict(self, x_test, batch_size=100):
        """Return [z_mean, z_log_var] for x_test."""
        return self.encoder.predict(x_test,
                                    batch_size=batch_size)
    def generate(self, latent_val, batch_size=100):
        # Decode latent vectors back to data space.
        # NOTE(review): batch_size is accepted but unused here — confirm
        # whether it should be forwarded to decoder.predict.
        return self.decoder.predict(latent_val)
    def predict(self, x_test, batch_size=1, nums=1000):
        """Run the full VAE `nums` times and stack the outputs.

        NOTE(review): vae.predict runs in inference mode where Dropout is
        disabled, so the repeats are likely identical; mean_predict forces
        the learning phase instead — confirm which behaviour is intended.
        Also note the repeats are flattened via extend (shape differs from
        mean_predict, which appends per repeat).
        """
        Yt_hat = []
        for _ in range(nums):
            Yt_hat.extend(self.vae.predict(x_test))
        return np.asarray(Yt_hat)
    def mean_predict(self, x_test, batch_size=1, nums=1000):
        """MC-dropout sampling: decode z_mean `nums` times with Dropout on.

        Builds a backend function with learning_phase=1 so the decoder's
        Dropout layers stay active at prediction time.
        """
        predict_stochastic = K.function([self.decoder.layers[0].input,
                                         K.learning_phase()],
                                        [self.decoder.get_output_at(0)])
        latents = self.encoder.predict(x_test)[0]
        Yt_hat = []
        for _ in range(nums):
            Yt_hat.append(predict_stochastic([latents, 1]))
        return np.asarray(Yt_hat)
123347 | <gh_stars>1-10
# Generated by Django 2.2.10 on 2020-03-06 15:04
from django.db import migrations
class Migration(migrations.Migration):
    """Remove the obsolete ``job_candidate`` field from the Pipeline model."""
    # Must be applied after the app's initial schema migration.
    dependencies = [
        ('salespipes', '0001_initial'),
    ]
    operations = [
        migrations.RemoveField(
            model_name='pipeline',
            name='job_candidate',
        ),
    ]
| StarcoderdataPython |
3224195 | <reponame>BreadFish64/teakra
#!/usr/bin/env python3
import sys
import socket
import struct
# Send the packed instruction words to each known host in turn.
for UDP_IP in ["192.168.1.158", "127.0.0.1"]:
    UDP_PORT = 8888
    # First little-endian 16-bit word is a header/opcode.
    # NOTE(review): 0xD590 vs 0xD591 semantics are not documented here —
    # confirm against the teakra protocol.
    MESSAGE = struct.pack('<H', 0xD590)
    for i in range(1, len(sys.argv)):
        if sys.argv[i] == '-1':
            # NOTE(review): '-1' restarts the message with the alternate
            # header 0xD591, discarding any words packed so far — confirm
            # this reset (rather than append) is intentional.
            MESSAGE = struct.pack('<H', 0xD591)
            continue
        # Each remaining argument encodes one 16-bit instruction word:
        # "<hex>[+<value>@<bitpos>...]" adds operand fields into the base
        # opcode, e.g. "8000+3@4" -> 0x8000 + (3 << 4).
        inst = sys.argv[i]
        splitted = inst.split('+')
        code = int(splitted[0], 16)
        for param in splitted[1:]:
            v, p = param.split('@')
            code += int(v) << int(p)
        MESSAGE += struct.pack('<H', code)
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    sock.sendto(MESSAGE, (UDP_IP, UDP_PORT))
| StarcoderdataPython |
3292899 | from django.conf.urls.defaults import *
from django.views.generic.simple import direct_to_template
urlpatterns = patterns("",
url(r"^$", direct_to_template, {"template": "about/about.html"}, name="about"),
url(r"^terms/$", direct_to_template, {"template": "about/terms.html"}, name="terms"),
url(r"^privacy/$", direct_to_template, {"template": "about/privacy.html"}, name="privacy"),
url(r"^dmca/$", direct_to_template, {"template": "about/dmca.html"}, name="dmca"),
url(r"^what_next/$", direct_to_template, {"template": "about/what_next.html"}, name="what_next"),
) | StarcoderdataPython |
39950 |
# ///////////////////////////////////////////////////////////////////////////
#
#
#
# ///////////////////////////////////////////////////////////////////////////
class GLLight:
    """Description of a point light: position, colour, ambient/diffuse
    terms, and constant/linear/quadratic attenuation coefficients."""

    def __init__(self, pos=(0.0, 0.0, 0.0), color=(1.0, 1.0, 1.0)):
        self.pos = pos
        self.color = color
        # Ambient and diffuse terms default to full white.
        self.ambient = (1.0, 1.0, 1.0)
        self.diffuse = (1.0, 1.0, 1.0)
        # Attenuation coefficients (constant, linear, quadratic).
        self.constant, self.linear, self.quadratic = 1.0, 0.09, 0.032
| StarcoderdataPython |
67503 | # Copyright (C) 2015 Nippon Telegraph and Telephone Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from fabric import colors
from lib.base import (
BGPContainer,
CmdBuffer,
try_several_times,
wait_for_completion,
)
class ExaBGPContainer(BGPContainer):
    """Docker-backed ExaBGP speaker for integration tests (Python 2 module).

    The daemon configuration is rendered into a volume shared with the
    container; at runtime the daemon is driven through the ``exabgpcli``
    tool (announce/withdraw, adj-RIB inspection).
    """
    SHARED_VOLUME = '/shared_volume'
    PID_FILE = '/var/run/exabgp.pid'
    def __init__(self, name, asn, router_id, ctn_image_name='osrg/exabgp:4.0.5'):
        super(ExaBGPContainer, self).__init__(name, asn, router_id, ctn_image_name)
        # Expose the host-side config dir to the container.
        self.shared_volumes.append((self.config_dir, self.SHARED_VOLUME))
    def _pre_start_exabgp(self):
        # Create named pipes for "exabgpcli"
        named_pipes = '/run/exabgp.in /run/exabgp.out'
        self.local('mkfifo {0}'.format(named_pipes), capture=True)
        self.local('chmod 777 {0}'.format(named_pipes), capture=True)
    def _start_exabgp(self):
        # Launch the daemon; log destination, pid file and TCP bind are
        # passed through environment variables.
        cmd = CmdBuffer(' ')
        cmd << 'env exabgp.log.destination={0}/exabgpd.log'.format(self.SHARED_VOLUME)
        cmd << 'exabgp.daemon.user=root'
        cmd << 'exabgp.daemon.pid={0}'.format(self.PID_FILE)
        cmd << 'exabgp.tcp.bind="0.0.0.0" exabgp.tcp.port=179'
        cmd << 'exabgp {0}/exabgpd.conf'.format(self.SHARED_VOLUME)
        self.local(str(cmd), detach=True)
    def _wait_for_boot(self):
        # The daemon is considered up once exabgpcli can reach it.
        def _f():
            ret = self.local('exabgpcli version > /dev/null 2>&1; echo $?', capture=True)
            return ret == '0'
        return wait_for_completion(_f)
    def run(self):
        """Start the container; ExaBGP itself is started later by reload_config()."""
        super(ExaBGPContainer, self).run()
        self._pre_start_exabgp()
        # To start ExaBGP, it is required to configure neighbor settings, so
        # here does not start ExaBGP yet.
        # self._start_exabgp()
        return self.WAIT_FOR_BOOT
    def create_config(self):
        """Render exabgpd.conf from the registered peers into the shared volume."""
        # Manpage of exabgp.conf(5):
        # https://github.com/Exa-Networks/exabgp/blob/master/doc/man/exabgp.conf.5
        cmd = CmdBuffer('\n')
        for peer, info in self.peers.iteritems():
            cmd << 'neighbor {0} {{'.format(info['neigh_addr'].split('/')[0])
            cmd << '    router-id {0};'.format(self.router_id)
            cmd << '    local-address {0};'.format(info['local_addr'].split('/')[0])
            cmd << '    local-as {0};'.format(self.asn)
            cmd << '    peer-as {0};'.format(peer.asn)
            caps = []
            if info['as2']:
                caps.append('        asn4 disable;')
            if info['addpath']:
                caps.append('        add-path send/receive;')
            if caps:
                cmd << '    capability {'
                for cap in caps:
                    cmd << cap
                cmd << '    }'
            if info['passwd']:
                cmd << '    md5-password "{0}";'.format(info['passwd'])
            if info['passive']:
                cmd << '    passive;'
            cmd << '}'
        with open('{0}/exabgpd.conf'.format(self.config_dir), 'w') as f:
            print colors.yellow('[{0}\'s new exabgpd.conf]'.format(self.name))
            print colors.yellow(str(cmd))
            f.write(str(cmd))
    def _is_running(self):
        # The pid file existence is used as the liveness signal.
        ret = self.local("test -f {0}; echo $?".format(self.PID_FILE), capture=True)
        return ret == '0'
    def reload_config(self):
        """(Re)start the daemon so it picks up the current configuration."""
        if not self.peers:
            return
        def _reload():
            if self._is_running():
                # No graceful reload: kill and let the next call restart it.
                self.local('/usr/bin/pkill --pidfile {0} && rm -f {0}'.format(self.PID_FILE), capture=True)
            else:
                self._start_exabgp()
                self._wait_for_boot()
                if not self._is_running():
                    raise RuntimeError('Could not start ExaBGP')
        try_several_times(_reload)
    def _construct_ip_unicast(self, path):
        # "<prefix> next-hop <nh|self>" fragment for an IPv4/IPv6 route.
        cmd = CmdBuffer(' ')
        cmd << str(path['prefix'])
        if path['next-hop']:
            cmd << 'next-hop {0}'.format(path['next-hop'])
        else:
            cmd << 'next-hop self'
        return str(cmd)
    def _construct_flowspec(self, path):
        # "{ match {...} then {...} }" fragment for a FlowSpec route.
        cmd = CmdBuffer(' ')
        cmd << '{ match {'
        for match in path['matchs']:
            cmd << '{0};'.format(match)
        cmd << '} then {'
        for then in path['thens']:
            cmd << '{0};'.format(then)
        cmd << '} }'
        return str(cmd)
    def _construct_path_attributes(self, path):
        # Optional BGP path attributes appended to an announcement.
        cmd = CmdBuffer(' ')
        if path['as-path']:
            cmd << 'as-path [{0}]'.format(' '.join(str(i) for i in path['as-path']))
        if path['med']:
            cmd << 'med {0}'.format(path['med'])
        if path['local-pref']:
            cmd << 'local-preference {0}'.format(path['local-pref'])
        if path['community']:
            cmd << 'community [{0}]'.format(' '.join(c for c in path['community']))
        if path['extended-community']:
            cmd << 'extended-community [{0}]'.format(path['extended-community'])
        if path['attr']:
            cmd << 'attribute [ {0} ]'.format(path['attr'])
        return str(cmd)
    def _construct_path(self, path, rf='ipv4', is_withdraw=False):
        # Build the full exabgpcli route expression for announce/withdraw.
        cmd = CmdBuffer(' ')
        if rf in ['ipv4', 'ipv6']:
            cmd << 'route'
            cmd << self._construct_ip_unicast(path)
        elif rf in ['ipv4-flowspec', 'ipv6-flowspec']:
            cmd << 'flow route'
            cmd << self._construct_flowspec(path)
        else:
            raise ValueError('unsupported address family: %s' % rf)
        if path['identifier']:
            cmd << 'path-information {0}'.format(path['identifier'])
        if not is_withdraw:
            # Withdrawal should not require path attributes
            cmd << self._construct_path_attributes(path)
        return str(cmd)
    def add_route(self, route, rf='ipv4', attribute=None, aspath=None,
                  community=None, med=None, extendedcommunity=None,
                  nexthop=None, matchs=None, thens=None,
                  local_pref=None, identifier=None, reload_config=False):
        """Announce `route` via exabgpcli and remember it in self.routes."""
        if not self._is_running():
            raise RuntimeError('ExaBGP is not yet running')
        self.routes.setdefault(route, [])
        path = {
            'prefix': route,
            'rf': rf,
            'attr': attribute,
            'next-hop': nexthop,
            'as-path': aspath,
            'community': community,
            'med': med,
            'local-pref': local_pref,
            'extended-community': extendedcommunity,
            'identifier': identifier,
            'matchs': matchs,
            'thens': thens,
        }
        cmd = CmdBuffer(' ')
        cmd << "exabgpcli 'announce"
        cmd << self._construct_path(path, rf=rf)
        cmd << "'"
        self.local(str(cmd), capture=True)
        self.routes[route].append(path)
    def del_route(self, route, identifier=None, reload_config=False):
        """Withdraw the path of `route` matching `identifier` (if announced)."""
        if not self._is_running():
            raise RuntimeError('ExaBGP is not yet running')
        path = None
        new_paths = []
        for p in self.routes.get(route, []):
            if p['identifier'] != identifier:
                new_paths.append(p)
            else:
                path = p
        if not path:
            return
        rf = path['rf']
        cmd = CmdBuffer(' ')
        cmd << "exabgpcli 'withdraw"
        cmd << self._construct_path(path, rf=rf, is_withdraw=True)
        cmd << "'"
        self.local(str(cmd), capture=True)
        self.routes[route] = new_paths
    def _get_adj_rib(self, peer, rf, in_out='in'):
        """Parse `exabgpcli show adj-rib {in,out}` into a {nlri: [path, ...]} dict."""
        # IPv4 Unicast:
        #   neighbor 172.17.0.2 ipv4 unicast 192.168.100.0/24 path-information 0.0.0.20 next-hop self
        # IPv6 FlowSpec:
        #   neighbor 172.17.0.2 ipv6 flow flow destination-ipv6 2002:1::/64/0 source-ipv6 2002:2::/64/0 next-header =udp flow-label >100
        rf_map = {
            'ipv4': ['ipv4', 'unicast'],
            'ipv6': ['ipv6', 'unicast'],
            'ipv4-flowspec': ['ipv4', 'flow'],
            'ipv6-flowspec': ['ipv6', 'flow'],
        }
        assert rf in rf_map
        assert in_out in ('in', 'out')
        peer_addr = self.peer_name(peer)
        lines = self.local('exabgpcli show adj-rib {0}'.format(in_out), capture=True).split('\n')
        # rib = {
        #     <nlri>: [
        #         {
        #             'nlri': <nlri>,
        #             'next-hop': <next-hop>,
        #             ...
        #         },
        #         ...
        #     ],
        # }
        rib = {}
        for line in lines:
            if not line:
                continue
            values = line.split()
            # Keep only entries for the requested peer and address family.
            if peer_addr != values[1]:
                continue
            elif rf is not None and rf_map[rf] != values[2:4]:
                continue
            if rf in ('ipv4', 'ipv6'):
                nlri = values[4]
                rib.setdefault(nlri, [])
                # Remaining tokens come in "key value" pairs.
                path = {k: v for k, v in zip(*[iter(values[5:])] * 2)}
                path['nlri'] = nlri
                rib[nlri].append(path)
            elif rf in ('ipv4-flowspec', 'ipv6-flowspec'):
                # XXX: Missing path attributes?
                nlri = ' '.join(values[5:])
                rib.setdefault(nlri, [])
                path = {'nlri': nlri}
                rib[nlri].append(path)
        return rib
    def get_adj_rib_in(self, peer, rf='ipv4'):
        """Adj-RIB-In for `peer` (received routes)."""
        return self._get_adj_rib(peer, rf, 'in')
    def get_adj_rib_out(self, peer, rf='ipv4'):
        """Adj-RIB-Out for `peer` (advertised routes)."""
        return self._get_adj_rib(peer, rf, 'out')
class RawExaBGPContainer(ExaBGPContainer):
def __init__(self, name, config, ctn_image_name='osrg/exabgp',
exabgp_path=''):
asn = None
router_id = None
for line in config.split('\n'):
line = line.strip()
if line.startswith('local-as'):
asn = int(line[len('local-as'):].strip('; '))
if line.startswith('router-id'):
router_id = line[len('router-id'):].strip('; ')
if not asn:
raise Exception('asn not in exabgp config')
if not router_id:
raise Exception('router-id not in exabgp config')
self.config = config
super(RawExaBGPContainer, self).__init__(name, asn, router_id,
ctn_image_name, exabgp_path)
def create_config(self):
with open('{0}/exabgpd.conf'.format(self.config_dir), 'w') as f:
print colors.yellow('[{0}\'s new exabgpd.conf]'.format(self.name))
print colors.yellow(self.config)
f.write(self.config)
| StarcoderdataPython |
183207 | <reponame>gnafit/gna<gh_stars>1-10
#!/usr/bin/env python
"""Check PointsToHist adapter class
"""
from load import ROOT as R
from matplotlib import pyplot as plt
import numpy as N
from matplotlib.ticker import MaxNLocator
import gna.constructors as C
from gna.bindings import DataType
from gna.unittest import *
def test_points_to_hist_01():
    """PointsToHist with an explicit lower edge: edges == [0] + input points."""
    raw = N.arange(1, 13, dtype='d')
    # Keep the Points object alive in a local while the adapter uses it.
    source = C.Points(raw)
    adapter = C.PointsToHist(source.points, 0.)
    edges = N.array(adapter.adapter.hist.datatype().hist().edges())
    # The adapter is expected to prepend the supplied 0. as the first bin edge.
    expected = N.concatenate((N.zeros(1), raw), axis=0)
    assert (expected == edges).all(), "C++ and Python results doesn't match"
def test_points_to_hist_02():
    """PointsToHist without an explicit edge: edges == input points as-is."""
    raw = N.arange(1, 13, dtype='d')
    source = C.Points(raw)
    adapter = C.PointsToHist(source.points)
    edges = N.array(adapter.adapter.hist.datatype().hist().edges())
    assert (raw == edges).all(), "C++ and Python results doesn't match"
if __name__ == "__main__":
    # Discover and run all test_* functions defined in this module.
    run_unittests(globals())
| StarcoderdataPython |
3325554 | <gh_stars>0
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from .Utility import deep_update
from .JsonAccessor.JsonAccessor import load_json
class ConfigureLoader(object):
    """Load JSON configuration, overlaying user settings on defaults."""

    CONFIG_FILENAME_DEFAULT = "ConfigDefault.json"
    CONFIG_FILENAME_USER = "ConfigUser.json"

    @staticmethod
    def load_file(dir_path, filename):
        """Return the parsed JSON from ``dir_path/filename``.

        Returns an empty dict when the file does not exist.
        """
        # os.path.join handles the separator (and an empty dir_path) safely;
        # the previous manual `dir_path[-1] != os.sep` check raised
        # IndexError on an empty path string.
        try:
            return load_json(os.path.join(dir_path, filename))
        except FileNotFoundError:
            return {}

    @classmethod
    def load_integrated_config(cls, dir_path, default_file=None, user_file=None):
        """
        Loading both default and user config, return the integrated result.
        """
        if default_file is None:
            default_file = cls.CONFIG_FILENAME_DEFAULT
        if user_file is None:
            user_file = cls.CONFIG_FILENAME_USER
        default_config = cls.load_file(dir_path, default_file)
        user_config = cls.load_file(dir_path, user_file)
        # User values take precedence; presumably nested dicts are merged
        # recursively by the project helper deep_update — confirm.
        return deep_update(default_config, user_config)
| StarcoderdataPython |
3364874 | import logging, json
from collections import OrderedDict
import utils
class Vocab(object):
    """Word <-> index vocabulary built from observed token frequencies.

    Usage: count tokens with add_word(), then call construct() to assign
    indices to the most frequent `vocab_size` words. Special tokens always
    occupy the lowest indices, in the order given.
    """
    def __init__(self, vocab_size, special_tokens=None):
        """
        :param vocab_size: maximum number of entries (special tokens included)
        :param special_tokens: tokens pre-inserted at the lowest indices
            (should include '<unk>' if encode() may see unknown words)
        """
        self.vocab_size = vocab_size
        self._idx2word = {}
        self._word2idx = {}
        self._freq_dict = {}
        # BUG FIX: the previous mutable default argument ([]) was stored
        # directly, so every instance created without the argument shared
        # (and could leak mutations of) the same list object.
        self.special_tokens = special_tokens if special_tokens is not None else []
        for w in self.special_tokens:
            self._absolute_add_word(w)
    def __len__(self):
        return len(self._idx2word)
    def _absolute_add_word(self, w):
        # Assign the next free index unconditionally (used for specials).
        idx = len(self)
        self._idx2word[idx] = w
        self._word2idx[w] = idx
    def add_word(self, word):
        """Record one occurrence of `word` (no index is assigned yet)."""
        if word not in self._freq_dict:
            self._freq_dict[word] = 0
        self._freq_dict[word] += 1
    def has_word(self, word):
        """Return the recorded frequency of `word` (falsy if never seen)."""
        return self._freq_dict.get(word)
    def _add_to_vocab(self, word):
        if word not in self._word2idx:
            idx = len(self._idx2word)
            self._idx2word[idx] = word
            self._word2idx[word] = idx
    def construct(self):
        """Freeze the vocabulary: index counted words by descending frequency."""
        l = sorted(self._freq_dict.keys(), key=lambda x: -self._freq_dict[x])
        if len(l) + len(self._idx2word) < self.vocab_size:
            logging.warning('actual vocabulary set smaller than that configured: {}/{}'
                            .format(len(l) + len(self._idx2word), self.vocab_size))
        for word in l:
            self._add_to_vocab(word)
            if len(self._idx2word) >= self.vocab_size:
                break
    def load_vocab(self, vocab_path):
        """Restore the vocabulary from `<vocab_path>.{freq,word2idx}.json`."""
        self._freq_dict = json.loads(open(vocab_path+'.freq.json', 'r').read())
        self._word2idx = json.loads(open(vocab_path+'.word2idx.json', 'r').read())
        self._idx2word = {}
        for w, idx in self._word2idx.items():
            self._idx2word[idx] = w
        self.vocab_size_true = len(self._idx2word)
        print('vocab file loaded from "'+vocab_path+'"')
        print('Vocabulary size: %d' % (self.vocab_size_true))
    def save_vocab(self, vocab_path):
        """Persist word2idx and the frequency table (sorted by count)."""
        _freq_dict = OrderedDict(sorted(self._freq_dict.items(), key=lambda kv:kv[1], reverse=True))
        utils.write_dict(vocab_path+'.word2idx.json', self._word2idx)
        utils.write_dict(vocab_path+'.freq.json', _freq_dict)
    def sentence_encode(self, word_list):
        """Encode a list of words to a list of indices."""
        return [self.encode(_) for _ in word_list]
    def sentence_decode(self, index_list, eos=None):
        """Decode indices to a space-joined string, truncated at `eos` if present."""
        l = [self.decode(_) for _ in index_list]
        if not eos or eos not in l:
            return ' '.join(l)
        else:
            idx = l.index(eos)
            return ' '.join(l[:idx])
    def nl_decode(self, l, eos=None):
        """Decode a batch of index lists, one newline-terminated string each."""
        return [self.sentence_decode(_, eos) + '\n' for _ in l]
    def encode(self, word):
        """Return the index of `word`, falling back to the '<unk>' entry."""
        word = '<unk>' if word not in self._word2idx else word
        return self._word2idx[word]
    def decode(self, idx):
        """Return the word at `idx`; accepts ints or 0-d tensors (via .item())."""
        if type(idx) is not int:
            idx = int(idx.item())
        return self._idx2word.get(idx, '<unk>')
3352779 | #!/usr/bin/env python
#
# Copyright 2019 GoPro Inc.
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import os
import pynodegl as ngl
from pynodegl_utils.misc import get_backend
from pynodegl_utils.toolbox.grid import autogrid_simple
# Backend selection: honor the BACKEND environment variable when set,
# otherwise let node.gl pick automatically.
_backend_str = os.environ.get('BACKEND')
_backend = get_backend(_backend_str) if _backend_str else ngl.BACKEND_AUTO
# Minimal shader pair used by every test scene: standard MVP transform and a
# flat uniform color.
_vert = 'void main() { ngl_out_pos = ngl_projection_matrix * ngl_modelview_matrix * vec4(ngl_position, 1.0); }'
_frag = 'void main() { ngl_out_color = color; }'
def _get_scene(geometry=None):
    # Build a minimal renderable scene: `geometry` (default: a quad) drawn
    # with the flat-color shader pair, colored opaque white.
    program = ngl.Program(vertex=_vert, fragment=_frag)
    if geometry is None:
        geometry = ngl.Quad()
    scene = ngl.Render(geometry, program)
    scene.update_frag_resources(color=ngl.UniformVec4(value=(1.0, 1.0, 1.0, 1.0)))
    return scene
def api_backend():
    # Configuring with a bogus backend id must be rejected (negative return).
    ctx = ngl.Context()
    assert ctx.configure(backend=0x1234) < 0
    del ctx
def api_reconfigure():
    # A context must survive being configured twice and still draw the
    # scene set before the reconfiguration.
    ctx = ngl.Context()
    assert ctx.configure(offscreen=1, width=16, height=16, backend=_backend) == 0
    scene = _get_scene()
    assert ctx.set_scene(scene) == 0
    assert ctx.draw(0) == 0
    assert ctx.configure(offscreen=1, width=16, height=16, backend=_backend) == 0
    assert ctx.draw(1) == 0
    del ctx
def api_reconfigure_clearcolor(width=16, height=16):
    # CRC32 of the RGBA capture buffer pins the expected rendering for the
    # default clear color and for a custom one after reconfiguration.
    import zlib
    capture_buffer = bytearray(width * height * 4)
    ctx = ngl.Context()
    assert ctx.configure(offscreen=1, width=width, height=height, backend=_backend, capture_buffer=capture_buffer) == 0
    scene = _get_scene()
    assert ctx.set_scene(scene) == 0
    assert ctx.draw(0) == 0
    assert zlib.crc32(capture_buffer) == 0xb4bd32fa
    assert ctx.configure(offscreen=1, width=width, height=height, backend=_backend, capture_buffer=capture_buffer,
                         clear_color=(0.4, 0.4, 0.4, 1.0)) == 0
    assert ctx.draw(0) == 0
    assert zlib.crc32(capture_buffer) == 0x05c44869
    del capture_buffer
    del ctx
def api_reconfigure_fail():
    # Switching an offscreen context to onscreen must fail, after which the
    # context must also refuse to draw.
    ctx = ngl.Context()
    assert ctx.configure(offscreen=1, width=16, height=16, backend=_backend) == 0
    scene = _get_scene()
    assert ctx.set_scene(scene) == 0
    assert ctx.draw(0) == 0
    assert ctx.configure(offscreen=0, backend=_backend) != 0
    assert ctx.draw(1) != 0
    del ctx
def api_capture_buffer(width=16, height=16):
    # The capture buffer can be attached and detached repeatedly; drawing
    # with it attached must fill it with the expected pixels (CRC-pinned).
    import zlib
    ctx = ngl.Context()
    assert ctx.configure(offscreen=1, width=width, height=height, backend=_backend) == 0
    scene = _get_scene()
    assert ctx.set_scene(scene) == 0
    for i in range(2):
        capture_buffer = bytearray(width * height * 4)
        assert ctx.set_capture_buffer(capture_buffer) == 0
        assert ctx.draw(0) == 0
        assert ctx.set_capture_buffer(None) == 0
        assert ctx.draw(0) == 0
        assert zlib.crc32(capture_buffer) == 0xb4bd32fa
    del ctx
def api_ctx_ownership():
    # A scene belongs to the context it was first associated with; a second
    # context must refuse it but remain able to draw.
    ctx = ngl.Context()
    ctx2 = ngl.Context()
    assert ctx.configure(offscreen=1, width=16, height=16, backend=_backend) == 0
    assert ctx2.configure(offscreen=1, width=16, height=16, backend=_backend) == 0
    scene = _get_scene()
    assert ctx.set_scene(scene) == 0
    assert ctx.draw(0) == 0
    assert ctx2.set_scene(scene) != 0
    assert ctx2.draw(0) == 0
    del ctx
    del ctx2
def api_ctx_ownership_subgraph():
for shared in (True, False):
ctx = ngl.Context()
ctx2 = ngl.Context()
assert ctx.configure(offscreen=1, width=16, height=16, backend=_backend) == 0
assert ctx2.configure(offscreen=1, width=16, height=16, backend=_backend) == 0
quad = ngl.Quad()
render1 = _get_scene(quad)
if not shared:
quad = ngl.Quad()
render2 = _get_scene(quad)
scene = ngl.Group([render1, render2])
assert ctx.set_scene(render2) == 0
assert ctx.draw(0) == 0
assert ctx2.set_scene(scene) != 0
assert ctx2.draw(0) == 0 # XXX: drawing with no scene is allowed?
del ctx
del ctx2
def api_capture_buffer_lifetime(width=1024, height=1024):
    # The context must hold its own reference to the capture buffer:
    # dropping the caller's reference before drawing must be safe.
    capture_buffer = bytearray(width * height * 4)
    ctx = ngl.Context()
    assert ctx.configure(offscreen=1, width=width, height=height, backend=_backend, capture_buffer=capture_buffer) == 0
    del capture_buffer
    scene = _get_scene()
    assert ctx.set_scene(scene) == 0
    assert ctx.draw(0) == 0
    del ctx
# Exercise the HUD rasterization. We can't really check the output, so this is
# just for blind coverage and similar code instrumentalization.
def api_hud(width=234, height=123):
    ctx = ngl.Context()
    assert ctx.configure(offscreen=1, width=width, height=height, backend=_backend, hud=1) == 0
    scene = _get_scene()
    assert ctx.set_scene(scene) == 0
    # Draw 3 seconds worth of frames at 60 fps so the HUD accumulates data.
    for i in range(60 * 3):
        assert ctx.draw(i / 60.) == 0
    del ctx
def api_text_live_change(width=320, height=240):
    # Live-updating a Text node must re-render: each new string must change
    # the captured framebuffer CRC.
    import zlib
    ctx = ngl.Context()
    capture_buffer = bytearray(width * height * 4)
    assert ctx.configure(offscreen=1, width=width, height=height, backend=_backend, capture_buffer=capture_buffer) == 0
    # An empty string forces the text node to deal with a pipeline with nul
    # attributes, this is what we exercise here, along with a varying up and
    # down number of characters
    text_strings = ["foo", "", "foobar", "world", "hello\nworld", "\n\n", "last"]
    # Exercise the diamond-form/prepare mechanism
    text_node = ngl.Text()
    ctx.set_scene(autogrid_simple([text_node] * 4))
    ctx.draw(0)
    last_crc = zlib.crc32(capture_buffer)
    for i, s in enumerate(text_strings, 1):
        text_node.set_text(s)
        ctx.draw(i)
        crc = zlib.crc32(capture_buffer)
        assert crc != last_crc
        last_crc = crc
def _ret_to_fourcc(ret):
if ret >= 0:
return None
x = -ret
return chr(x>>24) + chr(x>>16 & 0xff) + chr(x>>8 & 0xff) + chr(x&0xff)
def api_media_sharing_failure():
    # Sharing one Media node between two textures is unsupported: setting
    # such a scene must fail with the 'Eusg' (usage) error code.
    ctx = ngl.Context()
    assert ctx.configure(offscreen=1, width=16, height=16, backend=_backend) == 0
    m = ngl.Media('/dev/null')
    scene = ngl.Group(children=(ngl.Texture2D(data_src=m), ngl.Texture2D(data_src=m)))
    assert _ret_to_fourcc(ctx.set_scene(scene)) == 'Eusg'  # Usage error
| StarcoderdataPython |
1767576 | <filename>airflow_elt_scripts/dags/my_first_dag.py
#importando as bibliotecas que vamos usar nesse exemplo
from airflow import DAG
from datetime import datetime, timedelta
from airflow.operators.bash_operator import BashOperator
#Definindo alguns argumentos básicos
# Basic default arguments applied to every task in the DAG.
default_args = {
    'owner': 'howard',
    'depends_on_past': False,
    'start_date': datetime(2020, 3, 30),
    'retries': 0
}
# Name the DAG and define its schedule (Crontab-style expressions can also be
# used here, e.g. to run the DAG every day at 8 AM).
with DAG(
    'my-first-dag',
    schedule_interval=timedelta(minutes=1),
    catchup=False,
    default_args=default_args
) as dag:
    # Tasks executed by the DAG: two Python ETL scripts launched via bash.
    t1 = BashOperator(task_id='first_etl',
                      bash_command="""
                      cd $AIRFLOW_HOME/dags/etl_scripts/
                      python3 my_first_etl_script.py
                      """)
    # BUG FIX: this command previously used the undefined $AIRFLOW variable
    # instead of $AIRFLOW_HOME, so the `cd` resolved to the wrong directory
    # and the second script could not be found.
    t2 = BashOperator(task_id='second_etl',
                      bash_command="""
                      cd $AIRFLOW_HOME/dags/etl_scripts/
                      python3 my_second_etl_script.py
                      """)
    # Execution order: t1 first, then t2.
    t1 >> t2
| StarcoderdataPython |
130051 | <reponame>ajmal017/amp
import datetime
from typing import Union
import pandas as pd
from tqdm.auto import tqdm
import core.finance as cfinan
import helpers.dataframe as hdataf
import helpers.dbg as dbg
import im.common.data.load.abstract_data_loader as icdlab
import im.common.data.types as vcdtyp
# Date-like values accepted by the pandas APIs used in this module.
_PANDAS_DATE_TYPE = Union[str, pd.Timestamp, datetime.datetime]
class FuturesForwardContracts:
    """
    Contract data for open futures contracts.
    """

    def __init__(
        self, data_loader: icdlab.AbstractDataLoader, disable_tqdm: bool = False
    ) -> None:
        """
        Initialize by injecting a data loader.

        :param data_loader: data loader implementing abstract interface
        :param disable_tqdm: if True, suppress tqdm progress bars
        """
        self._data_loader = data_loader
        self._disable_tqdm = disable_tqdm

    def replace_contracts_with_data(
        self, df: pd.DataFrame, col: str
    ) -> pd.DataFrame:
        """
        Accept a series of contracts and return market data.

        :param df: dataframe of contracts indexed by a datetime index with a
            frequency, e.g.,
                         CL1    CL2
            2010-01-12 CLG10  CLH10
            2010-01-13 CLG10  CLH10
            2010-01-14 CLH10  CLJ10
        :param col: name of column to extract, e.g., "open", "close", "volume",
            "twap"
        :return: dataframe of market data indexed like `df`. Each contract
            name is replaced with its relevant col of market data (as of the
            time given by the index). E.g.,
                         CL1    CL2
            2010-01-12 80.79  81.17
            2010-01-13 79.65  80.04
            2010-01-14 79.88  80.47
        """
        # Process one contract column at a time, then stitch results back
        # into a frame with the original column layout.
        data = []
        for column in tqdm(
            df.columns, disable=self._disable_tqdm, desc="Processing columns"
        ):
            contract_srs = df[column]
            market_data = self._replace_contracts_with_data(contract_srs)
            data_srs = market_data[col]
            data_srs.name = column
            data.append(data_srs)
        data_df = pd.concat(data, axis=1)
        return data_df

    def _replace_contracts_with_data(self, srs: pd.Series) -> pd.DataFrame:
        """
        Accept a series of contracts and return market data.

        :param srs: series of contracts indexed by a datetime index with a
            frequency, e.g.,
            2010-01-12 CLG10
            2010-01-13 CLG10
            2010-01-14 CLH10
        :return: dataframe of market data indexed like `srs`. Each contract
            name is replaced with a row of market data (as of the time given
            by the index). E.g.,
                        open  high    low  close    vol
            2010-01-12 82.07 82.34  79.91  80.79 333866
            2010-01-13 80.06 80.67  78.37  79.65 401627
            2010-01-14 79.97 80.75  79.32  79.88 197449
        """
        # Determine whether to use daily or minutely contract data.
        # Heuristic: fewer than 366 sampling points per year implies daily.
        ppy = hdataf.infer_sampling_points_per_year(srs)
        if ppy < 366:
            freq = vcdtyp.Frequency.Daily
        else:
            freq = vcdtyp.Frequency.Minutely
        # Get the list of contracts to extract data for.
        contracts = srs.unique().tolist()
        # Extract relevant data subseries for each contract and put in list.
        data_subseries = []
        for contract in tqdm(
            contracts,
            leave=False,
            disable=self._disable_tqdm,
            desc="Replacing contracts with data",
        ):
            # Load contract data.
            data = self._data_loader.read_data(
                "Kibot",
                contract,
                vcdtyp.AssetClass.Futures,
                freq,
                vcdtyp.ContractType.Expiry,
            )
            # Resample to the frequency of the input series, adding
            # TWAP/VWAP columns.
            resampled = cfinan.resample_ohlcv_bars(
                data,
                rule=srs.index.freq,
                volume_col="vol",
                add_twap_vwap=True,
            )
            # Restrict to relevant subseries.
            subseries = resampled.reindex(srs[srs == contract].index)
            data_subseries.append(subseries.copy())
        # Merge the contract data over the partitioned srs index.
        df = pd.concat(data_subseries, axis=0)
        dbg.dassert_strictly_increasing_index(df)
        dbg.dassert(df.index.equals(srs.index))
        return df
| StarcoderdataPython |
67247 | <reponame>datakortet/cmsplugin-contact<gh_stars>0
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
# Auto-generated Django migration: makes `customer.gifts` non-editable and
# converts `customer.wallet` to a non-editable MoneyField in IQD with zero
# decimal places.
class Migration(migrations.Migration):

    dependencies = [
        ('accounts', '0016_alter_order_location'),
    ]

    operations = [
        migrations.AlterField(
            model_name='customer',
            name='gifts',
            field=models.IntegerField(default=0, editable=False),
        ),
        migrations.AlterField(
            model_name='customer',
            name='wallet',
            field=djmoney.models.fields.MoneyField(decimal_places=0, default=Decimal('0'), default_currency='IQD', editable=False, max_digits=14),
        ),
    ]
| StarcoderdataPython |
171147 | # -*- coding: utf-8 -*-
import json
from pioreactor.pubsub import QOS
from pioreactor.utils.timing import current_utc_time
from pioreactor.background_jobs.subjobs.base import BackgroundSubJob
from pioreactor.background_jobs.temperature_control import TemperatureController
class TemperatureAutomation(BackgroundSubJob):
    """
    This is the super class that Temperature automations inherit from.
    The `execute` function, which is what subclasses will define, is updated every time a new temperature is recorded to MQTT.
    Temperatures are updated every 10 minutes.

    To change setting over MQTT:

    `pioreactor/<unit>/<experiment>/temperature_automation/<setting>/set` value
    """

    # Cached MQTT observations (None until the first message arrives).
    latest_growth_rate = None
    previous_growth_rate = None
    latest_temperature = None
    previous_temperature = None
    # Window over which the current settings have been active.
    # NOTE: evaluated at class-definition (import) time; it is refreshed
    # whenever a published setting changes (see __setattr__ below).
    latest_settings_started_at = current_utc_time()
    latest_settings_ended_at = None
    def __init_subclass__(cls, **kwargs):
        super().__init_subclass__(**kwargs)
        # this registers all subclasses of TemperatureAutomation back to TemperatureController, so the subclass
        # can be invoked in TemperatureController.
        if hasattr(cls, "key") and cls.key:
            TemperatureController.automations[cls.key] = cls
    def __init__(self, unit=None, experiment=None, parent=None, **kwargs):
        super(TemperatureAutomation, self).__init__(
            job_name="temperature_automation", unit=unit, experiment=experiment
        )
        self.logger.info(f"Starting {self.__class__.__name__} automation.")
        # The TemperatureController that owns the heater PWM.
        self.temperature_control_parent = parent
        self.start_passive_listeners()
    def update_heater(self, new_duty_cycle: float):
        """
        Update heater's duty cycle. This function checks for a lock on the PWM, and will not
        update if the PWM is locked.

        Returns true if the update was made (eg: no lock), else returns false
        """
        return self.temperature_control_parent.update_heater(new_duty_cycle)
    def update_heater_with_delta(self, delta_duty_cycle: float):
        """
        Update heater's duty cycle by a relative amount. This function checks for a lock on
        the PWM, and will not update if the PWM is locked.

        Returns true if the update was made (eg: no lock), else returns false
        """
        return self.temperature_control_parent.update_heater_with_delta(delta_duty_cycle)
    def execute(self):
        # Subclasses implement the control decision; called on each new
        # temperature reading (see _set_temperature).
        raise NotImplementedError
    ########## Private & internal methods
    def on_disconnect(self):
        # Close out the current settings window and tear down children.
        self.latest_settings_ended_at = current_utc_time()
        self._send_details_to_mqtt()
        for job in self.sub_jobs:
            job.set_state("disconnected")
        self.clear_mqtt_cache()
    def __setattr__(self, name, value) -> None:
        super(TemperatureAutomation, self).__setattr__(name, value)
        # Changing any published setting (except `state`) closes the current
        # settings window, publishes it, and starts a new one.
        if name in self.published_settings and name != "state":
            self.latest_settings_ended_at = current_utc_time()
            self._send_details_to_mqtt()
            self.latest_settings_started_at, self.latest_settings_ended_at = (
                current_utc_time(),
                None,
            )
    def _set_growth_rate(self, message):
        # MQTT callback: cache the latest growth rate observation.
        if not message.payload:
            return
        self.previous_growth_rate = self.latest_growth_rate
        self.latest_growth_rate = float(json.loads(message.payload)["growth_rate"])
    def _set_temperature(self, message):
        # MQTT callback: cache the latest temperature and trigger the
        # automation's control step when the job is active.
        if not message.payload:
            return
        self.previous_temperature = self.latest_temperature
        self.latest_temperature = float(json.loads(message.payload)["temperature"])
        if self.state == self.READY or self.state == self.INIT:
            self.execute()
    def _send_details_to_mqtt(self):
        # Publish the current automation settings window for record keeping.
        self.publish(
            f"pioreactor/{self.unit}/{self.experiment}/{self.job_name}/temperature_automation_settings",
            json.dumps(
                {
                    "pioreactor_unit": self.unit,
                    "experiment": self.experiment,
                    "started_at": self.latest_settings_started_at,
                    "ended_at": self.latest_settings_ended_at,
                    "automation": self.__class__.__name__,
                    "settings": json.dumps(
                        {
                            attr: getattr(self, attr, None)
                            for attr in self.published_settings
                            if attr != "state"
                        }
                    ),
                }
            ),
            qos=QOS.EXACTLY_ONCE,
        )
    def start_passive_listeners(self):
        # Growth rate: live values only (no retained messages).
        self.subscribe_and_callback(
            self._set_growth_rate,
            f"pioreactor/{self.unit}/{self.experiment}/growth_rate_calculating/growth_rate",
            allow_retained=False,
        )
        # Temperature: retained messages allowed so a fresh automation can
        # act on the last known reading immediately.
        self.subscribe_and_callback(
            self._set_temperature,
            f"pioreactor/{self.unit}/{self.experiment}/temperature_control/temperature",
            allow_retained=True,
        )
class TemperatureAutomationContrib(TemperatureAutomation):
    # Base class for contributed (plugin) automations. Subclasses must define
    # `key`, which registers them with TemperatureController via the parent's
    # __init_subclass__ hook.
    key: str
| StarcoderdataPython |
1779832 | # -*- coding: utf-8 -*-
# Item names with special aging rules (Gilded Rose kata).
BRIE = "Aged Brie"
BACKSTAGE_PASS = "Backstage passes to a TAFKAL80ETC concert"
SULFURAS = "Sulfuras, Hand of Ragnaros"


def step_quality(item, rate=-1):
    """Adjust item.quality by `rate`, but only if the result stays in [0, 50]."""
    item.quality = item.quality + rate if 0 <= item.quality + rate <= 50 else item.quality


def update_quality(item):
    """Apply one day's aging rules to `item` and decrement its sell_in.

    Sulfuras is legendary: neither quality nor sell_in ever changes.
    """
    if item.name not in {BRIE, BACKSTAGE_PASS, SULFURAS}:
        # Ordinary items decay; twice as fast once past the sell-by date.
        if item.sell_in < 0:
            step_quality(item, -2)
        else:
            step_quality(item, -1)
    elif item.name == BACKSTAGE_PASS:
        if item.sell_in <= 0:
            # Quality drops to 0 after the concert
            step_quality(item, item.quality*-1)
        elif 0 < item.sell_in < 6:
            step_quality(item, 3)
        elif 6 <= item.sell_in < 11:
            step_quality(item, 2)
        else:
            # BUG FIX: the previous condition `11 < item.sell_in` skipped
            # sell_in == 11 entirely, leaving the quality unchanged for that
            # day. Passes 11+ days out appreciate by 1.
            step_quality(item, 1)
    elif item.name == BRIE:
        # NOTE(review): the canonical kata ages Brie by +2 past its sell
        # date; this implementation always uses +1 — confirm intended.
        step_quality(item, 1)
    if item.name != SULFURAS:
        item.sell_in = item.sell_in - 1
class GildedRose:
    """Inventory wrapper: advances every held item by one day."""

    def __init__(self, items):
        self.items = items

    def update_quality(self):
        """Run the daily aging update on each item in the inventory."""
        for stock_item in self.items:
            update_quality(stock_item)
class Item:
    """A single piece of inventory: name, days left to sell, quality score."""

    def __init__(self, name, sell_in, quality):
        self.name = name
        self.sell_in = sell_in
        self.quality = quality

    def __repr__(self):
        # Keep the exact "name, sell_in, quality" format other tools expect.
        return f"{self.name}, {self.sell_in}, {self.quality}"
| StarcoderdataPython |
1615251 | <gh_stars>0
#!/usr/bin/env python3
"""
This script will loop over all installed pip packages and update them to the
latest available version
"""
import pip
if __name__ == "__main__":
    # BUG FIX: pip.get_installed_distributions and pip.main were removed in
    # pip 10; invoking pip in-process was never a supported API. Drive pip
    # through subprocesses of the current interpreter instead.
    import json
    import subprocess
    import sys

    # Enumerate installed packages, sorted case-insensitively by name.
    listing = subprocess.run(
        [sys.executable, "-m", "pip", "list", "--format=json"],
        check=True, capture_output=True, text=True,
    )
    installed = sorted(
        (pkg["name"] for pkg in json.loads(listing.stdout)), key=str.lower
    )
    pip_args = ["install", "-U", "--upgrade-strategy=only-if-needed"]
    for name in installed:
        # Upgrading pip/setuptools from within pip itself is unreliable; skip.
        if name in ("pip", "setuptools"):
            continue
        subprocess.run([sys.executable, "-m", "pip"] + pip_args + [name])
| StarcoderdataPython |
3360149 | #!/usr/bin/env python
#-*- coding: utf-8 -*-
"""
xmind.core.workbook
~~~~~~~~~~~~~~~~~~~
:mod:``xmind.core.workbook`` implements the command XMind
manipulations.
:copyright:
:license:
"""
__author__ = "<EMAIL> <<NAME>>"
from . import Document
from . import const
from .mixin import WorkbookMixinElement
from .sheet import SheetElement
from .topic import TopicElement
from .relationship import RelationshipElement
from .. import utils
class WorkbookElement(WorkbookMixinElement):
    """DOM wrapper for the root <workbook> element of an XMind document.

    Ensures the required XML namespaces are declared and that the workbook
    always contains at least one sheet.
    """
    TAG_NAME = const.TAG_WORKBOOK
    def __init__(self, node, ownerWorkbook):
        super(WorkbookElement, self).__init__(node, ownerWorkbook)
        # Initialize WorkbookElement with default attribute
        namespace = (const.NAMESPACE, const.XMAP)
        attrs = [const.NS_FO, const.NS_XHTML, const.NS_XLINK, const.NS_SVG]
        for attr in attrs:
            self.setAttributeNS(namespace, attr)
        # Initialize WorkbookElement need contains at least one SheetElement
        if not self.getSheets():
            sheet = self.createSheet()
            self.addSheet(sheet)
    def setOwnerWorkbook(self, workbook):
        # The owner is fixed at construction time; reassignment is forbidden.
        raise Exception(
            """WorkbookDocument allowed only contains one WorkbookElement
            """)
    def getSheets(self):
        """Return all sheets as SheetElement wrappers, in document order."""
        sheets = self.getChildNodesByTagName(const.TAG_SHEET)
        owner_workbook = self.getOwnerWorkbook()
        sheets = [SheetElement(sheet, owner_workbook) for sheet in sheets]
        return sheets
    def getSheetByIndex(self, index):
        """Return the sheet at `index`, or None if out of range."""
        sheets = self.getSheets()
        if index < 0 or index >= len(sheets):
            return
        return sheets[index]
    def createSheet(self):
        """Create a new, detached sheet owned by this workbook."""
        sheet = SheetElement(None, self.getOwnerWorkbook())
        return sheet
    def addSheet(self, sheet, index=-1):
        """Insert `sheet` before position `index` (appends when out of range)."""
        sheets = self.getSheets()
        if index < 0 or index >= len(sheets):
            self.appendChild(sheet)
        else:
            self.insertBefore(sheet, sheets[index])
        self.updateModifiedTime()
    def removeSheet(self, sheet):
        """Remove `sheet`; refuses to remove the last remaining sheet."""
        sheets = self.getSheets()
        if len(sheets) <= 1:
            return
        if sheet.getParentNode() == self.getImplementation():
            self.removeChild(sheet)
            self.updateModifiedTime()
    def moveSheet(self, original_index, target_index):
        """Move the sheet at `original_index` to `target_index`.

        Out-of-range targets move the sheet to the end.
        """
        if original_index < 0 or original_index == target_index:
            return
        sheets = self.getSheets()
        if original_index >= len(sheets):
            return
        sheet = sheets[original_index]
        if not target_index < 0 and target_index < len(sheets) - 1:
            # Moving forward requires a +1 offset because removal shifts the
            # later sheets left before re-insertion.
            if original_index < target_index:
                target_index += 1
            else:
                # NOTE(review): no-op branch kept as-is.
                target_index = target_index
            target = sheets[target_index]
            if target != sheet:
                self.removeChild(sheet)
                self.insertBefore(sheet, target)
        else:
            # Target beyond the last position: move the sheet to the end.
            self.removeChild(sheet)
            self.appendChild(sheet)
        self.updateModifiedTime()
    def getVersion(self):
        # NOTE(review): relies on the mixin's getAttribution (sic) accessor.
        return self.getAttribution(const.ATTR_VERSION)
class WorkbookDocument(Document):
    """Central object corresponding to one XMind workbook file.

    Wraps the underlying DOM document and delegates sheet management to
    its single root :class:`WorkbookElement`.
    """
    def __init__(self, node=None, path=None):
        """Construct a new `WorkbookDocument`.

        :param node: DOM node to parse as the workbook document; when not
                     given a new document is created.
        :param path: filesystem location where the workbook will be placed.
        """
        super(WorkbookDocument, self).__init__(node)
        self._path = path
        # Ensure the document contains a WorkbookElement as its root.
        _workbook_element = self.getFirstChildNodeByTagName(
            const.TAG_WORKBOOK)
        # NOTE: the wrapper is created even when no <workbook> node was
        # found; in that case WorkbookElement builds a fresh node that is
        # appended below.
        self._workbook_element = WorkbookElement(
            _workbook_element,
            self)
        if not _workbook_element:
            self.appendChild(self._workbook_element)
        self.setVersion(const.VERSION)
    def getWorkbookElement(self):
        """Return the root WorkbookElement wrapper."""
        return self._workbook_element
    def createRelationship(self, end1, end2, title=None):
        """Create a relationship between two topics.

        Convenience wrapper around the sheet-level method of the same
        name; both topics must live on the same sheet.
        """
        sheet1 = end1.getOwnerSheet()
        sheet2 = end2.getOwnerSheet()
        if sheet1.getImplementation() == sheet2.getImplementation():
            rel = sheet1.create_relationship(end1.getID(),end2.getID(),title)
            return rel
        else:
            raise Exception("Topics not on the same sheet!")
    def createTopic(self):
        """Create and return a new detached `TopicElement`.

        The topic is NOT added to the workbook by this call.
        """
        return TopicElement(None, self)
    def getSheets(self):
        """List all sheets in the workbook (empty list when none)."""
        return self._workbook_element.getSheets()
    def getPrimarySheet(self):
        """Return the first sheet of the workbook."""
        return self._workbook_element.getSheetByIndex(0)
    def createSheet(self, index=-1):
        """Create a new sheet and add it to the workbook.

        :param index: insert the sheet before the sheet at *index*; when
                      omitted (or out of range) the sheet is appended.
        """
        sheet = self._workbook_element.createSheet()
        self._workbook_element.addSheet(sheet, index)
        return sheet;
    def removeSheet(self, sheet):
        """Remove the given `SheetElement` from the workbook."""
        self._workbook_element.removeSheet(sheet)
    def moveSheet(self, original_index, target_index):
        """Move a sheet from *original_index* to *target_index*.

        :param original_index: non-negative index of the sheet to move.
        :param target_index: non-negative destination index, less than the
                             number of sheets.
        """
        self._workbook_element.moveSheet(original_index, target_index)
    def getVersion(self):
        """Return the workbook's version attribute."""
        return self._workbook_element.getVersion()
    def getModifiedTime(self):
        """Return the workbook's last-modified timestamp."""
        return self._workbook_element.getModifiedTime()
    def updateModifiedTime(self):
        """Refresh the workbook's last-modified timestamp."""
        return self._workbook_element.updateModifiedTime()
    def setModifiedTime(self):
        """Set the workbook's last-modified timestamp."""
        return self._workbook_element.setModifiedTime()
    def get_path(self):
        """Return the absolute workbook path, or None when no path is set."""
        if self._path:
            return utils.get_abs_path(self._path)
    def set_path(self, path):
        """Store *path* (normalized to an absolute path) as the workbook location."""
        self._path = utils.get_abs_path(path)
| StarcoderdataPython |
1799890 | <reponame>aviplane/scope_interface
# NOTE(review): labscript convention imports h5_lock before any h5py use so
# HDF5 file access is serialized across processes — confirm against the
# labscript_utils docs.
import labscript_utils.h5_lock
from labscript_utils.ls_zprocess import ProcessTree
# Register this process with the zprocess/zlock infrastructure under a
# human-readable name.
process_tree = ProcessTree.instance()
process_tree.zlock_client.set_process_name("probe-error-scope")
from ScopeInterface import ScopeServer
# Serve the scope identified by "DS1ZA205020656" (presumably the instrument
# ID string — verify) on TCP port 2626; blocks until interrupted.
port = 2626
kserver = ScopeServer("DS1ZA205020656", port, "ProbeErrorScope")
kserver.shutdown_on_interrupt()
| StarcoderdataPython |
1788511 | import os
import sys
sys.path.append("../../common")
from env_indigo import *
indigo = Indigo()
print("****** Arom/Dearom ********")
m = indigo.loadMolecule("[As]1C=N[AsH]S=1C")
origin_smiles = m.smiles()
print(origin_smiles)
m.aromatize()
print(m.smiles())
m.dearomatize()
restored_smiles = m.smiles()
print(restored_smiles)
if origin_smiles != restored_smiles:
sys.stderr.write("%s != %s" % (origin_smiles, restored_smiles))
def processMolecule(m):
    """Print the canonical SMILES of *m*, attempt aromatization and then
    dearomatization (logging any IndigoException instead of raising), and
    print the canonical SMILES again."""
    print(m.canonicalSmiles())
    for label, operation in (("Arom:", m.aromatize), ("Dearom:", m.dearomatize)):
        try:
            print(label)
            operation()
        except IndigoException as e:
            print(" %s" % (getIndigoExceptionText(e)))
    print(m.canonicalSmiles())
print("***** Invalid valence *****")
processMolecule(indigo.loadMolecule("I1c2ccccc2c3ccccc13"))
print("***** SMILES with special aromatic atoms *****")
# Lowercase bracket atoms ([si], [as], [te], [b]) as emitted by other
# toolkits; load errors are caught and printed per molecule.
mols = [
    "[si]1(c(OC)c(c(C)cc1)c2ccccc2)OC", # Cactvs: [si]
    "c1ccc2[as]c3ccccc3[siH]c2c1",
    "c1ccc2[te]c3ccccc3[bH]c2c1", # RDKit: [te]
    "C[b]1o[b](C)o[b](C)o1",
]
for smiles in mols:
    print("***\n%s: " % (smiles))
    try:
        processMolecule(indigo.loadMolecule(smiles))
    except IndigoException as e:
        print(" %s" % (getIndigoExceptionText(e)))
print("***** Other cases *****")
def processToDearomatize(m):
    """Print SMILES and canonical SMILES of *m*, dearomatize it, then print
    both again; an IndigoException at any step is caught and logged."""
    def show():
        # Dump both SMILES flavours of the molecule in its current state.
        print(" " + m.smiles())
        print(" " + m.canonicalSmiles())
    try:
        show()
        m.dearomatize()
        show()
    except IndigoException as err:
        print(" %s" % (getIndigoExceptionText(err)))
# Run the dearomatization check on every molecule in the SMILES file, both
# as loaded and after a molfile round-trip.
for m in indigo.iterateSmilesFile(
    joinPathPy("molecules/dearomatization.smi", __file__)
):
    print(m.rawData())
    processToDearomatize(m)
    m2 = indigo.loadMolecule(m.molfile())
    processToDearomatize(m2)
# Aromatize/dearomatize a benzene radical; each step logs errors instead
# of aborting the script.
print("***** Radical *****")
m = indigo.loadMoleculeFromFile(
    joinPathPy("molecules/benzene_radical.mol", __file__)
)
print(m.smiles())
print("Aromatize")
try:
    m.aromatize()
    print(" " + m.smiles())
except IndigoException as err:
    print(" %s" % (getIndigoExceptionText(err)))
print("Dearomatize")
try:
    m.dearomatize()
    print(" " + m.smiles())
except IndigoException as err:
    print(" %s" % (getIndigoExceptionText(err)))
print("Aromatize")
try:
    m.aromatize()
    print(" " + m.smiles())
except IndigoException as err:
    print(" %s" % (getIndigoExceptionText(err)))
# Aromatization on a structure with an unusual iodine valence: tried
# directly, then after an explicit dearomatization.
print("***** Valences *****")
mol = indigo.loadMolecule("I1c2ccccc2c3ccccc13")
print(mol.smiles())
try:
    mol.aromatize()
    print(mol.smiles())
except IndigoException as err:
    print(" %s" % (getIndigoExceptionText(err)))
mol.dearomatize()
print(mol.smiles())
try:
    mol.aromatize()
    print(mol.smiles())
except IndigoException as err:
    print(" %s" % (getIndigoExceptionText(err)))
print("***** Coordination compound *****")
def executeOperation(m, func, msg):
    """Apply *func* to molecule *m*, then print *msg* followed by either
    the resulting SMILES or the Indigo error text."""
    try:
        func(m)
        outcome = m.smiles()
    except IndigoException as e:
        outcome = getIndigoExceptionText(e)
    print(msg + outcome)
def arom(m):
    """Aromatize *m* in place."""
    m.aromatize()
def dearom(m):
    """Dearomatize *m* in place."""
    m.dearomatize()
def noneFunc(m):
    """Identity operation: leave *m* untouched."""
    pass
indigo = Indigo()
# For every molfile under molecules/arom-test (in sorted order), run the
# arom/dearom operations in both orders on two independent copies.
for root, dirnames, filenames in os.walk(
    joinPathPy("molecules/arom-test", __file__)
):
    filenames.sort()
    for filename in filenames:
        sys.stdout.write("%s: \n" % filename)
        try:
            m1 = indigo.loadMoleculeFromFile(os.path.join(root, filename))
            m2 = indigo.loadMoleculeFromFile(os.path.join(root, filename))
            executeOperation(m1, noneFunc, " Original: ")
            executeOperation(m1, arom, " Arom: ")
            executeOperation(m2, dearom, " Dearom: ")
            executeOperation(m1, dearom, " Arom->Dearom: ")
            executeOperation(m2, arom, " Dearom->Arom: ")
        except IndigoException as e:
            print(" %s" % (getIndigoExceptionText(e)))
print("***** R-group fragments *****")
def printMoleculeWithRGroups(m):
    """Dump a molecule's SMILES together with every R-group and the SMILES
    of each of its fragments."""
    print(m.smiles())
    print("RGroup count: %d" % m.countRGroups())
    for rgroup in m.iterateRGroups():
        print("RGroup=%d:" % rgroup.index())
        for fragment in rgroup.iterateRGroupFragments():
            print(" Fragment=%d:" % fragment.index())
            print(" " + fragment.smiles())
mol = indigo.loadMoleculeFromFile(
    joinPathPy("molecules/arom_rgroup_member.mol", __file__)
)
printMoleculeWithRGroups(mol)
print("")
print("Aromatized:")
mol.aromatize()
printMoleculeWithRGroups(mol)
# Hydrogen-count behaviour: each canonicalSmiles()/dearomatize() step is
# attempted individually so failures are reported without stopping.
print("***** Number of hydrogens when loading from SMILES *****")
orginal = "Cc1nnc2c(N)ncnc12"  # (sic: variable name typo preserved)
print(orginal)
m = indigo.loadMolecule("Cc1nnc2c(N)ncnc12")
print(m.smiles())
try:
    print(m.canonicalSmiles())
except IndigoException as e:
    print(" %s" % (getIndigoExceptionText(e)))
try:
    m.dearomatize()
except IndigoException as e:
    print(" %s" % (getIndigoExceptionText(e)))
try:
    print(m.canonicalSmiles())
except IndigoException as e:
    print(" %s" % (getIndigoExceptionText(e)))
m.aromatize()
try:
    print(m.canonicalSmiles())
except IndigoException as e:
    print(" %s" % (getIndigoExceptionText(e)))
# Same checks on a copy reloaded from the (possibly aromatic) SMILES.
m2 = indigo.loadMolecule(m.smiles())
try:
    print(m2.canonicalSmiles())
except IndigoException as e:
    print(" %s" % (getIndigoExceptionText(e)))
try:
    m2.dearomatize()
except IndigoException as e:
    print(" %s" % (getIndigoExceptionText(e)))
try:
    print(m2.canonicalSmiles())
except IndigoException as e:
    print(" %s" % (getIndigoExceptionText(e)))
m.aromatize()
try:
    print(m.canonicalSmiles())
except IndigoException as e:
    print(" %s" % (getIndigoExceptionText(e)))
# With unique-dearomatization enabled, ambiguous dearomatization raises.
print("***** Dearomatize -> Aromatize *****")
m = indigo.loadMolecule("OC(C1=C(N)N=CN1)=O")
print(m.smiles())
m.aromatize()
indigo.setOption("unique-dearomatization", "true")
try:
    m.dearomatize()
    print(m.smiles())
except IndigoException as e:
    print(" %s" % (getIndigoExceptionText(e)))
print("***** Bridge bond *****")
m = indigo.loadMolecule("CC1=CC2=CNC=CC2=N1")
print(m.smiles())
m.aromatize()
print(m.smiles())
m.dearomatize()
print(m.smiles())
print("***** Invalid connectivity *****")
m = indigo.loadMoleculeFromFile(
    joinPathPy("molecules/invalid-connectivity.mol", __file__)
)
print(m.smiles())
m.aromatize()
print(m.smiles())
m.dearomatize()
print(m.smiles())
print("***** Select dearomatization with higher number of double bonds *****")
indigo = Indigo()
m = indigo.loadMolecule("c1cnn2nnnc2c1")
print(m.smiles())
m.dearomatize()
print(m.smiles())
m.aromatize()
print(m.smiles())
# Exercise both aromaticity models on a molecule with cis-trans bonds.
print("***** Arom and cis-trans *****")
indigo = Indigo()
for model in ["basic", "generic"]:
    print(model)
    indigo.setOption("aromaticity-model", model)
    m = indigo.loadMolecule("Cn1c2ccccc2c(-c2ccccc2)n/c(=N\O)c1=O")
    print(m.smiles())
    m.dearomatize()
    print(m.smiles())
    m.aromatize()
    print(m.smiles())
print("***** Process arom atoms *****")
indigo = Indigo()
for mol in indigo.iterateSDFile(
    joinPathPy("molecules/issue_22.sdf", __file__)
):
    mol.dearomatize()
    print(mol.smiles())
# Substructure match of an aromatized query against the aromatized target
# built from the same file must succeed.
print("***** Process ferrocene-like structure *****")
indigo = Indigo()
m = indigo.loadMoleculeFromFile(
    joinPathPy("molecules/BoPhoz(R).mol", __file__)
)
m.aromatize()
print(m.smiles())
q = indigo.loadQueryMoleculeFromFile(
    joinPathPy("molecules/BoPhoz(R).mol", __file__)
)
q.aromatize()
print(q.smiles())
matcher = indigo.substructureMatcher(m)
assert matcher.match(q) != None
# Dearomatization must also be applied inside R-group fragments.
print("***** Dearomatization for R-Groups *****")
indigo = Indigo()
mol = indigo.loadMoleculeFromFile(
    joinPathPy(
        "../../../../../data/molecules/rgroups/Rgroup_for_Dearomatize.mol",
        __file__,
    )
)
mol.aromatize()
print("before")
for rgroup in mol.iterateRGroups():
    print(" Rgroup #" + str(rgroup.index()))
    for frag in rgroup.iterateRGroupFragments():
        print(frag.canonicalSmiles())
mol.dearomatize()
print("after dearom")
for rgroup in mol.iterateRGroups():
    print(" Rgroup #" + str(rgroup.index()))
    for frag in rgroup.iterateRGroupFragments():
        print(frag.canonicalSmiles())
| StarcoderdataPython |
17356 | import shapefile
class Andir:
def __init__(self):
self.kelurahan = shapefile.Writer(
'kelurahan_andir', shapeType=shapefile.POLYGON)
self.kelurahan.shapeType
self.kelurahan.field('kelurahan_di_andir', 'C')
self.kantor = shapefile.Writer(
'kantor_kelurahan_andir', shapeType=shapefile.POINT)
self.kantor.shapeType
self.kantor.field('kantor_kelurahan_di_andir', 'C')
self.jalan = shapefile.Writer(
'jalan_andir', shapeType=shapefile.POLYLINE)
self.jalan.shapeType
self.jalan.field('jalan_di_andir', 'C')
# Kelurahan
    def kelurahanCampaka(self, nama):
        """Write the Campaka ward boundary polygon with *nama* as its
        attribute record (lon/lat vertex ring; first vertex == last)."""
        self.kelurahan.record(nama)
        self.kelurahan.poly([[
            [107.5688412, -6.9100128],
            [107.5691201, -6.9097865],
            [107.5691094, -6.9097226],
            [107.569163, -6.9097412],
            [107.5691523, -6.9096667],
            [107.5692167, -6.9096507],
            [107.5693562, -6.9094057],
            [107.5693213, -6.9093125],
            [107.5693401, -6.9092593],
            [107.5693937, -6.909254],
            [107.5693588, -6.9092193],
            [107.5694259, -6.9090809],
            [107.5694232, -6.9089158],
            [107.5694956, -6.9086948],
            [107.5695412, -6.9086335],
            [107.5695734, -6.9085164],
            [107.5696029, -6.9084791],
            [107.5696002, -6.9084285],
            [107.5696888, -6.908298],
            [107.5697504, -6.9080557],
            [107.5697183, -6.9079758],
            [107.5699838, -6.9070998],
            [107.570016, -6.9070679],
            [107.5701353, -6.9069387],
            [107.5701655, -6.906761],
            [107.5701471, -6.9066948],
            [107.5701499, -6.906643],
            [107.570264, -6.9064906],
            [107.5702372, -6.9063532],
            [107.5701916, -6.9062912],
            [107.5701796, -6.9062082],
            [107.5702621, -6.9061069],
            [107.5702705, -6.9060224],
            [107.5702735, -6.9059272],
            [107.5702582, -6.9058752],
            [107.5702812, -6.9058353],
            [107.570331, -6.9053586],
            [107.5703968, -6.9051163],
            [107.5703898, -6.9050277],
            [107.5704728, -6.9047611],
            [107.570537, -6.9044838],
            [107.5705486, -6.9044575],
            [107.5705852, -6.904431],
            [107.5706371, -6.9043805],
            [107.5706415, -6.9043086],
            [107.5706277, -6.9042101],
            [107.570663, -6.9041775],
            [107.5706565, -6.9041212],
            [107.5707491, -6.9039007],
            [107.5707861, -6.9038517],
            [107.5708515, -6.903726],
            [107.571029, -6.9033889],
            [107.5711822, -6.9031591],
            [107.5712829, -6.9031001],
            [107.5713198, -6.9030174],
            [107.5713183, -6.9029548],
            [107.5712337, -6.9028629],
            [107.5709224, -6.9026604],
            [107.5709029, -6.9026318],
            [107.5708951, -6.9026101],
            [107.570902, -6.9025687],
            [107.5709255, -6.902512],
            [107.5699489, -6.9018673],
            [107.5693062, -6.9014289],
            [107.5681693, -6.9006596],
            [107.567794, -6.9004107],
            [107.5674402, -6.9001352],
            [107.5671834, -6.8999973],
            [107.5668738, -6.8997586],
            [107.5673784, -6.8990922],
            [107.5674804, -6.898867],
            [107.5674938, -6.8987944],
            [107.5676575, -6.8983809],
            [107.5678829, -6.8980119],
            [107.5679768, -6.8978061],
            [107.5680828, -6.897634],
            [107.5682431, -6.8973935],
            [107.5683634, -6.897188],
            [107.5684758, -6.8969693],
            [107.5685892, -6.8969163],
            [107.5686339, -6.8968247],
            [107.5686241, -6.8967202],
            [107.5686192, -6.8965988],
            [107.5685952, -6.8964875],
            [107.5685484, -6.8964145],
            [107.5685499, -6.8963149],
            [107.5685818, -6.896162],
            [107.5685851, -6.8960259],
            [107.5684635, -6.8959323],
            [107.5684081, -6.8959108],
            [107.5683958, -6.8958547],
            [107.5683944, -6.8957056],
            [107.568337, -6.8955877],
            [107.5682957, -6.895382],
            [107.5681746, -6.8949567],
            [107.5682117, -6.894851],
            [107.5680265, -6.8945171],
            [107.5680009, -6.8944163],
            [107.5678567, -6.8942487],
            [107.56782, -6.8941755],
            [107.5678101, -6.894097],
            [107.5678994, -6.8939173],
            [107.5679365, -6.8938042],
            [107.5679396, -6.8937164],
            [107.5678969, -6.8936179],
            [107.5677161, -6.8934741],
            [107.5671385, -6.8930081],
            [107.5671064, -6.8928797],
            [107.5669697, -6.8927356],
            [107.5669268, -6.8925901],
            [107.5670463, -6.8923704],
            [107.5668794, -6.8920902],
            [107.5668064, -6.892011],
            [107.56655, -6.8918584],
            [107.5662883, -6.891759],
            [107.5660598, -6.8918053],
            [107.5648658, -6.8913936],
            [107.5648508, -6.8916644],
            [107.5646481, -6.8921482],
            [107.564771, -6.8922401],
            [107.5649719, -6.892278],
            [107.5651165, -6.8924518],
            [107.5647567, -6.8931561],
            [107.5645768, -6.8931904],
            [107.5645606, -6.8932806],
            [107.5642931, -6.8933683],
            [107.5639611, -6.8939379],
            [107.5639608, -6.8940531],
            [107.5650172, -6.894951],
            [107.5649834, -6.8950079],
            [107.5650354, -6.895118],
            [107.5650229, -6.8952121],
            [107.5648317, -6.8955468],
            [107.5646603, -6.8960059],
            [107.5645372, -6.8961801],
            [107.5644152, -6.8963125],
            [107.5643791, -6.896405],
            [107.5642962, -6.8964301],
            [107.5642355, -6.8965784],
            [107.563983, -6.8967268],
            [107.5639292, -6.8968649],
            [107.563634, -6.8969206],
            [107.5630975, -6.896794],
            [107.5630412, -6.8968213],
            [107.5628963, -6.8970962],
            [107.5617891, -6.8964114],
            [107.5614757, -6.8962167],
            [107.5611903, -6.8960248],
            [107.5608727, -6.8958382],
            [107.5602911, -6.8954171],
            [107.5601647, -6.8953644],
            [107.5599015, -6.8953304],
            [107.5621585, -6.8990243],
            [107.5648286, -6.9034798],
            [107.5675416, -6.9078873],
            [107.568771, -6.909903],
            [107.5688412, -6.9100128],
        ]])
    def kelurahanCiroyom(self, nama):
        """Write the Ciroyom ward boundary polygon with *nama* as its
        attribute record (lon/lat vertex ring; first vertex == last)."""
        self.kelurahan.record(nama)
        self.kelurahan.poly([[
            [107.5835421, -6.9108951],
            [107.5834349, -6.9110762],
            [107.5831184, -6.9118271],
            [107.5827777, -6.912759],
            [107.5825149, -6.913691],
            [107.5830862, -6.9138388],
            [107.5833719, -6.9138914],
            [107.5835388, -6.9138911],
            [107.5836441, -6.9139414],
            [107.5833732, -6.9150303],
            [107.5835301, -6.9150849],
            [107.5847384, -6.9154724],
            [107.5852265, -6.9156322],
            [107.5851635, -6.9173336],
            [107.5851589, -6.9176585],
            [107.5850925, -6.9185052],
            [107.592997, -6.9194399],
            [107.5933806, -6.9166693],
            [107.5933698, -6.9164523],
            [107.593343, -6.9163811],
            [107.593351, -6.9162309],
            [107.5932464, -6.9160569],
            [107.5931579, -6.9159776],
            [107.5928159, -6.9157064],
            [107.5928199, -6.9154431],
            [107.5925933, -6.91527],
            [107.5926596, -6.9150317],
            [107.5926606, -6.9149552],
            [107.5928542, -6.9143204],
            [107.5929564, -6.9143133],
            [107.5929807, -6.9142711],
            [107.592966, -6.9140969],
            [107.5929728, -6.9140132],
            [107.5916209, -6.9139234],
            [107.5901283, -6.9139075],
            [107.5891399, -6.9137692],
            [107.588118, -6.9135059],
            [107.5868466, -6.9130123],
            [107.5852239, -6.9120075],
            [107.5847424, -6.9116616],
            [107.5835421, -6.9108951],
        ]])
    def kelurahanDungusCariang(self, nama):
        """Write the Dungus Cariang ward boundary polygon with *nama* as
        its attribute record (lon/lat vertex ring; first vertex == last)."""
        self.kelurahan.record(nama)
        self.kelurahan.poly([[
            [107.5791752, -6.9079447],
            [107.57897, -6.9082596],
            [107.5787527, -6.9087209],
            [107.5769161, -6.9168328],
            [107.5768158, -6.9174711],
            [107.57737, -6.9175158],
            [107.578077, -6.9176377],
            [107.5808391, -6.9179676],
            [107.581166, -6.918042],
            [107.5850925, -6.9185052],
            [107.5851589, -6.9176585],
            [107.5851635, -6.9173336],
            [107.5851729, -6.9170687],
            [107.5852265, -6.9156322],
            [107.5847384, -6.9154724],
            [107.5835301, -6.9150849],
            [107.5833732, -6.9150303],
            [107.5836441, -6.9139414],
            [107.5835388, -6.9138911],
            [107.5833719, -6.9138914],
            [107.5830862, -6.9138388],
            [107.5825149, -6.913691],
            [107.5827777, -6.912759],
            [107.5831184, -6.9118271],
            [107.5834349, -6.9110762],
            [107.5835421, -6.9108951],
            [107.5791752, -6.9079447],
        ]])
    def kelurahanGaruda(self, nama):
        """Write the Garuda ward boundary polygon with *nama* as its
        attribute record (lon/lat vertex ring; first vertex == last)."""
        self.kelurahan.record(nama)
        self.kelurahan.poly([[
            [107.5768158, -6.9174711],
            [107.5769161, -6.9168328],
            [107.5772109, -6.91553],
            [107.5773243, -6.9150268],
            [107.5774343, -6.9145455],
            [107.5775151, -6.9141873],
            [107.5775811, -6.9138958],
            [107.5777239, -6.9132647],
            [107.5779033, -6.9124703],
            [107.57808, -6.9116905],
            [107.578135, -6.9114494],
            [107.5781879, -6.9112091],
            [107.5785809, -6.9094757],
            [107.5786521, -6.9091642],
            [107.5787527, -6.9087209],
            [107.57897, -6.9082596],
            [107.5791752, -6.9079447],
            [107.5752133, -6.9053072],
            [107.5749994, -6.9056383],
            [107.5748938, -6.905937],
            [107.5746716, -6.9063303],
            [107.5749331, -6.9064624],
            [107.574801, -6.9066536],
            [107.5747065, -6.9068635],
            [107.5744047, -6.9067304],
            [107.5742339, -6.9071245],
            [107.5739906, -6.9076517],
            [107.5739769, -6.9079446],
            [107.5740325, -6.9080411],
            [107.5737868, -6.9084887],
            [107.5734708, -6.9094448],
            [107.5733059, -6.909819],
            [107.5737331, -6.9100447],
            [107.5749462, -6.910534],
            [107.5760434, -6.9109361],
            [107.5760824, -6.9109827],
            [107.5761964, -6.9110346],
            [107.5760496, -6.9114401],
            [107.5763302, -6.9115669],
            [107.5760199, -6.9123399],
            [107.5758189, -6.9122928],
            [107.5755187, -6.9124428],
            [107.5753446, -6.912919],
            [107.5753835, -6.9129355],
            [107.5750134, -6.9137162],
            [107.5746031, -6.9135943],
            [107.5745069, -6.913683],
            [107.5743743, -6.9139497],
            [107.5739091, -6.9150764],
            [107.5736437, -6.9150009],
            [107.5734682, -6.9149867],
            [107.5730218, -6.9153425],
            [107.5729636, -6.9155205],
            [107.572743, -6.9155911],
            [107.5725292, -6.9158665],
            [107.5724994, -6.9158819],
            [107.573832, -6.9170058],
            [107.5753239, -6.9173423],
            [107.5768158, -6.9174711],
        ]])
    def kelurahanKebonJeruk(self, nama):
        """Write the Kebon Jeruk ward boundary polygon with *nama* as its
        attribute record (lon/lat vertex ring; first vertex == last)."""
        self.kelurahan.record(nama)
        self.kelurahan.poly([[
            [107.592997, -6.9194399],
            [107.6041614, -6.9207853],
            [107.6045322, -6.9159498],
            [107.6045972, -6.9154333],
            [107.6046274, -6.9148208],
            [107.6046281, -6.9145413],
            [107.6022538, -6.9144167],
            [107.5983239, -6.9143134],
            [107.5952277, -6.914224],
            [107.5934297, -6.9139855],
            [107.5929728, -6.9140132],
            [107.592966, -6.9140969],
            [107.5929807, -6.9142711],
            [107.5929564, -6.9143133],
            [107.5928542, -6.9143204],
            [107.5926606, -6.9149552],
            [107.5926596, -6.9150317],
            [107.5925933, -6.91527],
            [107.5928199, -6.9154431],
            [107.5928159, -6.9157064],
            [107.5931579, -6.9159776],
            [107.5932464, -6.9160569],
            [107.593351, -6.9162309],
            [107.593343, -6.9163811],
            [107.5933698, -6.9164523],
            [107.5933806, -6.9166693],
            [107.592997, -6.9194399],
        ]])
    def kelurahanMaleber(self, nama):
        """Write the Maleber ward boundary polygon with *nama* as its
        attribute record (lon/lat vertex ring; first vertex == last)."""
        self.kelurahan.record(nama)
        self.kelurahan.poly([[
            [107.5709255, -6.902512],
            [107.570902, -6.9025687],
            [107.5708951, -6.9026101],
            [107.5709029, -6.9026318],
            [107.5709224, -6.9026604],
            [107.5712337, -6.9028629],
            [107.5713183, -6.9029548],
            [107.5713198, -6.9030174],
            [107.5712829, -6.9031001],
            [107.5711822, -6.9031591],
            [107.571029, -6.9033889],
            [107.5707861, -6.9038517],
            [107.5707491, -6.9039007],
            [107.5706565, -6.9041212],
            [107.570663, -6.9041775],
            [107.5706277, -6.9042101],
            [107.5706415, -6.9043086],
            [107.5706371, -6.9043805],
            [107.5705852, -6.904431],
            [107.5705486, -6.9044575],
            [107.570537, -6.9044838],
            [107.5704728, -6.9047611],
            [107.5703898, -6.9050277],
            [107.5703968, -6.9051163],
            [107.570331, -6.9053586],
            [107.5702812, -6.9058353],
            [107.5702582, -6.9058752],
            [107.5702735, -6.9059272],
            [107.5702705, -6.9060224],
            [107.5702621, -6.9061069],
            [107.5701796, -6.9062082],
            [107.5701916, -6.9062912],
            [107.5702372, -6.9063532],
            [107.570264, -6.9064906],
            [107.5701499, -6.906643],
            [107.5701471, -6.9066948],
            [107.5701655, -6.906761],
            [107.5701353, -6.9069387],
            [107.570016, -6.9070679],
            [107.5699838, -6.9070998],
            [107.5697183, -6.9079758],
            [107.5697504, -6.9080557],
            [107.5696888, -6.908298],
            [107.5696002, -6.9084285],
            [107.5696029, -6.9084791],
            [107.5695734, -6.9085164],
            [107.5695412, -6.9086335],
            [107.5694956, -6.9086948],
            [107.5694232, -6.9089158],
            [107.5694259, -6.9090809],
            [107.5693588, -6.9092193],
            [107.5693937, -6.909254],
            [107.5693401, -6.9092593],
            [107.5693213, -6.9093125],
            [107.5693403, -6.9093623],
            [107.5693562, -6.9094057],
            [107.5692167, -6.9096507],
            [107.5691523, -6.9096667],
            [107.569163, -6.9097412],
            [107.5691094, -6.9097226],
            [107.5691201, -6.9097865],
            [107.5688412, -6.9100128],
            [107.5700534, -6.9120367],
            [107.5714501, -6.9143263],
            [107.5715541, -6.9146136],
            [107.5716975, -6.914762],
            [107.5720703, -6.9154531],
            [107.5724994, -6.9158819],
            [107.5725292, -6.9158665],
            [107.572743, -6.9155911],
            [107.5729636, -6.9155205],
            [107.5730218, -6.9153425],
            [107.5734682, -6.9149867],
            [107.5736437, -6.9150009],
            [107.5739091, -6.9150764],
            [107.5742, -6.9143704],
            [107.5743743, -6.9139497],
            [107.5745069, -6.913683],
            [107.5746031, -6.9135943],
            [107.5750134, -6.9137162],
            [107.5753835, -6.9129355],
            [107.5753446, -6.912919],
            [107.5755187, -6.9124428],
            [107.5758189, -6.9122928],
            [107.5760199, -6.9123399],
            [107.5763302, -6.9115669],
            [107.5760496, -6.9114401],
            [107.5761964, -6.9110346],
            [107.5760824, -6.9109827],
            [107.5760434, -6.9109361],
            [107.5749462, -6.910534],
            [107.5737331, -6.9100447],
            [107.5733059, -6.909819],
            [107.5734708, -6.9094448],
            [107.5737868, -6.9084887],
            [107.5740325, -6.9080411],
            [107.5739769, -6.9079446],
            [107.5739906, -6.9076517],
            [107.5742339, -6.9071245],
            [107.5744047, -6.9067304],
            [107.5747065, -6.9068635],
            [107.574801, -6.9066536],
            [107.5749331, -6.9064624],
            [107.5746716, -6.9063303],
            [107.5748938, -6.905937],
            [107.5749994, -6.9056383],
            [107.5752133, -6.9053072],
            [107.5709255, -6.902512],
        ]])
# Kantor Kelurahan
def kantorKelurahanCampaka(self, nama):
self.kantor.record(nama)
self.kantor.point(107.5631291, -6.8977897)
def kantorKelurahanCiroyom(self, nama):
self.kantor.record(nama)
self.kantor.point(107.5875214, -6.9144205)
def kantorKelurahanDungusCariang(self, nama):
self.kantor.record(nama)
self.kantor.point(107.5806731, -6.9125569)
def kantorKelurahanGaruda(self, nama):
self.kantor.record(nama)
self.kantor.point(107.5764865, -6.9160994)
def kantorKelurahanKebonJeruk(self, nama):
self.kantor.record(nama)
self.kantor.point(107.6011905, -6.918966)
def kantorKelurahanMaleber(self, nama):
self.kantor.record(nama)
self.kantor.point(107.5734513, -6.9073058)
# Jalan
def jalanKelurahanCampaka(self, nama):
self.jalan.record(nama)
self.jalan.line([[
[107.5696727,-6.903724],
[107.5697268,-6.9035733],
[107.5696674,-6.9035376],
[107.5697241,-6.9035733],
[107.5699168,-6.9036068],
[107.5701264,-6.9035813],
[107.5702467,-6.9034683],
[107.5703674,-6.9031728],
[107.5704988,-6.9029597],
[107.5707939,-6.9027334],
[107.5708904,-6.902808],
[107.5709445,-6.902873],
[107.5710889,-6.9029757],
[107.5709418,-6.9028703],
[107.5708904,-6.902808],
[107.5707939,-6.9027334],
[107.5707916,-6.9025375],
[107.5709002,-6.902592],
[107.5707862,-6.9025348],
[107.5705998,-6.9023923],
[107.5704737,-6.902576],
[107.5705998,-6.9023923],
[107.5704469,-6.9022991],
[107.570117,-6.9028236],
[107.5699051,-6.9027145],
[107.5702619,-6.9021713],
[107.5704469,-6.9022991],
[107.5702619,-6.9021713],
[107.5697415,-6.9018784],
[107.5696905,-6.9020248],
[107.5697415,-6.9018784],
[107.5694612,-6.9017094],
[107.5693271,-6.9019916],
[107.5694639,-6.901704],
[107.5692372,-6.9016014],
[107.569192,-6.9015038],
[107.5679511,-6.9007108],
[107.5667267,-6.8999008],
[107.5679511,-6.9007108],
[107.569192,-6.9015038],
[107.5692372,-6.9016014],
[107.5691916,-6.9016361],
[107.5690106,-6.9015496],
[107.5677473,-6.9007854],
[107.5676952,-6.9007974],
[107.5672929,-6.9015004],
[107.5673036,-6.9015696],
[107.5678079,-6.9018359],
[107.5678814,-6.9018904],
[107.5678814,-6.9019703],
[107.567817,-6.9020156],
[107.567722,-6.9019957],
[107.567773,-6.9018186],
[107.567722,-6.9019957],
[107.5676416,-6.9022779],
[107.5676843,-6.902328],
[107.5675397,-6.9027626],
[107.5677621,-6.9028579],
[107.5675397,-6.9027626],
[107.5676816,-6.9023226],
[107.5684194,-6.9025176],
[107.5684326,-6.9024771],
[107.5688217,-6.9014312],
[107.5690106,-6.9015496],
[107.5691916,-6.9016361],
[107.5689154,-6.9023657],
[107.5699051,-6.9027145],
[107.5689154,-6.9023657],
[107.5688493,-6.902523],
[107.5703674,-6.9031728],
[107.5702467,-6.9034683],
[107.569681,-6.9032161],
[107.5697827,-6.9029278],
[107.5688493,-6.902523],
[107.5687598,-6.9027677],
[107.569426,-6.9030878],
[107.5687598,-6.9027677],
[107.5686428,-6.9029574],
[107.5689566,-6.9030772],
[107.5692597,-6.9032343],
[107.5689566,-6.9030772],
[107.5686428,-6.9029574],
[107.5684084,-6.9033349],
[107.5680066,-6.9031487],
[107.5684084,-6.9033349],
[107.5681912,-6.9035905],
]])
self.jalan.record(nama)
self.jalan.line([[
[107.5688412,-6.9100128],
[107.5684508,-6.9093516],
[107.5687715,-6.9091011],
[107.5688948,-6.9088615],
[107.5689996,-6.9083494],
[107.5688948,-6.9088615],
[107.5687715,-6.9091011],
[107.5684508,-6.9093516],
[107.5682389,-6.9089975],
[107.568412,-6.908723],
[107.5682967,-6.9089014],
[107.5681156,-6.9087791],
[107.5683933,-6.908313],
[107.5683718,-6.9082517],
[107.5683933,-6.908313],
[107.5685354,-6.9083875],
[107.5687366,-6.9081665],
[107.5688761,-6.9082224],
[107.5687366,-6.9081665],
[107.5685354,-6.9083875],
[107.5683933,-6.908313],
[107.5681867,-6.9086591],
[107.5680821,-6.9085792],
[107.5681867,-6.9086591],
[107.5681156,-6.9087791],
[107.5682389,-6.9089975],
[107.5681156,-6.9087791],
[107.5675416,-6.9078873],
[107.5665894,-6.9062815],
[107.5675416,-6.9078873],
[107.5680661,-6.9075027],
[107.5683826,-6.9072897],
[107.5689941,-6.9070767],
[107.5685623,-6.9072264],
[107.568726,-6.9067624],
[107.5687947,-6.9066466],
[107.5689887,-6.9064269],
[107.5689083,-6.9063497],
[107.5686454,-6.9062326],
[107.5689083,-6.9063497],
[107.5691032,-6.9059463],
[107.5693821,-6.9055415],
[107.5693419,-6.905435],
[107.569248,-6.9052673],
[107.5691914,-6.905117],
[107.5691941,-6.9050238],
[107.5692531,-6.9048854],
[107.5691941,-6.9050238],
[107.5691914,-6.905117],
[107.569248,-6.9052726],
[107.5693419,-6.9054377],
[107.5693658,-6.905322],
[107.5698378,-6.9049999],
[107.5699505,-6.90488],
[107.5700098,-6.9047161],
[107.5699505,-6.90488],
[107.5698378,-6.9049999],
[107.5693658,-6.905322],
[107.5693419,-6.9054404],
[107.5693821,-6.9055442],
[107.5695592,-6.9057013],
[107.5696459,-6.9058065],
[107.5689887,-6.9064269],
[107.5696459,-6.9058065],
[107.5700026,-6.9054763],
[107.5701421,-6.9055136],
[107.570185,-6.9054923],
[107.5702655,-6.9051222],
[107.5702145,-6.9050184],
[107.5701957,-6.9049278],
[107.5702923,-6.904784],
[107.5703826,-6.9046682],
[107.5704148,-6.9045137],
[107.5703746,-6.9044711],
[107.569957,-6.9042754],
[107.5698873,-6.9043047],
[107.5697764,-6.9042687],
[107.5696459,-6.9041396],
[107.5696539,-6.9040651],
[107.5696459,-6.9041396],
[107.5697737,-6.9042687],
[107.5698873,-6.9043047],
[107.569957,-6.9042754],
[107.5699454,-6.9042022],
[107.569983,-6.9039732],
[107.5700151,-6.9038534],
[107.5700876,-6.9036909],
[107.5701492,-6.903659],
[107.5702512,-6.9036084],
[107.5703692,-6.903433],
]])
self.jalan.record(nama)
self.jalan.line([[
[107.5654548,-6.9044202],
[107.565461,-6.9044548],
[107.5665894,-6.9062815],
[107.5669547,-6.9055636],
[107.5676279,-6.9058565],
[107.5669547,-6.9055636],
[107.5669681,-6.9054997],
[107.5666784,-6.9053719],
[107.5669681,-6.9054997],
[107.5670673,-6.9052734],
[107.5668984,-6.9052042],
[107.5670673,-6.9052734],
[107.5670888,-6.9052121],
[107.5673731,-6.9053639],
[107.5678103,-6.905537],
[107.5677298,-6.9057314],
[107.5676467,-6.9057048],
[107.5677298,-6.9057314],
[107.5678103,-6.905537],
[107.5679069,-6.9053666],
[107.5680946,-6.9054278],
[107.5679069,-6.9053666],
[107.5678103,-6.905537],
[107.5673731,-6.9053639],
[107.5670888,-6.9052121],
[107.5671639,-6.9050471],
[107.5673976,-6.9051418],
[107.5671639,-6.9050471],
[107.5673141,-6.9046796],
[107.5669654,-6.9045518],
[107.5673141,-6.9046796],
[107.5673838,-6.9045358],
[107.5676574,-6.9046423],
[107.5676789,-6.9047302],
[107.5678589,-6.9049181],
[107.5681107,-6.9050271],
[107.5684379,-6.9051376],
[107.5681107,-6.9050271],
[107.567982,-6.9049718],
[107.5678961,-6.9049349],
[107.5679954,-6.9046343],
[107.5682126,-6.9046876],
[107.5682475,-6.9045491],
[107.568336,-6.9045624],
[107.5684084,-6.9043068],
[107.5684648,-6.9042882],
[107.5686207,-6.9043856],
[107.5688376,-6.9044905],
[107.5690575,-6.9039127],
[107.568787,-6.9037945],
[107.5687437,-6.9038062],
[107.5686874,-6.9039207],
[107.5686096,-6.9039154],
[107.5684648,-6.9042882],
[107.5686096,-6.9039154],
[107.5686874,-6.9039207],
[107.5687437,-6.9038062],
[107.568556,-6.9037343],
[107.568784,-6.9032177],
[107.568556,-6.9037343],
[107.5680624,-6.9035426],
[107.5681322,-6.9034095],
[107.5680624,-6.9035426],
[107.567864,-6.9035053],
[107.5676682,-6.9039234],
[107.5679954,-6.9040672],
[107.5676682,-6.9039234],
[107.5674589,-6.9043707],
[107.5679954,-6.9046343],
[107.5674589,-6.9043707],
[107.5673838,-6.9045358],
]])
self.jalan.record(nama)
self.jalan.line([[
[107.5652,-6.9039968],
[107.5654548,-6.9044202],
[107.5658421,-6.9040966],
[107.5657401,-6.9039155],
[107.5658421,-6.9040966],
[107.5659385,-6.9041193],
[107.5660191,-6.9041552],
[107.5663946,-6.9039155],
[107.5665502,-6.9037025],
[107.5666735,-6.9036812],
[107.5667326,-6.9035641],
[107.5667326,-6.9034575],
[107.5665931,-6.9034096],
[107.5667326,-6.9034575],
[107.5667326,-6.9035641],
[107.5672553,-6.9037584],
[107.5674243,-6.9037904],
[107.5672553,-6.9037584],
[107.5675265,-6.9032179],
[107.5675855,-6.9031939],
[107.5676123,-6.9030981],
[107.5675855,-6.9031939],
[107.5676821,-6.9032179],
[107.567733,-6.9031966],
[107.5678108,-6.902933],
[107.567733,-6.9031966],
[107.5676821,-6.9032179],
[107.5675855,-6.9031939],
[107.5675265,-6.9032179],
[107.5670491,-6.9041925],
[107.5663946,-6.9039155],
[107.5667755,-6.9040806],
[107.5665984,-6.9044587],
[107.5664697,-6.9046984],
[107.5665984,-6.9044587],
[107.5660191,-6.9041552],
]])
self.jalan.record(nama)
self.jalan.line([[
[107.5671064,-6.8928797],
[107.5670844,-6.8929687],
[107.5671826,-6.8930938],
[107.567197,-6.8931524],
[107.5671853,-6.8930912],
[107.5672721,-6.8931284],
]])
self.jalan.record(nama)
self.jalan.line([[
[107.5655931,-6.8933281],
[107.5659042,-6.8934826],
[107.5658211,-6.8936264],
[107.5659042,-6.8934826],
[107.5663951,-6.8937382],
[107.5665453,-6.8933974],
[107.5663951,-6.8937382],
[107.5663887,-6.8937888],
[107.5661386,-6.8941398],
[107.5661306,-6.8942223],
[107.5659696,-6.8944886],
[107.5661306,-6.8942223],
[107.5661386,-6.8941398],
[107.5663914,-6.8937888],
[107.5663951,-6.8937382],
[107.5666767,-6.8939246],
[107.5665345,-6.8941536],
[107.5666767,-6.8939246],
[107.5667062,-6.8939379],
[107.5670378,-6.8933335],
[107.5671129,-6.893171],
[107.567197,-6.8931524],
[107.5675913,-6.8934347],
[107.5675618,-6.8934853],
[107.5673945,-6.8934293],
[107.5673473,-6.8934426],
[107.567197,-6.8937036],
[107.5674331,-6.8938474],
[107.567197,-6.8937036],
[107.5670468,-6.8939539],
[107.567197,-6.8937036],
[107.5673473,-6.8934426],
[107.5673918,-6.8934267],
[107.5675618,-6.8934853],
[107.5677549,-6.8935971],
[107.5677818,-6.893677],
[107.5676852,-6.8938634],
]])
self.jalan.record(nama)
self.jalan.line([[
[107.5648649,-6.8914296],
[107.5654767,-6.8916706],
[107.5654982,-6.8916147],
[107.5654767,-6.8916706],
[107.5658814,-6.891821],
[107.5658496,-6.8919396],
[107.5658814,-6.891821],
[107.5660856,-6.891857],
[107.5661017,-6.8918038],
[107.5660933,-6.8918396],
[107.566096,-6.8919515],
[107.5658559,-6.8926917],
[107.5657691,-6.8926799],
[107.5655824,-6.8926145],
[107.5657691,-6.8926825],
[107.5658559,-6.8926917],
[107.5655931,-6.8933281],
[107.5654161,-6.8932989],
[107.5655555,-6.8928994],
[107.5654161,-6.8932989],
[107.5652122,-6.8935172],
[107.5654161,-6.8932989],
[107.5655931,-6.8933281],
[107.5657084,-6.8930539],
[107.5661698,-6.893227],
[107.5663522,-6.8928116],
[107.5664836,-6.8929127],
[107.5663522,-6.8928116],
[107.5664809,-6.8924894],
[107.5660035,-6.8922524],
[107.5664809,-6.8924894],
[107.5665332,-6.8923775],
[107.5669328,-6.8925346],
[107.5665332,-6.8923775],
[107.5665842,-6.8922337],
[107.5665577,-6.8921633],
[107.566096,-6.8919515],
]])
self.jalan.record(nama)
self.jalan.line([[
[107.5645095,-6.8932962],
[107.564906,-6.8936092],
[107.5650572,-6.8937553],
[107.5652261,-6.893517],
[107.5650853,-6.8934371],
[107.5650424,-6.8933612],
[107.5650531,-6.8932494],
[107.5651076,-6.8930699],
[107.5648662,-6.8929607],
[107.5651076,-6.8930699],
[107.5652712,-6.892713],
[107.5652364,-6.8927836],
[107.5650003,-6.8926851],
]])
self.jalan.record(nama)
self.jalan.line([[
[107.5650859,-6.894871],
[107.5649973,-6.8950414],
[107.5650724,-6.8950787],
]])
self.jalan.record(nama)
self.jalan.line([[
[107.5647506,-6.8957444],
[107.5650142,-6.8958335],
[107.565051,-6.8958136],
[107.565338,-6.8952704],
]])
self.jalan.record(nama)
self.jalan.line([[
[107.5599402,-6.8953401],
[107.5601682,-6.8956996],
[107.5602335,-6.8957136],
[107.5606604,-6.8959371],
[107.5607914,-6.8960384],
[107.5608343,-6.8961183],
[107.5608396,-6.8962195],
[107.5606697,-6.8965144],
[107.5601682,-6.8956996],
[107.5606697,-6.8965144],
[107.561225,-6.8974171],
[107.5615853,-6.8971515],
[107.5618964,-6.8967574],
[107.5615853,-6.8971515],
[107.561225,-6.8974171],
[107.5613001,-6.8975396],
[107.5615048,-6.8974124],
[107.5613001,-6.8975396],
[107.5615254,-6.897915],
[107.5622451,-6.8974817],
[107.562449,-6.8975935],
[107.5624651,-6.8976841],
[107.562288,-6.8978598],
[107.5624651,-6.8976841],
[107.562449,-6.8975935],
[107.5622451,-6.8974817],
[107.5615254,-6.897915],
[107.5619116,-6.8985328],
[107.5621378,-6.8983551],
[107.5619116,-6.8985328],
[107.5620162,-6.8987299],
[107.5621088,-6.8986744],
[107.5620162,-6.8987299],
[107.5625634,-6.8996299],
[107.5629886,-6.8992762],
[107.5632032,-6.8989993],
[107.5634816,-6.8983231],
[107.56331,-6.8982086],
[107.5631624,-6.898198],
[107.5630498,-6.8980702],
[107.5628835,-6.8981447],
[107.5627816,-6.8983311],
[107.5628835,-6.8981447],
[107.5630498,-6.8980702],
[107.5631281,-6.8979182],
[107.5631651,-6.8978904],
[107.5633797,-6.8980076],
[107.56331,-6.8982086],
[107.5633797,-6.8980076],
[107.5631651,-6.8978904],
[107.5632751,-6.8976588],
[107.564702,-6.8986493],
[107.5641661,-6.8996427],
[107.5642117,-6.8997129],
[107.564378,-6.8998451],
[107.5646194,-6.8994931],
[107.5643512,-6.8993173],
[107.5646194,-6.8994931],
[107.5647642,-6.8992588],
[107.5644906,-6.899083],
[107.5647642,-6.8992588],
[107.5649922,-6.8988972],
[107.564702,-6.8986493],
[107.5649922,-6.8988972],
[107.5652202,-6.8990516],
[107.5649834,-6.8994079],
[107.5647642,-6.8992588],
[107.5649861,-6.8994079],
[107.5648574,-6.8996475],
[107.5646194,-6.8994931],
[107.5648574,-6.8996502],
[107.5646033,-6.9000368],
[107.5643699,-6.8998744],
[107.564378,-6.8998451],
[107.5643699,-6.8998744],
[107.5644263,-6.8999223],
[107.56404,-6.9004975],
[107.5645309,-6.9007957],
[107.5647079,-6.9005507],
[107.5646489,-6.9004629],
[107.5642385,-6.9001993],
[107.5646489,-6.9004629],
[107.5647079,-6.9005507],
[107.5649171,-6.9002392],
[107.5646033,-6.9000368],
[107.5649171,-6.9002392],
[107.5655255,-6.8992671],
[107.5652202,-6.8990516],
[107.5655255,-6.8992671],
[107.5651183,-6.8999223],
[107.5655742,-6.9002392],
[107.5652999,-6.9000443],
[107.5656735,-6.8995149],
[107.5654796,-6.8993493],
[107.5656735,-6.8995149],
[107.5658688,-6.8996505],
[107.5658237,-6.899688],
[107.5654991,-6.9001913],
[107.5658237,-6.899688],
[107.5658688,-6.8996505],
[107.5659544,-6.8996715],
[107.5664589,-6.9000073],
[107.5660753,-6.899757],
[107.5662121,-6.8996239],
[107.5667267,-6.8999008],
[107.566869,-6.8996741],
[107.5667993,-6.8995809],
[107.5664962,-6.8993839],
[107.5665605,-6.8993014],
[107.5664935,-6.8993839],
[107.566287,-6.8992534],
[107.5663781,-6.8992028],
[107.5662789,-6.8992561],
[107.5659168,-6.8990324],
[107.5659544,-6.8988939],
[107.5663325,-6.8980099],
[107.5676199,-6.8984603],
[107.5663325,-6.8980099],
[107.5651229,-6.8975945],
[107.5649592,-6.8979407],
[107.5649405,-6.8980845],
[107.5653052,-6.8982549],
[107.5649405,-6.8980845],
[107.5648198,-6.8984067],
[107.5659168,-6.8990324],
[107.5648036,-6.8984017],
[107.563999,-6.8978315],
[107.5648036,-6.8984017],
[107.5649405,-6.8980845],
[107.5649592,-6.8979407],
[107.5651229,-6.8975945],
[107.5636133,-6.8970598],
[107.5649061,-6.8975232],
[107.5654991,-6.8962457],
[107.5655847,-6.896221],
[107.5683769,-6.897161],
[107.5669875,-6.896703],
[107.5671217,-6.8963249],
[107.5669849,-6.8962583],
[107.5669527,-6.8962264],
[107.5666764,-6.8960719],
[107.5667461,-6.8958616],
[107.5665664,-6.895779],
[107.5663358,-6.8961145],
[107.5662124,-6.896253],
[107.5663358,-6.8961145],
[107.5665664,-6.895779],
[107.566612,-6.8956272],
[107.5667087,-6.8956643],
[107.5668133,-6.8956696],
[107.5668749,-6.8956991],
[107.566808,-6.8956696],
[107.5667087,-6.8956669],
[107.566612,-6.8956272],
[107.5665745,-6.8957338],
[107.5662687,-6.895582],
[107.5665745,-6.8957338],
[107.5665986,-6.8956619],
[107.5666469,-6.8955607],
[107.5664591,-6.8954329],
[107.5664053,-6.8953507],
[107.5661614,-6.8952065],
[107.5661105,-6.8952518],
[107.5660675,-6.8953796],
[107.5665986,-6.8956619],
[107.5660675,-6.8953796],
[107.5658852,-6.8960134],
[107.5659442,-6.8960293],
[107.5660622,-6.896048],
[107.5659442,-6.8960293],
[107.5658852,-6.8960134],
[107.5657618,-6.8962716],
[107.5658852,-6.8960134],
[107.56603,-6.8955234],
[107.5655177,-6.8952598],
[107.5656006,-6.895143],
[107.5656275,-6.8950498],
[107.5656572,-6.8949509],
[107.5656275,-6.8950524],
[107.5656033,-6.8951456],
[107.5655177,-6.8952598],
[107.5653834,-6.8955823],
[107.5652627,-6.8957927],
[107.5653807,-6.8955823],
[107.5655177,-6.8952598],
[107.56603,-6.8955234],
[107.5660675,-6.8953796],
[107.5661105,-6.8952518],
[107.5661614,-6.8952065],
[107.566384,-6.8948391],
[107.5662164,-6.8947043],
[107.5658945,-6.8951064],
[107.5662164,-6.8947043],
[107.5659696,-6.8944886],
[107.5659015,-6.8944306],
[107.5660249,-6.8942122],
[107.5659015,-6.8944306],
[107.5658372,-6.8943746],
[107.5659364,-6.8941909],
[107.5658372,-6.8943746],
[107.565408,-6.8940311],
[107.5652632,-6.8942175],
[107.565408,-6.8940311],
[107.5652685,-6.8939273],
[107.5650996,-6.8941802],
[107.5652685,-6.8939273],
[107.5651934,-6.8939033],
[107.5652873,-6.8937995],
[107.5651934,-6.8939033],
[107.565003,-6.8937116],
[107.5649279,-6.8937995],
[107.565003,-6.8937116],
[107.564906,-6.8936092],
[107.5647509,-6.8938474],
[107.5646677,-6.8940684],
[107.5644421,-6.8944663],
[107.5646677,-6.8940684],
[107.5643137,-6.8938607],
[107.5644075,-6.8936983],
[107.5643137,-6.8938607],
[107.5641259,-6.8941163],
[107.5643137,-6.8938607],
[107.5641179,-6.8937276],
[107.5639596,-6.8940045],
]])
self.jalan.record(nama)
self.jalan.line([[
[107.5662883,-6.891759],
[107.5668064,-6.892011],
[107.5668571,-6.8920704],
[107.5668544,-6.8921343],
[107.566892,-6.8922196],
[107.5668893,-6.8922835],
]])
self.jalan.record(nama)
self.jalan.line([[
[107.5667062,-6.8939379],
[107.566732,-6.8939459],
[107.5671595,-6.8942308],
[107.5672909,-6.8939912],
[107.5671595,-6.8942308],
[107.5674331,-6.8944066],
[107.5675672,-6.8942016],
[107.5676959,-6.8940631],
[107.5675672,-6.8942016],
[107.5674331,-6.8944066],
[107.5680124,-6.8948007],
]])
self.jalan.record(nama)
self.jalan.line([[
[107.566384,-6.8948391],
[107.5667287,-6.8951037],
[107.5668977,-6.894896],
[107.5670774,-6.8946936],
[107.5668548,-6.8945498],
[107.5670774,-6.8946936],
[107.5672866,-6.8948241],
[107.5671659,-6.895101],
[107.5672165,-6.8951513],
[107.5670533,-6.8953726],
[107.5667287,-6.8951037],
[107.5674365,-6.8956839],
[107.5671217,-6.8963249],
]])
self.jalan.record(nama)
self.jalan.line([[
[107.5651612,-6.9012271],
[107.565019,-6.9014638],
[107.5653543,-6.9016398],
[107.5656493,-6.9017889],
[107.5656433,-6.9018904],
[107.5653463,-6.9023827],
[107.5650861,-6.9022496],
[107.5650378,-6.9021324],
[107.5651337,-6.9020022],
[107.5653543,-6.9016398],
[107.5651987,-6.9019114],
[107.5655126,-6.9020739],
[107.5651987,-6.9019114],
[107.565078,-6.9020632],
[107.564834,-6.9017274],
[107.565019,-6.9014638],
[107.564834,-6.9017274],
[107.5646569,-6.9018845],
[107.5647749,-6.9020579],
[107.5646569,-6.9018845],
[107.5641137,-6.9021941],
[107.5637918,-6.9016483],
[107.5641137,-6.9021941],
[107.5644034,-6.9026734],
[107.5647165,-6.9024614],
[107.5648849,-6.9026914],
[107.5648795,-6.9027712],
[107.5645911,-6.9029797],
[107.5644034,-6.9026734],
[107.5645911,-6.9029797],
[107.5652,-6.9039968],
[107.5652759,-6.9039315],
[107.5655119,-6.9035215],
[107.5656982,-6.9031164],
[107.565843,-6.9027277],
[107.5659732,-6.9023392],
[107.5669281,-6.9003421],
[107.5668208,-6.9005605],
[107.5675236,-6.9008747],
[107.567309,-6.9007815],
[107.5669227,-6.9015643],
[107.5664721,-6.901322],
[107.5669227,-6.9015643],
[107.5664453,-6.9025416],
[107.5659732,-6.9023392],
[107.5664453,-6.9025416],
[107.5668798,-6.9027439],
[107.5670339,-6.9027383],
[107.5670622,-6.9025629],
[107.5674485,-6.9017427],
]])
self.jalan.record(nama)
self.jalan.line([[
[107.5672263,-6.8961199],
[107.5676715,-6.8964101],
[107.5676956,-6.8964767],
[107.5677788,-6.8965273],
[107.5678603,-6.8965123],
[107.5680296,-6.8964031],
[107.5676993,-6.8959798],
[107.5674529,-6.8956975],
[107.567702,-6.8959771],
[107.5680296,-6.8964031],
[107.5683001,-6.8968212],
[107.56844,-6.8969251],
[107.5685553,-6.8968878],
[107.5685925,-6.8968132],
[107.5686032,-6.8967493],
[107.5686036,-6.8967892],
[107.5684423,-6.8965895],
[107.5682733,-6.8963392],
[107.5682254,-6.896254],
[107.56798,-6.8959758],
[107.5681017,-6.8958999],
[107.56798,-6.8959758],
[107.5676742,-6.8956216],
[107.5673184,-6.8953753],
[107.5673067,-6.8952994],
[107.567642,-6.8947802],
[107.5673067,-6.8952994],
[107.5673184,-6.895378],
[107.5676742,-6.8956216],
[107.5680292,-6.8950265],
[107.5679219,-6.8952075],
[107.5681017,-6.8958999],
[107.5682254,-6.8961502],
[107.5682254,-6.896254],
]])
self.jalan.record(nama)
self.jalan.line([[
[107.5637852,-6.8990676],
[107.5635867,-6.898977],
[107.5634794,-6.8989717],
[107.5632751,-6.8988477],
[107.5634794,-6.8989717],
[107.5629859,-6.899664],
[107.5630771,-6.899925],
[107.5628772,-6.9001332],
[107.5625634,-6.8996299],
[107.5628772,-6.9001332],
[107.5637918,-6.9016483],
[107.5639374,-6.9015708],
[107.5642378,-6.9011448],
[107.5645309,-6.9007957],
[107.5651612,-6.9012271],
[107.5655152,-6.900657],
[107.5655126,-6.9006064],
[107.5649171,-6.9002392],
[107.5655126,-6.9006064],
[107.5656708,-6.9004946],
[107.5657566,-6.9004946],
[107.5662609,-6.9007475],
[107.5661911,-6.9007103],
[107.5664406,-6.9001564],
[107.5665184,-6.9002097],
[107.5664406,-6.9001564],
[107.5661214,-6.8999221],
[107.566159,-6.8999514],
[107.5657566,-6.9004946],
[107.5659578,-6.900223],
[107.5660544,-6.9002709],
[107.5661885,-6.8999753],
[107.5660544,-6.9002709],
[107.5659149,-6.9005745],
]])
def jalanKelurahanCiroyom(self, nama):
    """Append the road (jalan) polylines for Kelurahan Ciroyom to ``self.jalan``.

    For each of the 11 road segments below, one attribute record carrying
    ``nama`` is written via ``self.jalan.record(nama)`` and one polyline
    (a single part of ``[x, y]`` vertices) is written via
    ``self.jalan.line([[...]])``.

    :param nama: value stored in the attribute record of every segment
        written by this method (presumably the kelurahan/road name —
        TODO confirm against the caller).

    NOTE(review): coordinates look like [longitude, latitude] pairs around
    107.58, -6.91 (Bandung area), presumably WGS84 — confirm the layer's CRS.
    NOTE(review): ``record()`` is called before its matching ``line()``;
    pyshp pairs shapes and records by call order, so each record/line pair
    must stay adjacent and in this order — TODO confirm ``self.jalan`` is a
    ``shapefile.Writer``.
    Several vertex runs double back on themselves (A, B, A); this encodes
    dead-end spurs within a single polyline part.
    """
    # Segment 1
    self.jalan.record(nama)
    self.jalan.line([[
        [107.5840308,-6.915249],
        [107.5842615,-6.9142664],
        [107.5845351,-6.9142771],
        [107.5845431,-6.9142478],
        [107.5845807,-6.9137019],
        [107.5848891,-6.9136514],
        [107.5848918,-6.9135901],
        [107.584983,-6.9135795],
        [107.5850367,-6.913204],
        [107.584924,-6.9132067],
        [107.5850367,-6.913204],
        [107.5851118,-6.9129031],
        [107.5851359,-6.9128153],
        [107.5852083,-6.9126022],
        [107.58516,-6.9125836],
        [107.5852405,-6.9123386],
        [107.5844621,-6.9118109],
        [107.5852405,-6.9123386],
        [107.5857367,-6.9126875],
        [107.5855785,-6.9130576],
        [107.585439,-6.9135209],
        [107.5849991,-6.9134596],
        [107.585439,-6.9135235],
        [107.5854014,-6.9137259],
        [107.5854014,-6.9139815],
        [107.5852915,-6.9139815],
        [107.5854309,-6.9147564],
        [107.5852888,-6.9148203],
        [107.585321,-6.9151558],
        [107.5853103,-6.9154833],
        [107.5851842,-6.9154966],
        [107.5851815,-6.9154327],
        [107.5851842,-6.9154966],
        [107.5851815,-6.9156191],
        ]])
    # Segment 2
    self.jalan.record(nama)
    self.jalan.line([[
        [107.5850876,-6.9184706],
        [107.586536,-6.9186623],
        [107.5878127,-6.9187848],
        [107.5878556,-6.918199],
        [107.5879307,-6.918199],
        [107.5878556,-6.918199],
        [107.5878127,-6.9187848],
        [107.5884028,-6.9188594],
        [107.5889982,-6.9189233],
        [107.5901462,-6.9190724],
        [107.5917019,-6.9192747],
        [107.5923242,-6.919344],
        [107.5929357,-6.9194292],
        [107.5930215,-6.9188221],
        [107.5917829,-6.918673],
        [107.5915737,-6.9185186],
        [107.5914717,-6.9183109],
        [107.5917185,-6.9181937],
        [107.591461,-6.9183162],
        [107.5914825,-6.9181511],
        [107.5914771,-6.9183109],
        [107.5909299,-6.9183748],
        [107.5907529,-6.9181937],
        [107.5901896,-6.9185186],
        [107.590136,-6.9186624],
        [107.5901462,-6.9190724],
        [107.590136,-6.9186624],
        [107.5901896,-6.9185186],
        [107.590256,-6.91848],
        [107.5901574,-6.9182363],
        [107.5893079,-6.9181977],
        [107.5893474,-6.9178156],
        [107.5892938,-6.9183269],
        [107.5895995,-6.9183641],
        [107.5896371,-6.918215],
        [107.5895995,-6.9183641],
        [107.5895942,-6.9185399],
        [107.5895995,-6.9183641],
        [107.5892938,-6.9183269],
        [107.5890631,-6.9183002],
        [107.5889982,-6.9189233],
        [107.5890631,-6.9183002],
        [107.5885267,-6.9182363],
        [107.5884028,-6.9188594],
        [107.5885267,-6.9182363],
        [107.5881994,-6.9181724],
        [107.58806,-6.9181937],
        [107.5881994,-6.9181724],
        [107.5882316,-6.9177357],
        [107.5879098,-6.9176931],
        [107.5882316,-6.9177357],
        [107.5881994,-6.9181724],
        [107.5885267,-6.9182363],
        [107.5887124,-6.9182576],
        [107.5888163,-6.9172991],
        [107.5889773,-6.9172298],
        [107.5889987,-6.9171074],
        [107.5889773,-6.9172298],
        [107.5888163,-6.9172991],
        [107.5887626,-6.9172586],
        [107.5884622,-6.9172347],
        [107.5878239,-6.9171415],
        [107.5872043,-6.9170935],
        [107.5870353,-6.9171415],
        [107.5867617,-6.9171441],
        [107.5866705,-6.9171388],
        [107.5866437,-6.9173145],
        [107.5865364,-6.9173465],
        [107.5864774,-6.9174743],
        [107.5865042,-6.9175968],
        [107.5865525,-6.9177672],
        [107.5865632,-6.918108],
        [107.586536,-6.9186623],
        ]])
    # Segment 3
    self.jalan.record(nama)
    self.jalan.line([[
        [107.5851589,-6.9176585],
        [107.5852342,-6.9176629],
        [107.5852879,-6.9171677],
        [107.5852342,-6.9176629],
        [107.5861998,-6.9177375],
        [107.586232,-6.9173647],
        [107.5854917,-6.9172955],
        [107.5853844,-6.9180304],
        [107.5861569,-6.9180996],
        [107.5861998,-6.9177375],
        ]])
    # Segment 4
    self.jalan.record(nama)
    self.jalan.line([[
        [107.5930215,-6.9188221],
        [107.5931774,-6.9177051],
        [107.5931077,-6.9177158],
        [107.5931774,-6.9177051],
        [107.5932954,-6.9170661],
        [107.5933169,-6.9165122],
        [107.5927697,-6.9164803],
        [107.5928233,-6.9160383],
        [107.592877,-6.915969],
        [107.5928877,-6.915772],
        [107.592877,-6.915969],
        [107.5929521,-6.915985],
        [107.5929574,-6.9158226],
        [107.5929521,-6.915985],
        [107.5932149,-6.9160289],
        [107.5929521,-6.915985],
        [107.592877,-6.915969],
        [107.5928233,-6.9160383],
        [107.5927697,-6.9164803],
        [107.5913213,-6.9164962],
        [107.5910531,-6.9168104],
        [107.5909136,-6.917966],
        [107.5907529,-6.9181937],
        [107.5909136,-6.917966],
        [107.590974,-6.9174628],
        [107.5910531,-6.9168104],
        [107.5910531,-6.9164962],
        [107.5913213,-6.9164962],
        [107.5910531,-6.9164962],
        [107.5904737,-6.9164536],
        [107.5904415,-6.9166986],
        [107.5904737,-6.9164536],
        [107.590066,-6.9164004],
        [107.5899534,-6.9171459],
        [107.5898944,-6.9177424],
        [107.5898622,-6.9182163],
        [107.589889,-6.9177371],
        [107.589948,-6.9171353],
        [107.5900017,-6.9168477],
        [107.5898085,-6.9168477],
        [107.5898085,-6.9167359],
        [107.589712,-6.9165921],
        [107.5897066,-6.9163738],
        [107.590066,-6.9164004],
        [107.5897066,-6.9163738],
        [107.5894223,-6.9163205],
        [107.5894062,-6.9164749],
        [107.5893794,-6.9166187],
        [107.5894545,-6.9166773],
        [107.5894598,-6.9167146],
        [107.5894116,-6.9167998],
        [107.5894759,-6.9168158],
        [107.5894545,-6.9169223],
        [107.5896261,-6.916885],
        [107.5898085,-6.9168477],
        ]])
    # Segment 5
    self.jalan.record(nama)
    self.jalan.line([[
        [107.5890883,-6.9137554],
        [107.589024,-6.9145329],
        [107.5891151,-6.9147299],
        [107.5900646,-6.9159441],
        [107.5902953,-6.9157844],
        [107.5908371,-6.915827],
        [107.5902953,-6.9157844],
        [107.5904884,-6.9155501],
        [107.5903972,-6.9156619],
        [107.5899627,-6.91514],
        [107.5903972,-6.9156619],
        [107.5902953,-6.9157844],
        [107.5900646,-6.9159441],
        [107.5905314,-6.9164554],
        [107.5910531,-6.9164962],
        [107.5913213,-6.9164962],
        [107.5918456,-6.9164926],
        [107.5916793,-6.9158696],
        [107.5913467,-6.9155394],
        [107.5915238,-6.9153264],
        [107.5915881,-6.915124],
        [107.5917705,-6.9149057],
        [107.5920924,-6.9152039],
        [107.5917652,-6.914911],
        [107.5913414,-6.9146021],
        [107.5906923,-6.9142666],
        [107.590923,-6.914224],
        [107.5916203,-6.9142453],
        [107.5909283,-6.9142187],
        [107.5906869,-6.914272],
        [107.5905367,-6.9142933],
        [107.5904026,-6.9146075],
        [107.5904455,-6.9145169],
        [107.5911965,-6.9149589],
        [107.5910249,-6.9152838],
        [107.5913414,-6.9155501],
        [107.5910356,-6.9152891],
        [107.5905099,-6.914895],
        [107.590585,-6.9149589],
        [107.5907459,-6.9147033],
        [107.5904509,-6.9145169],
        [107.5901183,-6.9143465],
        [107.5894048,-6.9140909],
        [107.5890669,-6.9139844],
        [107.589024,-6.9145436],
        [107.5885358,-6.9141388],
        [107.5880315,-6.9145649],
        [107.5880047,-6.914927],
        [107.5886163,-6.9150761],
        [107.5891205,-6.9147299],
        [107.5886216,-6.9150708],
        [107.5881335,-6.9154702],
        [107.5884178,-6.916237],
        [107.5894223,-6.9163205],
        [107.5890642,-6.9162929],
        [107.5890964,-6.9164634],
        [107.5890695,-6.9166178],
        [107.5890481,-6.9168255],
        [107.5890374,-6.9168628],
        [107.5886833,-6.9169586],
        ]])
    # Segment 6
    self.jalan.record(nama)
    self.jalan.line([[
        [107.5884622,-6.9172347],
        [107.5884771,-6.9170196],
        [107.5883752,-6.916945],
        [107.5883591,-6.9167347],
        [107.5883645,-6.9162341],
        [107.5884178,-6.916237],
        [107.5883672,-6.9162288],
        [107.5879675,-6.9161729],
        [107.5879756,-6.916314],
        [107.5879407,-6.9166016],
        [107.5879622,-6.9168359],
        [107.5879434,-6.9166016],
        [107.5879783,-6.9163113],
        [107.5879648,-6.9161702],
        [107.5876081,-6.9161409],
        [107.5876403,-6.916543],
        [107.587702,-6.9168332],
        [107.5877127,-6.9170169],
        [107.5878415,-6.9170382],
        [107.5878239,-6.9171415],
        [107.5878361,-6.9170382],
        [107.5877047,-6.9170089],
        [107.5877047,-6.9168279],
        [107.5876376,-6.916535],
        [107.5876081,-6.9161382],
        [107.5875732,-6.9161329],
        [107.5875893,-6.9158533],
        [107.5883001,-6.9159252],
        [107.5878924,-6.9158853],
        [107.58793,-6.9157388],
        [107.5881335,-6.9154702],
        [107.58793,-6.9157335],
        [107.5878897,-6.915888],
        [107.5875866,-6.915856],
        [107.587769,-6.9154752],
        [107.5880399,-6.9149427],
        [107.5880047,-6.914927],
        [107.5880315,-6.9145649],
        [107.586959,-6.9154167],
        [107.5869912,-6.9154353],
        [107.5869885,-6.9154726],
        [107.5870127,-6.9155152],
        [107.5869697,-6.9158693],
        [107.5869697,-6.9160797],
        [107.5875786,-6.9161356],
        [107.5869697,-6.916077],
        [107.5867739,-6.9160664],
        [107.5867364,-6.91633],
        [107.5867364,-6.916559],
        [107.5866908,-6.9166415],
        [107.5867203,-6.9168572],
        [107.5867471,-6.9170888],
        [107.5867617,-6.9171441],
        [107.5867444,-6.9170888],
        [107.5867176,-6.9168519],
        [107.5863153,-6.9168998],
        [107.5862643,-6.9167879],
        [107.5863153,-6.9169078],
        [107.5863421,-6.9169956],
        [107.5859881,-6.9169983],
        [107.5853631,-6.9169424],
        [107.58517,-6.9168998],
        [107.5853631,-6.9169424],
        [107.5853792,-6.9166974],
        [107.5855213,-6.9167081],
        [107.5853846,-6.9166974],
        [107.5851861,-6.9167001],
        [107.5853792,-6.9166974],
        [107.5853872,-6.916543],
        [107.5857359,-6.9165989],
        [107.5853872,-6.9165377],
        [107.5851941,-6.916527],
        [107.5853899,-6.916543],
        [107.5854087,-6.9163859],
        [107.5851995,-6.9163513],
        [107.5854141,-6.9163832],
        [107.5854275,-6.9162208],
        [107.5855991,-6.9158294],
        [107.5852236,-6.9157149],
        [107.5856018,-6.9158294],
        [107.5861222,-6.9159865],
        [107.586149,-6.9163273],
        [107.5862268,-6.916322],
        [107.5862965,-6.9163646],
        [107.5864521,-6.9163486],
        [107.5865674,-6.9163752],
        [107.5867418,-6.916338],
        [107.5865647,-6.9163779],
        [107.5864494,-6.9163486],
        [107.5862938,-6.9163646],
        [107.5862187,-6.916322],
        [107.5861436,-6.9163246],
        [107.5861302,-6.9164737],
        [107.5859344,-6.9164498],
        [107.5857735,-6.9164072],
        [107.5854087,-6.9163859],
        [107.5854302,-6.9162155],
        [107.5856233,-6.9162794],
        ]])
    # Segment 7
    self.jalan.record(nama)
    self.jalan.line([[
        [107.5834349,-6.9110762],
        [107.5836306,-6.9112092],
        [107.5834267,-6.9116352],
        [107.583239,-6.9115606],
        [107.5834267,-6.9116352],
        [107.5836306,-6.9112092],
        [107.5838023,-6.9113423],
        [107.583636,-6.9116299],
        [107.5838023,-6.9113423],
        [107.5844621,-6.9118109],
        [107.5844192,-6.9119707],
        [107.584505,-6.9122316],
        [107.5845694,-6.9123754],
        [107.5845425,-6.9126044],
        [107.5840544,-6.9125512],
        [107.5839471,-6.9128334],
        [107.5845479,-6.9128387],
        [107.5839471,-6.9128334],
        [107.5837969,-6.9131689],
        [107.5847035,-6.9132062],
        [107.5837969,-6.9131689],
        [107.5836441,-6.9139414],
        ]])
    # Segment 8 (two-vertex segment)
    self.jalan.record(nama)
    self.jalan.line([[
        [107.5830862,-6.9138388],
        [107.5831746,-6.913342],
        ]])
    # Segment 9
    self.jalan.record(nama)
    self.jalan.line([[
        [107.5853103,-6.9154833],
        [107.5856881,-6.9154143],
        [107.5855915,-6.91514],
        [107.5856881,-6.9154143],
        [107.5856881,-6.9155794],
        [107.5856291,-6.9158377],
        [107.5857927,-6.9158829],
        [107.5859697,-6.9153903],
        [107.5856881,-6.9154143],
        [107.5859697,-6.9153903],
        [107.5859992,-6.9152945],
        [107.586077,-6.9152732],
        [107.5866376,-6.9153664],
        [107.586959,-6.9154167],
        [107.5866376,-6.9153664],
        [107.5868307,-6.9147646],
        [107.5871445,-6.913798],
        [107.5879277,-6.9140909],
        [107.5883381,-6.9142986],
        [107.5885358,-6.9141388],
        [107.5882496,-6.9139285],
        [107.5878311,-6.9137395],
        [107.5876434,-6.9136915],
        [107.5872169,-6.9135105],
        [107.5871445,-6.913798],
        [107.5872169,-6.9135105],
        [107.5868736,-6.9133587],
        [107.5867851,-6.9136543],
        [107.5868092,-6.9137315],
        [107.5866376,-6.9145409],
        ]])
    # Segment 10
    self.jalan.record(nama)
    self.jalan.line([[
        [107.5867739,-6.9160664],
        [107.5861222,-6.9159865],
        [107.5861651,-6.9159929],
        [107.5861503,-6.9157705],
        [107.5861288,-6.9152753],
        ]])
    # Segment 11
    self.jalan.record(nama)
    self.jalan.line([[
        [107.5857367,-6.9126875],
        [107.5863523,-6.9130875],
        [107.5867492,-6.9133005],
        [107.5868736,-6.9133587],
        [107.5867546,-6.9133005],
        [107.5866473,-6.9135562],
        [107.5867546,-6.9132952],
        [107.5863523,-6.9130822],
        [107.5861162,-6.9132846],
        [107.5861377,-6.9135189],
        [107.5860411,-6.9135402],
        [107.5859285,-6.9148236],
        [107.5857783,-6.9148449],
        [107.585789,-6.9149887],
        ]])
def jalanKelurahanDungusCariang(self, nama):
self.jalan.record(nama)
self.jalan.line([[
[107.5767156,-6.9174426],
[107.5770075,-6.9174589],
[107.5770182,-6.9173577],
[107.5770075,-6.9174589],
[107.5780053,-6.917576],
[107.5781233,-6.9171447],
[107.5780053,-6.917576],
[107.5781286,-6.9175973],
[107.5787509,-6.9176825],
[107.5788689,-6.9176879],
[107.5789923,-6.9177305],
[107.580607,-6.9179169],
[107.5812024,-6.9180074],
[107.5820017,-6.9180926],
[107.5820178,-6.9178796],
[107.5820822,-6.9177092],
[107.5820178,-6.9178796],
[107.5820017,-6.9180926],
[107.5826938,-6.9181831],
[107.5830907,-6.9182417],
[107.5843996,-6.9183802],
[107.5850876,-6.9184706],
[107.5849951,-6.9184494],
[107.5850648,-6.9176399],
[107.5851589,-6.9176585],
[107.5850648,-6.9176399],
[107.5850809,-6.9172938],
[107.5844104,-6.9173257],
[107.5839222,-6.9172885],
[107.5844104,-6.9173257],
[107.5844265,-6.9170808],
[107.5844104,-6.9173257],
[107.5843996,-6.9183802],
]])
self.jalan.record(nama)
self.jalan.line([[
[107.5850809,-6.9172938],
[107.5851171,-6.9168794],
[107.58517,-6.9168998],
[107.5851171,-6.9168794],
[107.5851117,-6.9166984],
[107.5851861,-6.9167001],
[107.5851117,-6.9166984],
[107.5851225,-6.9165173],
[107.5851941,-6.916527],
[107.5851225,-6.9165173],
[107.5851332,-6.9163362],
[107.5851995,-6.9163513],
[107.5851332,-6.9163362],
[107.5851868,-6.9157025],
[107.5852236,-6.9157149],
[107.5843232,-6.9154256],
[107.5842749,-6.9155747],
[107.5842856,-6.9157025],
[107.5842749,-6.9155747],
[107.5843232,-6.9154256],
[107.5838725,-6.9152552],
[107.5838243,-6.9156812],
[107.5838296,-6.915596],
[107.5836687,-6.9155747],
[107.5836633,-6.9154416],
[107.5836204,-6.915399],
[107.5836687,-6.9151966],
[107.5838725,-6.9152552],
[107.5836687,-6.9151966],
[107.5835399,-6.9151806],
[107.583497,-6.915367],
[107.5834058,-6.915383],
[107.5833576,-6.9156013],
[107.5834058,-6.915383],
[107.583497,-6.915367],
[107.5835399,-6.9151806],
[107.5832878,-6.9150741],
[107.5834756,-6.9144138],
[107.5835882,-6.9139931],
[107.5836366,-6.9139975],
[107.5835882,-6.9139931],
[107.5830303,-6.9138653],
[107.5824724,-6.9137268],
[107.5824724,-6.9136576],
[107.5825368,-6.91337],
[107.5826173,-6.9131463],
[107.5827138,-6.9127842],
[107.5828587,-6.9123102],
[107.5830089,-6.9118842],
[107.5831913,-6.9115221],
[107.583239,-6.9115606],
[107.5831913,-6.9115221],
[107.5833844,-6.9110481],
[107.5834349,-6.9110762],
[107.5833844,-6.9110481],
[107.5826226,-6.9105635],
[107.5824402,-6.9109043],
[107.5826226,-6.9105635],
[107.5821613,-6.9102386],
[107.5821291,-6.9103398],
[107.5821184,-6.910654],
[107.5819789,-6.9108883],
[107.5819789,-6.9110747],
[107.5819789,-6.9108883],
[107.5821184,-6.910654],
[107.5821291,-6.9103398],
[107.5821613,-6.9102386],
[107.581657,-6.9099351],
[107.5815229,-6.9098446],
[107.5813566,-6.9101801],
[107.581244,-6.9105315],
[107.5813674,-6.9105688],
[107.581244,-6.9105315],
[107.5813566,-6.9101801],
[107.5815229,-6.9098446],
[107.5813888,-6.9096901],
[107.5811528,-6.9100097],
[107.5809811,-6.9103185],
[107.5810348,-6.9104037],
[107.5810026,-6.9104676],
[107.5806217,-6.9102173],
[107.5810079,-6.9096262],
[107.5810723,-6.9096262],
[107.5811313,-6.909525],
[107.5813888,-6.9096901],
[107.5811313,-6.909525],
[107.5807236,-6.9092907],
[107.5804983,-6.9091096],
[107.5801443,-6.9088807],
[107.5798814,-6.9087369],
[107.5790499,-6.9081351],
[107.5798814,-6.9087369],
[107.5798653,-6.9088487],
[107.5797419,-6.9088221],
[107.579581,-6.9087102],
[107.5793289,-6.9085451],
[107.5791519,-6.9084493],
[107.578889,-6.908428],
[107.5788085,-6.908412],
]])
self.jalan.record(nama)
self.jalan.line([[
[107.5788757,-6.9112659],
[107.5781462,-6.9110636],
]])
self.jalan.record(nama)
self.jalan.line([[
[107.5782803,-6.9117612],
[107.5780013,-6.9117026],
]])
self.jalan.record(nama)
self.jalan.line([[
[107.576847,-6.9168861],
[107.5773318,-6.9169331],
[107.5774338,-6.9168479],
[107.5775947,-6.9164166],
[107.5776752,-6.9162408],
[107.5777663,-6.9157562],
[107.57782,-6.9154633],
[107.5772031,-6.9153408],
[107.57782,-6.9154633],
[107.5778736,-6.9154101],
[107.5782062,-6.915458],
[107.5785549,-6.9156124],
[107.5784798,-6.9159053],
[107.5781633,-6.9158468],
[107.5780507,-6.9158042],
[107.5777663,-6.9157562],
[107.5780507,-6.9158042],
[107.5781687,-6.9158468],
[107.5784798,-6.9159053],
[107.5784369,-6.9160544],
[107.5782277,-6.9160065],
[107.5784369,-6.9160544],
[107.578394,-6.9163154],
[107.5790324,-6.9164805],
[107.5791128,-6.9164059],
[107.5791718,-6.9162621],
[107.5791128,-6.9164059],
[107.5790324,-6.9164805],
[107.578394,-6.9163154],
[107.5782491,-6.9169065],
[107.5781286,-6.9175973],
[107.5787509,-6.9176825],
[107.5788689,-6.9176879],
[107.5789465,-6.9173006],
[107.5789519,-6.9170556],
[107.5782491,-6.9169065],
[107.5789519,-6.9170556],
[107.5789465,-6.9173006],
[107.5790377,-6.9171941],
[107.5793274,-6.9163527],
[107.5796546,-6.9164539],
[107.5796707,-6.9165391],
[107.5798907,-6.9166083],
[107.5799336,-6.9165178],
[107.5801213,-6.9165444],
[107.5806578,-6.9167254],
[107.5807597,-6.9165444],
]])
self.jalan.record(nama)
self.jalan.line([[
[107.5775873,-6.9136243],
[107.5787955,-6.9139343],
[107.5786346,-6.9142752],
[107.5784951,-6.914813],
[107.5779962,-6.9147385],
[107.5778736,-6.9154101],
[107.5782062,-6.915458],
[107.5784951,-6.914813],
[107.5784522,-6.9149089],
[107.5786453,-6.9149834],
[107.5785702,-6.9153136],
[107.5785112,-6.9155905],
[107.5785549,-6.9156124],
[107.5791362,-6.9156811],
[107.5793561,-6.9157317],
[107.579289,-6.9159607],
[107.5793561,-6.9157317],
[107.5795412,-6.915753],
[107.5793274,-6.9163527],
[107.5794634,-6.9159793],
[107.5802734,-6.9162056],
[107.5802224,-6.9163654],
[107.5801313,-6.9164133],
[107.5800991,-6.9165464],
[107.5801313,-6.9164133],
[107.5802224,-6.9163654],
[107.5802734,-6.9162056],
[107.580724,-6.9163521],
[107.5807616,-6.9160938],
[107.5808179,-6.9158435],
[107.5803941,-6.9157184],
[107.5803244,-6.9159394],
[107.5807616,-6.9160938],
[107.5803244,-6.9159394],
[107.5795412,-6.915753],
[107.579686,-6.9151405],
[107.5793936,-6.915058],
[107.5793695,-6.9151166],
[107.5791013,-6.9150553],
[107.5790423,-6.9152311],
[107.5791013,-6.9150553],
[107.5793695,-6.9151166],
[107.5792971,-6.9153722],
[107.5792408,-6.9154161],
[107.5791388,-6.9156811],
[107.5792408,-6.9154161],
[107.5785729,-6.915311],
]])
self.jalan.record(nama)
self.jalan.line([[
[107.5805838,-6.9179197],
[107.5806643,-6.917696],
[107.5807501,-6.917704],
[107.5808225,-6.9175176],
[107.5808252,-6.9174084],
[107.5808494,-6.9173445],
[107.5807635,-6.9173099],
[107.5810344,-6.9166682],
[107.5808157,-6.9164871],
[107.580821,-6.9163966],
[107.580724,-6.9163521],
[107.580821,-6.9163966],
[107.5810249,-6.9160664],
[107.5814219,-6.9162155],
[107.5812234,-6.9166469],
[107.5811697,-6.9167427],
[107.5810344,-6.9166682],
[107.5811697,-6.9167427],
[107.5812234,-6.9166469],
[107.5817705,-6.9169078],
[107.5818564,-6.9169078],
[107.5819958,-6.9169558],
[107.5820441,-6.9168386],
[107.5819958,-6.9169558],
[107.5822426,-6.9170303],
[107.5821729,-6.9172593],
[107.5822426,-6.9170303],
[107.5825055,-6.9171102],
[107.5828166,-6.9171475],
[107.5827361,-6.9177066],
[107.5826938,-6.9181831],
[107.5830907,-6.9182417],
[107.5831224,-6.9178877],
[107.5829829,-6.9178717],
[107.5829346,-6.9177705],
[107.5830634,-6.9168812],
[107.5831331,-6.9168066],
[107.5834496,-6.9168546],
[107.5837768,-6.9168919],
[107.5838144,-6.9169717],
[107.5837071,-6.9180634],
[107.5833745,-6.9180315],
[107.5833289,-6.9179503],
[107.5833316,-6.9179023],
[107.5831224,-6.9178877],
[107.5833316,-6.9179023],
[107.583455,-6.9168546],
]])
self.jalan.record(nama)
self.jalan.line([[
[107.5851107,-6.9168255],
[107.5844186,-6.9166924],
[107.5836247,-6.9165273],
[107.5833779,-6.9164634],
[107.5827449,-6.9164101],
[107.5827289,-6.9165273],
[107.582863,-6.9166125],
[107.5833833,-6.9166125],
[107.5835872,-6.9166604],
[107.5836247,-6.9165273],
[107.5844186,-6.9166924],
[107.5844669,-6.9163302],
[107.5846171,-6.9161012],
[107.5844669,-6.9163302],
[107.5837159,-6.9161598],
[107.5836408,-6.9161066],
[107.5834316,-6.9160746],
[107.5835764,-6.9154249],
[107.5836204,-6.915399],
[107.5835764,-6.9154249],
[107.5834316,-6.9160746],
[107.5833779,-6.9164634],
[107.5834316,-6.9160746],
[107.5830078,-6.9160001],
[107.5829059,-6.9160001],
[107.582525,-6.9158882],
[107.5824499,-6.9163462],
[107.5825481,-6.916367],
[107.5824499,-6.9163462],
[107.5822426,-6.9170303],
[107.5824499,-6.9163462],
[107.5819054,-6.9162051],
[107.5820422,-6.9157311],
[107.5820207,-6.9156992],
[107.5817042,-6.9156459],
[107.5816882,-6.915606],
[107.5816935,-6.9155421],
[107.5816399,-6.9154888],
[107.5816506,-6.9154356],
[107.5816962,-6.9152811],
[107.5816372,-6.9152172],
[107.5817203,-6.9147885],
[107.5820181,-6.914887],
[107.5820663,-6.9148657],
[107.5822541,-6.9149084],
[107.582187,-6.9152944],
[107.5822541,-6.9149084],
[107.5822863,-6.9147726],
[107.5825277,-6.9148444],
[107.5824901,-6.9150681],
[107.5825277,-6.9148444],
[107.5825786,-6.9148365],
[107.5825974,-6.9146075],
[107.5825786,-6.9148365],
[107.5830588,-6.9150015],
[107.583107,-6.9150042],
[107.5832116,-6.914674],
[107.583107,-6.9150042],
[107.5832878,-6.9150741],
[107.583107,-6.9150042],
[107.5830185,-6.9151187],
[107.58293,-6.9154888],
[107.5826511,-6.9154249],
[107.5825921,-6.9155927],
[107.582525,-6.9158882],
[107.5825062,-6.9159921],
[107.582187,-6.9159175],
]])
self.jalan.record(nama)
self.jalan.line([[
[107.5824724,-6.9137268],
[107.5823858,-6.9138174],
[107.5823192,-6.9143792],
[107.5822863,-6.9147726],
[107.5811283,-6.9144964],
[107.5811604,-6.9141396],
[107.5812302,-6.9137615],
[107.5811604,-6.9141396],
[107.5811283,-6.9144964],
[107.5810263,-6.9150023],
[107.5812785,-6.9150236],
[107.5812892,-6.9150822],
[107.5813965,-6.9151035],
[107.5812892,-6.9150822],
[107.5811229,-6.9157478],
[107.5815735,-6.9158544],
[107.5811229,-6.9157478],
[107.5810249,-6.9160664],
]])
self.jalan.record(nama)
self.jalan.line([[
[107.5798653,-6.9088487],
[107.5798571,-6.9090062],
[107.5792724,-6.9088518],
[107.5794226,-6.9088891],
[107.5793904,-6.9090968],
[107.5793904,-6.9092778],
[107.5792885,-6.9092778],
[107.5793904,-6.9092778],
[107.5796748,-6.9092778],
[107.579723,-6.9093258],
[107.5798411,-6.9093311],
[107.5798571,-6.9090062],
[107.5798411,-6.9093311],
[107.5798303,-6.9094536],
[107.5796157,-6.9094163],
[107.5795567,-6.9093577],
[107.5795567,-6.9092778],
[107.5795567,-6.9093577],
[107.5796157,-6.9094163],
[107.5798303,-6.9094536],
[107.5798089,-6.9097891],
[107.5796587,-6.9097731],
[107.5796372,-6.909837],
[107.5795192,-6.9098051],
[107.5796372,-6.909837],
[107.5796587,-6.9097731],
[107.5798089,-6.9097891],
[107.5798035,-6.9100766],
[107.5792992,-6.9099808],
[107.5793743,-6.9097252],
[107.5792992,-6.9099808],
[107.5798035,-6.9100766],
[107.5797713,-6.9104494],
[107.578913,-6.9101885],
[107.5797713,-6.9104494],
[107.5798357,-6.9105027],
[107.5801844,-6.9106305],
[107.5802595,-6.9107157],
[107.5803453,-6.910737],
[107.5805545,-6.9102098],
[107.5806217,-6.9102173],
[107.5805545,-6.9102098],
[107.5802434,-6.9099968],
[107.5798089,-6.9097891],
[107.5798035,-6.9100766],
[107.5797713,-6.9104494],
[107.579723,-6.9109926],
[107.5796587,-6.9112482],
[107.5799001,-6.9113494],
[107.5796587,-6.9112482],
[107.5795246,-6.9116423],
[107.5796587,-6.9117062],
[107.5795246,-6.9116423],
[107.5794655,-6.9117595],
[107.5790847,-6.9115678],
[107.5794655,-6.9117595],
[107.5793851,-6.9120098],
[107.5789291,-6.9118127],
[107.5793851,-6.9120098],
[107.5793314,-6.9121163],
[107.5788379,-6.9119352],
[107.5793314,-6.9121163],
[107.5792456,-6.9123187],
[107.5789935,-6.9122122],
[107.5792456,-6.9123187],
[107.5792027,-6.9124465],
[107.5794226,-6.912521],
[107.5792027,-6.9124465],
[107.5790471,-6.9127074],
[107.5792134,-6.912782],
[107.5790471,-6.9127074],
[107.5789774,-6.9129897],
[107.5793422,-6.9130908],
[107.5789774,-6.9129897],
[107.5789345,-6.9132932],
[107.5793958,-6.9134263],
[107.5789345,-6.9132932],
[107.5788594,-6.9135755],
[107.5787955,-6.9139343],
[107.5797797,-6.9141558],
[107.5796748,-6.9141346],
[107.5800261,-6.9130243],
]])
self.jalan.record(nama)
self.jalan.line([[
[107.5810263,-6.9150023],
[107.5804959,-6.914912],
[107.5804422,-6.9149653],
[107.5803618,-6.9149413],
[107.5804047,-6.9147789],
[107.5803886,-6.9148375],
[107.5803618,-6.9149413],
[107.5803537,-6.9149893],
[107.580182,-6.9149307],
[107.5803537,-6.9149893],
[107.5803081,-6.915149],
[107.5797475,-6.9149919],
[107.579686,-6.9151405],
[107.5793936,-6.915058],
[107.5795088,-6.9147097],
[107.5795893,-6.9147283],
[107.5796161,-6.9146964],
[107.5797797,-6.9141558],
[107.5803027,-6.914289],
[107.5811283,-6.9144964],
[107.5807155,-6.9143921],
[107.5807963,-6.9141612],
[107.5807104,-6.9141079],
[107.5806407,-6.9141052],
[107.5807104,-6.9141079],
[107.5807963,-6.9141612],
[107.5808472,-6.9141159],
[107.5808633,-6.9139987],
[107.5805549,-6.9138922],
[107.5808633,-6.9139987],
[107.5808472,-6.9141159],
[107.5807963,-6.9141612],
[107.5807155,-6.9143921],
[107.5808855,-6.9144348],
[107.5809331,-6.9142304],
[107.5808855,-6.9144348],
[107.5807641,-6.9149546],
[107.5808855,-6.9144348],
[107.5803027,-6.914289],
[107.580233,-6.9144461],
[107.5801445,-6.9147762],
[107.5803886,-6.9148375],
[107.5801445,-6.9147762],
[107.5800426,-6.9148748],
[107.5797663,-6.9148295],
[107.5797207,-6.9148535],
]])
self.jalan.record(nama)
self.jalan.line([[
[107.581657,-6.9099351],
[107.5813715,-6.9107838],
[107.5811623,-6.9114814],
[107.580878,-6.9114441],
[107.5807599,-6.911721],
[107.5804542,-6.9116358],
[107.5804864,-6.9115453],
[107.5804542,-6.9116358],
[107.5802986,-6.9121684],
[107.5803898,-6.912195],
[107.5802986,-6.9121684],
[107.5802611,-6.9122163],
[107.5794081,-6.9119607],
[107.5797568,-6.9120672],
[107.5796871,-6.9122323],
[107.5797568,-6.9120672],
[107.5802611,-6.9122163],
[107.5801377,-6.9126743],
[107.579907,-6.9126184],
[107.5801377,-6.9126743],
[107.5800261,-6.9130243],
[107.5797595,-6.9129512],
[107.5800261,-6.9130243],
[107.5801377,-6.9126743],
[107.5801752,-6.9125358],
[107.5804274,-6.9126876],
[107.5807358,-6.9127648],
[107.5809772,-6.911998],
[107.5805856,-6.9118542],
[107.5809826,-6.911998],
[107.5811703,-6.9119287],
[107.5813018,-6.9117983],
[107.5813339,-6.9117503],
[107.5813822,-6.9116598],
[107.5813339,-6.9117503],
[107.5810308,-6.9116598],
]])
self.jalan.record(nama)
self.jalan.line([[
[107.5799214,-6.9141892],
[107.5804274,-6.9126876],
[107.5807358,-6.9127648],
[107.580879,-6.9128259],
[107.5808548,-6.9129057],
[107.5807878,-6.912959],
[107.5808548,-6.9129057],
[107.580879,-6.9128232],
[107.5809192,-6.912714],
[107.5812572,-6.9127832],
[107.5811874,-6.9130415],
[107.5812572,-6.9127832],
[107.5826173,-6.9131463],
[107.5825368,-6.91337],
[107.5822335,-6.9133105],
[107.5821611,-6.9135661],
[107.5822335,-6.9133105],
[107.5813323,-6.9130522],
[107.5812089,-6.9130761],
[107.5811338,-6.9133318],
]])
self.jalan.record(nama)
self.jalan.line([[
[107.5827138,-6.9127842],
[107.5821254,-6.9126154],
[107.5810471,-6.9123038],
[107.5821227,-6.912618],
[107.5822756,-6.9122452],
[107.5815406,-6.9120322],
[107.5822756,-6.9122452],
[107.5824285,-6.9118911],
[107.5816989,-6.9116595],
]])
    def jalanKelurahanGaruda(self, nama):
        """Write the road network of Kelurahan Garuda as polyline shapes.

        For each road segment this emits one attribute record carrying
        ``nama`` (the feature name) via ``self.jalan.record`` followed by one
        polyline via ``self.jalan.line`` — presumably a ``shapefile.Writer``
        whose record/line calls must stay paired 1:1 (TODO confirm against
        the writer's construction elsewhere in this class).

        Vertices are ``[x, y]`` pairs; the values (~107.57, ~-6.90) look like
        WGS84 longitude/latitude around Bandung, Indonesia — verify the
        projection against the rest of the project.

        NOTE(review): many vertex runs double back over themselves
        (e.g. A, B, A); this appears to be deliberate out-and-back tracing of
        branching side streets within a single polyline — do not "clean up"
        these repeats without checking the rendered output.

        :param nama: attribute value recorded for every segment of this road.
        """
        # Segment 1
        self.jalan.record(nama)
        self.jalan.line([[
            [107.5729636,-6.9155205],
            [107.5731415,-6.9155822],
            [107.5732327,-6.9156515],
            [107.5733587,-6.9156834],
            [107.5735492,-6.9157713],
            [107.573348,-6.9159337],
            [107.5730303,-6.9163057],
            [107.5733507,-6.9159337],
            [107.5735492,-6.9157713],
            [107.5737476,-6.9158778],
            [107.5734472,-6.9162719],
            [107.5736323,-6.9164529],
            [107.5737262,-6.9164503],
            [107.5739113,-6.916184],
            [107.5739971,-6.9162266],
            [107.5739113,-6.916184],
            [107.5737262,-6.9164503],
            [107.5736323,-6.9164529],
            [107.573446,-6.9166438],
        ]])
        # Segment 2
        self.jalan.record(nama)
        self.jalan.line([[
            [107.5725292,-6.9158665],
            [107.5729203,-6.9162178],
            [107.5729766,-6.9161486],
            [107.5729203,-6.9162178],
            [107.5730303,-6.9163057],
            [107.573446,-6.9166438],
            [107.5738617,-6.9169766],
            [107.5742453,-6.9170805],
            [107.5743445,-6.9170166],
            [107.5744813,-6.9170459],
            [107.5745457,-6.9171391],
            [107.5752914,-6.9173095],
            [107.5758546,-6.9173521],
            [107.5759914,-6.9169021],
            [107.5758546,-6.9173521],
            [107.5767156,-6.9174426],
            [107.576847,-6.9168861],
            [107.5765171,-6.9168409],
            [107.5764528,-6.9167743],
            [107.5764367,-6.9166438],
            [107.5764528,-6.9167743],
            [107.5765171,-6.9168409],
            [107.576847,-6.9168861],
            [107.5770321,-6.916074],
            [107.5774452,-6.9142767],
            [107.5775873,-6.9136243],
            [107.577987,-6.9117897],
            [107.5784376,-6.9098406],
            [107.5783115,-6.909782],
            [107.5783384,-6.9097048],
            [107.5784751,-6.9097154],
            [107.5784376,-6.9098406],
            [107.5784751,-6.9097154],
            [107.5786978,-6.9086637],
            [107.5788963,-6.908227],
            [107.5787271,-6.908121],
            [107.5788963,-6.908227],
            [107.5791012,-6.9079],
            [107.5790556,-6.9079719],
            [107.5781562,-6.9074106],
            [107.5780409,-6.9076289],
            [107.5780087,-6.9077701],
            [107.5780409,-6.9076289],
            [107.5778719,-6.9075544],
            [107.5780409,-6.9076289],
            [107.5781562,-6.9074106],
            [107.5777807,-6.9071816],
            [107.5776788,-6.9073813],
            [107.5777807,-6.9071816],
            [107.5775983,-6.9070431],
            [107.5773999,-6.907344],
            [107.5775983,-6.9070431],
            [107.5771665,-6.9067955],
            [107.5765281,-6.9063428],
            [107.5761902,-6.9069686],
            [107.5758039,-6.9068088],
            [107.5761902,-6.9069686],
            [107.5765281,-6.9063428],
            [107.5761285,-6.9060872],
            [107.5758549,-6.9065905],
            [107.5761285,-6.9060872],
            [107.5754177,-6.9056052],
            [107.5751361,-6.9054375],
            [107.5754177,-6.9056052],
            [107.5751253,-6.9061591],
            [107.5748947,-6.9064467],
            [107.5750127,-6.9063002],
            [107.5753909,-6.9065292],
            [107.5754713,-6.9064307],
            [107.5756403,-6.9064973],
            [107.5759381,-6.9059607],
            [107.5756832,-6.9057836],
            [107.5755491,-6.9059967],
            [107.5753936,-6.9062816],
            [107.5751253,-6.9061591],
        ]])
        # Segment 3
        self.jalan.record(nama)
        self.jalan.line([[
            [107.5737476,-6.9158778],
            [107.5739852,-6.915599],
            [107.5741636,-6.9152395],
            [107.5735333,-6.9149932],
            [107.5741636,-6.9152395],
            [107.5743219,-6.9152648],
            [107.5749656,-6.9137098],
            [107.5743219,-6.9152648],
            [107.5749334,-6.915595],
            [107.5747617,-6.9160316],
            [107.5741287,-6.9157707],
            [107.5747617,-6.9160316],
            [107.5743445,-6.9170166],
            [107.5747617,-6.9160316],
            [107.5744813,-6.9170459],
            [107.5747617,-6.9160316],
            [107.5749334,-6.915595],
            [107.576017,-6.9158559],
            [107.5765374,-6.9159731],
            [107.5770321,-6.916074],
            [107.5765374,-6.9159731],
            [107.577106,-6.9135074],
            [107.5775873,-6.9136243],
            [107.577106,-6.9135074],
            [107.5774118,-6.9121707],
            [107.5773688,-6.9123465],
            [107.5767197,-6.9121121],
            [107.5770202,-6.9114784],
            [107.5774547,-6.9116435],
            [107.577987,-6.9117897],
            [107.5774547,-6.9116435],
            [107.577519,-6.9113133],
            [107.5774547,-6.9116435],
            [107.5770202,-6.9114784],
            [107.5775459,-6.9105518],
            [107.5775888,-6.91044],
            [107.577562,-6.9103494],
            [107.577283,-6.9101897],
            [107.577562,-6.909721],
            [107.5774064,-6.909982],
            [107.577224,-6.9098861],
            [107.5774815,-6.9094974],
            [107.5776639,-6.9091938],
            [107.5779643,-6.9087039],
            [107.5781467,-6.9086133],
            [107.5780179,-6.9085015],
            [107.5779267,-6.9084589],
            [107.5778034,-6.9083311],
            [107.5779267,-6.9084589],
            [107.5780233,-6.9085015],
            [107.5781413,-6.9086133],
            [107.5782915,-6.908608],
            [107.5786978,-6.9086637],
        ]])
        # Segment 4
        self.jalan.record(nama)
        self.jalan.line([[
            [107.576017,-6.9158559],
            [107.5761265,-6.9153624],
            [107.5750911,-6.9150828],
            [107.5761265,-6.9153624],
            [107.5764025,-6.9140311],
            [107.5758017,-6.9137967],
            [107.5759612,-6.9138593],
            [107.5762308,-6.9133175],
            [107.577106,-6.9135074],
            [107.5762308,-6.9133175],
            [107.5764883,-6.9127583],
            [107.577234,-6.91295],
            [107.5764883,-6.9127583],
            [107.5767197,-6.9121121],
        ]])
        # Segment 5
        self.jalan.record(nama)
        self.jalan.line([[
            [107.5749334,-6.915595],
            [107.5758022,-6.9131228],
            [107.5753516,-6.9129897],
            [107.575483,-6.9130269],
            [107.5756734,-6.9125423],
            [107.575483,-6.9130269],
            [107.5758022,-6.9131228],
            [107.5762308,-6.9133175],
            [107.5758022,-6.9131228],
            [107.5758558,-6.9130323],
            [107.5766337,-6.9112536],
            [107.5761723,-6.9110832],
            [107.5766337,-6.9112536],
            [107.5770202,-6.9114784],
            [107.5766337,-6.9112536],
            [107.5768107,-6.9109553],
            [107.5767678,-6.9108701],
            [107.5770306,-6.9104175],
            [107.5767678,-6.9108701],
            [107.5768107,-6.9109553],
            [107.577283,-6.9101897],
        ]])
        # Segment 6
        self.jalan.record(nama)
        self.jalan.line([[
            [107.5737868,-6.9084887],
            [107.5746753,-6.9087599],
            [107.5752063,-6.908909],
            [107.574627,-6.9099368],
            [107.5742139,-6.9097664],
            [107.5746753,-6.9087599],
            [107.5742139,-6.9097664],
            [107.5734708,-6.9094448],
            [107.5742139,-6.9097664],
            [107.574627,-6.9099368],
            [107.5750132,-6.9100859],
            [107.5755121,-6.9092498],
            [107.5756999,-6.9092924],
            [107.5757857,-6.9090634],
            [107.5754316,-6.9089196],
            [107.5757857,-6.9090634],
            [107.5756999,-6.9092924],
            [107.5755121,-6.9092498],
            [107.5750132,-6.9100859],
            [107.5754263,-6.9101924],
            [107.5760164,-6.9092764],
            [107.5754263,-6.9101924],
            [107.5752653,-6.9106504],
            [107.5754263,-6.9101924],
            [107.5766225,-6.9107037],
            [107.5759024,-6.9103908],
            [107.5762685,-6.9097983],
            [107.576365,-6.9094788],
            [107.5761934,-6.9093936],
            [107.576306,-6.9090528],
            [107.5764026,-6.9086054],
            [107.5767567,-6.9086693],
            [107.5768586,-6.9084457],
            [107.5767567,-6.9086693],
            [107.5770302,-6.9087119],
            [107.5773253,-6.9082699],
            [107.5770302,-6.9087119],
            [107.5772448,-6.9087758],
            [107.577336,-6.9087652],
            [107.577395,-6.908648],
            [107.577336,-6.9087652],
            [107.5775238,-6.9087545],
            [107.5779643,-6.9087039],
            [107.5775238,-6.9087545],
            [107.5770839,-6.9090208],
            [107.5770517,-6.9092818],
            [107.5770034,-6.9098516],
            [107.577224,-6.9098861],
            [107.5770034,-6.9098516],
            [107.5770517,-6.9092764],
            [107.5770839,-6.9090208],
            [107.5769873,-6.9090155],
            [107.5768532,-6.9089516],
            [107.5767674,-6.9089835],
            [107.5767298,-6.9090847],
            [107.5766011,-6.9096492],
        ]])
        # Segment 7
        self.jalan.record(nama)
        self.jalan.line([[
            [107.5771665,-6.9067955],
            [107.5770051,-6.907072],
            [107.5768174,-6.9069868],
            [107.5770051,-6.907072],
            [107.5766135,-6.9077324],
            [107.5762756,-6.907514],
            [107.5766135,-6.9077324],
            [107.5765009,-6.9078868],
            [107.5763936,-6.9080732],
            [107.5764901,-6.9081477],
            [107.5764026,-6.9086054],
            [107.5759966,-6.9084939],
            [107.5760932,-6.9082117],
            [107.5759966,-6.9084939],
            [107.575723,-6.9084619],
            [107.5756586,-6.9086483],
            [107.575723,-6.9084619],
            [107.5756426,-6.9084513],
            [107.5757713,-6.9080892],
            [107.5757713,-6.9079134],
            [107.576002,-6.9074182],
            [107.5751919,-6.9070241],
            [107.5752992,-6.9067045],
            [107.575428,-6.9067844],
            [107.5752992,-6.9067045],
            [107.5753909,-6.9065292],
            [107.5752992,-6.9067045],
            [107.5751919,-6.9070241],
            [107.57509,-6.9071359],
            [107.5748701,-6.90704],
            [107.5746931,-6.9069229],
            [107.5743926,-6.9067631],
            [107.5744879,-6.9068124],
            [107.574339,-6.9071199],
            [107.5742478,-6.907072],
        ]])
        # Segment 8
        self.jalan.record(nama)
        self.jalan.line([[
            [107.5739905,-6.9081123],
            [107.5742721,-6.9081522],
            [107.5742453,-6.9079925],
            [107.5742158,-6.9079073],
            [107.5742426,-6.9077182],
            [107.5742158,-6.9079073],
            [107.5742453,-6.9079925],
            [107.5742721,-6.9081522],
            [107.5749856,-6.9082801],
            [107.5751519,-6.9079579],
            [107.5749856,-6.9082801],
            [107.5754228,-6.9083946],
            [107.5755006,-6.9082082],
            [107.5754228,-6.9083946],
            [107.5755301,-6.9084265],
            [107.5754282,-6.9086901],
            [107.5755301,-6.9084265],
            [107.5756426,-6.9084513],
        ]])
    def jalanKelurahanKebonJeruk(self, nama):
        """Write the road network of Kelurahan Kebon Jeruk as polyline shapes.

        Each road segment is emitted as a paired ``self.jalan.record(nama)``
        plus ``self.jalan.line([[...]])`` call — ``self.jalan`` is presumably
        a ``shapefile.Writer`` where record/line pairs must stay 1:1 (TODO
        confirm against the writer's construction elsewhere in this class).

        Vertices are ``[x, y]`` pairs; values (~107.59-107.60, ~-6.91) look
        like WGS84 longitude/latitude around Bandung, Indonesia — verify the
        projection against the rest of the project.

        NOTE(review): the vertex lists frequently retrace earlier points
        (A, B, A patterns); this appears to be deliberate out-and-back
        tracing of dead-end side streets in a single polyline — keep the
        repeats as-is unless the rendered map says otherwise.

        :param nama: attribute value recorded for every segment of this road.
        """
        # Segment 1
        self.jalan.record(nama)
        self.jalan.line([[
            [107.592997,-6.9194399],
            [107.5938698,-6.9195477],
            [107.5951519,-6.9197021],
            [107.5954952,-6.9197421],
            [107.5956937,-6.9197687],
            [107.5972386,-6.9199498],
            [107.5976544,-6.9200004],
            [107.5983705,-6.9200829],
            [107.5997385,-6.9202507],
            [107.6004761,-6.9203385],
            [107.6005619,-6.9200589],
            [107.6005673,-6.9198033],
            [107.6005619,-6.9200589],
            [107.6004761,-6.9203385],
            [107.6018038,-6.9204983],
            [107.6021498,-6.9205462],
            [107.6031502,-6.9206607],
            [107.6041051,-6.9207779],
            [107.6041775,-6.9198513],
            [107.6041909,-6.9194519],
            [107.6042151,-6.9191164],
            [107.6042419,-6.9186477],
            [107.6042875,-6.9183176],
            [107.6043438,-6.9183309],
            [107.6042875,-6.9183202],
            [107.6043948,-6.91713],
            [107.6044752,-6.9158253],
            [107.604545,-6.9158066],
            [107.6044752,-6.9158253],
            [107.6045745,-6.9148294],
            [107.6046228,-6.9147629],
            [107.6045745,-6.9148268],
            [107.6030376,-6.9147602],
            [107.6027881,-6.9147842],
            [107.6027881,-6.9151463],
            [107.6027881,-6.9147842],
            [107.6025628,-6.9148241],
            [107.6015168,-6.9147708],
            [107.6004385,-6.9146777],
            [107.5994568,-6.9146537],
            [107.5992235,-6.9147309],
            [107.5990786,-6.9146191],
            [107.5989258,-6.9147069],
            [107.5982847,-6.9146723],
            [107.5981855,-6.9147149],
            [107.5982847,-6.9146723],
            [107.5989258,-6.9147069],
            [107.5990786,-6.9146191],
            [107.5992262,-6.9147336],
            [107.5990089,-6.9148081],
            [107.5980702,-6.9148107],
            [107.5981185,-6.9142994],
            [107.5980702,-6.9148107],
            [107.5973567,-6.9148426],
            [107.5973031,-6.9146829],
            [107.5979307,-6.9147521],
            [107.5973031,-6.9146829],
            [107.595506,-6.9146349],
            [107.5953773,-6.9145764],
            [107.5936124,-6.9143421],
            [107.5929579,-6.9143474],
            [107.5928399,-6.9143634],
            [107.5927862,-6.914406],
            [107.5927916,-6.9145071],
            [107.592856,-6.9145604],
            [107.5929365,-6.914603],
            [107.5929472,-6.9148107],
            [107.5928877,-6.915772],
        ]])
        # Segment 2 (largest segment: inner grid of streets)
        self.jalan.record(nama)
        self.jalan.line([[
            [107.5929574,-6.9158226],
            [107.5930423,-6.9146035],
            [107.5931067,-6.914513],
            [107.5932301,-6.9144757],
            [107.5935922,-6.9144837],
            [107.5954965,-6.9147686],
            [107.595506,-6.9146349],
            [107.5954992,-6.9147686],
            [107.5954617,-6.9151574],
            [107.5972724,-6.915159],
            [107.5971812,-6.9152921],
            [107.5978732,-6.9152921],
            [107.5971812,-6.9152921],
            [107.5956201,-6.9153134],
            [107.5954592,-6.9152602],
            [107.5954592,-6.915159],
            [107.5954592,-6.9152602],
            [107.5954914,-6.9160536],
            [107.5932149,-6.9160289],
            [107.5954914,-6.9160536],
            [107.5954806,-6.9163838],
            [107.5933169,-6.9165122],
            [107.5942522,-6.916453],
            [107.5942951,-6.9171347],
            [107.5942522,-6.916453],
            [107.5954806,-6.9163838],
            [107.5955987,-6.9163838],
            [107.5955504,-6.9172945],
            [107.5955987,-6.9163838],
            [107.5958562,-6.9163838],
            [107.5958722,-6.9166501],
            [107.5960546,-6.9169803],
            [107.5959634,-6.9174329],
            [107.5960546,-6.9169803],
            [107.5958722,-6.9166554],
            [107.5958562,-6.9163838],
            [107.5959259,-6.9163785],
            [107.5958883,-6.9161495],
            [107.5959259,-6.9163785],
            [107.5966984,-6.9163732],
            [107.5966876,-6.9166767],
            [107.5967252,-6.9167353],
            [107.5967145,-6.9169962],
            [107.5967252,-6.9167353],
            [107.5966876,-6.9166767],
            [107.5966984,-6.9163732],
            [107.5974011,-6.9163465],
            [107.5973904,-6.9164956],
            [107.5972831,-6.9170069],
            [107.5973904,-6.9164956],
            [107.5974011,-6.9163465],
            [107.598195,-6.9163252],
            [107.5982165,-6.9170921],
            [107.598195,-6.9163199],
            [107.598136,-6.9155318],
            [107.5972187,-6.915585],
            [107.598136,-6.9155318],
            [107.5980702,-6.9148107],
            [107.5973567,-6.9148426],
            [107.5972724,-6.915159],
            [107.5973567,-6.9148426],
            [107.5980702,-6.9148107],
            [107.598136,-6.9155318],
            [107.598195,-6.9163252],
            [107.5995703,-6.9162367],
            [107.5992753,-6.9147988],
            [107.5995489,-6.9147829],
            [107.5992753,-6.9147988],
            [107.5992235,-6.9147309],
            [107.5992753,-6.9147988],
            [107.5995703,-6.9162367],
            [107.5999405,-6.9162047],
            [107.5998976,-6.9160343],
            [107.5998815,-6.915816],
            [107.5996937,-6.9148787],
            [107.5998815,-6.915816],
            [107.5998976,-6.916029],
            [107.5999405,-6.9162047],
            [107.6013406,-6.9161994],
            [107.6012923,-6.9156722],
            [107.6014533,-6.9156562],
            [107.6014586,-6.9155604],
            [107.6018019,-6.9155497],
            [107.6018127,-6.9152994],
            [107.601582,-6.9153473],
            [107.6015168,-6.9147708],
            [107.601582,-6.9153473],
            [107.6018127,-6.9152994],
            [107.6018019,-6.9155497],
            [107.6014586,-6.9155604],
            [107.6014533,-6.9156562],
            [107.6012923,-6.9156722],
            [107.6013406,-6.9161994],
            [107.6016142,-6.9162047],
            [107.6016356,-6.9156349],
            [107.6016142,-6.9162047],
            [107.6023008,-6.9161195],
            [107.6023116,-6.9153686],
            [107.6024779,-6.915374],
            [107.6024832,-6.9158266],
            [107.6024779,-6.915374],
            [107.6027514,-6.9153846],
            [107.6027193,-6.9161089],
            [107.6023008,-6.9161195],
            [107.6027193,-6.9161089],
            [107.6031055,-6.9160982],
            [107.6044752,-6.9158253],
            [107.6031055,-6.9160982],
            [107.6031484,-6.9168012],
            [107.6027407,-6.9168065],
            [107.60273,-6.916929],
            [107.6026388,-6.9169343],
            [107.60273,-6.916929],
            [107.6027407,-6.9168065],
            [107.6028748,-6.9168065],
            [107.6028802,-6.9169503],
            [107.6028748,-6.9168065],
            [107.6031484,-6.9168012],
            [107.6031699,-6.9170089],
            [107.6043948,-6.91713],
            [107.6042875,-6.9183176],
            [107.6034971,-6.9180633],
            [107.6030948,-6.9179302],
            [107.6031699,-6.9170089],
            [107.6030948,-6.9179302],
            [107.603025,-6.9182284],
            [107.6030948,-6.9179302],
            [107.6034971,-6.9180633],
            [107.6033684,-6.9185692],
            [107.6033684,-6.9186437],
            [107.6037921,-6.9186704],
            [107.6038619,-6.9185905],
            [107.6042419,-6.9186477],
            [107.6042151,-6.9191164],
            [107.6036419,-6.9191017],
            [107.6032986,-6.9190751],
            [107.6033684,-6.9186437],
            [107.6033684,-6.9185692],
            [107.6027032,-6.9185266],
            [107.6027032,-6.9189526],
            [107.6029982,-6.9189633],
            [107.6033093,-6.9190005],
            [107.6029982,-6.9189633],
            [107.6029928,-6.9191017],
            [107.602907,-6.9191337],
            [107.6029124,-6.9193627],
            [107.6029928,-6.9194053],
            [107.6030197,-6.9196609],
            [107.6032342,-6.9196662],
            [107.6032986,-6.9190751],
            [107.6032705,-6.9193414],
            [107.6041909,-6.9194519],
            [107.6041775,-6.9198513],
            [107.6032503,-6.919794],
            [107.6032342,-6.9196662],
            [107.6032503,-6.919794],
            [107.6032369,-6.9198792],
            [107.6021948,-6.9198239],
            [107.6032369,-6.9198792],
            [107.6032141,-6.9201819],
            [107.6026696,-6.9200807],
            [107.6032168,-6.9201819],
            [107.6031658,-6.9204348],
            [107.6031502,-6.9206607],
            [107.6021498,-6.9205462],
            [107.6021895,-6.9198677],
            [107.6018515,-6.9198464],
            [107.6021895,-6.9198677],
            [107.6021948,-6.9198239],
            [107.6022351,-6.9193085],
            [107.6018837,-6.9193085],
            [107.6022378,-6.9193112],
            [107.6022538,-6.9184511],
            [107.6027032,-6.9185266],
            [107.6022512,-6.9184485],
            [107.6022485,-6.9179799],
            [107.6025757,-6.9179692],
            [107.6022485,-6.9179799],
            [107.6022458,-6.917884],
            [107.6016182,-6.917892],
            [107.6022458,-6.9178867],
            [107.6022726,-6.9173115],
            [107.6026776,-6.9173594],
            [107.6026723,-6.9175805],
            [107.6026803,-6.9173594],
            [107.6031363,-6.9173861],
            [107.6026776,-6.9173648],
            [107.6022753,-6.9173142],
            [107.6022699,-6.9169467],
            [107.6022109,-6.9161266],
            [107.6022699,-6.9169441],
            [107.602278,-6.9173168],
            [107.6015162,-6.9172955],
            [107.6015484,-6.9168296],
            [107.6015189,-6.9172955],
            [107.6012882,-6.9172876],
            [107.6008323,-6.9172663],
            [107.6007894,-6.9177242],
            [107.6008296,-6.9172609],
            [107.6007974,-6.9172583],
            [107.6008135,-6.9171837],
            [107.6008189,-6.917],
            [107.6007062,-6.9168855],
            [107.600505,-6.9166432],
            [107.6004004,-6.9163982],
            [107.6003924,-6.9162038],
            [107.5999405,-6.9162047],
            [107.5995703,-6.9162367],
            [107.5991908,-6.9162571],
            [107.5992095,-6.9165447],
            [107.5994456,-6.9166325],
            [107.5998211,-6.9168935],
            [107.5998586,-6.9166911],
            [107.5998211,-6.9168935],
            [107.5998104,-6.9171864],
            [107.5982165,-6.9170921],
            [107.5972831,-6.9170069],
            [107.5967145,-6.9169962],
            [107.5964254,-6.9169893],
            [107.5963315,-6.9170639],
            [107.5962886,-6.9169893],
            [107.5963154,-6.9167337],
            [107.5962886,-6.9169973],
            [107.5963369,-6.9170612],
            [107.5960043,-6.9174526],
            [107.5959634,-6.9174329],
            [107.5955504,-6.9172945],
            [107.5952291,-6.9171864],
            [107.5942951,-6.9171347],
            [107.5941616,-6.9171411],
            [107.5932954,-6.9170661],
            [107.5941616,-6.9171411],
            [107.5941643,-6.9181529],
            [107.5941482,-6.918334],
            [107.5931129,-6.9181849],
            [107.5941536,-6.9183313],
            [107.5940248,-6.9187041],
            [107.5933757,-6.9186242],
            [107.5940248,-6.9187041],
            [107.593939,-6.9190076],
            [107.5938698,-6.9195477],
        ]])
        # Segment 3
        self.jalan.record(nama)
        self.jalan.line([[
            [107.5941643,-6.9181529],
            [107.5944015,-6.9181847],
            [107.5944444,-6.9178652],
            [107.5950291,-6.9178652],
            [107.5950559,-6.9177427],
            [107.5952008,-6.9177054],
            [107.5955226,-6.9177534],
            [107.5955504,-6.9172945],
            [107.5955226,-6.917748],
            [107.5954904,-6.9183179],
            [107.5943961,-6.91819],
            [107.5954958,-6.9183179],
            [107.5961503,-6.9183072],
            [107.5954958,-6.9183179],
            [107.5954422,-6.9187812],
            [107.5953992,-6.9189409],
            [107.5951632,-6.9193882],
            [107.5951519,-6.9197021],
            [107.5954952,-6.9197421],
            [107.5956085,-6.9194095],
            [107.5956889,-6.9190314],
            [107.5957908,-6.9186054],
            [107.5956889,-6.9190314],
            [107.5956085,-6.9194149],
            [107.5954952,-6.9197421],
            [107.5956937,-6.9197687],
            [107.5958713,-6.9192711],
            [107.5959947,-6.9191113],
            [107.596279,-6.9192019],
            [107.5964453,-6.9192338],
            [107.5966867,-6.9192871],
            [107.5967564,-6.91909],
            [107.5965794,-6.9190527],
            [107.5965526,-6.9191539],
            [107.5965794,-6.9190527],
            [107.596456,-6.9190368],
            [107.5964507,-6.9192338],
            [107.596456,-6.9190368],
            [107.5963273,-6.9190527],
            [107.596279,-6.9192019],
            [107.5959947,-6.9191113],
            [107.5961503,-6.9187332],
            [107.5964829,-6.9188557],
            [107.5967779,-6.9188983],
            [107.5967564,-6.91909],
            [107.5967779,-6.9188983],
            [107.5968289,-6.9185868],
            [107.5965875,-6.9185149],
            [107.5962119,-6.9184856],
            [107.5961503,-6.9187306],
            [107.5962093,-6.9184909],
            [107.5962441,-6.9183152],
            [107.5961476,-6.9183072],
            [107.5962441,-6.9183179],
            [107.5965821,-6.918443],
            [107.5965875,-6.9185149],
            [107.5965821,-6.9184403],
            [107.596743,-6.9176256],
            [107.5963407,-6.9175137],
            [107.5962978,-6.9175723],
            [107.5960043,-6.9174526],
            [107.5959705,-6.9174924],
            [107.5961503,-6.9183072],
        ]])
        # Segment 4
        self.jalan.record(nama)
        self.jalan.line([[
            [107.5972831,-6.9170069],
            [107.597159,-6.9175675],
            [107.5974702,-6.91769],
            [107.5973951,-6.9183557],
            [107.5969123,-6.9182439],
            [107.597159,-6.9175675],
            [107.5969123,-6.9182439],
            [107.5968289,-6.9185868],
            [107.597379,-6.9186699],
            [107.5973951,-6.9183557],
            [107.597379,-6.9186699],
            [107.5973575,-6.9190118],
            [107.5967779,-6.9188983],
            [107.5973575,-6.9190118],
            [107.5972386,-6.9199498],
            [107.5976544,-6.9200004],
            [107.5977706,-6.9193739],
            [107.5977116,-6.9193153],
            [107.5977867,-6.9186656],
            [107.597379,-6.9186699],
            [107.5977867,-6.9186656],
            [107.5982802,-6.9187082],
            [107.5983705,-6.9200829],
            [107.5982802,-6.9187082],
            [107.5982641,-6.9177869],
            [107.5981193,-6.9177923],
            [107.5979154,-6.9177656],
            [107.597733,-6.9177443],
            [107.5979154,-6.9177656],
            [107.5981193,-6.9177923],
            [107.5982641,-6.9177869],
            [107.5982165,-6.9170921],
            [107.5982641,-6.9177869],
            [107.5982748,-6.9180532],
            [107.5989722,-6.9181224],
            [107.5990795,-6.9180745],
            [107.5990956,-6.9178562],
            [107.5989668,-6.9178455],
            [107.5989132,-6.9177656],
            [107.5987254,-6.9177071],
            [107.5989132,-6.9177656],
            [107.5989668,-6.9178455],
            [107.5990956,-6.9178562],
            [107.5991868,-6.9174994],
            [107.5994765,-6.9175473],
            [107.5997822,-6.9175739],
            [107.5998104,-6.9171864],
            [107.6007974,-6.9172583],
            [107.5998104,-6.9171864],
            [107.5997822,-6.9175739],
            [107.6000612,-6.9176378],
            [107.5997822,-6.9175739],
            [107.599734,-6.9177443],
            [107.6003831,-6.9178455],
            [107.6004206,-6.9174994],
            [107.600544,-6.9174727],
            [107.6005547,-6.9172384],
            [107.600544,-6.9174727],
            [107.6004206,-6.9174994],
            [107.6003831,-6.9178455],
            [107.6004004,-6.9182369],
            [107.6005667,-6.9182795],
            [107.6005399,-6.9185458],
            [107.6004433,-6.9186097],
            [107.6002395,-6.9186097],
            [107.6002234,-6.9188973],
            [107.5996387,-6.9188281],
            [107.599628,-6.9183168],
            [107.5999713,-6.9183168],
            [107.599628,-6.9183168],
            [107.5996119,-6.9180293],
            [107.5999874,-6.9180825],
            [107.6004004,-6.9182369],
            [107.6003831,-6.9178455],
            [107.599734,-6.9177443],
            [107.5996119,-6.9180293],
            [107.5996199,-6.9181631],
            [107.5993812,-6.9181304],
            [107.5994765,-6.9175473],
            [107.5993812,-6.9181304],
            [107.5990795,-6.9180745],
            [107.5989722,-6.9181224],
            [107.5989198,-6.9187535],
            [107.5993812,-6.9187855],
            [107.5993812,-6.9181304],
            [107.5993812,-6.9187855],
            [107.5996387,-6.9188281],
            [107.5998157,-6.9190783],
            [107.5995421,-6.919105],
            [107.5998157,-6.9190783],
            [107.5997621,-6.9197706],
            [107.5997385,-6.9202507],
            [107.5997621,-6.9197706],
            [107.5993436,-6.919744],
            [107.5992793,-6.9198026],
            [107.5992846,-6.9199304],
            [107.5992793,-6.9198026],
            [107.5993436,-6.919744],
            [107.5997621,-6.9197706],
            [107.5997889,-6.9194085],
            [107.5991291,-6.9193499],
            [107.5991291,-6.9190996],
            [107.5987804,-6.919073],
            [107.5987643,-6.9187482],
            [107.5989198,-6.9187535],
            [107.5987643,-6.9187482],
            [107.5982802,-6.9187082],
        ]])
        # Segment 5
        self.jalan.record(nama)
        self.jalan.line([[
            [107.6018038,-6.9204983],
            [107.6018515,-6.9198464],
            [107.6018837,-6.9193085],
            [107.6015508,-6.9193359],
            [107.6015616,-6.9196714],
            [107.6011592,-6.9196608],
            [107.6011217,-6.919762],
            [107.6011592,-6.9198951],
            [107.6011217,-6.919762],
            [107.6011592,-6.9196608],
            [107.6011807,-6.919304],
            [107.6015508,-6.9193359],
            [107.6015562,-6.9189099],
            [107.6017761,-6.9189099],
            [107.6015562,-6.9189099],
            [107.6015776,-6.9185318],
            [107.60117,-6.9185265],
            [107.6015776,-6.9185318],
            [107.6016182,-6.917892],
            [107.6012451,-6.9178981],
            [107.6012882,-6.9172876],
            [107.6012451,-6.9178981],
            [107.60117,-6.9185265],
            [107.6010788,-6.9192667],
            [107.6011807,-6.919304],
            [107.6010788,-6.9192667],
            [107.6006872,-6.9190111],
            [107.6002234,-6.9188973],
        ]])
        # Segment 6
        self.jalan.record(nama)
        self.jalan.line([[
            [107.6004385,-6.9146777],
            [107.6004271,-6.9149108],
            [107.5999765,-6.9150493],
            [107.6001857,-6.915912],
            [107.6002501,-6.915928],
            [107.6002554,-6.9162049],
            [107.600588,-6.9161996],
            [107.600529,-6.9158641],
            [107.6002501,-6.9159227],
            [107.600529,-6.9158641],
            [107.6006739,-6.9158321],
            [107.6007382,-6.9157842],
            [107.6007114,-6.9156457],
            [107.6009528,-6.9156085],
            [107.6010011,-6.9155339],
            [107.6015053,-6.9154274],
            [107.6014732,-6.9152357],
            [107.6013551,-6.9152517],
            [107.6013716,-6.9154557],
            [107.6010011,-6.9155339],
            [107.6008777,-6.9153315],
            [107.6006041,-6.9154061],
            [107.6004271,-6.9149055],
        ]])
def jalanKelurahanMaleber(self, nama):
    """Write the road network of Kelurahan Maleber into the road shapefile.

    Each record/line pair below emits one polyline record named *nama*;
    the nested list holds [longitude, latitude] vertex pairs (WGS84,
    around Bandung, Indonesia). Repeated consecutive points retrace a
    branch back to a junction so the whole network stays one polyline.
    """
    # Polyline segment 1
    self.jalan.record(nama)
    self.jalan.line([[
        [107.5688412,-6.9100128],
        [107.569164,-6.9105174],
        [107.569341,-6.9106346],
        [107.5692847,-6.9107491],
        [107.5696974,-6.9114267],
        [107.5699509,-6.9108835],
        [107.5696974,-6.9114267],
        [107.5700515,-6.9119965],
        [107.5701373,-6.9119139],
        [107.5700515,-6.9119965],
        [107.5704029,-6.9125876],
        [107.5705826,-6.9123799],
        [107.5706255,-6.9122947],
        [107.5706952,-6.9120551],
        [107.5706523,-6.9120471],
        [107.5706952,-6.9120551],
        [107.5708293,-6.912063],
        [107.5709715,-6.9120737],
        [107.5708293,-6.912063],
        [107.5710466,-6.9113122],
        [107.5708293,-6.912063],
        [107.5706952,-6.9120551],
        [107.5706255,-6.9122947],
        [107.5705826,-6.9123799],
        [107.5704029,-6.9125876],
        [107.5706335,-6.9129604],
        [107.5707784,-6.9128432],
        [107.571009,-6.9125743],
        [107.5710546,-6.9125743],
        [107.5711968,-6.9126062],
        [107.5712799,-6.9123719],
        [107.5711968,-6.9126062],
        [107.5710546,-6.9125743],
        [107.571009,-6.9125743],
        [107.5707784,-6.9128432],
        [107.5706335,-6.9129604],
        [107.5709473,-6.9134849],
        [107.571229,-6.9133278],
        [107.5714865,-6.9133997],
        [107.571229,-6.9133278],
        [107.5709473,-6.9134849],
        [107.5716769,-6.9147284],
        [107.5717761,-6.9146565],
        [107.5720175,-6.9138817],
        [107.5721946,-6.9135195],
        [107.5720256,-6.9134477],
        [107.5721946,-6.9135195],
        [107.5722482,-6.913421],
        [107.5723825,-6.9130857],
        [107.5721198,-6.9129207],
        [107.5722113,-6.9127933],
        [107.5721959,-6.9127141],
        [107.5718432,-6.9126089],
        [107.5721959,-6.9127141],
        [107.5722113,-6.9127933],
        [107.5721198,-6.9129207],
        [107.5723825,-6.9130857],
        [107.5733039,-6.9134641],
        [107.5723825,-6.9130857],
        [107.5722482,-6.913421],
        [107.5721946,-6.9135195],
        [107.5720175,-6.9138817],
        [107.5717761,-6.9146565],
        [107.5716769,-6.9147284],
        [107.5718647,-6.9150186],
        [107.572157,-6.9148296],
        [107.5725674,-6.9148882],
        [107.5725754,-6.9148296],
        [107.5725674,-6.9148882],
        [107.572157,-6.9148296],
        [107.5718647,-6.9150186],
        [107.5721892,-6.9154953],
        [107.572318,-6.9153142],
        [107.5721892,-6.9154953],
        [107.5724226,-6.9157908],
        [107.572554,-6.9155805],
        [107.572503,-6.9154846],
        [107.5723662,-6.915426],
        [107.572503,-6.9154846],
        [107.572554,-6.9155805],
        [107.572664,-6.9156071],
        [107.5728008,-6.9155565],
        [107.5729214,-6.9155139],
        [107.5729636,-6.9155205],
    ]])
    # Polyline segment 2
    self.jalan.record(nama)
    self.jalan.line([[
        [107.5735333,-6.9149932],
        [107.5731403,-6.9148526],
        [107.5733817,-6.9149379],
        [107.5735078,-6.9142189],
        [107.5735641,-6.9139686],
        [107.5742373,-6.9141444],
        [107.5738457,-6.9151136],
        [107.5742373,-6.9141417],
        [107.574468,-6.9136757],
        [107.57424,-6.914147],
        [107.5735587,-6.9139713],
        [107.5731537,-6.9138568],
        [107.5722482,-6.913421],
        [107.5731537,-6.9138568],
        [107.5733039,-6.9134641],
        [107.5739476,-6.9137037],
        [107.5733039,-6.9134641],
        [107.5734675,-6.9129448],
        [107.5740469,-6.9131525],
        [107.5734675,-6.9129448],
        [107.5737223,-6.9123404],
        [107.5745994,-6.9126759],
        [107.5745833,-6.9127717],
        [107.5745941,-6.9128383],
        [107.574645,-6.9128783],
        [107.5747738,-6.912849],
        [107.5748086,-6.9127904],
        [107.5748355,-6.9127584],
        [107.5753516,-6.9129897],
        [107.5748355,-6.9127611],
        [107.5747604,-6.9127345],
        [107.5749052,-6.9124735],
        [107.574645,-6.9123244],
        [107.5749079,-6.9124735],
        [107.5747604,-6.9127345],
        [107.5745967,-6.9126759],
        [107.5740871,-6.9124762],
        [107.574181,-6.9122259],
        [107.5740871,-6.9124762],
        [107.5737223,-6.9123404],
        [107.5732932,-6.9121726],
        [107.5736097,-6.9113685],
        [107.5732932,-6.9121726],
        [107.5729794,-6.9120448],
        [107.5727299,-6.9126386],
        [107.5729794,-6.9120422],
        [107.5729016,-6.9120129],
        [107.5731576,-6.9113992],
        [107.5733064,-6.9109505],
        [107.5731576,-6.9113992],
        [107.5729016,-6.9120129],
        [107.5719977,-6.9116561],
        [107.5721237,-6.911451],
        [107.5719977,-6.9116561],
        [107.5718662,-6.9119729],
        [107.5719977,-6.9116561],
        [107.571771,-6.9115695],
        [107.5717187,-6.9116827],
        [107.571771,-6.9115695],
        [107.5717974,-6.9115781],
        [107.5719453,-6.911215],
        [107.5717981,-6.9112033],
        [107.5719453,-6.911215],
        [107.5717974,-6.9115781],
        [107.5715459,-6.9114949],
        [107.5713824,-6.9119103],
        [107.5715459,-6.9114949],
        [107.57131,-6.9114123],
        [107.5714226,-6.9110928],
        [107.57131,-6.9114123],
        [107.5710466,-6.9113122],
        [107.5711517,-6.9109676],
        [107.5710466,-6.9113122],
        [107.5706314,-6.9111354],
        [107.5705402,-6.9113484],
        [107.5705617,-6.9114496],
        [107.5705053,-6.9115987],
        [107.5704436,-6.9115774],
        [107.5704597,-6.9115108],
        [107.5703363,-6.9114443],
        [107.5704517,-6.9110848],
        [107.5706314,-6.9111354],
        [107.5705821,-6.9111211],
        [107.5709613,-6.9101795],
        [107.5708916,-6.9101555],
        [107.5709613,-6.9101795],
        [107.5705821,-6.9111211],
        [107.5704463,-6.9110821],
        [107.5699509,-6.9108835],
        [107.5696658,-6.9107653],
        [107.5703524,-6.909125],
        [107.5705804,-6.909133],
        [107.570854,-6.9085925],
        [107.5705804,-6.909133],
        [107.5703524,-6.909125],
        [107.5696658,-6.9107653],
        [107.569341,-6.9106346],
    ]])
    # Polyline segment 3
    self.jalan.record(nama)
    self.jalan.line([[
        [107.5696075,-6.9094825],
        [107.5696906,-6.9092908],
        [107.5693588,-6.9092193],
        [107.5696906,-6.9092908],
        [107.5697684,-6.9090911],
        [107.5695056,-6.9090351],
        [107.5697684,-6.9090911],
        [107.5698569,-6.9088914],
        [107.5695834,-6.9088354],
        [107.5698569,-6.9088914],
        [107.5699481,-6.9087023],
        [107.5697282,-6.9086331],
        [107.5699481,-6.9087023],
        [107.5700366,-6.9084973],
        [107.5696569,-6.9084019],
        [107.5700366,-6.9084973],
        [107.5701129,-6.9083221],
        [107.5698607,-6.9082528],
        [107.5701129,-6.9083221],
        [107.570854,-6.9085925],
        [107.5701129,-6.9083221],
        [107.5701665,-6.9082049],
        [107.5698661,-6.9081197],
        [107.5701665,-6.9082049],
        [107.5702041,-6.908125],
        [107.5703113,-6.908157],
        [107.5702041,-6.908125],
        [107.5702631,-6.9079652],
        [107.5700002,-6.9078694],
        [107.5702631,-6.9079652],
        [107.5702952,-6.9079173],
        [107.5705125,-6.9079985],
        [107.570719,-6.9080877],
        [107.5705125,-6.9079985],
        [107.5702952,-6.9079173],
        [107.5703435,-6.9077203],
        [107.569968,-6.9076297],
        [107.5703435,-6.9077203],
        [107.5704133,-6.9075499],
        [107.5700753,-6.9074806],
        [107.5704133,-6.9075499],
        [107.5706332,-6.9076138],
        [107.5705125,-6.9079985],
        [107.5706332,-6.9076138],
        [107.5704133,-6.9075499],
        [107.570483,-6.907257],
        [107.5700807,-6.9071558],
        [107.570483,-6.907257],
        [107.5709604,-6.9073794],
        [107.570483,-6.907257],
        [107.5705098,-6.9070013],
        [107.5702309,-6.9069641],
        [107.5705098,-6.9070013],
        [107.5705366,-6.9069215],
        [107.5707995,-6.9069854],
        [107.5705366,-6.9069215],
        [107.570542,-6.9068256],
        [107.5702255,-6.9067777],
        [107.570542,-6.9068256],
        [107.5705742,-6.9067138],
        [107.5708585,-6.9067777],
        [107.5705742,-6.9067138],
        [107.570593,-6.9065859],
        [107.5702094,-6.9065487],
        [107.570593,-6.9065859],
        [107.5705957,-6.906522],
        [107.5707727,-6.9065487],
        [107.5705957,-6.906522],
        [107.5706225,-6.9063889],
        [107.5702952,-6.9063463],
        [107.5706225,-6.9063889],
        [107.5706654,-6.9061972],
        [107.5702952,-6.9061493],
        [107.5706654,-6.9061972],
        [107.570719,-6.9059629],
        [107.5702952,-6.9058936],
        [107.570719,-6.9059629],
        [107.5708961,-6.9059788],
        [107.570719,-6.9059629],
        [107.5707459,-6.9057392],
        [107.570365,-6.9056487],
        [107.5707459,-6.9057392],
        [107.5707673,-6.9055368],
        [107.5703274,-6.9054569],
        [107.5707673,-6.9055368],
        [107.5708371,-6.9053664],
        [107.5706654,-6.9053025],
        [107.5708371,-6.9053664],
        [107.5708961,-6.9052066],
        [107.570542,-6.9051108],
        [107.5708961,-6.9052066],
        [107.5709765,-6.9049776],
        [107.5709578,-6.9050296],
        [107.5705313,-6.9048871],
    ]])
    # Polyline segment 4 (largest branch network)
    self.jalan.record(nama)
    self.jalan.line([[
        [107.5758411,-6.9122926],
        [107.5737543,-6.9114911],
        [107.5736578,-6.9113633],
        [107.5736097,-6.9113685],
        [107.5736604,-6.9113659],
        [107.5736953,-6.9112754],
        [107.5737892,-6.9112434],
        [107.5739179,-6.9109692],
        [107.5742505,-6.9110704],
        [107.5760771,-6.9117866],
        [107.5742532,-6.9110704],
        [107.5739179,-6.9109665],
        [107.5742183,-6.9102769],
        [107.5761723,-6.9110832],
        [107.574221,-6.9102769],
        [107.5737194,-6.9100665],
        [107.5733064,-6.9109505],
        [107.5737221,-6.9100665],
        [107.5732742,-6.9098349],
        [107.5730677,-6.909739],
        [107.5732715,-6.9098322],
        [107.5734512,-6.9094301],
        [107.5737248,-6.908602],
        [107.5737868,-6.9084887],
        [107.5739448,-6.9081067],
        [107.5739905,-6.9081123],
        [107.5739448,-6.9081067],
        [107.5736658,-6.9080375],
        [107.5737838,-6.907694],
        [107.5739367,-6.907694],
        [107.5742478,-6.907072],
        [107.5739367,-6.907694],
        [107.5737838,-6.907694],
        [107.5736658,-6.9080375],
        [107.5731535,-6.9079523],
        [107.5737731,-6.9064718],
        [107.5741084,-6.906597],
        [107.5743926,-6.9067631],
        [107.5741084,-6.906597],
        [107.5737731,-6.9064718],
        [107.5731535,-6.9079523],
        [107.5728906,-6.9078964],
        [107.5729577,-6.9078218],
        [107.5730382,-6.907505],
        [107.5729577,-6.9078218],
        [107.5728906,-6.9078964],
        [107.5726573,-6.9085807],
        [107.5728906,-6.9078964],
        [107.5726788,-6.9078511],
        [107.5729657,-6.9069831],
        [107.5732527,-6.9070576],
        [107.5729657,-6.9069831],
        [107.5729604,-6.9068979],
        [107.5730623,-6.9064692],
        [107.5730221,-6.9064239],
        [107.5730757,-6.9062135],
        [107.5733627,-6.90632],
        [107.5734325,-6.9062934],
        [107.5734566,-6.9061763],
        [107.5734995,-6.9060724],
        [107.5737007,-6.905476],
        [107.5737999,-6.9055079],
        [107.5743256,-6.9057928],
        [107.5742291,-6.9060538],
        [107.5741888,-6.9061283],
        [107.5743095,-6.9061922],
        [107.5744436,-6.9062082],
        [107.5745643,-6.9058993],
        [107.5745107,-6.9058461],
        [107.5745831,-6.9055905],
        [107.5747682,-6.9052044],
        [107.575074,-6.9053961],
        [107.574685,-6.9060218],
        [107.575074,-6.9053961],
        [107.5751361,-6.9054375],
        [107.575074,-6.9053961],
        [107.5747682,-6.9052044],
        [107.5741271,-6.904781],
        [107.5746166,-6.9051038],
        [107.5745375,-6.9052363],
        [107.5745053,-6.9052549],
        [107.5744705,-6.9053455],
        [107.5745322,-6.9054067],
        [107.5744061,-6.9057023],
        [107.5743551,-6.9057209],
        [107.5743256,-6.9057928],
        [107.5737999,-6.9055079],
        [107.5741271,-6.904781],
        [107.573816,-6.9045014],
        [107.573706,-6.9044401],
        [107.5735478,-6.9046958],
        [107.573706,-6.9044401],
        [107.5734485,-6.9042697],
        [107.5733439,-6.9044481],
        [107.5734485,-6.9042697],
        [107.5731079,-6.9040407],
        [107.5729684,-6.904291],
        [107.5731079,-6.9040407],
        [107.5728772,-6.9038836],
        [107.572778,-6.9040194],
        [107.5728772,-6.9038836],
        [107.572609,-6.9037798],
        [107.5722684,-6.9042751],
        [107.5723542,-6.9041526],
        [107.5725098,-6.9042617],
        [107.5723542,-6.9041526],
        [107.572609,-6.9037798],
        [107.5723327,-6.9035428],
        [107.572204,-6.9037958],
        [107.5720645,-6.9041233],
        [107.572204,-6.9037958],
        [107.5723327,-6.9035428],
        [107.5719251,-6.9032739],
        [107.5717802,-6.9035295],
        [107.5719251,-6.9032739],
        [107.571799,-6.903194],
        [107.5715388,-6.9037239],
        [107.571799,-6.903194],
        [107.5709002,-6.902592],
        [107.5715469,-6.9030262],
        [107.5711499,-6.9036679],
        [107.5709434,-6.9041313],
        [107.5713511,-6.9043043],
        [107.5714369,-6.9040194],
        [107.5715388,-6.9037239],
        [107.571681,-6.9037904],
        [107.571858,-6.9039129],
        [107.5718741,-6.9040061],
        [107.5714369,-6.9040194],
        [107.5718741,-6.9040061],
        [107.5720645,-6.9041233],
        [107.5722684,-6.9042751],
        [107.5721557,-6.9044721],
        [107.5722281,-6.9045067],
        [107.5721557,-6.9044721],
        [107.5720404,-6.9046825],
        [107.5713913,-6.9043629],
        [107.5712947,-6.904544],
        [107.5713913,-6.9043629],
        [107.5713511,-6.9043043],
        [107.5713913,-6.9043629],
        [107.5720404,-6.9046825],
        [107.5717829,-6.9051271],
        [107.57163,-6.9050339],
        [107.5717829,-6.9051271],
        [107.5717078,-6.9052842],
        [107.5715576,-6.9056331],
        [107.5713242,-6.9055718],
        [107.5711472,-6.9054893],
        [107.5712813,-6.9051618],
        [107.5711472,-6.9054893],
        [107.570997,-6.9054307],
        [107.5708371,-6.9053664],
        [107.570997,-6.9054307],
        [107.5708961,-6.9059788],
        [107.5707727,-6.9065487],
        [107.5708897,-6.9066209],
        [107.5708585,-6.9067777],
        [107.5707995,-6.9069854],
        [107.5708585,-6.9067777],
        [107.5708897,-6.9066209],
        [107.5709353,-6.9066183],
        [107.5710989,-6.9066289],
        [107.5713752,-6.9066795],
        [107.5712733,-6.9073905],
        [107.5712384,-6.9074491],
        [107.5709604,-6.9073794],
        [107.570719,-6.9080877],
        [107.5706564,-6.9082532],
        [107.570719,-6.9080877],
        [107.5709604,-6.9073794],
        [107.5712384,-6.9074491],
        [107.5710158,-6.908176],
        [107.570938,-6.9083783],
        [107.5708736,-6.9083517],
        [107.570938,-6.9083783],
        [107.570854,-6.9085925],
        [107.570938,-6.9083783],
        [107.5710158,-6.908176],
        [107.5713913,-6.9083411],
        [107.5710936,-6.9093289],
        [107.571343,-6.9094088],
        [107.5714101,-6.9094275],
        [107.5713135,-6.9098349],
        [107.5715388,-6.9099227],
        [107.5713913,-6.9104047],
        [107.5714825,-6.910426],
        [107.5713913,-6.9104047],
        [107.5709434,-6.9102263],
        [107.5709613,-6.9101795],
        [107.5710936,-6.909771],
        [107.5710667,-6.9098615],
        [107.5712169,-6.9099147],
        [107.5710667,-6.9098615],
        [107.5709434,-6.9102263],
        [107.5713913,-6.9104047],
        [107.5712786,-6.9107242],
        [107.571233,-6.9107242],
        [107.5711517,-6.9109676],
        [107.570989,-6.9109079],
    ]])
    # Polyline segment 5
    self.jalan.record(nama)
    self.jalan.line([[
        [107.5731576,-6.9113992],
        [107.5722645,-6.9110155],
        [107.5721814,-6.9112551],
        [107.5719453,-6.911215],
        [107.5722404,-6.9104856],
        [107.5728948,-6.9107652],
        [107.5730987,-6.9103897],
        [107.5730424,-6.9104909],
        [107.5728439,-6.9103977],
        [107.5730424,-6.9104909],
        [107.5728948,-6.9107652],
        [107.5733064,-6.9109505],
        [107.5728948,-6.9107652],
        [107.5722404,-6.9104856],
        [107.5723369,-6.9102166],
        [107.5715388,-6.9099227],
        [107.5723369,-6.9102166],
        [107.5725703,-6.9097054],
        [107.5727473,-6.9097959],
        [107.5725703,-6.9097054],
        [107.5717737,-6.9094924],
        [107.5714101,-6.9094275],
        [107.571343,-6.9094088],
        [107.5719588,-6.907687],
        [107.5716503,-6.9076018],
        [107.5716074,-6.9075033],
        [107.5712733,-6.9073905],
        [107.5716074,-6.9075033],
        [107.5716503,-6.9076018],
        [107.5713913,-6.9083411],
        [107.5716503,-6.9076018],
        [107.5719588,-6.907687],
        [107.5724201,-6.9077829],
        [107.5726788,-6.9078511],
        [107.5724201,-6.9077829],
        [107.5724201,-6.9076311],
        [107.5721626,-6.9075699],
        [107.5723209,-6.9067471],
        [107.5721626,-6.9075699],
        [107.5716583,-6.9074527],
        [107.5721626,-6.9075699],
        [107.5724201,-6.9076311],
        [107.5724201,-6.9077829],
        [107.5719614,-6.9089492],
        [107.5717737,-6.9094924],
        [107.5719614,-6.9089492],
        [107.5729351,-6.9093033],
        [107.5729914,-6.9092847],
        [107.5732918,-6.9084992],
        [107.5737248,-6.908602],
    ]])
    # Polyline segment 6
    self.jalan.record(nama)
    self.jalan.line([[
        [107.5708961,-6.9059788],
        [107.5710438,-6.9060206],
        [107.5711472,-6.9054893],
        [107.5710438,-6.9060206],
        [107.5709353,-6.9066183],
        [107.5710438,-6.9060206],
        [107.5712101,-6.9060499],
        [107.5713242,-6.9055718],
        [107.5712101,-6.9060499],
        [107.5710989,-6.9066289],
        [107.5712101,-6.9060499],
        [107.5714622,-6.9060952],
        [107.5714729,-6.9060605],
        [107.5715576,-6.9056331],
        [107.5714729,-6.9060605],
        [107.5714622,-6.9060952],
        [107.5714434,-6.9062922],
        [107.5713752,-6.9066795],
        [107.5714434,-6.9062922],
        [107.5718323,-6.906428],
        [107.5717572,-6.9067475],
        [107.5718323,-6.906428],
        [107.5720308,-6.9064839],
        [107.5723876,-6.9066969],
        [107.5729604,-6.9068979],
        [107.5730623,-6.9064692],
        [107.5730221,-6.9064239],
        [107.5722615,-6.9060632],
        [107.5721113,-6.9060179],
        [107.5720711,-6.9062603],
        [107.5723151,-6.9063455],
        [107.5726102,-6.9064946],
        [107.5723125,-6.9063428],
        [107.5720711,-6.9062603],
        [107.5714729,-6.9060605],
        [107.5720711,-6.9062603],
        [107.5720308,-6.9064839],
        [107.5720711,-6.9062603],
        [107.5721113,-6.9060179],
        [107.5721515,-6.9058129],
        [107.57224,-6.9055706],
        [107.5717078,-6.9052842],
        [107.5717829,-6.9051271],
        [107.5720094,-6.9047425],
        [107.5725297,-6.9050354],
        [107.5724332,-6.9052378],
        [107.57224,-6.9055706],
        [107.5724332,-6.9052378],
        [107.572527,-6.9053496],
        [107.5723285,-6.905757],
        [107.5725834,-6.9058848],
        [107.5726531,-6.9058555],
        [107.5727604,-6.9055733],
        [107.5729642,-6.9051099],
        [107.5733439,-6.9044481],
        [107.5731386,-6.9043377],
        [107.5730688,-6.9043298],
        [107.5727094,-6.9049688],
        [107.572527,-6.9053496],
        [107.5727094,-6.9049688],
        [107.5729642,-6.9051099],
    ]])
    # Polyline segment 7
    self.jalan.record(nama)
    self.jalan.line([[
        [107.573816,-6.9045014],
        [107.5735225,-6.9049559],
        [107.5732355,-6.9056402],
        [107.5736057,-6.9057613],
        [107.5734995,-6.9060724],
        [107.5730934,-6.9059863],
        [107.5732355,-6.9056402],
        [107.5730934,-6.9059863],
        [107.5730558,-6.9060902],
        [107.5734566,-6.9061763],
        [107.5731222,-6.9061048],
        [107.5730757,-6.9062135],
        [107.5728386,-6.9061328],
        [107.5726642,-6.9060822],
        [107.5721515,-6.9058129],
    ]])
def close(self):
    """Flush and close every shapefile writer held by this instance."""
    for writer in (self.kelurahan, self.kantor, self.jalan):
        writer.close()
| StarcoderdataPython |
class error(Exception):
    """Exception type mirroring the stdlib ``select.error`` for this shim."""
    pass
def as_fd(f):
    """Coerce *f* into a non-negative integer file descriptor.

    Accepts either a raw descriptor (int/long -- this is Python 2 code) or
    any object exposing a ``fileno()`` method, matching the contract of the
    stdlib ``select`` module.

    Raises TypeError for unusable arguments and ValueError for negative or
    oversized descriptors.
    """
    if not isinstance(f, (int, long)):
        # Probe for the attribute first so a missing method yields the
        # friendlier TypeError below instead of an AttributeError.
        try:
            fileno = f.fileno
        except AttributeError:
            raise TypeError("argument must be an int, or have a fileno() method.")
        f = f.fileno()
        if not isinstance(f, (int, long)):
            raise TypeError("fileno() returned a non-integer")
    fd = int(f)
    # NOTE(review): the isinstance(fd, long) test rejects descriptors too
    # large to fit a plain Python 2 int -- presumably deliberate; the error
    # message only mentions negativity, though. TODO confirm.
    if fd < 0 or isinstance(fd, long):
        raise ValueError("file descriptor cannot be a negative integer (%i)"%fd)
    return fd
def select(iwtd, owtd, ewtd, timeout=None):
    """Wait until one or more file descriptors are ready for some kind of I/O.
    The first three arguments are sequences of file descriptors to be waited for:
    rlist -- wait until ready for reading
    wlist -- wait until ready for writing
    xlist -- wait for an ``exceptional condition''
    If only one kind of condition is required, pass [] for the other lists.
    A file descriptor is either a socket or file object, or a small integer
    gotten from a fileno() method call on one of those.
    The optional 4th argument specifies a timeout in seconds; it may be
    a floating point number to specify fractions of seconds. If it is absent
    or None, the call will never time out.
    The return value is a tuple of three lists corresponding to the first three
    arguments; each contains the subset of the corresponding file descriptors
    that are ready.
    *** IMPORTANT NOTICE ***
    On Windows, only sockets are supported; on Unix, all file descriptors.
    """
    from select import poll, POLLIN, POLLOUT, POLLPRI, POLLERR, POLLHUP
    # Map id(object) -> integer fd so the same wrapper object can be found
    # again when building the result lists below.
    fddict = {}
    # Map fd -> combined poll() event mask.
    polldict = {}
    fd = 0
    for f in iwtd + owtd + ewtd:
        fddict[id(f)] = as_fd(f)
    for f in iwtd:
        fd = fddict[id(f)]
        polldict[fd] = polldict.get(fd, 0) | POLLIN
    for f in owtd:
        fd = fddict[id(f)]
        polldict[fd] = polldict.get(fd, 0) | POLLOUT
    for f in ewtd:
        fd = fddict[id(f)]
        polldict[fd] = polldict.get(fd, 0) | POLLPRI
    # Register every fd with its accumulated interest mask (Python 2:
    # dict.iteritems()).
    p = poll()
    for fd, mask in polldict.iteritems():
        p.register(fd, mask)
    if timeout is not None:
        # select() takes seconds (float allowed); poll() wants milliseconds.
        if (not hasattr(timeout, '__int__') and
            not hasattr(timeout, '__float__')):
            raise TypeError('timeout must be a float or None')
        ret = dict(p.poll(int(float(timeout) * 1000)))
    else:
        ret = dict(p.poll())
    # Translate poll() events back into select()-style ready lists; HUP/ERR
    # count as readable, ERR/PRI as exceptional, mirroring select semantics.
    iretd = [ f for f in iwtd if ret.get(fddict[id(f)], 0) & (POLLIN|POLLHUP|POLLERR)]
    oretd = [ f for f in owtd if ret.get(fddict[id(f)], 0) & POLLOUT]
    eretd = [ f for f in ewtd if ret.get(fddict[id(f)], 0) & (POLLERR|POLLPRI)]
    return iretd, oretd, eretd
| StarcoderdataPython |
151508 |
import os
# Script: generate pose-graph datasets with controlled outlier ratios.
# For each base dataset and each target inlier percentage, compute how many
# random outlier edges to add, then drive the shell helpers that build each
# configuration. Order matters: folders are created first, then populated.
dataset = ['csail', 'manhattan', 'intel', 'mit']
inliers_quantity = [127, 1952, 256, 20]
inliers_percentages = [0.5, 0.6, 0.7, 0.8, 0.9]
sample_size = 10
# outliers_quantity[i][j] = outliers needed so that dataset i reaches
# inlier percentage j: solve inlier_n / (inlier_n + outliers) = pct.
outliers_quantity = []
for i in range(0, len(dataset)):
    outliers_quantity.append([])
    for inlier_percentage in inliers_percentages:
        inlier_n = inliers_quantity[i]
        outliers_quantity[-1].append(round(inlier_n / inlier_percentage - inlier_n))
#print(outliers_quantity)
os.chdir('/home/amber/stew/pose_dataset/')
# Create one folder per (dataset, outlier count) configuration.
for i in range(0,len(dataset)):
    for j in range(0, len(inliers_percentages)):
        os.system('./create_new_dataset_folder.sh '+ dataset[i]+ ' random'+str(outliers_quantity[i][j]))
        print('adding new dataset folder: '+dataset[i]+'_'+'random'+str(outliers_quantity[i][j]))
# Populate each folder: deduplicate the g2o file, then generate samples.
for i in range(0, len(dataset)):
    for j in range(0, len(inliers_percentages)):
        dataset_name = dataset[i]
        configuration_name = 'random'+str(outliers_quantity[i][j])
        os.chdir(dataset_name + '_'+ configuration_name)
        os.system('python uniquify.py ' + dataset_name + '.g2o' + ' ' + dataset_name + '.g2o_unique.g2o')
        # 'mit' takes an extra trailing argument -- presumably a flag for its
        # generator variant; NOTE(review): confirm against generate_dataset.sh.
        if dataset_name == 'mit':
            os.system('./generate_dataset.sh ' + dataset_name + '.g2o_unique.g2o ' + str(sample_size) + ' ' + str(outliers_quantity[i][j]) + ' ' + str(0))
        else:
            os.system('./generate_dataset.sh ' + dataset_name + '.g2o_unique.g2o ' + str(sample_size) + ' ' + str(outliers_quantity[i][j]))
        os.chdir('..')
| StarcoderdataPython |
62768 | <reponame>stonewell/python-aop<filename>pyaop/aop_module_importer.py
import logging
import imp
import sys
from .module_traverser import ModuleTraverser
from .aspects.aspect_manager import AspectManager
class AopModuleImporter(object):
    """
    Import hook (PEP 302 finder/loader) that loads modules normally and then
    lets registered aspects hook the functions and types of each freshly
    loaded module via ModuleTraverser.
    """

    def __init__(self):
        super(AopModuleImporter, self).__init__()
        # Result of imp.find_module(), consumed by load_module().
        self.module_info_ = None
        self.module_traverser_ = ModuleTraverser(self.func_callback, self.type_callback)
        self.aspect_manager_ = AspectManager()
        self.aspect_manager_.load_aspects()

    def find_module(self, fullname, path=None):
        """Locate *fullname*; return self as its loader, or None if not found."""
        try:
            parts = fullname.split('.')
            self.module_info_ = imp.find_module(parts[-1], path)
        except ImportError:
            # Not found by this finder -- returning None lets the next finder
            # on sys.meta_path try. (Was a bare ``except:``, which would also
            # have swallowed KeyboardInterrupt/SystemExit.)
            return None
        return self

    def load_module(self, name):
        """Load *name* (at most once), register it in sys.modules, apply hooks."""
        if name in sys.modules:
            return sys.modules[name]
        module = None
        try:
            module = imp.load_module(name, *self.module_info_)
        except Exception:
            # Best-effort: log the failure and fall through; callers receive
            # None, matching the original behavior.
            logging.exception('load module err:%s', name)
        if module:
            sys.modules[name] = module
            module_hooker = self.aspect_manager_.get_module_hooker(name)
            if module_hooker:
                self.module_traverser_.traverse(module, module_hooker)
        return module

    def func_callback(self, m, name, f, user_data):
        # Delegate function hooking to the module hooker passed as user_data.
        # (``hook_functin`` is the hooker's actual API name.)
        user_data.hook_functin(m, name, f)

    def type_callback(self, m, name, t, user_data):
        # Delegate type hooking to the module hooker passed as user_data.
        user_data.hook_type(m, name, t)
| StarcoderdataPython |
171136 | <filename>termsaverlib/common.py
###############################################################################
#
# file: common.py
#
# Purpose: holds common helper functions used by termsaver code.
#
# Note: This file is part of Termsaver application, and should not be used
# or executed separately.
#
###############################################################################
#
# Copyright 2012 Termsaver
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
###############################################################################
"""
Holds common functionality used by termsaver screens.
"""
#
# Python build-in modules
#
import os
import sys
import traceback
import HTMLParser
import subprocess
import re
import time
def is_windows():
    """Return True when the current platform is Microsoft Windows."""
    platform = sys.platform
    return platform == "win32"
def is_macos():
    """
    Returns True if the environment is Apple macOS (Darwin).

    (Docstring previously said "Microsoft Windows" -- a copy-paste error
    from is_windows; the check itself was always for darwin.)
    """
    return sys.platform == "darwin"
def prettify_exception(ex):
    """
    Outputs the exception with its stack trace within separator lines.
    """
    # Python 2 print statement; placeholders are the exception class name,
    # its message attribute and the current formatted traceback.
    print """
===================================
Exception: (%s) %s
%s
===================================
""" % (ex.__class__.__name__, ex.message, traceback.format_exc())
def get_app_dir():
    """
    Return the termsaver settings directory, creating it on first use.

    Windows: <APPDATA>\\termsaver
    Unix:    <HOME>/.termsaver
    """
    if is_windows():
        base, leaf = os.environ['APPDATA'], "termsaver"
    else:
        base, leaf = os.environ['HOME'], ".termsaver"
    path = os.path.join(base, leaf)
    if not os.path.exists(path):
        # Permission errors here simply propagate to the caller.
        os.mkdir(path)
    return path
def get_temp_dir():
    """
    Return the temporary directory for the current operating system:
    the TMP environment variable on Windows, /tmp elsewhere.
    """
    return os.environ['TMP'] if is_windows() else "/tmp"
def unescape_string(escaped_text):
    """
    Unescape strings. This is useful for cases when data that needs to be
    displayed on screen is escaped for HTML or database stuff.

    Additional replacing is taken here, such as some HTML tags:
        * <br>, replaced to \n

    Python 2 only: relies on the HTMLParser module's unescape() and on the
    'string_escape' codec, neither of which exists in Python 3.
    """
    unescaped = escaped_text
    try:
        unescaped = HTMLParser.HTMLParser().unescape(escaped_text)
        # replace most common HTML data
        unescaped = unescaped.replace('<br>', '\n')
        unescaped = unescaped.replace('<br/>', '\n')
        unescaped = unescaped.replace('<br />', '\n')
        unescaped = unescaped.decode('string_escape')
    except:
        #
        # If there were errors here, just ignore them and try to give back
        # the string the best it could do
        #
        pass
    return unescaped
def get_day_suffix(day):
    """
    Return the English ordinal suffix for a day of the month, as in
    1st, 2nd, 3rd, 4th, ..., 11th, 12th, 13th, ..., 21st, 22nd, 23rd, 31st.

    Fixes the original table, which returned 'nd' for 12 ("12nd"):
    11, 12 and 13 are irregular and always take 'th'.
    """
    if 11 <= day % 100 <= 13:
        return 'th'
    return {1: 'st', 2: 'nd', 3: 'rd'}.get(day % 10, 'th')
def execute_shell(cmd, ignore_errors=False):
"""
Simple routine to execute shell commands.
If `ignore_errors` is false (default) errors here will be thrown, and
must be treated individually, to ensure proper message to end-user.
The `cmd` argument must be an array, formatted for subprocess.Popen.
If you are not sure on how to do that, just use: shlex.split(string).
"""
try:
p = subprocess.Popen(cmd, stdin=subprocess.PIPE,
stdout=subprocess.PIPE, close_fds=True)
out, __ = p.communicate()
except Exception, e:
if not ignore_errors:
raise e
return out.rstrip()
def strip_html(text):
    """
    Remove every HTML tag from *text* -- terminal output has no use for
    markup, so only the visible text is kept.
    """
    tag_pattern = re.compile('<[^<]+?>')
    return tag_pattern.sub('', text)
def get_cpu_usage(sleep_delay, ignore_errors=False):
    """
    Return the total CPU usage as a percentage (float), sampled over
    *sleep_delay* seconds. Not supported on Windows. On failure, raises
    unless *ignore_errors* is true, in which case 0 is returned.
    """
    try:
        if is_windows():
            raise Exception(_("Functionality not available for Windows. See --help for details."))
        elif is_macos():
            # Sum the per-process %cpu column reported by ps via awk.
            ps = subprocess.Popen(['ps', '-A', '-o %cpu'], stdout=subprocess.PIPE)
            cpu = subprocess.check_output(('awk', '{s+=$1} END {print s "%"}'), stdin=ps.stdout)
            ps.wait()
            time.sleep(sleep_delay) # required to simulate same in linux
            return float(cpu.strip()[:-1])
        else:
            # linux: derive usage from two /proc/stat samples taken
            # sleep_delay seconds apart (user/nice/system/idle jiffies).
            def getTimeList():
                # Python 2 ``file()`` builtin.
                statFile = file("/proc/stat", "r")
                timeList = statFile.readline().split(" ")[2:6]
                statFile.close()
                for i in range(len(timeList)) :
                    timeList[i] = int(timeList[i])
                return timeList
            def deltaTime() :
                x = getTimeList()
                time.sleep(sleep_delay)
                y = getTimeList()
                for i in range(len(x)) :
                    y[i] -= x[i]
                return y
            dt = deltaTime()
            if sum(dt) > 0:
                # Last field is idle time; usage = 100 - idle share.
                cpu = 100 - (dt[len(dt) - 1] * 100.00 / sum(dt))
            else:
                cpu = 0
            return cpu
    except Exception, e:
        if not ignore_errors:
            raise e
        else:
            return 0
def get_mem_usage(ignore_errors=False):
    """
    Return a (used_percentage, total_mb) tuple of memory usage. Not
    supported on Windows. On failure, raises unless *ignore_errors* is
    true, in which case (0, 0) is returned.
    """
    try:
        if is_windows():
            raise Exception(_("Functionality not available for Windows. See --help for details."))
        elif is_macos():
            # Parse vm_stat output; each counter is in 4096-byte pages.
            vm = subprocess.Popen(['vm_stat'], stdout=subprocess.PIPE).communicate()[0].decode()
            vmLines = vm.split('\n')
            sep = re.compile(':[\s]+')
            vmStats = {}
            for row in range(1,len(vmLines)-2):
                rowText = vmLines[row].strip()
                rowElements = sep.split(rowText)
                vmStats[(rowElements[0])] = int(rowElements[1].strip('\.')) * 4096
            total_mem = (vmStats["Pages wired down"]+vmStats["Pages active"]+vmStats["Pages inactive"]+vmStats["Pages free"])/1024/1024
            # NOTE(review): this reports the inactive+free share, i.e. it
            # looks like *available* rather than *used* percentage on macOS,
            # unlike the linux branch below -- confirm intended semantics.
            curr_mem = (vmStats["Pages inactive"]+vmStats["Pages free"]) * 100 / (vmStats["Pages wired down"]+vmStats["Pages active"]+vmStats["Pages inactive"]+vmStats["Pages free"])
            return (curr_mem, total_mem)
        else:
            # linux: read MemTotal/MemFree (kB) from /proc/meminfo.
            re_parser = re.compile(r'^(?P<key>\S*):\s*(?P<value>\d*)\s*kB')
            mem_info = {}
            for line in open('/proc/meminfo'):
                match = re_parser.match(line)
                if not match:
                    continue # skip lines that don't parse
                key, value = match.groups(['key', 'value'])
                if key not in ('MemTotal', 'MemFree'):
                    continue
                mem_info[key] = int(value)
            total_mem = mem_info['MemTotal'] / 1024
            curr_mem = (mem_info['MemTotal'] - mem_info['MemFree']) * 100 / mem_info['MemTotal']
            return (curr_mem, total_mem)
    except Exception, e:
        if not ignore_errors:
            raise e
        else:
            return (0,0)
| StarcoderdataPython |
1654182 | <reponame>ajgallego/Mask_RCNN-Cleansea
# import libraries
import os, sys
import sys
import random
import math
import numpy as np
import scipy.misc
import matplotlib
import matplotlib.pyplot as plt
import cv2
import json
from PIL import Image, ImageDraw
from tensorflow.python.framework.versions import VERSION as __version__
import tensorflow as tf
import imgaug
# Point ROOT_DIR at the local Mask_RCNN checkout and fail fast if missing.
ROOT_DIR = '/home/saflex/projecto_cleansea/Mask_RCNN-tensorflow2.0'
assert os.path.exists(ROOT_DIR), 'ROOT_DIR does not exist'
# Import mrcnn libraries
sys.path.append(ROOT_DIR)
from mrcnn.config import Config
from mrcnn import utils
import mrcnn.model as modellib
from mrcnn import visualize
from mrcnn.model import log
############################################################
# Configuracion
############################################################
class CleanSeaConfig(Config):
    """
    Mask R-CNN training configuration for the CleanSea (marine debris)
    dataset. Overrides only the fields that differ from mrcnn's defaults.
    """
    # Configuration name
    NAME = "debris"
    # We use a GPU with 12GB memory, which can fit two images.
    # Adjust down if you use a smaller GPU.
    IMAGES_PER_GPU = 1
    # Use small images for faster training. Set the limits of the small side
    # the large side, and that determines the image shape.
    IMAGE_MIN_DIM = 512
    IMAGE_MAX_DIM = 512
    # Number of classes + the background
    NUM_CLASSES = 1 + 19  # CleanSea has 19 classes
    # Skip detections below 50% confidence
    DETECTION_MIN_CONFIDENCE = 0.5
# Instantiate the training configuration and print it for inspection.
config = CleanSeaConfig()
config.display()
def get_ax(rows=1, cols=1, size=8):
    """Return a Matplotlib Axes array to be used in
    all visualizations in the notebook. Provide a
    central point to control graph sizes.

    Change the default size attribute to control the size
    of rendered images
    """
    figure_size = (size * cols, size * rows)
    _, ax = plt.subplots(rows, cols, figsize=figure_size)
    return ax
# Directory where MASK-RCNN stores its training logs and checkpoints.
MODEL_DIR = os.path.join(ROOT_DIR, "logs")
############################################################
# Dataset
############################################################
class CleanSeaDataset(utils.Dataset):
def load_data(self, dataset_dir, subset):
# Train or validation dataset?
assert subset in ["train_coco", "test_coco"]
dataset_dir = os.path.join(dataset_dir, subset)
print(dataset_dir)
# Cargamos el archivo json
annotation_json = os.path.join(dataset_dir,"annotations.json")
json_file = open(annotation_json)
coco_json = json.load(json_file)
json_file.close()
print("\nAnotaciones Cargadas\n")
# Añadimos los nombres de las clases usando el metodo de utils.Dataset
source_name = "coco_like"
for category in coco_json['categories']:
class_id = category['id']
class_name = category['name']
if class_id < 1:
print('Error: Class id for "{}" reserved for the background'.format(class_name))
else:
self.add_class(source_name, class_id, class_name)
print("Nombres Añadidos \n")
# Almacenamos las anotaciones
annotations = {}
for annotation in coco_json['annotations']:
image_id = annotation['image_id']
if image_id not in annotations:
annotations[image_id] = []
annotations[image_id].append(annotation)
print("Anotaciones Almacenadas\n")
# Almacenamos las imagenes y las añadimos al dataset
seen_images = {}
for image in coco_json['images']:
image_id = image['id']
if image_id in seen_images:
print("Warning: Skipping duplicate image id: {}".format(image))
else:
seen_images[image_id] = image
try:
image_file_name = image['file_name']
image_width = image['width']
image_height = image['height']
except KeyError as key:
print("Warning: Skipping image (id: {}) with missing key: {}".format(image_id, key))
image_path = os.path.join(dataset_dir, image_file_name)
image_annotations = annotations[image_id]
# Añadimos la imagen usando el metodo de utils.Dataset
self.add_image(
source=source_name,
image_id=image_id,
path=image_path,
width=image_width,
height=image_height,
annotations=image_annotations
)
print("Imagenes añadidas al Dataset\n")
def load_mask(self, image_id):
""" Carga la mascara de instancia para la imagen dada
MaskRCNN espera mascaras en forma de mapa de bits (altura, anchura e instancias)
Argumentos:
image_id: El ID de la imagen a la que vamos a cargar la mascara
Salida:
masks: Una cadena booleana con estructura (altura, anchya y la cuenta de instancias) con una mascara por instancia
class_ids: Una cadena de 1 dimension de clase ID de la instancia de la mascara """
image_info = self.image_info[image_id]
annotations = image_info['annotations']
instance_masks = []
class_ids = []
for annotation in annotations:
class_id = annotation['category_id']
mask = Image.new('1', (image_info['width'], image_info['height']))
mask_draw = ImageDraw.ImageDraw(mask, '1')
for segmentation in annotation['segmentation']:
mask_draw.polygon(segmentation, fill=1)
bool_array = np.array(mask) > 0
instance_masks.append(bool_array)
class_ids.append(class_id)
mask = np.dstack(instance_masks)
class_ids = np.array(class_ids, dtype=np.int32)
return mask, class_ids
def image_reference(self, image_id):
    """Return a human-readable reference (the file path) for the image.

    Falls back to the superclass implementation for images that were not
    added by this dataset's "object" source.
    """
    info = self.image_info[image_id]
    if info["source"] == "object":
        return info["path"]
    else:
        # Bug fix: the original called the superclass method but discarded
        # its result, so non-"object" images always got None back.
        return super(self.__class__, self).image_reference(image_id)
# Deterministic color palette for visualizing detections.
def random_colors(N):
    """Return N pseudo-random RGB tuples with float components in [0, 255).

    The generator is re-seeded on every call, so the palette is identical
    across runs (and across calls with the same N prefix).
    """
    np.random.seed(1)
    palette = []
    for _ in range(N):
        r, g, b = 255 * np.random.rand(3)
        palette.append((r, g, b))
    return palette
# Blend a flat color into an image wherever the mask is set.
def apply_mask(image, mask, color, alpha=0.5):
    """Alpha-blend `color` into `image` (in place) at pixels where mask == 1.

    Pixels outside the mask are left untouched. Returns the mutated image.
    """
    inside = (mask == 1)
    keep = 1 - alpha
    for channel, value in enumerate(color):
        image[inside, channel] = image[inside, channel] * keep + alpha * value
    return image
# Annotate an image with the masks, boxes and captions of every detection.
def display_instances(image, boxes, masks, ids, names, scores):
    """Draw each detection (mask, bounding box, "label score" caption) on `image`.

    NOTE(review): `ids` index directly into `names`, so `names` is expected to
    carry a background entry at index 0 — verify against how the class-name
    list is built.
    """
    n_instances = boxes.shape[0]
    if n_instances:
        assert boxes.shape[0] == masks.shape[-1] == ids.shape[0]
    else:
        print("NO INSTANCES TO DISPLAY")
    for i, color in enumerate(random_colors(n_instances)):
        # Skip zero-padded box rows produced by the detector.
        if not np.any(boxes[i]):
            continue
        y1, x1, y2, x2 = boxes[i]
        label = names[ids[i]]
        score = scores[i] if scores is not None else None
        if score:
            caption = "{} {:.2f}".format(label, score)
        else:
            caption = label
        image = apply_mask(image, masks[:, :, i], color)
        image = cv2.rectangle(image, (x1, y1), (x2, y2), color, 2)
        image = cv2.putText(image, caption, (x1, y1), cv2.FONT_HERSHEY_COMPLEX, 0.7, color, 2)
    return image
# Load the COCO-style annotation file for the training split.
dataset_dir = "/home/saflex/projecto_cleansea/coco/train_coco_ok"
annotation_json = os.path.join(dataset_dir, "annotations.json")
print(annotation_json)
# Context manager guarantees the handle is closed even if json.load raises.
with open(annotation_json) as json_file:
    coco_json = json.load(json_file)
print("\nAnotaciones Cargadas\n")

# Bug fix: Mask R-CNN class IDs are 1-based (0 is reserved for the background)
# and display_instances() looks labels up with names[class_id], so the list
# must start with a background entry — otherwise every label is shifted by one
# and the highest class ID raises IndexError.
class_names = ['BG']
# Register class names, mirroring the dataset's utils.Dataset bookkeeping.
source_name = "coco_like"
for category in coco_json['categories']:
    class_id = category['id']
    class_name = category['name']
    if class_id < 1:
        print('Error: Class id for "{}" reserved for the background'.format(class_name))
    else:
        class_names.append(class_name)
print("Nombres Añadidos \n")
print(class_names)
# Mask R-CNN inference setup: log directory, input video, output directory
# and the trained weight file. Paths are machine-specific (hard-coded).
MODEL_DIR = os.path.join(ROOT_DIR, 'logs')
VIDEO_FILE = "/home/saflex/projecto_cleansea/debrisVideo.mp4"
VIDEO_SAVE_DIR = os.path.join('/media/saflex/TOSHIBA EXT/TFG/video_detection', 'savedimgs')
COCO_MODEL_PATH = os.path.join(MODEL_DIR, 'mask_rcnn_debris_weights1000DA+.h5')
# Download pretrained weights only when the local file is missing.
if not os.path.exists(COCO_MODEL_PATH):
    utils.download_trained_weights(COCO_MODEL_PATH)
class InferenceConfig(CleanSeaConfig):
    """Inference-time overrides of the training configuration."""
    # One GPU processing three images per call, so model.detect() expects
    # batches of exactly GPU_COUNT * IMAGES_PER_GPU = 3 frames.
    GPU_COUNT = 1
    IMAGES_PER_GPU = 3

config = InferenceConfig()
print(config)

# Create model object in inference mode.
model = modellib.MaskRCNN(mode='inference', model_dir=MODEL_DIR, config=config)

# Load the trained weights (the file name suggests custom debris training,
# not MS-COCO, despite the COCO_MODEL_PATH variable name).
model.load_weights(COCO_MODEL_PATH, by_name=True)
video = cv2.VideoCapture(VIDEO_FILE)

# Find the OpenCV version: the FPS property constant moved from cv2.cv in
# OpenCV 2.x to the top-level cv2 namespace in OpenCV 3+.
(major_ver, minor_ver, subminor_ver) = (cv2.__version__).split('.')
if int(major_ver) < 3 :
    fps = video.get(cv2.cv.CV_CAP_PROP_FPS)
    print('Frames per second using video.get(cv2.cv.CV_CAP_PROP_FPS): {0}'.format(fps))
else :
    fps = video.get(cv2.CAP_PROP_FPS)
    print("Frames per second using video.get(cv2.CAP_PROP_FPS) : {0}".format(fps))

# Make sure the output directory for annotated frames exists.
try:
    if not os.path.exists(VIDEO_SAVE_DIR):
        os.makedirs(VIDEO_SAVE_DIR)
except OSError:
    print ('Error: Creating directory of data')
# Stream the video through the detector in fixed-size batches:
# model.detect() requires exactly GPU_COUNT * IMAGES_PER_GPU images per call.
BATCH_SIZE = config.GPU_COUNT * config.IMAGES_PER_GPU

frames = []
frame_count = 0
while True:
    ret, frame = video.read()
    if not ret:
        break
    # Accumulate frames until a full batch is available.
    frame_count += 1
    frames.append(frame)
    print('frame_count :{0}'.format(frame_count))
    if len(frames) == BATCH_SIZE:
        results = model.detect(frames, verbose=0)
        print('Predicted')
        for i, item in enumerate(zip(frames, results)):
            frame = item[0]
            r = item[1]
            frame = display_instances(frame, r['rois'], r['masks'], r['class_ids'], class_names, r['scores'])
            # Zero-based output name of the i-th frame of this batch.
            name = '{0}.jpg'.format(frame_count + i - BATCH_SIZE)
            name = os.path.join(VIDEO_SAVE_DIR, name)
            cv2.imwrite(name, frame)
            print('writing to file:{0}'.format(name))
        # Clear the frames array to start the next batch
        frames = []

# Bug fix: the original loop silently dropped up to BATCH_SIZE - 1 trailing
# frames when the video length is not a multiple of the batch size. Pad the
# final partial batch with copies of the last frame (detect() needs a full
# batch) and write out only the real frames.
if frames:
    n_real = len(frames)
    padded = frames + [frames[-1]] * (BATCH_SIZE - n_real)
    results = model.detect(padded, verbose=0)
    print('Predicted')
    for i in range(n_real):
        r = results[i]
        annotated = display_instances(frames[i], r['rois'], r['masks'], r['class_ids'], class_names, r['scores'])
        name = os.path.join(VIDEO_SAVE_DIR, '{0}.jpg'.format(frame_count + i - n_real))
        cv2.imwrite(name, annotated)
        print('writing to file:{0}'.format(name))

video.release()
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.