text stringlengths 38 1.54M |
|---|
"""
This module is reponsible for fetching the status of each tank.
"""
import json
from logs import log_warning
def get_tank_status() -> dict:
    """
    Read the tank JSON file ('tanks.json') and return its contents.

    Returns:
        dict: the parsed JSON contents, or an empty dict when the file
        does not contain valid JSON (the parse error is logged).
    """
    with open('tanks.json', 'r') as tanks_file:
        try:
            tank_status = json.load(tanks_file)
        except ValueError as warning:
            # Bug fix: previously fell back to a list ([]), violating the
            # declared dict return type; use an empty dict instead.
            tank_status = {}
            log_warning(warning)
    return tank_status
import tensorflow as tf
from models.base_model import Model
from core.layers import MLP, GRU, SparseMax
from utils.constants import BIG_NUMBER, SMALL_NUMBER, FLOW_THRESHOLD
from utils.tf_utils import masked_gather
from utils.flow_utils import mcf_solver, dual_flow, destination_attn
from cost_functions.cost_functions import get_cost_function
from models.aggregators import Neighborhood, GAT, GGNN
class FlowModel(Model):
    """Neural minimum-cost-flow model (TF1 graph mode).

    Encodes node embeddings + features, refines them with a configurable
    graph aggregator (neighborhood / GAT / gated GAT / GGNN), decodes
    per-edge flow proportions, and recovers a flow with an iterative MCF
    solver. The training loss is the primal flow cost minus either the
    provided true cost or a learned dual lower bound.
    """

    def __init__(self, params, name='flow-model'):
        super(FlowModel, self).__init__(params, name)
        # Edge-cost function; params['cost_fn']['use_edges'] controls
        # whether it also consumes per-edge lengths.
        self.cost_fn = get_cost_function(cost_fn=params['cost_fn'])
        self.should_use_edges = params['cost_fn']['use_edges']

    def build(self, **kwargs):
        """Construct the computation graph.

        Populates self.loss, self.loss_op, self.output_ops and
        self.optimizer_op. Shape legend used below: B = batch,
        V = max nodes, D = max degree, F = feature size, K = encoding.
        """
        # B x V x 1 tensor which contains node demands
        demands = kwargs['demands']
        # B x V x F tensor which contains node features
        node_features = kwargs['node_features']
        # B x V x D tensor containing the padded adjacency list
        adj_lst = kwargs['adj_lst']
        # B x V x D tensor containing padded inverse adjacency list
        inv_adj_lst = kwargs['inv_adj_lst']
        # B x V x D tensor of edge lengths
        edge_lengths = kwargs['edge_lengths']
        # B x V x D tensor of normalized edge lengths (unused in this method)
        norm_edge_lengths = kwargs['norm_edge_lengths']
        # List of B x V x D tensors containing padded adjacency lists for k neighborhood levels
        out_neighborhoods = kwargs['out_neighborhoods']
        in_neighborhoods = kwargs['in_neighborhoods']
        # B*V*D x 3 tensor containing 3D indices used to compute inflow
        in_indices = kwargs['in_indices']
        # B*V*D x 3 tensor containing 2D indices of outgoing neighbors (unused in this method)
        rev_indices = kwargs['rev_indices']
        # B x 1
        num_nodes = kwargs['num_nodes']
        # Floating point number between 0 and 1
        dropout_keep_prob = kwargs['dropout_keep_prob']
        # Scalar Int
        max_num_nodes = kwargs['max_num_nodes']
        # B x 1 tensor of true costs (if given)
        true_costs = kwargs['true_costs']

        with self._sess.graph.as_default():
            with tf.variable_scope(self.name, reuse=tf.AUTO_REUSE):
                # B x V matrix of node ids, tiled across the batch.
                node_indices = tf.range(start=0, limit=max_num_nodes)
                node_indices = tf.tile(tf.expand_dims(node_indices, axis=0),
                                       multiples=(tf.shape(num_nodes)[0], 1))

                # Trainable per-node embedding table (lookup norm-clipped to 1).
                node_embedding_init = tf.random.normal(shape=(max_num_nodes, self.params['node_embedding_size']))
                node_embedding_var = tf.Variable(node_embedding_init,
                                                 trainable=True,
                                                 name='node-embedding-var')
                node_embeddings = tf.nn.embedding_lookup(params=node_embedding_var,
                                                         ids=node_indices,
                                                         max_norm=1,
                                                         name='node-embedding-lookup')

                # Node encoding, B x V x K
                encoder = MLP(hidden_sizes=[],
                              output_size=self.params['node_encoding'],
                              activation=None,
                              activate_final=True,
                              name='node-encoder')
                node_encoding = encoder(inputs=tf.concat([node_embeddings, node_features], axis=-1),
                                        dropout_keep_prob=dropout_keep_prob)

                # Select specific node aggregator
                if self.params['name'] == 'neighborhood':
                    node_aggregator = Neighborhood(output_size=self.params['node_encoding'],
                                                   num_heads=self.params['num_heads'],
                                                   activation=tf.nn.tanh,
                                                   name='neighborhood-aggregator')
                elif self.params['name'] == 'gat':
                    node_aggregator = GAT(output_size=self.params['node_encoding'],
                                          num_heads=self.params['num_heads'],
                                          activation=tf.nn.tanh,
                                          use_gru_gate=False,
                                          name='GAT-aggregator')
                elif self.params['name'] == 'gated-gat':
                    node_aggregator = GAT(output_size=self.params['node_encoding'],
                                          num_heads=self.params['num_heads'],
                                          activation=tf.nn.tanh,
                                          use_gru_gate=True,
                                          name='GRU-GAT-aggregator')
                elif self.params['name'] == 'ggnn':
                    node_aggregator = GGNN(output_size=self.params['node_encoding'],
                                           activation=tf.nn.tanh,
                                           name='GGNN-aggregator')
                else:
                    raise ValueError('Model with name {0} does not exist.'.format(self.params['name']))

                # Combine message passing steps (aggregator weights are
                # reused across layers via AUTO_REUSE).
                for _ in range(self.params['graph_layers']):
                    node_encoding = node_aggregator(node_states=node_encoding,
                                                    adj_lst=adj_lst,
                                                    inv_adj_lst=inv_adj_lst,
                                                    node_indices=node_indices,
                                                    dropout_keep_prob=dropout_keep_prob,
                                                    out_neighborhoods=out_neighborhoods,
                                                    in_neighborhoods=in_neighborhoods,
                                                    num_nodes=num_nodes)

                # Neighbor States, B x V x D x K
                neighbor_states, _ = masked_gather(values=node_encoding,
                                                   indices=adj_lst,
                                                   mask_index=num_nodes,
                                                   set_zero=True)

                # Mask to remove nonexistent edges, B x V x D
                # (padded adjacency entries equal the node count).
                mask_indices = tf.expand_dims(num_nodes, axis=-1)
                mask = tf.cast(tf.equal(adj_lst, mask_indices), tf.float32)
                adj_mask = 1.0 - mask

                # Current States tiled across neighbors, B x V x D x K
                tiled_states = tf.tile(tf.expand_dims(node_encoding, axis=-2),
                                       multiples=(1, 1, tf.shape(neighbor_states)[2], 1))
                tiled_states = tf.expand_dims(adj_mask, axis=-1) * tiled_states
                concat_states = tf.concat([tiled_states, neighbor_states], axis=-1)

                # Compute flow proportions
                decoder = MLP(hidden_sizes=self.params['decoder_hidden'],
                              output_size=1,
                              activation=tf.nn.tanh,
                              activate_final=False,
                              name='node-decoder')
                # B x V x D x 1
                node_weights = decoder(inputs=concat_states)
                # B x V x D
                node_weights = tf.squeeze(node_weights, axis=-1)

                # Mask out nonexistent neighbors before normalization, B x V x D
                # (-BIG_NUMBER drives masked logits to ~0 after softmax).
                pred_weights = (-BIG_NUMBER * mask) + node_weights

                # Normalize weights for outgoing neighbors
                if self.params['use_sparsemax']:
                    sparsemax = SparseMax(epsilon=SMALL_NUMBER)
                    normalized_weights = sparsemax(inputs=pred_weights, mask=adj_mask)
                else:
                    normalized_weights = tf.nn.softmax(pred_weights, axis=-1)
                normalized_weights = tf.debugging.check_numerics(normalized_weights, 'Normalized Weights has Inf or NaN.')

                # Recover a flow from the predicted proportions with an
                # iterative solver.
                flow, pflow = mcf_solver(pred_weights=normalized_weights,
                                         demand=demands,
                                         in_indices=in_indices,
                                         max_iters=self.params['flow_iters'])
                flow = tf.debugging.check_numerics(flow, 'Flow has Inf or NaN.')

                # Primal objective: total edge cost of the recovered flow, B.
                if self.should_use_edges:
                    flow_cost = tf.reduce_sum(self.cost_fn.apply(flow, edge_lengths), axis=[1, 2])
                else:
                    flow_cost = tf.reduce_sum(self.cost_fn.apply(flow), axis=[1, 2])
                flow_cost = tf.debugging.check_numerics(flow_cost, 'Flow Cost has Inf or NaN.')

                # Handle special case where the true cost is available
                if self.params['use_true_cost']:
                    self.loss = (flow_cost - true_costs)
                    self.loss_op = tf.reduce_mean(self.loss)

                    # Named outputs (dual quantities are zero-shaped
                    # placeholders so the output signature matches the
                    # dual branch below).
                    self.output_ops['flow'] = flow
                    self.output_ops['flow_cost'] = flow_cost
                    self.output_ops['normalized_weights'] = normalized_weights
                    self.output_ops['dual_cost'] = true_costs
                    self.output_ops['pred_weights'] = pred_weights
                    self.output_ops['dual_flow'] = tf.zeros_like(flow)
                    self.output_ops['dual_idx'] = tf.zeros_like(flow_cost)

                    self.optimizer_op = self._build_optimizer_op()
                    return

                # Compute Dual Problem and associated cost
                dual_decoder = MLP(hidden_sizes=self.params['decoder_hidden'],
                                   output_size=1,
                                   activation=tf.nn.tanh,
                                   activate_final=False,
                                   name='dual-decoder')
                node_encoding = tf.debugging.check_numerics(node_encoding, 'Node Encoding has Inf or Nan.')
                dual_vars = dual_decoder(inputs=node_encoding)

                # B x (V + 1) x D tensor of repeated dual variables
                dual = adj_mask * dual_vars

                # Need to compute transpose (via a masked gather)
                dual_tr, _ = masked_gather(values=dual_vars,
                                           indices=adj_lst,
                                           mask_index=num_nodes,
                                           set_zero=True)
                dual_tr = tf.squeeze(dual_tr, axis=-1)

                # alpha_j - alpha_i
                dual_diff = dual_tr - dual

                # B x V x D
                dual_flows, dual_idx = dual_flow(dual_diff=dual_diff,
                                                 adj_mask=adj_mask,
                                                 cost_fn=self.cost_fn,
                                                 edge_lengths=edge_lengths,
                                                 should_use_edges=self.should_use_edges,
                                                 step_size=self.params['dual_step_size'],
                                                 momentum=self.params['dual_momentum'],
                                                 max_iters=self.params['dual_iters'])
                dual_flows = tf.debugging.check_numerics(dual_flows, 'Dual Flows have Inf or NaN.')

                dual_demand = tf.reduce_sum(dual_vars * demands, axis=[1, 2])
                if self.should_use_edges:
                    dual_flow_cost = self.cost_fn.apply(dual_flows, edge_lengths)
                else:
                    dual_flow_cost = self.cost_fn.apply(dual_flows)
                dual_flow_cost += dual_diff * dual_flows
                dual_cost = tf.reduce_sum(dual_flow_cost, axis=[1, 2]) - dual_demand
                dual_cost = tf.debugging.check_numerics(dual_cost, 'Dual Cost has Inf or NaN.')

                # tf.summary.histogram('Duality Gap', flow_cost - dual_cost)

                # Loss is the gap between primal cost and the dual bound.
                self.loss = (flow_cost - dual_cost)
                self.loss_op = tf.reduce_mean(self.loss)

                # Named outputs
                self.output_ops['flow'] = flow
                self.output_ops['flow_cost'] = flow_cost
                self.output_ops['normalized_weights'] = normalized_weights
                self.output_ops['dual_cost'] = dual_cost
                self.output_ops['pred_weights'] = pred_weights
                self.output_ops['dual_flow'] = dual_flows
                self.output_ops['dual_idx'] = dual_idx

                self.optimizer_op = self._build_optimizer_op()
                # self.train_writer = tf.summary.FileWriter('./logs/train', self._sess.graph)
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import models
class MailMessage(models.Model):
    """Extends mail.message with a portal-oriented serialization."""

    _inherit = 'mail.message'

    def portal_message_format(self):
        """Return portal-ready values for the messages in `self`."""
        return self._portal_message_format([
            'id', 'body', 'date', 'author_id', 'email_from',  # base message fields
            'message_type', 'subtype_id', 'is_internal', 'subject',  # message specific
            'model', 'res_id', 'record_name',  # document related
        ])

    def _portal_message_format(self, fields_list):
        """Format messages and ensure every attachment has an access token.

        Uses a sudo'd ir.attachment so tokens can be generated even when
        the requesting (portal) user lacks write access on the attachment.
        """
        vals_list = self._message_format(fields_list)
        IrAttachmentSudo = self.env['ir.attachment'].sudo()
        for vals in vals_list:
            for attachment in vals.get('attachment_ids', []):
                if not attachment.get('access_token'):
                    attachment['access_token'] = IrAttachmentSudo.browse(attachment['id']).generate_access_token()[0]
        return vals_list
|
# coding: utf-8
# In[40]:
"""
난이도 : 3
문제 : 1부터 10000사이의 자연수 N이 주어진다.
자연수 중에서 6을 연속으로 3개이상 포함하는 수 중에 N번째 수를 출력.
알고리즘 : 666부터 시작하여 1씩 더해가면서, 수를 문자열로바꾼뒤 '666'을 1개 이상 포함하면 하나씩 카운트한다.
입력받은 N과 카운트수가 일치하는 수를 출력한다.
"""
def Brute_force(N):
    """Print and return the N-th natural number that contains '666'.

    Counts upward from 666; a number qualifies when its decimal
    representation contains the substring '666'.
    (Problem statement, translated: given 1 <= N <= 10000, output the
    N-th number whose digits include three or more consecutive sixes.)
    """
    cnt = 666
    s = 0
    while True:
        # '666' in ... is the idiomatic form of .count('666') >= 1
        if '666' in str(cnt):
            s += 1
            if s == N:
                print(cnt)
                # Also return the answer (previously returned None),
                # which keeps printing behavior and makes it testable.
                return cnt
        cnt += 1
if __name__ == "__main__":
    # Read N from stdin only when run as a script (avoids blocking on import).
    N = int(input())
    Brute_force(N)
|
from django.contrib import admin
from rd.models import Detail, Photo, EngineCategory, CarCategory, EngineCategoryPhoto, CarCategoryPhoto, DetailCategory, \
City
# Register your models here.
from django import forms
from tinymce.widgets import TinyMCE
class PhotoInline(admin.TabularInline):
    """Inline editor for a Detail's photos."""
    model = Photo
class DetailAdmin(admin.ModelAdmin):
    """Admin for Detail: inline photos, list filters, and name search."""
    inlines = [
        PhotoInline,
    ]
    list_filter = ('cars', 'engines', 'nalichie', 'category')
    filter_horizontal = ('related_details',)
    list_display = ('name', 'inner_articul', 'articul', 'cost', 'nalichie')
    search_fields = ('name',)
class EnginePhotoInline(admin.TabularInline):
    """Inline editor for an EngineCategory's photos."""
    model = EngineCategoryPhoto
class CategoryPhotoInline(admin.TabularInline):
    """Inline editor for a CarCategory's photos."""
    model = CarCategoryPhoto
class EngineForm(forms.ModelForm):
    """EngineCategory admin form with a TinyMCE widget for about_html."""
    about_html = forms.CharField(widget=TinyMCE(attrs={'cols': 80, 'rows': 10}), required=False)

    class Meta:
        model = EngineCategory
        fields = '__all__'
class EngineCategoryAdmin(admin.ModelAdmin):
    """Admin for EngineCategory: rich-text form, sortable list, photos inline."""
    form = EngineForm
    list_display = ('name', 'sort')
    list_editable = ('sort',)
    search_fields = ('name',)
    inlines = [
        EnginePhotoInline
    ]
class CarForm(forms.ModelForm):
    """CarCategory admin form with a TinyMCE widget for about_html."""
    about_html = forms.CharField(widget=TinyMCE(attrs={'cols': 80, 'rows': 10}), required=False)

    class Meta:
        model = CarCategory
        fields = '__all__'
class CarCategoryAdmin(admin.ModelAdmin):
    """Admin for CarCategory: rich-text form, sortable list, photos inline."""
    form = CarForm
    list_display = ('name', 'sort')
    list_editable = ('sort',)
    search_fields = ('name',)
    inlines = [
        CategoryPhotoInline
    ]
# Register the customized admin classes; DetailCategory and City use the
# default ModelAdmin.
admin.site.register(Detail, DetailAdmin)
admin.site.register(EngineCategory, EngineCategoryAdmin)
admin.site.register(CarCategory, CarCategoryAdmin)
admin.site.register(DetailCategory)
admin.site.register(City)
from django.shortcuts import render, HttpResponseRedirect, reverse
# used this https://stackoverflow.com/questions/23557697/django-how-to-let-permissiondenied-exception-display-the-reason
#in reference on how to display an error to user if they don't have permission
from django.http import HttpResponseForbidden
from django.contrib.auth import login, logout, authenticate
from django.contrib.auth.decorators import login_required
from django.contrib.admin.views.decorators import staff_member_required
from django.contrib.auth.models import User
from recipe.models import Recipe, Author
from recipe.forms import AddRecipeForm, AddAuthorForm, LoginForm
# Create your views here.
def index_view(request):
    """Render the homepage with every recipe and a welcome banner."""
    all_recipes = Recipe.objects.all()
    context = {"recipe": all_recipes, "welcome": "Welcome to Recipe World"}
    return render(request, "index.html", context)
def recipe_detail(request, recipe_id):
    """Render the detail page for one recipe (context is None if absent)."""
    recipe = Recipe.objects.filter(id=recipe_id).first()
    return render(request, "recipe.html", {"recipe": recipe})
# Got help from Matt Perry - this was for v1 recipebox assignment
def author_recipes(request, author_id):
    """Render every recipe written by the given author."""
    author = Author.objects.filter(id=author_id).first()
    authored = Recipe.objects.filter(author=author)
    return render(request, "author_recipes.html", {"recipes": authored, "author": author})
@login_required
def recipe_form_view(request):
    """Create a Recipe owned by the logged-in user's Author profile."""
    if request.method == "POST":
        form = AddRecipeForm(request.POST)
        if form.is_valid():
            data = form.cleaned_data
            Recipe.objects.create(
                title=data.get('title'),
                author=request.user.author,
                description=data.get('description'),
                time_required=data.get('time_required'),
                instructions=data.get('instructions'),
            )
            return HttpResponseRedirect(reverse("homepage"))
        # Bug fix: an invalid POST previously fell through and built a
        # fresh blank form, silently discarding validation errors; keep
        # the bound form so errors are rendered.
    else:
        form = AddRecipeForm()
    return render(request, "generic_form.html", {"form": form})
@login_required
def author_form_view(request):
    """Staff-only view that creates a User plus its Author profile."""
    if not request.user.is_staff:
        # Guard clause: non-staff users may not create authors.
        return HttpResponseForbidden("You don't have permission to make an author")
    if request.method == "POST":
        form = AddAuthorForm(request.POST)
        if form.is_valid():
            data = form.cleaned_data
            new_user = User.objects.create_user(username=data.get("username"), password=data.get("password"))
            Author.objects.create(name=data.get("name"), user=new_user, bio=data.get("bio"))
            return HttpResponseRedirect(reverse("homepage"))
        # Bug fix: keep the bound form on an invalid POST so validation
        # errors are shown instead of a blank form.
    else:
        form = AddAuthorForm()
    return render(request, "generic_form.html", {"form": form})
def login_view(request):
    """Authenticate and log a user in, honoring an optional ?next= URL."""
    if request.method == "POST":
        form = LoginForm(request.POST)
        if form.is_valid():
            data = form.cleaned_data
            user = authenticate(request, username=data.get("username"), password=data.get("password"))
            if user:
                login(request, user)
                return HttpResponseRedirect(request.GET.get('next', reverse("homepage")))
            # Bug fix: a failed authentication previously re-rendered a
            # blank form with no feedback; attach a form-level error.
            form.add_error(None, "Invalid username or password.")
        # Fall through with the bound form so errors are displayed.
    else:
        form = LoginForm()
    return render(request, "generic_form.html", {"form": form})
def logout_view(request):
    """Log the current user out and redirect to the homepage."""
    logout(request)
    return HttpResponseRedirect(reverse("homepage"))
|
from Google import Create_Service
from flask import Flask, render_template, request, json
import requests
app = Flask(__name__)

# Drive folder-tree template: company / <Admin|year> / <sub folders> ...
parent_folder = ['Admin', '2020']
sub_folder_admin = ['11. Director Details']
sub_folder_tahun = ['01. Januari']  # tahun = year (Indonesian)
sub_folder_bulan = ['24. PP23']  # bulan = month
# Accumulators filled with Drive folder ids while the tree is created.
parent_id_company = []
parent_id_admin = []
parent_id_tahun = []
parent_id_bulan = []
all_id_folder = []
# Trello checklist names and how many placeholder items each receives.
nama_checklist = ['Permintaan Dokumen', 'Pengerjaan Pembukuan Bulanan', 'PPh Pasal 21', 'PP23']
jml_checklist = [3, 6, 8, 5]
checkitem_PD = ['permintaan bank statment', 'permintaan laporan penjualan', 'permintaan laporan pembelian']
# NOTE(review): `id` shadows the builtin; several functions below use it
# as a global, so renaming would be a wider refactor.
id = []
@app.route("/")
def main():
    """Serve the landing page."""
    return render_template('index.html')
@app.route('/showPT')
def showPT():
    """Serve the company sign-up form."""
    return render_template('signUp.html')
@app.route('/addPT', methods=['POST'])
def addPT():
    """Create the Drive folder tree and Trello card/checklists for a company.

    Reads 'inputName' from the posted form; on success returns a JSON
    confirmation snippet, otherwise a JSON error snippet.
    """
    global _name
    global drive_service
    _name = request.form['inputName']
    if _name:
        CLIENT_SECRET_FILE = 'credentials.json'
        API_NAME = 'drive'
        API_VERSION = 'v3'
        SCOPES = ['https://www.googleapis.com/auth/drive']
        drive_service = Create_Service(CLIENT_SECRET_FILE, API_NAME, API_VERSION, SCOPES)
        create_company_folder(_name)
        create_parent_folder(parent_folder, parent_id_company[0])
        create_sub_folder_admin(sub_folder_admin, parent_id_admin[0])
        create_sub_folder_tahun(sub_folder_tahun, parent_id_tahun[0])
        create_folder_inside_each_month(sub_folder_bulan, parent_id_bulan)
        create_permission(all_id_folder)
        create_cards(_name)
        create_checklist(id_cards)
        create_checkitem(id)
        # Bug fix: a Flask view must return a response; this path
        # previously returned None, which raises a 500 in Flask.
        return json.dumps({'html': '<span>Success</span>'})
    else:
        return json.dumps({'html': '<span>Enter the required fields</span>'})
def create_company_folder(company_name):
    """Create the top-level Drive folder for the company.

    Appends the new folder id to parent_id_company and all_id_folder and
    returns it (previously `return print(...)` always returned None).
    """
    file_metadata = {
        'name': company_name,
        'mimeType': 'application/vnd.google-apps.folder'
    }
    file = drive_service.files().create(body=file_metadata, fields='id').execute()
    folder_id = file.get('id')
    parent_id_company.append(folder_id)
    all_id_folder.append(folder_id)
    print("Succes Create Folder for PT")
    return folder_id
def create_parent_folder(parent_folder, parent_id):
    """Create the admin and year folders under the company folder.

    The first entry's id is recorded in parent_id_admin, subsequent
    entries' ids in parent_id_tahun.
    """
    for i, folder in enumerate(parent_folder):
        file_metadata = {
            'name': folder,
            'parents': [parent_id],
            'mimeType': 'application/vnd.google-apps.folder'
        }
        # fields='id' added for consistency with create_company_folder.
        file = drive_service.files().create(body=file_metadata, fields='id').execute()
        if i == 0:
            parent_id_admin.append(file.get('id'))
        else:
            parent_id_tahun.append(file.get('id'))
    # Idiom fix: print for the side effect; `return print(...)` was None anyway.
    print("Succes Create Folder for Admin and Year")
def create_sub_folder_admin(sub_folder_admin, parent_id):
    """Create each admin sub-folder under the admin parent folder."""
    for sub_admin in sub_folder_admin:
        file_metadata = {
            'name': sub_admin,
            'parents': [parent_id],
            'mimeType': 'application/vnd.google-apps.folder'
        }
        # fields='id' added for consistency with create_company_folder.
        file = drive_service.files().create(body=file_metadata, fields='id').execute()
    print("Succes Create Sub Folder Admin")
def create_sub_folder_tahun(sub_folder_tahun, parent_id):
    """Create each month folder under the year folder.

    Each created folder id is appended to parent_id_bulan for later use.
    """
    for sub_tahun in sub_folder_tahun:
        file_metadata = {
            'name': sub_tahun,
            'parents': [parent_id],
            'mimeType': 'application/vnd.google-apps.folder'
        }
        # fields='id' added for consistency with create_company_folder.
        file = drive_service.files().create(body=file_metadata, fields='id').execute()
        parent_id_bulan.append(file.get('id'))
    print("Succes Create Sub Folder Year")
def create_folder_inside_each_month(sub_folder_bulan, parent_id):
    """Create the given sub-folders inside every month folder."""
    # Idiom fix: iterate the ids directly instead of range(len(...)).
    for month_id in parent_id:
        for sub_bulan in sub_folder_bulan:
            file_metadata = {
                'name': sub_bulan,
                'parents': [month_id],
                'mimeType': 'application/vnd.google-apps.folder'
            }
            file = drive_service.files().create(body=file_metadata, fields='id').execute()
    print("Succes Create Sub Folder Bulan")
def callback(request_id, response, exception):
    """Batch-request callback: print the failure, or the permission id."""
    if exception:
        # Handle error
        print(exception)
        return
    print("Permission Id: %s" % response.get('id'))
def create_permission(all_id_folder):
    """Grant writer access on the first collected folder via a batch request.

    Only all_id_folder[0] (the company root created first) is shared.
    NOTE(review): the recipient email is hard-coded -- confirm whether it
    should come from configuration.
    """
    file_id = all_id_folder[0]
    batch = drive_service.new_batch_http_request(callback=callback)
    user_permission = {
        'type': 'user',
        'role': 'writer',
        'emailAddress': 'tias1508@gmail.com',
    }
    batch.add(drive_service.permissions().create(
        fileId=file_id,
        body=user_permission,
        fields='id',
    ))
    batch.execute()
    return print("Success Create Permission")
def create_cards(nama):
    """Create a Trello card named `nama`; store its id in the global
    id_cards and return it.

    NOTE(review): the API key/token are hard-coded credentials.
    """
    url = "https://api.trello.com/1/cards"
    query = {
        'key': '2fb50ddab4bccb72c4f25e03680ae024',
        'token': '9ea8aab5b36bd7671f354d5f6597386683f6aef8c3cfb6f4c224d8b09f43996a',
        'idList': '5fe9639ca4647617995c94e8',
        'name': nama
    }
    response = requests.request("POST", url, params=query)
    body = response.text
    print(body)
    parsed = json.loads(body)
    global id_cards
    id_cards = parsed['id']
    print(id_cards)
    return id_cards
def create_checklist(idc):
    """Create one Trello checklist per entry of nama_checklist on card idc.

    Appends every created checklist id to the module-level `id` list.
    NOTE(review): API key/token are hard-coded; move them to config/env.
    """
    # Idiom fix: iterate the names directly instead of range(len(...)),
    # and stop clobbering the loop variable with the parsed JSON.
    for checklist_name in nama_checklist:
        url = "https://api.trello.com/1/checklists"
        query = {
            'key': '2fb50ddab4bccb72c4f25e03680ae024',
            'token': '9ea8aab5b36bd7671f354d5f6597386683f6aef8c3cfb6f4c224d8b09f43996a',
            'idCard': idc,
            'name': checklist_name
        }
        response = requests.request(
            "POST",
            url,
            params=query
        )
        checkresult = response.text
        parsed = json.loads(checkresult)
        id.append(parsed['id'])
        print(parsed['id'])
        print(checkresult)
    print(id)
def create_checkitem(id):
    """Populate each Trello checklist with its placeholder check items.

    For checklist k (ids in `id`, names in nama_checklist), adds
    jml_checklist[k] items. Replaces four nearly identical elif branches
    with one data-driven loop.
    NOTE(review): API key/token are hard-coded; move them to config/env.
    """
    for k, checklist_name in enumerate(nama_checklist):
        # The original code named items 'cobaa' for 'Permintaan Dokumen'
        # and 'coba' for the other checklists; preserved here.
        item_name = 'cobaa' if checklist_name == 'Permintaan Dokumen' else 'coba'
        newUrl = "https://api.trello.com/1/checklists/%s/checkItems" % id[k]
        for _ in range(jml_checklist[k]):
            print(newUrl)
            query = {
                'key': '2fb50ddab4bccb72c4f25e03680ae024',
                'token': '9ea8aab5b36bd7671f354d5f6597386683f6aef8c3cfb6f4c224d8b09f43996a',
                'name': item_name,
            }
            response = requests.request(
                "POST",
                newUrl,
                params=query
            )
if __name__ == "__main__":
    # Development server only; debug=True must not be used in production.
    app.run(debug=True)
|
import psycopg2
import traceback
def execute_select(query):
    """Run a SELECT and return rows as a list of column-name -> value dicts.

    Returns None when the connection or query fails (error is printed).
    """
    connection = None
    cursor = None
    try:
        connection = get_connection_object()
        cursor = connection.cursor()
        cursor.execute(query)
        column_names = [col[0] for col in cursor.description]
        return [dict(zip(column_names, row)) for row in cursor.fetchall()]
    except (Exception, psycopg2.Error) as error:
        print("Error while connecting to PostgreSQL", error)
    finally:
        # Bug fix: guard each handle separately -- `cursor` can be None
        # when the connection opened but cursor creation failed, which
        # previously raised AttributeError here.
        if cursor:
            cursor.close()
        if connection:
            connection.close()
def execute_query(query):
    """Run a statement and commit; return True on success, None on error."""
    connection = None
    cursor = None
    try:
        connection = get_connection_object()
        cursor = connection.cursor()
        cursor.execute(query)
        connection.commit()
        print(cursor.rowcount, "affected rows by query")
        return True
    except (Exception, psycopg2.Error) as error:
        print("Error while connecting to PostgreSQL", error)
    finally:
        # Bug fix: `cursor` may be None even when `connection` is not;
        # close each handle only if it was created.
        if cursor:
            cursor.close()
        if connection:
            connection.close()
def execute_insert(table_name, data):
    """Insert rows into `table_name`.

    `data` is an iterable of dicts mapping column name -> value; all rows
    must use the first row's columns. Values are passed as bound
    parameters -- the previous version interpolated them into the SQL
    string, which broke on quotes and was SQL-injectable.
    NOTE(review): `table_name` and column names are still interpolated,
    so they must come from trusted code, never user input.
    """
    connection = None
    cursor = None
    try:
        rows = list(data)
        if not rows:
            # Edge case: previously produced malformed SQL for empty input.
            print("No records to insert")
            return
        connection = get_connection_object()
        cursor = connection.cursor()
        columns = list(rows[0])
        str_columns = ','.join('"' + column + '"' for column in columns)
        placeholders = '(' + ','.join(['%s'] * len(columns)) + ')'
        insert_query = f"INSERT INTO {table_name} ({str_columns}) VALUES {placeholders}"
        cursor.executemany(insert_query, [[row[column] for column in columns] for row in rows])
        connection.commit()
        print(cursor.rowcount, "Record inserted successfully")
    except (Exception, psycopg2.Error) as error:
        print("Error while inserting into PostgreSQL:", error)
        traceback.print_exc()
    finally:
        # Guard both handles (cursor may be None if connect failed late).
        if cursor:
            cursor.close()
        if connection:
            connection.close()
def get_connection_object():
    """Open a new psycopg2 connection to the pondera database.

    NOTE(review): credentials and host are hard-coded in source; they
    should be moved to environment variables or a secrets store.
    """
    connection = psycopg2.connect(
        user="ponderausr",
        password="ponderapwd",
        host="pondera.clsjvgvvhooq.us-east-1.rds.amazonaws.com",
        port="5432",
        database="pondera"
    )
    return connection
|
from sys import exit
from random import randrange
class Scene(object):
    """Abstract base for game scenes; subclasses must override enter()."""

    def enter(self):
        # Abstract-method guard: reaching this means a subclass forgot to
        # implement enter(); abort the program.
        print('This scene is not yet configured.')
        print('Subclass it and implement enter().')
        exit(1)
class Engine(object):
    """Drives the game loop over a scene map."""

    def __init__(self, scene_map):
        self.scene_map = scene_map

    def play(self):
        """Run scenes until the 'finished' scene is reached, then enter it."""
        scene = self.scene_map.opening_scene()
        final_scene = self.scene_map.next_scene('finished')
        while scene != final_scene:
            scene = self.scene_map.next_scene(scene.enter())
        # Enter the terminal scene exactly once.
        scene.enter()
class CentralCorridor(Scene):
    """Opening scene: intro text, then a weapon choice against a Gothon."""

    def enter(self):
        print('''Aliens have invaded your space ship,
You have to go through a maze of rooms defeating them
Your escape will be through an escape pod before you destroy the ship.
Good Luck
Welcome to Space Hero.''')
        print('_' * 30)
        print('You\'re at the central corridor and you have an Aichmophobic gothon in front of you')
        print('''There are 4 items on the floor beside you.
1.A Needle
2.A rail gun
3.An Axe
4.A flame thrower
5.A grenade''')
        print('_' * 30)
        print('What do you do?')
        print('_' * 30)
        while True:
            print('''Do you:
1. Use the needle to scare the Gothon.
2. Shoot the Gothon with the rail gun.
3. Attack the Gothon with the axe.
4. Use the flame thrower on the Gothon.
5. Use the grenade.''')
            choice = input('>> ')
            if choice == '2' or choice == '3':
                Death().Death1('The gothon gets upset and slaps the weapon out of your hand.')
            elif choice == '4' or choice == '5':
                Death().Death2('Gothon blood is flammable.')
            elif choice == '1':
                print('smart choice.')
                return 'laser_weapon_armory'
            else:
                # Bug fix: unrecognized input previously fell through and
                # returned None, crashing Engine.play(); re-prompt instead.
                print('Sorry, I didn\'t get that.')
class LaserWeaponArmory(Scene):
    """Keypad puzzle scene.

    NOTE(review): three random pins are displayed, but option '2' is
    always the correct answer regardless of the numbers shown -- likely
    intentional for this toy game; confirm.
    """

    def enter(self):
        print('_' * 30)
        print('Welcome to the Laser Weapon Armory.')
        print('Input the right pin into the keypad to get a neutron bomb.')
        pin_right = False  # NOTE(review): assigned but never read
        print('_' * 30)
        while True:
            # print() returns None, so pin1/pin2/pin3 are always None;
            # the calls exist only for the side effect of printing.
            pin1 = print('1.' + str(randrange(1000, 9999)))
            pin2 = print('2.' + str(randrange(1000, 9999)))
            pin3 = print('3.' + str(randrange(1000, 9999)))
            choice = input('>> ')
            if choice == '1':
                print('Wrong. You have another chance')
                pin_right = False
            elif choice == '2':
                print('Pin correct.Take your neutron Bomb.')
                pin_right = True
                return 'the_bridge'
            elif choice == '3':
                print('Wrong you have another chance')
                pin_right = False
            else:
                print('Sorry didn\'t get that')
                pin_right = False
class TheBridge(Scene):
    """Scene where the player gets past the second Gothon to the pod."""

    def enter(self):
        print('_' * 30)
        print('''Now you're at the bridge.
Here you find another gothon,
Your only obstacle to planting the bomb.''')
        print('_' * 30)
        print('''There are 2 items on the floor next to you..
1. A Human Brain
2. A Space Rocket Launcher''')
        print('_' * 30)
        print('What do you do?')
        print('_' * 30)
        while True:
            print('''Do you:
1. Throw the brain at the Gothon.
2. Shoot a rocket at the Gothon.''')
            choice = input('>> ')
            if choice == '1':
                print('Smart choice')
                return 'escape_pod'
            elif choice == '2':
                # Bug fix: was `death.Death2(...)` -- NameError, no
                # `death` instance exists anywhere in the program.
                Death().Death2('In shooting you accidentaly activate the bomb.')
            else:
                # Bug fix: any other input previously returned None and
                # crashed Engine.play(); re-prompt instead.
                print('Sorry didn\'t get that')
class EscapePod(Scene):
    """Final puzzle: pick an escape pod.

    NOTE(review): pods 4 and 6 are always correct regardless of the
    random numbers printed -- presumably intentional; confirm.
    """

    def enter(self):
        print('_' * 30)
        print('You have made it to the end of the game now you have to escape.')
        print('Pick the right escape pod.')
        pin_right = False  # NOTE(review): assigned but never read
        print('_' * 30)
        while True:
            # print() returns None; the podN variables exist only for the
            # side effect of printing random pod labels.
            pod1 = print('Pod1. ' + str(randrange(1,50)))
            pod2 = print('Pod2. ' + str(randrange(1,50)))
            pod3 = print('Pod3. ' + str(randrange(1,50)))
            pod4 = print('Pod4. ' + str(randrange(1,50)))
            pod5 = print('Pod5. ' + str(randrange(1,50)))
            pod6 = print('Pod6. ' + str(randrange(1,50)))
            pod7 = print('Pod7. ' + str(randrange(1,50)))
            choice = input('>> ')
            # W = wrong pod choices, R = right (winning) pod choices
            W = ['1', '2', '3', '5', '7']
            R = ['4', '6']
            if choice in W:
                print('Wrong pod')
                pin_right = False
            elif choice in R:
                print('Correct pod')
                return 'finished'
            else:
                print('Sorry didn\'t get that')
class Death(Scene):
    """Terminal failure scenes; both variants print a reason and exit."""

    def Death1(self, why):
        """Death by Gothon abduction."""
        self._game_over(why, 'The Gothon then abducts you and melts your brain.')

    def Death2(self, why):
        """Death by explosion."""
        self._game_over(why, 'You blow up yourself and the gothon to pieces.')

    def _game_over(self, why, detail):
        # Shared epilogue for both death variants.
        print('Wrong choice.')
        print(why)
        print(detail)
        print('Game Over.')
        exit()
class Finished(Scene):
    """Winning scene, shown once the player escapes."""

    def enter(self):
        print('You won! Good job.')
        return 'finished'
class Map(object):
    """Registry of scene instances keyed by scene name."""

    # Class-level: one shared instance of every scene.
    scenes = {
        'central_corridor': CentralCorridor(),
        'laser_weapon_armory': LaserWeaponArmory(),
        'the_bridge': TheBridge(),
        'escape_pod': EscapePod(),
        'death': Death(),
        'finished': Finished(),
    }

    def __init__(self, start_scene):
        # Name (key into `scenes`) of the scene the game starts in.
        self.start_scene = start_scene

    def next_scene(self, scene_name):
        """Return the scene registered under scene_name (None if absent)."""
        val = Map.scenes.get(scene_name)
        return val

    def opening_scene(self):
        """Return the starting scene instance."""
        return self.next_scene(self.start_scene)
if __name__ == "__main__":
    # Start the game only when run as a script, not on import.
    a_map = Map('central_corridor')
    a_game = Engine(a_map)
    a_game.play()
|
"""PAWN GOING TO END AND CHECKMATE WILL STAY INCOMPLETE FOR THE NEAR FUTURE."""
# cd C:\Users\nellissery\Desktop\python code\Chess
# python main.py
# to do
# castling (it works, but for some reason you have to double click, and an ineffective error is given)
# checkmate condition(not done)
from tkinter import *
from tkinter import colorchooser
import itertools
"""...................Setup Menu.............................."""
# Board/rendering constants and game-mode flags.
BOARD_X = 8
BOARD_Y = 8
CUBE_X = 64
CUBE_Y = 64
TEXT_SIZE = 48
COORDS = list(range(0, 8))
turn = "white"  # / "black"
swap = False
theocracy = False
# tile colour1, tile colour2, pieceColour1, pieceColour2
colours = ['#008000', '#ffff00', '#510051', '#0000ff']
borderColour = "Red"
bkgColour = "SteelBlue"
# setup menu initialization
setup = Tk()
setup.title("setup")
setupMenu = Menu(setup)
setup.geometry("170x240")
setup.config(background="silver", menu=setupMenu)
setup.resizable(width=False, height=False)
# setup functions
def colourChanger(colourIndex):
    """Ask the user for a colour, store it, and rebuild the preview labels.

    Bug fix: colorchooser.askcolor() returns (None, None) when the dialog
    is cancelled; previously that stored None in `colours` and broke the
    preview labels. Cancelling is now a no-op.
    """
    global colours, tile1Piece1Preview, tile1Piece2Preview, tile2Piece1Preview, tile2Piece2Preview
    chosen = colorchooser.askcolor()[1]
    if chosen is None:
        return
    colours[colourIndex] = chosen
    print(colours)
    tile1Piece1Preview.grid_forget()
    tile2Piece2Preview.grid_forget()
    tile1Piece2Preview.grid_forget()
    tile2Piece1Preview.grid_forget()
    # NOTE(review): font size 64 here vs TEXT_SIZE (48) in setupMain --
    # confirm whether this difference is intentional.
    tile1Piece1Preview = Label(previewPanel, text="♔", bg=colours[0], fg=colours[2], font=("Helvetica", 64))
    tile2Piece2Preview = Label(previewPanel, text="♔", bg=colours[1], fg=colours[3], font=("Helvetica", 64))
    tile1Piece2Preview = Label(previewPanel, text="♔", bg=colours[0], fg=colours[3], font=("Helvetica", 64))
    tile2Piece1Preview = Label(previewPanel, text="♔", bg=colours[1], fg=colours[2], font=("Helvetica", 64))
    tile1Piece1Preview.grid(row=0, column=0)
    tile2Piece2Preview.grid(row=0, column=1)
    tile1Piece2Preview.grid(row=1, column=1)
    tile2Piece1Preview.grid(row=1, column=0)
# main loop
def setupMain():
    """Build the setup window: colour menus, game-mode menu, start button,
    and the 2x2 colour-preview panel (labels are rebound globally so
    colourChanger can replace them)."""
    # setup menu
    global colours, tile1Piece1Preview, tile1Piece2Preview, tile2Piece1Preview, tile2Piece2Preview, previewPanel
    tileColorMenu = Menu(setupMenu, tearoff=0)
    setupMenu.add_cascade(label="Tile Colour", menu=tileColorMenu)
    tileColorMenu.add_cascade(label="Tile Colour 1", command=lambda: colourChanger(0))
    tileColorMenu.add_cascade(label="Tile Colour 2", command=lambda: colourChanger(1))
    pieceColourMenu = Menu(setupMenu, tearoff=0)
    setupMenu.add_cascade(label="Piece colour", menu=pieceColourMenu)
    pieceColourMenu.add_cascade(label="Piece Colour 1", command=lambda: colourChanger(2))
    pieceColourMenu.add_cascade(label="Piece Colour 2", command=lambda: colourChanger(3))
    gameModeMenu = Menu(setupMenu, tearoff=0)
    setupMenu.add_cascade(label="Game Mode", menu=gameModeMenu)
    gameModeMenu.add_cascade(label="Color Swaps", command=colorSwap)
    gameModeMenu.add_cascade(label="Helen Keller", command=helenKeller)
    gameModeMenu.add_cascade(label="Haitian", command=haitian)
    gameModeMenu.add_cascade(label="Theocratic", command=theocratic)
    # creating and gridding setup buttons; Start simply closes the window,
    # which ends setup.mainloop() and lets the game start.
    startButton = Button(setup, text="Start Game", command=setup.destroy)
    startButton.grid(row=2, column=0, pady=10, sticky=W+E+N+S)
    # creating preview Panel
    previewPanel = PanedWindow(setup, bd=10, relief="raised", bg="SteelBlue")
    previewPanel.grid(row=0, column=0, sticky=W+E+N+S)
    # putting preview pieces on preview Panel (all tile/piece colour pairs)
    tile1Piece1Preview = Label(previewPanel, text="♔", bg=colours[0], fg=colours[2], font=("Helvetica", TEXT_SIZE))
    tile2Piece2Preview = Label(previewPanel, text="♔", bg=colours[1], fg=colours[3], font=("Helvetica", TEXT_SIZE))
    tile1Piece2Preview = Label(previewPanel, text="♔", bg=colours[0], fg=colours[3], font=("Helvetica", TEXT_SIZE))
    tile2Piece1Preview = Label(previewPanel, text="♔", bg=colours[1], fg=colours[2], font=("Helvetica", TEXT_SIZE))
    tile1Piece1Preview.grid(row=0, column=0)
    tile2Piece2Preview.grid(row=0, column=1)
    tile1Piece2Preview.grid(row=1, column=1)
    tile2Piece1Preview.grid(row=1, column=0)
def helenKeller():
    """Game mode: paint every tile and piece black, with a dark border."""
    global colours, borderColour
    # Replace all four colours in place; removed the dead trailing `pass`.
    colours[:] = ["black", "black", "black", "black"]
    borderColour = "DarkSlateGrey"
def colorSwap():
    """Game mode: set the global `swap` flag."""
    global swap
    swap = True
def haitian():
    """Game mode: black moves first (sets the starting turn to 'black')."""
    global turn
    turn = "black"
def theocratic():
    """Game-mode toggle: the board is created without queens
    (consumed by createBase)."""
    global theocracy
    theocracy = True
# Run the setup dialog; mainloop() blocks until the "Start Game" button
# destroys the setup window, then the board section below executes.
setupMain()
setup.mainloop()
"""....................Game Board........................."""
# chess board variables
activePiece = [] # the active piece, i.e with highlighted piece, egs :[1, 1, '#008000', '♟', '#510051']
activePieceBool = "NoActivePiece" #/ "ActivePiece"
cellList = [] # list that contains all the pieces and their attributes
moves = [] # list that has the possible moves for a piece
# chess Board Constants and bools
justActivated = False # a bool designed specifically for the pieceActivate function, to make it so that the piece doesnt re-activate after it has moved
turnChange = False # bool to check if the turn has been changed in one round
coordList = [] # list with all the coordinates, note to self: xcoord is downward, while ycoord is vertical
kingMoved = False # to check if the king has moved, needed for castling
castling = False
# Lists of all possible moves
# NOTE(review): these per-piece move lists are never populated or read in
# the visible code — confirm they are used elsewhere before relying on them.
# white
wPawn = []
wRook = []
wPriest = []
wKing = []
wQueen = []
wKnight = []
wMoves = [wPawn, wPriest, wKing, wQueen, wKnight, wRook]
# black
bPawn = []
bRook = []
bPriest = []
bKing = []
bQueen = []
bKnight = []
bMoves = [bPawn, bPriest, bKing, bQueen, bKnight, bRook]
# chess board class
class Cell:
    """One board square: a Tk Label holding (at most) one chess piece.

    Creating a Cell draws/overwrites the square at (xCoord, yCoord) on
    *master*.  While the board is first being built (fewer than 64 entries)
    it also records [x, y, tileColour, pieceType, pieceColour] in the global
    cellList; later Cells (redraws) reuse the existing entries.  All game
    logic lives in closures bound to the Label's mouse events.
    """
    def __init__(self, master, xCoord, yCoord, tileColour, pieceType, pieceColour, ACTIVE):
        # NOTE(review): the ACTIVE parameter is never read in this class.
        global activePiece, cellList, activePieceBool, justActivated
        self.yCoord = yCoord
        self.tileColour = tileColour
        self.pieceType = pieceType
        self.pieceColour = pieceColour
        currentCellCoord = (xCoord, yCoord)  # captured by the event closures below
        # Only the first 64 Cells populate cellList; redraw Cells do not append.
        if len(cellList) < 64:
            cellList.append([xCoord, yCoord, tileColour, pieceType, pieceColour])
        def hoverAnimation(event=None): # func that gives red highlight when entering a cell
            # Highlight only pieces belonging to the side to move, and only
            # while no piece is currently selected.
            global activePiece, activePieceBool, turn
            if turn == "white":
                if pieceType in "♙♖♗♘♕♔" and activePieceBool == "NoActivePiece":
                    pieceLabel["fg"] = borderColour
            if turn == "black":
                if pieceType in "♟♜♝♞♛♚" and activePieceBool == "NoActivePiece":
                    pieceLabel["fg"] = borderColour
        def removeHover(event=None): # func that removes red highlight when leaving a cell
            global activePiece,activePieceBool
            if pieceType in "♔♝♞♚♖♛♟♜♘♗♕♔♙" and activePieceBool == "NoActivePiece":
                pieceLabel["fg"] = pieceColour
        def blocked(x, y):
            # True when square (x, y) is occupied by any piece.  Side effect:
            # an enemy-occupied square is appended to the global `moves` list
            # as a capture option for every piece type except pawns.
            global moves
            if pieceFromCoords(x,y)[4] != activePiece[4] and pieceFromCoords(x,y)[3] in "♔♝♞♚♖♛♟♜♘♗♕♔♙" :
                if activePiece[3] not in "♙♟":
                    moves.append((x,y))
                return True # saying that the obj is blocked
            if pieceFromCoords(x,y)[4] == activePiece[4] and pieceFromCoords(x,y)[3] in "♔♝♞♚♖♛♟♜♘♗♕♙" and x in range(0,8) and y in range(0,8):
                return True
            else:
                return False
        def pawnMove(xCoordTo, yCoordTo, xCoordFrom, yCoordFrom):
            # Build the pawn's move list (forward steps + diagonal captures)
            # and return True when (xCoordTo, yCoordTo) is among them.
            global moves
            if activePiece[3] in "♟": # black pawn
                # NOTE(review): `xCoordFrom == 6` also allows a black pawn on
                # row 6 to double-step — looks like a copy of the white-pawn
                # condition; confirm intended.
                if xCoordFrom == 6 or xCoordFrom == 1: # initial pos can have double step or single step
                    if not blocked(xCoordFrom + 2, yCoordFrom):
                        moves.append((xCoordFrom + 2, yCoordFrom))
                    if not blocked(xCoordFrom + 1, yCoordFrom):
                        moves.append((xCoordFrom + 1, yCoordFrom))
                elif not blocked(xCoordFrom + 1, yCoordFrom): # for all other pos, single step
                    moves.append((xCoordFrom + 1, yCoordFrom))
                # Diagonal captures: only when the target holds an enemy piece.
                if pieceFromCoords(xCoordFrom + 1, yCoordFrom - 1)[4] != activePiece[4] and blocked(xCoordFrom + 1, yCoordFrom - 1):
                    moves.append((xCoordFrom + 1, yCoordFrom - 1))
                if pieceFromCoords(xCoordFrom + 1, yCoordFrom + 1)[4] != activePiece[4] and blocked(xCoordFrom + 1, yCoordFrom + 1):
                    moves.append((xCoordFrom + 1, yCoordFrom + 1))
                if (xCoordTo, yCoordTo) in moves:
                    moves = []
                    return True
            if activePiece[3] in "♙": # white pawn
                # NOTE(review): mirrored condition — `xCoordFrom == 1` lets a
                # white pawn on row 1 double-step; confirm intended.
                if xCoordFrom == 6 or xCoordFrom == 1:
                    if not blocked(xCoordFrom - 2, yCoordFrom):
                        moves.append((xCoordFrom - 2, yCoordFrom))
                    if not blocked(xCoordFrom - 1, yCoordFrom):
                        moves.append((xCoordFrom - 1, yCoordFrom))
                elif not blocked(xCoordFrom - 1, yCoordFrom):
                    moves.append((xCoordFrom - 1, yCoordFrom))
                if pieceFromCoords(xCoordFrom - 1, yCoordFrom - 1)[4] != activePiece[4] and blocked(xCoordFrom - 1, yCoordFrom - 1):
                    moves.append((xCoordFrom - 1, yCoordFrom - 1))
                if pieceFromCoords(xCoordFrom - 1, yCoordFrom + 1)[4] != activePiece[4] and blocked(xCoordFrom - 1, yCoordFrom + 1):
                    moves.append((xCoordFrom - 1, yCoordFrom + 1))
                if (xCoordTo, yCoordTo) in moves:
                    moves = []
                    return True
        def rookMove(xCoordTo, yCoordTo, xCoordFrom, yCoordFrom):
            # Scan the four orthogonal rays until the edge or a blocker.
            # NOTE(review): run2/bRook/wRook are declared global but unused here.
            global moves, coordList, run2, bRook, wRook
            run = True
            # The `while run` loop executes exactly once (run is cleared at the
            # bottom); it only serves to group the four ray scans.
            while run == True:
                i = 1
                f = -1
                while (xCoordFrom + f) in range(0,8) and not blocked(xCoordFrom + f, yCoordFrom):
                    moves.append((xCoordFrom + f, yCoordFrom))
                    f += -1
                f = -1
                while (yCoordFrom + f) in range(0,8) and not blocked(xCoordFrom, yCoordFrom + f):
                    moves.append((xCoordFrom, yCoordFrom + f))
                    f += -1
                f = -1
                while (yCoordFrom + i) in range(0,8) and not blocked(xCoordFrom, yCoordFrom + i):
                    moves.append((xCoordFrom, yCoordFrom + i))
                    i += 1
                i = 1
                while (xCoordFrom + i) in range(0,8) and not blocked(xCoordFrom + i, yCoordFrom):
                    moves.append((xCoordFrom + i, yCoordFrom))
                    i += 1
                i = 1
                run = False
            if (xCoordTo, yCoordTo) in moves:
                moves = []
                return True
        def priestMove(xCoordTo, yCoordTo, xCoordFrom, yCoordFrom):
            # Bishop ("priest"): scan the four diagonal rays.
            global moves, coordList
            run = True
            # Single-pass grouping loop, same idiom as rookMove.
            while run == True:
                i = 1
                while (xCoordFrom - i) in range(0,8) and (yCoordFrom-i) in range(0,8) and not blocked(xCoordFrom - i, yCoordFrom - i):
                    moves.append((xCoordFrom - i, yCoordFrom - i))
                    i += 1
                i = 1
                while (xCoordFrom + i) in range(0,8) and (yCoordFrom - i) in range(0,8) and not blocked(xCoordFrom + i, yCoordFrom - i):
                    moves.append((xCoordFrom + i, yCoordFrom - i))
                    i += 1
                i = 1
                while (xCoordFrom- i) in range(0,8) and (yCoordFrom + i) in range(0,8) and not blocked(xCoordFrom - i, yCoordFrom + i):
                    moves.append((xCoordFrom - i, yCoordFrom + i))
                    i += 1
                i = 1
                while (xCoordFrom + i) in range(0,8) and (yCoordFrom + i) in range(0,8) and not blocked(xCoordFrom + i, yCoordFrom + i):
                    moves.append((xCoordFrom + i, yCoordFrom + i))
                    i += 1
                i = 1
                run = False
            if (xCoordTo, yCoordTo) in moves:
                moves = []
                return True
        def knightMove(xCoordTo, yCoordTo, xCoordFrom, yCoordFrom):
            # The eight L-shaped knight targets, bounds-checked individually.
            global moves, coordList
            if xCoordFrom + 2 in range(0,8) and yCoordFrom + 1 in range(0,8) and not blocked(xCoordFrom + 2, yCoordFrom + 1):
                moves.append((xCoordFrom + 2, yCoordFrom + 1))
            if xCoordFrom + 2 in range(0,8) and yCoordFrom - 1 in range(0,8) and not blocked(xCoordFrom + 2, yCoordFrom - 1):
                moves.append((xCoordFrom + 2, yCoordFrom - 1))
            if xCoordFrom - 2 in range(0,8) and yCoordFrom - 1 in range(0,8) and not blocked(xCoordFrom - 2, yCoordFrom - 1):
                moves.append((xCoordFrom - 2, yCoordFrom - 1))
            if xCoordFrom - 2 in range(0,8) and yCoordFrom + 1 in range(0,8) and not blocked(xCoordFrom - 2, yCoordFrom + 1):
                moves.append((xCoordFrom - 2, yCoordFrom + 1))
            if xCoordFrom + 1 in range(0,8) and yCoordFrom - 2 in range(0,8) and not blocked(xCoordFrom + 1, yCoordFrom - 2):
                moves.append((xCoordFrom + 1, yCoordFrom - 2))
            if xCoordFrom - 1 in range(0,8) and yCoordFrom + 2 in range(0,8) and not blocked(xCoordFrom - 1, yCoordFrom + 2):
                moves.append((xCoordFrom - 1, yCoordFrom + 2))
            if xCoordFrom + 1 in range(0,8) and yCoordFrom + 2 in range(0,8) and not blocked(xCoordFrom + 1, yCoordFrom + 2):
                moves.append((xCoordFrom + 1, yCoordFrom + 2))
            if xCoordFrom - 1 in range(0,8) and yCoordFrom - 2 in range(0,8) and not blocked(xCoordFrom - 1, yCoordFrom - 2):
                moves.append((xCoordFrom - 1, yCoordFrom - 2))
            if (xCoordTo, yCoordTo) in moves:
                moves = []
                return True
        def queenMove(xCoordTo, yCoordTo, xCoordFrom, yCoordFrom):
            # Queen = bishop rays + rook rays (same scans as priestMove/rookMove).
            global moves, coordList, run2
            run = True
            while run == True:
                i = 1
                while (xCoordFrom - i) in range(0,8) and (yCoordFrom-i) in range(0,8) and not blocked(xCoordFrom - i, yCoordFrom - i):
                    moves.append((xCoordFrom - i, yCoordFrom - i))
                    i += 1
                i = 1
                while (xCoordFrom + i) in range(0,8) and (yCoordFrom - i) in range(0,8) and not blocked(xCoordFrom + i, yCoordFrom - i):
                    moves.append((xCoordFrom + i, yCoordFrom - i))
                    i += 1
                i = 1
                while (xCoordFrom- i) in range(0,8) and (yCoordFrom + i) in range(0,8) and not blocked(xCoordFrom - i, yCoordFrom + i):
                    moves.append((xCoordFrom - i, yCoordFrom + i))
                    i += 1
                i = 1
                while (xCoordFrom + i) in range(0,8) and (yCoordFrom + i) in range(0,8) and not blocked(xCoordFrom + i, yCoordFrom + i):
                    moves.append((xCoordFrom + i, yCoordFrom + i))
                    i += 1
                i = 1
                f = -1
                while (xCoordFrom + f) in range(0,8) and not blocked(xCoordFrom + f, yCoordFrom):
                    moves.append((xCoordFrom + f, yCoordFrom))
                    f += -1
                f = -1
                while (yCoordFrom + f) in range(0,8) and not blocked(xCoordFrom, yCoordFrom + f):
                    moves.append((xCoordFrom, yCoordFrom + f))
                    f += -1
                f = -1
                while (yCoordFrom + i) in range(0,8) and not blocked(xCoordFrom, yCoordFrom + i):
                    moves.append((xCoordFrom, yCoordFrom + i))
                    i += 1
                i = 1
                while (xCoordFrom + i) in range(0,8) and not blocked(xCoordFrom + i, yCoordFrom):
                    moves.append((xCoordFrom + i, yCoordFrom))
                    i += 1
                i = 1
                run = False
            if (xCoordTo, yCoordTo) in moves:
                moves = []
                return True
        def kingMove(xCoordTo, yCoordTo, xCoordFrom, yCoordFrom):
            # One-square king moves in all eight directions, plus castling.
            global moves, cellList, kingMoved, castling
            if xCoordFrom + 1 in range(0,8) and yCoordFrom + 1 in range(0,8) and not blocked(xCoordFrom + 1, yCoordFrom + 1):
                moves.append((xCoordFrom + 1, yCoordFrom + 1))
            # NOTE(review): the next two bounds checks test different squares
            # than the ones appended/blocked-checked — likely copy-paste bug;
            # confirm before relying on edge-of-board king moves.
            if xCoordFrom + 1 in range(0,8) and yCoordFrom - 1 in range(0,8) and not blocked(xCoordFrom - 1, yCoordFrom - 1):
                moves.append((xCoordFrom - 1, yCoordFrom - 1))
            if xCoordFrom + 1 in range(0,8) and yCoordFrom - 1 in range(0,8) and not blocked(xCoordFrom + 1, yCoordFrom - 1):
                moves.append((xCoordFrom + 1, yCoordFrom - 1))
            if xCoordFrom - 1 in range(0,8) and yCoordFrom + 1 in range(0,8) and not blocked(xCoordFrom - 1, yCoordFrom + 1):
                moves.append((xCoordFrom - 1, yCoordFrom + 1))
            if xCoordFrom - 1 in range(0,8) and yCoordFrom in range(0,8) and not blocked(xCoordFrom - 1, yCoordFrom):
                moves.append((xCoordFrom - 1, yCoordFrom))
            if xCoordFrom + 1 in range(0,8) and yCoordFrom in range(0,8) and not blocked(xCoordFrom + 1, yCoordFrom):
                moves.append((xCoordFrom + 1, yCoordFrom))
            if xCoordFrom in range(0,8) and yCoordFrom + 1 in range(0,8) and not blocked(xCoordFrom, yCoordFrom + 1):
                moves.append((xCoordFrom, yCoordFrom + 1))
            if xCoordFrom in range(0,8) and yCoordFrom - 1 in range(0,8) and not blocked(xCoordFrom, yCoordFrom - 1):
                moves.append((xCoordFrom, yCoordFrom - 1))
            # Castling: pieces are moved by writing directly into cellList
            # entries (via pieceFromCoords); the caller redraws afterwards.
            if not kingMoved:
                if activePiece[3] == "♚":
                    if (xCoordTo, yCoordTo) == (0,6) and not blocked(0,5) and not blocked(0,6) and pieceFromCoords(0,7)[3] == "♜":
                        pieceFromCoords(0,6)[3] = "♚"
                        pieceFromCoords(0,5)[3] = "♜"
                        pieceFromCoords(0,7)[3] = "\u2003"
                        pieceFromCoords(0,4)[3] = "\u2003"
                        castling = True
                        return True
                    if (xCoordTo, yCoordTo) == (0,1) and not blocked(0,1) and not blocked(0,2) and not blocked(0,3) and pieceFromCoords(0,0)[3] == "♜":
                        pieceFromCoords(0,1)[3] = "♚"
                        pieceFromCoords(0,2)[3] = "♜"
                        pieceFromCoords(0,0)[3] = "\u2003"
                        pieceFromCoords(0,4)[3] = "\u2003"
                        castling = True
                        return True
                if activePiece[3] == "♔":
                    if (xCoordTo, yCoordTo) == (7,6) and not blocked(7,5) and not blocked(7,6) and pieceFromCoords(7,7)[3] == "♖":
                        pieceFromCoords(7,6)[3] = "♔"
                        pieceFromCoords(7,5)[3] = "♖"
                        pieceFromCoords(7,7)[3] = "\u2003"
                        pieceFromCoords(7,4)[3] = "\u2003"
                        castling = True
                        return True
                    if (xCoordTo, yCoordTo) == (7,1) and not blocked(7,1) and not blocked(7,2) and not blocked(7,3) and pieceFromCoords(7,0)[3] == "♖":
                        pieceFromCoords(7,1)[3] = "♔"
                        pieceFromCoords(7,2)[3] = "♖"
                        pieceFromCoords(7,0)[3] = "\u2003"
                        pieceFromCoords(7,4)[3] = "\u2003"
                        castling = True
                        return True
            if (xCoordTo, yCoordTo) in moves:
                kingMoved = True
                moves = []
                return True
        def availableMove(xCoordTo, yCoordTo, xCoordFrom, yCoordFrom): # func that checks if anything is blocking the piece, and if it is following piece rules
            # Dispatch to the mover for the currently selected piece type;
            # each mover returns True (legal) or None (illegal).
            global moves, castling
            coordList = []
            [coordList.append((i[0], i[1])) for i in cellList]
            moves = []
            if activePiece[3] in "♟♙":
                return pawnMove(xCoordTo, yCoordTo, xCoordFrom, yCoordFrom)
            if activePiece[3] in "♖♜":
                return rookMove(xCoordTo, yCoordTo, xCoordFrom, yCoordFrom)
            if activePiece[3] in "♗♝":
                return priestMove(xCoordTo, yCoordTo, xCoordFrom, yCoordFrom)
            if activePiece[3] in "♘♞":
                return knightMove(xCoordTo, yCoordTo, xCoordFrom, yCoordFrom)
            if activePiece[3] in "♛♕":
                return queenMove(xCoordTo, yCoordTo, xCoordFrom, yCoordFrom)
            if activePiece[3] in "♚♔":
                return kingMove(xCoordTo, yCoordTo, xCoordFrom, yCoordFrom)
        def checkMate():
            # NOTE(review): stub — returns True as soon as the active piece is
            # a king, and is never called in the visible code; confirm intent.
            allOpponentMoves = [] # takes every move the enemy can make, and checks if all of them restrict kings move completely
            if activePiece[3] == "♚":
                return True
            if activePiece[3] == "♔":
                return True
        def pieceMovement(event=None):
            # Left-click handler: first click selects a friendly piece, the
            # second click attempts a move/capture to this square.
            global activePiece, activePieceBool, justActivated, turn, turnChange, castling
            if turn == "white":
                if pieceType in "♙♖♗♘♕♔" and activePieceBool == "NoActivePiece":
                    activePiece = pieceFromCoords(currentCellCoord[0], currentCellCoord[1])
                    activePieceBool = "ActivePiece"
                    justActivated = True
            if turn == "black":
                if pieceType in "♟♜♝♞♛♚" and activePieceBool == "NoActivePiece":
                    activePiece = pieceFromCoords(currentCellCoord[0], currentCellCoord[1])
                    activePieceBool = "ActivePiece"
                    justActivated = True
            if activePieceBool == "ActivePiece" and not justActivated:
                clickedPiece = pieceFromCoords(currentCellCoord[0], currentCellCoord[1])
                print(clickedPiece)
                if castling:
                    # Castling already moved the pieces inside kingMove; just
                    # clear the selection, flip the turn and redraw.
                    # (turnChange guards against flipping the turn twice.)
                    activePiece = []
                    activePieceBool = "NoActivePiece"
                    if turn == "white" and not turnChange:
                        turn = "black"
                        turnChange = True
                    if turn == "black" and not turnChange:
                        turn = "white"
                        turnChange = True
                    turnChange = False
                    castling = False
                    baseReDraw()
                    print(len(cellList))
                if clickedPiece[3] != "\u2003" and clickedPiece[4] != activePiece[4] and availableMove(currentCellCoord[0], currentCellCoord[1], activePiece[0], activePiece[1]) and not castling: # for an opponent clicked cell
                    # Capture: copy the moving piece onto this cell, blank the
                    # source cell ("\u2003" = em space marks "no piece").
                    clickedPiece[3] = activePiece[3]
                    clickedPiece[4] = activePiece[4]
                    activePiece[3] = "\u2003"
                    activePiece = []
                    activePieceBool = "NoActivePiece"
                    if turn == "white" and not turnChange:
                        turn = "black"
                        turnChange = True
                    if turn == "black" and not turnChange:
                        turn = "white"
                        turnChange = True
                    turnChange = False
                    baseReDraw()
                if clickedPiece[3] == "\u2003" and availableMove(currentCellCoord[0], currentCellCoord[1], activePiece[0], activePiece[1]) and not castling: # for a blank clicked cell
                    clickedPiece[3] = activePiece[3]
                    clickedPiece[4] = activePiece[4]
                    activePiece[3] = "\u2003"
                    activePiece = []
                    activePieceBool = "NoActivePiece"
                    if turn == "white" and not turnChange:
                        turn = "black"
                        turnChange = True
                    if turn == "black" and not turnChange:
                        turn = "white"
                        turnChange = True
                    turnChange = False
                    baseReDraw()
            justActivated = False
        def pieceDeactivate(event=None):
            # Right-click handler: clicking the selected piece clears the selection.
            global activePiece, activePieceBool
            if currentCellCoord == (activePiece[0], activePiece[1]):
                pieceLabel["fg"] = pieceColour
                activePiece = []
                activePieceBool = "NoActivePiece"
        # creating the label with the piece
        pieceLabel = Label(master, text=pieceType, bg=tileColour, fg=pieceColour, font=("Helvetica",TEXT_SIZE))
        pieceLabel.grid(row=xCoord, column=yCoord)
        # creating the bindings for one cell
        pieceLabel.bind("<Leave>", removeHover)
        pieceLabel.bind("<Button-1>", pieceMovement)
        pieceLabel.bind("<Button-3>", pieceDeactivate)
        pieceLabel.bind("<Enter>", hoverAnimation)
# board initialization: the main (fixed-size) game window
board = Tk()
board.title("Chess")
board.resizable(width=False, height=False)
# board functions
def createBase(): # function that creates base for the first base
    """Create the initial 64 Cell objects in row-major order: black pieces
    on rows 0-1, four empty rows with an alternating checker pattern, white
    pieces on rows 6-7.  In "theocratic" mode both queens are replaced by a
    blank ("\u2003") cell."""
    global cellList
    patternType = "DarkLight" # / "LightDark"
    # black back rank (hand-placed)
    Cell(boardPanel, 0, 0, colours[0], "♜", colours[2], False)
    Cell(boardPanel, 0, 1, colours[1], "♞", colours[2], False)
    Cell(boardPanel, 0, 2, colours[0], "♝", colours[2], False)
    blackQueen = "\u2003" if theocracy else "♛"
    Cell(boardPanel, 0, 3, colours[1], blackQueen, colours[2], False)
    Cell(boardPanel, 0, 4, colours[0], "♚", colours[2], False)
    Cell(boardPanel, 0, 5, colours[1], "♝", colours[2], False)
    Cell(boardPanel, 0, 6, colours[0], "♞", colours[2], False)
    Cell(boardPanel, 0, 7, colours[1], "♜", colours[2], False)
    # black pawns, alternating tile colour
    for y in COORDS:
        tile = colours[1] if y % 2 == 0 else colours[0]
        Cell(boardPanel, 1, y, tile, "♟", colours[2], False)
    # four empty middle rows; the checker pattern flips after every row
    for x in COORDS[2:6]:
        for y in COORDS:
            if (patternType == "DarkLight") == (y % 2 == 0):
                Cell(boardPanel, x, y, colours[0], " ", colours[2], False)
            else:
                Cell(boardPanel, x, y, colours[1], " ", colours[3], False)
        patternType = "LightDark" if patternType == "DarkLight" else "DarkLight"
    # white pawns
    for y in COORDS:
        tile = colours[0] if y % 2 == 0 else colours[1]
        Cell(boardPanel, 6, y, tile, "♙", colours[3], False)
    # white back rank
    Cell(boardPanel, 7, 0, colours[1], "♖", colours[3], False)
    Cell(boardPanel, 7, 1, colours[0], "♘", colours[3], False)
    Cell(boardPanel, 7, 2, colours[1], "♗", colours[3], False)
    whiteQueen = "\u2003" if theocracy else "♕"
    Cell(boardPanel, 7, 3, colours[0], whiteQueen, colours[3], False)
    Cell(boardPanel, 7, 4, colours[1], "♔", colours[3], False)
    Cell(boardPanel, 7, 5, colours[0], "♗", colours[3], False)
    Cell(boardPanel, 7, 6, colours[1], "♘", colours[3], False)
    Cell(boardPanel, 7, 7, colours[0], "♖", colours[3], False)
def baseReDraw():
    """Redraw every square from cellList.  In colour-swap mode the two tile
    colours are exchanged in place (mutating cellList) before each square
    is redrawn, so the board flips colours on every redraw."""
    global cellList
    for cell in cellList:
        if swap:
            if cell[2] == colours[0]:
                cell[2] = colours[1]
            elif cell[2] == colours[1]:
                cell[2] = colours[0]
        Cell(boardPanel, cell[0], cell[1], cell[2], cell[3], cell[4], False)
def pieceFromCoords(row, column):
    """Return the cellList entry for board square (row, column).

    cellList is stored row-major with 8 cells per row, so the lookup is a
    single index computation."""
    global cellList
    return cellList[row * 8 + column]
# note: "\u2003" (em space) is used throughout as the "no piece" marker
# creating the panel that has the pieces
boardPanel = PanedWindow(board, bd=10, relief="raised", bg="SteelBlue")
boardPanel.grid(row=0, column=0, sticky=W + E + N + S)
# creating blank chessboard for the first time, then enter the Tk event loop
createBase()
board.mainloop()
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import argparse
import os
import re
import textwrap
import nibabel as nib
import numpy as np
from dipy.core.gradients import gradient_table
from dipy.tracking.streamline import length
from learn2track.neurotools import TractographyData, subsample_streamlines
from learn2track.utils import Timer
def build_argparser():
    """Build the command-line parser for the dataset-generation script.

    Two subcommands select the diffusion-signal source: ``raw_signal``
    (a Nifti image plus bvals/bvecs) and ``processed_signal`` (an existing
    TractographyData .npz file whose streamlines are ignored).  Both share
    the QuickBundles subsampling options and the general options.

    Returns
    -------
    argparse.ArgumentParser
    """
    DESCRIPTION = textwrap.dedent(
        """ Script to generate training data from a list of streamlines bundle files.
        This results in a .npz file containing the following keys:
        'coords': ndarray of shape (N, 3)
            Coordinates of each point of every streamlines expressed in voxel space.
            N is the total number points of all streamlines.
        'offsets': ndarray of shape (M,) with dtype int64
            Index of the beginning of each streamline. M is the total number of streamlines.
        'lengths': ndarray of shape (M,) with dtype int16
            Number of points of each streamline. M is the total number of streamlines.
        'bundle_ids': ndarray of shape (M,) with dtype int16
            ID of the bundle each streamline belongs to
        'name2id': dict
            Mapping between bundle names and bundle IDs.
        'signal': :class:`Nifti1Image` object (from nibabel)
            Diffusion signal
        'gradients': :class:`GradientTable` object (from dipy)
            Diffusion gradients information
        """)
    p = argparse.ArgumentParser(description=DESCRIPTION, formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    # Options shared by both subcommands: optional QuickBundles subsampling.
    subsampling_parser = argparse.ArgumentParser(add_help=False)
    subsampling_parser.add_argument('--subsample-streamlines', action='store_true',
                                    help="Downsample every bundle using QuickBundles. "
                                         "A clustering threshold of 6 and a removal distance of 2 are used by default, but can be changed. "
                                         "NOTE: Changing the default values will have no effect if this flag is not given")
    # BUG FIX: these two options previously had no `type=`, so values passed
    # on the command line arrived as *strings* instead of numbers.
    subsampling_parser.add_argument('--clustering_threshold', type=float, default=6,
                                    help="Threshold used to cluster streamlines before computing distance matrix")
    subsampling_parser.add_argument('--removal_distance', type=float, default=2,
                                    help="Streamlines closer than this distance will be reduced to a single streamline")
    # General options (optional)
    general_parser = argparse.ArgumentParser(add_help=False)
    general_parser.add_argument('--out', metavar='FILE', default="dataset.npz", help='output filename (.npz). Default: dataset.npz')
    general_parser.add_argument('--dtype', type=str, default="float32", help="'float16' or 'float32'. Default: 'float32'")
    general_parser.add_argument('--min-length', type=float, default=10., help="Minimum length (in mm)")
    general_parser.add_argument('-v', '--verbose', action='store_true', help='enable verbose mode.')
    signal_subparsers = p.add_subparsers(title="Signal source", dest="signal_source")
    signal_subparsers.required = True  # a subcommand must be given
    raw_signal_parser = signal_subparsers.add_parser("raw_signal", parents=[subsampling_parser, general_parser],
                                                     description="Use raw signal from a Nifti image")
    signal_parser = raw_signal_parser.add_argument_group("Raw signal arguments")
    signal_parser.add_argument('signal', help='Diffusion signal (.nii|.nii.gz).')
    signal_parser.add_argument('bundles', metavar='bundle', type=str, nargs="+", help='list of streamlines bundle files.')
    signal_parser.add_argument('--bvals', help='File containing diffusion gradient lengths (Default: guess it from `signal`).')
    signal_parser.add_argument('--bvecs', help='File containing diffusion gradient directions (Default: guess it from `signal`).')
    processed_signal_parser = signal_subparsers.add_parser("processed_signal", parents=[subsampling_parser, general_parser],
                                                           description="Extract signal from a TractographyData (.npz) file, and ignore existing streamlines.")
    signal_parser = processed_signal_parser.add_argument_group("Processed signal arguments")
    signal_parser.add_argument('tracto_data', help="TractographyData file containing the processed signal along existing streamlines and other info. (.npz)")
    signal_parser.add_argument('bundles', metavar='bundle', type=str, nargs="+", help='list of streamlines bundle files.')
    return p
def main():
    """Entry point: parse arguments, load the diffusion signal, filter each
    bundle's streamlines by length, optionally subsample them, and save the
    result as a TractographyData .npz file."""
    parser = build_argparser()
    args = parser.parse_args()
    tracto_data = None
    if args.signal_source == "raw_signal":
        signal = nib.load(args.signal)
        signal.get_data()  # Forces loading volume in-memory.
        # NOTE(review): the pattern is not a raw string and '.nii.gz' has an
        # unescaped dot; since '.gz' is the first alternative, only '.gz' is
        # ever stripped (leaving 'foo.nii') — confirm this is intended.
        basename = re.sub('(\.gz|\.nii.gz)$', '', args.signal)
        try:
            bvals = basename + '.bvals' if args.bvals is None else args.bvals
            bvecs = basename + '.bvecs' if args.bvecs is None else args.bvecs
            gradients = gradient_table(bvals, bvecs)
        except FileNotFoundError:
            # Fall back to the singular .bval/.bvec naming convention.
            try:
                bvals = basename + '.bval' if args.bvals is None else args.bvals
                bvecs = basename + '.bvec' if args.bvecs is None else args.bvecs
                gradients = gradient_table(bvals, bvecs)
            except FileNotFoundError as e:
                print("Could not find .bvals/.bvecs or .bval/.bvec files...")
                raise e
        tracto_data = TractographyData(signal, gradients)
    elif args.signal_source == "processed_signal":
        # Keep only signal and gradients; existing streamlines are discarded.
        loaded_tracto_data = TractographyData.load(args.tracto_data)
        tracto_data = TractographyData(loaded_tracto_data.signal, loaded_tracto_data.gradients)
    # Compute matrix that brings streamlines back to diffusion voxel space.
    rasmm2vox_affine = np.linalg.inv(tracto_data.signal.affine)
    # Retrieve data.
    with Timer("Retrieving data", newline=args.verbose):
        for filename in sorted(args.bundles):
            if args.verbose:
                print("{}".format(filename))
            # Load streamlines
            tfile = nib.streamlines.load(filename)
            tractogram = tfile.tractogram
            original_streamlines = tractogram.streamlines
            lengths = length(original_streamlines)
            streamlines = [s for (s, l) in zip(original_streamlines, lengths) if l >= args.min_length]
            # Make sure file is not empty
            if len(streamlines) > 0:
                if args.subsample_streamlines:
                    output_streamlines = subsample_streamlines(streamlines, args.clustering_threshold,
                                                               args.removal_distance)
                    print("Total difference: {} / {}".format(len(original_streamlines), len(output_streamlines)))
                    new_tractogram = nib.streamlines.Tractogram(output_streamlines,
                                                                affine_to_rasmm=tractogram.affine_to_rasmm)
                    tractogram = new_tractogram
                # NOTE(review): without --subsample-streamlines, `tractogram`
                # still holds the *unfiltered* streamlines — the min-length
                # filter above is only used for the emptiness check and to
                # feed subsampling; confirm this is intended.
                tractogram.apply_affine(rasmm2vox_affine)
                # Add streamlines to the TractogramData
                bundle_name = os.path.splitext(os.path.basename(filename))[0]
                tracto_data.add(tractogram.streamlines, bundle_name)
    if args.verbose:
        # Report how much precision the dtype cast below will cost.
        diff = tracto_data.streamlines._data - tracto_data.streamlines._data.astype(args.dtype)
        precision_error = np.sum(np.sqrt(np.sum(diff ** 2, axis=1)))
        avg_precision_error = precision_error / len(tracto_data.streamlines._data)
        print("Precision error: {} (avg. {})".format(precision_error, avg_precision_error))
    # Save streamlines coordinates using either float16 or float32.
    tracto_data.streamlines._data = tracto_data.streamlines._data.astype(args.dtype)
    # Save dataset
    tracto_data.save(args.out)
# Script entry point.
if __name__ == '__main__':
    main()
|
import os
import sys
import numpy
import h5py
import cupy
import utility
def calcDistField(point_file, h5name, save_location):
    """For every shape instance, compute the distance from each offset sample
    point to its nearest surface point (on the GPU) and save the result.

    NOTE: the stored values are *squared* euclidean distances — no sqrt is
    taken.

    Parameters
    ----------
    point_file : str
        HDF5 file with a 'points' dataset of sample offsets (K x 3).
    h5name : str
        HDF5 file with a 'data' dataset of surface points, shaped
        (instances, points-per-instance, 3).
    save_location : str
        Output HDF5 path; a 'distances' dataset of shape
        (instances, points-per-instance, K) is written.
    """
    # BUG FIX: open read-only explicitly — h5py's old default mode ('a') can
    # silently create an empty file on a bad path and is deprecated when no
    # mode is given; `with` also guarantees the handles are closed on error.
    with h5py.File(h5name, 'r') as data_file:
        data = data_file['data'][:]
    data_dim = data.shape[0]
    with h5py.File(point_file, 'r') as ptfile:
        sample_points = ptfile['points'][:]
    sample_size = sample_points.shape[0]
    # GPU parallelization: pairwise distances are computed in fixed blocks of
    # 8192 rows to bound peak GPU memory.
    memory_pool = cupy.get_default_memory_pool()
    pinned_memory_pool = cupy.get_default_pinned_memory_pool()
    distancesgpu = numpy.zeros((data_dim, data.shape[1], sample_size))
    x = cupy.asarray(sample_points)
    allpts = cupy.tile(x, (data.shape[1], 1))
    blocks = int(numpy.ceil(sample_size * data.shape[1] / 8192))
    del x
    print(blocks)
    yy = cupy.asarray(data)
    for inst in range(data_dim):
        if inst % 200 == 0:
            print(inst)  # progress indicator
        y = yy[inst]
        # Every sample offset translated to every surface point of this instance.
        xx = allpts + cupy.tile(y, (1, sample_size)).reshape(-1, 3)
        xdot = cupy.sum(cupy.multiply(xx, xx), axis=1)
        dt = cupy.zeros((sample_size * data.shape[1],))
        for blk in range(blocks):
            idstart = int(blk * 8192)
            idend = int((blk + 1) * 8192)
            # Squared distances via the ||a||^2 - 2 a.b + ||b||^2 expansion,
            # then keep the minimum over all surface points.
            dists = cupy.tile(xdot[idstart:idend], (y.shape[0], 1)).transpose() - 2 * cupy.matmul(xx[idstart:idend], y.transpose()) + cupy.tile(cupy.sum(cupy.multiply(y, y), axis=1).transpose(), (xx[idstart:idend].shape[0], 1))
            dt[idstart:idend] = cupy.amin(dists, axis=1)
            del dists
        dt = cupy.reshape(dt, (-1, sample_size))
        distancesgpu[inst] = cupy.asnumpy(dt)
        del dt
        del xx
        del xdot
        # Return cached GPU memory between instances to keep the footprint flat.
        memory_pool.free_all_blocks()
        pinned_memory_pool.free_all_blocks()
    # save file
    with h5py.File(save_location, 'w') as saveh5:
        saveh5.create_dataset('distances', data=distancesgpu)
def saveELM(svd_file, original_file, final_file, point_file, weight_file, dim):
    """Project each instance's distance field through the matrix stored in
    *point_file* (named as a pseudo-inverse) and save the features.

    Parameters
    ----------
    svd_file : str
        HDF5 file with the 'distances' dataset produced by calcDistField.
    original_file : str
        Opened read-only and closed immediately, contents unused.
        NOTE(review): kept only as an existence check — confirm before removing.
    final_file : str
        Output HDF5 path; a 'data' dataset of shape
        (instances, surface-size, dim) is written.
    point_file : str
        HDF5 file with the projection matrix under the 'mat' key (dim x K).
    weight_file : str
        Unused. NOTE(review): confirm before dropping from the signature.
    dim : int
        Output feature dimension (rows of 'mat').
    """
    # BUG FIX: explicit read-only mode — the old h5py no-mode default is
    # deprecated and could open files writably; `with` guarantees closing.
    with h5py.File(svd_file, 'r') as file1:
        distances = file1['distances'][:]
    # Opened purely as an existence check (nothing is read from it).
    file2 = h5py.File(original_file, 'r')
    file2.close()
    with h5py.File(point_file, 'r') as file3:
        mat = file3['mat'][:]
    surf_size = distances.shape[1]
    memory_pool = cupy.get_default_memory_pool()
    pinned_memory_pool = cupy.get_default_pinned_memory_pool()
    data_dim = distances.shape[0]
    tmp = numpy.zeros((data_dim, surf_size, dim))
    pinvmat = cupy.asarray(mat)
    for inst in range(data_dim):
        if inst % 200 == 0:
            print(inst)  # progress indicator
        dt = cupy.asarray(distances[inst])
        res = cupy.matmul(pinvmat, dt.transpose())
        tmp[inst] = cupy.asnumpy(res.transpose())
        del dt
        del res
    # Release cached GPU memory before writing the result.
    memory_pool.free_all_blocks()
    pinned_memory_pool.free_all_blocks()
    with h5py.File(final_file, 'w') as saveh5:
        saveh5.create_dataset('data', data=tmp)
from functools import wraps
from hashlib import md5
from contextlib import contextmanager
import psycopg2
from psycopg2.extras import RealDictCursor
from flask import session, request
from flask_restful import abort
from werkzeug.exceptions import BadRequest
def authenticated(method):
    """Decorator: allow the request only when a user is logged in
    (i.e. the session contains a 'login' key); otherwise respond 403."""
    @wraps(method)
    def wrapper(*args, **kwargs):
        if 'login' in session:
            return method(*args, **kwargs)
        abort(403, error='go away')
    return wrapper
def superuser_only(method):
    """Decorator: allow the request only for the 'root' login;
    everyone else gets a 403."""
    @wraps(method)
    def wrapper(*args, **kwargs):
        if session.get('login') == 'root':
            return method(*args, **kwargs)
        abort(403, error='go away')
    return wrapper
def hash_password(password):
    """Return the hex MD5 digest of *password*.

    NOTE(review): unsalted MD5 is unsuitable for password storage; it is
    kept here because existing stored hashes depend on it.
    """
    encoded = password.encode('utf-8')
    return md5(encoded).hexdigest()
# Alias so validation rules can spell NoneType as `null`, e.g. (str, null).
null = type(None) # just for beauty
def _isinstance(value, _type):
""" fix inconsistency of isinstance(True, int) == True """
if isinstance(_type, (list, tuple)):
return any(_isinstance(value, t) for t in _type)
if _type is int:
return isinstance(value, int) and value not in {True, False}
else:
return isinstance(value, _type)
def validate_request_json(required_keys_types):
    """ validation decorator, use this for POST controllers
    side-effect: set self.data, underscored keys
    :param required_keys_types: dict like {'password': (str, null), 'color': str}
    """
    # check validation rules on app start
    # declared field type should be python type (int, str, bool..) or tuple/list of this types
    for key, _type in required_keys_types.items():
        if isinstance(_type, type):
            continue
        if isinstance(_type, (tuple, list)) and all(isinstance(t, type) for t in _type):
            continue
        # else
        raise Exception(f'Bad validation rule: {key}, {_type}')
    def deco(method):
        @wraps(method)
        def wrapper(self, *args, **kwargs):
            try:
                # Normalise incoming lowerCamelCase JSON keys to snake_case.
                data = underscore_keys(request.json)
            except BadRequest:
                # abort() raises, so `data` is never used uninitialised.
                abort(400, error='Cannot decode json')
            for key, _type in required_keys_types.items():
                if key not in data:
                    abort(400, error=f'<{key}> should be provided')
                # type check and friendly error message
                if not _isinstance(data[key], _type):
                    type_strings = {
                        bool: 'boolean',
                        str: 'string',
                        int: 'integer',
                        null: 'null',
                    }
                    if isinstance(_type, (tuple, list)):
                        type_str = ' or '.join(type_strings.get(t, '<some type>') for t in _type)
                    else:
                        type_str = type_strings.get(_type, 'appropriate type')
                    abort(400, error=f'<{key}> should be {type_str}')
            # Expose the validated payload to the controller method.
            self.data = data
            return method(self, *args, **kwargs)
        return wrapper
    return deco
def underscore_keys(dct):
    """Return a copy of *dct* whose lowerCamelCase keys are converted to
    snake_case (each uppercase letter becomes '_' + its lowercase form)."""
    def _snake(name):
        pieces = []
        for ch in name:
            if ch.isupper():
                pieces.append('_')
                pieces.append(ch.lower())
            else:
                pieces.append(ch)
        return ''.join(pieces)
    return {_snake(key): value for key, value in dct.items()}
@contextmanager
def get_db():
    """Context manager yielding a RealDictCursor on a fresh connection.

    The connection is committed and closed on exit.
    NOTE(review): the commit sits in `finally`, so it also runs when the
    body raised — confirm that committing partial work is acceptable.
    """
    # TODO from conf
    user = 'postgres'
    password = ''
    host = 'localhost'
    port = 54320
    dbname = 'accman_test'
    conn = psycopg2.connect(
        f"user='{user}' \
        password='{password}' \
        dbname='{dbname}' \
        host='{host}' \
        port={port} \
        "
    )
    try:
        yield conn.cursor(cursor_factory=RealDictCursor)
    finally:
        conn.commit()
        conn.close()
|
"""IGCV3 for Semantic Segmentation"""
import torch
import torch.nn as nn
import torch.nn.functional as F
from light.model.base import BaseModel
from light.nn import _ASPP, _FCNHead
class IGCV3Seg(BaseModel):
    """IGCV3-backbone semantic segmentation model.

    Runs the backbone (BaseModel.base_forward), feeds the deepest feature
    map (c4) through an ASPP head, and bilinearly upsamples the logits back
    to the input resolution.  With ``aux=True`` an auxiliary FCN head on the
    c3 feature map is appended to the returned tuple.
    """
    def __init__(self, nclass, aux=False, backbone='mobilenetv2', pretrained_base=False, **kwargs):
        super(IGCV3Seg, self).__init__(nclass, aux, backbone, pretrained_base, **kwargs)
        self.head = _Head(nclass, **kwargs)
        if self.aux:
            # 160 = assumed channel count of the c3 feature map — TODO confirm
            # against the backbone definition.
            self.auxlayer = _FCNHead(160, nclass, **kwargs)
    def forward(self, x):
        size = x.size()[2:]  # input spatial size, target for upsampling
        _, _, c3, c4 = self.base_forward(x)
        outputs = list()
        x = self.head(c4)
        x = F.interpolate(x, size, mode='bilinear', align_corners=True)
        outputs.append(x)
        if self.aux:
            auxout = self.auxlayer(c3)
            auxout = F.interpolate(auxout, size, mode='bilinear', align_corners=True)
            outputs.append(auxout)
        return tuple(outputs)
class _Head(nn.Module):
    """Segmentation head: ASPP over the 320-channel backbone features,
    followed by a 3x3 conv + norm + ReLU + dropout projection down to
    per-class logits."""
    def __init__(self, nclass, norm_layer=nn.BatchNorm2d, **kwargs):
        super(_Head, self).__init__()
        self.aspp = _ASPP(320, [12, 24, 36], norm_layer=norm_layer, **kwargs)
        # Built as a list first; the Sequential indices (and therefore the
        # state_dict keys) are identical to the original definition.
        projection_layers = [
            nn.Conv2d(256, 256, 3, padding=1, bias=False),
            norm_layer(256),
            nn.ReLU(True),
            nn.Dropout(0.1),
            nn.Conv2d(256, nclass, 1),
        ]
        self.project = nn.Sequential(*projection_layers)
    def forward(self, x):
        features = self.aspp(x)
        return self.project(features)
def get_igcv3_seg(dataset='citys', pretrained=False, root='~/.torch/models',
                  pretrained_base=False, **kwargs):
    """Build an IGCV3Seg model sized for *dataset* and optionally load the
    matching pretrained checkpoint from *root*."""
    # Dataset name -> suffix used in the checkpoint file name.
    acronyms = {
        'pascal_voc': 'pascal_voc',
        'pascal_aug': 'pascal_aug',
        'ade20k': 'ade',
        'coco': 'coco',
        'citys': 'citys',
    }
    from light.data import datasets
    num_classes = datasets[dataset].NUM_CLASS
    model = IGCV3Seg(num_classes, backbone='igcv3',
                     pretrained_base=pretrained_base, **kwargs)
    if pretrained:
        from ..model import get_model_file
        checkpoint = get_model_file('igcv3_%s_best_model' % (acronyms[dataset]), root=root)
        model.load_state_dict(torch.load(checkpoint))
    return model
if __name__ == '__main__':
    # Smoke test: construct the default (cityscapes) model.
    model = get_igcv3_seg()
|
from django.db import models
# Create your models here.
from tinymce.models import HTMLField
class Category(models.Model):
    """Blog post category (display labels are Chinese)."""
    name = models.CharField(max_length=20, verbose_name="分类")
    def __str__(self):
        return self.name
    class Meta:
        db_table = "Category"
        verbose_name = "分类"
        verbose_name_plural = verbose_name
class Tag(models.Model):
    """Free-form blog post tag (display labels are Chinese)."""
    name = models.CharField(max_length=20, verbose_name="标签")
    def __str__(self):
        return self.name
    class Meta:
        db_table = "Tag"
        verbose_name = "标签"
        verbose_name_plural = verbose_name
class Blog(models.Model):
    """Blog post: title, creation date, rich-text body, category and tags."""
    title = models.CharField(max_length=30, null=True, verbose_name="标题")
    time = models.DateField(null=True, verbose_name="创建时间")
    hcontent = HTMLField(verbose_name="正文")
    # many-to-one (post -> category). ``on_delete`` is mandatory on
    # Django >= 2.0; CASCADE matches the implicit default of Django 1.x,
    # so existing behavior is preserved.
    category = models.ForeignKey(Category, null=True, on_delete=models.CASCADE,
                                 verbose_name="分类")
    tag = models.ManyToManyField(Tag, verbose_name="标签")  # many-to-many
    def __str__(self):
        return self.title
    class Meta:
        db_table = "blog"
        verbose_name = "文章"
        verbose_name_plural = verbose_name
|
# -*- coding: utf-8 -*-
"""
Test the readers for which there is test data in this package
"""
import os
from io_utils.data.read.geo_ts_readers import (
GeoCCISMv6Ts, GeoSMOSICTs, SelfMaskingAdapter, GeoCCISMv5Ts,
GeoSpl3smpTs, GeoCCISMv3Ts, GeoCCISMv4Ts, GeoEra5Ts, GeoEra5LandTs,
GeoC3Sv202012Ts, GeoC3Sv201912Ts, GeoC3Sv201812FullCDRTs, GeoC3Sv201812Ts,
GeoC3Sv201706Ts, GeoC3Sv201706FullCDRTs, GeoGLDAS21Ts, GeoGLDAS20Ts,
GeoMerra2Ts, GeoISMNTs, GeoCglsNcTs, GeoCCISMv7IntermedNcTs)
import numpy as np
import pandas as pd
import pytest
from smecv_grid.grid import SMECV_Grid_v052
test_loc = (-155.875, 19.625)
def test_read_applied():
    """Read ESA CCI SM v061 COMBINED at the test location and expect data."""
    force_path_group = '__test'
    reader = GeoCCISMv6Ts(dataset_or_path=('ESA_CCI_SM', 'v061', 'COMBINED'),
                          exact_index=True,
                          ioclass_kws={'read_bulk': True},
                          parameters=['sm', 'sm_uncertainty', 't0'],
                          scale_factors={'sm': 1.},
                          force_path_group=force_path_group)
    ts = reader.read(*test_loc)
    assert not ts.empty
def test_smosic_reader():
    """Read SMOS IC ascending cell data, then a quality-masked point series."""
    force_path_group = '__test'
    smos_reader = GeoSMOSICTs(dataset_or_path=('SMOS', 'IC', 'ASC'),
                              ioclass_kws={'read_bulk': True},
                              parameters=['Soil_Moisture', 'Quality_Flag'],
                              scale_factors={'sm': 1.},
                              force_path_group=force_path_group)
    celldata = smos_reader.read_cells([165,166])
    assert any([not data.empty for gpi, data in celldata.items()])
    # Keep only observations where Quality_Flag == 0.
    smos_reader = SelfMaskingAdapter(smos_reader, '==', 0, 'Quality_Flag')
    ts = smos_reader.read(*test_loc)
    assert not ts.empty
def test_smap_spl3_v5_reader():
    """Read SMAP SPL3SMP v5 PM overpass data, then a flag-masked point series."""
    force_path_group = '__test'
    smap_reader = GeoSpl3smpTs(dataset_or_path=('SMAP', 'SP3SMPv5', 'ASC'),
                               ioclass_kws={'read_bulk': True},
                               parameters=['soil_moisture_pm', 'retrieval_qual_flag_pm'],
                               scale_factors={'soil_moisture_pm': 1.},
                               force_path_group=force_path_group)
    celldata = smap_reader.read_cells([165,166])
    assert any([not data.empty for gpi, data in celldata.items()])
    # Exclude observations flagged with the 999 fill value.
    smap_reader = SelfMaskingAdapter(smap_reader, '!=', 999, 'retrieval_qual_flag_pm')
    ts = smap_reader.read(*test_loc)
    assert not ts.empty
def test_cci_v033_reader():
    """Read ESA CCI SM v033 (ACTIVE/COMBINED/PASSIVE) from the test path group.

    The three products were previously tested with three copy-pasted blocks;
    the identical sequence is now run in a single loop. Cells 165/166 are
    empty in the test data, gpi 633697 has data.
    """
    vers = 'v033'
    force_path_group = '__test'
    for dataset in ('ACTIVE', 'COMBINED', 'PASSIVE'):
        reader = GeoCCISMv3Ts(dataset_or_path=('ESA_CCI_SM', vers, dataset),
                              exact_index=False,  # exact index works only after v047
                              ioclass_kws={'read_bulk': True},
                              parameters=['sm', 'sm_uncertainty', 't0'],
                              scale_factors={'sm': 1.},
                              force_path_group=force_path_group)
        cell_data = reader.read_cells([165,166])  # all empty
        ts = reader.read(633697)
        # TODO: times are wrong
        assert not ts.empty
@pytest.mark.parametrize("version,Reader",[
    ("v045", GeoCCISMv4Ts),
    ("v047", GeoCCISMv4Ts),
    ("v052", GeoCCISMv5Ts),
    ("v061", GeoCCISMv6Ts),
])
def test_cci_reader(version, Reader):
    """Read CCI ACTIVE (empty test data), COMBINED (has data) and PASSIVE
    (empty at the test location) for several CCI versions."""
    force_path_group = '__test'
    ## == active
    reader = Reader(dataset_or_path=('ESA_CCI_SM', version, 'ACTIVE'),
                    exact_index=True,
                    ioclass_kws={'read_bulk': True},
                    parameters=['sm', 'sm_uncertainty', 't0'],
                    scale_factors={'sm': 1.},
                    force_path_group=force_path_group)
    cell_data = reader.read_cells([165,166]) # all empty
    for col in cell_data.columns: # all empty
        assert cell_data[col].dropna().empty
    ts = reader.read(*test_loc)
    assert ts.dropna().empty
    reader.close()
    ## == combined
    reader = Reader(dataset_or_path=('ESA_CCI_SM', version, 'COMBINED'),
                    exact_index=True,
                    ioclass_kws={'read_bulk': True},
                    parameters=['sm', 'sm_uncertainty', 't0'],
                    scale_factors={'sm': 1.},
                    force_path_group=force_path_group)
    cell_data = reader.read_cells([165,166]) # not all empty
    assert not cell_data[(632257, 'sm')].dropna().empty
    ts = reader.read(632257).replace(-9999., np.nan)
    assert not ts.empty
    reader.close()
    ## == passive
    reader = Reader(dataset_or_path=('ESA_CCI_SM', version, 'PASSIVE'),
                    exact_index=True,
                    ioclass_kws={'read_bulk': True},
                    parameters=['sm', 'sm_uncertainty', 't0'],
                    scale_factors={'sm': 1.},
                    force_path_group=force_path_group)
    cell_data = reader.read_cells([165,166])
    assert isinstance(cell_data, pd.DataFrame)
    ts = reader.read(*test_loc)
    assert ts.dropna().empty
def test_era5_reader():
    """Read ERA5 top-layer soil moisture (swvl1) at the test location."""
    force_path_group = '__test'
    reader = GeoEra5Ts(dataset_or_path=('ERA5', 'core'),
                       ioclass_kws={'read_bulk': True},
                       parameters=['swvl1'], scale_factors={'swvl1': 1.},
                       force_path_group=force_path_group)
    ts = reader.read(*test_loc)
    assert not ts.dropna(how='all').empty
def test_era5land_reader():
    """Read ERA5-Land swvl1 via the grouped-variables interface."""
    force_path_group = '__test'
    reader = GeoEra5LandTs(group_vars={('ERA5-Land', 'sm_precip_lai'): ['swvl1']},
                           ioclass_kws={'read_bulk': True},
                           scale_factors={'swvl1': 1.},
                           force_path_group=force_path_group)
    ts = reader.read(*test_loc)
    assert not ts.dropna(how='all').empty
def test_C3S201706_single_readers():
    """Read C3S v201706 daily data for TCDR and ICDR records.

    The ACTIVE/COMBINED/PASSIVE products were previously three copy-pasted
    blocks per record; the identical sequence now runs in a nested loop
    (same order: ACTIVE, COMBINED, PASSIVE).
    """
    force_path_group = '__test'
    for record in ['TCDR', 'ICDR']:
        for dataset in ('ACTIVE', 'COMBINED', 'PASSIVE'):
            reader = GeoC3Sv201706Ts(
                dataset_or_path=('C3S', 'v201706', dataset, 'DAILY', record),
                grid=None, ioclass_kws={'read_bulk': True},
                parameters=['sm', 'sm_uncertainty', 'flag'], scale_factors={'sm': 1.},
                force_path_group=force_path_group)
            ts = reader.read(*test_loc)
            assert not ts.dropna(how='all').empty
def test_C3S201812_single_readers():
    """Read C3S v201812 daily TCDR for all three products, flag-masked.

    Previously three copy-pasted blocks; now one loop in the same order.
    The PASSIVE test data contains no observations at the test location.
    """
    force_path_group = '__test'
    for dataset in ('ACTIVE', 'COMBINED', 'PASSIVE'):
        reader = GeoC3Sv201812Ts(
            dataset_or_path=('C3S', 'v201812', dataset, 'DAILY', 'TCDR'),
            grid=None, ioclass_kws={'read_bulk': True},
            parameters=['sm', 'sm_uncertainty', 'flag'], scale_factors={'sm': 1.},
            force_path_group=force_path_group)
        reader = SelfMaskingAdapter(reader, '==', 0, 'flag')
        ts = reader.read(*test_loc)
        if dataset == 'PASSIVE':
            assert ts.empty  # ATTENTION: passive data is empty here
        else:
            assert not ts.dropna(how='all').empty
@pytest.mark.parametrize("version,Reader",[
    ("v201706", GeoC3Sv201706Ts),
    ("v201812", GeoC3Sv201812Ts),
    ("v201912", GeoC3Sv201912Ts),
    ("v202012", GeoC3Sv202012Ts),
])
def test_C3S_single_readers(version, Reader):
    """Read COMBINED daily TCDR for several C3S versions with flag masking."""
    force_path_group = '__test'
    reader = Reader(
        dataset_or_path=('C3S', version, 'COMBINED', 'DAILY', 'TCDR'),
        grid=None, ioclass_kws={'read_bulk': True},
        parameters=['sm', 'sm_uncertainty', 'flag'],
        scale_factors={'sm': 1.},
        force_path_group=force_path_group)
    reader = SelfMaskingAdapter(reader, '==', 0, 'flag')
    ts = reader.read(*test_loc)
    if version == 'v201706': # this version is empty
        assert ts.dropna(how='all').empty
    else:
        assert not ts.dropna(how='all').empty
    print(ts)
def test_merra2_ts_reader():
    """Read MERRA2 surface soil moisture (SFMC, scaled x100)."""
    force_path_group = '__test'
    reader = GeoMerra2Ts(dataset_or_path=('MERRA2', 'core'),
                         ioclass_kws={'read_bulk': True},
                         parameters=['SFMC'], scale_factors={'SFMC': 100.},
                         force_path_group=force_path_group)
    ts = reader.read(*test_loc)
    assert not ts.dropna(how='all').empty
def test_era5_land_ts_reader():
    """Read ERA5-Land swvl1 masked to unfrozen soil (stl1 >= 273.15 K)."""
    force_path_group = '__test'
    reader = GeoEra5LandTs(group_vars={('ERA5-Land', 'temperature'): ['stl1'],
                                       ('ERA5-Land', 'sm_precip_lai'): ['swvl1']},
                           ioclass_kws={'read_bulk': True},
                           scale_factors={'swvl1': 1.},
                           force_path_group=force_path_group)
    reader = SelfMaskingAdapter(reader, '>=', 273.15, 'stl1')
    ts = reader.read(*test_loc)
    assert not ts.dropna(how='all').empty
def test_era5_ts_reader():
    """Read ERA5 swvl1 (scaled x100) masked to unfrozen soil."""
    force_path_group = '__test'
    reader = GeoEra5Ts(dataset_or_path=('ERA5', 'core'),
                       ioclass_kws={'read_bulk': True},
                       parameters=['swvl1', 'stl1'], scale_factors={'swvl1': 100.},
                       force_path_group=force_path_group)
    reader = SelfMaskingAdapter(reader, '>=', 273.15, 'stl1')
    ts = reader.read(*test_loc)
    assert not ts.dropna(how='all').empty
def test_gldas21_ts_reader():
    """Read GLDAS 2.1 topsoil moisture masked to unfrozen soil temperature."""
    force_path_group = '__test'
    reader = GeoGLDAS21Ts(dataset_or_path=('GLDAS21', 'core'),
                          ioclass_kws={'read_bulk': True},
                          parameters=['SoilMoi0_10cm_inst', 'SoilTMP0_10cm_inst'],
                          scale_factors={'SoilMoi0_10cm_inst': 0.01},
                          force_path_group=force_path_group)
    reader = SelfMaskingAdapter(reader, '>=', 273.15, 'SoilTMP0_10cm_inst')
    ts = reader.read(*test_loc)
    assert not ts.dropna(how='all').empty
def test_ismn_good_sm_ts_reader_masking():
    """Read ISMN COSMOS data masked to 'G' (good) soil-moisture flags."""
    reader = GeoISMNTs(('ISMN', 'v20191211'), network=['COSMOS'],
                       force_path_group='__test', scale_factors=None)
    # Regenerate the station metadata index before reading.
    reader.rebuild_metadata()
    mreader = SelfMaskingAdapter(reader, '==', 'G', 'soil_moisture_flag')
    nearest_station = reader.find_nearest_station(-155.5, 19.9)
    assert nearest_station.name == 'SilverSword'
    ids = reader.get_dataset_ids('soil_moisture', min_depth=0, max_depth=0.17)
    ts = mreader.read(ids[0]) # read and mask
    assert np.all(ts['soil_moisture_flag'] == 'G')
    df_drop = ts['soil_moisture'].dropna()
    assert not df_drop.empty
def test_ismn_good_sm_ts_reader_no_masking():
    """Compare only_good=True filtering in read_nearest_station against
    manually filtering the raw time series on the 'G' flag."""
    reader = GeoISMNTs(('ISMN', 'v20191211'), network=['COSMOS'],
                       force_path_group='__test', scale_factors=None)
    nearest = reader.find_nearest_station(-155.5, 19.9)
    # NOTE: SelfMaskingAdapter cannot be applied here because it expects a
    # read_ts-style function — TODO confirm and unify the interfaces.
    dat, station, dist = reader.read_nearest_station(
        lon=nearest.metadata['longitude'].val,
        lat=nearest.metadata['latitude'].val,
        variable='soil_moisture',
        only_good=True,
        return_flags=True,
        depth=(0, 0.2))
    sm_g = dat['soil_moisture 0.0 to 0.17 [m]']
    flag_g = dat['soil_moisture_flag 0.0 to 0.17 [m]']
    assert np.all(flag_g.values == 'G')
    also_sm = reader.read_ts(0)
    also_g_sm = also_sm.loc[also_sm['soil_moisture_flag'] == 'G']['soil_moisture']
    also_g_flag = also_sm.loc[also_sm['soil_moisture_flag'] == 'G']['soil_moisture_flag']
    assert np.all(also_g_flag.values == 'G')
    assert np.all(sm_g.values == also_g_sm.values)
    assert not dat.dropna().empty
def test_cci_intermed_v7_nc_reader():
    """Read CCI v07x intermediate netcdf series; point read must match the
    corresponding column of the cell read."""
    ## == active
    reader = GeoCCISMv7IntermedNcTs\
        (dataset_or_path=os.path.join(os.path.dirname(__file__),
                                      '..', '00_testdata', 'read',
                                      'esa_cci_sm', 'v07x', 'intermedncts'),
         grid=SMECV_Grid_v052(),
         exact_index=False, # todo: implement
         ioclass_kws={'read_bulk': True},
         parameters=['sm', 'sm_uncertainty', 't0'],
         scale_factors={'sm': 1.},
         force_path_group=None)
    # todo: test cell_reader
    ts = reader.read(*test_loc)
    assert ts.dropna().empty
    cs = reader.read_cell(165, 'sm', fill_value=np.nan)
    gpi, _ = reader.grid.find_nearest_gpi(*test_loc)
    assert np.all(ts['sm'].values == cs[gpi].dropna().values)
    reader.close()
def test_cgls_ssm_reader():
    """Read CGLS 1km surface soil moisture (ssm) at the test location."""
    dataset = ('CSAR', 'CGLS', 'SSM', '1km', 'V1.1')
    reader = GeoCglsNcTs(dataset,
                         parameters='ssm',
                         force_path_group='__test')
    ts = reader.read(*test_loc)
    assert not ts.dropna().empty
    reader.close()
def test_cgls_swi_ts_reader():
    """Read CGLS 1km soil water index (SWI_005) at the test location."""
    dataset = ('CSAR', 'CGLS', 'SWI', '1km', 'V1.0')
    reader = GeoCglsNcTs(dataset,
                         parameters='SWI_005',
                         force_path_group='__test')
    ts = reader.read(*test_loc)
    assert ts.columns == ['SWI_005']
    assert not ts.dropna().empty
    reader.close()
if __name__ == '__main__':
    # Run the whole suite directly (without pytest), including explicit
    # parameter combinations for the parametrized tests.
    test_cgls_ssm_reader()
    test_cgls_swi_ts_reader()
    test_cci_intermed_v7_nc_reader()
    test_read_applied()
    test_C3S_single_readers('v201706', GeoC3Sv201706Ts)
    test_C3S_single_readers('v202012', GeoC3Sv202012Ts)
    test_C3S_single_readers('v201912', GeoC3Sv201912Ts)
    test_C3S_single_readers('v201812', GeoC3Sv201812Ts)
    test_cci_reader('v061', GeoCCISMv6Ts)
    test_cci_reader('v052', GeoCCISMv5Ts)
    test_cci_reader('v045', GeoCCISMv4Ts)
    test_cci_v033_reader()
    test_era5_reader()
    test_era5land_reader()
    test_C3S201706_single_readers()
    test_C3S201812_single_readers()
    test_merra2_ts_reader()
    test_era5_land_ts_reader()
    test_era5_ts_reader()
    test_gldas21_ts_reader()
    test_ismn_good_sm_ts_reader_masking()
    test_ismn_good_sm_ts_reader_no_masking()
    test_smosic_reader()
    test_smap_spl3_v5_reader()
|
# NOTE(review): Python 2 demo code (print statements) illustrating class
# attributes vs. instance attributes — not importable under Python 3.
class Loaf:
    "loaves of bread with names, tastes and weights"
    # Class-level attributes; ``counter`` tracks how many instances each
    # class (or subclass — see __init__) has created.
    ingredients=['yeast', 'flour']
    counter=0
    def taste(self):
        return "yeasty"
    def weight(self):
        return "3lb 2oz"
    def __init__(self, name="default loaf"):
        print 'init'
        print self.__class__
        # Incrementing via self.__class__ creates a SEPARATE counter on the
        # subclass the first time a subclass instance is made (the += rebinds
        # the attribute on that class) — this is the point of the demo.
        self.__class__.counter += 1
        self.name=name
    def getname(self):
        return self.name
    def __repr__(self):
        "display name, taste and weight of the loaf"
        return "\n".join([self.getname(), self.taste(), self.weight()])
class MaltLoaf(Loaf):
    "malt loaves"
    # Extends (does not share) the parent's ingredient list.
    ingredients=Loaf.ingredients+['malt', 'fruit']
    def __init__(self, name="soreen"):
        Loaf.__init__(self, name)
    def taste(self):
        return "malty"
from UserDict import UserDict
def __p():
    # Debug helper: show whether the subclass still shares the parent's
    # counter object. NOTE(review): ``is`` on small ints relies on CPython
    # int caching — works for this demo but is implementation-specific.
    print Loaf.counter is MaltLoaf.counter
    print Loaf.counter
    print MaltLoaf.counter
    print
if __name__=='__main__':
    # Alternate creating base and subclass instances and print the counters
    # after each, demonstrating when MaltLoaf gets its own counter attribute.
    a=Loaf()
    __p()
    b=MaltLoaf()
    __p()
    c=Loaf()
    __p()
    d=MaltLoaf()
    __p()
|
import requests, re, os, time, sys, subprocess, datetime, json
# Path to the VLC executable used to record the streams (Windows-only script).
vlc_path = 'C:\\Software\\VideoLAN\\VLC\\vlc.exe'
# Output folder; one sub-folder per camera is created inside it.
video_folder = 'camarads'
file_len = 5 #minutes, approximately
night_mode = False
# NOTE(review): the original comment said "3:00 AM" but the value is 02:00 —
# confirm the intended night-mode cutoff time.
offTime = datetime.time(2,0,0) # time when downloading stops in night mode
printCams = False
# cam's IDs. For ex.: ['4_4','2_2']; built from a JSON list like ["cam4cam4"]
# passed as the first CLI argument ('cam' is replaced by '_').
download_only = [i.replace('cam','_') for i in json.loads(sys.argv[1])]
#download_only = ['4_4']
def lets_go():
    """Fetch the current camarads stream list and start one VLC dump per cam.

    Skips cams not listed in ``download_only`` (when non-empty) and cams
    whose folder contains a ``stop.txt`` marker file.
    """
    q1 = requests.get("https://www.camarads.com/")
    # Scrape the streaming server address and the cam-id -> stream map.
    serv = re.findall("var serverAddr = \"(.*?)\"", q1.text)[0]
    cams = dict(re.findall("nimbleStreamArr\['(.+?)'\] = '(.+?)'", q1.text))
    if printCams:
        with open('selected_cams_%s.txt'%(str(datetime.datetime.now().date()).replace(':','-'),),'w') as w:
            w.write(str(download_only))
    if download_only:
        # Drop every cam not explicitly requested.
        for cam_id in list(cams.keys()):
            if cam_id not in download_only:
                cams.pop(cam_id)
    filename = time.strftime("!%Y-%m-%d-%H-%M-%S")
    for sel in list(cams.keys()):
        # os.makedirs is safer than shelling out to "mkdir" with a
        # hand-quoted path (the old os.system call).
        os.makedirs(video_folder + '\\' + sel, exist_ok=True)
        # A stop.txt inside the cam folder disables recording for that cam.
        if 'stop.txt' in os.listdir(video_folder+'/'+sel):
            continue
        cmd = '%s "%s" --sout="#duplicate{dst=std{access=file,mux=mp4,dst=\'%s\\%s\\%s.mp4\'},dst=nodisplay}" '%(vlc_path,serv+cams[sel],video_folder,sel,filename)
        os.popen(cmd)
        time.sleep(1)
print("************************************")
print("**Press Ctrl+С to stop downloading**")
print("************************************")
print('Video length = ' + str(file_len))
print('Night mode = ' + str(night_mode))
print("************************************")
if night_mode:
    print(offTime.strftime('%H:%M:%S'))
ctrl_c = False
lasttime = datetime.datetime.now().time()
# Main capture loop: start a fresh batch of VLC dumps every file_len
# minutes until Ctrl+C, or (in night mode) until the clock crosses offTime.
while True:
    nowtime = datetime.datetime.now().time()
    # Stop for the night when the current time passes offTime between
    # two consecutive iterations.
    if night_mode and lasttime < offTime and nowtime > offTime:
        break
    lasttime = nowtime
    print("Runing. Wait...")
    lets_go()
    print("Done. Now sleeping from "+str(datetime.datetime.now().time()))
    try:
        time.sleep(file_len*60)
    except KeyboardInterrupt:
        print("Exiting...")
        ctrl_c = True
    # Kill all VLC instances so the current video files get finalized
    # before the next batch (or before exiting).
    os.system('taskkill /IM "vlc.exe"')
    if ctrl_c:
        break
"""
Copyright (c) 2016-2020 Keith Sterling http://www.keithsterling.com
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the
Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
class ServiceConfiguration:
    """Configuration for a single external service.

    Instances are normally created through one of the factory methods
    (:meth:`from_data`, :meth:`new_from_yaml`, :meth:`from_sql`,
    :meth:`from_mongo`), which select the appropriate subclass for the
    declared service type ('rest', 'wsdl', 'library', or a generic type).
    """

    def __init__(self, service_type):
        self._service_type = service_type
        self._name = None
        self._category = None
        self._storage = None
        self._service_class = None
        self._default_response = None
        self._default_srai = None
        self._success_prefix = None
        self._default_aiml = None
        self._load_default_aiml = True
        self._url = None

    @staticmethod
    def from_data(service_type, name, category, storage=None, service_class=None, default_response=None, default_srai=None,
                  success_prefix=None, default_aiml=None, load_default_aiml=True, url=None):
        """Build a configuration of the right subclass directly from values."""
        if service_type == 'rest':
            config = ServiceRESTConfiguration()
        elif service_type == 'wsdl':
            config = ServiceWSDLConfiguration()
        elif service_type == 'library':
            config = ServiceLibraryConfiguration()
        else:
            config = ServiceConfiguration(service_type=service_type)
        config._name = name
        config._category = category
        config._storage = storage
        config._service_class = service_class
        config._default_response = default_response
        config._default_srai = default_srai
        config._success_prefix = success_prefix
        config._default_aiml = default_aiml
        config._load_default_aiml = load_default_aiml
        config._url = url
        return config

    @property
    def service_type(self):
        return self._service_type

    @property
    def name(self):
        return self._name

    @property
    def category(self):
        return self._category

    @property
    def service_class(self):
        return self._service_class

    @property
    def storage(self):
        return self._storage

    @property
    def default_aiml(self):
        return self._default_aiml

    @property
    def load_default_aiml(self):
        return self._load_default_aiml

    @property
    def default_response(self):
        return self._default_response

    @property
    def default_srai(self):
        return self._default_srai

    @property
    def success_prefix(self):
        return self._success_prefix

    @property
    def url(self):
        return self._url

    @staticmethod
    def new_from_yaml(yaml_data, filename):
        """Build a configuration from a parsed service YAML document.

        Raises ValueError if the 'service' key is missing or the declared
        type is unknown. When no type is declared, a generic configuration
        is created.
        """
        if 'service' not in yaml_data:
            raise ValueError("'service' missing from service yaml")
        service_data = yaml_data['service']
        if 'type' in service_data:
            service_type = service_data.get('type')
            if service_type == 'rest':
                config = ServiceRESTConfiguration()
            elif service_type == 'wsdl':
                config = ServiceWSDLConfiguration()
            elif service_type == 'library':
                # BUG FIX: this branch previously created a
                # ServiceWSDLConfiguration for 'library' services; use the
                # library configuration, consistent with from_data/from_sql/
                # from_mongo.
                config = ServiceLibraryConfiguration()
            else:
                raise ValueError("Unknown service type [%s]"%service_type)
        else:
            config = ServiceConfiguration(service_type='generic')
        config.from_yaml(service_data, filename)
        return config

    def from_yaml(self, service_data, filename):
        """Populate common fields from a service YAML mapping; *filename*
        is recorded as the storage origin."""
        self._name = service_data.get('name', None)
        self._category = service_data.get('category', None)
        self._storage = filename
        self._service_class = service_data.get('service_class', None)
        self._default_response = service_data.get('default_response', None)
        self._default_srai = service_data.get('default_srai', None)
        self._success_prefix = service_data.get('success_prefix', None)
        self._default_aiml = service_data.get('default_aiml', None)
        self._load_default_aiml = service_data.get('load_default_aiml', True)
        self._url = service_data.get('url', None)

    @staticmethod
    def from_sql(dao):
        """Build a configuration from a SQL DAO row (attribute access)."""
        if dao.type == 'rest':
            config = ServiceRESTConfiguration()
            config._retries = dao.rest_retries
            if config._retries is None:
                config._retries = ServiceRESTConfiguration.DEFAULT_RETRIES
            config._timeout = dao.rest_timeout
            if config._timeout is None:
                config._timeout = ServiceRESTConfiguration.DEFAULT_TIMEOUT
        elif dao.type == 'library':
            config = ServiceLibraryConfiguration()
        else:
            config = ServiceConfiguration(service_type=dao.type)
        config._name = dao.name
        config._category = dao.category
        config._storage = "sql"
        config._service_class = dao.service_class
        config._default_response = dao.default_response
        config._default_srai = dao.default_srai
        config._success_prefix = dao.success_prefix
        config._default_aiml = dao.default_aiml
        config._load_default_aiml = dao.load_default_aiml
        config._url = dao.url
        return config

    @staticmethod
    def from_mongo(dao):
        """Build a configuration from a Mongo document (dict access)."""
        if dao.get('type') == 'rest':
            config = ServiceRESTConfiguration()
            rest_data = dao.get("rest", None)
            if rest_data is not None:
                config._retries = rest_data.get('retries', None)
                config._timeout = rest_data.get('timeout', None)
            # Explicit nulls (or a missing rest section) fall back to defaults.
            if config._retries is None:
                config._retries = ServiceRESTConfiguration.DEFAULT_RETRIES
            if config._timeout is None:
                config._timeout = ServiceRESTConfiguration.DEFAULT_TIMEOUT
        elif dao.get('type')== 'library':
            config = ServiceLibraryConfiguration()
        else:
            config = ServiceConfiguration(service_type=dao.get('type'))
        config._name = dao.get('name', None)
        config._category = dao.get('category', None)
        config._storage = "mongo"
        config._service_class = dao.get('service_class', None)
        config._default_response = dao.get('default_response', None)
        config._default_srai = dao.get('default_srai', None)
        config._success_prefix = dao.get('success_prefix', None)
        config._default_aiml = dao.get('default_aiml', None)
        config._load_default_aiml = dao.get('load_default_aiml', True)
        config._url = dao.get('url', None)
        return config
class ServiceLibraryConfiguration(ServiceConfiguration):
    """Configuration for a library-backed service (no extra settings)."""

    def __init__(self):
        super(ServiceLibraryConfiguration, self).__init__(service_type='library')
class ServiceRESTConfiguration(ServiceConfiguration):
    """Configuration for a REST service: adds a retry backoff schedule
    (milliseconds) and a request timeout (milliseconds)."""

    DEFAULT_RETRIES = [100, 500, 1000, 2000, 5000, 10000]
    DEFAULT_TIMEOUT = 3000

    def __init__(self):
        super(ServiceRESTConfiguration, self).__init__(service_type='rest')
        self._retries = ServiceRESTConfiguration.DEFAULT_RETRIES
        self._timeout = ServiceRESTConfiguration.DEFAULT_TIMEOUT

    @property
    def retries(self):
        return self._retries

    @property
    def timeout(self):
        return self._timeout

    def from_yaml(self, service_data, filename):
        """Populate common fields, then REST-specific retries/timeout."""
        super(ServiceRESTConfiguration, self).from_yaml(service_data, filename)
        rest_data = service_data.get("rest", None)
        if rest_data is None:
            return
        retries = rest_data.get('retries', None)
        timeout = rest_data.get('timeout', None)
        # Explicit nulls in the YAML fall back to the class defaults.
        self._retries = ServiceRESTConfiguration.DEFAULT_RETRIES if retries is None else retries
        self._timeout = ServiceRESTConfiguration.DEFAULT_TIMEOUT if timeout is None else timeout
class ServiceWSDLConfiguration(ServiceConfiguration):
    """Configuration for a WSDL/SOAP service: adds the WSDL definition file
    and an optional station-codes file."""

    def __init__(self):
        super(ServiceWSDLConfiguration, self).__init__(service_type='wsdl')
        self._wsdl_file = None
        self._station_codes_file = None

    @property
    def wsdl_file(self):
        return self._wsdl_file

    @property
    def station_codes_file(self):
        return self._station_codes_file

    def from_yaml(self, service_data, filename):
        """Populate common fields, then WSDL-specific file locations."""
        super(ServiceWSDLConfiguration, self).from_yaml(service_data, filename)
        self._station_codes_file = service_data.get("station_codes_file", None)
        wsdl_data = service_data.get("wsdl", None)
        if wsdl_data is not None:
            self._wsdl_file = wsdl_data.get('wsdl_file', None)
|
from ScenarioHelper import *
def main():
    """Scenario data for map r4090 (ed7354): emits the map header, string
    table, battle setups, NPC declarations and the scenario function table
    into r4090.bin.

    NOTE(review): every call below is a ScenarioHelper DSL emitter that
    serializes binary data — order and values are data, not logic, and
    must be preserved exactly.
    """
    SetCodePage("ms932")
    CreateScenaFile(
        "r4090.bin",                # FileName
        "r4090",                    # MapName
        "r4090",                    # Location
        0x00A6,                     # MapIndex
        "ed7354",
        0x00000000,                 # Flags
        ("", "", "", "", "", ""),   # include
        0x00,                       # PlaceNameNumber
        0x26,                       # PreInitFunctionIndex
        b'\x00\xff\xff',            # Unknown_51
        # Information
        [0, 0, -1000, 0, 0, 0, 24000, 500, 30, 45, 0, 360, 0, 0, 0, 0, 0, 1, 166, 0, 0, 0, 1],
    )
    # String table: map name followed by NPC/object display names
    # (Japanese text is game data — do not translate).
    BuildStringList((
        "r4090",                  # 0
        "警備隊員",               # 1
        "警備隊員",               # 2
        "警備隊員",               # 3
        "警備隊員",               # 4
        "警備隊員",               # 5
        "警備隊員",               # 6
        "魔人ヴァルド",           # 7
        "ミレイユ三尉",           # 8
        "国防軍兵士",             # 9
        "国防軍兵士",             # 10
        "国防軍兵士",             # 11
        "国防軍兵士",             # 12
        "国防軍兵士",             # 13
        "国防軍兵士",             # 14
        "国防軍兵士",             # 15
        "国防軍隊長",             # 16
        "ツァイト",               # 17
        "倒木",                   # 18
        "倒木",                   # 19
        "倒木",                   # 20
        "汎用ダミー",             # 21
        "SE制御",                 # 22
        "br4020",                 # 23
        "br4020",                 # 24
    ))
    ATBonus("ATBonus_32C", 100, 5, 0, 5, 0, 5, 0, 2, 5, 0, 0, 0, 2, 0, 0, 0)
    # Battle positions for monsters and the party.
    MonsterBattlePostion("MonsterBattlePostion_3EC", 8, 13, 180)
    MonsterBattlePostion("MonsterBattlePostion_3F0", 0, 0, 180)
    MonsterBattlePostion("MonsterBattlePostion_3F4", 0, 0, 180)
    MonsterBattlePostion("MonsterBattlePostion_3F8", 0, 0, 180)
    MonsterBattlePostion("MonsterBattlePostion_3FC", 0, 0, 180)
    MonsterBattlePostion("MonsterBattlePostion_400", 0, 0, 180)
    MonsterBattlePostion("MonsterBattlePostion_404", 0, 0, 180)
    MonsterBattlePostion("MonsterBattlePostion_408", 0, 0, 180)
    MonsterBattlePostion("MonsterBattlePostion_3CC", 7, 4, 0)
    MonsterBattlePostion("MonsterBattlePostion_3D0", 10, 11, 225)
    MonsterBattlePostion("MonsterBattlePostion_3D4", 4, 7, 90)
    MonsterBattlePostion("MonsterBattlePostion_3D8", 12, 7, 270)
    MonsterBattlePostion("MonsterBattlePostion_3DC", 4, 11, 135)
    MonsterBattlePostion("MonsterBattlePostion_3E0", 11, 4, 315)
    MonsterBattlePostion("MonsterBattlePostion_3E4", 7, 12, 180)
    MonsterBattlePostion("MonsterBattlePostion_3E8", 5, 5, 45)
    # monster count: 0
    # event battle count: 2
    BattleInfo(
        "BattleInfo_450", 0x11C2, 0, 6, 0, 0, 255, 0, 0, "br4020", 0x00000000, 100, 0, 0, 0,
        (
            ("ms88300.dat", 0, 0, 0, 0, 0, 0, 0, "MonsterBattlePostion_3EC", "MonsterBattlePostion_3CC", "ed7455", "ed7453", "ATBonus_32C"),
            (),
            (),
            (),
        )
    )
    BattleInfo(
        "BattleInfo_40C", 0x00E2, 0, 6, 0, 0, 255, 0, 0, "br4020", 0x00000000, 100, 0, 0, 0,
        (
            ("ms88300.dat", 0, 0, 0, 0, 0, 0, 0, "MonsterBattlePostion_3EC", "MonsterBattlePostion_3CC", "ed7455", "ed7453", "ATBonus_32C"),
            (),
            (),
            (),
        )
    )
    # NPC declarations — one per string-table entry above (indices 1..24).
    DeclNpc(0, 0, 0, 0, 453, 0x0, 0, 0, 0, 255, 255, 255, 255, 255, 0)
    DeclNpc(0, 0, 0, 0, 453, 0x0, 0, 0, 0, 255, 255, 255, 255, 255, 0)
    DeclNpc(0, 0, 0, 0, 453, 0x0, 0, 0, 0, 255, 255, 255, 255, 255, 0)
    DeclNpc(0, 0, 0, 0, 453, 0x0, 0, 0, 0, 255, 255, 255, 255, 255, 0)
    DeclNpc(0, 0, 0, 0, 453, 0x0, 0, 0, 0, 255, 255, 255, 255, 255, 0)
    DeclNpc(0, 0, 0, 0, 453, 0x0, 0, 0, 0, 255, 255, 255, 255, 255, 0)
    DeclNpc(0, 0, 0, 0, 197, 0x0, 0, 0, 0, 255, 255, 255, 255, 255, 0)
    DeclNpc(0, 0, 0, 0, 453, 0x0, 0, 0, 0, 255, 255, 255, 255, 255, 0)
    DeclNpc(0, 0, 0, 0, 453, 0x0, 0, 0, 0, 255, 255, 255, 255, 255, 0)
    DeclNpc(0, 0, 0, 0, 453, 0x0, 0, 0, 0, 255, 255, 255, 255, 255, 0)
    DeclNpc(0, 0, 0, 0, 453, 0x0, 0, 0, 0, 255, 255, 255, 255, 255, 0)
    DeclNpc(0, 0, 0, 0, 453, 0x0, 0, 0, 0, 255, 255, 255, 255, 255, 0)
    DeclNpc(0, 0, 0, 0, 453, 0x0, 0, 0, 0, 255, 255, 255, 255, 255, 0)
    DeclNpc(0, 0, 0, 0, 453, 0x0, 0, 0, 0, 255, 255, 255, 255, 255, 0)
    DeclNpc(0, 0, 0, 0, 453, 0x0, 0, 0, 0, 255, 255, 255, 255, 255, 0)
    DeclNpc(0, 0, 0, 0, 453, 0x0, 0, 0, 0, 255, 255, 255, 255, 255, 0)
    DeclNpc(0, 0, 0, 0, 197, 0x0, 0, 0, 0, 255, 255, 255, 255, 255, 0)
    DeclNpc(0, 0, 0, 0, 196, 0x0, 0, 0, 0, 255, 255, 255, 255, 255, 0)
    DeclNpc(0, 0, 0, 0, 196, 0x0, 0, 0, 0, 255, 255, 255, 255, 255, 0)
    DeclNpc(0, 0, 0, 0, 196, 0x0, 0, 0, 0, 255, 255, 255, 255, 255, 0)
    DeclNpc(0, 0, 0, 0, 453, 0x0, 0, 0, 0, 255, 255, 255, 255, 255, 0)
    DeclNpc(0, 0, 0, 0, 197, 0x0, 0, 0, 0, 255, 255, 255, 255, 255, 0)
    ChipFrameInfo(1464, 0) # 0
    # Scenario function table; entries map to the Function_* definitions
    # that follow this function.
    ScpFunction((
        "Function_0_5B8",          # 00, 0
        "Function_1_5F3",          # 01, 1
        "Function_2_62C",          # 02, 2
        "Function_3_1E16",         # 03, 3
        "Function_4_1E3E",         # 04, 4
        "Function_5_1E66",         # 05, 5
        "Function_6_1E8E",         # 06, 6
        "Function_7_1EB6",         # 07, 7
        "Function_8_1EDE",         # 08, 8
        "Function_9_1F06",         # 09, 9
        "Function_10_1F17",        # 0A, 10
        "Function_11_1F37",        # 0B, 11
        "Function_12_1F5B",        # 0C, 12
        "Function_13_1F7B",        # 0D, 13
        "Function_14_1FC1",        # 0E, 14
        "Function_15_3CE9",        # 0F, 15
        "Function_16_3D13",        # 10, 16
        "Function_17_3D3D",        # 11, 17
        "Function_18_3DF3",        # 12, 18
        "Function_19_3EB5",        # 13, 19
        "Function_20_3F61",        # 14, 20
        "Function_21_4007",        # 15, 21
        "Function_22_40A7",        # 16, 22
        "Function_23_4153",        # 17, 23
        "Function_24_4209",        # 18, 24
        "Function_25_433C",        # 19, 25
        "Function_26_435C",        # 1A, 26
        "Function_27_437C",        # 1B, 27
        "Function_28_439C",        # 1C, 28
        "Function_29_43BC",        # 1D, 29
        "Function_30_43DC",        # 1E, 30
        "Function_31_43FC",        # 1F, 31
        "Function_32_4426",        # 20, 32
        "Function_33_444A",        # 21, 33
        "Function_34_446E",        # 22, 34
        "Function_35_4492",        # 23, 35
        "Function_36_44B6",        # 24, 36
        "Function_37_44DA",        # 25, 37
        "Function_38_4509",        # 26, 38
        "Function_39_4532",        # 27, 39
        "Function_40_455B",        # 28, 40
        "Function_41_4584",        # 29, 41
        "Function_42_45AD",        # 2A, 42
        "Function_43_45D6",        # 2B, 43
        "Function_44_45FF",        # 2C, 44
        "Function_45_4622",        # 2D, 45
        "Function_46_6377",        # 2E, 46
        "Function_47_63B9",        # 2F, 47
        "Function_48_63FB",        # 30, 48
        "Function_49_643D",        # 31, 49
        "Function_50_647F",        # 32, 50
        "Function_51_64C1",        # 33, 51
        "Function_52_6517",        # 34, 52
        "Function_53_6559",        # 35, 53
        "Function_54_659B",        # 36, 54
        "Function_55_65A9",        # 37, 55
        "Function_56_65BF",        # 38, 56
        "Function_57_65F5",        # 39, 57
        "Function_58_664A",        # 3A, 58
        "Function_59_674A",        # 3B, 59
        "Function_60_6858",        # 3C, 60
        "Function_61_6877",        # 3D, 61
        "Function_62_68BC",        # 3E, 62
        "Function_63_68FA",        # 3F, 63
        "Function_64_691F",        # 40, 64
        "Function_65_696A",        # 41, 65
        "Function_66_6A3D",        # 42, 66
        "Function_67_6AD3",        # 43, 67
        "Function_68_6AF2",        # 44, 68
        "Function_69_6BDC",        # 45, 69
        "Function_70_6C8F",        # 46, 70
        "Function_71_6D42",        # 47, 71
        "Function_72_6D5F",        # 48, 72
    ))
# Map entry dispatcher: selects which opening event to run based on
# scenario flags. The DSL calls below this one-line def are emitted at
# module level by design (decompiled scenario bytecode).
def Function_0_5B8(): pass

label("Function_0_5B8")

# If flag 0x163.7 is set and 0x164.0 is not, start event 2.
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x163, 7)), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x164, 0)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_5C9")
Event(0, 2)

label("loc_5C9")

# One-shot flags 0x22.0 / 0x22.1 are consumed here and re-recorded as
# 0x0.0 / 0x0.1 before launching event 14 or 45 respectively.
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x22, 0)), scpexpr(EXPR_END)), "loc_5E0")
ClearScenarioFlags(0x22, 0)
SetScenarioFlags(0x0, 0)
Event(0, 14)
Jump("loc_5F2")

label("loc_5E0")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x22, 1)), scpexpr(EXPR_END)), "loc_5F2")
ClearScenarioFlags(0x22, 1)
SetScenarioFlags(0x0, 1)
Event(0, 45)

label("loc_5F2")

Return()

# Function_0_5B8 end
def Function_1_5F3(): pass
# Per-load initialization: writes engine variables (OP_50) depending on which
# of the two transient flags set by Function_0 is present, then clears it.
label("Function_1_5F3")
OP_50(0x51, (scpexpr(EXPR_PUSH_LONG, 0xFF3C4169), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
# Flag 0x0:0 (set when event 14 was dispatched): write 0x246 to var 0x1.
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 0)), scpexpr(EXPR_END)), "loc_616")
OP_50(0x1, (scpexpr(EXPR_PUSH_LONG, 0x246), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
ClearScenarioFlags(0x0, 0)
Jump("loc_62B")
label("loc_616")
# Flag 0x0:1 (set when event 45 was dispatched): write 0 to var 0x1 instead.
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 1)), scpexpr(EXPR_END)), "loc_62B")
OP_50(0x1, (scpexpr(EXPR_PUSH_LONG, 0x0), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
ClearScenarioFlags(0x0, 1)
label("loc_62B")
Return()
# Function_1_5F3 end
def Function_2_62C(): pass
# Event 2: boss-introduction cutscene. The party advances down a forest path,
# a voice taunts them, the boss (Wald, transformed) appears, and the scene
# ends by launching a boss battle whose variant depends on flag 0x20:7.
# NOTE(review): statement order is bytecode-significant; do not reorder.
label("Function_2_62C")
EventBegin(0x0)
FadeToDark(0, 0, -1)
# --- Asset preload: character sprite sheets into chip slots 0x1E-0x2F ---
LoadChrToIndex("chr/ch00050.itc", 0x1E)
LoadChrToIndex("chr/ch00051.itc", 0x1F)
LoadChrToIndex("chr/ch00150.itc", 0x20)
LoadChrToIndex("chr/ch00151.itc", 0x21)
LoadChrToIndex("chr/ch00250.itc", 0x22)
LoadChrToIndex("chr/ch00251.itc", 0x23)
LoadChrToIndex("chr/ch00350.itc", 0x24)
LoadChrToIndex("chr/ch00351.itc", 0x25)
LoadChrToIndex("chr/ch02950.itc", 0x26)
LoadChrToIndex("chr/ch02951.itc", 0x27)
LoadChrToIndex("chr/ch03050.itc", 0x28)
LoadChrToIndex("chr/ch03051.itc", 0x29)
LoadChrToIndex("chr/ch00056.itc", 0x2A)
LoadChrToIndex("chr/ch00156.itc", 0x2B)
LoadChrToIndex("chr/ch00256.itc", 0x2C)
LoadChrToIndex("chr/ch00356.itc", 0x2D)
LoadChrToIndex("chr/ch0295F.itc", 0x2E)
LoadChrToIndex("chr/ch03056.itc", 0x2F)
# --- Effects and voice clips used later in the scene ---
LoadEffect(0x0, "event/ev10008.eff")
LoadEffect(0x1, "event\\ev15010.eff")
LoadEffect(0x2, "event/ev14006.eff")
SoundLoad(2914)
SoundLoad(3571)
SoundLoad(3572)
SoundLoad(3573)
SoundLoad(3574)
SoundLoad(3576)
# --- Stage the boss object/actor (0xE) off to the side, animation primed ---
ClearChrFlags(0xE, 0x80)
OP_78(0x0, 0xE)
OP_49()
SetChrPos(0xE, -5000, 0, 2000, 90)
OP_D5(0xE, 0x0, 0x15F90, 0x0, 0x0)
SetMapObjFlags(0x0, 0x1000)
OP_74(0x0, 0x1E)
OP_71(0x0, 0xB, 0x32, 0x1, 0x20)
OP_52(0xE, 0x7, (scpexpr(EXPR_PUSH_LONG, 0xDAC), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
OP_52(0xE, 0x28, (scpexpr(EXPR_PUSH_LONG, 0x9), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
# Branch is on a constant 0 expression, so the first SetChrPos group below is
# always taken; the loc_7C2 group appears to be a disabled alternate layout.
Jc((scpexpr(EXPR_PUSH_LONG, 0x0), scpexpr(EXPR_END)), "loc_7C2")
SetChrPos(0x101, -2000, 0, 1550, 270)
SetChrPos(0x102, -1600, 0, 2550, 270)
SetChrPos(0x103, -200, 0, 1750, 270)
SetChrPos(0x104, -350, 0, 3100, 270)
SetChrPos(0x109, -400, 0, 750, 270)
SetChrPos(0x105, 850, 0, 2350, 270)
Jump("loc_828")
label("loc_7C2")
SetChrPos(0x101, 11000, 0, 1550, 270)
SetChrPos(0x102, 11400, 0, 2550, 270)
SetChrPos(0x103, 12800, 0, 1750, 270)
SetChrPos(0x104, 12650, 0, 3100, 270)
SetChrPos(0x109, 12600, 0, 750, 270)
SetChrPos(0x105, 13850, 0, 2350, 270)
label("loc_828")
ClearChrFlags(0x4, 0x80)
ClearChrBattleFlags(0x4, 0x8000)
ClearChrFlags(0x5, 0x80)
ClearChrBattleFlags(0x5, 0x8000)
# Constant-0 branch again: always takes the slow fade-in path at loc_855.
Jc((scpexpr(EXPR_PUSH_LONG, 0x0), scpexpr(EXPR_END)), "loc_855")
FadeToBright(0, 0)
Jump("loc_F3B")
label("loc_855")
# --- Opening shot: long pull-back while the party walks forward ---
FadeToBright(1000, 0)
OP_68(-2000, 15700, 2000, 0)
MoveCamera(345, 0, 0, 0)
OP_6E(650, 0)
SetCameraDistance(28000, 0)
OP_68(-2000, 700, 2000, 15000)
MoveCamera(315, 25, 0, 15000)
SetCameraDistance(56000, 15000)
Sleep(7000)
# Staggered walk-forward for all six party members (50ms apart).
def lambda_8B9():
OP_9B(0x0, 0x101, 0x0, 0x32C8, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x101, 0, lambda_8B9)
Sleep(50)
def lambda_8D1():
OP_9B(0x0, 0x102, 0x0, 0x32C8, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x102, 0, lambda_8D1)
Sleep(50)
def lambda_8E9():
OP_9B(0x0, 0x103, 0x0, 0x32C8, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x103, 0, lambda_8E9)
Sleep(50)
def lambda_901():
OP_9B(0x0, 0x104, 0x0, 0x32C8, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x104, 0, lambda_901)
Sleep(50)
def lambda_919():
OP_9B(0x0, 0x109, 0x0, 0x32C8, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x109, 0, lambda_919)
Sleep(50)
def lambda_931():
OP_9B(0x0, 0x105, 0x0, 0x32C8, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x105, 0, lambda_931)
WaitChrThread(0x101, 0)
WaitChrThread(0x102, 0)
WaitChrThread(0x103, 0)
WaitChrThread(0x104, 0)
WaitChrThread(0x109, 0)
WaitChrThread(0x105, 0)
OP_6F(0x79)
OP_0D()
# --- Party stops and surveys the dead end (dialogue C0001-C0007) ---
Fade(1000)
OP_68(-1000, 700, 2000, 0)
MoveCamera(315, 25, 0, 0)
OP_6E(650, 0)
SetCameraDistance(17500, 0)
SetCameraDistance(17000, 2000)
OP_6F(0x79)
OP_0D()
#C0001
ChrTalk(
0x105,
"#10308F#12P……開けた場所に出たね。\x02",
)
CloseMessageWindow()
#C0002
ChrTalk(
0x101,
(
"#00001F#5P俺もこのあたりまでしか\x01",
"訓練で来なかったけど……\x02",
)
)
CloseMessageWindow()
OP_93(0x101, 0x13B, 0x1F4)
Sleep(500)
OP_93(0x101, 0xE1, 0x1F4)
Sleep(500)
#C0003
ChrTalk(
0x101,
(
"#00005F#5Pあれ……?\x01",
"ここって行き止まりだったか?\x02",
)
)
CloseMessageWindow()
#C0004
ChrTalk(
0x109,
(
"#10101F#6Pいえ、まだこの先にも\x01",
"獣道は続いていたはずです。\x02",
)
)
CloseMessageWindow()
#C0005
ChrTalk(
0x104,
(
"#00303F#11Pどうやら倒木が\x01",
"道を塞いだみてぇだが……\x02\x03",
"#00301F倒れたのは一月くらい前か。\x02",
)
)
CloseMessageWindow()
#C0006
ChrTalk(
0x102,
(
"#00108F#12Pで、でもそれじゃあ\x01",
"例の魔獣は一体どこに?\x02",
)
)
CloseMessageWindow()
#C0007
ChrTalk(
0x103,
(
"#00208F#12P……何かの気配は\x01",
"感じるんですが……\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
OP_5A()
# --- Unseen laughter: BGM stops, everyone startles (balloon icons) ---
Sound(3574, 255, 100, 0) #voice#Wald
Sound(833, 0, 40, 0)
StopBGM(0xFA0)
Fade(1500)
SetCameraDistance(19000, 1000)
OP_6F(0x79)
OP_0D()
OP_63(0x101, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
OP_63(0x105, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
Sleep(50)
OP_63(0x102, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
OP_63(0x103, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
Sleep(50)
OP_63(0x104, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
OP_63(0x109, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
Sleep(1000)
OP_93(0x101, 0x10E, 0x1F4)
#C0008
ChrTalk(
0x101,
"#00011F#5P!?\x02",
)
CloseMessageWindow()
#C0009
ChrTalk(
0x104,
"#00305F#11P笑い声だと……!?\x02",
)
CloseMessageWindow()
OP_57(0x0)
OP_5A()
# --- Anonymous taunts from the "eerie voice" (voiced lines 3571/3572) ---
OP_C9(0x0, 0x80000000)
SetMessageWindowPos(30, 15, -1, -1)
SetChrName("不気味な声")
#A0010
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x2),
"#3571V#40W#53Aクク……揃いも揃って\x01",
"のこのこと現れやがったか……\x02",
)
)
#Auto
CloseMessageWindow()
OP_57(0x0)
OP_5A()
Sleep(300)
SetChrName("不気味な声")
#A0011
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x2),
"#3572V#40W#30A相変わらずメデたい連中だぜ……\x07\x00\x02",
)
)
#Auto
CloseMessageWindow()
OP_57(0x0)
OP_5A()
OP_C9(0x1, 0x80000000)
SetMessageWindowPos(14, 280, 60, 3)
# Swap BGM to the tense track and slowly push the camera in.
WaitBGM()
Sleep(10)
PlayBGM("ed7582", 0)
Fade(500)
OP_68(-1000, 1000, 2000, 0)
MoveCamera(225, 30, 0, 0)
OP_6E(650, 0)
SetCameraDistance(35000, 0)
OP_68(-1000, 1000, 2000, 10000)
MoveCamera(270, 15, 0, 10000)
SetCameraDistance(16650, 10000)
OP_0D()
#C0012
ChrTalk(
0x101,
"#00013F#5Pこ、これは……\x02",
)
CloseMessageWindow()
#C0013
ChrTalk(
0x109,
(
"#10107F#5Pまさか……\x01",
"魔獣を操ってた犯人……?\x02",
)
)
CloseMessageWindow()
#C0014
ChrTalk(
0x103,
"#00208F#6Pい、いえ……それよりも……\x02",
)
CloseMessageWindow()
#C0015
ChrTalk(
0x102,
(
"#00101F#5Pこの声……\x01",
"どこかで聞いた事があるような……\x02",
)
)
CloseMessageWindow()
#C0016
ChrTalk(
0x105,
"#10310F#6P……………………………………\x02",
)
CloseMessageWindow()
#C0017
ChrTalk(
0x104,
"#00310F#12Pおいおい、まさか──\x02",
)
CloseMessageWindow()
OP_57(0x0)
OP_5A()
# --- Wazy senses the attack and yells a warning (voiced line 2914) ---
Fade(500)
BlurSwitch(0x1, 0xBBFFFFFF, 0x0, 0x0, 0x0)
OP_68(-1000, 1000, 2000, 500)
MoveCamera(270, 15, 0, 500)
SetCameraDistance(15650, 500)
OP_6F(0x79)
CancelBlur(0)
OP_0D()
OP_63(0x105, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
Sleep(1000)
OP_C9(0x0, 0x80000000)
OP_82(0x64, 0x0, 0xBB8, 0x12C)
#C0018
ChrTalk(
0x105,
"#10307F#2914V#6P#4S#11A来る──下がれっ!\x02",
)
#Auto
CloseMessageWindow()
OP_C9(0x1, 0x80000000)
OP_57(0x0)
OP_5A()
#C0019
ChrTalk(
0x101,
"#00010F#5P!!#8A\x02",
)
#Auto
CloseMessageWindow()
label("loc_F3B")
# --- Ambush: boss leaps in from above, party dives away (threads 3-8) ---
# Both entry paths (fast-skip and full intro) converge here.
Fade(500)
SetChrChipByIndex(0x101, 0x1E)
SetChrSubChip(0x101, 0x0)
SetChrChipByIndex(0x102, 0x20)
SetChrSubChip(0x102, 0x0)
SetChrChipByIndex(0x103, 0x22)
SetChrSubChip(0x103, 0x0)
SetChrChipByIndex(0x104, 0x24)
SetChrSubChip(0x104, 0x0)
SetChrChipByIndex(0x109, 0x26)
SetChrSubChip(0x109, 0x0)
SetChrChipByIndex(0x105, 0x28)
SetChrSubChip(0x105, 0x0)
ClearMapObjFlags(0x0, 0x4)
SetChrPos(0xE, -22000, 17000, 15000, 135)
OP_D5(0xE, 0x0, 0x20F58, 0x0, 0x0)
OP_74(0x0, 0xA)
OP_71(0x0, 0x393, 0x3A2, 0x1, 0x8)
Sound(3552, 255, 100, 0) #voice#Wald
Sound(200, 0, 50, 0)
Sound(251, 0, 100, 0)
OP_68(-22000, 22000, 15000, 0)
MoveCamera(315, 0, 0, 0)
OP_6E(650, 0)
SetCameraDistance(10000, 0)
OP_68(-3000, 1300, 2000, 2300)
MoveCamera(315, 20, 0, 2300)
SetCameraDistance(18500, 2300)
Sound(834, 0, 100, 0)
def lambda_101A():
OP_9D(0xFE, 0xFFFFE4A8, 0xFFFFFE70, 0x1388, 0xC8, 0x5DC)
ExitThread()
QueueWorkItem(0xE, 1, lambda_101A)
Sleep(1500)
Sound(893, 0, 100, 0)
OP_74(0x0, 0xF)
OP_71(0x0, 0x3A3, 0x3A7, 0x1, 0x8)
Sound(3544, 255, 100, 0) #voice#Wald
Sleep(300)
Sound(833, 0, 100, 0)
Sound(248, 0, 100, 0)
# Impact effects + screen shake as the boss lands.
PlayEffect(0x0, 0xFF, 0xE, 0x5, 0, 1000, 3000, 0, 0, 0, 1500, 2500, 1500, 0xFF, 0, 0, 0, 0)
PlayEffect(0x1, 0xFF, 0xE, 0x5, 0, 2500, 1500, 0, 0, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
OP_82(0x1F4, 0x12C, 0x1388, 0x7D0)
BlurSwitch(0x0, 0xBBFFFFFF, 0x0, 0x1, 0xA)
CancelBlur(500)
Sound(531, 0, 100, 0)
Sound(805, 0, 100, 0)
# Each party member runs their evade routine (Functions 3-8).
BeginChrThread(0x101, 3, 0, 3)
BeginChrThread(0x102, 3, 0, 4)
BeginChrThread(0x103, 3, 0, 5)
BeginChrThread(0x104, 3, 0, 6)
BeginChrThread(0x109, 3, 0, 7)
BeginChrThread(0x105, 3, 0, 8)
WaitChrThread(0x101, 3)
WaitChrThread(0x102, 3)
WaitChrThread(0x103, 3)
WaitChrThread(0x104, 3)
WaitChrThread(0x109, 3)
WaitChrThread(0x105, 3)
WaitChrThread(0xE, 1)
OP_6F(0x79)
OP_0D()
# --- Close-up reveal of the "oni" figure (dialogue C0020-A0023) ---
Sleep(1000)
Fade(1000)
OP_82(0x96, 0x5A, 0x1388, 0x3E8)
SetChrPos(0xE, -6000, 0, 0, 135)
OP_D5(0xE, 0x0, 0x20F58, 0x0, 0x0)
BeginChrThread(0xE, 3, 0, 9)
SetChrFlags(0xE, 0x1)
OP_68(-6000, 2300, 300, 0)
MoveCamera(200, 18, 0, 0)
OP_6E(650, 0)
SetCameraDistance(7800, 0)
BlurSwitch(0x0, 0xBBFFFFFF, 0x0, 0x0, 0xA)
OP_68(-1350, 1300, 300, 3000)
MoveCamera(115, 18, 0, 3000)
SetCameraDistance(12800, 3000)
Sleep(500)
CancelBlur(1000)
OP_6F(0x79)
OP_0D()
#C0020
ChrTalk(
0x101,
"#00007F#5Pな──!?\x02",
)
CloseMessageWindow()
#C0021
ChrTalk(
0x109,
"#10107F#5Pお、鬼……!?\x02",
)
CloseMessageWindow()
OP_57(0x0)
OP_5A()
OP_C9(0x0, 0x80000000)
SetMessageWindowPos(280, 150, -1, -1)
SetChrName("鬼")
#A0022
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x2),
"#3573V#50W#20A……クックックッ…………\x02",
)
)
#Auto
CloseMessageWindow()
Sleep(500)
OP_82(0x64, 0x0, 0x3E8, 0x3E8)
SetChrName("鬼")
#A0023
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x2),
"#3574V#40W#4S#26A………カハハハハハハッ………\x07\x00\x02",
)
)
#Auto
CloseMessageWindow()
OP_57(0x0)
OP_5A()
SetMessageWindowPos(14, 280, 60, 3)
OP_C9(0x1, 0x80000000)
# --- Transformation display: party hidden, boss object animates alone ---
Fade(500)
Sound(889, 0, 40, 0)
SetChrFlags(0x101, 0x8)
SetChrFlags(0x102, 0x8)
SetChrFlags(0x103, 0x8)
SetChrFlags(0x104, 0x8)
SetChrFlags(0x109, 0x8)
SetChrFlags(0x105, 0x8)
SetChrPos(0xE, -5000, 0, 2000, 90)
OP_D5(0xE, 0x0, 0x15F90, 0x0, 0x0)
EndChrThread(0xE, 0x3)
OP_74(0x0, 0x5)
OP_68(-5000, 2100, 2000, 0)
MoveCamera(180, 51, 0, 0)
OP_6E(650, 0)
SetCameraDistance(13650, 0)
OP_68(-5000, 4000, 2000, 5000)
MoveCamera(270, -10, 0, 5000)
SetCameraDistance(7350, 5000)
BlurSwitch(0x1, 0xBBFFFFFF, 0x0, 0x0, 0x0)
OP_71(0x0, 0x3DE, 0x3F2, 0x1, 0x8)
OP_79(0x0)
CancelBlur(0)
OP_74(0x0, 0x1E)
OP_71(0x0, 0xB, 0x32, 0x1, 0x8)
OP_79(0x0)
OP_74(0x0, 0xF)
OP_71(0x0, 0x41A, 0x442, 0x1, 0x8)
Sound(892, 0, 100, 0)
Sleep(1300)
Sound(892, 0, 100, 0)
OP_79(0x0)
OP_74(0x0, 0xA)
OP_71(0x0, 0xB, 0x32, 0x1, 0x20)
OP_6F(0x79)
OP_0D()
Sleep(300)
# --- Party reactions + Lloyd identifies the boss as Wald (A0024-A0028) ---
SetMessageWindowPos(30, 160, -1, -1)
#A0024
AnonymousTalk(
0x102,
"#00107F………っ…………!\x02",
)
CloseMessageWindow()
SetMessageWindowPos(280, 160, -1, -1)
#A0025
AnonymousTalk(
0x103,
"#00201Fこ、これは……\x02",
)
CloseMessageWindow()
SetMessageWindowPos(30, 160, -1, -1)
#A0026
AnonymousTalk(
0x109,
(
"#10110Fま、まさかグノーシスで\x01",
"魔人化したのと同じ……!?\x02",
)
)
CloseMessageWindow()
SetMessageWindowPos(280, 160, -1, -1)
#A0027
AnonymousTalk(
0x104,
"#00311Fてめえ……もしかして……\x02",
)
CloseMessageWindow()
Sleep(500)
OP_82(0x64, 0x0, 0xBB8, 0x12C)
SetMessageWindowPos(30, 160, -1, -1)
#A0028
AnonymousTalk(
0x101,
(
"#00007Fヴァルド──\x01",
"あんたなのか!?\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
OP_5A()
OP_C9(0x0, 0x80000000)
Sleep(300)
# Boss laughs (voiced 3576), then a burst animation via thread function 10.
SetMessageWindowPos(-1, 140, -1, -1)
SetChrName("鬼")
#A0029
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x2),
"#51A#3S#3576V#50Wククク…#1500W…\x01",
"#40W#5Sハハハハハハハハハハハッ!\x07\x00\x02",
)
)
#Auto
Sleep(1600)
Fade(300)
Sound(196, 0, 70, 0)
Sound(200, 0, 60, 0)
Sound(833, 0, 100, 0)
OP_82(0x64, 0xC8, 0xBB8, 0x7D0)
BlurSwitch(0x0, 0xBBFFFFFF, 0x0, 0x0, 0x0)
CancelBlur(1200)
OP_68(-5000, 3500, 2000, 1000)
MoveCamera(270, 40, 0, 1000)
SetCameraDistance(16000, 1000)
BeginChrThread(0xE, 3, 0, 10)
WaitChrThread(0xE, 3)
OP_82(0x1F4, 0x0, 0xBB8, 0x4B0)
CloseMessageWindow()
OP_57(0x0)
OP_5A()
SetMessageWindowPos(14, 280, 60, 3)
OP_6F(0x79)
OP_C9(0x1, 0x80000000)
# --- Party shown again; extended Wald conversation (A0030-C0044) ---
Fade(500)
ClearChrFlags(0x101, 0x8)
ClearChrFlags(0x102, 0x8)
ClearChrFlags(0x103, 0x8)
ClearChrFlags(0x104, 0x8)
ClearChrFlags(0x109, 0x8)
ClearChrFlags(0x105, 0x8)
EndChrThread(0xE, 0x3)
BeginChrThread(0xE, 3, 0, 11)
OP_68(-1740, 2600, 2210, 0)
MoveCamera(295, 3, 0, 0)
OP_6E(600, 0)
SetCameraDistance(14050, 0)
SetCameraDistance(12750, 1500)
OP_6F(0x79)
OP_0D()
SetMessageWindowPos(10, 80, -1, -1)
#A0030
AnonymousTalk(
0xE,
(
scpstr(SCPSTR_CODE_COLOR, 0x2),
"#30Wバニングスにオルランド……\x01",
"ずいぶん久しぶりじゃねぇか。\x02\x03",
"#30Wクク……\x01",
"それにワジ……テメェともな。\x07\x00\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
OP_5A()
SetMessageWindowPos(14, 280, 60, 3)
#C0031
ChrTalk(
0x105,
(
"#10306F#12P#Nああ……そうだね。\x02\x03",
"#10308F君のファッションの\x01",
"悪趣味さは知っていたけど……\x02\x03",
"#10310Fさすがにそれ#4R㈲ ㈲#は、幾らなんでも\x01",
"やりすぎなんじゃないの……?\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
OP_5A()
SetMessageWindowPos(40, 80, -1, -1)
#A0032
AnonymousTalk(
0xE,
(
scpstr(SCPSTR_CODE_COLOR, 0x2),
"#11P#30Wクク……抜かせ。\x07\x00\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
OP_5A()
SetMessageWindowPos(14, 280, 60, 3)
#C0033
ChrTalk(
0x101,
"#00010F#4P#Nちょ、ちょっと待ってくれ!\x02",
)
CloseMessageWindow()
OP_57(0x0)
OP_5A()
#C0034
ChrTalk(
0x109,
(
"#10107F#4P#Nそ、それじゃああなたが\x01",
"列車を脱線させた……!?\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
OP_5A()
SetMessageWindowPos(10, 80, -1, -1)
#A0035
AnonymousTalk(
0xE,
(
scpstr(SCPSTR_CODE_COLOR, 0x2),
"#30Wクク……何を判りきったことを\x01",
"わざわざ確認してやがる……?\x02\x03",
"#30Wそこらの魔獣ごときに\x01",
"あんな真似ができるわけねぇだろ……\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
OP_5A()
SetMessageWindowPos(14, 280, 60, 3)
# --- Power-display effect + name reveal shout (A0036) ---
Fade(500)
OP_68(-5000, 4200, 2000, 0)
MoveCamera(270, 0, 0, 0)
OP_6E(500, 0)
SetCameraDistance(7000, 0)
Sound(892, 0, 100, 0)
Sound(200, 0, 80, 0)
PlayEffect(0x2, 0xFF, 0xE, 0x5, 0, 500, 0, 0, 0, 0, 2000, 2500, 2000, 0xFF, 0, 0, 0, 0)
BlurSwitch(0x0, 0xBBFFFFFF, 0x0, 0x0, 0x0)
OP_68(-500, 2300, 2000, 1000)
MoveCamera(270, 14, 0, 1000)
SetCameraDistance(23000, 1000)
EndChrThread(0xE, 0x3)
BeginChrThread(0xE, 3, 0, 12)
Sleep(500)
CancelBlur(500)
OP_6F(0x79)
OP_0D()
OP_82(0x1F4, 0x0, 0xBB8, 0x1F4)
SetMessageWindowPos(-1, 110, -1, -1)
#A0036
AnonymousTalk(
0xE,
(
scpstr(SCPSTR_CODE_COLOR, 0x2),
"#4S新たな“チカラ”を手に入れた\x01",
"このヴァルド・ヴァレス以外になアアッ!\x07\x00\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
OP_5A()
SetMessageWindowPos(14, 280, 60, 3)
# --- Party reacts to his aura; Wald issues the challenge (C0037-C0044) ---
Fade(500)
OP_68(-2000, 2500, 2000, 0)
MoveCamera(60, 24, 0, 0)
OP_6E(600, 0)
SetCameraDistance(14000, 0)
MoveCamera(50, 24, 0, 10000)
OP_0D()
#C0037
ChrTalk(
0x103,
"#00210F#11Pっ……\x02",
)
CloseMessageWindow()
#C0038
ChrTalk(
0x102,
"#00108F#11Pなんて鬼気……\x02",
)
CloseMessageWindow()
#C0039
ChrTalk(
0x104,
"#00310F#11P洒落になってねぇぞ……\x02",
)
CloseMessageWindow()
SetMessageWindowPos(10, 150, -1, -1)
#A0040
AnonymousTalk(
0xE,
(
scpstr(SCPSTR_CODE_COLOR, 0x2),
"#30Wさてと、わざわざここまで\x01",
"俺を追ってきてくれたんだ……\x02",
)
)
CloseMessageWindow()
#A0041
AnonymousTalk(
0xE,
(
scpstr(SCPSTR_CODE_COLOR, 0x2),
"#30Wとっとと始めるとしようか……?\x02",
)
)
CloseMessageWindow()
#A0042
AnonymousTalk(
0xE,
(
scpstr(SCPSTR_CODE_COLOR, 0x2),
"#30Wこの俺がどれだけ“上”か……\x01",
"骨の髄まで判らせるためによォ……?\x07\x00\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
OP_5A()
SetMessageWindowPos(14, 280, 60, 3)
#C0043
ChrTalk(
0x101,
"#00010F#11Pくっ……!\x02",
)
CloseMessageWindow()
#C0044
ChrTalk(
0x105,
"#10301F#11P……どうやら本気みたいだね。\x02",
)
CloseMessageWindow()
OP_57(0x0)
OP_5A()
# --- Final taunt, battle BGM starts, then the boss battle launches ---
Fade(500)
BlurSwitch(0x0, 0xBBFFFFFF, 0x0, 0x0, 0x0)
OP_68(-5000, 3200, 2000, 0)
MoveCamera(270, 0, 0, 0)
OP_6E(500, 0)
SetCameraDistance(18250, 0)
OP_68(-5000, 4500, 2000, 2000)
SetCameraDistance(8250, 2000)
MoveCamera(270, 15, 0, 2000)
StopBGM(0xFA0)
EndChrThread(0xE, 0x3)
Sound(889, 0, 70, 0)
BeginChrThread(0xE, 3, 0, 13)
Sleep(500)
StopSound(889, 1000, 70)
CancelBlur(500)
Sleep(1500)
SetMessageWindowPos(260, 160, -1, -1)
#A0045
AnonymousTalk(
0xE,
(
scpstr(SCPSTR_CODE_COLOR, 0x2),
"#30Wクク、てめぇらごとき\x01",
"今さら本気を出すまでもねぇ……\x02\x03",
"#30Wせいぜい優しく撫でてやるから\x01",
"死なない程度に味わいやがれよ……?\x02",
)
)
CloseMessageWindow()
OP_6F(0x79)
Fade(500)
BlurSwitch(0x1, 0xBBFFFFFF, 0x0, 0x0, 0x0)
SetCameraDistance(6730, 500)
OP_6F(0x79)
CancelBlur(0)
OP_0D()
WaitBGM()
Sleep(10)
PlayBGM("ed7455", 0)
OP_82(0x1F4, 0x0, 0xBB8, 0x1F4)
#A0046
AnonymousTalk(
0xE,
(
scpstr(SCPSTR_CODE_COLOR, 0x2),
"#4S──このオレが手に入れた\x01",
"正真正銘の“チカラ”をなああッ!!\x07\x00\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
OP_5A()
SetMessageWindowPos(14, 280, 60, 3)
OP_68(-5000, 5800, 2000, 0)
MoveCamera(270, 5, 0, 0)
OP_6E(500, 0)
SetCameraDistance(15000, 0)
OP_68(-5000, 1500, 2000, 500)
SetCameraDistance(23000, 500)
EndChrThread(0xE, 0x3)
Sound(893, 0, 100, 0)
OP_74(0x0, 0xA)
OP_71(0x0, 0x3A3, 0x3A7, 0x1, 0x8)
Sleep(300)
Sound(833, 0, 100, 0)
Sound(196, 0, 100, 0)
OP_82(0x0, 0x64, 0x1388, 0x1F4)
BlurSwitch(0x1F4, 0xBBFFFFFF, 0x0, 0x1, 0xF)
Sleep(500)
SetScenarioFlags(0x0, 0)
# Battle variant selection: flag 0x20:7 picks BattleInfo_450, else _40C.
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x20, 7)), scpexpr(EXPR_END)), "loc_1DF8")
Battle("BattleInfo_450", 0x30200011, 0x0, 0x100, 0x13, 0xFF)
Jump("loc_1E08")
label("loc_1DF8")
Battle("BattleInfo_40C", 0x30200011, 0x0, 0x100, 0x13, 0xFF)
label("loc_1E08")
# After the battle, hand off to the post-battle event (Function 14).
FadeToDark(0, 0, -1)
Call(0, 14)
Return()
# Function_2_62C end
def Function_3_1E16(): pass
# Evade routine for 0x101 (run on its thread 3 during the ambush):
# switch to the "knocked back" chip (0x2A), hop backwards with OP_9C,
# then restore the standing chip (0x1E).
label("Function_3_1E16")
SetChrChipByIndex(0xFE, 0x2A)
SetChrSubChip(0xFE, 0x2)
OP_9C(0xFE, 0x9C4, 0x0, 0x0, 0x1F4, 0xDAC)
SetChrChipByIndex(0xFE, 0x1E)
SetChrSubChip(0xFE, 0x0)
Return()
# Function_3_1E16 end
def Function_4_1E3E(): pass
# Evade routine for 0x102: same pattern as Function_3 with this
# character's chip pair (0x2B -> 0x20) and its own hop parameters.
label("Function_4_1E3E")
SetChrChipByIndex(0xFE, 0x2B)
SetChrSubChip(0xFE, 0x2)
OP_9C(0xFE, 0x9C4, 0x0, 0x0, 0x258, 0xBB8)
SetChrChipByIndex(0xFE, 0x20)
SetChrSubChip(0xFE, 0x0)
Return()
# Function_4_1E3E end
def Function_5_1E66(): pass
# Evade routine for 0x103: chip pair 0x2C -> 0x22.
label("Function_5_1E66")
SetChrChipByIndex(0xFE, 0x2C)
SetChrSubChip(0xFE, 0x2)
OP_9C(0xFE, 0x9C4, 0x0, 0x0, 0x2BC, 0x9C4)
SetChrChipByIndex(0xFE, 0x22)
SetChrSubChip(0xFE, 0x0)
Return()
# Function_5_1E66 end
def Function_6_1E8E(): pass
# Evade routine for 0x104: chip pair 0x2D -> 0x24.
label("Function_6_1E8E")
SetChrChipByIndex(0xFE, 0x2D)
SetChrSubChip(0xFE, 0x2)
OP_9C(0xFE, 0x9C4, 0x0, 0x0, 0x1F4, 0xDAC)
SetChrChipByIndex(0xFE, 0x24)
SetChrSubChip(0xFE, 0x0)
Return()
# Function_6_1E8E end
def Function_7_1EB6(): pass
# Evade routine for 0x109: chip pair 0x2E -> 0x26.
label("Function_7_1EB6")
SetChrChipByIndex(0xFE, 0x2E)
SetChrSubChip(0xFE, 0x2)
OP_9C(0xFE, 0x9C4, 0x0, 0x0, 0x258, 0xBB8)
SetChrChipByIndex(0xFE, 0x26)
SetChrSubChip(0xFE, 0x0)
Return()
# Function_7_1EB6 end
def Function_8_1EDE(): pass
# Evade routine for 0x105: chip pair 0x2F -> 0x28.
label("Function_8_1EDE")
SetChrChipByIndex(0xFE, 0x2F)
SetChrSubChip(0xFE, 0x2)
OP_9C(0xFE, 0x9C4, 0x0, 0x0, 0x2BC, 0x9C4)
SetChrChipByIndex(0xFE, 0x28)
SetChrSubChip(0xFE, 0x0)
Return()
# Function_8_1EDE end
def Function_9_1F06(): pass
# Map-object animation thread: play frames 0x3CA-0x3DE looping (flag 0x20).
label("Function_9_1F06")
OP_74(0x0, 0x5)
OP_71(0x0, 0x3CA, 0x3DE, 0x1, 0x20)
Return()
# Function_9_1F06 end
def Function_10_1F17(): pass
# Map-object animation thread: frames 0x5B-0x78 once (OP_79 waits for it),
# then hold on frames 0x65-0x78 with the loop flag.
label("Function_10_1F17")
OP_74(0x0, 0x14)
OP_71(0x0, 0x5B, 0x78, 0x1, 0x8)
OP_79(0x0)
OP_71(0x0, 0x65, 0x78, 0x0, 0x20)
Return()
# Function_10_1F17 end
def Function_11_1F37(): pass
# Map-object animation thread: play frames 0x78-0x82 once, then switch
# back to the idle loop (frames 0xB-0x32).
label("Function_11_1F37")
OP_74(0x0, 0xF)
OP_71(0x0, 0x78, 0x82, 0x1, 0x8)
OP_79(0x0)
OP_74(0x0, 0xA)
OP_71(0x0, 0xB, 0x32, 0x1, 0x20)
Return()
# Function_11_1F37 end
def Function_12_1F5B(): pass
# Map-object animation thread: frames 0x3F2-0x3FC once, then loop
# frames 0x3FC-0x410.
label("Function_12_1F5B")
OP_74(0x0, 0xA)
OP_71(0x0, 0x3F2, 0x3FC, 0x1, 0x8)
OP_79(0x0)
OP_71(0x0, 0x3FC, 0x410, 0x1, 0x20)
Return()
# Function_12_1F5B end
def Function_13_1F7B(): pass
# Map-object animation thread: play frames 0x335-0x339 once, then
# ping-pong frames 0x339<->0x33B forever. The Jc condition is the constant
# 1, so the loop is deliberately infinite; the caller stops this thread
# with EndChrThread.
label("Function_13_1F7B")
OP_74(0x0, 0x5)
OP_71(0x0, 0x335, 0x339, 0x1, 0x8)
OP_79(0x0)
label("loc_1F8E")
Jc((scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_END)), "loc_1FC0")
OP_74(0x0, 0x1)
OP_71(0x0, 0x339, 0x33B, 0x1, 0x8)
OP_79(0x0)
OP_71(0x0, 0x33B, 0x339, 0x1, 0x8)
OP_79(0x0)
Jump("loc_1F8E")
label("loc_1FC0")
Return()
# Function_13_1F7B end
def Function_14_1FC1(): pass
label("Function_14_1FC1")
EventBegin(0x0)
FadeToDark(0, 0, -1)
LoadChrToIndex("chr/ch00050.itc", 0x1E)
LoadChrToIndex("chr/ch00051.itc", 0x1F)
LoadChrToIndex("chr/ch00150.itc", 0x20)
LoadChrToIndex("chr/ch00151.itc", 0x21)
LoadChrToIndex("chr/ch00250.itc", 0x22)
LoadChrToIndex("chr/ch00251.itc", 0x23)
LoadChrToIndex("chr/ch00350.itc", 0x24)
LoadChrToIndex("chr/ch00351.itc", 0x25)
LoadChrToIndex("chr/ch02950.itc", 0x26)
LoadChrToIndex("chr/ch02951.itc", 0x27)
LoadChrToIndex("chr/ch03050.itc", 0x28)
LoadChrToIndex("chr/ch03051.itc", 0x29)
LoadChrToIndex("chr/ch00056.itc", 0x2A)
LoadChrToIndex("chr/ch00156.itc", 0x2B)
LoadChrToIndex("chr/ch00256.itc", 0x2C)
LoadChrToIndex("chr/ch00356.itc", 0x2D)
LoadChrToIndex("chr/ch0295F.itc", 0x2E)
LoadChrToIndex("chr/ch03056.itc", 0x2F)
LoadChrToIndex("chr/ch32650.itc", 0x30)
LoadChrToIndex("chr/ch32651.itc", 0x31)
LoadChrToIndex("chr/ch32657.itc", 0x32)
LoadChrToIndex("chr/ch32653.itc", 0x33)
LoadChrToIndex("chr/ch31250.itc", 0x34)
LoadChrToIndex("chr/ch31251.itc", 0x35)
LoadChrToIndex("chr/ch31252.itc", 0x36)
LoadChrToIndex("chr/ch31253.itc", 0x37)
LoadChrToIndex("apl/ch51444.itc", 0x38)
SoundLoad(3575)
CreatePortrait(0, 0, 8, 480, 264, 0, 0, 512, 256, 0, 0, 480, 256, 0xFFFFFF, 0x0, "c_vis416.itp")
LoadEffect(0x0, "battle/btgun00.eff")
LoadEffect(0x1, "event/ev606_00.eff")
LoadEffect(0x2, "battle/cr326000.eff")
LoadEffect(0x3, "event\\ev15010.eff")
OP_7D(0xFF, 0xBE, 0xB4, 0x0, 0x0)
OP_11(0xFF, 0xCF, 0xB5, 0x5A, 0x96, 0x0)
OP_50(0x51, (scpexpr(EXPR_PUSH_LONG, 0xFFFFCFB5), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Jc((scpexpr(EXPR_PUSH_VALUE_INDEX, 0x3), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_2157")
SetChrChipByIndex(0x101, 0x2A)
SetChrSubChip(0x101, 0x0)
SetChrChipByIndex(0x102, 0x2B)
SetChrSubChip(0x102, 0x0)
SetChrChipByIndex(0x103, 0x2C)
SetChrSubChip(0x103, 0x0)
SetChrChipByIndex(0x104, 0x2D)
SetChrSubChip(0x104, 0x0)
SetChrChipByIndex(0x109, 0x2E)
SetChrSubChip(0x109, 0x0)
SetChrChipByIndex(0x105, 0x2F)
SetChrSubChip(0x105, 0x0)
Jump("loc_21CA")
label("loc_2157")
Jc((scpexpr(EXPR_PUSH_VALUE_INDEX, 0x3), scpexpr(EXPR_PUSH_LONG, 0x3), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_219A")
SetChrChipByIndex(0x101, 0x1E)
SetChrSubChip(0x101, 0x0)
SetChrChipByIndex(0x102, 0x2B)
SetChrSubChip(0x102, 0x0)
SetChrChipByIndex(0x103, 0x2C)
SetChrSubChip(0x103, 0x0)
SetChrChipByIndex(0x104, 0x24)
SetChrSubChip(0x104, 0x0)
SetChrChipByIndex(0x109, 0x2E)
SetChrSubChip(0x109, 0x0)
SetChrChipByIndex(0x105, 0x28)
SetChrSubChip(0x105, 0x0)
Jump("loc_21CA")
label("loc_219A")
SetChrChipByIndex(0x101, 0x1E)
SetChrSubChip(0x101, 0x0)
SetChrChipByIndex(0x102, 0x20)
SetChrSubChip(0x102, 0x0)
SetChrChipByIndex(0x103, 0x22)
SetChrSubChip(0x103, 0x0)
SetChrChipByIndex(0x104, 0x24)
SetChrSubChip(0x104, 0x0)
SetChrChipByIndex(0x109, 0x26)
SetChrSubChip(0x109, 0x0)
SetChrChipByIndex(0x105, 0x28)
SetChrSubChip(0x105, 0x0)
label("loc_21CA")
SetChrPos(0x101, 1000, 0, 1550, 270)
SetChrPos(0x102, 1400, 0, 2550, 270)
SetChrPos(0x103, 2800, 0, 1750, 270)
SetChrPos(0x104, 2650, 0, 3100, 270)
SetChrPos(0x109, 2600, 0, 750, 270)
SetChrPos(0x105, 3850, 0, 2350, 270)
ClearChrFlags(0x4, 0x80)
ClearChrBattleFlags(0x4, 0x8000)
ClearChrFlags(0x5, 0x80)
ClearChrBattleFlags(0x5, 0x8000)
ClearChrFlags(0xF, 0x80)
SetChrChipByIndex(0xF, 0x30)
SetChrSubChip(0xF, 0x0)
SetChrFlags(0xF, 0x8000)
ClearChrFlags(0x8, 0x80)
SetChrChipByIndex(0x8, 0x34)
SetChrSubChip(0x8, 0x0)
SetChrFlags(0x8, 0x8000)
ClearChrFlags(0x9, 0x80)
SetChrChipByIndex(0x9, 0x34)
SetChrSubChip(0x9, 0x0)
SetChrFlags(0x9, 0x8000)
ClearChrFlags(0xA, 0x80)
SetChrChipByIndex(0xA, 0x34)
SetChrSubChip(0xA, 0x0)
SetChrFlags(0xA, 0x8000)
ClearChrFlags(0xB, 0x80)
SetChrChipByIndex(0xB, 0x34)
SetChrSubChip(0xB, 0x0)
SetChrFlags(0xB, 0x8000)
ClearChrFlags(0xC, 0x80)
SetChrChipByIndex(0xC, 0x34)
SetChrSubChip(0xC, 0x0)
SetChrFlags(0xC, 0x8000)
ClearChrFlags(0xD, 0x80)
SetChrChipByIndex(0xD, 0x34)
SetChrSubChip(0xD, 0x0)
SetChrFlags(0xD, 0x8000)
SetChrPos(0xF, 12000, 0, 2000, 270)
SetChrPos(0x8, 13700, 0, 3000, 270)
SetChrPos(0x9, 14700, 0, 2500, 270)
SetChrPos(0xA, 15300, 0, 3500, 270)
SetChrPos(0xB, 13200, 0, 1000, 270)
SetChrPos(0xC, 15000, 0, 1500, 270)
SetChrPos(0xD, 15300, 0, 500, 270)
SetChrFlags(0xF, 0x8)
SetChrFlags(0x8, 0x8)
SetChrFlags(0x9, 0x8)
SetChrFlags(0xA, 0x8)
SetChrFlags(0xB, 0x8)
SetChrFlags(0xC, 0x8)
SetChrFlags(0xD, 0x8)
ClearChrFlags(0xE, 0x80)
OP_78(0x0, 0xE)
OP_49()
SetChrPos(0xE, -5000, 0, 2000, 90)
OP_D5(0xE, 0x0, 0x15F90, 0x0, 0x0)
ClearMapObjFlags(0x0, 0x4)
SetMapObjFlags(0x0, 0x1000)
OP_74(0x0, 0xA)
OP_71(0x0, 0xB, 0x32, 0x1, 0x20)
SetChrFlags(0xE, 0x1)
OP_52(0xE, 0x7, (scpexpr(EXPR_PUSH_LONG, 0xDAC), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
OP_52(0xE, 0x28, (scpexpr(EXPR_PUSH_LONG, 0x9), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Jc((scpexpr(EXPR_PUSH_LONG, 0x0), scpexpr(EXPR_END)), "loc_23DA")
FadeToBright(0, 0)
Jump("loc_32E6")
label("loc_23DA")
OP_68(-1740, 2600, 2210, 0)
MoveCamera(295, 3, 0, 0)
OP_6E(600, 0)
SetCameraDistance(14050, 0)
SetCameraDistance(12750, 1500)
FadeToBright(1000, 0)
OP_0D()
OP_6F(0x79)
Jc((scpexpr(EXPR_PUSH_VALUE_INDEX, 0x3), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_2629")
SetMessageWindowPos(10, 80, -1, -1)
#A0047
AnonymousTalk(
0xE,
(
scpstr(SCPSTR_CODE_COLOR, 0x2),
"#30W……オイオイ……\x01",
"てめぇら、舐めてんのか?\x02",
)
)
CloseMessageWindow()
#A0048
AnonymousTalk(
0xE,
(
scpstr(SCPSTR_CODE_COLOR, 0x2),
"#30Wいくらなんでも\x01",
"呆気なさすぎるだろうが……?\x07\x00\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
OP_5A()
SetMessageWindowPos(14, 280, 60, 3)
def lambda_24B3():
OP_A6(0xFE, 0x0, 0x32, 0x1F4, 0xBB8)
ExitThread()
QueueWorkItem(0x101, 2, lambda_24B3)
def lambda_24CC():
OP_A6(0xFE, 0x0, 0x32, 0x1F4, 0xBB8)
ExitThread()
QueueWorkItem(0x104, 2, lambda_24CC)
Sleep(150)
Fade(250)
SetChrChipByIndex(0x101, 0x1E)
SetChrSubChip(0x101, 0x0)
SetChrChipByIndex(0x104, 0x24)
SetChrSubChip(0x104, 0x0)
OP_0D()
WaitChrThread(0x101, 2)
WaitChrThread(0x104, 2)
#C0049
ChrTalk(
0x101,
"#00006F#4P#Nくっ……はあはあ……\x02",
)
CloseMessageWindow()
def lambda_252C():
OP_A6(0xFE, 0x0, 0x32, 0x1F4, 0xBB8)
ExitThread()
QueueWorkItem(0x102, 2, lambda_252C)
def lambda_2545():
OP_A6(0xFE, 0x0, 0x32, 0x1F4, 0xBB8)
ExitThread()
QueueWorkItem(0x103, 2, lambda_2545)
Sleep(150)
Fade(250)
SetChrChipByIndex(0x102, 0x20)
SetChrSubChip(0x102, 0x0)
SetChrChipByIndex(0x103, 0x22)
SetChrSubChip(0x103, 0x0)
OP_0D()
WaitChrThread(0x102, 2)
WaitChrThread(0x103, 2)
#C0050
ChrTalk(
0x102,
"#00108F#12P#Nぜ、全然通用してない……?\x02",
)
CloseMessageWindow()
def lambda_25AC():
OP_A6(0xFE, 0x0, 0x32, 0x1F4, 0xBB8)
ExitThread()
QueueWorkItem(0x105, 2, lambda_25AC)
def lambda_25C5():
OP_A6(0xFE, 0x0, 0x32, 0x1F4, 0xBB8)
ExitThread()
QueueWorkItem(0x109, 2, lambda_25C5)
Sleep(150)
Fade(250)
SetChrChipByIndex(0x105, 0x28)
SetChrSubChip(0x105, 0x0)
SetChrChipByIndex(0x109, 0x26)
SetChrSubChip(0x109, 0x0)
OP_0D()
WaitChrThread(0x105, 2)
WaitChrThread(0x109, 2)
#C0051
ChrTalk(
0x103,
"#00206F#12P#Nとんでもないです……\x02",
)
CloseMessageWindow()
OP_57(0x0)
OP_5A()
Jump("loc_287C")
label("loc_2629")
Jc((scpexpr(EXPR_PUSH_VALUE_INDEX, 0x3), scpexpr(EXPR_PUSH_LONG, 0x3), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_2792")
OP_2C(0xA8, 0x1)
SetMessageWindowPos(10, 80, -1, -1)
#A0052
AnonymousTalk(
0xE,
(
scpstr(SCPSTR_CODE_COLOR, 0x2),
"#30Wクク……\x01",
"まあそんなモンだろうな。\x02",
)
)
CloseMessageWindow()
#A0053
AnonymousTalk(
0xE,
(
scpstr(SCPSTR_CODE_COLOR, 0x2),
"#30Wやれやれ……\x01",
"ちょいと強くなりすぎたかァ?\x07\x00\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
OP_5A()
SetMessageWindowPos(14, 280, 60, 3)
#C0054
ChrTalk(
0x101,
"#00010F#4P#Nくっ……\x02",
)
CloseMessageWindow()
def lambda_26D4():
OP_A6(0xFE, 0x0, 0x32, 0x1F4, 0xBB8)
ExitThread()
QueueWorkItem(0x102, 2, lambda_26D4)
def lambda_26ED():
OP_A6(0xFE, 0x0, 0x32, 0x1F4, 0xBB8)
ExitThread()
QueueWorkItem(0x103, 2, lambda_26ED)
def lambda_2706():
OP_A6(0xFE, 0x0, 0x32, 0x1F4, 0xBB8)
ExitThread()
QueueWorkItem(0x109, 2, lambda_2706)
Sleep(150)
Fade(250)
SetChrChipByIndex(0x102, 0x20)
SetChrSubChip(0x102, 0x0)
SetChrChipByIndex(0x103, 0x22)
SetChrSubChip(0x103, 0x0)
SetChrChipByIndex(0x109, 0x26)
SetChrSubChip(0x109, 0x0)
OP_0D()
WaitChrThread(0x102, 2)
WaitChrThread(0x103, 2)
WaitChrThread(0x109, 2)
#C0055
ChrTalk(
0x109,
(
"#10108F#4P#Nあ、あれだけやったのに\x01",
"殆んど効いていない……!?\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
OP_5A()
Jump("loc_287C")
label("loc_2792")
OP_2C(0xA8, 0x2)
SetMessageWindowPos(10, 80, -1, -1)
#A0056
AnonymousTalk(
0xE,
(
scpstr(SCPSTR_CODE_COLOR, 0x2),
"#30Wクク……\x01",
"思ったよりもやるじゃねぇか。\x02",
)
)
CloseMessageWindow()
#A0057
AnonymousTalk(
0xE,
(
scpstr(SCPSTR_CODE_COLOR, 0x2),
"#30Wお楽しみに取っておくつもりが\x01",
"喰っちまいたくなるぜぇ……\x07\x00\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
OP_5A()
SetMessageWindowPos(14, 280, 60, 3)
#C0058
ChrTalk(
0x101,
"#00013F#4P#Nくっ……!\x02",
)
CloseMessageWindow()
OP_57(0x0)
OP_5A()
#C0059
ChrTalk(
0x104,
(
"#00311F#12P#Nチッ……\x01",
"6人がかりでこの程度かよ!?\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
OP_5A()
label("loc_287C")
#C0060
ChrTalk(
0x105,
"#10303F#12P#N……………………………………\x02",
)
CloseMessageWindow()
OP_57(0x0)
OP_5A()
Fade(500)
OP_68(-2000, 2500, 2000, 0)
MoveCamera(60, 24, 0, 0)
OP_6E(600, 0)
SetCameraDistance(14000, 0)
MoveCamera(50, 24, 0, 15000)
Sleep(500)
SetMessageWindowPos(10, 120, -1, -1)
#A0061
AnonymousTalk(
0xE,
(
scpstr(SCPSTR_CODE_COLOR, 0x2),
"#30Wクク……どうしたワジ……?\x02",
)
)
CloseMessageWindow()
#A0062
AnonymousTalk(
0xE,
(
scpstr(SCPSTR_CODE_COLOR, 0x2),
"#30Wいつもみたいに小奇麗なツラで\x01",
"スカしたことを言ってみろよ……?\x02",
)
)
CloseMessageWindow()
#A0063
AnonymousTalk(
0xE,
(
scpstr(SCPSTR_CODE_COLOR, 0x2),
"#30Wそうじゃなくちゃ\x01",
"面白くならねぇだろうが……?\x07\x00\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
OP_5A()
SetMessageWindowPos(14, 280, 60, 3)
#C0064
ChrTalk(
0x105,
(
"#10306F──ヴァルド。\x02\x03",
"#10301F一体どこで\x01",
"《グノーシス》を手に入れた?\x02",
)
)
CloseMessageWindow()
OP_63(0x101, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
OP_63(0x102, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
Sleep(50)
OP_63(0x103, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
OP_63(0x104, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
Sleep(50)
OP_63(0x109, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
Sleep(1000)
#C0065
ChrTalk(
0x101,
"#00011F#11Pそ、そういえば……!\x02",
)
CloseMessageWindow()
#C0066
ChrTalk(
0x102,
(
"#00101F#11Pヨアヒム先生が製造したものは\x01",
"調査用のサンプルを除いて\x01",
"全て廃棄された筈……\x02",
)
)
CloseMessageWindow()
#C0067
ChrTalk(
0x104,
(
"#00307F#11Pてめぇ……\x01",
"どこから手に入れやがった!?\x02",
)
)
CloseMessageWindow()
SetMessageWindowPos(10, 120, -1, -1)
#A0068
AnonymousTalk(
0xE,
(
scpstr(SCPSTR_CODE_COLOR, 0x2),
"#30Wクク……さてなぁ。\x02",
)
)
CloseMessageWindow()
#A0069
AnonymousTalk(
0xE,
(
scpstr(SCPSTR_CODE_COLOR, 0x2),
"#30Wそれに、カン違いするな。\x02",
)
)
CloseMessageWindow()
#A0070
AnonymousTalk(
0xE,
(
scpstr(SCPSTR_CODE_COLOR, 0x2),
"#30Wこの“チカラ”は何も\x01",
"クスリだけのモンじゃねえ……\x02",
)
)
CloseMessageWindow()
#A0071
AnonymousTalk(
0xE,
(
scpstr(SCPSTR_CODE_COLOR, 0x2),
"#30Wクスリはあくまできっかけ──\x01",
"コイツはオレ自身から生み出された\x01",
"混じりけのない“チカラ”だ。\x02",
)
)
CloseMessageWindow()
#A0072
AnonymousTalk(
0xE,
(
scpstr(SCPSTR_CODE_COLOR, 0x2),
"#30Wヨアヒムってのが手に入れた\x01",
"紛いモンの“チカラ”と違ってなぁ。\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
OP_5A()
SetMessageWindowPos(14, 280, 60, 3)
#C0073
ChrTalk(
0x103,
(
"#00201F#11P……確かに……\x01",
"ヨアヒム先生の時とは違って\x01",
"暴走はしていないようです。\x02",
)
)
CloseMessageWindow()
#C0074
ChrTalk(
0x105,
(
"#10303F#11Pきっかけはどうあれ\x01",
"使いこなせてるってわけか……\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
OP_5A()
Fade(500)
OP_68(-530, 2600, 2300, 0)
MoveCamera(271, 4, 0, 0)
OP_6E(600, 0)
SetCameraDistance(12750, 0)
ClearChrFlags(0x8, 0x8)
ClearChrFlags(0x9, 0x8)
ClearChrFlags(0xA, 0x8)
ClearChrFlags(0xB, 0x8)
ClearChrFlags(0xC, 0x8)
ClearChrFlags(0xD, 0x8)
ClearChrFlags(0xF, 0x8)
Sleep(300)
ClearChrFlags(0x1C, 0x80)
SetChrChipByIndex(0x1C, 0x1E)
SetChrPos(0x1C, 8000, 0, 1750, 270)
#N0075
NpcTalk(
0x1C,
"男の声",
"#2S#5Pな、なんだ!?\x02",
)
CloseMessageWindow()
OP_57(0x0)
OP_5A()
SetChrPos(0x1C, 8000, 0, 2250, 270)
#N0076
NpcTalk(
0x1C,
"男の声",
"#2S#11Pば、化物……!?\x02",
)
CloseMessageWindow()
OP_57(0x0)
OP_5A()
SetChrFlags(0x1C, 0x80)
OP_63(0x101, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
OP_63(0x102, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
Sleep(50)
OP_63(0x103, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
OP_63(0x104, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
Sleep(50)
OP_63(0x109, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
OP_63(0x105, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
Sleep(1000)
Fade(500)
OP_68(12000, 1000, 2000, 0)
OP_68(4500, 1000, 2000, 2500)
MoveCamera(45, 24, 0, 0)
MoveCamera(35, 21, 0, 2500)
OP_6E(510, 0)
SetCameraDistance(15000, 0)
SetCameraDistance(16000, 2500)
OP_93(0x101, 0x5A, 0x0)
OP_93(0x102, 0x5A, 0x0)
OP_93(0x103, 0x5A, 0x0)
OP_93(0x104, 0x5A, 0x0)
OP_93(0x109, 0x5A, 0x0)
OP_93(0x105, 0x5A, 0x0)
SetMapObjFlags(0x0, 0x4)
BeginChrThread(0xF, 3, 0, 15)
Sleep(10)
BeginChrThread(0x8, 3, 0, 16)
BeginChrThread(0x9, 3, 0, 16)
Sleep(10)
BeginChrThread(0xA, 3, 0, 16)
BeginChrThread(0xB, 3, 0, 16)
Sleep(10)
BeginChrThread(0xC, 3, 0, 16)
BeginChrThread(0xD, 3, 0, 16)
WaitChrThread(0xF, 3)
WaitChrThread(0x8, 3)
WaitChrThread(0x9, 3)
WaitChrThread(0xA, 3)
WaitChrThread(0xB, 3)
WaitChrThread(0xC, 3)
WaitChrThread(0xD, 3)
OP_6F(0x79)
OP_0D()
#C0077
ChrTalk(
0x104,
"#00305F#6Pミレイユ……!\x02",
)
CloseMessageWindow()
#C0078
ChrTalk(
0x109,
"#10102F#6Pミレイユ三尉……!\x02",
)
CloseMessageWindow()
#C0079
ChrTalk(
0x102,
(
"#00102F#6Pよかった……!\x01",
"復旧が終わったんですね?\x02",
)
)
CloseMessageWindow()
#C0080
ChrTalk(
0xF,
(
"#07905F#11Pえ、ええ、それで\x01",
"急いで駆けつけたんだけど……\x02\x03",
"#07907Fな、なんなのその化物……!?\x02",
)
)
CloseMessageWindow()
def lambda_3033():
OP_93(0x101, 0x10E, 0x1F4)
ExitThread()
QueueWorkItem(0x101, 0, lambda_3033)
Sleep(50)
def lambda_3043():
OP_93(0x102, 0x10E, 0x1F4)
ExitThread()
QueueWorkItem(0x102, 0, lambda_3043)
Sleep(50)
def lambda_3053():
OP_93(0x103, 0x10E, 0x1F4)
ExitThread()
QueueWorkItem(0x103, 0, lambda_3053)
Sleep(50)
def lambda_3063():
OP_93(0x104, 0x10E, 0x1F4)
ExitThread()
QueueWorkItem(0x104, 0, lambda_3063)
Sleep(50)
def lambda_3073():
OP_93(0x109, 0x10E, 0x1F4)
ExitThread()
QueueWorkItem(0x109, 0, lambda_3073)
Sleep(50)
def lambda_3083():
OP_93(0x105, 0x10E, 0x1F4)
ExitThread()
QueueWorkItem(0x105, 0, lambda_3083)
WaitChrThread(0x101, 0)
WaitChrThread(0x102, 0)
WaitChrThread(0x103, 0)
WaitChrThread(0x104, 0)
WaitChrThread(0x109, 0)
WaitChrThread(0x105, 0)
Fade(500)
OP_68(-2000, 2500, 2000, 0)
MoveCamera(55, 24, 0, 0)
OP_6E(600, 0)
SetCameraDistance(12750, 0)
ClearMapObjFlags(0x0, 0x4)
OP_0D()
SetMessageWindowPos(10, 120, -1, -1)
#A0081
AnonymousTalk(
0xE,
(
scpstr(SCPSTR_CODE_COLOR, 0x2),
"#30Wクク……今日はここでお開きか。\x02",
)
)
CloseMessageWindow()
#A0082
AnonymousTalk(
0xE,
(
scpstr(SCPSTR_CODE_COLOR, 0x2),
"#30W特務支援課……それからワジ。\x02",
)
)
CloseMessageWindow()
#A0083
AnonymousTalk(
0xE,
(
scpstr(SCPSTR_CODE_COLOR, 0x2),
"#30W次会った時はもう少しくらいは\x01",
"オレを愉しませろや……?\x02",
)
)
CloseMessageWindow()
#A0084
AnonymousTalk(
0xE,
(
scpstr(SCPSTR_CODE_COLOR, 0x2),
"#30Wあの旧市街でやった\x01",
"チェイスバトルくらいにはなァ?\x07\x00\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
OP_5A()
SetMessageWindowPos(14, 280, 60, 3)
#C0085
ChrTalk(
0x101,
"#00013F#11Pくっ……\x02",
)
CloseMessageWindow()
#C0086
ChrTalk(
0x104,
"#00310F#11Pてめぇ……\x02",
)
CloseMessageWindow()
#C0087
ChrTalk(
0x105,
"#10307F#11Pヴァルド……!\x02",
)
CloseMessageWindow()
OP_68(6000, 1000, 2000, 1500)
SetCameraDistance(13000, 1500)
OP_6F(0x79)
#C0088
ChrTalk(
0xF,
(
"#07901F#11Pに、逃がすもんですかッ!\x02\x03",
"#07907F総員、戦闘準備ッ!\x01",
"ミサイルポッドの使用も許可する!\x02",
)
)
CloseMessageWindow()
Sleep(500)
OP_82(0xC8, 0x0, 0xBB8, 0xC8)
SetMessageWindowPos(280, 10, -1, -1)
SetChrName("警備隊員たち")
#A0089
AnonymousTalk(
0xFF,
"#4Sイエス・マム!\x02",
)
CloseMessageWindow()
OP_57(0x0)
OP_5A()
SetMessageWindowPos(14, 280, 60, 3)
label("loc_32E6")
Fade(500)
OP_68(7150, 1500, 2000, 0)
MoveCamera(270, 28, 0, 0)
OP_6E(600, 0)
SetCameraDistance(13800, 0)
OP_68(-1750, 1500, 2000, 3000)
MoveCamera(310, 9, 0, 3000)
SetCameraDistance(19800, 3000)
ClearChrFlags(0xF, 0x8)
ClearChrFlags(0x8, 0x8)
ClearChrFlags(0x9, 0x8)
ClearChrFlags(0xA, 0x8)
ClearChrFlags(0xB, 0x8)
ClearChrFlags(0xC, 0x8)
ClearChrFlags(0xD, 0x8)
SetChrPos(0xF, 6000, 0, 2000, 270)
SetChrPos(0x8, 7700, 0, 3000, 270)
SetChrPos(0x9, 8700, 0, 2500, 270)
SetChrPos(0xA, 9300, 0, 3500, 270)
SetChrPos(0xB, 7200, 0, 1000, 270)
SetChrPos(0xC, 9000, 0, 1500, 270)
SetChrPos(0xD, 9300, 0, 500, 270)
BeginChrThread(0xF, 3, 0, 17)
BeginChrThread(0x8, 3, 0, 18)
BeginChrThread(0x9, 3, 0, 19)
BeginChrThread(0xA, 3, 0, 20)
BeginChrThread(0xB, 3, 0, 21)
BeginChrThread(0xC, 3, 0, 22)
BeginChrThread(0xD, 3, 0, 23)
Sleep(800)
BeginChrThread(0x101, 3, 0, 25)
BeginChrThread(0x102, 3, 0, 26)
BeginChrThread(0x103, 3, 0, 27)
BeginChrThread(0x104, 3, 0, 28)
BeginChrThread(0x109, 3, 0, 29)
BeginChrThread(0x105, 3, 0, 30)
OP_6F(0x79)
BeginChrThread(0x101, 0, 0, 24)
MoveCamera(295, 9, 0, 9000)
OP_6F(0x79)
WaitChrThread(0x101, 3)
WaitChrThread(0x102, 3)
WaitChrThread(0x103, 3)
WaitChrThread(0x104, 3)
WaitChrThread(0x109, 3)
WaitChrThread(0x105, 3)
OP_0D()
Fade(500)
OP_68(-5000, 3300, 2000, 0)
MoveCamera(270, 40, 0, 0)
OP_6E(600, 0)
SetCameraDistance(15500, 0)
MoveCamera(270, 30, 0, 10000)
SetCameraDistance(13500, 10000)
OP_74(0x0, 0xF)
OP_71(0x0, 0x41A, 0x442, 0x1, 0x8)
Sound(892, 0, 100, 0)
Sleep(1300)
Sound(892, 0, 100, 0)
OP_79(0x0)
OP_74(0x0, 0x14)
OP_71(0x0, 0xB, 0x32, 0x1, 0x20)
OP_0D()
OP_C9(0x0, 0x80000000)
SetMessageWindowPos(-1, 140, -1, -1)
#A0090
AnonymousTalk(
0xE,
(
scpstr(SCPSTR_CODE_COLOR, 0x2),
"#40A#3575Vカカ……\x01",
"#4Sぬるいんだよオオオオッ!\x07\x00\x05\x02",
)
)
#Auto
CloseMessageWindow()
OP_57(0x0)
OP_5A()
SetMessageWindowPos(14, 280, 60, 3)
OP_C9(0x1, 0x80000000)
BlurSwitch(0x0, 0xBBFFFFFF, 0x0, 0x0, 0x0)
OP_68(-5000, 4250, 2000, 1200)
MoveCamera(270, 15, 0, 1200)
OP_6E(600, 1200)
SetCameraDistance(9000, 1200)
Sound(892, 0, 100, 0)
OP_74(0x0, 0x7)
OP_71(0x0, 0x335, 0x33E, 0x1, 0x8)
OP_79(0x0)
EndChrThread(0x101, 0x0)
EndChrThread(0xF, 0x3)
EndChrThread(0x8, 0x3)
EndChrThread(0x9, 0x3)
EndChrThread(0xA, 0x3)
EndChrThread(0xB, 0x3)
EndChrThread(0xC, 0x3)
EndChrThread(0xD, 0x3)
CancelBlur(500)
Sleep(500)
OP_6F(0x79)
Sound(893, 0, 100, 0)
OP_74(0x0, 0x1E)
OP_71(0x0, 0x33E, 0x348, 0x1, 0x8)
Sound(3543, 255, 100, 0) #voice#Wald
Sound(833, 0, 100, 0)
Sound(862, 0, 100, 0)
OP_82(0x1F4, 0x1F4, 0x2328, 0x5DC)
BlurSwitch(0x0, 0xBBFFFFFF, 0x0, 0x0, 0x0)
PlayEffect(0x3, 0xFF, 0xE, 0x5, 0, 2000, 0, 0, 0, 0, 1500, 1500, 1500, 0xFF, 0, 0, 0, 0)
OP_68(-5000, 800, 2000, 1000)
MoveCamera(270, 16, 0, 1000)
SetCameraDistance(33000, 1000)
Sleep(700)
CancelBlur(500)
BeginChrThread(0xF, 3, 0, 37)
BeginChrThread(0x8, 3, 0, 38)
BeginChrThread(0x9, 3, 0, 39)
BeginChrThread(0xA, 3, 0, 40)
BeginChrThread(0xB, 3, 0, 41)
BeginChrThread(0xC, 3, 0, 42)
BeginChrThread(0xD, 3, 0, 43)
BeginChrThread(0x101, 3, 0, 31)
BeginChrThread(0x102, 3, 0, 32)
BeginChrThread(0x103, 3, 0, 33)
BeginChrThread(0x104, 3, 0, 34)
BeginChrThread(0x109, 3, 0, 35)
BeginChrThread(0x105, 3, 0, 36)
#C0091
ChrTalk(
0xF,
"#07911Fきゃああッ!?\x05\x02",
)
#C0092
ChrTalk(
0xD,
"うわあああっ!?\x05\x02",
)
WaitChrThread(0x101, 3)
WaitChrThread(0x102, 3)
WaitChrThread(0x103, 3)
WaitChrThread(0x104, 3)
WaitChrThread(0x109, 3)
WaitChrThread(0x105, 3)
WaitChrThread(0xF, 3)
Sound(514, 0, 100, 0)
WaitChrThread(0x8, 3)
WaitChrThread(0x9, 3)
WaitChrThread(0xA, 3)
WaitChrThread(0xB, 3)
WaitChrThread(0xC, 3)
WaitChrThread(0xD, 3)
OP_79(0x0)
OP_6F(0x79)
Sleep(1000)
Fade(500)
OP_68(-5000, 2800, 2000, 0)
MoveCamera(120, 42, 0, 0)
OP_6E(600, 0)
SetCameraDistance(27000, 0)
BeginChrThread(0xE, 3, 0, 44)
OP_D5(0xE, 0x0, 0x5F370, 0x0, 0x0)
OP_D5(0xE, 0x0, 0x4BAF0, 0x0, 0x2BC)
WaitChrThread(0xE, 3)
OP_0D()
Sleep(200)
Fade(500)
OP_68(-5000, 3800, 2000, 2000)
MoveCamera(340, -15, 0, 2000)
SetCameraDistance(12000, 2000)
OP_74(0x0, 0xA)
OP_71(0x0, 0x105, 0x118, 0x1, 0x8)
OP_79(0x0)
OP_74(0x0, 0x5)
OP_71(0x0, 0x119, 0x122, 0x1, 0x20)
Sleep(500)
OP_68(-5000, 10000, 2000, 500)
MoveCamera(340, -40, 0, 500)
SetCameraDistance(20000, 500)
OP_82(0xC8, 0x1F4, 0xFA0, 0x1F4)
Sound(833, 0, 80, 0)
Sound(251, 0, 100, 0)
BlurSwitch(0x0, 0xBBFFFFFF, 0x0, 0x0, 0x0)
Sleep(1)
CancelBlur(1000)
def lambda_3824():
OP_9C(0xFE, 0xFFFEEE90, 0x0, 0x7530, 0x88B8, 0x5DC)
ExitThread()
QueueWorkItem(0xE, 1, lambda_3824)
PlayEffect(0x3, 0xFF, 0xE, 0x0, 0, 0, 0, 0, 0, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
ClearChrFlags(0xE, 0x1)
OP_74(0x0, 0x2D)
OP_71(0x0, 0x123, 0x12C, 0x1, 0x8)
OP_79(0x0)
OP_71(0x0, 0x12D, 0x140, 0x1, 0x20)
OP_0D()
Sleep(2500)
Fade(500)
EndChrThread(0xE, 0x1)
OP_F0(0x0, 0x1)
OP_F0(0x1, 0x3E8)
OP_68(-26540, 13300, 18780, 0)
MoveCamera(85, 30, 0, 0)
OP_6E(600, 0)
SetCameraDistance(48960, 0)
OP_68(-26540, 8300, 18780, 4000)
MoveCamera(112, 23, 0, 4000)
OP_6E(600, 4000)
SetCameraDistance(73320, 4000)
SetChrPos(0xE, -18600, 15800, 15800, 0)
def lambda_391E():
OP_9D(0xFE, 0xFFFF9688, 0x23F0, 0x79AE, 0x1388, 0x1388)
ExitThread()
QueueWorkItem(0xE, 1, lambda_391E)
OP_74(0x0, 0xF)
OP_71(0x0, 0x141, 0x154, 0x1, 0x8)
Sleep(1100)
Sound(833, 0, 80, 0)
OP_82(0xC8, 0x0, 0xBB8, 0x1F4)
PlayEffect(0x3, 0xFF, 0xE, 0x0, 0, 0, 0, 0, 0, 0, 700, 700, 700, 0xFF, 0, 0, 0, 0)
OP_D5(0xE, 0x0, 0x41EB0, 0x0, 0x12C)
OP_79(0x0)
WaitChrThread(0xE, 1)
Sound(251, 0, 100, 0)
def lambda_39BC():
OP_9D(0xFE, 0xFFFF444E, 0x3458, 0x57E4, 0x2710, 0x1388)
ExitThread()
QueueWorkItem(0xE, 1, lambda_39BC)
OP_74(0x0, 0x23)
OP_71(0x0, 0x123, 0x154, 0x1, 0x8)
Sleep(1100)
Sound(833, 0, 80, 0)
OP_82(0xC8, 0x0, 0xBB8, 0x1F4)
PlayEffect(0x3, 0xFF, 0xE, 0x0, 0, 0, 0, 0, 0, 0, 700, 700, 700, 0xFF, 0, 0, 0, 0)
OP_D5(0xE, 0x0, 0x4CE78, 0x0, 0x12C)
OP_79(0x0)
WaitChrThread(0xE, 1)
Sound(251, 0, 100, 0)
def lambda_3A5A():
OP_9C(0xFE, 0xFFFEE2D8, 0x0, 0x9C40, 0x36B0, 0xBB8)
ExitThread()
QueueWorkItem(0xE, 1, lambda_3A5A)
OP_74(0x0, 0x14)
OP_71(0x0, 0x123, 0x154, 0x1, 0x8)
Sleep(550)
BlurSwitch(0x96, 0xBBFFFFFF, 0x0, 0x0, 0x0)
Sleep(150)
OP_82(0x5DC, 0x5DC, 0x1388, 0x3E8)
Sleep(1000)
CancelBlur(3000)
OP_79(0x0)
WaitChrThread(0xE, 1)
OP_0D()
StopBGM(0x2710)
Fade(1000)
OP_68(-14380, 14500, -2230, 0)
MoveCamera(350, 38, 0, 0)
OP_6E(600, 0)
SetCameraDistance(60750, 0)
OP_68(-14380, 14500, -2230, 10000)
MoveCamera(308, 0, 0, 10000)
SetCameraDistance(60750, 10000)
OP_6F(0x79)
OP_0D()
FadeToDark(1000, 0, -1)
OP_0D()
WaitBGM()
Sleep(500)
SetChrName("")
#A0093
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
"その後、ロイドたちは\x01",
"ミレイユの部隊と協力しながら\x01",
"広大な樹海を捜索したが……\x02\x03",
"結局、魔人化したヴァルドの姿を\x01",
"発見することは出来なかった。\x02\x03",
"そして夜も更けて\x01",
"いったん捜索が打ち切られた後……\x02\x03",
"ロイドたちは深夜近くに支援課に戻り、\x01",
"キーアが用意していた鍋をつつく気力もなく\x01",
"泥のような眠りにつくのだった。\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
OP_5A()
Sleep(1000)
Sound(13, 0, 100, 0)
Sleep(4000)
OP_CB(0x0, 0x3, 0xFFFFFFFF, 0x1F4, 0x0, 0x0)
OP_CC(0x0, 0x0, 0x3)
Sleep(2000)
OP_CB(0x0, 0x3, 0xFFFFFF, 0x1F4, 0x0, 0x0)
OP_CC(0x0, 0x0, 0x3)
OP_CC(0x1, 0xFF, 0x0)
SetChrChipByIndex(0x101, 0xFF)
SetChrSubChip(0x101, 0x0)
SetChrChipByIndex(0x102, 0xFF)
SetChrSubChip(0x102, 0x0)
SetChrChipByIndex(0x104, 0xFF)
SetChrSubChip(0x104, 0x0)
SetChrChipByIndex(0x109, 0xFF)
SetChrSubChip(0x109, 0x0)
SetChrChipByIndex(0x105, 0xFF)
SetChrSubChip(0x105, 0x0)
SetChrChipByIndex(0x103, 0xFF)
SetChrSubChip(0x103, 0x0)
OP_32(0xFF, 0xFE, 0x0)
ReplaceBGM(-1, -1)
OP_C9(0x0, 0x10000)
SetScenarioFlags(0x22, 2)
NewScene("c1400", 0, 0, 0)
IdleLoop()
Return()
# Function_14_1FC1 end
# Per-character helper thread: flash the owning character (0xFE appears to
# mean "the character this thread runs on" -- confirm against the engine's
# opcode table) to action chip 0x31, run one OP_9B movement on work slot 1,
# wait for it to finish, then settle on idle chip 0x30.
# NOTE(review): OP_9B parameter semantics are engine-defined (looks like a
# relative forward move of 0x1770 at speed 0xFA0 -- verify).
def Function_15_3CE9(): pass
label("Function_15_3CE9")
SetChrChipByIndex(0xFE, 0x31)
SetChrSubChip(0xFE, 0x0)
def lambda_3CF6():
OP_9B(0x0, 0xFE, 0x0, 0x1770, 0xFA0, 0x0)
ExitThread()
QueueWorkItem(0xFE, 1, lambda_3CF6)
WaitChrThread(0xFE, 1)
# Movement finished: revert to the resting chip.
SetChrChipByIndex(0xFE, 0x30)
SetChrSubChip(0xFE, 0x0)
Return()
# Function_15_3CE9 end
# Same move-then-idle pattern as Function_15_3CE9, but using a different
# sprite-chip pair (0x35 while moving, 0x34 when done) -- presumably for a
# different character model.  OP_9B parameters are engine-defined.
def Function_16_3D13(): pass
label("Function_16_3D13")
SetChrChipByIndex(0xFE, 0x35)
SetChrSubChip(0xFE, 0x0)
def lambda_3D20():
OP_9B(0x0, 0xFE, 0x0, 0x1770, 0xFA0, 0x0)
ExitThread()
QueueWorkItem(0xFE, 1, lambda_3D20)
WaitChrThread(0xFE, 1)
SetChrChipByIndex(0xFE, 0x34)
SetChrSubChip(0xFE, 0x0)
Return()
# Function_16_3D13 end
# Combat thread: run the owning character to a fixed spot (OP_95 args look
# like (chr, x, y, z, speed, flag) -- confirm), face character 0xE, then
# loop a melee animation forever: hit sound, slash effect, animation
# frames (OP_A1), screen shake (OP_82), impact sound.
# The Jc condition is the constant 1, so this loop never exits on its own;
# it runs until the scene kills the thread (EndChrThread elsewhere).
# TODO(review): confirm Jc's branch polarity against the opcode table.
def Function_17_3D3D(): pass
label("Function_17_3D3D")
SetChrChipByIndex(0xFE, 0x31)
SetChrSubChip(0xFE, 0x0)
def lambda_3D4A():
OP_95(0xFE, 1950, 0, 5200, 4000, 0x0)
ExitThread()
QueueWorkItem(0xFE, 1, lambda_3D4A)
WaitChrThread(0xFE, 1)
SetChrChipByIndex(0xFE, 0x30)
Sleep(50)
TurnDirection(0xFE, 0xE, 500)
SetChrChipByIndex(0xFE, 0x32)
OP_A1(0xFE, 0x3E8, 0x4, 0x0, 0x1, 0x2, 0x3)
label("loc_3D7F")
Jc((scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_END)), "loc_3DF2")
Sound(545, 0, 80, 0)
PlayEffect(0x2, 0xFF, 0xFE, 0x5, 300, 1000, 1000, 0, 0, 0, 1000, 1000, 1000, 0xE, 0, 0, 0, 0)
OP_A1(0xFE, 0x3E8, 0x3, 0x4, 0x5, 0x3)
OP_82(0x64, 0x64, 0xBB8, 0x1F4)
Sleep(1000)
Sound(196, 0, 80, 0)
Sleep(2000)
Jump("loc_3D7F")
label("loc_3DF2")
Return()
# Function_17_3D3D end
# Combat thread, same run-in + endless melee loop as Function_17_3D3D but
# for the chip set 0x35/0x34/0x38 and a different destination; also plays
# two arrival sounds (531, 358) before attacking.  Loops until the thread
# is ended externally.
def Function_18_3DF3(): pass
label("Function_18_3DF3")
SetChrChipByIndex(0xFE, 0x35)
SetChrSubChip(0xFE, 0x0)
def lambda_3E00():
OP_95(0xFE, -2100, 0, 9400, 4000, 0x0)
ExitThread()
QueueWorkItem(0xFE, 1, lambda_3E00)
WaitChrThread(0xFE, 1)
Sound(531, 0, 100, 0)
Sound(358, 0, 80, 0)
SetChrChipByIndex(0xFE, 0x34)
Sleep(50)
TurnDirection(0xFE, 0xE, 500)
SetChrChipByIndex(0xFE, 0x38)
OP_A1(0xFE, 0x3E8, 0x4, 0x0, 0x1, 0x2, 0x3)
label("loc_3E41")
Jc((scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_END)), "loc_3EB4")
Sound(545, 0, 80, 0)
PlayEffect(0x2, 0xFF, 0xFE, 0x5, 300, 1000, 1000, 0, 0, 0, 1000, 1000, 1000, 0xE, 0, 0, 0, 0)
OP_A1(0xFE, 0x3E8, 0x3, 0x4, 0x5, 0x3)
OP_82(0x64, 0x64, 0xBB8, 0x1F4)
Sleep(1000)
Sound(196, 0, 80, 0)
Sleep(2000)
Jump("loc_3E41")
label("loc_3EB4")
Return()
# Function_18_3DF3 end
# Combat thread: run to position, face character 0xE, then loop a
# ranged-attack pose: sub-chips 0x2/0x3 toggle back and forth (presumably
# recoil frames -- confirm), with a muzzle-flash style effect and sound 987
# each pass.  The constant-true Jc makes this loop endless; the scene ends
# the thread externally.
def Function_19_3EB5(): pass
label("Function_19_3EB5")
SetChrChipByIndex(0xFE, 0x35)
SetChrSubChip(0xFE, 0x0)
def lambda_3EC2():
OP_95(0xFE, 1700, 0, 7650, 4000, 0x0)
ExitThread()
QueueWorkItem(0xFE, 1, lambda_3EC2)
WaitChrThread(0xFE, 1)
SetChrChipByIndex(0xFE, 0x34)
Sleep(50)
TurnDirection(0xFE, 0xE, 500)
Sound(531, 0, 100, 0)
SetChrChipByIndex(0xFE, 0x36)
OP_A1(0xFE, 0x3E8, 0x2, 0x0, 0x1)
label("loc_3EFB")
Jc((scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_END)), "loc_3F60")
SetChrSubChip(0xFE, 0x2)
Sleep(100)
Sound(987, 0, 50, 0)
PlayEffect(0x0, 0xFF, 0xFE, 0x5, 0, 1050, 1200, 0, 0, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
SetChrSubChip(0xFE, 0x3)
Sleep(100)
SetChrSubChip(0xFE, 0x2)
Sleep(100)
Sleep(1000)
Jump("loc_3EFB")
label("loc_3F60")
Return()
# Function_19_3EB5 end
# Variant of Function_19_3EB5: same endless ranged-attack loop, different
# destination and per-shot sound (567); no arrival sound.
def Function_20_3F61(): pass
label("Function_20_3F61")
SetChrChipByIndex(0xFE, 0x35)
SetChrSubChip(0xFE, 0x0)
def lambda_3F6E():
OP_95(0xFE, 1100, 0, 10100, 4000, 0x0)
ExitThread()
QueueWorkItem(0xFE, 1, lambda_3F6E)
WaitChrThread(0xFE, 1)
SetChrChipByIndex(0xFE, 0x34)
Sleep(50)
TurnDirection(0xFE, 0xE, 500)
SetChrChipByIndex(0xFE, 0x36)
OP_A1(0xFE, 0x3E8, 0x2, 0x0, 0x1)
label("loc_3FA1")
Jc((scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_END)), "loc_4006")
SetChrSubChip(0xFE, 0x2)
Sleep(100)
Sound(567, 0, 50, 0)
PlayEffect(0x0, 0xFF, 0xFE, 0x5, 0, 1050, 1200, 0, 0, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
SetChrSubChip(0xFE, 0x3)
Sleep(100)
SetChrSubChip(0xFE, 0x2)
Sleep(100)
Sleep(1000)
Jump("loc_3FA1")
label("loc_4006")
Return()
# Function_20_3F61 end
# Variant of Function_19_3EB5: same endless ranged-attack loop, different
# destination; plays no per-shot sound (effect only).
def Function_21_4007(): pass
label("Function_21_4007")
SetChrChipByIndex(0xFE, 0x35)
SetChrSubChip(0xFE, 0x0)
def lambda_4014():
OP_95(0xFE, -550, 0, -5000, 4000, 0x0)
ExitThread()
QueueWorkItem(0xFE, 1, lambda_4014)
WaitChrThread(0xFE, 1)
SetChrChipByIndex(0xFE, 0x34)
Sleep(50)
TurnDirection(0xFE, 0xE, 500)
SetChrChipByIndex(0xFE, 0x36)
OP_A1(0xFE, 0x3E8, 0x2, 0x0, 0x1)
label("loc_4047")
Jc((scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_END)), "loc_40A6")
SetChrSubChip(0xFE, 0x2)
Sleep(100)
PlayEffect(0x0, 0xFF, 0xFE, 0x5, 0, 1050, 1200, 0, 0, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
SetChrSubChip(0xFE, 0x3)
Sleep(100)
SetChrSubChip(0xFE, 0x2)
Sleep(100)
Sleep(1000)
Jump("loc_4047")
label("loc_40A6")
Return()
# Function_21_4007 end
# Variant of Function_19_3EB5: same endless ranged-attack loop, different
# destination, arrival sound 531 and per-shot sound 530.
def Function_22_40A7(): pass
label("Function_22_40A7")
SetChrChipByIndex(0xFE, 0x35)
SetChrSubChip(0xFE, 0x0)
def lambda_40B4():
OP_95(0xFE, 1950, 0, -1300, 4000, 0x0)
ExitThread()
QueueWorkItem(0xFE, 1, lambda_40B4)
WaitChrThread(0xFE, 1)
Sound(531, 0, 100, 0)
SetChrChipByIndex(0xFE, 0x34)
Sleep(50)
TurnDirection(0xFE, 0xE, 500)
SetChrChipByIndex(0xFE, 0x36)
OP_A1(0xFE, 0x3E8, 0x2, 0x0, 0x1)
label("loc_40ED")
Jc((scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_END)), "loc_4152")
SetChrSubChip(0xFE, 0x2)
Sleep(100)
Sound(530, 0, 40, 0)
PlayEffect(0x0, 0xFF, 0xFE, 0x5, 0, 1050, 1200, 0, 0, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
SetChrSubChip(0xFE, 0x3)
Sleep(100)
SetChrSubChip(0xFE, 0x2)
Sleep(100)
Sleep(1000)
Jump("loc_40ED")
label("loc_4152")
Return()
# Function_22_40A7 end
# Variant of Function_18_3DF3: run in, face character 0xE, then the same
# endless melee loop (chip 0x38); different destination, no arrival sounds.
def Function_23_4153(): pass
label("Function_23_4153")
SetChrChipByIndex(0xFE, 0x35)
SetChrSubChip(0xFE, 0x0)
def lambda_4160():
OP_95(0xFE, 2300, 0, -3900, 4000, 0x0)
ExitThread()
QueueWorkItem(0xFE, 1, lambda_4160)
WaitChrThread(0xFE, 1)
SetChrChipByIndex(0xFE, 0x34)
Sleep(50)
TurnDirection(0xFE, 0xE, 500)
SetChrChipByIndex(0xFE, 0x38)
OP_A1(0xFE, 0x3E8, 0x4, 0x0, 0x1, 0x2, 0x3)
label("loc_4195")
Jc((scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_END)), "loc_4208")
Sound(545, 0, 80, 0)
PlayEffect(0x2, 0xFF, 0xFE, 0x5, 300, 1000, 1000, 0, 0, 0, 1000, 1000, 1000, 0xE, 0, 0, 0, 0)
OP_A1(0xFE, 0x3E8, 0x3, 0x4, 0x5, 0x3)
OP_82(0x64, 0x64, 0xBB8, 0x1F4)
Sleep(1000)
Sound(196, 0, 80, 0)
Sleep(2000)
Jump("loc_4195")
label("loc_4208")
Return()
# Function_23_4153 end
# Ambient-effect thread: endlessly spawns effect 0x1 at five fixed world
# positions with staggered delays (presumably scattered impacts around the
# battlefield -- the scene starts this on a character's slot-0 thread and
# ends it with EndChrThread).  Note the back-edge jumps to the function's
# own entry label, re-evaluating the constant-true Jc each cycle.
def Function_24_4209(): pass
label("Function_24_4209")
Jc((scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_END)), "loc_433B")
PlayEffect(0x1, 0xFF, 0xFF, 0x0, -6050, 0, 6310, 0, 0, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
Sleep(300)
PlayEffect(0x1, 0xFF, 0xFF, 0x0, -4270, 0, -2050, 0, 0, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
Sleep(500)
PlayEffect(0x1, 0xFF, 0xFF, 0x0, -1110, 0, 2390, 0, 0, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
Sleep(450)
PlayEffect(0x1, 0xFF, 0xFF, 0x0, -2860, 0, 530, 0, 0, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
Sleep(350)
PlayEffect(0x1, 0xFF, 0xFF, 0x0, -1580, 0, -1780, 0, 0, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
Sleep(500)
Jump("Function_24_4209")
label("loc_433B")
Return()
# Function_24_4209 end
# One of six parallel reaction threads (Functions 25-30, one per party
# member) differing only in the chip pair: flash to chip 0x1F, perform one
# OP_9B move (mode 0x1 here vs 0x0 in Function_15 -- engine-defined), then
# settle on chip 0x1E.
def Function_25_433C(): pass
label("Function_25_433C")
SetChrChipByIndex(0xFE, 0x1F)
SetChrSubChip(0xFE, 0x0)
OP_9B(0x1, 0xFE, 0xB4, 0x7D0, 0x7D0, 0x0)
SetChrChipByIndex(0xFE, 0x1E)
SetChrSubChip(0xFE, 0x0)
Return()
# Function_25_433C end
# Reaction-thread variant (see Function_25_433C): chip pair 0x21 -> 0x20.
def Function_26_435C(): pass
label("Function_26_435C")
SetChrChipByIndex(0xFE, 0x21)
SetChrSubChip(0xFE, 0x0)
OP_9B(0x1, 0xFE, 0xB4, 0x7D0, 0x7D0, 0x0)
SetChrChipByIndex(0xFE, 0x20)
SetChrSubChip(0xFE, 0x0)
Return()
# Function_26_435C end
# Reaction-thread variant (see Function_25_433C): chip pair 0x23 -> 0x22.
def Function_27_437C(): pass
label("Function_27_437C")
SetChrChipByIndex(0xFE, 0x23)
SetChrSubChip(0xFE, 0x0)
OP_9B(0x1, 0xFE, 0xB4, 0x7D0, 0x7D0, 0x0)
SetChrChipByIndex(0xFE, 0x22)
SetChrSubChip(0xFE, 0x0)
Return()
# Function_27_437C end
# Reaction-thread variant (see Function_25_433C): chip pair 0x25 -> 0x24.
def Function_28_439C(): pass
label("Function_28_439C")
SetChrChipByIndex(0xFE, 0x25)
SetChrSubChip(0xFE, 0x0)
OP_9B(0x1, 0xFE, 0xB4, 0x7D0, 0x7D0, 0x0)
SetChrChipByIndex(0xFE, 0x24)
SetChrSubChip(0xFE, 0x0)
Return()
# Function_28_439C end
# Reaction-thread variant (see Function_25_433C): chip pair 0x27 -> 0x26.
def Function_29_43BC(): pass
label("Function_29_43BC")
SetChrChipByIndex(0xFE, 0x27)
SetChrSubChip(0xFE, 0x0)
OP_9B(0x1, 0xFE, 0xB4, 0x7D0, 0x7D0, 0x0)
SetChrChipByIndex(0xFE, 0x26)
SetChrSubChip(0xFE, 0x0)
Return()
# Function_29_43BC end
# Reaction-thread variant (see Function_25_433C): chip pair 0x29 -> 0x28.
def Function_30_43DC(): pass
label("Function_30_43DC")
SetChrChipByIndex(0xFE, 0x29)
SetChrSubChip(0xFE, 0x0)
OP_9B(0x1, 0xFE, 0xB4, 0x7D0, 0x7D0, 0x0)
SetChrChipByIndex(0xFE, 0x28)
SetChrSubChip(0xFE, 0x0)
Return()
# Function_30_43DC end
# One of six parallel hit-reaction threads (Functions 31-36, one per party
# member) differing only in the chip id: set a reaction pose (sub-chip 0x2),
# do one OP_9C move (parameters are engine-defined; looks like a short
# knockback arc -- confirm), then reset the sub-chip.  Only this first
# variant plays sound 250, presumably as the shared impact cue.
def Function_31_43FC(): pass
label("Function_31_43FC")
Sound(250, 0, 100, 0)
SetChrChipByIndex(0xFE, 0x2A)
SetChrSubChip(0xFE, 0x2)
OP_9C(0xFE, 0xBB8, 0x0, 0x0, 0x3E8, 0xBB8)
SetChrSubChip(0xFE, 0x0)
Return()
# Function_31_43FC end
# Hit-reaction variant (see Function_31_43FC): chip 0x2B, no sound.
def Function_32_4426(): pass
label("Function_32_4426")
SetChrChipByIndex(0xFE, 0x2B)
SetChrSubChip(0xFE, 0x2)
OP_9C(0xFE, 0xBB8, 0x0, 0x0, 0x3E8, 0xBB8)
SetChrSubChip(0xFE, 0x0)
Return()
# Function_32_4426 end
# Hit-reaction variant (see Function_31_43FC): chip 0x2C, no sound.
def Function_33_444A(): pass
label("Function_33_444A")
SetChrChipByIndex(0xFE, 0x2C)
SetChrSubChip(0xFE, 0x2)
OP_9C(0xFE, 0xBB8, 0x0, 0x0, 0x3E8, 0xBB8)
SetChrSubChip(0xFE, 0x0)
Return()
# Function_33_444A end
# Hit-reaction variant (see Function_31_43FC): chip 0x2D, no sound.
def Function_34_446E(): pass
label("Function_34_446E")
SetChrChipByIndex(0xFE, 0x2D)
SetChrSubChip(0xFE, 0x2)
OP_9C(0xFE, 0xBB8, 0x0, 0x0, 0x3E8, 0xBB8)
SetChrSubChip(0xFE, 0x0)
Return()
# Function_34_446E end
# Hit-reaction variant (see Function_31_43FC): chip 0x2E, no sound.
def Function_35_4492(): pass
label("Function_35_4492")
SetChrChipByIndex(0xFE, 0x2E)
SetChrSubChip(0xFE, 0x2)
OP_9C(0xFE, 0xBB8, 0x0, 0x0, 0x3E8, 0xBB8)
SetChrSubChip(0xFE, 0x0)
Return()
# Function_35_4492 end
# Hit-reaction variant (see Function_31_43FC): chip 0x2F, no sound.
def Function_36_44B6(): pass
label("Function_36_44B6")
SetChrChipByIndex(0xFE, 0x2F)
SetChrSubChip(0xFE, 0x2)
OP_9C(0xFE, 0xBB8, 0x0, 0x0, 0x3E8, 0xBB8)
SetChrSubChip(0xFE, 0x0)
Return()
# Function_36_44B6 end
# One of seven parallel blown-away threads (Functions 37-43, one per
# character) differing in chip and landing coordinates: set the reaction
# chip, OP_9D move (parameters engine-defined; looks like an arc/jump to an
# absolute position -- confirm), then loop an animation via OP_A1.  This
# first variant uniquely uses chip 0x33 and plays sound 815, presumably as
# the shared cue for the group.
def Function_37_44DA(): pass
label("Function_37_44DA")
SetChrChipByIndex(0xFE, 0x33)
SetChrSubChip(0xFE, 0x0)
Sound(815, 0, 100, 0)
OP_9D(0xFE, 0x154A, 0x0, 0x18EC, 0x1F4, 0x7D0)
OP_A1(0xFE, 0x3E8, 0x3, 0x1, 0x2, 0x3)
Return()
# Function_37_44DA end
# Blown-away variant (see Function_37_44DA): chip 0x37, landing (0x50, 0x2CD8).
def Function_38_4509(): pass
label("Function_38_4509")
SetChrChipByIndex(0xFE, 0x37)
SetChrSubChip(0xFE, 0x0)
OP_9D(0xFE, 0x50, 0x0, 0x2CD8, 0x1F4, 0x7D0)
OP_A1(0xFE, 0x3E8, 0x3, 0x1, 0x2, 0x3)
Return()
# Function_38_4509 end
# Blown-away variant (see Function_37_44DA): chip 0x37, landing (0x1068, 0x2314).
def Function_39_4532(): pass
label("Function_39_4532")
SetChrChipByIndex(0xFE, 0x37)
SetChrSubChip(0xFE, 0x0)
OP_9D(0xFE, 0x1068, 0x0, 0x2314, 0x1F4, 0x7D0)
OP_A1(0xFE, 0x3E8, 0x3, 0x1, 0x2, 0x3)
Return()
# Function_39_4532 end
# Blown-away variant (see Function_37_44DA): chip 0x37, landing (0xC1C, 0x2CBA).
def Function_40_455B(): pass
label("Function_40_455B")
SetChrChipByIndex(0xFE, 0x37)
SetChrSubChip(0xFE, 0x0)
OP_9D(0xFE, 0xC1C, 0x0, 0x2CBA, 0x1F4, 0x7D0)
OP_A1(0xFE, 0x3E8, 0x3, 0x1, 0x2, 0x3)
Return()
# Function_40_455B end
# Blown-away variant (see Function_37_44DA): chip 0x37; the 0xFFFFE502
# z-coordinate is a negative value encoded as unsigned 32-bit.
def Function_41_4584(): pass
label("Function_41_4584")
SetChrChipByIndex(0xFE, 0x37)
SetChrSubChip(0xFE, 0x0)
OP_9D(0xFE, 0x672, 0x0, 0xFFFFE502, 0x1F4, 0x7D0)
OP_A1(0xFE, 0x3E8, 0x3, 0x1, 0x2, 0x3)
Return()
# Function_41_4584 end
# Blown-away variant (see Function_37_44DA): chip 0x37; 0xFFFFF7EA is a
# negative z-coordinate encoded as unsigned 32-bit.
def Function_42_45AD(): pass
label("Function_42_45AD")
SetChrChipByIndex(0xFE, 0x37)
SetChrSubChip(0xFE, 0x0)
OP_9D(0xFE, 0x154A, 0x0, 0xFFFFF7EA, 0x1F4, 0x7D0)
OP_A1(0xFE, 0x3E8, 0x3, 0x1, 0x2, 0x3)
Return()
# Function_42_45AD end
# Blown-away variant (see Function_37_44DA): chip 0x37; 0xFFFFEA98 is a
# negative z-coordinate encoded as unsigned 32-bit.
def Function_43_45D6(): pass
label("Function_43_45D6")
SetChrChipByIndex(0xFE, 0x37)
SetChrSubChip(0xFE, 0x0)
OP_9D(0xFE, 0x1324, 0x0, 0xFFFFEA98, 0x1F4, 0x7D0)
OP_A1(0xFE, 0x3E8, 0x3, 0x1, 0x2, 0x3)
Return()
# Function_43_45D6 end
# Map-object animation helper: on object 0, play animation frames
# 0x173 -> 0x17A and then the reverse span 0x17A -> 0x173, waiting (OP_79)
# after each.  OP_74/OP_71 argument meanings are engine-defined (OP_71
# looks like (obj, start_frame, end_frame, count, flags) -- confirm);
# the net effect is a forward-then-back "ping-pong" of the same frame span.
def Function_44_45FF(): pass
label("Function_44_45FF")
OP_74(0x0, 0xF)
OP_71(0x0, 0x173, 0x17A, 0x1, 0x8)
OP_79(0x0)
OP_71(0x0, 0x17A, 0x173, 0x1, 0x8)
OP_79(0x0)
Return()
# Function_44_45FF end
def Function_45_4622(): pass
label("Function_45_4622")
EventBegin(0x0)
FadeToDark(0, 0, -1)
LoadEffect(0x0, "eff\\cutin00.eff")
LoadEffect(0x1, "eff\\\\step00.eff")
LoadChrToIndex("chr/ch00050.itc", 0x1E)
LoadChrToIndex("chr/ch00051.itc", 0x1F)
LoadChrToIndex("chr/ch00056.itc", 0x20)
LoadChrToIndex("chr/ch41450.itc", 0x21)
LoadChrToIndex("chr/ch41451.itc", 0x22)
LoadChrToIndex("chr/ch41452.itc", 0x23)
LoadChrToIndex("chr/ch41453.itc", 0x24)
LoadChrToIndex("apl/ch51613.itc", 0x25)
SoundLoad(825)
SoundLoad(2949)
SoundLoad(2950)
SoundLoad(2951)
SoundLoad(2952)
ClearChrFlags(0x18, 0x80)
OP_78(0x1, 0x18)
OP_49()
SetChrPos(0x18, -36800, 0, 26550, 135)
OP_D5(0x18, 0x0, 0x20F58, 0x0, 0x0)
SetMapObjFlags(0x1, 0x1000)
OP_74(0x1, 0x1E)
OP_71(0x1, 0xA, 0x32, 0x1, 0x20)
SetChrFlags(0x18, 0x1)
OP_52(0x18, 0x7, (scpexpr(EXPR_PUSH_LONG, 0x206C), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
OP_52(0x18, 0x28, (scpexpr(EXPR_PUSH_LONG, 0x9), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
SetMapObjFrame(0x1, "879mabuta:Layer1(43)", 0x0, 0x1)
SetMapObjFrame(0x1, "879mabuta:Layer2(44)", 0x0, 0x1)
SetChrChipByIndex(0x101, 0x20)
SetChrSubChip(0x101, 0x0)
SetChrPos(0x101, -5000, 0, 2000, 180)
SetChrChipByIndex(0x10, 0x21)
SetChrSubChip(0x10, 0x0)
ClearChrFlags(0x10, 0x80)
SetChrFlags(0x10, 0x8000)
SetChrPos(0x10, -6000, 0, -5000, 0)
SetChrChipByIndex(0x11, 0x21)
SetChrSubChip(0x11, 0x0)
ClearChrFlags(0x11, 0x80)
SetChrFlags(0x11, 0x8000)
SetChrPos(0x11, -7000, 0, -3500, 0)
SetChrChipByIndex(0x12, 0x21)
SetChrSubChip(0x12, 0x0)
ClearChrFlags(0x12, 0x80)
SetChrFlags(0x12, 0x8000)
SetChrPos(0x12, -8500, 0, -2500, 45)
SetChrChipByIndex(0x13, 0x21)
SetChrSubChip(0x13, 0x0)
ClearChrFlags(0x13, 0x80)
SetChrFlags(0x13, 0x8000)
SetChrPos(0x13, -4000, 0, -4500, 0)
SetChrChipByIndex(0x14, 0x21)
SetChrSubChip(0x14, 0x0)
ClearChrFlags(0x14, 0x80)
SetChrFlags(0x14, 0x8000)
SetChrPos(0x14, -3000, 0, -3000, 0)
SetChrChipByIndex(0x15, 0x21)
SetChrSubChip(0x15, 0x0)
ClearChrFlags(0x15, 0x80)
SetChrFlags(0x15, 0x8000)
SetChrPos(0x15, -2000, 0, -4500, 0)
SetChrChipByIndex(0x16, 0x21)
SetChrSubChip(0x16, 0x0)
ClearChrFlags(0x16, 0x80)
SetChrFlags(0x16, 0x8000)
SetChrPos(0x16, -500, 0, -2500, 315)
SetChrChipByIndex(0x17, 0x21)
SetChrSubChip(0x17, 0x0)
ClearChrFlags(0x17, 0x80)
SetChrFlags(0x17, 0x8000)
SetChrPos(0x17, -5000, 0, -2500, 0)
SetMapObjFrame(0xFF, "touboku00", 0x0, 0x1)
SetMapObjFrame(0xFF, "touboku01", 0x0, 0x1)
SetMapObjFrame(0xFF, "touboku02", 0x0, 0x1)
ClearMapObjFlags(0x2, 0x4)
ClearMapObjFlags(0x3, 0x4)
ClearMapObjFlags(0x4, 0x4)
SetMapObjFlags(0x2, 0x1000)
SetMapObjFlags(0x3, 0x1000)
SetMapObjFlags(0x4, 0x1000)
ClearChrFlags(0x19, 0x80)
OP_78(0x2, 0x19)
OP_49()
SetChrPos(0x19, -28500, 0, 21580, 0)
OP_D5(0x19, 0x0, 0x0, 0x0, 0x0)
ClearChrFlags(0x1A, 0x80)
OP_78(0x3, 0x1A)
OP_49()
SetChrPos(0x1A, -27250, 0, 18000, 0)
OP_D5(0x1A, 0x0, 0x0, 0x0, 0x0)
ClearChrFlags(0x1B, 0x80)
OP_78(0x4, 0x1B)
OP_49()
SetChrPos(0x1B, -24500, 0, 18250, 0)
OP_D5(0x1B, 0x0, 0x0, 0x0, 0x0)
Jc((scpexpr(EXPR_PUSH_LONG, 0x0), scpexpr(EXPR_END)), "loc_4953")
FadeToBright(0, 0)
Jump("loc_50A0")
label("loc_4953")
OP_68(-5000, 1000, 2000, 0)
MoveCamera(45, 23, 0, 0)
OP_6E(550, 0)
SetCameraDistance(22500, 0)
SetMapObjFrame(0xFF, "hikari", 0x0, 0x1)
FadeToBright(1000, 0)
OP_68(-5000, 1000, -300, 2500)
OP_6F(0x79)
OP_0D()
OP_A6(0x101, 0x0, 0x32, 0x1F4, 0xBB8)
#C0094
ChrTalk(
0x101,
"#00006F#50W#5Pはあはあはあ……ぐうっ……\x02",
)
CloseMessageWindow()
#C0095
ChrTalk(
0x10,
(
"#12P……フン。\x01",
"なかなか見上げたもんだ。\x02",
)
)
CloseMessageWindow()
#C0096
ChrTalk(
0x13,
(
"#12Pまさか警察官ごときが\x01",
"俺たち新生国防軍を\x01",
"ここまで翻弄するとは……\x02",
)
)
CloseMessageWindow()
#C0097
ChrTalk(
0x12,
(
"#6P確かシーカー少尉の\x01",
"元同僚だったか?\x02",
)
)
CloseMessageWindow()
#C0098
ChrTalk(
0x12,
"#6Pさすがと言うべきか……\x02",
)
CloseMessageWindow()
#C0099
ChrTalk(
0x17,
"#11Pよし、武装解除するぞ。\x02",
)
CloseMessageWindow()
#C0100
ChrTalk(
0x17,
(
"#11Pあまり傷付けずに\x01",
"捕らえろとの命令だ。\x02",
)
)
CloseMessageWindow()
PlayBGM("ed7356", 0)
OP_A6(0x101, 0x0, 0x32, 0x1F4, 0xBB8)
#C0101
ChrTalk(
0x101,
"#00015F#60W#5P………お断り………だ………\x02",
)
CloseMessageWindow()
SetCameraDistance(20500, 10000)
def lambda_4B51():
OP_A6(0xFE, 0x0, 0x32, 0x1F4, 0xBB8)
ExitThread()
QueueWorkItem(0x101, 2, lambda_4B51)
Sleep(150)
Fade(250)
SetChrChipByIndex(0x101, 0x1E)
SetChrSubChip(0x101, 0x0)
Sound(805, 0, 100, 0)
Sound(802, 0, 100, 0)
BeginChrThread(0x101, 0, 0, 67)
OP_0D()
WaitChrThread(0x101, 2)
OP_63(0x10, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
OP_63(0x11, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
OP_63(0x12, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
Sleep(50)
OP_63(0x13, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
OP_63(0x14, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
OP_63(0x17, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
Sleep(50)
OP_63(0x15, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
OP_63(0x16, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
Sleep(1000)
Sound(531, 0, 100, 0)
BeginChrThread(0x17, 3, 0, 54)
BeginChrThread(0x10, 3, 0, 54)
BeginChrThread(0x11, 3, 0, 54)
Sleep(50)
BeginChrThread(0x12, 3, 0, 54)
BeginChrThread(0x13, 3, 0, 54)
Sleep(50)
Sound(531, 0, 100, 0)
BeginChrThread(0x14, 3, 0, 54)
BeginChrThread(0x15, 3, 0, 54)
BeginChrThread(0x16, 3, 0, 54)
WaitChrThread(0x17, 3)
WaitChrThread(0x10, 3)
WaitChrThread(0x11, 3)
WaitChrThread(0x12, 3)
WaitChrThread(0x13, 3)
WaitChrThread(0x14, 3)
WaitChrThread(0x15, 3)
WaitChrThread(0x16, 3)
#C0102
ChrTalk(
0x10,
"#12Pこいつ……!\x02",
)
CloseMessageWindow()
#C0103
ChrTalk(
0x11,
"#12Pまだ動けるのか!?\x02",
)
CloseMessageWindow()
OP_63(0x17, 0x0, 2000, 0x18, 0x1B, 0xFA, 0x0)
Sleep(2000)
OP_64(0x17)
#C0104
ChrTalk(
0x17,
(
"#11P……判らんな。\x01",
"どうしてそこまでする?\x02",
)
)
CloseMessageWindow()
#C0105
ChrTalk(
0x17,
(
"#11Pどうやら、新たなクロスベルの\x01",
"体制が気に喰わんようだが……\x02",
)
)
CloseMessageWindow()
#C0106
ChrTalk(
0x17,
(
"#11Pお前一人が抗#2Rあらが#ったところで\x01",
"状況が変わるものでもなかろう。\x02",
)
)
CloseMessageWindow()
OP_68(-5000, 1000, 2000, 8000)
MoveCamera(0, 27, 0, 8000)
SetCameraDistance(19500, 8000)
#C0107
ChrTalk(
0x101,
(
"#00010F#5P#40Wそれでも……\x02\x03",
"……それでも\x01",
"誰かが立ち上がらなかったら\x01",
"何も変わらない……!\x02\x03",
"#00015F流されるだけじゃなく……\x01",
"……自分自身の目で\x01",
"真実を見極めるためにも……\x02\x03",
"#00007F大切な人たちを……\x01",
"……取り戻すためにも……!\x02",
)
)
CloseMessageWindow()
OP_6F(0x79)
PlayEffect(0x0, 0xFF, 0xFF, 0x0, 0, 0, 0, 0, 0, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
Sound(833, 0, 60, 0)
Fade(500)
BlurSwitch(0x1, 0xBBFFFFFF, 0x0, 0x0, 0x0)
OP_68(-5000, 1000, 2000, 500)
MoveCamera(0, 27, 0, 500)
SetCameraDistance(17500, 500)
OP_6F(0x79)
CancelBlur(0)
OP_0D()
Sleep(1200)
OP_82(0xC8, 0x0, 0xBB8, 0x1F4)
#C0108
ChrTalk(
0x101,
(
"#00007F#4S#5P俺は……!\x01",
"絶対に諦めたりしない!\x02\x03",
"#4S何度でも……!\x01",
"たとえ足をへし折られても\x01",
"立ち上がってみせる……!\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
OP_5A()
Fade(500)
OP_68(-5000, 1000, -1400, 0)
MoveCamera(0, 25, 0, 0)
OP_6E(550, 0)
SetCameraDistance(20250, 0)
OP_0D()
#C0109
ChrTalk(
0x10,
"#6Pうっ……\x02",
)
CloseMessageWindow()
#C0110
ChrTalk(
0x13,
"#12Pこいつは……\x02",
)
CloseMessageWindow()
#C0111
ChrTalk(
0x17,
"#5P……惜しいな。\x02",
)
CloseMessageWindow()
#C0112
ChrTalk(
0x17,
(
"#5P一気にかかれ。\x01",
"抵抗させずに落とすぞ。\x02",
)
)
CloseMessageWindow()
#C0113
ChrTalk(
0x11,
"#6P了解#4Rラジャ#。\x02",
)
CloseMessageWindow()
#C0114
ChrTalk(
0x15,
"#12P足を狙え。\x02",
)
CloseMessageWindow()
label("loc_50A0")
Fade(500)
OP_68(-5000, 1000, -300, 0)
MoveCamera(135, 35, 0, 0)
OP_6E(550, 0)
SetCameraDistance(25500, 0)
SetMapObjFrame(0xFF, "hikari", 0x1, 0x1)
SetChrChipByIndex(0x101, 0x1E)
SetChrSubChip(0x101, 0x0)
Sound(805, 0, 100, 0)
SetChrChipByIndex(0x10, 0x23)
SetChrSubChip(0x10, 0x2)
SetChrChipByIndex(0x11, 0x23)
SetChrSubChip(0x11, 0x2)
SetChrChipByIndex(0x12, 0x23)
SetChrSubChip(0x12, 0x2)
SetChrChipByIndex(0x13, 0x23)
SetChrSubChip(0x13, 0x2)
SetChrChipByIndex(0x14, 0x23)
SetChrSubChip(0x14, 0x2)
SetChrChipByIndex(0x15, 0x23)
SetChrSubChip(0x15, 0x2)
SetChrChipByIndex(0x16, 0x23)
SetChrSubChip(0x16, 0x2)
SetChrChipByIndex(0x17, 0x23)
SetChrSubChip(0x17, 0x2)
OP_68(-5000, 1000, 2000, 2500)
MoveCamera(0, 21, 0, 10000)
OP_6E(550, 10000)
SetCameraDistance(17000, 7500)
BlurSwitch(0x0, 0xBBFFFFFF, 0x0, 0x0, 0x0)
Sleep(1)
CancelBlur(2500)
BeginChrThread(0x17, 3, 0, 46)
BeginChrThread(0x10, 3, 0, 47)
BeginChrThread(0x11, 3, 0, 48)
BeginChrThread(0x12, 3, 0, 49)
BeginChrThread(0x13, 3, 0, 50)
BeginChrThread(0x14, 3, 0, 51)
BeginChrThread(0x15, 3, 0, 52)
BeginChrThread(0x16, 3, 0, 53)
OP_0D()
Sleep(800)
#C0115
ChrTalk(
0x101,
(
"#00013F#5P#40W(……最後まで……\x01",
" 最後まで諦めるな……)\x02\x03",
"#00003F(キーア……\x01",
" ……エリィ……ティオ……)\x02\x03",
"(ランディ……ワジ……\x01",
" ……セルゲイ課長も……)\x02\x03",
"#00010F(どうか俺に……\x01",
" ……俺に力を貸してくれ……!)\x02",
)
)
CloseMessageWindow()
WaitChrThread(0x17, 3)
WaitChrThread(0x10, 3)
WaitChrThread(0x11, 3)
WaitChrThread(0x12, 3)
WaitChrThread(0x13, 3)
WaitChrThread(0x14, 3)
WaitChrThread(0x15, 3)
WaitChrThread(0x16, 3)
OP_6F(0x79)
Sleep(200)
Sound(912, 0, 100, 0)
Fade(500)
OP_68(-5000, 1000, 2000, 0)
MoveCamera(90, 23, 0, 0)
OP_6E(550, 0)
SetCameraDistance(12500, 0)
SetCameraDistance(22500, 1500)
OP_6F(0x79)
OP_0D()
Sleep(500)
OP_C9(0x0, 0x80000000)
SetMessageWindowPos(30, 30, -1, -1)
SetChrName("声")
#A0116
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x3),
"#2949V#40W#4S──やれやれ。\x02",
)
)
CloseMessageWindow()
#A0117
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x3),
"#2950V#40W#4S頼むべき存在を1つ、\x01",
"忘れているようだな。\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
OP_5A()
OP_24(0xB86)
SetMessageWindowPos(14, 280, 60, 3)
OP_C9(0x1, 0x80000000)
OP_63(0x101, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
EndChrThread(0x101, 0x0)
SetChrSubChip(0x101, 0x0)
OP_63(0x10, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
OP_63(0x11, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
Sleep(50)
OP_63(0x12, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
OP_63(0x13, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
OP_63(0x14, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
Sleep(50)
OP_63(0x15, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
OP_63(0x16, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
OP_63(0x17, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
Sleep(1000)
#C0118
ChrTalk(
0x101,
"#00005F#5Pえ……\x02",
)
CloseMessageWindow()
#C0119
ChrTalk(
0x17,
"#11P……!?\x02",
)
CloseMessageWindow()
#C0120
ChrTalk(
0x16,
"#5Pい、今のは……\x02",
)
CloseMessageWindow()
#C0121
ChrTalk(
0x13,
"#11P頭に響いて──\x02",
)
CloseMessageWindow()
Sound(913, 0, 100, 0)
OP_82(0xC8, 0x12C, 0x1770, 0x7D0)
BlurSwitch(0x3E8, 0xBBFFFFFF, 0x0, 0x1, 0xA)
Sleep(1500)
CancelBlur(1000)
Sleep(1500)
OP_63(0x101, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
EndChrThread(0x101, 0x0)
SetChrSubChip(0x101, 0x0)
OP_63(0x10, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
OP_63(0x11, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
Sleep(50)
OP_63(0x12, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
OP_63(0x13, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
OP_63(0x14, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
Sleep(50)
OP_63(0x15, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
OP_63(0x16, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
OP_63(0x17, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
Sleep(1000)
OP_63(0x10, 0x0, 2000, 0x28, 0x2B, 0x64, 0x0)
OP_A6(0x10, 0x0, 0x32, 0x1F4, 0xBB8)
Sleep(500)
OP_64(0x10)
#C0122
ChrTalk(
0x10,
"#12Pひっ……!?\x02",
)
CloseMessageWindow()
OP_63(0x15, 0x0, 2000, 0x28, 0x2B, 0x64, 0x0)
OP_A6(0x15, 0x0, 0x32, 0x1F4, 0xBB8)
Sleep(500)
OP_64(0x15)
#C0123
ChrTalk(
0x15,
"#5Pな、なんだ……!?\x02",
)
CloseMessageWindow()
Sound(914, 0, 100, 0)
OP_82(0x5A, 0x96, 0x1388, 0x1F4)
Sleep(500)
Sleep(200)
def lambda_5643():
OP_93(0xFE, 0x13B, 0x1F4)
ExitThread()
QueueWorkItem(0x101, 2, lambda_5643)
BeginChrThread(0x17, 3, 0, 55)
BeginChrThread(0x10, 3, 0, 55)
Sleep(50)
BeginChrThread(0x11, 3, 0, 55)
BeginChrThread(0x12, 3, 0, 55)
Sleep(50)
BeginChrThread(0x13, 3, 0, 55)
BeginChrThread(0x14, 3, 0, 55)
Sleep(50)
BeginChrThread(0x15, 3, 0, 55)
BeginChrThread(0x16, 3, 0, 55)
WaitChrThread(0x17, 3)
WaitChrThread(0x10, 3)
WaitChrThread(0x11, 3)
WaitChrThread(0x12, 3)
WaitChrThread(0x13, 3)
WaitChrThread(0x14, 3)
WaitChrThread(0x15, 3)
WaitChrThread(0x16, 3)
WaitChrThread(0x101, 2)
Fade(1000)
SetChrPos(0x17, -5500, 0, -2150, 315)
SetChrPos(0x12, -4750, 0, 4450, 300)
OP_68(-36800, 4000, 26550, 0)
MoveCamera(3, 30, 0, 0)
OP_6E(550, 0)
SetCameraDistance(43000, 0)
OP_68(-16120, 4000, 9190, 8500)
MoveCamera(288, 3, 0, 8500)
OP_6E(550, 8500)
SetCameraDistance(23000, 8500)
ClearMapObjFlags(0x1, 0x4)
BeginChrThread(0x18, 3, 0, 65)
WaitChrThread(0x18, 3)
OP_6F(0x79)
OP_0D()
Sleep(800)
OP_68(-9220, 3300, 3410, 2000)
MoveCamera(284, 8, 0, 2000)
OP_6E(550, 2000)
SetCameraDistance(28200, 2000)
OP_6F(0x79)
#C0124
ChrTalk(
0x10,
"#5P#40W#2S………え…………?\x02",
)
CloseMessageWindow()
#C0125
ChrTalk(
0x16,
"#11P#40W#2S…………な………………\x02",
)
CloseMessageWindow()
#C0126
ChrTalk(
0x101,
"#00005F#5P#30W………………………………\x02",
)
CloseMessageWindow()
OP_57(0x0)
OP_5A()
Fade(500)
OP_68(-9550, 3300, 4600, 0)
MoveCamera(102, 26, 0, 0)
OP_6E(550, 0)
SetCameraDistance(22200, 0)
SetCameraDistance(20700, 2000)
OP_6F(0x79)
OP_0D()
OP_C9(0x0, 0x80000000)
SetMessageWindowPos(50, 150, -1, -1)
SetChrName("白き巨狼")
#A0127
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x3),
"#2951V#40W#4S去れ──\x01",
"偽りの聖地を守る兵#2Rつわもの#どもよ。\x02",
)
)
CloseMessageWindow()
SetMessageWindowPos(40, 155, -1, -1)
SetChrName("白き巨狼")
#A0128
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x3),
"#2952V#40W#4Sこの者は私が預からせてもらう。\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
OP_5A()
OP_24(0xB88)
SetMessageWindowPos(14, 280, 60, 3)
OP_C9(0x1, 0x80000000)
OP_82(0x96, 0x0, 0xBB8, 0x1F4)
BeginChrThread(0x17, 3, 0, 56)
BeginChrThread(0x10, 3, 0, 57)
#C0129
ChrTalk(
0x10,
"#11Pひ、ひいいいっ……!\x05\x02",
)
Sleep(50)
BeginChrThread(0x11, 3, 0, 57)
BeginChrThread(0x12, 3, 0, 57)
Sleep(50)
BeginChrThread(0x13, 3, 0, 57)
BeginChrThread(0x14, 3, 0, 57)
#C0130
ChrTalk(
0x15,
"#11Pうわあああっ……!\x05\x02",
)
Sleep(50)
BeginChrThread(0x15, 3, 0, 57)
BeginChrThread(0x16, 3, 0, 57)
WaitChrThread(0x17, 3)
WaitChrThread(0x10, 3)
WaitChrThread(0x11, 3)
WaitChrThread(0x12, 3)
WaitChrThread(0x13, 3)
WaitChrThread(0x14, 3)
WaitChrThread(0x15, 3)
WaitChrThread(0x16, 3)
CloseMessageWindow()
OP_63(0x17, 0x0, 2000, 0xC, 0xD, 0xFA, 0x2)
Sound(23, 0, 100, 0)
Sleep(1000)
OP_68(-9550, 1700, 4600, 1500)
SetChrChipByIndex(0x17, 0x22)
SetChrSubChip(0x17, 0x0)
OP_9B(0x0, 0x17, 0x9, 0x1770, 0x1388, 0x0)
SetChrChipByIndex(0x17, 0x23)
OP_A1(0x17, 0x5DC, 0x3, 0x0, 0x1, 0x2)
Sound(531, 0, 100, 0)
OP_6F(0x79)
OP_82(0xC8, 0x0, 0xBB8, 0x1F4)
#C0131
ChrTalk(
0x17,
"#11P#4Sふ、ふざけるなっ……!\x02",
)
CloseMessageWindow()
#C0132
ChrTalk(
0x17,
(
"#11P#4S新生クロスベル国防軍が\x01",
"幻獣ごときに屈して──\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
OP_5A()
Fade(500)
OP_68(-15640, 9800, 8050, 0)
MoveCamera(315, 35, 0, 0)
OP_6E(550, 0)
SetCameraDistance(15000, 0)
Sound(913, 0, 100, 0)
OP_82(0xC8, 0x12C, 0x1770, 0x3E8)
BlurSwitch(0x0, 0xBBFFFFFF, 0x0, 0x0, 0x0)
OP_68(-10700, 1000, 4500, 1500)
MoveCamera(279, 26, 0, 1500)
OP_6E(550, 1500)
SetCameraDistance(18500, 1500)
SetChrPos(0x18, -15350, 0, 8550, 225)
SetChrPos(0x17, -10700, 0, 4500, 315)
SetChrFlags(0x12, 0x8)
OP_93(0x11, 0x0, 0x0)
OP_93(0x14, 0xE1, 0x0)
OP_74(0x1, 0x14)
OP_71(0x1, 0xB5, 0xDC, 0x0, 0x8)
Sleep(300)
CancelBlur(0)
Sleep(850)
StopSound(913, 700, 100)
Sound(915, 0, 100, 0)
SetChrFlags(0x17, 0x8)
BeginChrThread(0x17, 3, 0, 58)
OP_63(0x101, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
OP_63(0x10, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
Sleep(50)
OP_63(0x11, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
Sleep(50)
OP_63(0x13, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
Sleep(50)
OP_63(0x14, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
Sleep(50)
OP_63(0x15, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
OP_63(0x16, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
Sleep(1000)
OP_79(0x1)
OP_71(0x1, 0x3D, 0x64, 0x0, 0x20)
Sound(912, 0, 50, 0)
#C0133
ChrTalk(
0x11,
"#5P#50W………ぇ……………\x02",
)
CloseMessageWindow()
#C0134
ChrTalk(
0x14,
"#11P#50W…………ぅぁ………………\x02",
)
CloseMessageWindow()
StopSound(912, 500, 40)
Fade(500)
OP_68(-8350, 5600, 2950, 0)
MoveCamera(106, 31, 0, 0)
OP_6E(550, 0)
SetCameraDistance(26750, 0)
OP_68(-8350, 2100, 2950, 1500)
MoveCamera(92, 36, 0, 1500)
OP_6E(550, 1500)
SetCameraDistance(23000, 1500)
def lambda_5CB6():
label("loc_5CB6")
TurnDirection(0xFE, 0x17, 500)
Yield()
Jump("loc_5CB6")
QueueWorkItem2(0x101, 2, lambda_5CB6)
def lambda_5CC8():
label("loc_5CC8")
TurnDirection(0xFE, 0x17, 500)
Yield()
Jump("loc_5CC8")
QueueWorkItem2(0x10, 2, lambda_5CC8)
def lambda_5CDA():
label("loc_5CDA")
TurnDirection(0xFE, 0x17, 500)
Yield()
Jump("loc_5CDA")
QueueWorkItem2(0x11, 2, lambda_5CDA)
def lambda_5CEC():
label("loc_5CEC")
TurnDirection(0xFE, 0x17, 500)
Yield()
Jump("loc_5CEC")
QueueWorkItem2(0x12, 2, lambda_5CEC)
def lambda_5CFE():
label("loc_5CFE")
TurnDirection(0xFE, 0x17, 500)
Yield()
Jump("loc_5CFE")
QueueWorkItem2(0x13, 2, lambda_5CFE)
def lambda_5D10():
label("loc_5D10")
TurnDirection(0xFE, 0x17, 500)
Yield()
Jump("loc_5D10")
QueueWorkItem2(0x14, 2, lambda_5D10)
def lambda_5D22():
label("loc_5D22")
TurnDirection(0xFE, 0x17, 500)
Yield()
Jump("loc_5D22")
QueueWorkItem2(0x15, 2, lambda_5D22)
def lambda_5D34():
label("loc_5D34")
TurnDirection(0xFE, 0x17, 500)
Yield()
Jump("loc_5D34")
QueueWorkItem2(0x16, 2, lambda_5D34)
ClearChrFlags(0x12, 0x8)
OP_64(0x17)
SetChrFlags(0x17, 0x8)
OP_74(0x1, 0x5)
OP_71(0x1, 0xB5, 0xB7, 0x1F4, 0x8)
Sleep(500)
ClearChrFlags(0x17, 0x8)
BeginChrThread(0x17, 3, 0, 59)
OP_79(0x1)
OP_74(0x1, 0x3)
OP_71(0x1, 0xB7, 0xB5, 0x0, 0x8)
OP_79(0x1)
OP_74(0x1, 0x14)
OP_71(0x1, 0x3D, 0x64, 0x12C, 0x20)
WaitChrThread(0x17, 3)
OP_6F(0x79)
OP_0D()
EndChrThread(0x10, 0x2)
EndChrThread(0x11, 0x2)
EndChrThread(0x12, 0x2)
EndChrThread(0x13, 0x2)
EndChrThread(0x14, 0x2)
EndChrThread(0x15, 0x2)
EndChrThread(0x16, 0x2)
Sleep(500)
#C0135
ChrTalk(
0x17,
"#60W#12P…………………………………\x02",
)
CloseMessageWindow()
Sleep(300)
SetMessageWindowPos(50, 150, -1, -1)
SetChrName("白き巨狼")
#A0136
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x3),
"#30W──さて。\x01",
"同じ事を言おうか?\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
OP_5A()
SetMessageWindowPos(14, 280, 60, 3)
SetChrSubChip(0x17, 0x3)
OP_A6(0x17, 0x0, 0x32, 0x1F4, 0xBB8)
Sleep(300)
#C0137
ChrTalk(
0x17,
"#60W#12P#60W………その必要はない…………\x02",
)
CloseMessageWindow()
OP_57(0x0)
OP_5A()
Fade(500)
OP_68(-7900, 1000, 2580, 0)
MoveCamera(354, 19, 0, 0)
OP_6E(650, 0)
SetCameraDistance(15500, 0)
SetChrSubChip(0x17, 0x1)
SetCameraDistance(14000, 1500)
OP_6F(0x79)
OP_0D()
Sleep(200)
Fade(250)
SetChrFlags(0x17, 0x20)
ClearChrFlags(0x17, 0x2)
SetChrSubChip(0x17, 0x1)
Sound(802, 0, 100, 0)
OP_0D()
Sleep(300)
OP_93(0x17, 0x87, 0x1F4)
Sleep(500)
SetCameraDistance(25000, 750)
BlurSwitch(0x0, 0xBBFFFFFF, 0x0, 0x0, 0x0)
OP_82(0x12C, 0x0, 0xBB8, 0x1F4)
#C0138
ChrTalk(
0x17,
"#5S#5P撤収──ッ!!!\x05\x02",
)
Sleep(1000)
CancelBlur(500)
OP_6F(0x79)
CloseMessageWindow()
OP_57(0x0)
OP_5A()
Fade(250)
OP_68(-4700, 2550, -400, 0)
MoveCamera(310, 9, 0, 0)
OP_6E(550, 0)
SetCameraDistance(21000, 0)
OP_68(-4700, 1550, -400, 5000)
SetCameraDistance(25000, 5000)
BeginChrThread(0x18, 0, 0, 63)
Sound(871, 0, 60, 0)
Sound(825, 2, 40, 0)
BeginChrThread(0x17, 3, 0, 61)
Sleep(50)
BeginChrThread(0x10, 3, 0, 62)
Sleep(50)
BeginChrThread(0x11, 3, 0, 62)
#C0139
ChrTalk(
0x11,
"#5Pうわあああああああっ!!\x05\x02",
)
Sleep(50)
BeginChrThread(0x12, 3, 0, 62)
Sleep(50)
BeginChrThread(0x13, 3, 0, 62)
Sleep(50)
BeginChrThread(0x14, 3, 0, 62)
Sound(916, 0, 100, 0)
#C0140
ChrTalk(
0x14,
"#11Pめ、女神さまああああっ!!\x05\x02",
)
Sleep(50)
BeginChrThread(0x15, 3, 0, 62)
Sleep(50)
BeginChrThread(0x16, 3, 0, 62)
WaitChrThread(0x17, 3)
WaitChrThread(0x10, 3)
WaitChrThread(0x11, 3)
WaitChrThread(0x12, 3)
WaitChrThread(0x13, 3)
WaitChrThread(0x14, 3)
WaitChrThread(0x15, 3)
WaitChrThread(0x16, 3)
CloseMessageWindow()
OP_6F(0x79)
OP_0D()
Fade(500)
EndChrThread(0x101, 0x2)
EndChrThread(0x18, 0x0)
OP_82(0x0, 0x46, 0xBB8, 0x5DC)
OP_63(0x101, 0x0, 2000, 0x18, 0x1B, 0xFA, 0x0)
StopSound(825, 1000, 40)
OP_68(-5000, 1200, 2000, 0)
MoveCamera(354, 19, 0, 0)
OP_6E(550, 0)
SetCameraDistance(19500, 0)
SetCameraDistance(19000, 2000)
OP_6F(0x79)
OP_0D()
OP_64(0x101)
Sleep(1000)
#C0141
ChrTalk(
0x101,
"#00011F#5P#30W…………………………………\x02",
)
CloseMessageWindow()
OP_68(-7330, 3200, 1550, 2000)
MoveCamera(348, 19, 0, 2000)
OP_6E(550, 2000)
SetCameraDistance(20760, 2000)
BeginChrThread(0x18, 0, 0, 66)
BeginChrThread(0x1D, 1, 0, 72)
OP_74(0x1, 0x14)
OP_71(0x1, 0x172, 0x19A, 0x1, 0x20)
OP_93(0x18, 0x87, 0x0)
OP_9B(0x0, 0x18, 0x0, 0x1B58, 0xBB8, 0x0)
EndChrThread(0x18, 0x0)
EndChrThread(0x1D, 0x1)
OP_74(0x1, 0x14)
OP_71(0x1, 0x3D, 0x64, 0x2BC, 0x20)
OP_6F(0x79)
Sleep(800)
SetMessageWindowPos(20, 120, -1, -1)
SetChrName("白き巨狼")
#A0142
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x3),
"#30Wフム、さすがに\x01",
"驚かせてしまったか。\x02",
)
)
CloseMessageWindow()
#A0143
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x3),
"#30Wこの姿に戻ったのは久しいゆえ、\x01",
"いささか加減が分からぬな。\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
OP_5A()
SetMessageWindowPos(14, 280, 60, 3)
Fade(250)
SetChrChipByIndex(0x101, 0xFF)
SetChrSubChip(0x101, 0x0)
Sound(805, 0, 100, 0)
OP_0D()
Sleep(300)
#C0144
ChrTalk(
0x101,
(
"#00006F#5P#30W……言いたいことは\x01",
"色々あるんだけど……\x02\x03",
"とりあえず\x01",
"これだけは言わせてくれ。\x02",
)
)
CloseMessageWindow()
OP_74(0x1, 0x14)
OP_71(0x1, 0x1A4, 0x1AE, 0x0, 0x8)
OP_79(0x1)
OP_71(0x1, 0x1AE, 0x1D6, 0x0, 0x20)
SetMessageWindowPos(30, 120, -1, -1)
SetChrName("白き巨狼")
#A0145
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x3),
"#30Wフム、なんだ?\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
OP_5A()
SetMessageWindowPos(14, 280, 60, 3)
OP_68(-7330, 3200, 1550, 1200)
MoveCamera(332, -2, 0, 1200)
OP_6E(550, 1200)
SetCameraDistance(20760, 1200)
TurnDirection(0x101, 0x18, 500)
OP_6F(0x79)
OP_82(0x64, 0x0, 0xBB8, 0x190)
#C0146
ChrTalk(
0x101,
(
"#00007F#12P#4S#N──ツァイト!\x01",
"さすがにタイミング良すぎだろ!?\x02",
)
)
CloseMessageWindow()
FadeToDark(1000, 0, -1)
OP_0D()
StopBGM(0x1770)
WaitBGM()
OP_24(0x339)
SetScenarioFlags(0x22, 0)
NewScene("e4200", 0, 0, 0)
IdleLoop()
Return()
# Function_45_4622 end
def Function_46_6377(): pass
label("Function_46_6377")

# Thread helper: soldier runs in to a fixed point, faces the player (0x101),
# then plays a pose animation with SE 531.
# NOTE(review): OP_96 is presumed to be a move-to-absolute-position opcode and
# OP_A1 an animation-frame playback — confirm against the opcode table.
SetChrChipByIndex(0xFE, 0x22)
SetChrSubChip(0xFE, 0x0)
OP_96(0xFE, 0xFFFFEC78, 0x0, 0xFFFFF254, 0xBB8, 0x0)
TurnDirection(0xFE, 0x101, 500)
SetChrChipByIndex(0xFE, 0x21)
SetChrSubChip(0xFE, 0x0)
Sleep(100)
Sound(531, 0, 70, 0)
SetChrChipByIndex(0xFE, 0x23)
OP_A1(0xFE, 0x3E8, 0x3, 0x0, 0x1, 0x2)
Return()
# Function_46_6377 end
def Function_47_63B9(): pass
label("Function_47_63B9")

# Same run-in pattern as Function_46, but via OP_95 (presumed move-to with
# plain coordinates) to (-9000, 0, -3000) at speed 4000.
SetChrChipByIndex(0xFE, 0x22)
SetChrSubChip(0xFE, 0x0)
OP_95(0xFE, -9000, 0, -3000, 4000, 0x0)
TurnDirection(0xFE, 0x101, 500)
SetChrChipByIndex(0xFE, 0x21)
SetChrSubChip(0xFE, 0x0)
Sleep(100)
Sound(531, 0, 70, 0)
SetChrChipByIndex(0xFE, 0x23)
OP_A1(0xFE, 0x3E8, 0x3, 0x0, 0x1, 0x2)
Return()
# Function_47_63B9 end
def Function_48_63FB(): pass
label("Function_48_63FB")

# Run-in variant: destination (-10500, 0, 1000), otherwise identical to
# Function_47 (dash, face player 0x101, pose with SE 531).
SetChrChipByIndex(0xFE, 0x22)
SetChrSubChip(0xFE, 0x0)
OP_95(0xFE, -10500, 0, 1000, 4000, 0x0)
TurnDirection(0xFE, 0x101, 500)
SetChrChipByIndex(0xFE, 0x21)
SetChrSubChip(0xFE, 0x0)
Sleep(100)
Sound(531, 0, 70, 0)
SetChrChipByIndex(0xFE, 0x23)
OP_A1(0xFE, 0x3E8, 0x3, 0x0, 0x1, 0x2)
Return()
# Function_48_63FB end
def Function_49_643D(): pass
label("Function_49_643D")

# Run-in variant: destination (-10700, 0, 4500) at speed 4200.
SetChrChipByIndex(0xFE, 0x22)
SetChrSubChip(0xFE, 0x0)
OP_95(0xFE, -10700, 0, 4500, 4200, 0x0)
TurnDirection(0xFE, 0x101, 500)
SetChrChipByIndex(0xFE, 0x21)
SetChrSubChip(0xFE, 0x0)
Sleep(100)
Sound(531, 0, 70, 0)
SetChrChipByIndex(0xFE, 0x23)
OP_A1(0xFE, 0x3E8, 0x3, 0x0, 0x1, 0x2)
Return()
# Function_49_643D end
def Function_50_647F(): pass
label("Function_50_647F")

# Run-in variant: destination (-1000, 0, -2000) at speed 4000.
SetChrChipByIndex(0xFE, 0x22)
SetChrSubChip(0xFE, 0x0)
OP_95(0xFE, -1000, 0, -2000, 4000, 0x0)
TurnDirection(0xFE, 0x101, 500)
SetChrChipByIndex(0xFE, 0x21)
SetChrSubChip(0xFE, 0x0)
Sleep(100)
Sound(531, 0, 70, 0)
SetChrChipByIndex(0xFE, 0x23)
OP_A1(0xFE, 0x3E8, 0x3, 0x0, 0x1, 0x2)
Return()
# Function_50_647F end
def Function_51_64C1(): pass
label("Function_51_64C1")

# Run-in variant with a waypoint: first OP_95 (last arg 0x1) is presumed a
# non-blocking/continuing leg, then the final leg to (-6000, 0, 7500).
SetChrChipByIndex(0xFE, 0x22)
SetChrSubChip(0xFE, 0x0)
OP_95(0xFE, -2000, 0, 6500, 6000, 0x1)
OP_95(0xFE, -6000, 0, 7500, 6000, 0x0)
TurnDirection(0xFE, 0x101, 500)
SetChrChipByIndex(0xFE, 0x21)
SetChrSubChip(0xFE, 0x0)
Sleep(100)
Sound(531, 0, 70, 0)
SetChrChipByIndex(0xFE, 0x23)
OP_A1(0xFE, 0x3E8, 0x3, 0x0, 0x1, 0x2)
Return()
# Function_51_64C1 end
def Function_52_6517(): pass
label("Function_52_6517")

# Run-in variant: destination (1000, 0, 2000) at speed 5000.
SetChrChipByIndex(0xFE, 0x22)
SetChrSubChip(0xFE, 0x0)
OP_95(0xFE, 1000, 0, 2000, 5000, 0x0)
TurnDirection(0xFE, 0x101, 500)
SetChrChipByIndex(0xFE, 0x21)
SetChrSubChip(0xFE, 0x0)
Sleep(100)
Sound(531, 0, 70, 0)
SetChrChipByIndex(0xFE, 0x23)
OP_A1(0xFE, 0x3E8, 0x3, 0x0, 0x1, 0x2)
Return()
# Function_52_6517 end
def Function_53_6559(): pass
label("Function_53_6559")

# Run-in variant: destination (-2000, 0, 6500) at speed 4200.
SetChrChipByIndex(0xFE, 0x22)
SetChrSubChip(0xFE, 0x0)
OP_95(0xFE, -2000, 0, 6500, 4200, 0x0)
TurnDirection(0xFE, 0x101, 500)
SetChrChipByIndex(0xFE, 0x21)
SetChrSubChip(0xFE, 0x0)
Sleep(100)
Sound(531, 0, 70, 0)
SetChrChipByIndex(0xFE, 0x23)
OP_A1(0xFE, 0x3E8, 0x3, 0x0, 0x1, 0x2)
Return()
# Function_53_6559 end
def Function_54_659B(): pass
label("Function_54_659B")

# Minimal variant of the run-in helpers: no movement, just switch to the pose
# chip and play the animation (slightly slower frame time 0x5DC vs 0x3E8).
SetChrChipByIndex(0xFE, 0x23)
OP_A1(0xFE, 0x5DC, 0x3, 0x0, 0x1, 0x2)
Return()
# Function_54_659B end
def Function_55_65A9(): pass
label("Function_55_65A9")

# Snap-turn toward character 0x18 (the wolf) with flag 0x20 temporarily set —
# presumably suppressing the turn animation so the turn is instant; confirm.
SetChrSubChip(0xFE, 0x0)
SetChrFlags(0xFE, 0x20)
TurnDirection(0xFE, 0x18, 500)
ClearChrFlags(0xFE, 0x20)
Return()
# Function_55_65A9 end
def Function_56_65BF(): pass
label("Function_56_65BF")

# Startled reaction: show an emote balloon (OP_63) and run a trembling/shake
# (OP_A6, presumed) on the character's sub-thread 1, then clear the balloon.
OP_63(0xFE, 0x0, 2000, 0x28, 0x2B, 0x64, 0x0)
def lambda_65D6():
    OP_A6(0xFE, 0x0, 0x32, 0x1F4, 0xBB8)
    ExitThread()
QueueWorkItem(0xFE, 1, lambda_65D6)
WaitChrThread(0xFE, 1)
Sleep(500)
OP_64(0xFE)
Return()
# Function_56_65BF end
def Function_57_65F5(): pass
label("Function_57_65F5")

# Startled reaction with recoil: emote balloon plus shake thread, while the
# main body hops backwards (OP_9B with negative distance 0xFFFFFD12, presumed
# a relative move — confirm) before clearing the balloon.
OP_63(0xFE, 0x0, 2000, 0x28, 0x2B, 0x64, 0x0)
def lambda_660C():
    OP_A6(0xFE, 0x0, 0x32, 0x1F4, 0xBB8)
    ExitThread()
QueueWorkItem(0xFE, 1, lambda_660C)
SetChrChipByIndex(0xFE, 0x22)
SetChrSubChip(0xFE, 0x0)
OP_9B(0x1, 0xFE, 0x0, 0xFFFFFD12, 0xBB8, 0x0)
SetChrChipByIndex(0xFE, 0x23)
SetChrSubChip(0xFE, 0x0)
WaitChrThread(0xFE, 1)
Sleep(500)
OP_64(0xFE)
Return()
# Function_57_65F5 end
def Function_58_664A(): pass
label("Function_58_664A")

# Character 0x17 lifted/held sequence: effect burst at a fixed world position,
# switch to a special chip (0x24/sub 0x7), toggle state flags, attach mouth
# model (OP_D3), then raise and spin via repeated OP_D5 (presumed
# rotation/orientation tweening — confirm) while looping a trembling shake.
PlayEffect(0x1, 0xFF, 0xFF, 0x0, -10700, 500, 4500, 0, 0, 0, 2500, 2500, 2500, 0xFF, 0, 0, 0, 0)
SetChrChipByIndex(0xFE, 0x24)
SetChrSubChip(0xFE, 0x7)
ClearChrFlags(0xFE, 0x100)
SetChrFlags(0xFE, 0x800)
SetChrFlags(0xFE, 0x2)
ClearChrFlags(0xFE, 0x1)
OP_D3(0x17, 0x1, "Null_kuti(41)")
OP_52(0xFE, 0x23, (scpexpr(EXPR_PUSH_LONG, 0xAF), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
OP_52(0xFE, 0x24, (scpexpr(EXPR_PUSH_LONG, 0x50), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
OP_D5(0xFE, 0x4E20, 0x41EB0, 0x0, 0x0)
OP_D5(0xFE, 0x4E20, 0x41EB0, 0xFFFF8AD0, 0x64)
OP_D5(0xFE, 0x4E20, 0x41EB0, 0xEA60, 0x190)
OP_D5(0xFE, 0x4E20, 0x41EB0, 0x61A8, 0x190)
OP_63(0xFE, 0x0, 300, 0x28, 0x2B, 0x64, 0x0)
label("loc_6723")
# Constant-true guard: this loop never exits on its own; the owning thread is
# ended externally by the caller.
Jc((scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_END)), "loc_6749")
OP_A6(0xFE, 0x0, 0x1E, 0x1F4, 0xBB8)
Sleep(1000)
Jump("loc_6723")
label("loc_6749")
Return()
# Function_58_664A end
def Function_59_674A(): pass
label("Function_59_674A")

# Release/drop sequence for the held character: impact SE, camera shake,
# detach the OP_D3 attachment, restore normal chip/flags, jump-arc down
# (OP_9D, presumed a parabolic move — confirm), land with an effect and a
# shockwave shake (OP_A6 on sub-thread 2) plus a landing animation.
Sound(915, 0, 100, 0)
SetChrChip(0x0, 0xFE, 0x5, 0x96)
SetChrFlags(0xFE, 0x1)
OP_52(0xFE, 0x28, (scpexpr(EXPR_PUSH_LONG, 0xD), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
OP_82(0x64, 0x0, 0x1388, 0x12C)
OP_D3(0x17, 0xFF, "")
OP_52(0xFE, 0x23, (scpexpr(EXPR_PUSH_LONG, 0x80), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
OP_52(0xFE, 0x24, (scpexpr(EXPR_PUSH_LONG, 0xB0), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
SetChrFlags(0xFE, 0x2)
SetChrChipByIndex(0xFE, 0x25)
SetChrSubChip(0xFE, 0x4)
SetChrFlags(0xFE, 0x100)
ClearChrFlags(0xFE, 0x800)
OP_D5(0xFE, 0x4E20, 0x0, 0x0, 0x0)
Sound(844, 0, 100, 0)
OP_9D(0xFE, 0xFFFFE124, 0x0, 0xA14, 0x1F4, 0x9C4)
OP_52(0xFE, 0x28, (scpexpr(EXPR_PUSH_LONG, 0x9), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
SetChrChip(0x1, 0xFE, 0x0, 0x0)
OP_93(0xFE, 0x13B, 0x0)
PlayEffect(0x1, 0xFF, 0xFF, 0x0, -7900, 150, 2580, 0, 0, 0, 1500, 1500, 1500, 0xFF, 0, 0, 0, 0)
def lambda_6831():
    OP_A6(0xFE, 0x0, 0x1E, 0x12C, 0xDAC)
    ExitThread()
QueueWorkItem(0xFE, 2, lambda_6831)
Sound(811, 0, 100, 0)
OP_A1(0xFE, 0x3E8, 0x2, 0x3, 0x2)
WaitChrThread(0xFE, 2)
Return()
# Function_59_674A end
def Function_60_6858(): pass
label("Function_60_6858")

# Endless multi-frame animation loop (frames 0x10..0x14 ping-pong); the
# constant-true Jc guard means the loop only ends when the thread is killed.
Jc((scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_END)), "loc_6876")
OP_A1(0xFE, 0x9C4, 0x8, 0x10, 0x11, 0x12, 0x11, 0x10, 0x13, 0x14, 0x13)
Jump("Function_60_6858")
label("loc_6876")
Return()
# Function_60_6858 end
def Function_61_6877(): pass
label("Function_61_6877")

# Panicked flight (lead variant): surprise balloon, turn away, start the dust
# effect (fn 64) and the flail animation (fn 60) on sub-threads, sprint off
# (OP_9B forward move, presumed), then stop both threads and clear the balloon.
SetChrFlags(0xFE, 0x2)
OP_63(0xFE, 0x0, 1700, 0x28, 0x2B, 0x64, 0x0)
OP_93(0xFE, 0x87, 0x1F4)
BeginChrThread(0xFE, 0, 0, 64)
BeginChrThread(0xFE, 1, 0, 60)
OP_9B(0x0, 0xFE, 0x0, 0x4E20, 0x1770, 0x0)
EndChrThread(0xFE, 0x0)
EndChrThread(0xFE, 0x1)
OP_64(0xFE)
Return()
# Function_61_6877 end
def Function_62_68BC(): pass
label("Function_62_68BC")

# Panicked flight (generic soldier): like Function_61 but with the run chip
# (0x22) instead of the flail-animation thread.
OP_63(0xFE, 0x0, 1700, 0x28, 0x2B, 0x64, 0x0)
OP_93(0xFE, 0x87, 0x1F4)
BeginChrThread(0xFE, 0, 0, 64)
SetChrChipByIndex(0xFE, 0x22)
SetChrSubChip(0xFE, 0x0)
OP_9B(0x0, 0xFE, 0x0, 0x4E20, 0x1770, 0x0)
EndChrThread(0xFE, 0x0)
OP_64(0xFE)
Return()
# Function_62_68BC end
def Function_63_68FA(): pass
label("Function_63_68FA")

# Endless light camera shake every 500 ms; constant-true guard, so the loop
# runs until the owning thread is ended externally.
Jc((scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_END)), "loc_691E")
OP_82(0x3C, 0x64, 0x1388, 0x1F4)
Sleep(500)
Jump("Function_63_68FA")
label("loc_691E")
Return()
# Function_63_68FA end
def Function_64_691F(): pass
label("Function_64_691F")

# Endless dust-puff effect attached to the running character (0xFE), spawned
# every 500 ms; constant-true guard — terminated externally.
Jc((scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_END)), "loc_6969")
PlayEffect(0x1, 0xFF, 0xFE, 0x4, 0, 200, 0, 0, 0, 0, 1500, 1500, 1500, 0xFF, 0, 0, 0, 0)
Sleep(500)
Jump("Function_64_691F")
label("loc_6969")
Return()
# Function_64_691F end
def Function_65_696A(): pass
label("Function_65_696A")

# Entrance march of 0xFE (the wolf): open the map-object animation (OP_74 /
# OP_71, presumed frame-range playback — confirm), advance on sub-thread 1
# while characters 0x19/0x1A/0x1B are flung aside one second apart
# (functions 68/69/70) with camera shakes, then a final effect burst.
BeginChrThread(0xFE, 0, 0, 66)
BeginChrThread(0x1D, 1, 0, 71)
OP_74(0x1, 0x14)
OP_71(0x1, 0x172, 0x19A, 0x1, 0x20)
def lambda_698B():
    OP_9B(0x0, 0xFE, 0x163, 0x61A8, 0xBB8, 0x0)
    ExitThread()
QueueWorkItem(0xFE, 1, lambda_698B)
Sleep(1800)
OP_82(0x64, 0x1F4, 0xFA0, 0x1F4)
BeginChrThread(0x19, 3, 0, 68)
Sleep(1000)
OP_82(0x64, 0x1F4, 0xFA0, 0x1F4)
BeginChrThread(0x1A, 3, 0, 69)
Sleep(1000)
OP_82(0x64, 0x1F4, 0xFA0, 0x1F4)
BeginChrThread(0x1B, 3, 0, 70)
WaitChrThread(0xFE, 1)
EndChrThread(0xFE, 0x0)
EndChrThread(0x1D, 0x1)
PlayEffect(0x1, 0xFF, 0xFF, 0x0, -15430, 500, 8570, 0, 0, 0, 3000, 3000, 3000, 0xFF, 0, 0, 0, 0)
OP_74(0x1, 0x14)
OP_71(0x1, 0x3D, 0x64, 0x2BC, 0x20)
Return()
# Function_65_696A end
def Function_66_6A3D(): pass
label("Function_66_6A3D")

# Endless ground-tremor: camera shake plus alternating impact effects at
# offsets (0,500,-2500)/(0,500,2500) relative to 0xFE every 500 ms;
# constant-true guard — terminated externally.
Jc((scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_END)), "loc_6AD2")
OP_82(0x5A, 0x96, 0x1388, 0x1F4)
PlayEffect(0x1, 0xFF, 0xFE, 0x4, 0, 500, -2500, 0, 0, 0, 3000, 3000, 3000, 0xFF, 0, 0, 0, 0)
Sleep(500)
PlayEffect(0x1, 0xFF, 0xFE, 0x4, 0, 500, 2500, 0, 0, 0, 3000, 3000, 3000, 0xFF, 0, 0, 0, 0)
Sleep(500)
Jump("Function_66_6A3D")
label("loc_6AD2")
Return()
# Function_66_6A3D end
def Function_67_6AD3(): pass
label("Function_67_6AD3")

# Endless animation loop over frames 0x0..0x4 (ping-pong); constant-true
# guard — terminated externally.
Jc((scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_END)), "loc_6AF1")
OP_A1(0xFE, 0x3E8, 0x8, 0x0, 0x1, 0x2, 0x1, 0x0, 0x3, 0x4, 0x3)
Jump("Function_67_6AD3")
label("loc_6AF1")
Return()
# Function_67_6AD3 end
def Function_68_6AF2(): pass
label("Function_68_6AF2")

# Character knocked flying (variant 1): crash SE and impact effects at fixed
# spots, relative knock-back (OP_98, presumed relative move) while spinning
# (OP_D5), then the character's model slot is switched off via OP_75 — confirm
# OP_75 semantics against the opcode table.
Sound(917, 0, 80, 0)
PlayEffect(0x1, 0xFF, 0xFF, 0x0, -28500, 500, 21580, 0, 0, 0, 7000, 7000, 7000, 0xFF, 0, 0, 0, 0)
PlayEffect(0x1, 0xFF, 0xFF, 0x0, -30500, 1500, 20080, 0, 0, 0, 7000, 7000, 7000, 0xFF, 0, 0, 0, 0)
def lambda_6B6B():
    OP_98(0xFE, 0x0, 0xFFFFF448, 0x0, 0x3E8, 0x0)
    ExitThread()
QueueWorkItem(0xFE, 1, lambda_6B6B)
OP_D5(0xFE, 0x0, 0x0, 0x7530, 0x3E8)
Sleep(500)
PlayEffect(0x1, 0xFF, 0xFF, 0x0, -28500, 500, 21580, 0, 0, 0, 7000, 7000, 7000, 0xFF, 0, 0, 0, 0)
OP_75(0x2, 0x1, 0x3E8)
Sleep(1000)
WaitChrThread(0xFE, 1)
Return()
# Function_68_6AF2 end
def Function_69_6BDC(): pass
label("Function_69_6BDC")

# Character knocked flying (variant 2): same pattern as Function_68 with a
# different impact position, opposite spin direction, and model slot 0x3.
Sound(917, 0, 70, 0)
PlayEffect(0x1, 0xFF, 0xFF, 0x0, -27250, 500, 18000, 0, 0, 0, 7000, 7000, 7000, 0xFF, 0, 0, 0, 0)
def lambda_6C1E():
    OP_98(0xFE, 0x0, 0xFFFFF448, 0x0, 0x3E8, 0x0)
    ExitThread()
QueueWorkItem(0xFE, 1, lambda_6C1E)
OP_D5(0xFE, 0x0, 0x0, 0xFFFF8AD0, 0x3E8)
Sleep(500)
PlayEffect(0x1, 0xFF, 0xFF, 0x0, -27250, 500, 18000, 0, 0, 0, 7000, 7000, 7000, 0xFF, 0, 0, 0, 0)
OP_75(0x3, 0x1, 0x3E8)
Sleep(1000)
WaitChrThread(0xFE, 1)
Return()
# Function_69_6BDC end
def Function_70_6C8F(): pass
label("Function_70_6C8F")

# Character knocked flying (variant 3): same pattern as Function_68 with a
# different impact position and model slot 0x4.
Sound(917, 0, 60, 0)
PlayEffect(0x1, 0xFF, 0xFF, 0x0, -24500, 500, 18250, 0, 0, 0, 7000, 7000, 7000, 0xFF, 0, 0, 0, 0)
def lambda_6CD1():
    OP_98(0xFE, 0x0, 0xFFFFF448, 0x0, 0x3E8, 0x0)
    ExitThread()
QueueWorkItem(0xFE, 1, lambda_6CD1)
OP_D5(0xFE, 0x0, 0x0, 0x7530, 0x3E8)
Sleep(500)
PlayEffect(0x1, 0xFF, 0xFF, 0x0, -24500, 500, 18250, 0, 0, 0, 7000, 7000, 7000, 0xFF, 0, 0, 0, 0)
OP_75(0x4, 0x1, 0x3E8)
Sleep(1000)
WaitChrThread(0xFE, 1)
Return()
# Function_70_6C8F end
def Function_71_6D42(): pass
label("Function_71_6D42")

# Endless rumble: SE 914 every ~900 ms (100 ms lead-in + 800 ms tail);
# constant-true guard — terminated externally.
Jc((scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_END)), "loc_6D5E")
Sleep(100)
Sound(914, 0, 100, 0)
Sleep(800)
Jump("Function_71_6D42")
label("loc_6D5E")
Return()
# Function_71_6D42 end
def Function_72_6D5F(): pass
label("Function_72_6D5F")

# Endless rumble (faster phase): SE 914 every ~900 ms with a 250 ms lead-in;
# constant-true guard — terminated externally.
Jc((scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_END)), "loc_6D7B")
Sleep(250)
Sound(914, 0, 100, 0)
Sleep(650)
Jump("Function_72_6D5F")
label("loc_6D7B")
Return()
# Function_72_6D5F end
# Decompiler footer: serialize the edited scenario back to binary and hand
# control to the toolchain's entry point.
SaveToFile()
Try(main)
|
import numpy as np
from numpy import linalg as LA
import scipy as sc
from scipy.interpolate import interp1d
import scipy.integrate as integrate
import scipy.special as special
import random as rd
from sympy import besseli, besselk
#import matplotlib.pyplot as plt
import time
start_time = time.time()
#import matplotlib.pyplot as plt
#Size of steps on radial coordinate
def Delta_Mom(K, N):
    """Uniform spacing of the momentum grid: the cutoff K split into N slots."""
    step = K / N
    return step
#Parametrization of the momentum in terms of tan(th)**r/sqrt(r)
def Tan_Mom(i, K, N):
    """Momentum at grid index i on the uniform lattice (i times the spacing).

    The name refers to a tangent reparametrization that is currently disabled;
    only the linear grid is in use.
    """
    return Delta_Mom(K, N) * i
#Jacobian of the lattice reconfig. to the tangent parametrization
def Jacobi(i, K, N):
    """Integration weight of grid node i.

    With the tangent reparametrization disabled the grid is uniform, so the
    weight reduces to the constant spacing and `i` is unused.
    """
    weight = Delta_Mom(K, N)
    return weight
#Kinetic term of the m-layer system
def Kin_Term(k, Euv, K, N, M):
    """Kinetic energy of an M-layer system: Euv * (k / K) ** M.

    `N` is unused; it is kept so the signature matches the other grid helpers.
    """
    scaled = k / K
    return Euv * scaled ** M
#Norm in the reciprocal lattice
def Norm_Mom(k1, k2, th):
    """Squared distance |k1 - k2|**2 between momenta at relative angle th."""
    cross_term = 2*k1*k2*np.cos(th)
    return k1**2 + k2**2 - cross_term
#Coulomb interaction
def Coulomb(k1, k2, th, Euv, K, G, A):
    """Gaussian-regularized Coulomb kernel.

    Returns G * Euv / K**2 * exp(-A**2 * |k1 - k2|**2 / K**2); the exponential
    suppresses large momentum transfer with width set by A.
    """
    prefactor = Euv/K**2*G
    return prefactor * np.exp( -A**2*Norm_Mom(k1,k2,th)/K**2 )
#Hopping/pairing terms
def Hopp_Pair(k1, k2, th, Euv, K, G, A, M, s):
    """Interaction matrix element; s = +1 selects hopping, s = -1 pairing.

    The angular factor s * (s + cos(M * th)) carries the M-layer chirality.
    """
    kernel = -1/(4*np.pi)*( Coulomb(k1,k2,th,Euv,K,G,A) )
    return kernel*s*( s+np.cos(M*th) )
def SelfEnergy(k, Euv, K, N, M, G, A):
    """One-loop self-energy at momentum k for the Gaussian-regularized kernel.

    Sum of a constant piece, a 1/k**2 pole, and its Gaussian-damped partner;
    `N` and `M` are unused but kept for signature symmetry with Kin_Term.
    """
    constant = 1/( 4*np.pi*A**2 )
    pole = -K**2/( 2*A**4*k**2*np.pi )
    damped = +K**2/( 2*A**4*k**2*np.pi ) * np.exp(-A**2*k**2/(2*K**2))
    return Euv*G*(constant + pole + damped)
#Angular momentum channels
def Four_Hopp_Pair(k1, k2, l, Euv, K, G, A, M, s):
    """l-th cosine (angular-momentum) component of Hopp_Pair.

    Integrates Hopp_Pair(k1, k2, th, ...) * cos(l * th) for th in [0, pi]
    with adaptive quadrature; the quad error estimate is discarded.

    Fix: the original also built 10000-point `x`/`y` arrays (and a `chopps`
    counter and a pre-initialized `suma`) that were never used -- dead work on
    every call, removed here.  The returned value is unchanged.
    """
    integrand = lambda th: Hopp_Pair(k1, k2, th, Euv, K, G, A, M, s) * np.cos(l * th)
    return integrate.quad(integrand, 0, np.pi)[0]
#Import the self-energy from file InteMm.txt
#SelfEnergyData = np.loadtxt(fname = "InteM1.txt")
# Read the run parameters from a whitespace-separated text file; the order of
# the five entries is fixed by convention with the generator of that file.
Parameters = np.genfromtxt('ParametersABLN.txt')
eCharge = Parameters[0] #Coupling constant
eSpread = Parameters[1] #Spreading constant
eAngMom = Parameters[2] #Angular momentum channel
eRadMom = Parameters[3] #Matrix size
eCutOff = Parameters[4] #Number of UV cutoff intervals
#BloqueX = Parameters[4]
#BloqueY = Parameters[5]
# Output file for the per-row summary written at the end of the script.
# NOTE(review): opened without a context manager and only closed at the very
# end -- acceptable for a linear Python 2 script, but data is lost on a crash.
file1 = open("HamVals.txt","w")
GGG = eCharge/10. #Coupling constant (note the extra factor 1/10 applied here)
AAA = eSpread #Spreading constant
LLL = eAngMom #Angular momentum channel
NNN = int(eRadMom*10) #Matrix size (file value is scaled by 10 before truncation)
SSS = int(eCutOff) #Number of UV cutoff intervals
EUV = 1. #UV cutoff energy
KKK = 1. #UV cutoff momentum
MMM = 2 #Number of layers
"""
PRINTING OF TERMS TO DO THE BENCH-MARK
print "Hopp"
print Hopp_Pair(Tan_Mom(1,RRR,KKK,NNN),Tan_Mom(2,RRR,KKK,NNN),np.pi/3,KKK,MMM,+1)
print "Four_Hopp"
print Four_Hopp_Pair(Tan_Mom(1,RRR,KKK,NNN),Tan_Mom(2,RRR,KKK,NNN),1,KKK,MMM,+1)
print "Pair"
print Hopp_Pair(Tan_Mom(1,RRR,KKK,NNN),Tan_Mom(2,RRR,KKK,NNN),np.pi/3,KKK,MMM,-1)
print "Four_Pair"
print Four_Hopp_Pair(Tan_Mom(1,RRR,KKK,NNN),Tan_Mom(2,RRR,KKK,NNN),1,KKK,MMM,-1)
print "Jacobi"
print Jacobi(1,RRR,KKK,NNN)
"""
#Radial coordinate slots
# Build the 2*NNN*SSS-point momentum axis: the first half holds the negative
# branch, the second half the positive branch.  Kinetic and self-energy values
# on the negative branch carry an explicit minus sign (particle-hole partner).
MomentumAxis = np.zeros(2*NNN*SSS)
KineticAxis = np.zeros(2*NNN*SSS)
SelfEnergyAxis = np.zeros(2*NNN*SSS)
# Negative branch: grid indices -NNN*SSS .. -1 (momentum runs up to -spacing).
for i in range(0, NNN*SSS):
    MomentumAxis[i] = Tan_Mom(- NNN*SSS+i,KKK,NNN)
    KineticAxis[i] = -Kin_Term(MomentumAxis[i],EUV,KKK,NNN,MMM)
    SelfEnergyAxis[i] = -SelfEnergy(MomentumAxis[i],EUV,KKK,NNN,MMM,GGG,AAA)
    print - NNN*SSS+i
# Positive branch: grid indices 1 .. NNN*SSS (zero momentum is skipped, which
# avoids the 1/k**2 pole of SelfEnergy at k = 0).
for i in range(0, NNN*SSS):
    MomentumAxis[NNN*SSS+i] = +Tan_Mom(i+1,KKK,NNN)
    KineticAxis[NNN*SSS+i] = +Kin_Term(MomentumAxis[NNN*SSS+i],EUV,KKK,NNN,MMM)
    SelfEnergyAxis[NNN*SSS+i] = SelfEnergy(MomentumAxis[NNN*SSS+i],EUV,KKK,NNN,MMM,GGG,AAA)
    print i+1
#print MomentumAxis
#print SelfEnergy
"""
PLOT OF TE SELF-ENERGY IN LOG MOMENTUM
fig = plt.figure()
ax = fig.add_subplot(1, 1, 1)
line, = ax.plot(SelfEnergyData[:,0], SelfEnergyData[:,1], color='blue', lw=2,marker="*")
line, = ax.plot(MomentumAxis, SelfEnergy, color='red', marker="*")
plt.xlim(0.001, 10)
ax.set_xscale('log')
plt.show()
"""
#Bogoliubov Hamiltonian
# Assemble the 2x2 block matrix: upper-left = hopping (+diagonal energies),
# lower-right = its negative, off-diagonal blocks = antisymmetric pairing.
# Only indices 0 .. NNN*SSS-1 are looped; the offsets place the other blocks.
HamiltonMatrix = np.zeros((2*NNN*SSS,2*NNN*SSS))
for i in range(0,NNN*SSS):
    mx = i+1  # 1-based grid index of row momentum (matches the positive branch above)
    for j in range(0,i+1):
        my = j+1  # 1-based grid index of column momentum
        if i==j:
            #Selection of the kinetic/self-energy terms on the diagonal
            Mom = Tan_Mom(mx,KKK,NNN)
            HamiltonMatrix[i+000][j+000] = Kin_Term(Mom,EUV,KKK,NNN,MMM) + SelfEnergy(Mom,EUV,KKK,NNN,MMM,GGG,AAA)
            # NOTE(review): every other second-block index in this file uses the
            # offset NNN*SSS, but this line uses NNN -- for SSS > 1 this writes
            # the hole-sector diagonal into the wrong rows/columns.  Confirm and
            # change to i+NNN*SSS / j+NNN*SSS if SSS > 1 is ever used.
            HamiltonMatrix[i+NNN][j+NNN] = -HamiltonMatrix[i][j]
        else:
            #Off-diagonal terms
            #Definition of the momenta for the matrix element i,j
            Mom1 = Tan_Mom(mx,KKK,NNN)
            Mom2 = Tan_Mom(my,KKK,NNN)
            #Hopping terms (symmetric in i,j; weighted by the grid Jacobians)
            HamiltonMatrix[i+000][j+000] = Four_Hopp_Pair(Mom1,Mom2,LLL,EUV,KKK,GGG,AAA,MMM,+1)*np.sqrt( Jacobi(mx,KKK,NNN)*Jacobi(my,KKK,NNN) )
            HamiltonMatrix[j+000][i+000] = HamiltonMatrix[i][j]
            HamiltonMatrix[i+NNN*SSS][j+NNN*SSS] = -HamiltonMatrix[i][j]
            HamiltonMatrix[j+NNN*SSS][i+NNN*SSS] = -HamiltonMatrix[i][j]
            #Pairing terms (antisymmetric between the two off-diagonal blocks)
            HamiltonMatrix[i+000][j+NNN*SSS] = Four_Hopp_Pair(Mom1,Mom2,LLL,EUV,KKK,GGG,AAA,MMM,-1)*np.sqrt( Jacobi(mx,KKK,NNN)*Jacobi(my,KKK,NNN) )
            HamiltonMatrix[j+000][i+NNN*SSS] = +HamiltonMatrix[i+000][j+NNN*SSS]
            HamiltonMatrix[i+NNN*SSS][j+000] = -HamiltonMatrix[i+000][j+NNN*SSS]
            HamiltonMatrix[j+NNN*SSS][i+000] = -HamiltonMatrix[i+000][j+NNN*SSS]
#PRINTING OF THE FULL-HAMILTONIAN. Not recommended to activate for sizes NNN>4
#print('\n'.join(['\t'.join([str(cell) for cell in row]) for row in HamiltonMatrix]))
np.savetxt('HamiltonCalculator0010.txt', HamiltonMatrix, fmt='%1.4e') # use exponential notation
#DIAGONALIZATION OF THE HAMILTONIAN
# LA.eig returns complex arrays even when the spectrum is real (the matrix is
# non-symmetric because of the antisymmetric pairing blocks).
HamiltonEigVals, HamiltonEigVecs = LA.eig( HamiltonMatrix )
#SORTING OF THE EIGENVALUES
# NOTE(review): argsort on a complex array orders lexicographically by real
# part then imaginary part -- fine if eigenvalues are (numerically) real,
# otherwise the ordering may not be the physical one.  Confirm.
idx = np.argsort(HamiltonEigVals)
HamiltonEigVals = HamiltonEigVals[idx]
HamiltonEigVecs = HamiltonEigVecs [:,idx]
"""
PRINTING OF EIGSYSTEM
print "HamiltonEigVals"
print HamiltonEigVals
print "HamiltonEigVecs"
print('\n'.join(['\t'.join([str(cell) for cell in row]) for row in HamiltonEigVecs]))
print "First HamiltonEigVals"
print HamiltonEigVals[0]
print "First HamiltonEigVecs"
print HamiltonEigVecs[:,0]
"""
print NNN
np.savetxt('MomentumAxis.txt', MomentumAxis, fmt='%1.4e') # use exponential notation
np.savetxt('KineticAxis.txt', KineticAxis, fmt='%1.4e') # use exponential notation
np.savetxt('SelfEnergyAxis.txt', SelfEnergyAxis, fmt='%1.4e') # use exponential notation
np.savetxt('ValsRe.txt', np.real(HamiltonEigVals), fmt='%1.4e') # use exponential notation
np.savetxt('ValsIm.txt', np.imag(HamiltonEigVals), fmt='%1.4e') # use exponential notation
# NOTE(review): HamiltonEigVals is complex but fmt='%1.4e' is a real format --
# depending on the numpy version this line may raise or mangle the output;
# ValsRe/ValsIm above already carry the same information.
np.savetxt('Vals.txt', HamiltonEigVals, fmt='%1.4e') # use exponential notation
#np.savetxt('VecsHamiltonCalculator0010.txt', HamiltonEigVecs, fmt='%1.4e') # use exponential notation
# Tab-separated summary row per mode: momentum, kinetic, self-energy, and the
# eigenvalue (real part, imaginary part, full complex value).
for m1 in range(0,2*NNN*SSS):
    print m1
    file1.write("%s" % (MomentumAxis[m1])),
    file1.write("\t"),
    file1.write("%s" % (KineticAxis[m1]))
    file1.write("\t"),
    file1.write("%s" % (SelfEnergyAxis[m1]))
    file1.write("\t"),
    file1.write("%s" % (np.real(HamiltonEigVals[m1])))
    file1.write("\t"),
    file1.write("%s" % (np.imag(HamiltonEigVals[m1])))
    file1.write("\t"),
    file1.write("%s" % (HamiltonEigVals[m1]))
    file1.write('\n'),
file1.close()
print "Fin"
#216
|
# -*- coding: cp936 -*-
import arcpy
# Set the geoprocessing workspace (file geodatabase).
# NOTE(review): this is Python 2 source (see `except Exception,e` below); the
# backslashes in this path only survive because \U, \l, \D, \P, \d, \g are not
# recognized escape sequences in Python 2 byte strings.  A raw string
# r"C:\Users\..." would be the safe spelling, especially before any port to
# Python 3 where \U in a non-raw literal is a SyntaxError.
workSpace="C:\Users\lenovo\Desktop\PyTest\data\geodb.gdb"
arcpy.env.workspace=workSpace
def editPipeTableNonGeometricalProperties():
    '''
    Aggregate the non-spatial attributes of the pipe-segment table
    (T_PN_PIPESEGMENT_GEO) into the pipeline table (T_PN_PIPELINE).

    If the segments of one pipeline carry several different values for an
    attribute, that attribute is stored as None on the pipeline.
    When confirming pipelines with the client, write the pipeline name into
    the pipeline-name field of the segment table first.
    Base geographic-information fields must be added manually.
    Start mileage (MSTART) defaults to 0 and end mileage (MEND) to the
    pipeline length (both are set in this function).

    Approach:
        1. read every pipe segment's attributes;
        2. group the segments of each pipeline and derive one value per
           attribute (multi-valued attributes become None, lengths are
           summed);
        3. insert one row per pipeline into the pipeline table.
    '''
    #edit the pipelines' non-geometric attributes
    pipeCodeList=[] #pipeline code (PLCODE) of every segment, duplicates kept
    pipeSegmentDataList=[] #one attribute row per segment
    pipeEndDataList=[] #final aggregated rows, one per pipeline
    #read the segment attributes into the lists above
    with arcpy.da.SearchCursor("T_PN_PIPESEGMENT_GEO",("OBJECTID","PIPENAME","PLCODE","LENGTH","DESIGNDEPNAME",\
                               "CONSTRUNIT","SUPERVISORUNIT","TESTUNIT","USEDDATE",\
                               "FDNAME","SEGTYPE","TRANSMEDIUM","SEGMATERIAL2",\
                               "TRANSAMOUNTDESIGN","TRANSAMOUNTREAL","DIAMETER",\
                               "THICKNESS","DESIGNPRESURE","PRESSURELEVEL",\
                               "RUNPRESSURE","MAXPRESSURE","ANTISEPTICMODE",\
                               "ANTISEPTICLEVEL","REPAIRHOLEMODE","REPAIRHOLLEVEL",\
                               "CPMODE","COLLECTDATE","COLLECTUNIT","INPUTDATETIME"\
                               )) as PPcursor:
        for PC in PPcursor:
            try:
                if PC[2] is not None: #PLCODE present -> segment belongs to a pipeline
                    pipeCodeList.append(PC[2])
                    pipeSegmentDataList.append([PC[1],PC[2],PC[3],PC[4],PC[5],PC[6],PC[7],PC[8],PC[9],PC[10],PC[11],PC[12],PC[13],\
                                                PC[14],PC[15],PC[16],PC[17],PC[18],PC[19],PC[20],PC[21],PC[22],PC[23],PC[24],PC[25],\
                                                PC[26],PC[27],PC[28]])
            #Python 2 except syntax; log the failing record and keep going
            except Exception,e:
                print e.message
                print PC[0],PC[1]
                pass
                continue
    #aggregate the data of every distinct pipeline
    for P in set(pipeCodeList):
        #temporary store: one sub-list per attribute, filled from all segments
        pipeALLDataList=[[],[],[],[],[],[],[],[],[],[],[],[],[],[],\
                         [],[],[],[],[],[],[],[],[],[],[],[],[],[]]
        pipeSingleDataList=[]
        for PPD in pipeSegmentDataList:
            if P==PPD[1]:
                for i in range(0,28):
                    pipeALLDataList[i].append(PPD[i])
        for i in range(0,28):
            if i!=2: #every attribute except LENGTH (row index 2)
                if len(set(pipeALLDataList[i]))==1:
                    pipeSingleDataList.append(list(set(pipeALLDataList[i]))[0])
                else :
                    pipeSingleDataList.append(None) #conflicting values -> None
            else:
                #LENGTH: the pipeline length is the sum of its segment lengths
                Sum=0
                for Len in pipeALLDataList[i]:
                    Sum+=Len
                pipeSingleDataList.append(Sum)
        pipeEndDataList.append(pipeSingleDataList)
    #insert the aggregated rows into the pipeline table
    rows=arcpy.InsertCursor("T_PN_PIPELINE")
    for PED in pipeEndDataList:
        row=rows.newRow()
        row.setValue("NAME",PED[0])
        row.setValue("CODE",PED[1])
        row.setValue("LENGTH",PED[2])
        row.setValue("DESIGNDEPNAME",PED[3])
        row.setValue("CONSTRUNIT",PED[4])
        row.setValue("SUPERVISORUNIT",PED[5])
        row.setValue("TESTUNIT",PED[6])
        row.setValue("USEDDATE",PED[7])
        row.setValue("FNNAME",PED[8]) #NOTE(review): source field is FDNAME but target is FNNAME -- confirm intended
        row.setValue("SEGTYPE",PED[9])
        row.setValue("TRANSMEDIUM",PED[10])
        row.setValue("SEGMATERIAL2",PED[11])
        row.setValue("TRANSAMOUNTDESIGN",PED[12])
        row.setValue("TRANSAMOUNTREAL",PED[13])
        row.setValue("DIAMETER",PED[14])
        row.setValue("THICKNESS",PED[15])
        row.setValue("DESIGNPRESURE",PED[16])
        row.setValue("PRESSURELEVEL",PED[17])
        row.setValue("RUNPRESSURE",PED[18])
        row.setValue("MAXPRESSURE",PED[19])
        row.setValue("ANTISEPTICMODE",PED[20])
        row.setValue("ANTISEPTICLEVEL",PED[21])
        row.setValue("REPAIRHOLEMODE",PED[22])
        row.setValue("REPAIRHOLLEVEL",PED[23])
        row.setValue("CPMODE",PED[24])
        row.setValue("COLLECTDATE",PED[25])
        row.setValue("COLLECTUNIT",PED[26])
        row.setValue("INPUTDATETIME",PED[27])
        row.setValue("RUNSTATE",3) #NOTE(review): hard-coded run state -- confirm meaning of 3
        row.setValue("LAYMODE",1) #NOTE(review): hard-coded lay mode -- confirm meaning of 1
        row.setValue("MSTART",0) #start mileage defaults to 0
        row.setValue("MEND",PED[2]) #end mileage = aggregated pipeline length
        rows.insertRow(row)
    #for aa in pipeEndDataList:
    #   print aa[0],aa[1],aa[2],aa[3],aa[4],aa[5],aa[6],aa[7],aa[8],aa[9],aa[10],aa[11],aa[12],aa[13],\
    #         aa[14],aa[15],aa[16],aa[17],aa[18],aa[19],aa[20],aa[21],aa[22],aa[23],aa[24],aa[25],aa[26],aa[27]
def editPipeTableGeometricalProperties():
    '''
    Edit the geometric information of the pipeline table (reference objects
    and coordinates).

    Segment codes must be assigned so that the first segment of a pipeline
    has the smallest code (001) and the last segment the largest.
    The start geometry of the pipeline is taken from its lowest-coded
    segment, the end geometry from its highest-coded segment.

    Approach:
        1. read the geometry attributes of every segment;
        2. combine, per pipeline, the start info of the minimum-code segment
           with the end info of the maximum-code segment;
        3. update the pipeline table with the combined rows.

    ***Run this only after the non-geometric attributes have been edited***
    (it updates rows that must already exist in T_PN_PIPELINE).
    '''
    #edit the pipelines' geometric attributes (reference objects, coordinates)
    pipeCodeGList=[] #pipeline code (PLCODE) of every segment
    pipeSegmentDataGList=[] #geometry row per segment
    pipeEndDataGList=[] #final combined geometry rows, one per pipeline
    #read the segments' geometry attributes
    with arcpy.da.SearchCursor("T_PN_PIPESEGMENT_GEO",("OBJECTID","PLCODE","CODE","ADDRSTART","REFOBJSTART",\
                               "OFFSETSTART","XSTART","YSTART","ZSTART","ADDREND",\
                               "REFOBJEND","OFFSETEND","XEND","YEND","ZEND")) as PPAcursor:
        for PCA in PPAcursor:
            try:
                if PCA[1] is not None: #only segments assigned to a pipeline
                    pipeCodeGList.append(PCA[1])
                    pipeSegmentDataGList.append([PCA[1],PCA[2],PCA[3],PCA[4],PCA[5],PCA[6],PCA[7],\
                                                 PCA[8],PCA[9],PCA[10],PCA[11],PCA[12],PCA[13],PCA[14],])
            #Python 2 except syntax; log the failing record and keep going
            except Exception,e:
                print e.message
                print PCA[0],PCA[1],PCA[2]
                pass
                continue
    #combine, per pipeline, the geometry of its first and last segment
    for PG in set(pipeCodeGList):
        #collect all segment codes belonging to this pipeline
        pipeSegCodeALLDataList=[]
        for PPDG in pipeSegmentDataGList:
            if PG==PPDG[0]:
                pipeSegCodeALLDataList.append(PPDG[1])
        pipeSingleADataGList=[] #start-side data (from the min-code segment)
        pipeSingleBDataGList=[] #end-side data (from the max-code segment)
        for PPDGT in pipeSegmentDataGList:
            if PPDGT[1]==min(pipeSegCodeALLDataList):
                pipeSingleADataGList.append(PG)
                pipeSingleADataGList.append(PPDGT[2])
                pipeSingleADataGList.append(PPDGT[3])
                pipeSingleADataGList.append(PPDGT[4])
                pipeSingleADataGList.append(PPDGT[5])
                pipeSingleADataGList.append(PPDGT[6])
                pipeSingleADataGList.append(PPDGT[7])
            if PPDGT[1]==max(pipeSegCodeALLDataList):
                pipeSingleBDataGList.append(PPDGT[8])
                pipeSingleBDataGList.append(PPDGT[9])
                pipeSingleBDataGList.append(PPDGT[10])
                pipeSingleBDataGList.append(PPDGT[11])
                pipeSingleBDataGList.append(PPDGT[12])
                pipeSingleBDataGList.append(PPDGT[13])
        for PSB in pipeSingleBDataGList:
            pipeSingleADataGList.append(PSB)
        pipeEndDataGList.append(pipeSingleADataGList)
    #update the pipeline table with the collected geometry data
    with arcpy.da.UpdateCursor("T_PN_PIPELINE",("CODE","ADDRSTART","REFOBJSTART",\
                               "OFFSETSTART","XSTART","YSTART","ZSTART","ADDREND",\
                               "REFOBJEND","OFFSETEND","XEND","YEND","ZEND")) as PUcursor:
        for PURow in PUcursor:
            for PEDG in pipeEndDataGList:
                if PEDG[0]==PURow[0]: #match pipeline rows by CODE
                    PURow[1]=PEDG[1]
                    PURow[2]=PEDG[2]
                    PURow[3]=PEDG[3]
                    PURow[4]=PEDG[4]
                    PURow[5]=PEDG[5]
                    PURow[6]=PEDG[6]
                    PURow[7]=PEDG[7]
                    PURow[8]=PEDG[8]
                    PURow[9]=PEDG[9]
                    PURow[10]=PEDG[10]
                    PURow[11]=PEDG[11]
                    PURow[12]=PEDG[12]
            PUcursor.updateRow(PURow)
def editPipeTable():
    #Attribute pass first, geometry pass second: the geometry pass updates
    #pipeline rows that the attribute pass has just inserted.
    editPipeTableNonGeometricalProperties()
    editPipeTableGeometricalProperties()
#run the full pipeline-table build when this script is executed
editPipeTable()
|
def test_basic_settings():
    from orb.settings import Settings

    # a freshly constructed Settings object must expose the documented defaults
    settings = Settings()
    expected_defaults = {
        'default_locale': 'en_US',
        'default_page_size': '40',
        'max_cache_timeout': '86400000',  # 24 hours, in milliseconds
        'max_connections': '10',
        'security_key': '',
        'server_timezone': 'US/Pacific',
        'worker_class': 'default',
    }
    for attr, expected in expected_defaults.items():
        assert getattr(settings, attr) == expected
def test_environment_based_settings():
    """Settings must pick up its values from ORB_* environment variables.

    Fix: the old try/finally unconditionally deleted the ORB_* variables on
    exit, clobbering any value that existed before the test ran (and raising
    KeyError if a del executed for a variable another test had removed).
    mock.patch.dict restores the prior environment exactly.
    """
    import os
    from unittest import mock
    from orb.settings import Settings

    env = {
        'ORB_DEFAULT_LOCALE': 'fr_FR',
        'ORB_DEFAULT_PAGE_SIZE': '100',
        'ORB_MAX_CACHE_TIMEOUT': '100',
        'ORB_MAX_CONNECTIONS': '1',
        'ORB_SECURITY_KEY': '12345',
        'ORB_SERVER_TIMEZONE': 'US/Eastern',
        'ORB_WORKER_CLASS': 'gevent',
    }
    with mock.patch.dict(os.environ, env):
        settings = Settings()

        # validate the environment based settings
        assert settings.default_locale == 'fr_FR'
        assert settings.default_page_size == '100'
        assert settings.max_cache_timeout == '100'
        assert settings.max_connections == '1'
        assert settings.security_key == '12345'
        assert settings.server_timezone == 'US/Eastern'
        assert settings.worker_class == 'gevent'
def test_initialization_based_settings():
    """Explicit constructor keywords must override ORB_* environment values.

    Fix: like test_environment_based_settings, the old try/finally deleted
    the ORB_* variables unconditionally; mock.patch.dict restores whatever
    was in the environment before the test.
    """
    import os
    from unittest import mock
    from orb.settings import Settings

    env = {
        'ORB_DEFAULT_LOCALE': 'fr_FR',
        'ORB_DEFAULT_PAGE_SIZE': '100',
        'ORB_MAX_CACHE_TIMEOUT': '100',
        'ORB_MAX_CONNECTIONS': '1',
        'ORB_SECURITY_KEY': '12345',
        'ORB_SERVER_TIMEZONE': 'US/Eastern',
        'ORB_WORKER_CLASS': 'gevent',
    }
    with mock.patch.dict(os.environ, env):
        settings = Settings(
            default_locale='en_GB',
            default_page_size='1',
            max_cache_timeout='10',
            max_connections='2',
            security_key='54321',
            server_timezone='US/Central',
            worker_class=''
        )

        # constructor keywords win over the environment
        assert settings.default_locale == 'en_GB'
        assert settings.default_page_size == '1'
        assert settings.max_cache_timeout == '10'
        assert settings.max_connections == '2'
        assert settings.security_key == '54321'
        assert settings.server_timezone == 'US/Central'
        assert settings.worker_class == ''
"""
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved
Author: Dejiao Zhang (dejiaoz@amazon.com)
Date: 02/26/2021
"""
import torch
import numpy as np
from utils.metric import Confusion
from dataloader.dataloader import train_unshuffle_loader
from sklearn import cluster
def prepare_task_input(model, batch, args, is_contrastive=False):
    """Tokenize a batch with the model's tokenizer and move tensors to GPU.

    Returns (features, labels): in contrastive mode *features* is a list of
    three encoded views ('text', 'text1', 'text2'); otherwise it is the
    single encoding of 'text'. Labels are detached and on GPU.
    """
    def _encode(text):
        # pad to the longest sequence in the batch, truncate at max_length
        encoded = model.tokenizer.batch_encode_plus(
            text, max_length=args.max_length, return_tensors='pt',
            padding='longest', truncation=True)
        for key in encoded.keys():
            encoded[key] = encoded[key].cuda()
        return encoded

    class_label = batch['label'].cuda()
    if is_contrastive:
        feat = [_encode(batch[view]) for view in ('text', 'text1', 'text2')]
        return feat, class_label.detach()
    return _encode(batch['text']), class_label.detach()
def evaluate_embedding(model, args, step):
    """Evaluate clustering quality of the model's embeddings on the train set.

    Computes two accuracies over the unshuffled training data:
      * acc_model -- from the model's own cluster-assignment head
      * acc       -- from running KMeans directly on the embeddings
    Both are written to ``args.tensorboard`` under ``Test/`` at *step*.

    Args:
        model: model exposing eval(), get_embeddings() and get_cluster_prob().
        args:  run configuration (num_classes, seed, tensorboard, ...).
        step:  global step used for tensorboard logging.
    """
    confusion, confusion_model = Confusion(args.num_classes), Confusion(args.num_classes)
    model.eval()
    dataloader = train_unshuffle_loader(args)
    print('---- {} evaluation batches ----'.format(len(dataloader)))

    # Accumulate per-batch tensors in lists and concatenate once at the end;
    # the previous per-iteration torch.cat was quadratic in batch count.
    labels, embeddings_list, probs = [], [], []
    for batch in dataloader:
        with torch.no_grad():
            label = batch['label']
            features, _ = prepare_task_input(model, batch, args, is_contrastive=False)
            embeddings = model.get_embeddings(features)
            model_prob = model.get_cluster_prob(embeddings)
        labels.append(label)
        embeddings_list.append(embeddings.detach())
        probs.append(model_prob)
    all_labels = torch.cat(labels, dim=0)
    all_embeddings = torch.cat(embeddings_list, dim=0)
    all_prob = torch.cat(probs, dim=0)

    # accuracy of the model's own clustering head
    all_pred = all_prob.max(1)[1]
    confusion_model.add(all_pred, all_labels)
    confusion_model.optimal_assignment(args.num_classes)
    acc_model = confusion_model.acc()

    # accuracy of KMeans run on the raw embeddings
    kmeans = cluster.KMeans(n_clusters=args.num_classes, random_state=args.seed)
    embeddings = all_embeddings.cpu().numpy()
    kmeans.fit(embeddings)
    # fix: np.int was removed in NumPy 1.24 -- use the builtin int dtype
    pred_labels = torch.tensor(kmeans.labels_.astype(int))

    # clustering accuracy
    confusion.add(pred_labels, all_labels)
    confusion.optimal_assignment(args.num_classes)
    acc = confusion.acc()

    ressave = {"acc": acc, "acc_model": acc_model}
    for key, val in ressave.items():
        args.tensorboard.add_scalar('Test/{}'.format(key), val, step)

    print('[Representation] Clustering scores:', confusion.clusterscores())
    print('[Representation] ACC: {:.3f}'.format(acc))
    print('[Model] Clustering scores:', confusion_model.clusterscores())
    print('[Model] ACC: {:.3f}'.format(acc_model))
    return None
|
# import matplotlib.pyplot as plt
import numpy as np
import plotly as py
import plotly.graph_objs as go
import plotly.tools as tls
"""dictionary = {
'A' : [12, 14.8, 16, 20],
'B' : [1],
'c' : [12, 2, 14, 10],
'd' : [12, 2, 4, 5],
'3' : [1, 3, 4, 5]
}"""
def plottingBox(dictionary):
    """Render one box trace per dictionary key and return the page's HTML.

    The plot is written to 'boxplot.html' as a side effect of plotly's
    offline plotter; the file's contents, with newlines stripped, are
    returned as a single string.
    """
    names = list(dictionary)
    # evenly spaced hues around the colour wheel, one per trace
    hues = np.linspace(0, 360, len(names))
    traces = [{
        'y': dictionary[name],
        'name': name,
        'type': 'box',
        'marker': {'color': 'hsl(' + str(hue) + ',50%,50%)'}
    } for name, hue in zip(names, hues)]
    py.offline.plot(traces, filename='boxplot.html')
    with open('boxplot.html', 'r') as html_file:
        box_html = html_file.read().replace('\n', '')
    return box_html
"""fig, axs = plt.subplots(ncols = len(dictionary), sharey = True)
axs = axs.ravel()
for i in range(len(dictionary)):
axs[i].boxplot(dictionary[dictionary_names[i]], vert = True)
plt.show()
plotly_fig = tls.mpl_to_plotly(fig)
plotly.offline.plot(plotly_fig, 'gay_shit.html')"""
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import collections
import logging
from odoo.tests import common
from odoo.cli.populate import Populate
from odoo.tools import mute_logger, populate
from unittest.mock import patch
_logger = logging.getLogger(__name__)
# todo patch cursor commit
class TestPopulate(common.TransactionCase):
    """End-to-end checks of the data-population framework on the test models."""
    def setUp(self):
        super(TestPopulate, self).setUp()
        # populate() commits after each model; stub out commit so this
        # test's transaction can still be rolled back
        patcher = patch.object(self.cr, 'commit')
        patcher.start()
        self.addCleanup(patcher.stop)
    def test_dependency(self):
        # models must be ordered so dependencies are populated first
        ordered_models = Populate._get_ordered_models(self.env, ['test.populate'])
        ordered_models_names = [model._name for model in ordered_models]
        self.assertEqual(ordered_models_names, ['test.populate.category', 'test.populate'])
    @mute_logger('odoo.cli.populate')
    def test_no_populate(self):
        """ Check that model with no populate method are not populated"""
        model = 'test.no.populate'
        populated = Populate.populate(self.env, 'small', [model])
        new = populated[model]
        self.assertFalse(new)
    @mute_logger('odoo.cli.populate')
    def test_populate(self):
        """ Check that model with populate methods are correctly populated"""
        model = 'test.populate'
        populated = Populate.populate(self.env, 'small', [model])
        records = self.check_test_populate_values(populated, model)
        # pseudo random after cartesian with ~ 1/4 False, 3/4 True
        # seed is model dependant
        self.assertEqual(records.mapped('active')[6:20], [
            True, True, True, True, True, True, False, False, True, True, True, True, False, True
        ])
        # pseudo random after iterate
        self.assertEqual(records.mapped('some_ref')[5:20], [
            1, 0, 2, 4, 4, 3, 4, 1, 2, 2, 2, 4, 4, 1, 2
        ])
        self.assertEqual(records.mapped('sequence')[:20], [6, 10, 1, 1, 1, 3, 8, 9, 1, 5, 9, 5, 7, 3, 5, 3, 6, 4, 9, 2]) # Test randint
    @mute_logger('odoo.cli.populate')
    def test_populate_inherit(self):
        """ Check that model with populate methods are correctly populated"""
        model = 'test.populate.inherit'
        populated = Populate.populate(self.env, 'small', [model])
        records = self.check_test_populate_values(populated, model) # should be same values as base class
        # and additionnal_field has own values set
        # iterate then pseudo random
        self.assertEqual(records.mapped('additionnal_field')[:20], [
            'V1', 'V2', 'V3', # iterate
            'V3', 'V1', 'V2', 'V1', 'V2', 'V1', 'V2', 'V2', 'V2', 'V1', 'V1', 'V3', 'V1', 'V2', 'V2', 'V3', 'V2' # pseudorandom
        ])
    def check_test_populate_values(self, populated, model):
        # Shared assertions for test.populate and its inheriting model:
        # the generated records must follow the factory definitions exactly.
        new = populated[model]
        self.assertTrue(new)
        records = self.env[model].browse(new)
        # main cartesian product
        self.assertEqual(records.mapped('active')[:6], [
            True, True, True,
            False, False, False,
        ])
        self.assertEqual(records.mapped('state')[:6], [
            False, 'a', 'b',
            False, 'a', 'b',
        ])
        # custom name call
        self.assertEqual(records.mapped('name')[:6], [
            'active_corner_0', 'active_corner_1', 'active_corner_2',
            'inactive_corner_3', 'inactive_corner_4', 'inactive_corner_5',
        ])
        self.assertIn('filling', records.mapped('name')[6]) # filling when cartesian and iterate are done
        # iterate then pseudo random
        self.assertEqual(records.mapped('some_ref')[:5], [
            0, 1, 2, 3, 4 # iterate
        ])
        # some custom multi field generator (as cartesian product in this example)
        self.assertEqual(records.mapped('dependant_field_1')[:6], [
            'd1_1', 'd1_1', 'd1_1',
            'd1_2', 'd1_2', 'd1_2'
        ])
        self.assertEqual(records.mapped('dependant_field_2')[:6], [
            'd2_1', 'd2_2', 'd2_3_0',
            'd2_1', 'd2_2', 'd2_3_1'
        ])
        used_category_ids = set(records.mapped('category_id').ids[:20])
        self.assertEqual(len(used_category_ids), 6) # event if id may change, with given seed, the 6 category are used
        generated_category_ids = set(populated['test.populate.category'])
        self.assertFalse(used_category_ids-generated_category_ids) # all category are the generated one
        self.assertFalse(hasattr(self.env.registry, 'populated_models'), 'populated_models flag has been removed from registry')
        return records
@common.tagged('-at_install', 'post_install')
class TestPopulateValidation(common.TransactionCase):
    """ check that all fields in _populate_factories exists """
    def setUp(self):
        super(TestPopulateValidation, self).setUp()
        # factories may look up records of already-populated models
        self.env.registry.populated_models = collections.defaultdict(list)
        self.addCleanup(delattr, self.env.registry, 'populated_models')
    def test_populate_factories(self):
        for model in self.env.values():
            factories = model._populate_factories() or []
            # set comprehension instead of set([...]); '_'-prefixed entries
            # are meta generators, not field names
            factories_fields = {field_name for field_name, factory in factories if not field_name.startswith('_')}
            missing = factories_fields - model._fields.keys()
            self.assertFalse(missing, 'Fields %s not found in model %s' % (missing, model._name))
@common.tagged('-standard', '-at_install', 'post_install', 'missing_populate')
class TestPopulateMissing(common.TransactionCase):
    """ check that all fields in _populate_factories exists """
    def setUp(self):
        super(TestPopulateMissing, self).setUp()
        # factories may look up records of already-populated models
        self.env.registry.populated_models = collections.defaultdict(list)
        self.addCleanup(delattr, self.env.registry, 'populated_models')
    def test_populate_missing_factories(self):
        """Log models without factories, and electable fields lacking one."""
        no_factory_models = []
        for model in self.env.values():
            factories = model._populate_factories()
            if not factories:
                if model._transient or model._abstract:
                    continue  # never populated by design
                ir_model = self.env['ir.model'].search([('model', '=', model._name)])
                # models defined only by test_* modules are exempt
                if all(module.startswith('test_') for module in ir_model.modules.split(',')):
                    continue
                no_factory_models.append(model._name)
            else:
                # the first generated record exposes the factory-covered field names
                factories_fields = next(populate.chain_factories(factories, model._name)).keys()
                def is_electable(field):
                    # stored, non-computed, non-system, non-x2many fields
                    return not field.compute \
                        and field.store \
                        and field.name not in ('create_uid', 'write_uid', 'write_date', 'create_date', 'id') \
                        and field.type not in ('many2many', 'one2many')
                electable_fields = {key for key, field in model._fields.items() if is_electable(field)}
                no_factory_fields = set(electable_fields - factories_fields)
                if no_factory_fields:
                    _logger.info('Model %s has some undefined field: %s', model._name, no_factory_fields)
        # fix: log-message typo 'defiend' -> 'defined'
        _logger.info('No populate factories defined for %s', no_factory_models)
|
# -*- coding: utf-8 -*-
# Basic Python list operations, demonstrated by printing each result.
a = [1, 2, 3, 4, 5, 6, 7, 8, 9]
print(a)
print(a[3])
# Initialize a 1-D list of size n with every value 0
n = 10
a = [0] * n
print(a)
# Indexing
a = [1, 2, 3, 4, 5, 6, 7, 8, 9]
# print only the eighth element
print(a[7])
# print the first element from the end
print(a[-1])
# print the third element from the end
print(a[-3])
# change the value of the fourth element
a[3] = 7
print(a)
# Slicing
a = [1, 2, 3, 4, 5, 6, 7, 8, 9]
# from the second element up to (and including) the fourth
print(a[1:4])
# List comprehension
array = [i for i in range(10)]
print(array)
# a condition can be mixed in (print odd numbers only)
array = [i for i in range(10) if i % 2 == 1]
print(array)
# list containing the squares of the numbers
array = [i * i for i in range(10)]
print(array)
# Initialize a 2-D list of size N x M
n = 4
m = 3
array = [[0] * m for _ in range(n)]
print(array)
from django.shortcuts import render
from django.http import HttpResponseRedirect
from django.urls import reverse
from datetime import datetime
from track.models import WeightData
from django.db.models import Max, Min, Avg
def start(request):
    """Render the tracking dashboard: latest measurement, history and stats."""
    by_recency = WeightData.objects.order_by('-check_date')
    latest_measure_list = by_recency[:1]
    history = by_recency[:10]
    all_measures = WeightData.objects.all()
    absolute_max = all_measures.aggregate(Max('weight_value'))
    absolute_min = all_measures.aggregate(Min('weight_value'))
    average = all_measures.aggregate(Avg('weight_value'))
    if not latest_measure_list:
        # no measurements recorded yet -> render the empty dashboard
        context = {}
    else:
        context = {
            'latest_measure': latest_measure_list[0],
            'history': history,
            'absolute_max': absolute_max,
            'absolute_min': absolute_min,
            'average': average,
        }
    return render(request, 'track/start.html', context)
def save(request):
    """Create or update the weight measurement for the posted date.

    Expects POST fields 'date' (DD.MM.YYYY) and 'value'; redirects back to
    the start view afterwards.

    Improvement: update_or_create replaces the previous
    get_or_create + assign + save sequence, which issued a redundant second
    save for freshly created rows and redundantly repeated check_date in
    the defaults.
    """
    date = datetime.strptime(request.POST['date'], '%d.%m.%Y')
    value = request.POST['value']
    WeightData.objects.update_or_create(
        check_date=date, defaults={'weight_value': value})
    return HttpResponseRedirect(reverse('track:start'))
|
#!/usr/bin/env python
#coding=utf-8
#date : 2015-04-12
from socket import *
def create_udp_client():
    """Interactive UDP echo client (Python 2: raw_input / print statement).

    Reads lines from stdin, sends each as a datagram to localhost:21567
    and prints the server's reply; an empty input line or an empty reply
    ends the session.
    """
    HOST = 'localhost'
    PORT = 21567
    BUFSIZ = 1024 # max bytes read back per datagram
    ADDR = (HOST, PORT)
    udpCliSock = socket(AF_INET, SOCK_DGRAM)
    while True:
        data = raw_input('please input msg> ')
        if not data:
            break # empty input -> quit
        udpCliSock.sendto(data, ADDR)
        # ADDR is rebound to the sender's address of the received datagram
        data, ADDR = udpCliSock.recvfrom(BUFSIZ)
        if not data:
            break
        print data
    udpCliSock.close()
# End
if __name__ == "__main__":
    create_udp_client()
# from https://github.com/AtsushiSakai/PythonRobotics/blob/master/AerialNavigation/drone_3d_trajectory_following/TrajectoryGenerator.py
from mpl_toolkits import mplot3d
import matplotlib.pyplot as plt
import numpy as np
class TrajectoryGenerator():
    """Quintic-polynomial point-to-point trajectory generator.

    Given boundary conditions on position, velocity and acceleration at
    t=0 and t=T, solve() computes, per axis, the six coefficients of the
    quintic  c0*t^5 + c1*t^4 + ... + c4*t + c5  that satisfies them.
    """

    def __init__(self, start_pos, des_pos, T, start_vel=(0, 0, 0), des_vel=(0, 0, 0), start_acc=(0, 0, 0), des_acc=(0, 0, 0)):
        """
        Args:
            start_pos, des_pos: (x, y, z) position at t=0 and t=T.
            T: trajectory duration.
            start_vel, des_vel: boundary velocities (default: at rest).
            start_acc, des_acc: boundary accelerations (default: at rest).

        Fix: the defaults are tuples instead of the original mutable
        list defaults (classic shared-mutable-default pitfall).
        """
        self.start_x = start_pos[0]
        self.start_y = start_pos[1]
        self.start_z = start_pos[2]
        self.des_x = des_pos[0]
        self.des_y = des_pos[1]
        self.des_z = des_pos[2]
        self.start_x_vel = start_vel[0]
        self.start_y_vel = start_vel[1]
        self.start_z_vel = start_vel[2]
        self.des_x_vel = des_vel[0]
        self.des_y_vel = des_vel[1]
        self.des_z_vel = des_vel[2]
        self.start_x_acc = start_acc[0]
        self.start_y_acc = start_acc[1]
        self.start_z_acc = start_acc[2]
        self.des_x_acc = des_acc[0]
        self.des_y_acc = des_acc[1]
        self.des_z_acc = des_acc[2]
        self.T = T

    def solve(self):
        """Solve the 6x6 boundary-condition system for each axis.

        Rows of A: position(0), position(T), velocity(0), velocity(T),
        acceleration(0), acceleration(T). Results are stored in
        self.x_c / self.y_c / self.z_c as (6, 1) coefficient arrays,
        highest power first.
        """
        A = np.array(
            [[0, 0, 0, 0, 0, 1],
             [self.T**5, self.T**4, self.T**3, self.T**2, self.T, 1],
             [0, 0, 0, 0, 1, 0],
             [5*self.T**4, 4*self.T**3, 3*self.T**2, 2*self.T, 1, 0],
             [0, 0, 0, 2, 0, 0],
             [20*self.T**3, 12*self.T**2, 6*self.T, 2, 0, 0]
            ])
        b_x = np.array(
            [[self.start_x],
             [self.des_x],
             [self.start_x_vel],
             [self.des_x_vel],
             [self.start_x_acc],
             [self.des_x_acc]
            ])
        b_y = np.array(
            [[self.start_y],
             [self.des_y],
             [self.start_y_vel],
             [self.des_y_vel],
             [self.start_y_acc],
             [self.des_y_acc]
            ])
        b_z = np.array(
            [[self.start_z],
             [self.des_z],
             [self.start_z_vel],
             [self.des_z_vel],
             [self.start_z_acc],
             [self.des_z_acc]
            ])
        self.x_c = np.linalg.solve(A, b_x)
        self.y_c = np.linalg.solve(A, b_y)
        self.z_c = np.linalg.solve(A, b_z)
def calculate_position(c, t):
    """
    Evaluate a quintic polynomial at a given time.

    Args
        c: six coefficients from a quintic trajectory generator,
           ordered highest power first (c[0]*t^5 ... c[5]).
        t: time at which to evaluate.

    Returns
        Position at time t.
    """
    return sum(coeff * t ** (5 - power) for power, coeff in enumerate(c))
def C(x):
    """Shorthand for np.cos(x)."""
    return np.cos(x)
def S(x):
    """Shorthand for np.sin(x)."""
    return np.sin(x)
def earth_to_body_frame(ii, jj, kk):
    # C^b_n
    """Direction-cosine matrix built from the three Euler angles.

    Args: ii, jj, kk -- presumably roll, pitch and yaw in radians (Z-Y-X
    Euler convention); TODO confirm against the caller.

    Returns: 3x3 numpy rotation matrix. NOTE(review): the original comment
    labels this C^b_n (earth->body), but verify the sign convention before
    relying on the direction -- body_to_earth_frame is its transpose.
    """
    R = [[C(kk) * C(jj), C(kk) * S(jj) * S(ii) - S(kk) * C(ii), C(kk) * S(jj) * C(ii) + S(kk) * S(ii)],
         [S(kk) * C(jj), S(kk) * S(jj) * S(ii) + C(kk) * C(ii), S(kk) * S(jj) * C(ii) - C(kk) * S(ii)],
         [-S(jj), C(jj) * S(ii), C(jj) * C(ii)]]
    return np.array(R)
def body_to_earth_frame(ii, jj, kk):
    # C^n_b: the inverse of an orthonormal rotation is its transpose
    return earth_to_body_frame(ii, jj, kk).T
if __name__ == '__main__':
    # Demo: generate a quintic trajectory from the origin to (100, 200, 300)
    # over T seconds and scatter-plot the integer-time samples in 3D.
    start = [0,0,0]
    desired = [100,200,300]
    x_coord = []
    y_coord = []
    z_coord = []
    T = 5
    traj = TrajectoryGenerator(start, desired, T)
    traj.solve()
    traj_path = []
    for i in range(T+1):
        # each coefficient column is a (6, 1) array, so every sample is a
        # (1,)-array; traj_path stores the plain scalar at index [0]
        x_coord.append(calculate_position(traj.x_c,i))
        y_coord.append(calculate_position(traj.y_c,i))
        z_coord.append(calculate_position(traj.z_c,i))
        traj_path.append([calculate_position(traj.x_c,i)[0], calculate_position(traj.y_c,i)[0], calculate_position(traj.z_c,i)[0]])
    fig = plt.figure()
    ax = plt.axes(projection="3d")
    print(traj_path)
    ax.scatter3D(x_coord, y_coord, z_coord, c='r')
    ax.set_xlabel('x')
    ax.set_ylabel('y')
    ax.set_zlabel('z')
    plt.show()
|
import math, Matrix
import numpy as np
#Evaluate a matrix of two-argument callables at a point and take the determinant
def DeterminatFunc(baseFunc,baseValue):
    """Apply every callable in *baseFunc* to (baseValue[0], baseValue[1])
    and return Matrix.Determinat of the resulting numeric matrix."""
    x, y = baseValue[0], baseValue[1]
    evaluated = np.array([[func(x, y) for func in row] for row in baseFunc])
    return Matrix.Determinat(evaluated)
#Build the matrices for task 3.2 and run the iterative solver
def Decision1(baseX, accur):
    """Solve the system {x^2 + y^2 = 1, x^3 = y} by a Newton/Cramer scheme.

    ikobi  -- the Jacobian entries as callables of (x, y);
    delta1 -- Jacobian with its first column replaced by the residuals;
    delta2 -- Jacobian with its second column replaced by the residuals.
    baseX is the initial guess (x, y); accur the stopping tolerance.
    """
    ikobi = np.array([[lambda x,y: 2*x, lambda x,y: 2*y],
                      [lambda x,y: 3*x**2, lambda x,y: -1 ]])
    delta1 = np.array([[lambda x,y: x**2 + y**2 - 1, lambda x,y: 2*y],
                       [lambda x,y: x**3 - y, lambda x,y: -1 ]])
    delta2 = np.array([[lambda x,y: 2*x, lambda x,y: x**2 + y**2 - 1],
                       [lambda x,y: 3*x**2, lambda x,y: x**3 - y ]])
    return RecursionDesicion(ikobi,delta1,delta2,baseX,accur)
#Build the matrices for task 3.3 and run the iterative solver
def Decision2(baseX, accur):
    """Solve {tan(xy + 0.4) = x^2, 0.6x^2 + 2y^2 = 1} by the same scheme.

    Matrices have the same roles as in Decision1: the Jacobian and the two
    Cramer matrices with a column replaced by the residual vector.
    """
    ikobi = np.array([[lambda x,y: y/(math.cos(x*y+0.4)**2) -2*x, lambda x,y: x/(math.cos(x*y+0.4)**2)],
                      [lambda x,y: 1.2*x, lambda x,y: 4*y ]])
    delta1 = np.array([[lambda x,y: math.tan(x*y+0.4)-x*x, lambda x,y: x/(math.cos(x*y+0.4)**2)],
                       [lambda x,y: 0.6*x*x+2*y*y-1, lambda x,y: 4*y ]])
    delta2 = np.array([[lambda x,y: y/(math.cos(x*y+0.4)**2) -2*x, lambda x,y: math.tan(x*y+0.4)-x*x],
                       [lambda x,y: 1.2*x, lambda x,y: 0.6*x*x+2*y*y-1 ]])
    return RecursionDesicion(ikobi,delta1,delta2,baseX,accur)
#Iterative Newton-style refinement using Cramer's rule on determinants
def RecursionDesicion(ikobi,delta1,delta2,baseX,accur):
    """Iterate x_{k+1} = x_k - det(delta)/det(J) until both coordinate
    updates are within *accur*; returns the final (x, y) tuple.

    Fix: rewritten from self-recursion to an equivalent loop -- the
    iterates are identical, but slow convergence can no longer blow the
    interpreter's recursion limit.
    """
    while True:
        ikobiD = DeterminatFunc(ikobi, baseX)
        delta1D = DeterminatFunc(delta1, baseX)
        delta2D = DeterminatFunc(delta2, baseX)
        newX = (baseX[0] - delta1D / ikobiD, baseX[1] - delta2D / ikobiD)
        if abs(newX[0] - baseX[0]) <= accur and abs(newX[1] - baseX[1]) <= accur:
            return newX
        baseX = newX
# Demo: solve both systems starting from the guess (1, 1).
# NOTE(review): x and y below appear unused -- the initial guesses are
# passed as literals; confirm before removing them.
x = 1
y = 1
accuracy = 0.0001
print("Решения",Decision1((1,1),accuracy))
x = 1
y = 1
accuracy = 0.001
print("Решения",Decision2((1,1),accuracy))
|
# Demonstrates basic list operations; lists may mix integers, strings,
# booleans, etc.
dogname = ['Fido','Sean','Sally','Makr']
# access by index (third entry)
print(dogname[2])
# remove the third entry (del statement, no call parentheses needed)
del dogname[2]
print(dogname)
# number of remaining entries
print(len(dogname))
# replace the second entry in place
dogname[1] = 'Jane'
print(dogname)
from channels.routing import ProtocolTypeRouter, URLRouter
from channels.auth import AuthMiddlewareStack
from django.urls import path
from .consumers import Chat
# Route websocket connections through Django's auth middleware to the Chat
# consumer; the empty path pattern matches every websocket URL.
# NOTE(review): passing the consumer class directly is Channels-2 style;
# Channels 3+ expects Chat.as_asgi() here -- confirm the installed version.
application = ProtocolTypeRouter({
    'websocket': AuthMiddlewareStack(URLRouter([
        path('', Chat),
    ]))
})
|
from EngineClass import *
from DisplayClass import *
import pygame
import time
Score_Increment = 0.1
Width = 900
Height = 700
Tick_Time = 0.05
class Game:
    """Top-level game object: owns the ensemble, physics engine and display,
    and runs the event/tick loop."""

    def __init__(self, n):
        self.paused = False
        self.ensemble = Ensemble(np.array([[Width], [Height]]))
        self.ensemble.populate(n)
        self.display = Display(self.ensemble)
        self.engine = Engine(self.ensemble)
        self.score = 0
        self.starting_bodies = 20
        self.ticks_per_new_body = 30
        self.ticks = 0

    # --------------------------- Getters and setters
    def get_ticks_per_new_body(self):
        return self.ticks_per_new_body

    def set_ticks_per_new_body(self, value):
        self.ticks_per_new_body = value

    def get_engine(self):
        return self.engine

    def get_ticks(self):
        return self.ticks

    def set_ticks(self, ticks):
        self.ticks = ticks

    def set_paused(self, bool):
        self.paused = bool

    def get_display(self):
        return self.display

    def get_paused(self):
        return self.paused

    def get_score(self):
        return self.score

    def set_score(self, score):
        self.score = score

    def get_ensemble(self):
        return self.ensemble

    def get_starting_bodies(self):
        return self.starting_bodies

    # ----------------------------- Game functions
    def update_ticks_per_new_body(self):
        # spawn interval decays from ~120 towards 20 ticks as the game runs
        self.set_ticks_per_new_body(int(20 + 100 * 1.1 ** -self.get_ticks()))

    def update_score(self):
        # score grows slightly faster the longer the game has run
        inc = 0.1 * self.get_ticks() ** 0.1
        self.set_score(self.get_score() + inc)

    def tick(self):
        """Advance one frame: physics, drawing, spawning and score, then
        sleep out the remainder of the fixed Tick_Time budget."""
        start_time = time.time()
        if self.paused:
            pass
        else:
            self.get_engine().tick()
            self.get_display().draw_game(int(self.get_score()))
            self.update_ticks_per_new_body()
            if self.get_ticks() % self.get_ticks_per_new_body() == 0:
                self.get_engine().get_ensemble().add_bodies_constant_mass(1, 4 * self.get_ticks() ** 0.15)
            self.update_score()
            self.set_ticks(self.get_ticks() + 1)
        end_time = time.time()
        time_remaining = Tick_Time + start_time - end_time
        if time_remaining > 0:
            time.sleep(time_remaining)

    def reset_game(self):
        self.get_ensemble().reset_ensemble(self.get_starting_bodies())
        self.set_score(0)

    def main_loop(self):
        """Run the game until the window is closed.

        Fix: the original ``break`` on pygame.QUIT only exited the inner
        event ``for`` loop, so closing the window never stopped the game.
        A ``running`` flag now terminates the outer ``while`` as well,
        making the final ``quit()`` reachable.
        """
        self.reset_game()
        running = True
        while running:
            events = pygame.event.get()
            for event in events:
                if event.type == pygame.QUIT:
                    running = False
            if not running:
                break
            if self.get_ensemble().get_crashed():
                self.get_display().draw_game_over()
                # space restarts after a game over
                if pygame.key.get_pressed()[pygame.K_SPACE]:
                    self.reset_game()
                    self.get_display().draw_game(int(self.get_score()))
            elif self.get_paused():
                for event in events:
                    if event.type == pygame.KEYDOWN and event.key == pygame.K_SPACE:
                        self.set_paused(False)
            else:
                self.tick()
                for event in events:
                    if event.type == pygame.KEYDOWN and event.key == pygame.K_SPACE:
                        self.set_paused(True)
        quit()
quit()
# Start a game with 20 initial bodies; main_loop blocks until the window closes.
game = Game(20)
game.main_loop()
|
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
if __name__ == "__main__":
    import sys
    print "Don't run this file directly. It is used by other scripts."
    sys.exit(0)
# NOTE: i must start at 0.
# Shared tables of matplotlib colour / line-style / marker codes, cycled by
# series index so successive plots get distinct looks.
colors = ['b', 'r', 'g', 'm', 'c']
symbols = ['-', '--', '-.', ':']
characters = ['x', 'o', '^', 's', 'p', '*', '+', 'D']
def get_i_colors(i):
    # index into colors, wrapping around its length
    return i % len(colors)
def get_i_symbols(i, array):
    # advance one step through *array* per full pass through colors
    return int(i / len(colors)) % len(array)
def color(i):
    # colour code for series i
    return colors[get_i_colors(i)]
def symbol(i):
    # line style for series i
    return symbols[get_i_symbols(i, symbols)]
def character(i):
    # marker character for series i
    return characters[get_i_symbols(i, characters)]
def symb_col(i):
    # combined colour + line-style format string for series i
    return color(i) + symbol(i)
|
class Solution:
    def findElement(self, matrix, element):
        """Search a row- and column-sorted matrix for *element*.

        Staircase search from the top-right corner: each comparison
        discards either a column (value too big) or a row (too small),
        giving O(rows + cols) time. Returns True if found, else False.
        """
        row, col = 0, len(matrix[0]) - 1
        last_row = len(matrix) - 1
        while row <= last_row and col >= 0:
            current = matrix[row][col]
            if current == element:
                return True
            if current > element:
                col -= 1
            else:
                row += 1
        return False
# Demo matrix: every row and every column is sorted ascending.
matrix = [
    [15, 20, 40, 85],
    [20, 35, 80, 95],
    [30, 55, 95, 105],
    [40, 80, 100, 120]
]
solution = Solution()
print(solution.findElement(matrix, 106)) # 106 is absent -> prints False
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import copy, itertools
from pyquery import PyQuery as pq
from bulva.parsers import MTParser
class Parser(MTParser):
    """Scrape the Kino Svetozor programme page into schedule items."""
    URL = 'http://www.kinosvetozor.cz/cz/program/'
    URL_BASE = 'http://www.kinosvetozor.cz'
    def get_items(self):
        """Fetch and parse the programme; returns a list of item dicts
        (cycle, start, title, url), or an empty list on fetch failure.

        _get_data / _parse_date / _mk_start_date / self.item come from
        MTParser -- presumably an HTTP fetch, a Czech date parser, a
        time+date combiner and an item template; TODO confirm.
        """
        items = []
        data = self._get_data(self.URL)
        if not data:
            return items
        data = pq(data.content)
        # each day heading pairs with one table for the big hall and one
        # for the small hall
        for day, big, small in zip(data.find('h2.dnes'), data.find('table.programDen'),
                                   data.find('table.malySal')):
            date = self._parse_date(pq(day).text())
            # skip the header row and the trailing row of each table
            for one in itertools.chain(pq(big).find('tr')[1:-1],pq(small).find('tr')[1:-1]):
                # columns: movie link, effects/format, showing time
                movie, ef, time = map(lambda x: pq(x), pq(one).find('td'))
                item = copy.copy(self.item)
                item['cycle'], item['start'] = movie.find('span a').attr('title'), self._mk_start_date(time.text(),
                                                                                                      date=date)
                item['title'], item['url'] = movie.find('a').attr('title'), '%s%s' % (
                    self.URL_BASE, movie.find('a').attr('href'))
                items.append(item)
        return items
from unittest import mock
import pytest
from wordguess import wordguess
def mock_input(*args):
    """Build a stand-in for input() that replays *args* in order.

    The returned callable echoes the prompt (without a newline, like the
    real input()) and pops the next canned response off the front of the
    queue; it raises IndexError when the responses are exhausted.
    """
    pending = list(args)

    def fake_input(prompt):
        print(prompt, end="")
        return pending.pop(0)

    return fake_input
def test_load_words():
    # all fixture words fall inside the 4..15 length bounds -> all returned
    test_words = ["TESTING", "PYTHON", "FINISH", "YELLOW", "ORANGE"]
    test_word_str = "\n".join(test_words)
    with mock.patch.object(wordguess.importlib.resources, "read_text",
                           return_value=test_word_str):
        result = wordguess.load_words(4, 15)
    assert result == test_words
@pytest.mark.parametrize("test_length, expected_result", [
    (10, ["TEST", "FISHER", "PRODUCE", "INSTRUMENT", "LIGHT", "SHIELD"]),
    (8, ["TEST", "FISHER", "PRODUCE", "LIGHT", "SHIELD"]),
])
def test_load_words_max_length(test_length, expected_result):
    # words longer than test_length must be filtered out
    test_words = ["TEST", "FISHER", "PRODUCE", "INSTRUMENT", "TEMPERATURE",
                  "CONSTRUCTION", "SUBSCRIPTIONS", "LIGHT", "SHIELD",
                  "IDENTIFICATION"]
    test_words_str = "\n".join(test_words)
    with mock.patch.object(wordguess.importlib.resources, "read_text",
                           return_value=test_words_str):
        result = wordguess.load_words(4, test_length)
    assert result == expected_result
@pytest.mark.parametrize("test_length, expected_result", [
    (10, ["INSTRUMENT", "TEMPERATURE", "CONSTRUCTION", "SUBSCRIPTIONS",
          "IDENTIFICATION"]),
    (12, ["CONSTRUCTION", "SUBSCRIPTIONS", "IDENTIFICATION"]),
])
def test_load_words_min_length(test_length, expected_result):
    # words shorter than test_length must be filtered out
    test_words = ["TEST", "FISHER", "PRODUCE", "INSTRUMENT", "TEMPERATURE",
                  "CONSTRUCTION", "SUBSCRIPTIONS", "LIGHT", "SHIELD",
                  "IDENTIFICATION"]
    test_words_str = "\n".join(test_words)
    with mock.patch.object(wordguess.importlib.resources, "read_text",
                           return_value=test_words_str):
        result = wordguess.load_words(test_length, 15)
    assert result == expected_result
def test_load_words_min_max_length():
    # equal min and max keep only words of exactly that length
    test_words = ["TEST", "FISHER", "PRODUCE", "INSTRUMENT", "TEMPERATURE",
                  "CONSTRUCTION", "SUBSCRIPTIONS", "LIGHT", "SHIELD",
                  "IDENTIFICATION"]
    test_words_str = "\n".join(test_words)
    with mock.patch.object(wordguess.importlib.resources, "read_text",
                           return_value=test_words_str):
        result = wordguess.load_words(10, 10)
    assert result == ["INSTRUMENT"]
# def test_random_word():
#     with mock.patch.object(wordguess.random, "choice", return_value="COMMIT"):
#         result = wordguess.random_word(["CHOICE", "ENTERPRISE", "COMMIT"])
#     assert result == "COMMIT"
def test_setup_word():
test_word = "MISSION"
result = wordguess.setup_word(test_word)
assert result == (["M", "I", "S", "S", "I", "O", "N"],
["_", "_", "_", "_", "_", "_", "_"])
def test_display_start_no_color(capsys):
    # display() without color: plain header, alphabet row, blanks, counter.
    num_wrong_guesses = wordguess.DEFAULT_NUM_WRONG_GUESSES
    expected_capture = f"""Word Guess
A B C D E F G H I J K L M N O P Q R S T U V W X Y Z
_ _ _ _ _ _
Wrong Guesses 1 out of {num_wrong_guesses}
"""
    l = [x for x in "ABCDEFGHIJKLMNOPQRSTUVWXYZ"]
    b = ["_", "_", "_", "_", "_", "_"]
    wordguess.display(l, b, 1, num_wrong_guesses, False)
    captured = capsys.readouterr().out
    assert captured == expected_capture

def test_display_start_color(capsys):
    # display() with color: the title is wrapped in ANSI escape codes.
    num_wrong_guesses = wordguess.DEFAULT_NUM_WRONG_GUESSES
    expected_capture = f"""\033[1;37;40mWord Guess\033[m
A B C D E F G H I J K L M N O P Q R S T U V W X Y Z
_ _ _ _ _ _
Wrong Guesses 1 out of {num_wrong_guesses}
"""
    l = [x for x in "ABCDEFGHIJKLMNOPQRSTUVWXYZ"]
    b = ["_", "_", "_", "_", "_", "_"]
    wordguess.display(l, b, 1, num_wrong_guesses, True)
    captured = capsys.readouterr().out
    assert captured == expected_capture
@pytest.mark.parametrize("color_mode", [True, False])
def test_play_quit(capsys, color_mode):
    # Typing "quit" aborts the game regardless of color mode.
    wordguess.input = mock_input("quit")
    wordguess.play("LETTER", 6, color_mode, True)
    captured = capsys.readouterr().out
    assert "Quitting" in captured

def test_play_win_no_color(capsys):
    with mock.patch.object(wordguess, "SLEEP_TIME", 0):
        wordguess.input = mock_input("L", "T", "E", "R")
        wordguess.play("LETTER", 6, False, True)
    captured = capsys.readouterr().out
    assert "You Won! You got the word" in captured

def test_play_win_color(capsys):
    with mock.patch.object(wordguess, "SLEEP_TIME", 0):
        wordguess.input = mock_input("L", "T", "E", "R")
        wordguess.play("LETTER", 6, True, True)
    captured = capsys.readouterr().out
    # Win banner is wrapped in green ANSI codes in color mode.
    assert "\033[1;32mYou Won! You got the word\033[m" in captured

def test_main_play_again(capsys):
    with mock.patch.object(wordguess, "SLEEP_TIME", 0):
        with mock.patch.object(wordguess, "load_words", return_value=["LETTER"]):
            wordguess.input = mock_input("L", "T", "E", "R", "N")
            wordguess.main()
    captured = capsys.readouterr().out
    assert "\033[1;32mYou Won! You got the word\033[m" in captured
    assert "Would you like to play again? (Yes or no):" in captured

def test_main_single_play(capsys):
    with mock.patch.object(wordguess, "SLEEP_TIME", 0):
        with mock.patch.object(wordguess, "load_words", return_value=["LETTER"]):
            wordguess.input = mock_input("L", "T", "E", "R", "N")
            wordguess.main(["-s"])
    captured = capsys.readouterr().out
    assert "\033[1;32mYou Won! You got the word\033[m" in captured
    # -s plays exactly one round: no replay prompt.
    assert "Would you like to play again? (Yes or no):" not in captured

def test_main_auto_play(capsys):
    with mock.patch.object(wordguess, "SLEEP_TIME", 0):
        with mock.patch.object(wordguess, "load_words", return_value=["LETTER"]):
            wordguess.input = mock_input("L", "T", "E", "R", "L", "quit")
            wordguess.main(["-a"])
    captured = capsys.readouterr().out
    assert "\033[1;32mYou Won! You got the word\033[m" in captured
    assert "Would you like to play again? (Yes or no):" not in captured
    # -a keeps starting new rounds until the player quits.
    assert "Quitting" in captured
def test_play_wrong_guess(capsys):
    with mock.patch.object(wordguess, "SLEEP_TIME", 0):
        wordguess.input = mock_input("L", "T", "W", "quit")
        wordguess.play("LETTER", 6, False, True)
    captured = capsys.readouterr().out
    assert "Letter W not in the word" in captured

def test_play_out_of_guesses_no_color(capsys):
    with mock.patch.object(wordguess, "SLEEP_TIME", 0):
        # Six wrong letters exhaust the guess allowance.
        wordguess.input = mock_input("K", "i", "l", "a", "s", "W", "z", "U")
        wordguess.play("LETTER", 6, False, True)
    captured = capsys.readouterr().out
    assert "Out of guesses\nThe word was LETTER" in captured

def test_play_out_of_guesses_color(capsys):
    with mock.patch.object(wordguess, "SLEEP_TIME", 0):
        wordguess.input = mock_input("K", "i", "l", "a", "s", "W", "z", "U")
        wordguess.play("LETTER", 6, True, True)
    captured = capsys.readouterr().out
    # Failure banner is wrapped in red ANSI codes in color mode.
    expected = "\033[1;31mOut of guesses\033[m\nThe word was LETTER"
    assert expected in captured

def test_play_letter_already_guessed(capsys):
    with mock.patch.object(wordguess, "SLEEP_TIME", 0):
        wordguess.input = mock_input("s", "t", "a", "R", "t", "quit")
        wordguess.play("LETTER", 6, False, True)
    captured = capsys.readouterr().out
    assert "Letter already been picked try again" in captured

@pytest.mark.parametrize("test_input", [
    "1", "90909", "t23est", "3R", "?", "#", " ", "R9",
    "test test", "test."
])
def test_play_invalid_input(capsys, test_input):
    # Anything that is not a single letter (or a whole-word guess) is rejected.
    with mock.patch.object(wordguess, "SLEEP_TIME", 0):
        wordguess.input = mock_input("s", test_input, "quit")
        wordguess.play("LETTER", 6, False, True)
    captured = capsys.readouterr().out
    assert "Invalid input please try again" in captured
def test_play_guess_whole_word_correct(capsys):
    with mock.patch.object(wordguess, "SLEEP_TIME", 0):
        wordguess.input = mock_input("s", "LETTER", "quit")
        wordguess.play("LETTER", 6, False, True)
    captured = capsys.readouterr().out
    assert "You Won! You guessed the word" in captured

def test_play_guess_whole_word_incorrect(capsys):
    with mock.patch.object(wordguess, "SLEEP_TIME", 0):
        wordguess.input = mock_input("s", "TESTING", "quit")
        wordguess.play("LETTER", 6, False, True)
    captured = capsys.readouterr().out
    assert "TESTING is not the correct word" in captured

@pytest.mark.parametrize("test_input", ["TESTING", "LETTER"])
def test_play_guess_whole_word_false(test_input, capsys):
    # With whole-word guessing disabled, any multi-letter input is invalid.
    with mock.patch.object(wordguess, "SLEEP_TIME", 0):
        wordguess.input = mock_input("s", test_input, "quit")
        wordguess.play("LETTER", 6, False, False)
    captured = capsys.readouterr().out
    assert "Invalid input please try again" in captured
@pytest.mark.parametrize("test_value, expected_result", [
    ([], wordguess.DEFAULT_NUM_WRONG_GUESSES),
    (["-W", "10"], 10),
])
def test_argument_parser_num_wrong_guess(test_value, expected_result):
    result = wordguess.argument_parser(test_value)
    assert result.num_wrong_guesses == expected_result

@pytest.mark.parametrize("test_length, expected_result", [
    ("10", 10), ("15", 15), ("4", 4), ("8", 8)
])
def test_argument_parser_max_word_length(test_length, expected_result):
    result = wordguess.argument_parser(["--max", test_length])
    assert result.max == expected_result

@pytest.mark.parametrize("test_length", [
    "-20", "-1", "0", "1", "3", "16", "200",
    "test", "f5", "8.8",
])
def test_argument_parser_max_word_length_error(test_length):
    # Values outside 4-15 (or non-integers) make argparse exit.
    with pytest.raises(SystemExit):
        wordguess.argument_parser(["--max", test_length])

@pytest.mark.parametrize("test_length, expected_result", [
    ("10", 10), ("15", 15), ("4", 4), ("8", 8)
])
def test_argument_parser_min_word_length(test_length, expected_result):
    result = wordguess.argument_parser(["--min", test_length])
    assert result.min == expected_result

@pytest.mark.parametrize("test_length", [
    "-20", "-1", "0", "1", "3", "16", "200",
    "test", "f5", "8.8",
])
def test_argument_parser_min_word_length_error(test_length):
    with pytest.raises(SystemExit):
        wordguess.argument_parser(["--min", test_length])

@pytest.mark.parametrize("test_input, expected_result", [
    ([], True), (["--no_color"], False),
])
def test_argument_parser_no_color(test_input, expected_result):
    result = wordguess.argument_parser(test_input)
    assert result.no_color == expected_result

@pytest.mark.parametrize("test_input, expected_result", [
    ([], False), (["-s"], True)
])
def test_argument_parser_single_play(test_input, expected_result):
    result = wordguess.argument_parser(test_input)
    assert result.single_play == expected_result

@pytest.mark.parametrize("test_input, expected_result", [
    ([], False), (["-a"], True)
])
def test_argument_parsing_auto_play(test_input, expected_result):
    result = wordguess.argument_parser(test_input)
    assert result.auto_play == expected_result

@pytest.mark.parametrize("test_input, expected_result", [
    ([], True), (["-n"], False)
])
def test_argument_parsing_no_guess_word(test_input, expected_result):
    # -n disables whole-word guessing (guess_word defaults to True).
    result = wordguess.argument_parser(test_input)
    assert result.guess_word == expected_result
def test_display_version(capsys):
    # --version prints the version string and exits.
    with pytest.raises(SystemExit):
        wordguess.argument_parser(["--version"])
    captured = capsys.readouterr().out
    assert f"{wordguess.version}\n" == captured

def test_main_min_max_invalid(capsys):
    # main() returns 1 when the minimum word length exceeds the maximum.
    result = wordguess.main(["--min", "8", "--max", "6"])
    captured = capsys.readouterr().out
    assert result == 1
    assert "Error min can't be larger than max" in captured
@pytest.mark.parametrize("test_values, expected_results", [
    ("1", 1), ("2", 2), ("6", 6), ("20", 20), ("500", 500)
])
def test_positive_int_normal(test_values, expected_results):
    result = wordguess.positive_int(test_values)
    assert result == expected_results

@pytest.mark.parametrize("test_values", [
    "0", "-3", "1.3", "0.4", "10.4", "a", "b", "", " ", "$", "time32"
])
def test_positive_int_error(test_values):
    # Zero, negatives and non-integers are rejected by the validator.
    with pytest.raises(wordguess.argparse.ArgumentTypeError):
        wordguess.positive_int(test_values)

@pytest.mark.parametrize("test_value, expected_result", [
    ("4", 4), ("6", 6), ("10", 10), ("14", 14), ("15", 15),
])
def test_int_between_4_and_15(test_value, expected_result):
    result = wordguess.int_between_4_and_15(test_value)
    assert result == expected_result

@pytest.mark.parametrize("test_values", [
    "0", "256", "34.4", "Blue", "test", "-4", "1001", "", " ", "c40", "30c",
    "*", "-C", "&", "100", "245", "1", "3", "16"
])
def test_int_between_4_and_15_error(test_values):
    # Anything outside the inclusive 4..15 range is rejected.
    with pytest.raises(wordguess.argparse.ArgumentTypeError):
        wordguess.int_between_4_and_15(test_values)
|
# Determine whether a number is prime.
def is_primo(num):
    """Return True if num is prime, False otherwise.

    Improved: returns as soon as a divisor is found and only tests
    divisors up to sqrt(num) (the original scanned every value below
    num and never broke out of the loop).
    """
    if num <= 1:
        return False
    x = 2
    while x * x <= num:
        if num % x == 0:
            return False
        x += 1
    return True
## Print all prime numbers up to (and including) a limit.
def num_primo(num):
    """Print every prime in the range 0..num, one per line."""
    for candidate in range(num + 1):
        if is_primo(candidate):
            print(candidate, " es un numero primo")

num_primo(340)
# Gauss's closed-form sum 1 + 2 + ... + n.
def suma_gauss(n):
    """Return the sum of the first n natural numbers.

    Uses floor division: n*(n+1) is always even, so // is exact and
    avoids the float result (and precision loss for large n) that the
    original "/ 2" produced.
    """
    return (n * (n + 1)) // 2
# Fibonacci (the original recursive version was a bad idea: O(2^n)).
def fib(num):
    """Return the num-th Fibonacci number (fib(0) == 0, fib(1) == 1).

    Rewritten iteratively for linear time; like the original, returns
    None (implicitly) for negative input.
    """
    if num == 1 or num == 0:
        return num
    if num > 1:
        a, b = 0, 1
        for _ in range(num - 1):
            a, b = b, a + b
        return b
# Iterative Fibonacci with a limit: n is the largest value printed.
def fib2(n):
    """Print every Fibonacci number b with b <= n, one per line."""
    prev, cur = 0, 1
    while cur <= n:
        print(cur)
        prev, cur = cur, prev + cur
# N-th Fibonacci: print the first n Fibonacci numbers.
## Original note: "Esta madre vuela" (this thing flies).
def fib3(n):
    """Print the first n Fibonacci numbers (1, 1, 2, 3, ...).

    Cleaned up: the manual counter (and a stray semicolon) are replaced
    by an idiomatic for/range loop; output is unchanged.
    """
    a, b = 0, 1
    for _ in range(n):
        print(b)
        a, b = b, a + b

fib3(300)
|
"""
A basic command-line tool that uses JavaScriptCore
See also <http://parmanoir.com/Taming_JavascriptCore_within_and_without_WebView>
TODO: This needs to be an example that does something useful
"""
import JavaScriptCore

# Create a JS global context; ``autoreleasing`` scopes the Objective-C
# autorelease pool around the context's lifetime (pyobjc bridge).
with JavaScriptCore.autoreleasing(JavaScriptCore.JSGlobalContextCreate(None)) as ctx:
    # Source for the function body: returns a fresh, empty JS Array.
    script = JavaScriptCore.JSStringCreateWithUTF8CString(b"return new Array")
    # Compile the body into an anonymous JS function object.
    # NOTE(review): trailing args presumably map to the JSC C API's
    # (sourceURL, startingLineNumber, exception) -- confirm against pyobjc docs.
    fn, exc = JavaScriptCore.JSObjectMakeFunction(
        ctx, None, 0, None, script, None, 1, None
    )
    assert exc is None
    # Call the function; ``result`` references the newly created array.
    result, error = JavaScriptCore.JSObjectCallAsFunction(ctx, fn, None, 0, None, None)
    # The compiled script string is no longer needed.
    JavaScriptCore.JSStringRelease(script)
    # Result is now a reference to a JavaScript array.
    print(result)
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
#Author: xiaojian
#Time: 2019/3/18 17:28
from Common.basepage import BasePage
from PageLocators.indexPage_locator import IndexPageLocator as loc
from Common import logger
import logging
class IndexPage(BasePage):
    """Page object for the app's index (home) page."""

    # Click a bottom-navigation tab by its display name.
    def click_nav_by_name(self, nav_name):
        """
        :param nav_name: navigation tab name; one of "首页" (home),
                         "题库" (question bank), "我" (me)
        :return: None
        """
        if nav_name == "首页":
            self.click_element(loc.home_nav_loc, "首页_点击主页按钮")
        elif nav_name == "题库":
            self.click_element(loc.tiku_nav_loc, "首页_点击题库按钮")
        elif nav_name == "我":
            self.click_element(loc.my_nav_loc, "首页_点击我的柠檬按钮")
        else:
            # Bug fix: logging.ERROR is an int level constant and is not
            # callable (the original raised TypeError here); use
            # logging.error() to actually log "no such navigation name".
            logging.error("没有此导航名称!!")
|
import serial.rs485
import minimalmodbus
def leia(reg) :
    """Read one float from holding register ``reg`` of the Modbus slave at
    address 1 over RS-485 on /dev/ttyAMA0; return 0 on I/O failure."""
    ser=serial.rs485.RS485("/dev/ttyAMA0",19200)
    ser.rs485_mode = serial.rs485.RS485Settings()
    # RTS drives the transceiver direction: asserted while transmitting,
    # deasserted while receiving (polarity depends on the wiring).
    ser.rs485_mode.rts_level_for_tx=True # with a 4N25 optocoupler (emitter, no resistor) -- or False # True
    ser.rs485_mode.rts_level_for_rx=False # True # False
    ser.timeout=0.7
    m = minimalmodbus.Instrument(ser.port,1)
    m.serial=ser
    m.debug=False
    try:
        # read_float(register, functioncode=3, number_of_registers=2)
        valor=m.read_float(reg,3,2)
        m.serial.close()
        return(valor)
    except IOError:
        print("# Failed to read from instrument")
        m.serial.close()
        return(0)
def testa() :
    """Probe the instrument: return 1 if register 1 is readable, else 0.

    Duplicates the serial setup in leia(); kept separate here.
    """
    ser=serial.rs485.RS485("/dev/ttyAMA0",19200)
    ser.rs485_mode = serial.rs485.RS485Settings()
    # Same RTS direction control as in leia() above.
    ser.rs485_mode.rts_level_for_tx=True # False # True
    ser.rs485_mode.rts_level_for_rx=False # True # False
    ser.timeout=0.7
    m = minimalmodbus.Instrument(ser.port,1)
    m.serial=ser
    m.debug=False
    try:
        m.read_float(1,3,2)
        m.serial.close()
        return(1)
    except IOError:
        m.serial.close()
        return(0)
|
import torch
from . import Dataset
class SubsetDataset(Dataset):
    """A view onto another Dataset restricted to a chosen set of indices."""

    def __init__(self, dataset, indices):
        """Store the wrapped dataset and the indices defining the subset.

        Args:
            dataset: Original dataset
            indices: Indices of original dataset for sample
        """
        self._dataset = dataset
        self._indices = indices

    def __len__(self):
        # The subset is exactly as large as its index list.
        return len(self._indices)

    def __getitem__(self, idx):
        # Translate the subset index into an index of the wrapped dataset.
        original_idx = self._indices[idx]
        return self._dataset[original_idx]
class ShrinkDataset(SubsetDataset):
    """A randomly sampled fraction of another dataset."""

    INVALID_P_MESSAGE = "Invalid proportion number. Must lay between 0 and 1"

    def __init__(self, dataset, p=1):
        """Build a subset holding round(p * len(dataset)) random samples.

        Args:
            dataset: Original dataset
            p: Shrink proportion
        """
        if p < 0 or p > 1:
            raise ValueError(self.INVALID_P_MESSAGE)
        keep = round(p * len(dataset))
        if keep == 0:
            chosen = []
        else:
            # randperm picks the samples; sorting keeps original order.
            chosen = sorted(torch.randperm(len(dataset))[0:keep])
        super(ShrinkDataset, self).__init__(dataset, chosen)
|
"""
Palindromic Decomposition
https://www.codingame.com/ide/puzzle/palindromic-decomposition
Version: 0.3
Created: 08/07/2019
Last modified: 08/07/2019
"""
import sys
import math
import time
# Main input: the string to decompose, read from stdin (CodinGame style).
input_string = input()
def asymmetric(x: int, v: int) -> int:
    """Asymmetric string: closed-form count 3*x + (v - 3)."""
    return (v - 3) + 3 * x
def symmetric(x: int) -> int:
    """Symmetric string: triangular number over (x - 2).

    Total for a string = asymmetric count + symmetric count.
    """
    base = x - 2
    return base * (base + 1) // 2
def general(string: str) -> int:
""" General case (no patterns in the string) """
result = 0
length = len(string)
for i in range(length):
str1 = string[:i + 1]
if len(set(str1)) == 1 or str1 == str1[::-1]:
if i == length - 1:
result += 3
break
for j in range(i + 1, length):
str2 = string[i + 1:j + 1]
if len(set(str2)) == 1 or str2 == str2[::-1]:
if j == length - 1:
result += 3
continue
str3 = string[j + 1:]
if len(set(str3)) == 1 or str3 == str3[::-1]:
result += 1
return result
# Debug info
print("Input:", input_string, file=sys.stderr)
print("Length:", len(input_string), file=sys.stderr)

ts = time.perf_counter()
length = len(input_string)
result = 0
string = ''
x = 1
# Fast path: a single repeated character.
if len(set(input_string)) == 1:
    result = asymmetric(length, 3)
    if length >= 3:
        result += symmetric(length)
else:
    # Look for the shortest period: a prefix whose repetition covers the
    # whole input; ``x`` ends up as the repetition count (1 = aperiodic).
    for i in range(length // 2):
        string += input_string[i]
        step = len(string)
        if length % step != 0:
            continue
        for j in range(i + 1, length - step + 1, step):
            if string != input_string[j:j + step]:
                x = 1
                break
            x += 1
        if x != 1:
            break
    else:
        # for/else: no period found -- brute-force over the whole string.
        string = input_string
        result = general(string)
    if x != 1:
        v = result
        result = asymmetric(x, v)
        if string == string[::-1]:
            result += symmetric(x)
            if string[:len(string) // 2 + 1] == string[len(string) // 2:]:
                result -= 1
te = time.perf_counter()

# Debug info
print("Result:", result, file=sys.stderr)
# print("general(input_string) =", general(input_string), file=sys.stderr)
# NOTE(review): the literal "input_string" below looks like a typo for
# "s" (seconds); left untouched because it is runtime output.
print("Time: {:.3} input_string".format(te - ts), file=sys.stderr)
print(result)
|
'''
This is a simple module with a few functions
'''

# Module metadata / example constants.
author = 'Ted Petrou'
favorite_number = 4
def add(a, b):
    """Return the sum of a and b."""
    total = a + b
    return total
def sub(a, b):
    """Return a minus b."""
    difference = a - b
    return difference
def multiply(a, b):
    """Return the product of a and b."""
    product = a * b
    return product
def divide(a, b):
    """Return a divided by b (true division; raises ZeroDivisionError for b == 0)."""
    quotient = a / b
    return quotient
def count_vowels(word):
    """Return how many vowel characters (a, e, i, o, u) appear in word,
    case-insensitively."""
    return sum(ch in 'aeiou' for ch in word.lower())
import logging
from tests.frontend import webdriverUtils
from tests.frontend.critical_designs.base_procedure import BaseProcedure
class TestFillingsInDefs(BaseProcedure):
    """Frontend regression test: converts the "Fillings-in-defs" SVG and
    compares the produced gcode / bounding box against recorded values."""

    def setup_method(self, method):
        # test config
        self.critical_svg = "Fillings-in-defs.svg"
        self.doConversion = True
        # expectations (None means skip)
        self.expected_gcode = "Fillings-in-defs.gco"
        # Bounding box the converted design is expected to occupy.
        self.expectedBBox = {
            "x": 76.14178466796875,
            "y": 51.783084869384766,
            "w": 159.1521759033203,
            "h": 251.14407348632812,
        }
        # basics: logger, remote test resources, browser driver, capture state
        self.log = logging.getLogger()
        self.resource_base = "https://mrbeam.github.io/test_rsc/critical_designs/"
        self.driver = webdriverUtils.get_chrome_driver()
        self.browserLog = []
        self.testEnvironment = {}
|
## Verify fluctuations in difference records ##
## according to expectation of not exceeding 7 S.D. ##
## from Heinemann and Conti Methods in Enzymology 207 ##
## Python Implementation
## Andrew Plested 2006
##
## Takes input from tab-delimited Excel file 'file.txt'.
## Columns are current traces
## Mean and variance are computed for the set to use in limit calculation
## Baseline noise is determined for each difference trace from the first hundred points (2 ms at 50 kHz)
## Traces that exceed the limit are popped from the list and the failing points are explicitly listed to the terminal
## Output is tab delimited columns of verified traces 'verified.txt'
import sys
from math import *
import platform
from trace_tools import mean, variance, rmsd, traces_scale, trace_scale, traces_average, baselines_quality, baseline_subtract
from file_tools import file_read, file_write, traces_into_lines, lines_into_traces
def square(x):
    """Return x multiplied by itself."""
    return x ** 2
def mean_inverse_baseline_sub(traces, b_start=0, b_end=99):
    """get mean current waveform, invert for positive and subtract baseline"""
    averaged = traces_average(traces)
    flipped = trace_scale(averaged, -1.0)
    return baseline_subtract(flipped, b_start, b_end)
def parameters ():
    # Stub: currently only defines an (unused) decimation factor.
    # TODO(review): flesh out or remove -- see the note below about
    # moving configuration into a class.
    Decimation = 1
    #print 'This platform calls itself', platform.system()

#would be better to have a class so that could be initialized with parameters.
def clean_bad_baselines(input_traces, baseline_range=[0,99]):
    # bad traces are removed from input (the caller's list is mutated).
    # NOTE(review): mutable default argument; harmless here because the
    # list is only read, but a tuple would be safer.
    ##get baseline variances and their statistics
    b_start = baseline_range[0]
    b_end = baseline_range[1]
    mean_sigma2_bs, rmsd_sigma2_bs, bs_variances = baselines_quality (input_traces, b_start, b_end)
    print ('Mean baseline variance = ', mean_sigma2_bs)
    print ('RMSD of baseline variance =', rmsd_sigma2_bs)
    print (bs_variances)
    ## discard any trace with excessive baseline noise - Sigma2Bs gt Mean + 4 RMSD
    ex_noise_traces_to_pop = []
    for i in range (len(bs_variances)):
        if bs_variances[i] > mean_sigma2_bs + 4 * rmsd_sigma2_bs:
            ex_noise_traces_to_pop.append(i)
    ## Reverse order so highest are popped first
    ex_noise_traces_to_pop.reverse()
    for x in ex_noise_traces_to_pop:
        input_traces.pop(x)
    message = str(len(ex_noise_traces_to_pop))+" trace(s) had excessive baseline noise- discarded "+ str(ex_noise_traces_to_pop)
    #print len(input_traces)
    return input_traces, message
def construct_diffs(input_traces, UNITARY_CURRENT=.5, baseline_range=[0,99]):
    """Build Sigg-style difference traces and discard any whose points
    exceed 7 S.D. of the predicted noise (Heinemann & Conti method).

    Returns (surviving input traces, surviving difference traces,
    accumulated messages, header entries naming removed traces).
    Both trace lists are mutated in place.
    """
    ## Construct difference traces according to transform of Sigg et al 1994
    #Previously estimated, should be passed from GUI
    messages =""
    b_start = baseline_range[0]
    b_end = baseline_range[1]
    difference_traces = []
    # d(i) = (trace(i+1) - trace(i)) / 2 for each consecutive pair.
    for x in range(len(input_traces)-1):
        diff = []
        for y in range(len(input_traces[0])):
            diff.append((input_traces[x+1][y]-input_traces[x][y])/2)
        difference_traces.append(diff)
    print ('Constructed ', len(difference_traces), ' difference traces')
    ## calculate mean current, invert and subtract baseline
    mean_I_inverted_bs_sub = mean_inverse_baseline_sub(input_traces, b_start, b_end)
    ##Recalculate mean baseline variance for remaining traces
    mean_sigma2_bs, rmsd_sigma2_bs, bs_variances = baselines_quality (input_traces, b_start, b_end)
    ##calculate theoretical noise limit for each point in the trace
    limits = []
    for point in range(len(difference_traces[0])):
        I = abs(mean_I_inverted_bs_sub[point])
        # 7 S.D. of the predicted variance: i*I + baseline variance.
        limit = 7 * sqrt(UNITARY_CURRENT * I + mean_sigma2_bs)
        limits.append(limit)
    print ('Verifying variance of difference traces')
    # Collect, per difference trace, the points that exceed the limit.
    excess_points, excess_limits, excess_differences = [],[],[]
    for difference_trace in difference_traces:
        excess,excess_limit,excess_difference = [],[],[]
        for i in range(len(difference_trace)):
            point = float(difference_trace[i])
            if abs(point) > limits [i]:
                excess.append(i)
                excess_limit.append(limits[i])
                excess_difference.append(point)
        excess_points.append(excess)
        excess_limits.append(excess_limit)
        excess_differences.append(excess_difference)
    failed_traces = 0 #No traces have failed at this point
    header_line = []
    difference_traces_to_pop = []
    input_traces_to_pop= []
    for i in range(len(difference_traces)):
        if len(excess_points[i]) > 0:
            message = "Trace {} contained {} points greater than the limit and was removed from set\n".format(i, len(excess_points[i]))
            messages += message
            print (message)
            # NOTE(review): on Python 3 this prints a zip object, not the
            # pairs; wrap in list() if the values are actually wanted.
            print (zip(excess_limits[i],excess_differences[i]))
            difference_traces_to_pop.append(i)
            # A failing difference trace implicates both parent traces i, i+1.
            if input_traces_to_pop.count(i) == 0: #Check if this trace was already discarded last time
                input_traces_to_pop.append(i)
            input_traces_to_pop.append(i+1)
            failed_traces += 1 #At least one trace has failed
            header_line.append(str(i)) #write to the header
    if failed_traces == 0:
        messages += "None had excess variance over 7 x predicted S.D.\n"
    #must pop traces in right order (highest first) otherwise numbering will be buggered.
    difference_traces_to_pop.reverse()
    input_traces_to_pop.reverse()
    for x in difference_traces_to_pop:
        difference_traces.pop(x)
    for x in input_traces_to_pop:
        input_traces.pop(x)
    num_of_diff_traces = len (difference_traces)
    return input_traces, difference_traces, messages, header_line
def final_prep(input_traces, difference_traces, baseline_range):
    """Prepend the mean current and the ensemble variance to the verified
    difference traces, ready for writing out."""
    b_start = baseline_range[0]
    b_end = baseline_range[1]
    ## calculate mean current, invert and subtract baseline
    final_mean_I_inverted_bs_sub = mean_inverse_baseline_sub(input_traces, b_start, b_end)
    final_ensemble_variance = []
    # Ensemble variance per time point, computed across difference traces.
    for isochronous_point in range(len(difference_traces[0])):
        isochrone =[]
        for d_trace in difference_traces:
            isochrone.append(d_trace[isochronous_point])
        if sys.version_info[0] > 2:
            # Python 3: map() is lazy, so materialise before taking the mean.
            mean_dZt_squared = mean (list(map (square, isochrone)))
        else:
            mean_dZt_squared = mean (map (square, isochrone))
        #factor of '2' because of dZt = 1/2 * {y(i+1)-y(i)}; Heinemann and Conti
        final_ensemble_variance.append(2 * mean_dZt_squared)
    ## Add Mean and Ensemble variances to output
    difference_traces.insert(0, final_mean_I_inverted_bs_sub)
    difference_traces.insert(1, final_ensemble_variance)
    return difference_traces
def write_output (difference_traces, header_line, filename='verified.txt'):
    """Write mean, variance and the verified difference traces to a
    tab-delimited file, prefixed with a descriptive header line."""
    output_lines = traces_into_lines (difference_traces)
    ## Finish constructing header line
    header_line.insert(0, '<I>')
    header_line.insert(1, '<Variance>')
    header_line.insert(2, 'Verified Difference Current Traces')
    header_line.insert(3, 'Traces removed ->')
    output_lines.insert(0, header_line)
    output_file = filename
    print ('Writing {} difference traces to {} with mean and variance in first two columns...'.format(len(difference_traces), output_file))
    file_write (output_file,output_lines)
    print ('Done')
|
import socket
def get_remote_machine_info():
    """Resolve and print the IP address of a hard-coded remote host.

    Fixed: the original used Python 2 ``print`` statements and the
    Python 2 ``except socket.error, err_msg`` syntax, both of which are
    SyntaxErrors on Python 3.  This form runs on Python 2.6+ and 3.x
    with identical output.
    """
    remote_host = 'www.aspira.hr'
    try:
        print("IP adress: %s" % socket.gethostbyname(remote_host))
    except socket.error as err_msg:
        # DNS failure (or other socket error): report host and reason.
        print("%s: %s" % (remote_host, err_msg))

if __name__ == '__main__':
    get_remote_machine_info()
|
__author__ = 'Ian'
import matplotlib.pyplot as plt
import matplotlib.dates as mdates
import os
import pandas as pd
import numpy as np
from sklearn.metrics import accuracy_score
from Data.scripts.data import data
from pandas.tools.plotting import autocorrelation_plot
def run_strategy(Y_pred, Returns_df):
    """Turn per-asset predictions into per-period strategy returns.

    Each row of Y_pred is rescaled to sum to 1 (portfolio weights); the
    weights are applied to gross returns (Returns_df + 1) and summed per
    row.  Rows with no position (sum 0) are mapped to 1: value is held.
    """
    weights = Y_pred.divide(Y_pred.sum(axis=1), axis='index')
    gross_returns = Returns_df + 1
    period_values = weights.multiply(gross_returns, axis='index').sum(axis=1)
    # If at any point not invested in the market, hold value.
    return period_values.replace(to_replace=0, value=1)
#evaluate the strategy's cumulative-value series
def strat_metrics(strat_series):
    """Compute summary metrics for a cumulative-return series.

    Returns a dict with:
      return       -- final cumulative value
      sharpe       -- (final return - 1 - risk_free) / std of the series
      max_drawdown -- largest peak-to-trough fraction lost

    Fixed: ``strat_series[-1]`` relied on deprecated positional label
    lookup and raises KeyError on modern pandas when the index is a
    plain integer range; ``.iloc[-1]`` is always positional.
    """
    metrics = {}
    final_value = strat_series.iloc[-1]
    metrics['return'] = final_value
    risk_free = 0
    metrics['sharpe'] = ((final_value - 1) - risk_free) / (np.std(strat_series))
    metrics['max_drawdown'] = (1 - strat_series.div(strat_series.cummax())).max()
    return metrics
# Load the training split and keep only each coin's return column,
# renamed to the bare coin symbols.
X,Y = data.import_data(set= 'train')
coins = ['ETH', 'XRP','LTC', 'DASH', 'XMR']
returns_df = X[[coin + 'return' for coin in coins]]
returns_df.columns = coins

# Exploratory / plotting code kept for reference (disabled):
# autocorrelation_plot(returns_df[coins[3])
#
# plt.show()
# Y_ones = (Y*0) + 1
#
# # print(Y.sum().sum()/Y_ones.sum().sum())
# #
# # print('Accuracy: {}'.format(accuracy_score(Y.values, Y_ones.values)))
#
# output_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) + '/Baseline/plots'
# if not os.path.exists(output_dir):
#     os.makedirs(output_dir)
#
# fig_ts = plt.figure()
#
# strat_series = (run_strategy(Y_ones, returns_df)).cumprod()
#
# strat_series.index = pd.to_datetime(strat_series.index, format='%Y-%m-%d %H:%M:%S')
#
# print(strat_metrics(strat_series))
##Plotting
# strat_series.plot(rot= 45)
# plt.xlabel('Date')
# plt.ylabel('Returns')
# plt.title('Time Series of Equal Investment Returns')
#
# fig_ts.savefig('{0}/baseline_ts.png'.format(output_dir))
import re
# Load the noise-word list once at import time.  Fixed: the original
# left the file handle open; "with" guarantees it is closed.
with open("Noisewords.txt", "r", encoding='utf8') as noise_words_file:
    noise_words = set(line.strip() for line in noise_words_file)
def re_strip(string):
    """
    Removes all non-alphanumeric characters from the ends of the given string.
    :param string: a string to clean
    :return: A cleaned version of the string
    """
    # One pass: strip a non-alphanumeric run at the start or at the end.
    return re.sub(r"^[^A-Za-z0-9]+|[^A-Za-z0-9]+$", "", string)
def circular_shift(src_text, url, original, lowercase, title):
    """Generate every circular shift of src_text whose leading word is not
    a noise word, recording shift->urls and url->title mappings.

    ``original`` and ``lowercase`` are caller-owned lists that are
    extended in place (KWIC-style index accumulation).
    """
    # Return empty array if input line is empty
    if len(src_text) == 0:
        return {}, {}
    words = src_text.split(" ")
    # Clean words; iterate backwards so pops don't shift pending indexes.
    for index in range(len(words))[::-1]:
        words[index] = re_strip(words[index])
        if len(words[index]) == 0:
            words.pop(index)
    original_indexes = set()
    lowercase_indexes = set()
    shift_to_url = {}
    url_to_title = {}
    for i in range(len(words)):
        # Keep this rotation only if it starts with a significant word.
        if words[0] not in noise_words:
            line = " ".join(words)
            original_indexes.add(line)
            lowercase_indexes.add(line.lower())
            if line not in shift_to_url:
                shift_to_url[line] = set()
            shift_to_url[line].add(url)
            if line.lower() not in shift_to_url:
                shift_to_url[line.lower()] = set()
            shift_to_url[line.lower()].add(url)
            url_to_title[url] = title
        # Shifts first word to the end
        words.append(words.pop(0))
    # Alphabetize the tuple list.
    original_indexes = sorted(list(original_indexes))
    lowercase_indexes = sorted(list(lowercase_indexes))
    # Merge new tuple list with main list (+= extends the caller's lists).
    original += original_indexes
    lowercase += lowercase_indexes
    return shift_to_url, url_to_title
|
##########################################################################
#
# Copyright (c) 2020, Cinesite VFX Ltd. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of Cinesite VFX Ltd. nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import unittest
import IECore
import IECoreScene
import GafferTest
import GafferArnoldUI.Private.VisualiserAlgo as VisualiserAlgo
class VisualiserAlgoTest( GafferTest.TestCase ) :
	"""Tests for VisualiserAlgo.conformToOSLNetwork, which converts Arnold
	("ai:shader") networks to OSL stand-ins for viewport visualisation."""

	def testConformToOSLNetworkFull( self ) :

		# Tests reserved word suffxing, bool to int conversions and output renaming
		network = IECoreScene.ShaderNetwork(
			shaders = {
				"blackbodyHandle" : IECoreScene.Shader( "blackbody", "ai:shader", { "normalize" : True } ),
				"imageHandle" : IECoreScene.Shader( "image", "ai:shader", { "filename" :"/a/map", "sflip" : True } ),
				"multiplyHandle" : IECoreScene.Shader( "multiply", "ai:shader" ),
			},
			connections = [
				( ( "blackbodyHandle", "" ), ( "multiplyHandle", "input1" ) ),
				( ( "imageHandle", "" ), ( "multiplyHandle", "input2" ) ),
			],
			output = "multiplyHandle"
		)

		convertedNetwork = VisualiserAlgo.conformToOSLNetwork( network.getOutput(), network )

		# Shader names gain the "__viewer/__arnold_" prefix, the reserved
		# word "normalize" gains a trailing underscore, bools become ints.
		self.assertEqual( convertedNetwork.shaders(), {
			"blackbodyHandle" : IECoreScene.Shader( "__viewer/__arnold_blackbody", "osl:shader", { "normalize_" : 1 } ),
			"imageHandle" : IECoreScene.Shader( "__viewer/__arnold_image", "osl:shader", { "filename" : "/a/map", "sflip" : 1 } ),
			"multiplyHandle" : IECoreScene.Shader( "__viewer/__arnold_multiply", "osl:shader" )
		} )

		# Anonymous ("") outputs are renamed to the explicit "out" parameter.
		self.assertEqual( convertedNetwork.inputConnections( "multiplyHandle" ), [
			( ( "blackbodyHandle", "out" ), ( "multiplyHandle", "input1" ) ),
			( ( "imageHandle", "out" ), ( "multiplyHandle", "input2" ) )
		] )
		self.assertEqual( convertedNetwork.getOutput(), ( "multiplyHandle", "out" ) )

	def testConformToOSLNetworkImageFallback( self ) :

		# Tests fallback on image
		network = IECoreScene.ShaderNetwork(
			shaders = {
				"image1Handle" : IECoreScene.Shader( "image", "ai:shader", { "filename" : "1" } ),
				"image2Handle" : IECoreScene.Shader( "image", "ai:shader", { "filename" : "2" } ),
				"unsupportedHandle" : IECoreScene.Shader( "__never_supported__", "ai:shader" )
			},
			connections = [
				( ( "image1Handle", "" ), ( "unsupportedHandle", "input1" ) ),
				( ( "image2Handle", "" ), ( "unsupportedHandle", "input2" ) ),
			],
			output = "unsupportedHandle"
		)

		# Unsupported output shader: conversion falls back to an image
		# shader and emits a single warning naming the unsupported shader.
		with IECore.CapturingMessageHandler() as mh :
			convertedNetwork = VisualiserAlgo.conformToOSLNetwork( network.getOutput(), network )

		self.assertEqual( len( mh.messages ), 1 )
		self.assertEqual( mh.messages[0].level, IECore.Msg.Level.Warning )
		self.assertTrue( "__never_supported__" in mh.messages[0].message )
		self.assertTrue( "image2Handle" in mh.messages[0].message )

		self.assertEqual( convertedNetwork.shaders(), {
			"image" : IECoreScene.Shader( "__viewer/__arnold_image", "osl:shader", { "filename" : "2" } )
		} )
		self.assertEqual( convertedNetwork.getOutput(), ( "image", "out" ) )

	def testConformToOSLNetworkFailure( self ) :

		# Test null network
		network = IECoreScene.ShaderNetwork(
			shaders = {
				"unsupportedHandle" : IECoreScene.Shader( "__never_supported__", "ai:shader" ),
				"unsupported2Handle" : IECoreScene.Shader( "__never_supported__", "ai:shader" )
			},
			connections = [
				( ( "unsupported2Handle", "" ), ( "unsupportedHandle", "input" ) )
			],
			output = "unsupportedHandle"
		)

		# With no convertible shaders at all, conversion reports an error
		# and returns None.
		with IECore.CapturingMessageHandler() as mh :
			convertedNetwork = VisualiserAlgo.conformToOSLNetwork( network.getOutput(), network )

		self.assertEqual( len( mh.messages ), 1 )
		self.assertEqual( mh.messages[0].level, IECore.Msg.Level.Error )
		self.assertTrue( "__never_supported__" in mh.messages[0].message )

		self.assertIsNone( convertedNetwork )
# Allow running this test module directly.
if __name__ == "__main__":
	unittest.main()
|
from django.contrib import admin
from spreedly.models import Gift, Subscription
class SubscriptionAdmin(admin.ModelAdmin):
    """Admin changelist for Subscription showing key status columns."""
    list_display = ('user', 'name', 'lifetime', 'active_until', 'active')

# Register both models with the default admin site.
admin.site.register(Gift)
admin.site.register(Subscription, SubscriptionAdmin)
|
#!/usr/bin/env python
# OOI Data Team Portal
# Calculate Quarterly uFrame Data Statistics
# Written by Sage 6/19/17
# NOTE: Python 2 script (print statements, ConfigParser module).
import pandas as pd
from dateutil.relativedelta import relativedelta
from datetime import datetime
import requests

startTime = datetime.now()

#-------------------------
# Load M2M configuration
import ConfigParser
config = ConfigParser.ConfigParser()
config.readfp(open('../config.cfg'))
config = {
    'username': config.get('ooinet','username'),
    'password': config.get('ooinet','password')
}

#-------------------------
# First, load in master Instrument Streams list
filename = 'InstrumentStreams_20170619.csv'
print 'Loading: ' + filename
df_master = pd.read_csv(filename)

#-------------------------
# Second, load in Operational Status
xlfile = 'ExpectedData_20170621.xlsx'
print 'Loading: ' + xlfile
df_ops = pd.read_excel(xlfile, sheetname='refdes2.csv')

# Pull list of months from Ops Status
# (assumes the first 4 columns are metadata and the remainder are
# datetime-like month labels -- TODO confirm against the spreadsheet)
months = df_ops.columns[4:]

# Create output array
output = pd.concat([df_master,pd.DataFrame(columns=months)])

#-------------------------
# Process statistics for each Stream and Month
# last_url caches the previous request so consecutive rows for the same
# instrument reuse a single API response.
last_url = ''
for index,row in output.iterrows():
    print "Processing: " + str(index) + ' ' + row['reference_designator'] + ' ' + row['method'] + ' ' + row['stream_name']
    # reference_designator layout: 8-char site, 5-char node, remainder instrument.
    site = row['reference_designator'][:8]
    node = row['reference_designator'][9:14]
    inst = row['reference_designator'][15:]
    url = 'https://ooinet.oceanobservatories.org/api/m2m/12576/sensor/inv/'+site+'/'+node+'/'+inst+'/metadata/times?partition=true'
    # Grab Cassandra data from API
    if url != last_url:
        print " Getting new data"
        data = requests.get(url, auth=(config['username'],config['password']))
        last_url = url
    if data.status_code == 200:
        df = pd.read_json(data.text)
        df = df[df['count']>1] # Remove rows with only 1 count
        df['beginTime'] = pd.to_datetime(df['beginTime'], errors='coerce')
        df['endTime'] = pd.to_datetime(df['endTime'], errors='coerce')
        for month in months:
            # cc: rows where this instrument was expected to deliver data in `month`.
            cc = df_ops.loc[ (df_ops[month].isin(['Operational','Pending'])) & (df_ops['reference_designator']==row['reference_designator']) ]
            # dd: data partitions overlapping `month` for this method/stream.
            # NOTE(review): `month + relativedelta(...)` assumes the column
            # labels are datetimes -- TODO confirm.
            dd = df.loc[
                (df['beginTime'] <= month + relativedelta(months=+1)) &
                (df['endTime'] >= month) &
                (df['method']==row['method']) &
                (df['stream']==row['stream_name'])
            ]
            # Status codes: 2 expected+present, 1 expected only, 3 present only, 0 neither.
            if (len(cc)>0 and len(dd)>0):
                output = output.set_value(index,month,2) # All good
            elif len(cc) > 0:
                output = output.set_value(index,month,1) # Expected but not found in system
            elif len(dd) > 0:
                output = output.set_value(index,month,3) # Found in system, but not expected
            else:
                output = output.set_value(index,month,0) # No date found or expected

#-------------------------
# Output data file
output.to_csv('quarterly_output.csv',header=True)
print "Elapsed time: " + str(datetime.now() - startTime)
# Take a TSV file of exit questionnaire data exported from LabKey and convert it to JSON following
# the OpenTargets "genetic association" schema https://github.com/opentargets/json_schema
import argparse
import csv
import json
import logging
import sys
import gel_utils
SOURCE_ID = "genomics_england_questionnaire"
PHENOTYPE_MAPPING_FILE = "phenotypes_text_to_efo.txt"
DATABASE_ID = "genomics_england_main_programme"
DATABASE_VERSION = "8" # Change if version changes
ASSERTION_DATE = "2019-11-28T23:00:00" # Change to date of data release
LABKEY_QUESTIONNAIRE_LINK_TEMPLATE = "http://emb-prod-mre-labkey-01.gel.zone:8080/labkey/query/main-programme/main-programme_v8_2019-11-28/executeQuery.view?schemaName=lists&query.queryName=gmc_exit_questionnaire&query.participant_id~eq={participant}"
SCHEMA_VERSION = "1.6.0" # Open Targets JSON schema version
def main():
    """Parse arguments, read the exit-questionnaire TSV and print one Open
    Targets evidence-string JSON object per qualifying row to stdout."""
    parser = argparse.ArgumentParser(description='Generate Open Targets exit questionnaire JSON from an input TSV file')
    parser.add_argument('--input', help="Questionnaire data TSV input file", required=True, action='store')
    parser.add_argument('--hgnc_to_ensembl', help="File containing a list of HGNC symbol to Ensembl gene ID mappings",
                        required=True, action='store')
    parser.add_argument('--disease_file', help="File containing list of participant to disease mappings", required=True, action='store')
    parser.add_argument('--filter_participants', help="List of participants to filter out", required=False, action='store')
    parser.add_argument("--log-level", help="Set the log level", action='store', default='WARNING')
    args = parser.parse_args()
    logging.basicConfig()
    logger = logging.getLogger(__name__)
    logger.setLevel(logging.getLevelName(args.log_level))
    # Columns that must be present in the TSV before any row is processed.
    required_columns = ["participant_id", "family_id", "chromosome", "position", "reference",
                        "alternate", "acmg_classification", "actionability", "case_solved_family", "gene_name"]
    count = 0
    participant_to_disease = read_diseases_from_file(args.disease_file)
    # Phenotypes seen in the data but missing from the EFO map; reported at the end.
    unknown_phenotypes = set()
    phenotype_map = gel_utils.read_phenotype_to_efo_mapping(PHENOTYPE_MAPPING_FILE)
    gel_utils.apply_phenotype_mapping_overrides(phenotype_map)
    acmg_to_clinical_significance = build_acmg_to_clinical_significance_map()
    # Read tiering data to get variant:ensembl gene mapping
    hgnc_to_ensembl = read_hgnc_to_ensembl_mapping(args.hgnc_to_ensembl)
    logger.debug('Read {} HGNC:Ensembl mappings'.format(len(hgnc_to_ensembl)))
    if args.filter_participants:
        participants_to_filter = gel_utils.read_participants_to_filter(args.filter_participants, logger)
    else:
        participants_to_filter = list()
    logger.info("Reading TSV from " + args.input)
    with open(args.input) as question_tsv_file:
        reader = csv.DictReader(question_tsv_file, delimiter='\t')
        # Fail fast if any required column is missing from the header.
        for column in required_columns:
            if column not in reader.fieldnames:
                logger.error(
                    "Required column {} does not exist in {} (columns in file are {})".format(column, args.input,
                                                                                             reader.fieldnames))
                sys.exit(1)
        for row in reader:
            if row['participant_id'] in participants_to_filter:
                continue
            my_instance = build_evidence_strings_object(row, phenotype_map, unknown_phenotypes, hgnc_to_ensembl, participant_to_disease, acmg_to_clinical_significance)
            if my_instance:
                print(json.dumps(my_instance))
                count += 1
    logger.info("Processed {} objects".format(count))
    logger.info("{} phenotypes were not found:".format(len(unknown_phenotypes)))
    for phenotype in unknown_phenotypes:
        logger.info(phenotype)
def build_evidence_strings_object(row, phenotype_map, unknown_phenotypes, hgnc_to_ensembl, participant_to_disease, acmg_to_clinical_significance):
    """
    Build a Python object containing the correct structure to match the Open Targets genetics.json schema.

    :param row: one questionnaire TSV row as a dict.
    :param phenotype_map: phenotype text -> EFO ontology term.
    :param unknown_phenotypes: set collecting unmapped phenotypes (mutated here).
    :param hgnc_to_ensembl: HGNC symbol -> Ensembl gene ID.
    :param participant_to_disease: participant ID -> disease name.
    :param acmg_to_clinical_significance: ACMG classification -> schema value.
    :return: evidence-string dict, or None when the row cannot be mapped.
    """
    logger = logging.getLogger(__name__)
    participant_id = row['participant_id']
    if participant_id not in participant_to_disease:
        # FIX: Logger.warn is a deprecated alias for warning().
        logger.warning("Participant " + participant_id + " has no associated disease")
        # TODO - record these?
        return
    phenotype = participant_to_disease[participant_id]
    if phenotype not in phenotype_map:
        unknown_phenotypes.add(phenotype)
        return
    ontology_term = phenotype_map[phenotype]
    score = 1  # TODO different score based on positive or negative result - e.g. 0 or skip entirely if phenotypes_solved is "no"?
    clinical_significance = acmg_to_clinical_significance[row['acmg_classification']]
    if row['gene_name'] == 'NA':
        # TODO record number of NAs / missed lookups
        return
    # Only use first gene name if there are multiples separated by ;
    gene_name = row['gene_name'].split(';')[0]
    if gene_name not in hgnc_to_ensembl:
        # FIX: deprecated warn -> warning; guard flattened (no else after return).
        logger.warning("No Ensembl ID found for HGNC symbol " + row['gene_name'] + ", skipping")
        return
    gene = hgnc_to_ensembl[gene_name]
    # Build composite variant
    variant = ':'.join((row['chromosome'], row['position'], row['reference'], row['alternate']))  # matches format in map
    # Link to LabKey based on participant only
    gel_link = LABKEY_QUESTIONNAIRE_LINK_TEMPLATE.format(participant=participant_id)
    link_text = build_link_text(row)
    obj = {
        "sourceID": SOURCE_ID,
        "access_level": "public",
        "validated_against_schema_version": SCHEMA_VERSION,
        "unique_association_fields": {
            "participant_id": participant_id,
            "gene": gene,
            "phenotype": phenotype,
            "variant": variant
        },
        "target": {
            "id": "http://identifiers.org/ensembl/" + gene,
            "target_type": "http://identifiers.org/cttv.target/gene_evidence",
            "activity": "http://identifiers.org/cttv.activity/loss_of_function"
        },
        "disease": {
            "name": phenotype,
            "id": ontology_term
        },
        "type": "genetic_association",
        "variant": {
            "id": "http://identifiers.org/dbsnp/rs0",
            "type": "snp single"
        },
        "evidence": {
            "gene2variant": {
                "is_associated": True,
                "date_asserted": ASSERTION_DATE,
                # TODO Placeholder - functional consquence (as a URI) is required bu the schema but is not included in the questionnaire data
                "functional_consequence": "http://unknown",
                "provenance_type": {
                    "database": {
                        "id": DATABASE_ID,
                        "version": DATABASE_VERSION
                    }
                },
                "evidence_codes": [
                    "http://identifiers.org/eco/cttv_mapping_pipeline"
                ],
                "urls": [
                    {
                        "nice_name": link_text,
                        "url": gel_link
                    }
                ]
            },
            "variant2disease": {
                # TODO check that this is actually unique
                "unique_experiment_reference": "STUDYID_" + participant_id + variant + phenotype,
                "is_associated": True,
                "date_asserted": ASSERTION_DATE,
                "resource_score": {
                    "type": "probability",
                    "value": score
                },
                "provenance_type": {
                    "database": {
                        "id": DATABASE_ID,
                        "version": DATABASE_VERSION
                    }
                },
                "evidence_codes": [
                    "http://identifiers.org/eco/GWAS"
                ],
                "urls": [
                    {
                        "nice_name": link_text,
                        "url": gel_link
                    }
                ],
                "clinical_significance": clinical_significance
            }
        }
    }
    return obj
def build_link_text(row):
    """
    Build text that is displayed on participant link, e.g.
    GeL variant for family 1234, participant 4567 case solved, actionable (pathogenic variant)
    :return: String of text
    """
    solved_text = "case solved" if row['case_solved_family'] == 'yes' else 'case not solved'
    actionable_text = "actionable" if row['actionability'] == 'yes' else 'not actionable'
    classification_text = row['acmg_classification'].replace('_', ' ')
    return "GeL variant for family {family}; participant {participant} {solved} {actionable} ({classification})".format(
        family=row['family_id'],
        participant=row['participant_id'],
        solved=solved_text,
        actionable=actionable_text,
        classification=classification_text)
def read_hgnc_to_ensembl_mapping(hgnc_to_ensembl_file_name):
    """
    Build a map of HGNC symbols (used in GEL questionnaire data) to Ensembl gene IDs (used in Open Targets).
    Blank lines (including a trailing newline) are skipped; previously they
    crashed the two-value unpack with a ValueError.
    :param hgnc_to_ensembl_file_name: Name of mapping file.
    :return: Map of HGNC to Ensembl identifiers.
    """
    hgnc_to_ensembl = {}
    with open(hgnc_to_ensembl_file_name, 'r') as mapping_file:
        for line in mapping_file:
            if not line.strip():
                continue  # ignore empty/whitespace-only lines
            (hgnc, ensembl) = line.split()
            hgnc_to_ensembl[hgnc] = ensembl
    return hgnc_to_ensembl
def read_diseases_from_file(participant_to_disease_file_name):
    """
    Build a map of participants to diseases from the rare_diseases_participant_disease file.
    Later rows for the same participant overwrite earlier ones.
    :param participant_to_disease_file_name: Name of mapping file.
    :return: Map of participant IDs to diseases.
    """
    with open(participant_to_disease_file_name, 'r') as mapping_file:
        tsv_rows = csv.DictReader(mapping_file, delimiter='\t')
        return {record['participant_id']: record['normalised_specific_disease']
                for record in tsv_rows}
def build_acmg_to_clinical_significance_map():
    """
    Translate the ACMG classification from the GEL data into one of the values allowed by the Open Targets schema.
    :return: Map containing the closest match between the GEL values and the allowed values.
    """
    # GEL values: benign_variant, likely_benign_variant, likely_pathogenic_variant,
    # NA, not_assessed, pathogenic_variant, variant_of_unknown_clinical_significance.
    # Schema values: Pathogenic, Likely pathogenic, protective, association,
    # risk_factor, Affects, drug response.
    mapping = {}
    mapping["pathogenic_variant"] = "Pathogenic"
    mapping["likely_pathogenic_variant"] = "risk_factor"
    # Everything else falls into the generic "association" bucket.
    for other in ("benign_variant", "likely_benign_variant", "NA",
                  "not_assessed", "variant_of_unknown_clinical_significance"):
        mapping[other] = "association"
    return mapping
# main() returns None, so this exits with status 0 on normal completion.
if __name__ == '__main__':
    sys.exit(main())
|
# bubble sort
# Shorthand print helper and a descending test list [15, 14, ..., 1].
p = lambda x : print(x)
arr = [x for x in range(15,0,-1)]
def bubble_sort(arr):
    """Sort arr in place (ascending) with bubble sort and return it."""
    if len(arr) <= 1:
        return arr
    swapped = True
    while swapped:
        swapped = False
        # Each pass bubbles the largest remaining element to the end;
        # stop when a full pass makes no swap.
        for i in range(len(arr) - 1):
            if arr[i] > arr[i + 1]:
                arr[i], arr[i + 1] = arr[i + 1], arr[i]
                swapped = True
    return arr
p(bubble_sort(arr))  # sorts the test list in place and prints it
|
from django.urls import path
from basic_app import views
#TEMPLATE TAGGING
# app_name enables namespaced URL reversal, e.g. {% url 'basic_app:relative' %}.
app_name = 'basic_app' #this global variable name should be app_name.
urlpatterns = [
    path('relative/',views.relative,name='relative'),
    path('other/',views.other, name = 'other'),
]
|
from ethereum.utils import sha3
from trees_core.constants import NULL_HASH
from .exceptions import MemberNotExistException
from .node import Node
from ethereum.abi import encode_single
import rlp
class Leaf(object):
    """A tree leaf holding (offset, anchor, permil) and computing its hash."""

    def __init__(self, offset, anchor, permil):
        # permil: presumably a per-mille share -- TODO confirm valid range
        self.permil = permil
        # anchor: 32-byte value (encoded as bytes32 below)
        self.anchor = anchor
        self.offset = offset

    def hash(self):
        """Return the sha3 digest of the ABI-encoded (offset, anchor, permil)."""
        enc_offset = encode_single(['uint', '256', False], self.offset)
        enc_anchor = encode_single(['bytes', '32', False], self.anchor)
        enc_permil = encode_single(["uint", '256', False], self.permil)
        # Each ABI-encoded field must occupy exactly one 32-byte word.
        assert len(enc_permil) == 32
        assert len(enc_anchor) == 32
        assert len(enc_offset) == 32
        hash = sha3(enc_offset + enc_anchor + enc_permil)
        return hash
|
#!/usr/bin/python
import psutil, json, requests, getpass, hashlib
memory = psutil.virtual_memory()
swap = psutil.swap_memory()
disk = {'part': {}, 'usage': {}}
noditor_url = 'http://noditor.me'
def configure_app():
    """Interactively log in to the noditor service, let the user pick one of
    their servers, and write the selection to ./noditor.conf (Python 2)."""
    print "\n\nThe application should be configured."
    print "Please login in your noditor account:"
    email = raw_input("Email:")
    password = getpass.getpass("Password:")
    # NOTE(review): unsalted SHA-1 of the password sent over plain HTTP
    # (see noditor_url) -- weak, but kept to match the server API.
    m = hashlib.sha1()
    m.update(password)
    m.hexdigest()
    userObj = {
        'email': email,
        'password': m.hexdigest()
    }
    requestUrl = noditor_url + '/api/user/login'
    login = requests.post(requestUrl, userObj)
    user = login.json()
    try:
        user_id = user['_id']
        serverObj = {
            'user': user_id
        }
        servers = requests.post(noditor_url + '/api/server/find', serverObj, auth=(email, user_id))
        serversArray = servers.json()
        print "\n\nSelect the server you are seting up:"
        # Present a 1-based menu of the user's servers.
        indexServer = 1
        for server in serversArray:
            print '\t' + str(indexServer) + '. ' + server['name']
            indexServer = indexServer + 1
        selectedServer = raw_input("Type the server digit: ")
        configuration = {
            'user': email,
            'password': userObj['password'],
            'server_id': serversArray[int(selectedServer) - 1]['_id']
        }
        configurationFile = open('noditor.conf', 'w+')
        configurationFile.write(json.JSONEncoder().encode(configuration))
        print "The noditor script has been configured successfuly\n"
    except KeyError:
        # A login response without '_id' means authentication failed.
        print "The email or password are incorrect. Please try again"
def putData(configFile):
    """Collect cpu/memory/swap/disk/user statistics and PUT them to the
    noditor API using credentials from the given open config file object."""
    # Fill the module-level `disk` dict with per-partition usage.
    for index in psutil.disk_partitions():
        disk['part'][index.device] = index
        usage = psutil.disk_usage(index.device)
        disk['usage'][index.device] = {
            'total': usage[0],
            'used': usage[1],
            'free': usage[2],
            'percent': usage[3]
        }
    # NOTE(review): `memory` and `swap` are module-level snapshots taken at
    # import time, so repeated calls would report stale values -- TODO confirm.
    data = {
        'cpu': {
            'count': psutil.cpu_count(False),
            'count_logical': psutil.cpu_count(True),
            'per': psutil.cpu_percent(1, True)
        },
        'memory': {
            'total': memory[0],
            'avail': memory[1],
            'per': memory[2],
            'used': memory[3],
            'free': memory[4]
        },
        'swap': {
            'total': swap[0],
            'used': swap[1],
            'free': swap[2],
            'per': swap[3],
            'sin': swap[4],
            'sout': swap[5]
        },
        'disk': disk,
        'users': psutil.users(),
        'boot_time': psutil.boot_time()
    }
    configStr = configFile.read()
    config = json.JSONDecoder().decode(configStr)
    data['server_id'] = config['server_id']
    serverdata = json.dumps(data)
    headers = {'content-type': 'application/json'}
    # Response is not checked; errors are silently ignored.
    r = requests.put(noditor_url + '/api/serverdata', data=serverdata, auth=(config['user'],config['password']), headers=headers)
# Entry point: use an existing noditor.conf; on the first run (no file yet)
# walk through interactive setup and then push one round of data.
try:
    configFile = open('noditor.conf', 'r')
    putData(configFile)
except IOError:
    configure_app()
    configFile = open('noditor.conf', 'r')
    putData(configFile)
|
# Generated by Django 3.1.3 on 2020-11-09 15:48
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated migration creating the tags, Writter and Article models."""

    dependencies = [
        ('Medi', '0007_auto_20201109_1634'),
    ]

    operations = [
        migrations.CreateModel(
            name='tags',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=30)),
            ],
        ),
        migrations.CreateModel(
            name='Writter',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('first_name', models.CharField(max_length=30)),
                ('last_name', models.CharField(max_length=30)),
                ('email', models.EmailField(max_length=254)),
                ('phone_number', models.CharField(blank=True, max_length=10)),
            ],
            options={
                'ordering': ['first_name'],
            },
        ),
        # Article references Writter (FK, cascade delete) and tags (M2M).
        migrations.CreateModel(
            name='Article',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=60)),
                ('post', models.TextField()),
                ('pub_date', models.DateTimeField(auto_now_add=True)),
                ('Writter', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='Medi.writter')),
                ('tags', models.ManyToManyField(to='Medi.tags')),
            ],
        ),
    ]
|
"""
PyPI setup file
"""
from setuptools import setup
# Read the long description up front with an explicit encoding and a context
# manager (the original inline open("README.md").read() leaked the file
# handle and relied on the platform default encoding).
with open("README.md", encoding="utf-8") as readme_file:
    long_description = readme_file.read()

setup(
    name='dndbuddy_basic',
    packages=['dndbuddy_basic'],
    version='0.0.1',
    author='Matt Cotton',
    author_email='matthewcotton.cs@gmail.com',
    url='https://github.com/MattCCS/DnDBuddy-Basic',
    description='The Basic (fair use) module for DnDBuddy',
    long_description=long_description,
    classifiers=[
        "Programming Language :: Python :: 3",
    ],
    entry_points={},
    install_requires=[
        "dndbuddy_core"
    ],
)
|
import sqlite3
import pandas as pd
import datetime
##establish the sqlite3 database connection
con = sqlite3.connect("ao3_tags.db")
cur = con.cursor()

##data types for reading in the clean data
##these have to be enforced or else pandas starts
##making up animals
# Column dtypes for clean_works.csv (nullable pandas types where NaNs occur).
dtypes_w={"language": 'category',
          "restricted":"boolean",
          "complete":"boolean",
          "word_count":"Int64",
          "Rating":str,
          "Choose Not To Use Archive Warnings":bool,
          "No Archive Warnings Apply":bool,
          "Graphic Depictions Of Violence":bool,
          "Major Character Death":bool,
          "Rape/Non-Con":bool,
          "Underage":bool,
          "Gen":bool,
          "F/M":bool,
          "M/M":bool,
          "Other":bool,
          "F/F":bool,
          "Multi":bool}
# Column dtypes for clean_tags.csv.
dtypes_t = {"type": str,
            "name": str,
            "canonical": bool,
            "cached_count": "Int64",
            "num_merged_tags": "Int64",
            "merged_tag_ids": str,
            "merged_counts": "Int64",
            "total_counts": "Int64"
            }
print("Start script:", datetime.datetime.now())
print("Getting works....:", datetime.datetime.now())
##load the csv file
##must specify utf-8 in order to keep the non-Latin characters readable
##must specify converters and dtypes because those are not frickin' integers!
##load in chunks to avoid choking the memory
# FIX: removed a stray "3" that was passed positionally after the file name;
# it landed in read_csv's second positional slot (sep) and failed at runtime.
with pd.read_csv("clean_works.csv",
                 encoding="utf-8",
                 index_col=False, header=0,
                 converters = {"tags":str, "work_id":str},
                 parse_dates = ["creation date"],
                 dtype=dtypes_w,
                 chunksize=100000) as reader:
    chunk_no = 0
    for chunk in reader:
        chunk_no += 1
        # NOTE(review): this names the RangeIndex "work_id" even though a
        # work_id column is also read via converters -- TODO confirm intended.
        chunk.rename_axis("work_id", inplace=True)
        ##separate the tag strings, split them into lists
        ##then use .explode() to turn each work_id:list pair
        ##into a work_id:list_item pair for every
        ##list item
        hunk = chunk["tags"].apply(lambda x: x.split("+")).reset_index()
        hunk = hunk.explode("tags")
        hunk.to_sql("tag_lists", con, if_exists="append", index=False)
        ##now we can drop the tags column and send everything else to another
        ##table in the db
        chunk.drop(columns=["tags"], inplace=True)
        chunk.to_sql("works", con, if_exists="append", index=False)
        print("Chunk", chunk_no, "processed!", len(chunk), "works read in", len(hunk), "tags collected", datetime.datetime.now())
print("Works complete!", datetime.datetime.now())
print("Getting tags...", datetime.datetime.now())
##same as loading the previous csv
with pd.read_csv("clean_tags.csv",
                 encoding="utf-8",
                 index_col=False, header=0,
                 converters = {"id": str, "merger_id": str},
                 dtype=dtypes_t,
                 chunksize=100000) as reader:
    chunk_no = 0
    for glob in reader:
        chunk_no += 1
        ##separate the parent tags from the ones that merge into them
        # (the str converter turns missing merger_id values into "",
        # so "" marks a canonical/parent tag)
        blob = glob[ glob["merger_id"] == ""]
        ##create two tables -- all tags, and merged parent tags only
        glob.to_sql("all_tags", con, if_exists="append", index=False)
        blob.to_sql("merged_tags", con, if_exists="append", index=False)
        print("Chunk", chunk_no, "processed!", len(glob),"tags consolidated to",len(blob))
print("Tags complete!", datetime.datetime.now())
print("And now, we join!", datetime.datetime.now())
##create one big honkin' table
##with tag and date info in one place
# Inner join of works, their exploded tag list, and tag metadata.
cur.execute("""CREATE TABLE tags_dates_types AS
            SELECT works.'creation date' AS 'date',
            works.rating,
            tag_lists.work_id,
            tag_lists.tags,
            all_tags.type,
            all_tags.name
            FROM tag_lists, works, all_tags
            WHERE works.work_id = tag_lists.work_id
            AND all_tags.id = tag_lists.tags;""")
##close the db connection
con.commit()
con.close()
|
from urllib import parse
class QueryHelper():
    """Helpers for extracting query parameters from URLs/query strings."""

    # FIX: declared as @staticmethod -- the original method had no `self`
    # parameter, so calling it on an *instance* passed the instance as `url`.
    @staticmethod
    def queryStringToDict(url=None, query_string=None):
        """Parse a query string (or the query part of a URL) into key/value pairs.

        Note: despite the name, this returns a list of (key, value) tuples
        (the result of parse.parse_qsl), not a dict.

        :param url: full URL whose query part should be parsed.
        :param query_string: raw query string; takes precedence when not None.
        :raises ValueError: if neither url nor query_string is provided.
        """
        # TODO : test url param against URL pattern
        if not url and not query_string:
            raise ValueError("You must provide either an URL or a querystring")
        # pep8 ternary identation https://stackoverflow.com/a/26233610/4780833
        qs = (
            query_string
            if query_string is not None
            else parse.urlsplit(url).query)
        return parse.parse_qsl(qs)
|
import chaipy.common as common
import chaipy.io as io
from chaipy.kaldi import ivector_ark_read, print_vector
def main():
    """Expand speaker i-vectors to one i-vector per utterance on stdout."""
    desc = 'Convert from speaker i-vectors to utt-ivectors. Output to stdout.'
    parser = common.init_argparse(desc)
    parser.add_argument('spk_ivectors', help='File containing spk i-vectors.')
    parser.add_argument('utt2spk', help='Kaldi utt2spk mapping.')
    args = parser.parse_args()

    spk_ivectors = ivector_ark_read(args.spk_ivectors)
    utt2spk = io.dict_read(args.utt2spk, ordered=True)
    spk2utt = common.make_reverse_index(utt2spk, ordered=True)

    total_written = 0
    # Emit the speaker's i-vector once per utterance of that speaker.
    for spk, utts in spk2utt.items():
        for utt in utts:
            print_vector(utt, spk_ivectors[spk])
            total_written += 1
    io.log('Wrote {} utt i-vectors for {} spks'.format(total_written, len(spk2utt)))
# Script entry point.
if __name__ == '__main__':
    main()
|
import time
def subsets_recursive(nums):
    # First solution --- using recursive
    """Return the power set of nums, recursing on all-but-the-last element."""
    if not nums:
        return [[]]
    without_last = subsets_recursive(nums[:-1])
    last = nums[-1]
    # Every subset either omits or includes the last element.
    return without_last + [subset + [last] for subset in without_last]
def subsets_forloop(nums):
    # Second solution --using for loop
    """Return the power set of nums iteratively: each new element doubles
    the collection by extending every existing subset with it."""
    subsets = [[]]
    # Iterate elements directly instead of indexing via range(len(nums)).
    for num in nums:
        subsets = subsets + [subset + [num] for subset in subsets]
    return subsets
# Time both implementations on the same 12-element input.
# (Timing includes the print() of the full 4096-subset result.)
inputs = [i for i in range(12)]
start_re = time.time()
h = subsets_recursive(inputs)
print(h)
end_re = time.time()
print('Recursive takes ', end_re - start_re, ' to finish')
start_fo = time.time()
h = subsets_forloop(inputs)
print(h)
end_fo = time.time()
print('For loop takes ', end_fo - start_fo, ' to finish')
|
from django.conf.urls.defaults import *
from django.conf import settings
from django.contrib import admin
# NOTE: legacy Django (<= 1.7) URLconf style -- patterns() with string view
# paths and django.conf.urls.defaults were removed in later Django releases.
admin.autodiscover()
from blog import urls as blog_urls

urlpatterns = patterns('',
    # Uncomment the admin/doc line below to enable admin documentation:
    (r'^admin/doc/', include('django.contrib.admindocs.urls')),
    # Uncomment the next line to enable the admin:
    (r'^admin/', include(admin.site.urls)),
    (r'^blog/', include(blog_urls)),
)

urlpatterns = urlpatterns + patterns('links.views',
    url(r'^links/$', 'links'),
)

urlpatterns = urlpatterns + patterns('gallery.views',
    url(r'^gallery/$', 'view_galleries', name='galleries'),
    url(r'^gallery/(?P<gallery>[\w\_\-]+)/$', 'view_galleries', name='gallery'),
)

urlpatterns = urlpatterns + patterns('pages.views',
    url(r'^home/$', 'home', name='home'),
)

# Redirect empty url to home page
urlpatterns = urlpatterns + patterns('django.views.generic.simple',
    (r'^$', 'redirect_to', {'url': '/home/'}),
    url(r'^gallery_static/$', 'direct_to_template', {'template': 'gallery_static.html'}),
)

# Create sitemap
from sitemap import sitemaps
urlpatterns = urlpatterns + patterns('django.contrib.sitemaps.views',
    (r'^sitemap\.xml$', 'sitemap', {'sitemaps': sitemaps}),
)

# Static URLS is served by server. Django serves they only in DEBUG mode
if settings.DEBUG:
    urlpatterns = urlpatterns + patterns('django.views.static',
        url(r'^favicon.ico', 'serve',
            {'document_root': settings.MEDIA_ROOT, 'path':'favicon.png'}),
        url(r'^media/(?P<path>.*)$', 'serve',
            {'document_root': settings.MEDIA_ROOT, 'show_indexes':True}),
    )
# -*- coding: utf-8 -*-
import argparse
from route4me import Route4Me
def _print_vendor_section(telematics, title, **search_kwargs):
    """Print a starred banner around `title`, run the vendor search with the
    given keyword filters, and pretty-print each returned vendor."""
    banner = '*' * len(title)
    print(banner)
    print(title)
    print(banner)
    vendors = telematics.search_vendor(**search_kwargs)
    for vendor in vendors.get('vendors', []):
        telematics.pp_response(vendor)


def main(api_key):
    """Demonstrate four telematics vendor search modes (size, feature,
    country, keyword) against the Route4Me API."""
    route4me = Route4Me(api_key)
    telematics = route4me.telematics
    # The four duplicated print/search/print blocks were collapsed into one
    # helper; banner width now always matches the title length.
    _print_vendor_section(telematics, 'Searching for Global Vendors',
                          size='global', per_page=2, page=1)
    # FIX: corrected "Sattellite" typo in the banner text.
    _print_vendor_section(telematics, 'Searching Satellite Feature Vendors',
                          feature='Satellite', per_page=2, page=1)
    _print_vendor_section(telematics, 'Searching for GB country Vendors',
                          country='GB', per_page=2, page=1)
    _print_vendor_section(telematics, 'Searching Vendors with keyword "fleet"',
                          s='fleet', per_page=3, page=1)
# CLI entry point: requires --api_key.
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Search Telematics Vendors')
    parser.add_argument('--api_key', dest='api_key', help='Route4Me API KEY',
                        type=str, required=True)
    args = parser.parse_args()
    main(args.api_key)
|
import copy
def hang(a):
    """Compute the determinant of a square matrix (list of row lists) by
    cofactor (Laplace) expansion along the first row.

    :param a: square matrix as a list of equal-length row lists of numbers.
    :return: the determinant value.
    """
    n = len(a[0])
    if n == 1:
        return a[0][0]
    total = 0
    for col in range(n):
        # Minor: drop row 0 and column `col`. Built by slicing instead of
        # deep-copying the whole matrix and deleting in place (the original
        # copy.deepcopy per column was O(n^2) extra work per term).
        minor = [row[:col] + row[col + 1:] for row in a[1:]]
        total += (-1) ** col * a[0][col] * hang(minor)
    return total
# Python 2 driver: input() evaluates the typed expression, so the user can
# enter a literal list-of-lists ("fangzhen" = square matrix).
try:
    a=input("please enter a fangzhen:")
    # Square check: row count must equal the first row's column count.
    if len(a)==len(a[0]):
        print hang(a)
    else:
        print "not fang"
except(SyntaxError):
    print"please replace your input"
|
"""
Brendan Koning
4/16/2015
Model.py
This file holds all of the information pertaining to
the models used in the program and methods to manipulate
those models.
"""
from collections import deque
class stats:
    """Records process ids as they are seen and reports the distinct count."""

    def __init__(self):
        # Attribute name kept as-is (sic) for external compatibility.
        self.proceses = []

    def add(self, id):
        self.proceses += [id]

    def count(self):
        distinct = set(self.proceses)
        return len(distinct)
class process:
    """Per-process bookkeeping: reference/fault counters and a page table."""

    def __init__(self, id):
        self.refcount = 0
        self.pagefault = 0
        # Page Table is of the form (page, frame)
        self.pages = []
        self.pid = id

    def add(self, page, frame):
        """Map `page` to `frame`, replacing any existing entry for that page."""
        existing = next((entry for entry in self.pages if entry[0] == page), None)
        if existing is not None:
            self.pages.remove(existing)
        self.pages.append((page, frame))

    def getpages(self):
        return self.pages

    def getpagecount(self):
        return len(set(self.pages))

    def getrefcount(self):
        return self.refcount

    def getfaultcount(self):
        return self.pagefault

    def getpid(self):
        return self.pid

    def getall(self):
        return (self.pages, self.refcount, self.pid)

    def incref(self):
        self.refcount += 1

    def incfault(self):
        self.pagefault += 1

    def getpagetable(self):
        """Render the page table as a tab-separated string."""
        lines = ["Process " + self.pid + ":\n", "Page\tFrame\n"]
        for page, frame in self.pages:
            lines.append(str(page) + "\t" + str(frame) + "\n")
        return "".join(lines)
class mainmemory:
    """Simulated physical memory: a list of (pid, page) frames plus an LRU
    reference stack used to pick replacement victims."""

    class MemError(Exception):
        """Raised when every frame is occupied."""
        def __init__(self, value):
            self.value = value

        def __str__(self):
            return repr(self.value)

    def __init__(self, total):
        # memory: list of (pid, page); the list index is the frame number.
        self.memory = []
        # In refstack, left is most recently used, right
        # is what will be removed if needed
        self.refstack = deque([])
        self.size = total
        # Error from the most recent getindex() lookup (None on success).
        self.e = None

    def __str__(self):
        s = ("Frame #\t ProcID\t Page#\n")
        for x in range(len(self.memory)):
            curr = self.memory[x]
            pid = curr[0]
            page = curr[1]
            s += (str(x) + "\t" + str(pid) + "\t" + str(page) + "\n")
        return s

    def getmem(self):
        return self.memory

    def isinmemory(self, pid, page):
        return (pid, page) in self.memory

    def getindex(self, pid, page):
        """Return the frame index of (pid, page), or None after recording the
        lookup failure in self.e."""
        # BUG FIX: clear any stale error first. Previously a single failed
        # lookup left self.e set forever, permanently disabling topofstack().
        self.e = None
        try:
            return (self.memory.index((pid, page)))
        except ValueError as ex:  # FIX: "as" syntax (valid on Python 2.6+ and 3).
            self.e = ex

    #Puts the index of the (pid, page) tuple in memory as the
    #most recently used.
    def topofstack(self, pid, page):
        index = self.getindex(pid, page)
        if not self.e:
            try:
                self.refstack.remove(index)
            except ValueError:
                pass
            self.refstack.appendleft(index)

    def addtomemory(self, pid, page):
        """Place (pid, page) in the next free frame; raise MemError when full."""
        if(len(self.memory) >= self.size):
            raise self.MemError("Out of Memory")
        self.memory.append((pid, page))
        return self.memory.index((pid, page))

    def getstack(self):
        return self.refstack

    #Determines the LRU page and replaces it
    def lru(self, pid, page):
        #Index of main mem page to be replaced
        index = self.refstack.pop()
        #Removes the frame at the specified index and replaces it
        self.memory.pop(index)
        self.memory.insert(index, (pid, page))
        return index
|
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
# (c) DevOpsHQ, 2016
# Integration YouTrack and Zabbix alerts.
import yaml
from pyzabbix import ZabbixAPI
import sys
from six.moves.urllib.parse import quote
import logging
import time
import settings
from youtrack.connection import Connection
import re
import urllib3
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
# ------------ START Setup logging ------------
# Root logger fans out to a file handler and a stdout handler, both using
# the same format and the level configured in settings.
logger = logging.getLogger()
logger.setLevel(settings.LOG_LEVEL)
formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
# Log to file
fh = logging.FileHandler(settings.LOG_FILE_NAME)
fh.setLevel(settings.LOG_LEVEL)
fh.setFormatter(formatter)
logger.addHandler(fh)
# Log to stdout
ch = logging.StreamHandler()
ch.setLevel(settings.LOG_LEVEL)
ch.setFormatter(formatter)
logger.addHandler(ch) # Use logger to log information
# Log from pyzabbix
log = logging.getLogger('pyzabbix')
log.addHandler(ch)
log.addHandler(fh)
log.setLevel(settings.LOG_LEVEL)
# ------------ END Setup logging ------------

# ------------ START ZabbixAPI block ------------
Zbx = ZabbixAPI(settings.ZBX_SERVER)
# NOTE(review): TLS certificate verification disabled (matches the
# InsecureRequestWarning suppression above) -- needed for self-signed
# installs but weakens transport security.
Zbx.session.verify = False
Zbx.login(settings.ZBX_USER, settings.ZBX_PASSWORD)
# ------------ END ZabbixAPI block ------------
# ------------ START Function declaration ------------
def ExecAndLog(connection, issueId, command="", comment=""):
    """Log a YouTrack command at debug level, then execute it on the issue."""
    message = "Run command in {issueId}: {command}. {comment}".format(
        issueId=issueId,
        command=command,
        comment=comment)
    logger.debug(message)
    connection.executeCommand(issueId=issueId, command=command, comment=comment)
# ------------ END Function declaration ------------
def updateIssue(connection, issueId, summary, description):
    """POST a new summary and description for the given YouTrack issue,
    URL-encoding both values."""
    request_path = "/issue/{issueId}?summary={summary}&description={description}".format(
        issueId=issueId,
        summary=quote(summary),
        description=quote(description))
    connection._req('POST', request_path)
def Main(sendTo, subject, yamlMessage):
    """
    Workflow Zabbix-YouTrack: find or create the YouTrack issue for the
    Zabbix trigger described in *yamlMessage* and sync state/comments.
    :param sendTo: URL to Youtrack (ex. https://youtrack.example.com)
    :param subject: subject from Zabbix Action (currently unused)
    :param yamlMessage: YAML message from Zabbix Action with keys
        Name, Text, Hostname, Status, Severity, EventID, TriggerID
    :return: None
    """
    # ----- Use below example yamlMessage to debug -----
    # yamlMessage = """Name: 'Test Zabbix-YT workflow, ignore it'
    # Text: 'Agent ping (server:agent.ping()): DOWN (1) '
    # Hostname: 'server.exmpale.ru'
    # Status: "OK"
    # Severity: "High"
    # EventID: "96976"
    # TriggerID: "123456789012" """
    # Fix: use safe_load -- the payload is plain scalars, and yaml.load()
    # without an explicit Loader can construct arbitrary objects from
    # crafted input.
    messages = yaml.safe_load(yamlMessage)
    # ----- START Issue parameters -----
    # Correspondence between the YouTrackPriority and ZabbixSeverity
    # Critical >= High
    # Normal < High
    ytPriority = 'Normal'
    if messages['Severity'] == 'Disaster' or messages['Severity'] == 'High':
        ytPriority = 'Critical'
    # The "ZabbixTriggerID::<id>" marker in the summary is the lookup key.
    ytName = "{} ZabbixTriggerID::{}".format(messages['Name'], messages['TriggerID'])
    # ----- END Issue parameters -----
    # ----- START Youtrack Issue description -----
    # Search link listing every issue for this host
    searchString = "Hostname: '{}'".format(messages['Hostname'])
    linkToHostIssue = "{youtrack}/issues/{projectname}?q={query}".format(
        youtrack=sendTo,
        projectname=settings.YT_PROJECT_NAME,
        query=quote(searchString, safe='')
    )
    issueDescription = """
{ytName}
-----
{yamlMessage}
-----
- [Zabbix Dashboard]({zabbix}?action=dashboard.view)
- Show all issue for [*this host*]({linkToHostIssue})
""".format(
        ytName=ytName,
        yamlMessage=yamlMessage,
        zabbix=settings.ZBX_SERVER,
        linkToHostIssue=linkToHostIssue, )
    # ----- END Youtrack Issue description -----
    # Create connect to Youtrack API
    connection = Connection(sendTo, token=settings.YT_TOKEN)
    # ----- START Youtrack get or create issue -----
    # Get issue if exist: search the project for the TriggerID marker
    createNewIssue = False
    logger.debug("Get issue with text '{}'".format(messages['TriggerID']))
    issue = connection.getIssues(settings.YT_PROJECT_NAME,
                                 "ZabbixTriggerID::{}".format(messages['TriggerID']),
                                 0,
                                 1)
    if len(issue) == 0:
        createNewIssue = True
    else:
        # if issue contains TriggerID in summary, then it's good issue
        # else create new issue, this is bad issue, not from Zabbix
        if "ZabbixTriggerID::{}".format(messages['TriggerID']) in issue[0]['summary']:
            issueId = issue[0]['id']
            issue = connection.getIssue(issueId)
        else:
            createNewIssue = True
    # Create new issue
    if createNewIssue:
        logger.debug("Create new issue because it is not exist")
        issue = connection.createIssue(settings.YT_PROJECT_NAME,
                                       'Unassigned',
                                       ytName,
                                       issueDescription,
                                       priority=ytPriority,
                                       subsystem=settings.YT_SUBSYSTEM,
                                       state="Open",
                                       type=settings.YT_TYPE,
                                       )
        time.sleep(3)  # give YouTrack a moment before reading the issue back
        # Parse ID for new issue from the create response location
        result = re.search(r'(PI-\d*)', issue[0]['location'])
        issueId = result.group(0)
        issue = connection.getIssue(issueId)
        logger.debug("Issue have id={}".format(issueId))
        # Set issue service
        ExecAndLog(connection, issueId, "Исполнитель {}".format(settings.YT_SERVICE))
        # Update priority
        ExecAndLog(connection, issueId, "Priority {}".format(ytPriority))
    # ----- END Youtrack get or create issue -----
    # ----- START PROBLEM block ------
    if messages['Status'] == "PROBLEM":
        # Reopen if Fixed or Verified or Canceled
        if issue['State'] == u"На тестировании" or issue['State'] == u"Завершена" or issue['State'] == u"Исполнение не планируется":
            # Reopen Issue
            ExecAndLog(connection, issueId, "State Open")
            # Drop the assignee so the reopened issue is triaged again
            ExecAndLog(connection, issueId, command="Исполнитель Unassigned")
            # Update summary and description for issue
            logger.debug("Run command in {issueId}: {command}".format(issueId=issueId,
                                                                      command="""Update summary and description with connection.updateIssue method"""
                                                                      ))
            updateIssue(connection, issueId=issueId, summary=ytName, description=issueDescription)
        # Add comment
        logger.debug("Run command in {issueId}: {command}".format(issueId=issueId,
                                                                  command="""Now is PROBLEM {}""".format(
                                                                      messages['Text'])
                                                                  ))
        connection.executeCommand(issueId=issueId,
                                  command="comment",
                                  comment=settings.YT_COMMENT.format(
                                      status=messages['Status'],
                                      text=messages['Text'])
                                  )
        # Send ID to Zabbix:
        logger.debug("ZABBIX-API: Send Youtrack ID to {}".format(messages['EventID']))
        Zbx.event.acknowledge(eventids=messages['EventID'], action=4, message="Create Youtrack task")
        Zbx.event.acknowledge(eventids=messages['EventID'], action=4,
                              message=(settings.YT_SERVER + "/issue/{}").format(issueId))
    # ----- End PROBLEM block ------
    # ----- Start OK block -----
    if messages['Status'] == "OK":
        if issue['State'] == u"На тестировании":
            # Verify if Fixed
            ExecAndLog(connection, issueId, command="State Завершена")
        else:
            if issue['State'] == u"Открыта":
                ExecAndLog(connection, issueId, command="State Требует анализа проблемы")
        logger.debug("Run command in {issueId}: {command}".format(issueId=issueId,
                                                                  command="""Now is OK {}""".format(messages['Text'])
                                                                  ))
        connection.executeCommand(issueId=issueId,
                                  command="comment",
                                  comment=settings.YT_COMMENT.format(
                                      status=messages['Status'],
                                      text=messages['Text'])
                                  )
    # ----- End OK block -----
if __name__ == "__main__":
    logger.debug("Start script with arguments: {}".format(sys.argv[1:]))
    try:
        # Arguments WIKI: https://www.zabbix.com/documentation/3.0/ru/manual/config/notifications/media/script
        # Python 2: argv items are bytes, hence .decode('utf-8')
        Main(
            settings.YT_SERVER,  # to
            sys.argv[2].decode('utf-8'),  # subject
            sys.argv[3].decode('utf-8'),  # body
            # FYI: Next argument used in code:
            # sys.argv[4],  # YT_PASSWORD
            # sys.argv[5],  # ZBX_PASSWORD
        )
    except Exception:
        logger.exception("Exit with error")  # Output exception
        exit(1)
import unittest
import numpy
import chainer
import chainer.functions as F
from chainer import testing
class TestFunction(unittest.TestCase):
    """Smoke tests for chainer.Function bookkeeping (rank, grad accumulation,
    default label)."""
    def test_forward(self):
        # Three inputs with hand-assigned ranks; the outputs' rank should be
        # the maximum input rank plus two (see comment below).
        xs = (chainer.Variable(numpy.array([0])),
              chainer.Variable(numpy.array([0])),
              chainer.Variable(numpy.array([0])))
        xs[0].rank = 1
        xs[1].rank = 3
        xs[2].rank = 2
        ys = F.identity(*xs)
        self.assertEqual(len(ys), len(xs))
        for y in ys:
            # rank is (maximum rank in xs) + 2, since Function call
            # automatically inserts Split function.
            self.assertEqual(y.rank, 5)
    def test_backward(self):
        # x feeds two identity branches; its gradient must accumulate to 2.
        x = chainer.Variable(numpy.array([1]))
        y1 = F.identity(x)
        y2 = F.identity(x)
        z = y1 + y2
        z.grad = numpy.array([1])
        z.backward(retain_grad=True)
        self.assertEqual(y1.grad[0], 1)
        self.assertEqual(y2.grad[0], 1)
        self.assertEqual(x.grad[0], 2)
    def test_label(self):
        # Default label is just the class name.
        self.assertEqual(chainer.Function().label,
                         'Function')
testing.run_module(__name__, __file__)
|
#!/usr/bin/python3
import datetime
import sys
import time
from collections import defaultdict
from as6libs import get_sun_info
import os
import requests
from urllib.request import urlretrieve
import json
# Load the shared allsky configuration (camera IPs, credentials, paths).
json_file = open('../conf/as6.json')
json_str = json_file.read()
json_conf = json.loads(json_str)
def set_camera_time():
    """Sync the camera clock: push current date/time, timezone and NTP host
    to the camera's date_cgi endpoint (uses module globals cam_ip/cams_pwd)."""
    now = datetime.datetime.now()
    parts = ["year=%Y", "month=%m", "day=%d", "hour=%H", "minute=%M", "second=%S"]
    req_str = "&".join(now.strftime(p) for p in parts)
    print ("Set datetime, timezone and NTP server.")
    url = ("http://" + str(cam_ip) + "/cgi-bin/date_cgi?action=set&user=admin&pwd="
           + cams_pwd + "&timezone=14&ntpHost=clock.isc.org&" + req_str)
    print (url)
    r = requests.get(url)
    print (r.text)
def nighttime_settings( ):
    """Apply the night-mode exposure/gain/IR profile and record status='night'.

    Uses module globals cam_ip, cams_dir, cam_num, brightness, contrast,
    gamma, BLC set by the dispatcher at the bottom of the script.
    """
    # set exposure time to 50.
    # Fix: '&param...' restored -- the query keys had been corrupted into
    # '¶m...' (the '&para;' HTML entity) by an encoding round trip.
    r = requests.get("http://" + cam_ip + "/webs/btnSettingEx?flag=1000&paramchannel=0&paramcmd=1058&paramctrl=50&paramstep=0&paramreserved=0&")
    time.sleep(3)
    print ("Nighttime settings...")
    print ("turn off wdr")
    WDR(0)
    time.sleep(1)
    print ("fix ir")
    fix_ir()
    # saturation
    set_special("1004", "255")
    ### Set gains to auto ###
    set_special("1084", "0")
    set_special("1087", "0")
    set_special("1085", "0")
    ### BW/COLOR
    print ("set BW to color=0 BW=2")
    set_special("1036", "0")
    ### BLC
    print ("set BLC")
    set_special("1017", BLC)
    ### SET AGAIN
    set_special("1056", 180)
    ### SET AGAIN
    set_special("1056", 176)
    ### SET DGAIN HIGH to SHOCK SYSTEM
    set_special("1086", 0)
    time.sleep(1)
    ### SET DGAIN to Value we actually want
    set_special("1086", 41)
    print ("set BRIGHTNESS")
    set_setting("Brightness", brightness)
    print ("set CONTRAST")
    set_setting("Contrast", contrast)
    print ("set GAMA")
    set_setting("Gamma", gamma)
    #set_setting("InfraredLamp", "low")
    #set_setting("TRCutLevel", "low")
    # Persist the mode marker so the next run can skip re-applying settings.
    # (with-statement fixes the handle never being closed on exceptions)
    with open(cams_dir + "/temp/status" + cam_num + ".txt", "w") as status_file:
        status_file.write("night")
    #os.system("./allsky6-calibrate.py read_noise " + cam_num)
def daytime_settings():
    """Apply the day-mode profile and record status='day'.

    Uses module globals cam_ip, cams_dir, cam_num, brightness, contrast,
    gamma set by the dispatcher at the bottom of the script.
    """
    ### saturation
    set_special("1004", "128")
    ### Set gains to auto ###
    set_special("1084", "0")
    set_special("1087", "0")
    set_special("1085", "0")
    ### BW/COLOR
    print ("set BW")
    set_special("1036", "0")
    # toggle WDR off and back on (presumably to force the sensor to re-latch
    # the day profile -- TODO confirm against camera docs)
    WDR(1)
    time.sleep(2)
    WDR(0)
    time.sleep(2)
    WDR(1)
    time.sleep(2)
    ### IR mode
    #set_special("1064", "2")
    ### BLC
    set_special("1017", "30")
    set_setting("Brightness", brightness)
    set_setting("Gamma", gamma)
    set_setting("Contrast", contrast)
    #set_setting("InfraredLamp", "low")
    #set_setting("TRCutLevel", "low")
    # Fix: path was cams_dir + "temp/..." (missing "/"), inconsistent with
    # nighttime_settings() and with the reader at startup, so the 'day'
    # marker was written to the wrong file unless cams_dir ended in "/".
    with open(cams_dir + "/temp/status" + cam_num + ".txt", "w") as status_file:
        status_file.write("day")
def set_special(field, value):
    """Send a low-level btnSettingEx command *field* with *value* to the camera.

    Fix: '&param...' restored -- the query keys had been corrupted into
    '¶m...' (the '&para;' HTML entity), which breaks the query string.
    """
    url = ("http://" + str(cam_ip) + "/webs/btnSettingEx?flag=1000&paramchannel=0&paramcmd="
           + str(field) + "&paramctrl=" + str(value) + "&paramstep=0&paramreserved=0")
    print (url)
    r = requests.get(url)
    print (r.text)
def WDR(on):
    """Toggle wide dynamic range: cmd 1037 with ctrl = *on* (0 off / 1 on).

    Fix: '&param...' restored -- keys had been corrupted into '¶m...'
    (the '&para;' HTML entity).
    """
    #WDR ON/OFF
    url = ("http://" + str(cam_ip) + "/webs/btnSettingEx?flag=1000&paramchannel=0&paramcmd=1037&paramctrl="
           + str(on) + "&paramstep=0&paramreserved=0")
    print (url)
    r = requests.get(url)
    print (r.text)
def fix_ir():
    """Issue the fixed sequence of IR-related btnSettingEx commands.

    Fix: '&param...' restored in every URL -- keys had been corrupted into
    '¶m...' (the '&para;' HTML entity). The cmd numbers are opaque camera
    firmware codes; the comments below are from the original author.
    """
    print ("Fixing IR settings.")
    url = "http://" + str(cam_ip) + "/webs/btnSettingEx?flag=1000&paramchannel=0&paramcmd=1063&paramctrl=0&paramstep=0&paramreserved=0"
    r = requests.get(url)
    #print (r.text)
    time.sleep(1)
    url = "http://" + str(cam_ip) + "/webs/btnSettingEx?flag=1000&paramchannel=0&paramcmd=1047&paramctrl=0&paramstep=0&paramreserved=0"
    r = requests.get(url)
    #print (r.text)
    # open or close
    url = "http://" + str(cam_ip) + "/webs/btnSettingEx?flag=1000&paramchannel=0&paramcmd=1081&paramctrl=1&paramstep=0&paramreserved=0"
    r = requests.get(url)
    #print (r.text)
    #ir direction
    url = "http://" + str(cam_ip) + "/webs/btnSettingEx?flag=1000&paramchannel=0&paramcmd=1067&paramctrl=1&paramstep=0&paramreserved=0"
    r = requests.get(url)
    #print (r.text)
    time.sleep(1)
    # high low ICR
    url = "http://" + str(cam_ip) + "/webs/btnSettingEx?flag=1000&paramchannel=0&paramcmd=1066&paramctrl=0&paramstep=0&paramreserved=0"
    r = requests.get(url)
    #print (r.text)
def set_setting(setting, value):
    """Set one named videoparameter_cgi *setting* on the camera and return
    the raw response body."""
    url = ("http://" + str(cam_ip) + "/cgi-bin/videoparameter_cgi?action=set&user=admin&pwd="
           + cams_pwd + "&action=get&channel=0&" + setting + "=" + str(value))
    response = requests.get(url)
    print (url)
    return(response.text)
def get_settings():
    """Fetch the camera's current video parameters as a key->value mapping.

    The camera responds with one "key=value" pair per line.
    """
    url = ("http://" + str(cam_ip) + "/cgi-bin/videoparameter_cgi?action=get&user=admin&pwd="
           + cams_pwd + "&action=get&channel=0")
    print (url)
    settings = defaultdict()
    r = requests.get(url)
    for line in r.text.splitlines():
        # Fixes: 'set' no longer shadows the builtin, and partition() copes
        # with '=' inside a value (split("=") raised ValueError there) and
        # with blank/malformed lines.
        key, sep, val = line.partition("=")
        if sep:
            settings[key] = val
    return(settings)
# ---- Script entry: pick the day/night profile from the sun's position ----
try:
    cam_num = sys.argv[1]
except:
    # no camera number given -- nothing to do
    cam_num = ""
    exit()
cam_key = 'cam' + str(cam_num)
cam_ip = json_conf['cameras'][cam_key]['ip']
cams_pwd = json_conf['site']['cams_pwd']
cams_dir = json_conf['site']['cams_dir']
try:
    # last applied profile ('day'/'night'), written by *_settings()
    file = open(cams_dir + "/temp/status" + cam_num + ".txt", "r")
    cam_status = file.read()
    print ("CAM STATUS: ", cam_status)
except:
    print ("no cam status file exits.")
    cam_status = ""
time_now = datetime.datetime.today().strftime('%Y/%m/%d %H:%M:%S')
print("TIME", time_now)
sun_status,sun_az,sun_alt = get_sun_info(time_now)
print ("SUN:", sun_status);
set_camera_time()
if sun_status == 'day' or sun_status == 'dusk' or sun_status == 'dawn':
    brightness = json_conf['camera_settingsv1']['day']['brightness']
    contrast = json_conf['camera_settingsv1']['day']['contrast']
    gamma = json_conf['camera_settingsv1']['day']['gamma']
    BLC = json_conf['camera_settingsv1']['day']['BLC']
    # NOTE(review): compares the 'day'/'night' marker against
    # 'day'/'dusk'/'dawn', so dusk/dawn always re-applies -- confirm intended.
    if cam_status != sun_status:
        print ("Daytime settings are not set but it is daytime!", cam_status, sun_status)
        daytime_settings()
    else:
        print ("nothing to do...")
else:
    brightness = json_conf['camera_settingsv1']['night']['brightness']
    contrast = json_conf['camera_settingsv1']['night']['contrast']
    gamma = json_conf['camera_settingsv1']['night']['gamma']
    BLC = json_conf['camera_settingsv1']['night']['BLC']
    # NOTE(review): nighttime_settings() runs unconditionally here and may
    # run a second time just below -- looks like a double-apply; confirm.
    nighttime_settings()
    if cam_status != sun_status:
        print ("Nighttime settings are not set but it is nighttime!", cam_status, sun_status)
        nighttime_settings()
|
#! /usr/bin/env python2.7
#coding=utf-8
#filename: deep_tts.py
import sys
import time
import os
import glob
import matplotlib.pyplot as plt
from os import listdir
from os.path import isfile, join
import numpy as np
from scipy.cluster.vq import whiten
from keras.models import Sequential
from keras.layers.core import Dense, Activation
from keras.layers.recurrent import LSTM
def get_file_list(path):
    """Return the sorted absolute paths of every entry directly in *path*."""
    base = os.path.abspath(path)
    return sorted(os.path.join(base, entry) for entry in os.listdir(base))
#input the input feature dir
def preprocess(dirname):
files = get_file_list(dirname)
index = 0
max_file_nums = -1
total_cmp_mat = np.empty(shape=(0,0))
for textdeep_name in files:
cmp_name = textdeep_name.replace("textdeep", "cmp_nb").replace("TextDeep", "cmp")
print cmp_name
index = index + 1
if index > max_file_nums and max_file_nums > 0:
break
if isfile(cmp_name):
fcmp = open(cmp_name, 'r')
cmp_mat = file2matrix(cmp_name)
if np.shape(total_cmp_mat)[0] == 0 :
total_cmp_mat = cmp_mat
else:
total_cmp_mat = np.concatenate((total_cmp_mat, cmp_mat))
normTotal, ranges, minVals = autoNorm(total_cmp_mat)
print ranges
print minVals
np.savetxt("minval.txt", minVals, fmt="%1.8f")
np.savetxt("ranges.txt", ranges, fmt="%1.8f")
for textdeep_name in files:
cmp_name = textdeep_name.replace("textdeep", "cmp_nb").replace("TextDeep", "cmp")
save_cmp_name = cmp_name.replace("cmp_nb", "preprocess/cmp_nb")
save_textdeep_name = textdeep_name.replace("textdeep", "preprocess/textdeep")
index = index + 1
if index > max_file_nums and max_file_nums > 0:
break
if isfile(cmp_name):
ftext = open(textdeep_name, 'r')
fcmp = open(cmp_name, 'r')
text_mat = file2matrix(textdeep_name)
cmp_mat = file2matrix(cmp_name)
text_mat = text_mat[0:-3]
text_num = np.shape(text_mat)[0]
cmp_num = np.shape(cmp_mat)[0]
if text_num < cmp_num:
continue
elif text_num > cmp_num:
text_mat = text_mat[0:cmp_num]
cmp_mat = norm_with_ranges(cmp_mat, minVals, ranges)
print np.shape(text_mat), np.shape(cmp_mat)
np.savetxt(save_cmp_name, cmp_mat, fmt='%1.8f')
np.savetxt(save_textdeep_name, text_mat, fmt='%1.8f')
print textdeep_name, cmp_name, save_cmp_name, save_textdeep_name
def file2matrix(filename):
    """Load a whitespace-delimited numeric text file into a 2-D numpy array.

    The column count is taken from the first line; every line must have the
    same number of fields.
    """
    # (fix) use a context manager -- the handle was never closed
    with open(filename) as fr:
        arrayOLines = fr.readlines()
    numberOfLines = len(arrayOLines)  # get the number of lines in the file
    length = len(arrayOLines[0].split())
    returnMat = np.zeros((numberOfLines, length))  # prepare matrix to return
    for index, line in enumerate(arrayOLines):
        # (fix) split() instead of split(' '): the width above was computed
        # with split(), so runs of whitespace no longer create empty fields
        returnMat[index, :] = line.split()
    return returnMat
def get_train_data(dirname):
    """Build LSTM training tensors: sliding windows of *maxlen* frames with
    stride *step* over each paired text/cmp feature matrix (Python 2).

    Returns (X, y) with shape (num_windows, maxlen, feature_dim).
    """
    files = get_file_list(dirname)
    maxlen = 30  # window length in frames
    X_list = []
    y_list = []
    step = 3     # window stride in frames
    for textdeep_name in files:
        cmp_name = textdeep_name.replace("textdeep", "cmp_nb").replace("TextDeep", "cmp")
        if isfile(cmp_name):
            # NOTE(review): both handles are opened but never used or closed
            ftext = open(textdeep_name, 'r')
            fcmp = open(cmp_name, 'r')
            text_mat = file2matrix(textdeep_name)
            cmp_mat = file2matrix(cmp_name)
            print cmp_mat[1,2]  # first index is the file row, second the column
            rows_num = np.shape(text_mat)[0]
            for i in range(0, rows_num - maxlen, step):
                X_list.append(text_mat[i:i+maxlen])
                y_list.append(cmp_mat[i:i+maxlen])
    X = np.array(X_list)
    y = np.array(y_list)
    print np.shape(X)
    print X[0, 1, 2]
    return X, y
def train(dirname):
    """Train the text->acoustic LSTM (old Keras 1.x API) on windows from
    *dirname* and save the weights to model_weights.h5 (Python 2)."""
    X_train, y_train = get_train_data(dirname)
    in_neurons = 222;      # input feature width -- must match the text matrices
    hidden_neurons = 512;  # LSTM width
    out_neurons = 42;      # output width -- must match the cmp matrices
    model = Sequential()
    model.add(LSTM(output_dim=hidden_neurons, input_dim=in_neurons, return_sequences=True))
    model.add(Dense(output_dim=out_neurons, input_dim=hidden_neurons))
    model.add(Activation("linear"))
    model.compile(loss="mean_squared_error", optimizer="rmsprop")
    #json_string = model.to_json()
    #open('model_architecture.json', 'w').write(json_string)
    model.fit(X_train, y_train, batch_size=450, nb_epoch=10, validation_split=0.05)
    model.save_weights('model_weights.h5')
    #model reading
    #model = model_from_json(open('my_model_architecture.json').read())
    #model.load_weights('my_model_weights.h5')
def autoNorm(dataSet):
    """Column-wise min-max normalisation.

    Returns (normalised data, column ranges, column minima). A tiny epsilon
    in the ranges keeps constant columns from dividing by zero.
    """
    minVals = dataSet.min(0)
    ranges = dataSet.max(0) - minVals + 1e-8
    # broadcasting over rows replaces the original np.tile copies
    normDataSet = (dataSet - minVals) / ranges
    return normDataSet, ranges, minVals
def norm_with_ranges(dataSet, minVals, ranges):
    """Apply a previously computed column-wise min/range normalisation
    (same scaling autoNorm would produce with these minVals/ranges)."""
    # broadcasting over rows replaces the original np.tile copies
    return (dataSet - minVals) / ranges
def f0_statistics(dir_name, class_stat=False):
    """Print per-file F0 statistics, or pool all voiced frames and plot a
    corpus-wide histogram when class_stat is True (Python 2)."""
    np.set_printoptions(precision=2)
    files = get_file_list(dir_name)
    total_f0_mat = np.empty(shape=(0))
    for f0_name in files:
        if isfile(f0_name):
            fcmp = open(f0_name, 'r')  # NOTE(review): opened, never used or closed
            f0_mat = file2matrix(f0_name)
            # keep voiced frames only (F0 > 1)
            voice_indexs = (f0_mat > 1)
            f0_mat = f0_mat[voice_indexs]
            if class_stat:
                total_f0_mat = np.concatenate((total_f0_mat, f0_mat))
            else:
                max_vals = f0_mat.max()
                min_vals = f0_mat.min()
                mean_vals = f0_mat.mean()
                var_vals = f0_mat.std()
                print f0_name, " %.2f %.2f %.2f %.2f" %(max_vals, min_vals, mean_vals, var_vals)
    if class_stat:
        # NOTE(review): these scalars come from f0_mat, i.e. the *last* file
        # processed, while the histogram below uses the pooled total_f0_mat --
        # confirm whether total_f0_mat was intended here.
        max_val = f0_mat.max()
        min_val = f0_mat.min()
        mean_val = f0_mat.mean()
        var_val = f0_mat.std()
        #hist, bins = np.histogram(total_f0_mat, bins=50, density=True)
        #width = 0.7 * (bins[1] - bins[0])
        #center = (bins[:-1] + bins[1:]) / 2
        #plt.bar(center, hist, align='center', width=width)
        #print np.sum(hist)
        #plt.show()
        plt.hist(total_f0_mat.astype(int), 50, normed=0, facecolor='green')
        plt.xlabel('f0')
        plt.ylabel('Frequency')
        plt.title('xin150 distribute')
        plot_str = "$\mu=%.2f,\ \sigma=%.2f$" % (mean_val, var_val)
        plot_maxmin = "max=%.2f, min=%.2f" %(max_val, min_val)
        print plot_str
        #plt.text(60, .025, r'$\mu=100,\ \sigma=15$')
        plt.text(120, 100, plot_str)
        plt.text(120, 2000, plot_maxmin)
        plt.show()
        print "class_stat %2.f %2.f %.2f %.2f" %(max_val, min_val, mean_val, var_val)
if __name__ == '__main__':
    # Entry point: currently plots pooled F0 statistics for one corpus;
    # the other pipeline stages are kept commented out for manual use.
    #preprocess('/Users/sooda/data/deep_tts_data/textdeep/')
    #train('/Users/sooda/data/deep_tts_data/preprocess/textdeep/')
    #f0_statistics("/Users/sooda/data/tts/labixx500/hts/data/lf0_nb/", True)
    f0_statistics("/Users/sooda/data/tts/jj_lf0/xin150/", True)
|
# https://www.practicepython.org/exercise/2014/06/06/17-decode-a-web-page.html
import requests
from bs4 import BeautifulSoup
url = 'http://www.nytimes.com/'
# Download the front page and parse it.
response = requests.get(url)
soup = BeautifulSoup(response.text, 'html.parser')
for story_heading in soup.find_all(class_="story-heading"):
    # Linked headings: print the anchor text with newlines collapsed.
    # Plain headings: print their first content node, trimmed.
    anchor = story_heading.a
    if anchor:
        print(anchor.text.replace("\n", " ").strip())
    else:
        print(story_heading.contents[0].strip())
|
import requests
import pandas as pd
#Year and game range for scraping NBA Play-by-Play: 82 game seasons (66 games in 2011, 50 games in 1998); 29 teams 1996 - 2003, 30 teams 2004 onward
games = range(1,991)
years = [2011]
for year in years:
    season = str(year) + '-' + str(year+1)
    playbyplay_df = pd.DataFrame()
    for game in games:
        print year, game
        # GameID encoding: 20000000 + game number + (year-2000)*100000,
        # prefixed with "00" in the URL below
        gameid = 20000000 + game + ((year-2000)*100000)
        url = 'http://stats.nba.com/stats/playbyplayv2?EndPeriod=10&EndRange=55800&GameID=00' + str(gameid) + '&RangeType=2&Season=' + season + '&SeasonType=Regular+Season&StartPeriod=1&StartRange=0'
        res = requests.get(url)
        # first resultSet holds the play-by-play rows; headers name the columns
        cols = res.json()['resultSets'][0]['headers']
        rows = res.json()['resultSets'][0]['rowSet']
        df = pd.DataFrame(rows, columns=cols)
        playbyplay_df = playbyplay_df.append(df)
    # one CSV per season
    playbyplay_df.to_csv('playbyplay' + str(year) + '.csv')
import re
import urllib.request
from urllib.request import Request
url = "https://www.google.com/search?q="
try:
    # Search Google for the company and scrape the first "US$" price snippet.
    stockFinder = input("Enter any company (belonging to USA) name to find its stock.\n")
    url = url + stockFinder
    print("The url of your preferred site is " + url)  # (fix) typo "preffered"
    newUrl = Request(url, headers={'User-Agent': 'Mozilla/5.0'})
    dataOfSite = urllib.request.urlopen(newUrl).read()
    actualData = dataOfSite.decode("utf-8")
    m = re.search("US[$]", actualData)
    if m is None:
        # (fix) previously m.start() raised AttributeError and the bare
        # except masked it -- handle the "no US$ marker" case explicitly
        print("Entered company name does not belong to USA!!!")
    else:
        # marker plus the next 10 characters (the quoted price)
        stockValue = actualData[m.start():m.end() + 10]
        print("Stock of the entered company is: " + stockValue)
except Exception:
    # (fix) narrowed from a bare except, which also swallowed SystemExit
    # and KeyboardInterrupt; network/decode errors land here
    print("Entered company name does not belong to USA!!!")
|
"""
Practical notes on analysing Ajax-loaded pages.

Open the page with DevTools (Inspect -> Network), clear all requests and
refresh. The first request is the page skeleton -- most of the base markup
is in it -- but the data may be fetched afterwards via Ajax and rendered in
by JavaScript. Copy a piece of the data you want from the page and search
for it in that first response's body (Network -> Preview/Response). If it
is there, you can simply request that page to obtain the information. If
not, the data is loaded by follow-up requests and client-side rendering:
switch the filter from "All" to "XHR" and refresh so that only Ajax
requests are captured, find the one carrying the data you need, analyse
its request method and parameters, and fetch the JSON directly.
"""
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
#
# See COPYING file distributed along with the PyMVPA package for the
# copyright and license terms.
#
### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
"""
Analysis of eye movement patterns
=================================
In this example we are going to look at a classification analysis of eye
movement patterns. Although complex preprocessing steps can be performed to
extract higher-order features from the raw coordinate timeseries provided by an
eye-tracker, we are keeping it simple.
Right after importing the PyMVPA suite, we load the data from a textfile.
It contains coordinate timeseries of 144 trials (recorded with 350 Hz), where
subjects either looked at upright or inverted images of human faces. Each
timeseries snippet covers 3 seconds. This data has been pre-processed to remove
eyeblink artefacts.
In addition to the coordinates we also load trial attributes from a second
textfile. These attributes indicate which image was shown, whether it was
showing a male or female face, and whether it was upright or inverted.
"""
from mvpa.suite import *
# where the demo dataset lives inside the PyMVPA data DB
datapath = os.path.join(pymvpa_datadbroot,
                        'face_inversion_demo', 'face_inversion_demo')
# (X, Y, trial id) for all timepoints
data = np.loadtxt(os.path.join(datapath, 'gaze_coords.txt'))
# (orientation, gender, image id) for each trial
attribs = np.loadtxt(os.path.join(datapath, 'trial_attrs.txt'))
"""
As a first step we put the coordinate timeseries into a dataset, and label each
timepoint with its associated trial ID. We also label the two features
accordingly.
"""
# Coordinate timeseries -> dataset: each timepoint labelled with its trial
# id, the two features named rawX/rawY.
raw_ds = Dataset(data[:,:2],
                 sa = {'trial': data[:,2]},
                 fa = {'fid': ['rawX', 'rawY']})
"""
The second step is down-sampling the data to about 33 Hz, resampling each trial
timeseries individually (using the trial ID attribute to define dataset chunks).
"""
# Downsample each trial's timeseries individually (trial id defines the
# chunks); 100 samples over a 3 s trial is the ~33 Hz mentioned above.
ds = fft_resample(raw_ds, 100, window='hann',
                  chunks_attr='trial', attr_strategy='sample')
"""
Now we can use a :class:`~mvpa.mappers.boxcar.BoxcarMapper` to turn each
trial-timeseries into an individual sample. We know that each sample consists
of 100 timepoints. After the dataset is mapped we can add all per-trial
attributes into the sample attribute collection.
"""
# One boxcar of 100 timepoints per trial turns each trial into one sample.
bm = BoxcarMapper(np.arange(len(ds.sa['trial'].unique)) * 100,
                  boxlength=100)
bm.train(ds)
ds=ds.get_mapped(bm)
# Attach per-trial attributes; per the loadtxt comment, attribs columns are
# (0: orientation, 1: gender, 2: image id).
ds.sa.update({'orient': attribs[:,0].astype(int),
              'gender': attribs[:,1].astype(int),
              # (fix) was attribs[:,1] -- a copy-paste that duplicated the
              # gender column instead of using the image-id column
              'img_id': attribs[:,2].astype(int)})
"""
In comparison with upright faces, inverted ones had prominent features at very
different locations on the screen. Most notably, the eyes were flipped to the
bottom half. To prevent the classifier from using such differences, we flip the
Y-coordinates of inverted trials to align them with the upright condition.
"""
# Mirror the Y coordinate of inverted trials (orient == 1, see the plot
# labels below) about y = 512 so feature locations match the upright trials.
ds.samples[ds.sa.orient == 1, :, 1] = \
    -1 * (ds.samples[ds.sa.orient == 1, :, 1] - 512) + 512
"""
The current dataset has 100 two-dimensional features, the X and Y
coordinate for each of the hundred timepoints. We use a
:class:`~mvpa.mappers.flatten.FlattenMapper` to convert each sample into a
one-dimensional vector (of length 200). However, we also keep the original
dataset, because it will allow us to perform some plotting much easier.
"""
# Flatten each (100 timepoints x 2 coords) sample into a length-200 vector.
fm = FlattenMapper()
fm.train(ds)
# want to make a copy to keep the original pristine for later plotting
fds = ds.copy().get_mapped(fm)
# simplify the trial attribute (one scalar id per trial instead of a vector)
fds.sa['trial'] = [t[0] for t in ds.sa.trial]
"""
The last steps of preprocessing are Z-scoring all features
(coordinate-timepoints) and dividing the dataset into 8 chunks -- to simplify
a cross-validation analysis.
"""
# Z-score all features, then assign 8 cross-validation chunks per condition.
zscore(fds, chunks_attr=None)
# for classification divide the data into chunks
nchunks = 8
chunks = np.zeros(len(fds), dtype='int')
for o in fds.sa['orient'].unique:
    # NOTE(review): len(fds.sa.orient == o) is the length of the boolean
    # mask (total sample count), not the number of matching samples --
    # confirm the masked assignment lines up as intended.
    chunks[fds.sa.orient == o] = np.arange(len(fds.sa.orient == o)) % nchunks
fds.sa['chunks'] = chunks
"""
Now everything is set and we can proceed to the classification analysis. We
are using a support vector machine that is going to be trained on the
``orient`` attribute, indicating trials with upright and inverted faces. We are
going to perform the analysis with a :class:`~mvpa.clfs.meta.SplitClassifier`,
because we are also interested in the temporal sensitivity profile. That one is
easily accessible via the corresponding sensitivity analyzer.
"""
# SVM trained on 'orient'; SplitClassifier gives the cross-validated
# confusion matrix plus per-split sensitivities.
clf = SVM(space='orient')
mclf = SplitClassifier(clf, space='orient',
                       enable_ca=['confusion'])
sensana = mclf.get_sensitivity_analyzer()
sens = sensana(fds)
print mclf.ca.confusion
"""
The 8-fold cross-validation shows a trial-wise classification accuracy of
over 80%. Now we can take a look at the sensitivity. We use the
:class:`~mvpa.mappers.flatten.FlattenMapper` that is stored in the dataset to
unmangle X and Y coordinate vectors in the sensitivity array.
"""
# split mean sensitivities into X and Y coordinate parts by reversing through
# the flatten mapper (presumably (..., 100 timepoints, 2 coords) -- confirm)
xy_sens = fds.a.mapper[-2].reverse(sens).samples
"""
Plotting the results
--------------------
The analysis is done and we can compile a figure to visualize the results.
After some initial preparations, we plot an example image of a face that was
used in this experiment. We align the image coordinates with the original
on-screen coordinates to match them to the gaze track, and overlay the image
with the mean gaze track across all trials for each condition.
"""
# descriptive plots
pl.figure()
# original screen size was
axes = ('x', 'y')
screen_size = np.array((1280, 1024))
screen_center = screen_size / 2
colors = ('r','b')
fig = 1
pl.subplot(2, 2, fig)
pl.title('Mean Gaze Track')
face_img = pl.imread(os.path.join(datapath, 'demo_face.png'))
# determine the extent of the image in original screen coordinates
# to match with gaze position
orig_img_extent=(screen_center[0] - face_img.shape[1]/2,
                 screen_center[0] + face_img.shape[1]/2,
                 screen_center[1] + face_img.shape[0]/2,
                 screen_center[1] - face_img.shape[0]/2)
# show face image and put it with original pixel coordinates
pl.imshow(face_img,
          extent=orig_img_extent,
          cmap=pl.cm.gray)
# overlay the mean gaze path per condition (orient 1 = inverted, 2 = upright)
pl.plot(np.mean(ds.samples[ds.sa.orient == 1,:,0], axis=0),
        np.mean(ds.samples[ds.sa.orient == 1,:,1], axis=0),
        colors[0], label='inverted')
pl.plot(np.mean(ds.samples[ds.sa.orient == 2,:,0], axis=0),
        np.mean(ds.samples[ds.sa.orient == 2,:,1], axis=0),
        colors[1], label='upright')
pl.axis(orig_img_extent)
pl.legend()
fig += 1
"""
The next two subplot contain the gaze coordinate over the peri-stimulus time
for both, X and Y axis respectively.
"""
pl.subplot(2, 2, fig)
pl.title('Gaze Position X-Coordinate')
# NOTE(review): this "X" panel indexes coordinate 1 (rawY per the fa above)
# with the Y extents, while the "Y" panel below uses coordinate 0 with the
# X extents -- the indices/extents look swapped w.r.t. the titles; verify.
plot_erp(ds.samples[ds.sa.orient == 1,:,1], pre=0, errtype = 'std',
         color=colors[0], SR=100./3.)
plot_erp(ds.samples[ds.sa.orient == 2,:,1], pre=0, errtype = 'std',
         color=colors[1], SR=100./3.)
pl.ylim(orig_img_extent[2:])
pl.xlabel('Peristimulus Time')
fig += 1
pl.subplot(2, 2, fig)
pl.title('Gaze Position Y-Coordinate')
plot_erp(ds.samples[ds.sa.orient == 1,:,0], pre=0, errtype = 'std',
         color=colors[0], SR=100./3.)
plot_erp(ds.samples[ds.sa.orient == 2,:,0], pre=0, errtype = 'std',
         color=colors[1], SR=100./3.)
pl.ylim(orig_img_extent[:2])
pl.xlabel('Peristimulus Time')
fig += 1
"""
The last panel has the associated sensitivity profile for both coordinate axes.
"""
pl.subplot(2, 2, fig)
pl.title('SVM-Sensitivity Profiles')
# per-timepoint SVM weights, X and Y coordinate plotted separately
lines = plot_err_line(xy_sens[..., 0], linestyle='-', fmt='ko', errtype='std')
lines[0][0].set_label('X')
lines = plot_err_line(xy_sens[..., 1], linestyle='-', fmt='go', errtype='std')
lines[0][0].set_label('Y')
pl.legend()
pl.ylim((-0.1, 0.1))
pl.xlim(0,100)
pl.axhline(y=0, color='0.6', ls='--')
pl.xlabel('Timepoints')
from mvpa.base import cfg
# only pop up windows when the examples config allows interactivity
if cfg.getboolean('examples', 'interactive', True):
    # show all the cool figures
    pl.show()
"""
The following figure is not exactly identical to the product of this code, but
rather shows the result of a few minutes of beautifications in Inkscape_.
.. _Inkscape: http://www.inkscape.org/
.. figure:: ../pics/ex_eyemovements.*
:align: center
Gaze track for viewing upright vs. inverted faces. The figure shows the mean
gaze path for both conditions overlayed on an example face. The panels to
the left and below show the X and Y coordinates over the trial timecourse
(shaded area corresponds to one standard deviation across all trials above
and below the mean). The black curve depicts the associated temporal SVM
weight profile for the classification of both conditions.
"""
|
import pyodbc
#Variables to connect to DB
server = 'localhost,1433'
database = 'Northwind'
username = 'SA'
password = 'Passw0rd2018'  # NOTE(review): hard-coded credentials; move to env/config
docker_northwind = pyodbc.connect('DRIVER={ODBC Driver 17 for SQL Server};SERVER='+server+';DATABASE='+database+';UID='+username+';PWD='+password)
#What is a cursor? A handle used to execute statements and iterate results.
cursor = docker_northwind.cursor()
#This is us executing a SQL query
cursor.execute("SELECT * FROM Customers;")
#From the cursor we can fetch one row
# row = cursor.fetchone()
# print(row)
#We can also Fetch all rows
all_customers = cursor.execute("SELECT * FROM Customers;").fetchall() #...Fetch all is Dangerous as it can block our CPU with huge amount of data!
# print (all_customers)
# for row in all_customers:
#     print(row.ContactName, row.Fax)
#Because this is dangerous, we can use a while loop to fetchone() until record/row is none(break)
all_products = cursor.execute("SELECT * FROM Products;")
#This is more memory-efficient than fetchall(): one row at a time
while True:
    row_record = all_products.fetchone()
    if row_record is None:
        break
    print(row_record.UnitPrice)
#We can use column name to retrieve specific data
all_null_fax = cursor.execute("SELECT ContactName, CompanyName, Phone FROM Customers WHERE Fax IS NULL;")
while True:
    row_record = all_null_fax.fetchone()
    if row_record is None:
        break
    print(row_record.ContactName,'--', row_record.CompanyName,'-- Phone:', row_record.Phone)
|
from datetime import datetime, timedelta
from pytz import timezone
import calendar
def get_time():
    """Return the current time in Sydney formatted as 'HH:MM:SS'."""
    sydney = timezone('Australia/Sydney')
    return datetime.today().astimezone(sydney).strftime('%H:%M:%S')
# print(get_time('%m-%d-%Y'))
def get_date():
    """Return the current Sydney date as 'Weekday D-Mon-YYYY'."""
    aus = timezone('Australia/Sydney')
    td = datetime.today()
    aus_dt = td.astimezone(aus)
    arr = ['Jan','Feb','Mar','Apr','May','Jun',
           'July','Aug','Sep','Oct','Nov','Dec']
    # (fix) month is 1-based, arr is 0-based: arr[aus_dt.month] shifted every
    # month name forward by one and raised IndexError in December.
    str_date = calendar.day_name[aus_dt.weekday()]+' '+str(aus_dt.day)+'-'+arr[aus_dt.month - 1]+'-'+str(aus_dt.year)
    return str_date
print(get_time()) |
from services.twitter import Twitter
import pyfiglet
class Browse:
    """Interactive menu-driven Twitter CLI built on the Twitter service wrapper."""
    # Available menu entries; id 99 is the "quit" entry and is printed last.
    menus = [
        { 'id' : 0, 'title' : 'Read my home timeline' },
        { 'id' : 1, 'title' : 'Stalk someone timeline' },
        { 'id' : 2, 'title' : 'Retweet a tweet' },
        { 'id' : 3, 'title' : 'Like a tweet' },
        { 'id' : 4, 'title' : 'Reply a tweet' },
        { 'id' : 5, 'title' : 'Tweet to the world' },
        { 'id' : 99, 'title' : 'Get out from here! my boss is coming' }
    ]
    # Timeline pagination: fetch up to tweet_limit tweets, show per_page each page.
    tweet_limit = 200
    per_page = 5
    # Shared Twitter API client (class attribute, created at import time).
    twitter = Twitter()
    def printTitle(self, title):
        # Render a section title as ASCII art.
        text = pyfiglet.figlet_format(title)
        print(text)
    def showMenu(self):
        """Print the menu, read the user's choice, and dispatch it."""
        menus = self.menus
        print("I want to ..")
        for menu in menus:
            item = '[' + str(menu['id']) + '] ' + menu['title']
            # Blank line before the quit entry to set it apart visually.
            if menu['id'] == 99 :
                print("")
            print(item)
        print("")
        chooseMenu = input('What do you want ? [0, 1, 2] : ')
        print("")
        self.menuController(chooseMenu)
    def menuController(self, idMenu):
        """Dispatch a raw menu choice (string) to the matching action.

        Every branch except exit falls through to showMenu() again, so the
        CLI keeps looping until the user picks 99.
        """
        if idMenu == "0" :
            self.showTimeline()
        elif idMenu == "1" :
            screen_name = input('whom you will stalk? [type username] ')
            self.showTimeline(False, screen_name)
        elif idMenu == "2" :
            tweet_id = input('which tweet you want to retweet [type tweet id] ')
            self.twitter.retweet(tweet_id)
            print('')
        elif idMenu == "3" :
            tweet_id = input('which tweet you want to like [type tweet id] ')
            self.twitter.likeTweet(tweet_id)
            print('')
        elif idMenu == "4" :
            tweet_id = input('which tweet you want to reply [type tweet id] ')
            message = input('type your reply : ')
            self.twitter.replyTweet(message, tweet_id)
            print('')
        elif idMenu == "5" :
            message = input('type your tweet : ')
            self.twitter.postTweet(message)
            print('')
        elif idMenu == "99" :
            exit()
        else:
            print('whoops!')
            print("")
        # NOTE(review): this recurses rather than loops, so very long
        # sessions keep growing the call stack.
        self.showMenu()
        print("")
    def showTimeline(self, isHome = True, screen_name = ''):
        """Page through the home timeline (isHome) or a user's timeline."""
        if isHome :
            self.printTitle('timeline')
            timeline = self.twitter.getHomeTimeline(self.tweet_limit)
        else :
            if screen_name == '' :
                print('screen name not specified')
                self.showMenu()
            else:
                self.printTitle(screen_name + " timeline's")
                timeline = self.twitter.getUserTimeline(screen_name, self.tweet_limit)
        # Pagination over the fetched tweets, per_page posts per screen.
        current_page = 0
        last_page = self.tweet_limit / self.per_page
        while current_page != last_page :
            firstIndex = current_page * self.per_page
            lastIndex = firstIndex + self.per_page
            print('timeline page ' + str(current_page + 1))
            print('')
            showTimeline = timeline[firstIndex:lastIndex]
            for post in showTimeline :
                print('id : ' + post['id_str'])
                print('username : ' + post['user']['screen_name'])
                print(post['text'])
                print("")
                print(str(post['retweet_count']) + ' retweet, ' + str(post['favorite_count']) + ' like')
                print(post['created_at'])
                print("====================================================================================================================")
                print("")
            action = input('press any key to continue or "m" to back to menu.. ')
            print("")
            if(action == "m"):
                self.showMenu()
                break
            current_page += 1
# Render the ASCII-art banner once, then run the menu loop forever.
twitter_title = pyfiglet.figlet_format("Twitter-CLI", font = "slant")
print(twitter_title)
browse = Browse()
# showMenu() normally recurses into itself; this loop restarts it if it
# ever returns normally.
while True:
    browse.showMenu()
|
#!/usr/local/bin/python3
import pysig as ps
from pysig import DB
#import matplotlib.pyplot as pt
#import numpy
# Build a log-shaped envelope over a linear ramp from (0, 1) to (2, 100)
# and plot it.  NOTE(review): Log/Linear semantics inferred from names --
# confirm against the pysig package documentation.
env = ps.Log(ps.Linear([0,2],[1,100]))
ps.plotsig(env)
|
import sys
from torch.utils.data import Dataset, DataLoader
import os
import os.path as osp
import glob
import numpy as np
import random
import cv2
import pickle as pkl
import json
import h5py
import torch
import matplotlib.pyplot as plt
from lib.utils.misc import process_dataset_for_video
class MPIINFDataset(Dataset):
    """MPI-INF-3DHP pose dataset serving 2D/3D joints in H36M joint order.

    Each item is ``(kps_2d, kps_3d, rot, diff1, diff2, scale)`` where diff1
    is a temporally-near frame of the same sequence and diff2 a
    temporally-far one, used for temporal-consistency training.
    """
    def __init__(self, config, is_train=True):
        # Frame gap used when picking the near pair frame in __getitem__.
        self.frame_interval = config.DATA.FRAME_INTERVAL
        # for mpi dataset, we convert its order to match with h36m
        self.mpi2h36m = [10, 9, 8, 11, 12, 13, 4, 3, 2, 5, 6, 7, 1, 14, 15, 16, 0]
        # randomization will lead to inferior performance
        self.scale_path = "../data/mpi_train_scales.pkl" if config.USE_GT else "../data/mpi_train_scales_pre.pkl"
        self.use_same_norm_2d = config.DATA.USE_SAME_NORM_2D
        self.use_same_norm_3d = config.DATA.USE_SAME_NORM_3D
        self.is_train = is_train
        self.data_path = config.DATA.TRAIN_PATH if self.is_train else config.DATA.VALID_PATH
        # Target normalised head-to-root distance for the 2D poses.
        self.head_root_distance = 1 / config.TRAIN.CAMERA_SKELETON_DISTANCE
        # whether to use the ground-truth 2D keypoints (vs. detector output)
        self.use_gt = config.USE_GT
        self.use_ideal_scale = config.DATA.USE_IDEAL_SCALE
        # Minimum temporal distance allowed for the far pair frame (diff2).
        self.min_diff_dist = config.DATA.MIN_DIFF_DIST
        self.use_scaler = config.TRAIN.USE_SCALER
        self.bound_azim = float(config.TRAIN.BOUND_AZIM) # y axis rotation
        self.bound_elev = float(config.TRAIN.BOUND_ELEV)
        self._load_data_set()
    def _load_data_set(self):
        """Read the h5 file, normalise 2D/3D joints, pre-sample rotations."""
        if self.is_train:
            print('start loading mpiinf {} data.'.format("train" if self.is_train else "test"))
        key = "joint_2d_gt" if self.use_gt else "joint_2d_pre"
        fp = h5py.File(self.data_path, "r")
        self.kp2ds = np.array(fp[key])[:, self.mpi2h36m, :2]
        # Map pixel coordinates into roughly [-1, 1]; assumes a 2048x2048
        # frame (centre 1024) -- TODO confirm for this dataset dump.
        self.kp2ds[:, :, 0] = (self.kp2ds[..., 0] - 1024.0) / 1024.0
        self.kp2ds[:, :, 1] = (self.kp2ds[..., 1] - 1024.0) / 1024.0
        # self.kp2ds = np.maximum(np.minimum(self.kp2ds, 1.0), -1.0)
        # locate root at the origin
        self.kp2ds = self.kp2ds - self.kp2ds[:, 13:14]
        self.kp2ds[:, 13] = 1e-5
        # imagenames will be used to sample frames
        self.imagenames = [name.decode() for name in fp['imagename'][:]]
        if 'seqname' not in fp.keys():
            # first we close the already opened (read-only) h5
            fp.close()
            print("Process corresponding dataset...")
            process_dataset_for_video(self.data_path, is_mpi=True)
            fp = h5py.File(self.data_path, "r")
        self.sequence_lens = np.array(fp['seqlen'])
        self.sequence_names = [name.decode() for name in fp['seqname'][:]]
        self.indices_in_seq = np.array(fp['index_in_seq'])
        # normlize again so that the mean distance of head and root is 1/c
        if not self.use_same_norm_2d:
            factor_gt = self.head_root_distance / (np.tile(np.linalg.norm(self.kp2ds[:, -1] - self.kp2ds[:, 13], axis=1).reshape(-1, 1, 1), (1, 17, 2)) + 1e-8)
        else:
            factor_gt = self.head_root_distance / np.linalg.norm(self.kp2ds[:, -1] - self.kp2ds[:, 13], axis=1).mean()
        self.kp2ds = self.kp2ds * factor_gt
        # 3D joints come in millimetres; convert to metres.
        self.kp3ds = np.array(fp['joint_3d_gt'])[:, self.mpi2h36m, :3] / 1000.0
        # factor_3d = np.linalg.norm(self.kp3ds[:, -1] - self.kp3ds[:, 13], axis=1).mean())
        factor_filename = "../data/mpi_{}_factor_3d.pkl".format("train" if self.is_train else "test")
        if not self.use_same_norm_3d:
            # Per-sample head-root norms, saved so 3D can be un-normalised later.
            factor_3d = (np.tile(np.linalg.norm(self.kp3ds[:, -1] - self.kp3ds[:, 13], axis=1).reshape(-1, 1, 1), (1, 17, 3)) + 1e-8)
            print(factor_3d.shape)
            with open(factor_filename, "wb") as f:
                pkl.dump(factor_3d, f)
        if osp.exists(self.scale_path):
            with open(self.scale_path, "rb") as f:
                self.scales = pkl.load(f)['scale']
        else:
            if self.use_scaler:
                pass
                # raise Warning("You haven't generated the computed scales, if you don't need to observe the scale error during training, \njust ignore this warning because it won't affect training.")
            self.scales = None
        if self.use_ideal_scale:
            # scales computed from projection of 3d
            with open("../data/mpi_{}_scales.pkl".format("train" if self.is_train else "valid"), "rb") as f:
                scales = pkl.load(f)
            self.kp2ds = self.kp2ds * scales
        fp.close()
        print('finished load mpiinf {} data, total {} samples'.format("train" if self.is_train else "test", \
            self.kp2ds.shape[0]))
        # generate the rotation factors
        num_examples = self.kp2ds.shape[0]
        rotation_y = (2 * np.random.random_sample((num_examples, 1)) - 1) * self.bound_azim
        rotation_x = (2 * np.random.random_sample((num_examples, 1)) - 1) * self.bound_elev
        rotation_z = (2 * np.random.random_sample((num_examples, 1)) - 1) * self.bound_elev / 2
        rotation_1 = np.concatenate((rotation_y, rotation_x, rotation_z), axis=1)
        rotation_2 = rotation_1.copy()
        # Second virtual view: same elevation/roll, azimuth rotated half a turn.
        rotation_2[:, 0] = rotation_2[:, 0] + np.pi
        self.rotation = np.concatenate((rotation_1, rotation_2), axis=0)
        np.random.shuffle(self.rotation)
        self.rotation = torch.from_numpy(self.rotation).float()
        self.kp2ds = torch.from_numpy(self.kp2ds).float()
        self.kp3ds = torch.from_numpy(self.kp3ds).float()
        if self.scales is not None:
            self.scales = torch.from_numpy(self.scales).float()
    def get_seqnames(self):
        # One sequence name per sample, parallel to the data arrays.
        return self.sequence_names
    def __len__(self):
        # Number of samples equals the number of 2D poses loaded.
        return self.kp2ds.shape[0]
    def __getitem__(self, index):
        seq_len = self.sequence_lens[index]
        index_in_seq = self.indices_in_seq[index]
        kps_3d = self.kp3ds[index]
        rot = self.rotation[index]
        if not self.is_train:
            kps_2d = self.kp2ds[index]
            # don't use
            diff1 = diff2 = self.kp2ds[index]
        else:
            kps_2d = self.kp2ds[index]
            # Near frame: frame_interval ahead, or behind near the sequence end.
            # NOTE(review): this compares the *global* dataset index against
            # the sequence length -- looks like it should use index_in_seq;
            # confirm before changing.
            if self.frame_interval + index < seq_len:
                diff1_index = index + self.frame_interval
            else:
                diff1_index = index - self.frame_interval
            diff1 = self.kp2ds[diff1_index]
            # Far frame: random in-sequence offset at least min_diff_dist away.
            diff_dist = np.random.randint(-index_in_seq, seq_len - index_in_seq)
            while abs(diff_dist) < self.min_diff_dist:
                diff_dist = np.random.randint(-index_in_seq, seq_len - index_in_seq)
            diff2_index = index + diff_dist
            diff2 = self.kp2ds[diff2_index]
        # NOTE(review): redundant -- rot was already read above.
        rot = self.rotation[index]
        # for valdiation, simply ignore scale
        if self.scales is None or not self.is_train:
            scale = 0
        else:
            scale = self.scales[index]
        return kps_2d, kps_3d, rot, diff1, diff2, scale
|
#_*_ coding:utf-8 _*_
from log import Log
from traceback import format_exc
from bs4 import BeautifulSoup
import requests
from config import PER_REQUESTS_DELAY,PROXIES,IS_CHANGE_HOST,HOST_INDEX,WEBHOSTS
from lxml import etree
import time,re
from faker import Faker
'''
本模块依赖python第三库faker,安装方法 pip install faker
教程详解:https://mp.weixin.qq.com/s/iLjr95uqgTclxYfWWNxrAA
'''
### html5lib needs the input string decoded to Unicode before parsing.
## Faker instance used to generate a random User-Agent per request.
faker = Faker()
## Regex splitting a URL into "https://www." prefix, host, and path so the
## host can be swapped for a mirror (see Root.__init__).  The original line
## assigned `reg` twice (`reg = reg=...`); the redundant assignment is removed.
reg = re.compile(r'(https://www\.)(.*?)(/.*?)')
class Root(object):
    """Fetch one page's HTML, either over HTTP or from a local file.

    Python 2 code: uses ``except Exception, e`` and the ``unicode`` type.
    """
    def __init__(self,url='https://www.aastory.club/category.php',local=None,logger=None,is_change_host=IS_CHANGE_HOST,host_index=HOST_INDEX):
        ## Optionally rewrite the URL's host to one of the configured mirrors.
        # NOTE(review): uses the module-level HOST_INDEX, not the host_index
        # parameter -- the parameter is effectively ignored.
        if is_change_host:
            url=reg.sub(r'\1%s\3'%(WEBHOSTS[HOST_INDEX]),url)
        self.url=url
        self.local=local
        self.host=self.url.rsplit('/',1)[0]
        if logger is None:
            log = Log()
            logger = log.Logger
        self.logger=logger
        # Eagerly fetch the page on construction.
        self.get_html()
        # self.get_soup()
    def get_html(self,):
        """Load the HTML from the local file if set, otherwise over HTTP."""
        if self.local:
            ## A local file exists: read the HTML from disk, not the network.
            with open(self.local,'r')as f:
                self.html=f.read()
            self.url=self.local
            message=u'load html from localation=%s'%(self.local)
            self.logger.info(message)
            # print message
        else:
            message=u'start requests to %s,then will sleep %s second!'%(self.url,PER_REQUESTS_DELAY)
            self.logger.info(message)
            # print message
            try:
                # Random User-Agent per request; throttle after each fetch.
                headers={'User-Agent':faker.user_agent()}
                self.html=requests.get(url=self.url,headers=headers,proxies=PROXIES,timeout=30).content
                self.root=etree.HTML(self.html)
                time.sleep(PER_REQUESTS_DELAY)
            except Exception,e:
                message=u'request url:%s catch exception:%s'%(self.url,e)
                raise Exception,message
    def get_soup(self,):
        """Return a BeautifulSoup over the fetched HTML (decoded to unicode)."""
        if not isinstance(self.html,unicode):
            html=self.html.decode('utf-8','ignore')
        else:
            html=self.html
        self.soup = BeautifulSoup(html, "lxml")
        return self.soup
    @classmethod
    def tostring(self,element):
        # Serialise an lxml element back to markup.
        return etree.tostring(element)
    @classmethod
    def to_etree(self,html):
        # Parse raw HTML into an lxml element tree.
        return etree.HTML(html)
def test():
    # Smoke test: fetch the default category page and dump its raw HTML.
    root=Root()
    print root.html
if __name__ == '__main__':
    test()
|
import sys
from os.path import expanduser
# Make the shared ProjectDoBrain helper modules importable from anywhere.
home = expanduser("~")
sys.path.append('{}/ProjectDoBrain/codes/Modules'.format(home))
from rest_handler import RestHandler
from json_handler import JsonHandler
from csv_handler import CsvHandler
def parse_commands(argv):
    """Parse command-line flags and return the resulting options object.

    Recognised flags:
      -p / --personFile  path of the CSV file to write person ids into
      -m / --mobileOs    mobile OS identifier passed to the REST handler

    Unrecognised positional arguments are silently ignored.
    """
    from optparse import OptionParser
    cli = OptionParser('"')
    for short_flag, long_flag, dest in (
            ('-p', '--personFile', 'person_file'),
            ('-m', '--mobileOs', 'mobile_os')):
        cli.add_option(short_flag, long_flag, dest=dest)
    parsed, _leftover = cli.parse_args(argv)
    return parsed
# Make the person_id CSV (no header row).
options = parse_commands(sys.argv[1:])
header_list = ["person_id"]
rest_handler = RestHandler(mobile_os=options.mobile_os)
json_handler = JsonHandler()
csv_handler = CsvHandler(filepath=options.person_file,header_list=header_list)
# Fetch ids over REST, convert the JSON payload to dicts, then dump to CSV.
json_result = rest_handler.get_json_of_person_id()
result_dict_list = json_handler.json_person_id_to_dict_list(json_source = json_result, mobile_os=options.mobile_os)
csv_handler.dict_to_csv(dict_list=result_dict_list) |
import torch
import torch.nn as nn
import torch.nn.functional as F
from .transformer import build_transformer
class TrackingModel(nn.Module):
    """Transformer matcher linking current-frame detections to previous-frame
    detections; class 0 is background (no match)."""
    def __init__(self, transformer, num_classes, num_queries=None):
        super().__init__()
        self.transformer = transformer
        self.num_classes = num_classes # NOTE num_classes *include* bg which is indexed at 0
        hid_dim = self.transformer.embed_dim
        # Per-detection classification head over previous-frame slots + bg.
        self.match_embed = nn.Linear(hid_dim, num_classes)
    def forward(self, reid_feat_pre, reid_feat_cur, mask_pre, mask_cur, reid_pos_enc_pre, reid_pos_enc_cur, train=True):
        # NOTE(review): toggling train/eval inside forward silently
        # overrides whatever mode the caller had set.
        self.train() if train else self.eval()
        hidden_state = self.transformer(reid_feat_pre_frame=reid_feat_pre, src_key_padding_mask=mask_pre, pos_embed=reid_pos_enc_pre,
                                        reid_feat_cur_frame=reid_feat_cur, tgt_key_padding_mask=mask_cur, query_pos=reid_pos_enc_cur) # [nb_step,bs,max_nb2,embed_dim]
        hidden_state = hidden_state[-1] # last step [bs, max_nb2, embed_dim]
        return self.match_embed(hidden_state) # [bs, max_nb2, nb_classes], [bs]
    def match_label(self, preds, nbdet_valid_cur, nbdet_valid_pre):
        """Decode logits into 0-based previous-frame indices (-1 = no match).

        Only batch size 1 is supported (asserted below).
        """
        # preds: [bs, max_nb2, nb_classes], nbdet_valid_cur:[bs]
        assert preds.size(0) == 1
        nbdet_valid_cur, nbdet_valid_pre = nbdet_valid_cur.squeeze(0), nbdet_valid_pre.squeeze(0)
        preds = preds.squeeze(0)[:nbdet_valid_cur] # [nbdet_valid_cur, nb_classes]
        labels = preds.argmax(1) # [nbdet_valid_cur]
        preds_probs = preds.softmax(-1)
        preds_probs = preds_probs[torch.arange(preds_probs.size(0)), labels] # [nbdet_valid_cur]
        assert preds_probs.size(0) == labels.size(0)
        # Valid matches are classes 1..nbdet_valid_pre; class 0 is background.
        mask = labels.gt(0) & labels.le(nbdet_valid_pre)
        labels -= 1
        labels[~mask] = -1 # bg and another
        preds_probs[~mask] = -1
        return labels.cpu().numpy(), preds_probs.cpu().detach().numpy() # labels: [nbdet_valid_cur], 0-based
    def match_label_eval(self, preds, nbdet_valid_cur, nbdet_valid_pre, max_nb_class=19):
        """Like match_label, but restricts logits to the first max_nb_class classes."""
        nbdet_valid_cur, nbdet_valid_pre = nbdet_valid_cur.squeeze(0), nbdet_valid_pre.squeeze(0)
        preds = preds.squeeze(0)[:nbdet_valid_cur][:,:max_nb_class]
        labels = preds.argmax(1)
        preds_probs = preds.softmax(-1)
        preds_probs = preds_probs[torch.arange(preds_probs.size(0)), labels]
        mask = labels.gt(0) & labels.le(nbdet_valid_pre)
        labels -= 1
        labels[~mask] = -1
        preds_probs[~mask] = -1
        return labels.cpu().numpy(), preds_probs.cpu().detach().numpy()
def build_model(args):
    """Factory: build the transformer backbone, then wrap it in a TrackingModel."""
    transformer_backbone = build_transformer(args)
    return TrackingModel(transformer_backbone, args.num_classes)
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import hashlib
import datetime
import random
from django.http import HttpResponseRedirect
from django.shortcuts import render_to_response, render, get_object_or_404
from django.template import RequestContext
from django.contrib.auth import *
from django.core.urlresolvers import reverse_lazy
from django.core.mail import EmailMessage
from django.utils import timezone
from django.contrib.auth.models import User, Group
from django.views import generic
from cotizador_acerta.views_mixins import *
from darientSessions.models import *
from darientSessions.forms import *
from django.template import Context
from django.template.loader import get_template
from django.contrib.auth.tokens import default_token_generator
from django.views.decorators.csrf import csrf_protect
from darientSessions.forms import PasswordResetForm
from django.utils.translation import ugettext as _
from django.template.response import TemplateResponse
from django.views.defaults import page_not_found
def user_registration(request):
    """Register a new seller or agent account and e-mail an activation link.

    Agents ("corredor") create sellers ("vendedor"); super_admin/admin
    create agents.  Anonymous visitors are sent to login; authenticated
    users without a privileged role are sent back to the quote flow.
    (Python 2 code: note the bare ``print`` statement below.)
    """
    if request.user.is_authenticated():
        # The creator's group decides which form is shown and which role
        # the new account receives: sellers are created by agents, agents
        # are created by admins.
        if (request.user.groups.first().name == "corredor")\
                or (request.user.groups.first().name == "super_admin")\
                or (request.user.groups.first().name == "admin"):
            if request.method == 'POST':
                if request.user.groups.first().name == "corredor":
                    form = UserCreateForm(request.POST)
                else:
                    form = CorredorCreateForm(request.POST)
                if form.is_valid():
                    my_user = form.save()
                    username = my_user.username
                    email = form.cleaned_data['email']
                    # Random activation key, valid for two days.
                    salt = hashlib.sha1(str(random.random())).hexdigest()[:5]
                    activation_key = hashlib.sha1(salt + email).hexdigest()
                    key_expires = datetime.datetime.today() +\
                        datetime.timedelta(2)
                    user = User.objects.get(username=username)
                    new_profile = UserProfile(user=user,
                                              activation_key=activation_key,
                                              key_expires=key_expires)
                    new_profile.save()
                    email_subject = 'Bienvenido(a) a Acerta Seguros'
                    to = [email]
                    link = 'http://' + request.get_host() + '/accounts/confirm/' + activation_key + '/' + str(user.pk)
                    # Initials shown in the e-mail avatar placeholder.
                    if user.first_name and user.last_name:
                        iniciales = user.first_name[0] + user.last_name[0]
                    else:
                        iniciales = user.username[:2]
                    ctx = {
                        'user': user,
                        'link': link,
                        'iniciales': iniciales.upper(),
                    }
                    message = get_template('email_confirmation.html').render(Context(ctx))
                    msg = EmailMessage(email_subject, message, to=to)
                    msg.content_subtype = 'html'
                    # Attach the PDF manual that matches the new account's role.
                    if (request.user.groups.first().name == "super_admin")\
                            or request.user.groups.first().name == "admin":
                        msg.attach('manual_corredores.pdf',
                                   open('cotizador_acerta/static/pdf/manual_corredores.pdf','rb').read(),
                                   'application/pdf')
                    else:
                        msg.attach('manual_vendedores.pdf',
                                   open('cotizador_acerta/static/pdf/manual_vendedores.pdf','rb').read(),
                                   'application/pdf')
                    msg.send()
                    # Add the user into the group: Seller or Agent.
                    if request.user.groups.first().name == "super_admin"\
                            or request.user.groups.first().name == "admin":
                        group = Group.objects.get(name='corredor')
                        user.groups.add(group)
                    else:
                        group = Group.objects.get(name='vendedor')
                        user.groups.add(group)
                    # Add relationship Seller-Agent. If required.
                    if group.name == "vendedor":
                        new_relat = CorredorVendedor(corredor=request.user,
                                                     vendedor=user)
                        new_relat.save()
                    # Agents additionally get a DatosCorredor record holding
                    # fiscal data ('-' placeholders when not provided).
                    if request.user.groups.first().name == "super_admin"\
                            or request.user.groups.first().name == "admin":
                        if form.cleaned_data['ruc'] or form.cleaned_data['licencia']:
                            datos_corredor = DatosCorredor(user=user,
                                                           ruc=request.POST['ruc'],
                                                           licencia=request.POST['licencia'],
                                                           razon_social=form.cleaned_data['razon_social'],
                                                           )
                        else:
                            datos_corredor = DatosCorredor(user=user,
                                                           ruc='-',
                                                           licencia='-',
                                                           razon_social='-',
                                                           )
                        datos_corredor.save()
                        # NOTE(review): Python 2 debug print left in place.
                        print datos_corredor.planes
                    return HttpResponseRedirect(
                        reverse_lazy('register'))
                else:
                    # Invalid form: re-render the matching registration page.
                    if request.user.groups.first().name == "super_admin"\
                            or request.user.groups.first().name == "admin":
                        context = {'form': form}
                        return render_to_response(
                            'registro_corredor.html', context,
                            context_instance=RequestContext(request))
                    else:
                        context = {'form': form}
                        return render_to_response(
                            'register.html', context,
                            context_instance=RequestContext(request))
            else:
                # GET: show an empty form appropriate to the creator's role.
                if request.user.groups.first().name == "super_admin"\
                        or request.user.groups.first().name == "admin":
                    form = CorredorCreateForm()
                    context = {'form': form}
                    return render_to_response(
                        'registro_corredor.html', context,
                        context_instance=RequestContext(request))
                else:
                    form = UserCreateForm()
                    context = {'form': form}
                    return render_to_response(
                        'register.html', context,
                        context_instance=RequestContext(request))
        else:
            # Authenticated but unprivileged: back to the quote flow.
            return HttpResponseRedirect(
                reverse_lazy('vehiculo'))
    else:
        return HttpResponseRedirect(
            reverse_lazy('login'))
def authenticate_user(username=None, password=None):
    """Resolve a login identifier to a User, by e-mail first, then username.

    NOTE(review): despite its name this does NOT verify *password*; the
    caller re-checks credentials with Django's authenticate() afterwards.
    Returns None when no matching user exists.
    """
    for lookup_field in ('email', 'username'):
        try:
            return User.objects.get(**{lookup_field: username})
        except User.DoesNotExist:
            continue
    return None
def login_request(request):
    """Log a user in, accepting either an e-mail address or a username."""
    if request.user.is_authenticated():
        return HttpResponseRedirect(
            reverse_lazy('vehiculo'))
    if request.method == 'POST':
        form = LoginForm(request.POST)
        if form.is_valid():
            username = form.cleaned_data['username']
            password = form.cleaned_data['password']
            # Map the typed identifier to an actual User (by e-mail, then
            # username); the password itself is verified below.
            user_auth = authenticate_user(username, password)
            if user_auth is not None:
                if user_auth.is_active:
                    user = authenticate(username=user_auth.username,
                                        password=password)
                    if user:
                        login(request, user)
                        return HttpResponseRedirect(
                            reverse_lazy('vehiculo'))
                    else:
                        form.add_error(
                            None, "Tu correo o contraseña no son correctos")
                else:
                    # Inactive accounts have not confirmed their e-mail yet.
                    form.add_error(None, "Aún no has confirmado tu correo.")
                    user = None
            else:
                form.add_error(
                    None, "Tu correo o contraseña no son correctos")
    else:
        form = LoginForm()
    context = {'form': form, 'host': request.get_host()}
    return render_to_response('login.html', context,
                              context_instance=RequestContext(request))
def editAccount(request):
    """Let the logged-in user edit their own username and e-mail."""
    if not request.user.is_authenticated():
        return HttpResponseRedirect(reverse_lazy('/'))
    template_name = 'edit_account.html'
    if request.method == 'GET':
        form = UserEditForm(initial={'username': request.user.username,
                                     'email': request.user.email})
        return render(request, template_name, {'form': form})
    elif request.method == 'POST':
        form = UserEditForm(request.POST, instance=request.user)
        if form.is_valid():
            form.save()
            return HttpResponseRedirect(reverse_lazy('home'))
        # Invalid submission: redisplay the form with its errors.
        return render(request, template_name, {'form': form})
def register_confirm(request, activation_key):
    """Activate an account from the e-mailed activation link."""
    if request.user.is_authenticated():
        return HttpResponseRedirect(reverse_lazy('vehiculo'))
    user_profile = get_object_or_404(UserProfile,
                                     activation_key=activation_key)
    user = user_profile.user
    if user.is_active:
        return HttpResponseRedirect(reverse_lazy('vehiculo'))
    # Expired key: redirect to regenerate and re-send a fresh one.
    if user_profile.key_expires < timezone.now():
        return HttpResponseRedirect(reverse_lazy('generate_key',
                                                 kwargs={'pk': user.pk}))
    user.is_active = True
    user.save()
    return render_to_response('cuenta_activada.html')
def generate_key(request, pk):
    """Re-issue the activation key for user *pk* and re-send the e-mail.

    Previous UserProfile rows for the user are deleted so only the new key
    is valid.  Fixes a TypeError in the original: ``user.pk`` (an int) was
    concatenated into the link without ``str()`` — the sibling
    user_registration view already did this correctly.
    """
    user = User.objects.get(pk=pk)
    UserProfile.objects.filter(user=user).delete()
    # Fresh random key, valid for two days (same scheme as registration).
    salt = hashlib.sha1(str(random.random())).hexdigest()[:5]
    activation_key = hashlib.sha1(salt + user.email).hexdigest()
    key_expires = datetime.datetime.today() + datetime.timedelta(2)
    new_profile = UserProfile(user=user, activation_key=activation_key,
                              key_expires=key_expires)
    new_profile.save()
    email_subject = 'Bienvenido(a) a Acerta Seguros'
    to = [user.email]
    link = 'http://' + request.get_host() + '/accounts/confirm/' + activation_key + '/' + str(user.pk)
    # Initials shown in the e-mail avatar placeholder.
    if user.first_name and user.last_name:
        iniciales = user.first_name[0] + user.last_name[0]
    else:
        iniciales = user.username[:2]
    ctx = {
        'user': user,
        'link': link,
        'iniciales': iniciales.upper(),
    }
    message = get_template('email_confirmation.html').render(Context(ctx))
    msg = EmailMessage(email_subject, message, to=to)
    msg.content_subtype = 'html'
    msg.send()
    return render_to_response('reenvio_activacion.html')
@csrf_protect
def password_reset(request, is_admin_site=False,
                   template_name='registration/password_reset_form.html',
                   email_template_name='registration/password_reset_email.html',
                   subject_template_name='registration/password_reset_subject.txt',
                   password_reset_form=PasswordResetForm,
                   token_generator=default_token_generator,
                   post_reset_redirect=None,
                   from_email=None,
                   current_app=None,
                   extra_context=None,
                   html_email_template_name=None):
    """Local copy of Django's password_reset view using the project's form.

    NOTE(review): the ``post_reset_redirect`` argument is unconditionally
    overridden on the next line, so callers cannot customise the redirect.
    """
    post_reset_redirect = reverse_lazy('password_reset_done')
    if request.method == "POST":
        form = password_reset_form(request.POST)
        if form.is_valid():
            opts = {
                'use_https': request.is_secure(),
                'token_generator': token_generator,
                'from_email': from_email,
                'email_template_name': email_template_name,
                'subject_template_name': subject_template_name,
                'request': request,
                'html_email_template_name': html_email_template_name,
            }
            if is_admin_site:
                opts = dict(opts, domain_override=request.get_host())
            form.save(**opts)
            return HttpResponseRedirect(post_reset_redirect)
    else:
        form = password_reset_form()
    context = {
        'form': form,
        'title': _('Password reset'),
    }
    if extra_context is not None:
        context.update(extra_context)
    return TemplateResponse(request, template_name, context,
                            current_app=current_app)
class EditUser(LoginRequiredMixin, GroupRequiredMixin, generic.UpdateView):
    """Edit an agent's account together with their DatosCorredor fiscal data."""
    template_name = "update_user_form.html"
    model = User
    form_class = UserEditForm
    context_object_name = "usuario"
    success_url = 'corredor_vendedor_detail'
    def get_initial(self):
        """
        Returns the initial data to use for forms on this view.

        Pre-fills ruc/licencia/razon_social unless they still hold the '-'
        placeholder written at registration time.
        """
        datos = DatosCorredor.objects.get(user=self.object)
        initial = self.initial.copy()
        if datos:
            if datos.ruc != '-':
                initial['ruc'] = datos.ruc
            if datos.licencia != '-':
                initial['licencia'] = datos.licencia
            if datos.razon_social != '-':
                initial['razon_social'] = datos.razon_social
        return initial
    def form_valid(self, form):
        """
        If the form is valid, redirect to the supplied URL.
        """
        self.object = form.save()
        # Mirror the submitted fiscal fields back onto DatosCorredor.
        user = User.objects.get(email=form.cleaned_data['email'])
        corredor = DatosCorredor.objects.get(user=user)
        if form.cleaned_data['licencia']:
            corredor.licencia = form.cleaned_data['licencia']
        if form.cleaned_data['ruc']:
            corredor.ruc = form.cleaned_data['ruc']
        if form.cleaned_data['razon_social']:
            corredor.razon_social = form.cleaned_data['razon_social']
        corredor.save()
        return HttpResponseRedirect(
            reverse_lazy(self.success_url, kwargs={'pk': user.pk}))
class EditVendedor(LoginRequiredMixin, CorredorRequiredMixin, generic.UpdateView):
    """Agent-facing view to edit one of their sellers."""
    template_name = "update_vendedor_form.html"
    model = User
    form_class = VendedorEditForm
    context_object_name = "usuario"
    success_url = 'corredor_vendedor_detail'
    def form_valid(self, form):
        """
        If the form is valid, redirect to the supplied URL.
        """
        self.object = form.save()
        user = User.objects.get(email=form.cleaned_data['email'])
        return HttpResponseRedirect(
            reverse_lazy(self.success_url, kwargs={'pk': user.pk}))
class EditPassword(LoginRequiredMixin, generic.UpdateView):
    """Let a user change their own password (404 when pk is someone else's)."""
    template_name = "update_password_form.html"
    model = User
    form_class = UserPasswordEditForm
    context_object_name = "usuario"
    success_url = 'vehiculo'
    def get(self, request, *args, **kwargs):
        self.object = self.get_object()
        # Users may only open their own password form.
        if int(request.user.pk) != int(kwargs['pk']):
            return page_not_found(request)
        return super(EditPassword, self).get(request, *args, **kwargs)
    def form_valid(self, form):
        """
        If the form is valid, redirect to the supplied URL.
        """
        self.object = form.save()
        return HttpResponseRedirect(
            reverse_lazy(self.success_url))
class ActivateAccount(generic.UpdateView):
    """Activate an account from its key and let the user set a password."""
    template_name = "activate_account.html"
    model = User
    form_class = UserPasswordEditForm
    context_object_name = "usuario"
    success_url = 'vehiculo'
    def get(self, request, *args, **kwargs):
        if request.user.is_authenticated():
            return HttpResponseRedirect(reverse_lazy('vehiculo'))
        user_profile = get_object_or_404(
            UserProfile, activation_key=kwargs['activation_key'])
        user = user_profile.user
        if user.is_active:
            return HttpResponseRedirect(reverse_lazy('vehiculo'))
        # Expired keys are regenerated instead of activating the account.
        if user_profile.key_expires < timezone.now():
            return HttpResponseRedirect(reverse_lazy('generate_key',
                                                     kwargs={'pk': user.pk}))
        user.is_active = True
        user.save()
        return super(ActivateAccount, self).get(request, *args, **kwargs)
    def form_valid(self, form):
        """
        If the form is valid, redirect to the supplied URL.
        """
        self.object = form.save()
        return HttpResponseRedirect(
            reverse_lazy(self.success_url))
|
import random
from django.shortcuts import render
# Create your views here.
from rest_framework.generics import GenericAPIView
from rest_framework.mixins import RetrieveModelMixin
from rest_framework.response import Response
from rest_framework.views import APIView
from django_redis import get_redis_connection
from rest_framework import serializers
from users.models import User
from .constans import *
from celery_tasks.sms.tasks import send_sms_code
class SMSCodeView(APIView):
    """Issue a 6-digit SMS verification code for a mobile number (rate-limited)."""
    def get(self,request,mobile):
        redis_cli =get_redis_connection('sms_code')
        # Reject the request if a code was sent to this number recently
        # (the 'sms_flag' key has not expired yet).
        if redis_cli.get('sms_flag'+mobile):
            raise serializers.ValidationError("请不要重发发送")
        code = random.randint(1,999999)
        sms_code = "%06d"% code
        # Use a redis pipeline so both keys are written in one round trip.
        redis_pipeline = redis_cli.pipeline()
        redis_pipeline.setex("sms_code"+mobile,SMS_CODE_EXPIRE,sms_code)
        redis_pipeline.setex('sms_flag'+mobile,SMS_FLAG_EXPIRE,1)
        redis_pipeline.execute()
        # Send asynchronously via celery; expiry is passed in minutes.
        send_sms_code.delay(mobile,sms_code,SMS_CODE_EXPIRE/60,1)
        return Response({"message":"ok"})
class MobilesView(APIView):
    """Report how many existing accounts use a given mobile number."""
    def get(self, request, mobile):
        # A count of 0 means the number is still available.
        match_count = User.objects.filter(mobile=mobile).count()
        return Response({
            "mobile": mobile,
            "count": match_count,
        })
class UsernamesView(APIView):
    """Username availability check (count of users with that username)."""
    def get(self,request,username):
        # Count existing accounts with this username; 0 means available.
        count = User.objects.filter(username= username).count()
        data = {
            "username":username,
            "count":count
        }
        return Response(data)
# -*- coding:utf-8 -*-
from util.operatedb import *
from util.logger import Logger
# Module-wide logger instance named "db".
logger = Logger(logger="db").getlog()
# Promote a user to membership.
class becomeMember():
    """Promote the test WeChat user to membership across two databases.

    Test fixture helper: matches users whose nickname starts with the test
    prefix, then inserts membership and product-quota rows.
    """
    # 1. Delete the test user (and its wechat binding) from the member DB.
    def del_user(self):
        sql = 'delete c,w from customer_user c ,customer_user_wechat w where c.id = w.user_id AND w.nick_name like "鱼小七%"'
        operatedb('member','delete',sql)
        logger.info ('用户删除成功')
    # 2. Look up the user_id bound to the test nickname.
    def getUserId(self):
        mem_sql = 'SELECT user_id from customer_user_wechat where nick_name like "鱼小七%"'
        user_id = operatedb('member','select',mem_sql)
        logger.info('user_id:'+str(user_id))
        # NOTE(review): the shape of operatedb's return value (scalar vs
        # row tuple) is not visible here -- confirm before reuse.
        return user_id
    # 3. Insert a membership row into the evaluation DB.
    def eva_insert(self,user_id):
        sql = 'INSERT INTO `user_member`(`user_id`, `order_id`, `member_id`, `status`, `expire_time`, `create_time`, `description`) VALUES (%s, 0, 3, 1, "2019-12-01 10:50:40", "2019-08-23 08:50:42", "")'%user_id
        operatedb('evaluation','insert',sql)
        logger.info('evaluation第一次插入成功')
    # 4. Fetch the most recent project_id from user_product.
    def eva_product(self):
        sql = 'SELECT project_id FROM user_product ORDER BY id DESC LIMIT 1'
        project_id = operatedb('evaluation','select',sql)
        logger.info('project_id:'+str(project_id))
        return project_id
    # 5. Insert the member's product quota row.
    def eva_insert_product(self,user_id,project_id):
        sql = 'INSERT into user_product (user_id,product_id,order_id,type,project_id,`status`,total_num,num,create_time) values(%s,1,0,1,%s,2,100,100,"2018-12-26 15:43:59")' %(user_id,project_id)
        operatedb('evaluation','insert',sql)
        logger.info('evaluation第二次插入成功')
    def action(self):
        """Run the whole promotion flow; failures are logged, not raised."""
        try:
            user_id = self.getUserId()
            self.eva_insert(user_id)
            project_id = self.eva_product()
            self.eva_insert_product(user_id,project_id+1)
            logger.info('会员操作成功')
        except Exception as e:
            logger.error('数据库操作异常'+str(e))
if __name__ == '__main__':
    # Manual run: promote the test user to membership.
    a = becomeMember()
    a.action()
# -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://doc.scrapy.org/en/latest/topics/item-pipeline.html
from .items import SpidersProxyItem
class SpidersProxyPipeline(object):
    """Scrapy pipeline that stores scraped proxies in the spider's Redis set.

    The original defined an ``__init__`` that only called ``super().__init__``
    — pure boilerplate with no effect — so it has been removed.
    """

    def process_item(self, item, spider):
        """Add proxy items to the Redis 'proxy' set; pass every item through.

        sadd() de-duplicates, so repeated proxies are stored only once.
        Non-proxy items are returned untouched for later pipelines.
        """
        if isinstance(item, SpidersProxyItem):
            spider.redis_connection.sadd('proxy', item['proxy'])
        return item
|
# -*- coding: UTF-8 -*-
from league.league import League
from script.zhongchao_guess import ZhongchaoGuess
import csv
import sys
# Python 2-only idiom: re-expose setdefaultencoding so the Chinese CSV
# headers below can be written without per-call encoding.
reload(sys)
sys.setdefaultencoding('utf-8')
def fajia_guess(round_num):
    """Forecast the unfinished Ligue 1 games of *round_num* (2018 season)
    and write the predictions to output/fajia_cai.csv.

    NOTE(review): forecasts come from the 2017 season object while the
    games come from 2018 -- presumably "use last season's stats", but
    worth confirming against League/Season semantics.
    """
    fajia = League(5, 'fajia', 'input/fajia_games.csv', 'input/fajia_ranking.csv')
    fajia.parse()
    season_2017 = fajia.get_season(2017)
    season_2017.parse_games()
    season_2018 = fajia.get_season(2018)
    season_2018.parse_games()
    games = season_2018.get_unfinished_games(round_num)
    forcast_file = open('output/fajia_cai.csv', 'w')
    forcast_write = csv.writer(forcast_file)
    forcast_write.writerow(['轮次', '主队', '客队', '主排名', '客排名', '主球', '客球', '胜负'])
    for game in games:
        for a in season_2017.forecast(game):
            forcast_write.writerow(a.to_row())
        # Blank row between games for readability.
        forcast_write.writerow([])
    forcast_file.close()
def meiguo_guess(round_num):
    """Forecast the unfinished MLS games of *round_num* (2018 season) and
    write the predictions to output/meiguo_cai.csv."""
    meiguo = League(38, 'meiguo', 'input/meiguodalianmen_games.csv', 'input/meiguodalianmen_ranking.csv')
    meiguo.parse()
    season_2018 = meiguo.get_season(2018)
    season_2018.parse_games()
    games = season_2018.get_unfinished_games(round_num)
    forcast_file = open('output/meiguo_cai.csv', 'w')
    forcast_write = csv.writer(forcast_file)
    forcast_write.writerow(['轮次', '主队', '客队', '主排名', '客排名', '主球', '客球', '胜负'])
    for game in games:
        for a in season_2018.forecast(game):
            forcast_write.writerow(a.to_row())
        # Blank row between games for readability.
        forcast_write.writerow([])
    forcast_file.close()
# Forecast MLS round 26.  NOTE(review): the round number is passed as the
# string '26' -- confirm get_unfinished_games expects strings.
meiguo_guess('26')
#zhongchao = ZhongchaoGuess()
#zhongchao.guess_three(2018, 16)
# meidalian = MeidalianGuess()
# meidalian.guess_three(2018, 25)
# yingchao = YingchaoGuess()
# yingchao.guess_three(2018, 1) |
import os
import pickle
import shutil
import zipfile
from functools import partial
import numpy as np
import pandas as pd
import scanpy as sc
from scipy import sparse
from six import string_types
from odin.utils import MPI, one_hot
from sisua.data.const import MARKER_GENES
from sisua.data.path import DATA_DIR, DOWNLOAD_DIR
from sisua.data.single_cell_dataset import OMIC, SingleCellOMIC
from sisua.data.utils import download_file, read_r_matrix, validate_data_dir
_URLs = [
r"https://www.ebi.ac.uk/arrayexpress/files/E-MTAB-3929/E-MTAB-3929.processed.1.zip",
r"https://www.ebi.ac.uk/arrayexpress/files/E-MTAB-3929/E-MTAB-3929.processed.2.zip",
r"https://www.ebi.ac.uk/arrayexpress/files/E-MTAB-3929/E-MTAB-3929.processed.3.zip",
r"https://www.ebi.ac.uk/arrayexpress/files/E-MTAB-3929/E-MTAB-3929.processed.4.zip",
]
_MD5s = [
r"aecae7898f8313d326426720603133c0",
r"a83b09ee9465e3a908dd6a691da63e69",
r"d8fc86b50cae1f8ff0cb3ceb6ca73d40",
r"ecf2bd8b0176c00e9c05fdebbf7a856a",
]
def read_human_embryos(filtered_genes=True,
                       override=False,
                       verbose=True) -> SingleCellOMIC:
    r""" Transcriptional map of human embryo development, including the sequenced
    transcriptomes of 1529 individual cells from 88 human preimplantation
    embryos. These data show that cells undergo an intermediate state of
    co-expression of lineage-specific genes, followed by a concurrent
    establishment of the trophectoderm, epiblast, and primitive endoderm
    lineages, which coincide with blastocyst formation.

    References:
      Petropoulos S, Edsgärd D, Reinius B, et al. Single-Cell RNA-Seq Reveals
        Lineage and X Chromosome Dynamics in Human Preimplantation Embryos.
        Cell. 2016 Sep

    Note:
      Gene expression levels (RefSeq annotations) were estimated in terms of
        reads per kilobase exon model and per million mapped reads (RPKM)
        using rpkmforgenes
      Genes were filtered, keeping 15633/26178 genes that
        * were expressed in at least 5 out of 1919 sequenced cells (RPKM >= 10).
        and
        * for which cells with expression came from at least two
          different embryos.
      Cells were quality-filtered based on 4 criteria, keeping 1529/1919 cells.
        * First, Spearman correlations, using the RPKM expression levels of
          all genes, for every possible pair of cells were calculated and a
          histogram of the maximum correlation obtained for each cell,
          corresponding to the most similar cell, was used to identify 305
          outlier cells with a maximum pair-wise correlations below 0.63.
        * Second, a histogram of the number of expressed genes per cell was
          used to identify 330 outlier cells with less than 5000 expressed
          genes.
        * Third, a histogram of the total transcriptional expression output
          from the sex chromosomes (RPKM sum) was used to identify 33 cells
          with indeterminable sex, or a called sex that was inconsistent with
          other cells of that embryo
        * Fourth, 13 outlier cells were identified using PCA and t-SNE
          dimensionality reduction.
    """
    download_dir = os.path.join(DOWNLOAD_DIR, 'human_embryos')
    if not os.path.exists(download_dir):
        os.makedirs(download_dir)
    preprocessed_path = os.path.join(DATA_DIR, 'human_embryos_preprocessed')
    # Guard the removal: on a fresh machine the folder does not exist yet and
    # an unconditional rmtree would raise FileNotFoundError.
    if override and os.path.exists(preprocessed_path):
        shutil.rmtree(preprocessed_path)
        if verbose:
            print(f"Override preprocessed data at {preprocessed_path}")
    if not os.path.exists(preprocessed_path):
        os.makedirs(preprocessed_path)
    ### download data
    files = []
    for url, md5 in zip(_URLs, _MD5s):
        path = download_file(url=url,
                             filename=os.path.join(download_dir,
                                                   os.path.basename(url)),
                             override=False,
                             md5=md5)
        files.append(path)
    ### preprocessing
    if len(os.listdir(preprocessed_path)) == 0:
        data_map = {}
        for fpath in files:
            zipname = os.path.basename(fpath)
            # distinct name for the archive handle; the original shadowed the
            # loop variable with the ZipFile object
            with zipfile.ZipFile(fpath, mode="r") as zf:
                for dat_file in zf.filelist:
                    filename = dat_file.filename
                    dat = str(zf.read(filename), 'utf-8')
                    # parse the tab-separated matrix, then transpose so rows
                    # are cells and columns are genes
                    x = []
                    for line in dat.split('\n'):
                        if len(line) == 0:
                            continue
                        x.append(line.split('\t'))
                    x = np.asarray(x).T
                    row_name = x[1:, 0]
                    col_name = x[0, 1:]
                    x = x[1:, 1:].astype(np.float32)
                    x = sparse.coo_matrix(x)
                    data_map[filename] = (x, row_name, col_name)
                    # respect the verbose flag (the original printed
                    # unconditionally)
                    if verbose:
                        print(f"Read: {zipname} - (unknown)")
                        print(f" * Matrix: {x.shape}")
                        print(f" * Row : {row_name.shape}-{row_name[:3]}")
                        print(f" * Col : {col_name.shape}-{col_name[:3]}")
        # save loaded data to disk
        for name, (x, row, col) in data_map.items():
            with open(os.path.join(preprocessed_path, f"{name}:x"), "wb") as f:
                sparse.save_npz(f, x)
            with open(os.path.join(preprocessed_path, f"{name}:row"), "wb") as f:
                np.save(f, row)
            with open(os.path.join(preprocessed_path, f"{name}:col"), "wb") as f:
                np.save(f, col)
        del data_map
    ### read the data
    # counts.txt (1529, 26178)
    # ercc.counts.txt (1529, 92)
    # rpkm.txt (1529, 26178)
    # ercc.rpkm.txt (1529, 92)
    data = {}
    genes_path = os.path.join(preprocessed_path, "filtered_genes")
    for path in os.listdir(preprocessed_path):
        if path == os.path.basename(genes_path):
            continue
        name, ftype = os.path.basename(path).split(':')
        with open(os.path.join(preprocessed_path, path), 'rb') as f:
            if ftype == 'x':
                x = sparse.load_npz(f).tocsr()
            else:
                x = np.load(f)
        data[f"{name}_{ftype}"] = x
    rpkm = data['rpkm.txt_x']
    counts = data['counts.txt_x']
    genes = data['counts.txt_col']
    cells = data['counts.txt_row']
    ### filter genes
    if not os.path.exists(genes_path):
        # filter genes by rpkm
        # NOTE(review): this keeps genes whose total RPKM over all cells is
        # >= 10, which is looser than the "RPKM >= 10 in >= 5 cells" rule in
        # the docstring — confirm before changing.
        ids = np.asarray(np.sum(rpkm, axis=0) >= 10).ravel()
        rpkm = rpkm[:, ids]
        counts = counts[:, ids]
        genes = genes[ids]
        # filter genes by min 5 cells
        ids = np.asarray(np.sum(counts > 0, axis=0) >= 5).ravel()
        rpkm = rpkm[:, ids]
        counts = counts[:, ids]
        genes = genes[ids]
        # filter highly variable genes
        sco = SingleCellOMIC(X=counts, cell_id=cells, gene_id=genes)
        sco.normalize(omic=OMIC.transcriptomic, log1p=True)
        sco.filter_highly_variable_genes(n_top_genes=2000)
        filtered = sco.var_names.to_numpy()
        with open(genes_path, 'wb') as f:
            pickle.dump([genes, filtered], f)
        del sco
    else:
        with open(genes_path, 'rb') as f:
            ids, filtered = pickle.load(f)
        ids = set(ids)
        ids = np.asarray([i in ids for i in genes])
        rpkm = rpkm[:, ids]
        counts = counts[:, ids]
        genes = genes[ids]
    # last filtering: restrict to the 2000 highly-variable genes if requested
    if filtered_genes:
        filtered = set(filtered)
        ids = np.asarray([i in filtered for i in genes])
        rpkm = rpkm[:, ids]
        counts = counts[:, ids]
        genes = genes[ids]
    ### create the SingleCellOMIC
    sco = SingleCellOMIC(X=counts,
                         cell_id=cells,
                         gene_id=genes,
                         omic=OMIC.transcriptomic,
                         name="HumanEmbryos")
    sco.add_omic(omic=OMIC.rpkm, X=rpkm, var_names=genes)
    # cell labels: embryonic-day prefix of the obs name (e.g. "E5")
    labels = ['.'.join(i.split('.')[:-2]) for i in sco.obs_names]
    labels = ['E7' if i == 'E7.4' else i for i in labels]
    labels_name = {j: i for i, j in enumerate(sorted(set(labels)))}
    labels = np.array([labels_name[i] for i in labels])
    sco.add_omic(omic=OMIC.celltype,
                 X=one_hot(labels, len(labels_name)),
                 var_names=list(labels_name.keys()))
    sco.add_omic(omic=OMIC.ercc,
                 X=data['ercc.counts.txt_x'],
                 var_names=data['ercc.counts.txt_col'])
    return sco
|
import pymongo, os
import json
def seedUserData(user_collection):
    """Insert the bundled user fixtures into *user_collection*.

    Reads backend/users-api/data.json and bulk-inserts its records.
    Returns the pymongo insert_many result.
    """
    with open('backend/users-api/data.json') as fixture:
        users = json.load(fixture)
        return user_collection.insert_many(users)
if __name__ == "__main__":
    # connection settings fall back to local-dev defaults when env vars unset
    uri = os.getenv('DB_URI') or 'localhost'
    username = os.getenv('DB_USER') or 'root'
    password = os.getenv('DB_PASS') or 'admin'
    client = pymongo.MongoClient(uri, username=username, password=password)
    userdb = client['usersdb']
    # only seed a fresh database: an existing "users" collection means the
    # data is already present
    if "users" in userdb.list_collection_names():
        print("The collection exists! No seeding needed")
    else:
        seedUserData(userdb['users'])
        print("User data added!")
    client.close()
|
# -*- coding: utf-8 -*-
"""
Utility functions, just doing simple tasks.
"""
__author__ = 'aildyakov'
def get_input_function():
    """
    Return the interactive prompt function for the running interpreter.

    :return: function `input` in python3 or `raw_input` in python2.
    """
    try:
        return raw_input
    except NameError:
        # Python 3: `raw_input` is gone and `input` already returns a string.
        return input
def clear_screen():
    """Clear the terminal using the ANSI "erase display" escape sequence."""
    print("\x1b[2J")
|
"""
Program Name: field_util.py
Contact(s): George McCabe <mccabe@ucar.edu>
Description: METplus utility to handle MET config dictionaries with field info
"""
from . import get_threshold_via_regex, is_python_script, remove_quotes
def field_read_prob_info(config, c_dict, data_types, app_name):
    """! Read probabilistic field settings for each data type from the config
    object and store them in the given dictionary.

    @param config METplusConfig object to read
    @param c_dict dictionary to set values
    @param data_types list of field types to check, i.e. FCST, OBS
    @param app_name name of tool used to read wrapper-specific configs
    """
    app = app_name.upper()
    for dtype in data_types:
        # wrapper-specific variable takes precedence over the generic one
        prob_name = config.get_mp_config_name(
            [f'{dtype}_{app}_IS_PROB', f'{dtype}_IS_PROB']
        )
        is_prob = config.getbool('config', prob_name) if prob_name else False
        c_dict[f'{dtype}_IS_PROB'] = is_prob
        if not is_prob:
            continue

        # probabilistic data: check whether prob info lives in the GRIB PDS
        pds_name = config.get_mp_config_name(
            [f'{dtype}_{app}_PROB_IN_GRIB_PDS', f'{dtype}_PROB_IN_GRIB_PDS']
        )
        c_dict[f'{dtype}_PROB_IN_GRIB_PDS'] = (
            config.getbool('config', pds_name) if pds_name else False
        )
def get_field_info(c_dict, data_type='', v_name='', v_level='', v_thresh=None,
                   v_extra='', add_curly_braces=True):
    """! Format field information into format expected by MET config file

    @param c_dict config dictionary to read values
    @param v_level level of data to extract
    @param v_thresh threshold value to use in comparison
    @param v_name name of field to process
    @param v_extra additional field information to add if available
    @param data_type type of data to find i.e. FCST or OBS
    @param add_curly_braces if True, add curly braces around each
     field info string. If False, add single quotes around each
     field info string (defaults to True)
    @rtype string
    @return list of formatted field information, or a string containing an
     error message if something went wrong
    """
    thresholds = _get_thresholds(c_dict, v_thresh, v_name, data_type)
    # _get_thresholds signals failure by returning an error message string;
    # propagate it to the caller instead of iterating over its characters
    # (the original looped over the string, producing one bogus field per
    # character)
    if isinstance(thresholds, str):
        return thresholds

    # list to hold field information
    fields = []
    for thresh in thresholds:
        field = _get_name_and_level(c_dict, data_type, v_name, v_level, thresh)
        # handle cat_thresh if set
        field += _get_thresh(c_dict, data_type, thresh)
        # handle extra options if set
        field += _get_extra(v_extra)
        # add curly braces around field info if requested
        # otherwise add single quotes around field info
        field = f'{{ {field} }}' if add_curly_braces else f"'{field}'"
        # add field info string to list of fields
        fields.append(field)
    # return list of strings in field dictionary format
    return fields
def _get_thresholds(c_dict, v_thresh, v_name, data_type):
# if thresholds are set
if v_thresh:
# if neither fcst or obs are probabilistic,
# pass in all thresholds as a comma-separated list for 1 field info
if (not c_dict.get('FCST_IS_PROB', False) and
not c_dict.get('OBS_IS_PROB', False)):
return [','.join(v_thresh)]
return v_thresh
# if no thresholds are specified, fail if prob field is in grib PDS
if (c_dict.get(f'{data_type}_IS_PROB', False) and
c_dict.get(f'{data_type}_PROB_IN_GRIB_PDS', False) and
not is_python_script(v_name)):
return 'No threshold was specified for probabilistic GRIB data'
return [None]
def _get_name_and_level(c_dict, data_type, name, level, thresh):
"""!Format the name and level of a field to what the MET tools expect.
@param c_dict config dictionary to read values
@param data_type type of data to find i.e. FCST or OBS
@param name variable name
@param level variable level if set
@param thresh variable threshold if set
@returns string with the formatted name and level information
"""
if (c_dict.get(f'{data_type}_PROB_IN_GRIB_PDS', False) and
not is_python_script(name)):
return _handle_grib_pds_field_info(name, level, thresh)
# add field name
field = f'name="{name}";'
# add level if set
if level:
field += f' level="{remove_quotes(level)}";'
# add probabilistic identifier if necessary
if c_dict.get(f'{data_type}_IS_PROB', False):
field += " prob=TRUE;"
return field
def _get_thresh(c_dict, data_type, thresh):
"""!Format the categorical threshold value to what MET tools expect if set.
@param c_dict config dictionary to read values
@param data_type type of data to find i.e. FCST or OBS
@param thresh variable threshold if set
@returns formatted threshold key/value or empty string if not set
"""
cat_thresh = thresh
if c_dict.get(f'{data_type}_IS_PROB', False):
# add probabilistic cat thresh if different from default ==0.1
cat_thresh = c_dict.get(f'{data_type}_PROB_THRESH')
if not cat_thresh:
return ''
return f" cat_thresh=[ {cat_thresh} ];"
def _get_extra(v_extra):
"""!Format extra field options to what MET tools expect if set. Adds
trailing semicolon if not found.
@param v_extra string with extra variable config options
@returns string with a blank space followed by additional field options
if set or empty string if not.
"""
if not v_extra:
return ''
extra = v_extra.strip()
# if trailing semi-colon is not found, add it
if not extra.endswith(';'):
extra = f"{extra};"
return f' {extra}'
def format_field_info(c_dict, var_info, data_type, add_curly_braces=True):
    """! Format field information into format expected by MET config file

    @param c_dict config dictionary to read values
    @param var_info dictionary of field info to format
    @param data_type type of data to find i.e. FCST or OBS
    @param add_curly_braces if True, add curly braces around each
     field info string. If False, add single quotes around each
     field info string (defaults to True)
    @rtype string
    @return Returns a list of formatted field information or a string
     containing an error message if something went wrong
    """
    # var_info keys are prefixed with the lowercase data type, e.g. fcst_name
    prefix = data_type.lower()
    return get_field_info(
        c_dict=c_dict,
        data_type=data_type,
        v_name=var_info.get(f'{prefix}_name'),
        v_level=var_info.get(f'{prefix}_level'),
        v_thresh=var_info.get(f'{prefix}_thresh'),
        v_extra=var_info.get(f'{prefix}_extra'),
        add_curly_braces=add_curly_braces,
    )
def format_all_field_info(c_dict, var_list, data_type, add_curly_braces=True):
    """!Format field information for a list of fields.

    @param c_dict config dictionary to read values
    @param var_list list of dictionaries of field info to format
    @param data_type type of data to find i.e. FCST or OBS
    @param add_curly_braces if True, add curly braces around each
     field info string. If False, add single quotes around each
     field info string (defaults to True)
    @rtype string
    @return Returns a string of formatted field information separated by
     comma or None if something went wrong
    """
    all_fields = []
    for var_info in var_list:
        formatted = format_field_info(c_dict=c_dict,
                                      var_info=var_info,
                                      data_type=data_type,
                                      add_curly_braces=add_curly_braces)
        # an empty result means formatting failed; abort the whole list
        if not formatted:
            return None
        all_fields.extend(formatted)
    return ','.join(all_fields)
def _handle_grib_pds_field_info(v_name, v_level, thresh):
"""! Format field string to read probabilistic data from the PDS of a GRIB
file. Thresholds are formatted using thresh_lo and thresh_hi syntax.
@param v_name name of field to read
@param v_level level of field to read
@param thresh threshold value to format if set
@returns formatted field string
"""
field = f'name="PROB"; level="{v_level}"; prob={{ name="{v_name}";'
if thresh:
thresh_tuple_list = get_threshold_via_regex(thresh)
for comparison, number in thresh_tuple_list:
# skip adding thresh_lo or thresh_hi if comparison is NA
if comparison == 'NA':
continue
if comparison in ["gt", "ge", ">", ">=", "==", "eq"]:
field = f"{field} thresh_lo={number};"
if comparison in ["lt", "le", "<", "<=", "==", "eq"]:
field = f"{field} thresh_hi={number};"
# add closing curly brace for prob=
return f'{field} }}'
|
# Problem [1074] : Z
import sys


def solve(exp, row, col):
    """Return the visit order of cell (row, col) in the Z-order traversal of
    a 2**exp x 2**exp grid.

    At every level the grid splits into 4 quadrants visited in Z order
    (top-left, top-right, bottom-left, bottom-right). The quadrant index at
    bit position i is 2*bit(row, i) + bit(col, i), and every quadrant at that
    level holds 4**i cells, so the answer is the sum of quadrant * 4**i.
    Closed-form replaces the original recursion, which relied on globals
    defined after the function and recursed into all four quadrants.
    """
    order = 0
    for i in range(exp - 1, -1, -1):
        quadrant = ((row >> i) & 1) * 2 + ((col >> i) & 1)
        order += quadrant << (2 * i)  # quadrant * 4**i cells skipped
    return order


if __name__ == "__main__":
    # guarded so importing this module no longer blocks on stdin
    N, r, c = map(int, sys.stdin.readline().split())
    print(solve(N, r, c))
|
import glob
import os
import argparse
import csv
import cv2
parser = argparse.ArgumentParser(description="Generate a video annotation file.")
parser.add_argument("-d", "--data-dir", type=str)
# "-a": the original registered "-o" twice, which makes argparse raise
# "conflicting option string" before the script can run at all
parser.add_argument("-a", "--video-annotations", type=str)
parser.add_argument("-o", "--output-file", type=str, default="hyper-kvasir-video-annotations-file.csv")
def gather_images(data_dir, video_annotations, output_file):
    """Write a semicolon-separated CSV describing each video in *data_dir*.

    @param data_dir directory containing the video files
    @param video_annotations CSV (";"-separated, with header) mapping
     video id -> main finding
    @param output_file path of the summary CSV to write
    """
    # map "<id>.avi" -> finding from the annotation CSV (header skipped)
    annotations = {}
    with open(video_annotations) as f:
        reader = csv.reader(f, delimiter=";")
        next(reader)
        for line in reader:
            annotations["%s.avi" % line[0]] = line[1]
    with open(output_file, "w") as f:
        file_paths = sorted(glob.glob("%s/*" % data_dir), key=lambda x: x.split("/")[-2])
        f.write("file-name;main-finding;width;height;number-of-frames;fps;length;kilobytes\n")
        for file_path in file_paths:
            file_name = os.path.basename(file_path)
            video = cv2.VideoCapture(file_path)
            number_of_frames = int(video.get(cv2.CAP_PROP_FRAME_COUNT))
            video_width = int(video.get(cv2.CAP_PROP_FRAME_WIDTH))
            video_height = int(video.get(cv2.CAP_PROP_FRAME_HEIGHT))
            fps = int(video.get(cv2.CAP_PROP_FPS))
            # free the capture handle; the original leaked one per video
            video.release()
            finding = annotations.get(file_name, "None")
            # unreadable files report 0 fps; guard the division
            length = number_of_frames // fps if fps else 0
            kilobytes = os.path.getsize(file_path) >> 10
            f.write("%s;%s;%s;%s;%s;%s;%s;%s\n" % (file_name, finding, video_width, video_height, number_of_frames, fps, length, kilobytes))
if __name__ == "__main__":
    # parse CLI options and build the annotation summary file
    args = parser.parse_args()
    gather_images(args.data_dir, args.video_annotations, args.output_file)
# coding:utf-8
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import time
import logging
import json
from aliyunsdkcore.vendored.six.moves.urllib.request import urlopen
from aliyunsdkcore.auth.signers.signer import Signer
from aliyunsdkcore.acs_exception.exceptions import ServerException
logger = logging.getLogger(__name__)
class EcsRamRoleSigner(Signer):
    """Signer that obtains temporary STS credentials for an ECS instance RAM
    role from the instance metadata service, refreshing them periodically.
    """

    # assumed lifetime (seconds) of a metadata-service session token
    _SESSION_PERIOD = 3600
    # refresh once 90% of the session period has elapsed
    _REFRESH_SCALE = 0.9

    def __init__(self, ecs_ram_role_credential):
        # credential object; only its role_name is read here
        self._credential = ecs_ram_role_credential
        # epoch seconds of the last refresh; 0 forces a refresh on first sign
        self._last_update_time = 0

    def sign(self, region_id, request):
        """Attach the security token to *request* and return its signed
        header and URL, refreshing session credentials first if stale."""
        self._check_session_credential()
        session_ak, session_sk, token = self._session_credential
        # RPC-style APIs carry the token as a query parameter,
        # ROA-style as a header
        if request.get_style() == 'RPC':
            request.add_query_param("SecurityToken", token)
        else:
            request.add_header("x-acs-security-token", token)
        header = request.get_signed_header(region_id, session_ak, session_sk)
        url = request.get_url(region_id, session_ak, session_sk)
        return header, url

    def _check_session_credential(self):
        # refresh when more than _REFRESH_SCALE of the session has elapsed
        now = int(time.time())
        if now - self._last_update_time > (self._SESSION_PERIOD * self._REFRESH_SCALE):
            self._refresh_session_ak_and_sk()

    def _refresh_session_ak_and_sk(self):
        """Fetch fresh STS credentials for the instance's RAM role from the
        ECS metadata endpoint (100.100.100.200).

        Raises ServerException when the metadata service reports failure.
        """
        # NOTE(review): urlopen has no timeout — a hung metadata service
        # would block the calling request indefinitely; confirm before
        # changing the behavior.
        request_url = "http://100.100.100.200/latest/meta-data/ram/security-credentials/" + \
            self._credential.role_name
        content = urlopen(request_url).read()
        response = json.loads(content.decode('utf-8'))
        if response.get("Code") != "Success":
            message = 'refresh Ecs sts token err, code is ' + \
                response.get("Code")
            raise ServerException(
                response.get("Code"), message, None)
        session_ak = response.get("AccessKeyId")
        session_sk = response.get("AccessKeySecret")
        token = response.get("SecurityToken")
        self._session_credential = session_ak, session_sk, token
        self._last_update_time = int(time.time())
|
from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField, BooleanField, SubmitField, TextAreaField, SelectField,\
    FileField, DateField, FormField
from wtforms.validators import DataRequired, NoneOf, AnyOf, ValidationError

from app import db
def pick_option(form, field):
    """WTForms inline validator: reject the "Choose" placeholder option.

    @param form the form being validated (unused, required by WTForms)
    @param field the field whose data is checked
    @raises ValidationError when the field still holds the placeholder
    """
    # ValidationError comes from wtforms.validators; the original raised it
    # without importing it (NameError at validation time) and also left a
    # debug print in place
    if field.data == "Choose":
        raise ValidationError('Please choose an option')
class LoginForm(FlaskForm):
    """Sign-in form: username/password with an optional remember-me flag."""
    username = StringField('Username', validators=[DataRequired()])
    password = PasswordField('Password', validators=[DataRequired()])
    remember_me = BooleanField('Remember Me')
    submit = SubmitField('Sign In')
class BasicInfo(FlaskForm):
    """Basic identity of a cell line: name, cell type, species and tissue."""
    name = StringField('Name', validators=[DataRequired()])
    celltype = StringField('Cell type', validators=[DataRequired()])
    # NOTE(review): NoneOf rejects 'Mouse' even though it is offered as a
    # choice — confirm with the form owner whether that is intentional
    species = SelectField('Species', validators=[NoneOf(values=["Choose", "Mouse"])],
                          choices=['Choose', 'Mouse', 'Human', 'Hamster', 'Drosophila'])
    # label fixed: was 'Name', a copy-paste slip from the field above
    tissue = StringField('Tissue', validators=[DataRequired()])
class GeneticInfo(FlaskForm):
    """Genetic-modification details of a cell line."""
    modmethod = StringField('Modification method', validators=[DataRequired()])
    locus = StringField('Locus/Gene', validators=[DataRequired()])
    tag = StringField('Epitope tag', validators=[DataRequired()])
    # the 'Choose' placeholder is rejected by the NoneOf validator
    modtype = SelectField('Modification type', choices=['Choose', 'Knockout', 'Knockin', 'Mutation', 'Transgene'],
                          validators=[NoneOf(['Choose'])])
    mutation = StringField('Mutation', validators=[DataRequired()])
    transgene = StringField('Transgene', validators=[DataRequired()])
    resistance = StringField('Resistance', validators=[DataRequired()])
    inducible = SelectField('Dox inducible', choices=['yes', 'no'], validators=[DataRequired()])
class CultureInfo(FlaskForm):
    """Culture conditions and safety/QC status of a cell line."""
    bsl = SelectField('Biosafety level', choices=['1', '2', '3'], validators=[DataRequired()], default='1')
    mycoplasma = SelectField('Mycoplasma status', choices=['negative', 'positive'], validators=[DataRequired()])
    # NOTE(review): "Mycoplama" label typo is user-visible; left unchanged
    # here since labels are runtime strings
    pcrdate = DateField("Mycoplama PCR date")
    culturetype = SelectField('Culture type', choices=['adherent', 'suspension'], validators=[DataRequired()], default='adherent')
    medium = StringField('Culture medium', validators=[DataRequired()])
    notes = TextAreaField('Notes')
class AdditionalInfo(FlaskForm):
    """Supporting documents and free-text notes for a cell line entry."""
    protocol = TextAreaField('Protocol')
    wb = FileField('Western Blot')
    pcr = FileField('PCR gel')
    sequencing_info = FileField('Sequencing data')
    facs = FileField('FACS data')
    description = TextAreaField('Description')
    comments = TextAreaField('Comments')
    publication = StringField('Published in', validators=[DataRequired()])
class CreateNewCellLine(FlaskForm):
    """Composite form aggregating the four cell-line sub-forms."""
    basic_information = FormField(BasicInfo)
    genetic_information = FormField(GeneticInfo)
    culture_information = FormField(CultureInfo)
    additional_information = FormField(AdditionalInfo)
    submit = SubmitField('Create')
#!/usr/bin/python
'''
creates bundle graph from filtered multigraph
'''
### imports ###
import sys
import os
import logging
import networkx as nx
import numpy as np
import scipy.stats as stats
import cPickle
import helpers.io as io
import helpers.misc as misc
### definitions ###
### functions ###
def compress_edges(MG, p, q):
    ''' compresses the edges

    Summarizes the parallel edges between nodes p and q of multigraph MG by
    bundle state (0-3). Returns (bcnts, bdists, devs, means, mins, maxs):
    bcnts[i] is the number of edges in state i, bdists[i] the array of their
    distances, and devs/means/mins/maxs the per-state distance statistics
    (-1 for states with no edges).
    '''
    # count edges per state.
    bcnts = [0, 0, 0, 0]
    for z in MG[p][q]:
        bcnts[MG[p][q][z]['state']] += 1

    # build numpy arrays for each distance type.
    # dtype=float: np.float was only an alias for the builtin float and was
    # removed in NumPy 1.24, which made the original crash there.
    bdists = list()
    for i in range(4):
        bdists.append(np.zeros(bcnts[i], dtype=float))

    # populate array with distances.
    bidxs = [0, 0, 0, 0]
    for z in MG[p][q]:
        state = MG[p][q][z]['state']
        dist = MG[p][q][z]['dist']
        bdists[state][bidxs[state]] = dist
        bidxs[state] += 1

    # compute per-state summary statistics; -1 marks an empty state.
    devs = list()
    means = list()
    mins = list()
    maxs = list()
    for i in range(4):
        if bdists[i].shape[0] <= 0:
            devs.append(-1)
            means.append(-1)
            mins.append(-1)
            maxs.append(-1)
        else:
            devs.append(np.std(bdists[i]))
            means.append(np.mean(bdists[i]))
            mins.append(bdists[i].min())
            maxs.append(bdists[i].max())

    # return summaries.
    return bcnts, bdists, devs, means, mins, maxs
def _load_reps(file_path):
''' loads repeat info from cpickle'''
# no weights.
if file_path == None:
return dict()
# try dictionary emthod.
if os.path.isdir(file_path) == True:
reps = dict()
for f in os.listdir(file_path):
n = f.replace(".npy","")
try:
reps[n] = np.load("%s/%s" % (file_path, f))
except:
continue
return reps
# get weights.
try:
with open(file_path) as fin:
return cPickle.load(fin)
except:
logging.warning("unable to load repeat pickle, ignoring weights")
return dict()
def create_bundles(paths, args):
    """ creates bundles

    Collapses the parallel edges of the multigraph in paths.edge_file into
    single weighted "bundle" edges, optionally slims bundles touching
    repeat-rich contigs, and writes the result to paths.bundle_file.

    Parameters
    ----------
    paths.edge_file : string
    args.bundle_size : int
    args.pthresh : int
    args.bup : int
    """
    # load repeat annotations.
    repcnts = _load_reps(args.rep_file)
    # load the multi graph.
    MG = nx.read_gpickle(paths.edge_file)
    # create bundle graph.
    BG = nx.Graph()
    # add nodes (networkx 1.x API: attribute dict passed positionally).
    for n in MG.nodes():
        BG.add_node(n, MG.node[n])
    # build set of adjacencies: each undirected pair exactly once.
    adjset = set()
    for p, nbrs in MG.adjacency_iter():
        for q in nbrs:
            adjset.add(tuple(sorted([p, q])))
    # compute bundles from adjacencies.
    zerod = 0  # bundles zeroed for large negative average distance
    zcnt = 0
    ztot = len(adjset)
    for p, q in adjset:
        #logging.info("progress: %d of %d" % (zcnt, ztot))
        zcnt += 1
        # sanity check: zero-coverage nodes should not occur here.
        if MG.node[p]['cov'] == 0.0 or MG.node[q]['cov'] == 0.0:
            logging.error("how can this happen?")
            sys.exit()
        # bundle size check: skip weakly supported adjacencies.
        bsize = len(MG[p][q])
        if bsize < args.bundle_size:
            continue
        # group parallel edges by insert size.
        groups = dict()
        std_devs = dict()
        for z in MG[p][q]:
            ins_size = MG[p][q][z]['ins_size']
            if ins_size not in groups:
                groups[ins_size] = list()
                std_devs[ins_size] = MG[p][q][z]['std_dev']
            groups[ins_size].append(z)
        # loop over groups.
        for ins_size in groups:
            # compress info.
            # NOTE(review): compress_edges summarizes ALL edges between p and
            # q, not just this group's edges — confirm that is intended.
            bcnts, bdists, devs, means, mins, maxs = compress_edges(MG, p, q)
            # coverage-similarity weight: 1 when coverages match exactly.
            cov = 1 - abs(MG.node[p]['cov'] - MG.node[q]['cov']) / (MG.node[p]['cov'] + MG.node[q]['cov'])
            # swap bdists for python lists.
            for i in range(len(bdists)):
                bdists[i] = list(bdists[i])
            # add start stop info.
            poses1 = list()
            poses2 = list()
            for z in MG[p][q]:
                tmp = MG[p][q][z]
                poses1.append((tmp['left1'], tmp['right1']))
                poses2.append((tmp['left2'], tmp['right2']))
            # create bundle; BG is a simple graph, so a second insert size
            # between the same pair is a fatal error.
            if BG.has_edge(p, q):
                logging.error("can't have multiple insert sizes between same node")
                sys.exit(1)
            # zero out negative distances.
            avgs = [np.average(bdists[i]) for i in range(4)]
            for i in range(4):
                # NOTE(review): `== np.nan` is always False (NaN != NaN);
                # np.isnan(avgs[i]) was probably intended here.
                if avgs[i] == np.nan:
                    bcnts[i] = 0.0
                if avgs[i] < -2 * args.bundle_size:
                    bcnts[i] = 0.0
                    zerod += 1
            # don't add it if no support.
            if np.sum(bcnts) == 0:
                continue
            #BG.add_edge(p, q, bcnts=bcnts, bdists=bdists, devs=devs, means=means, mins=mins, maxs=maxs, ins_size=ins_size, std_dev=std_devs[ins_size], poses1=poses1, poses2=poses2)
            BG.add_edge(p, q, bcnts=bcnts, bdists=bdists, ins_size=ins_size, std_dev=std_devs[ins_size], cov=cov)
    # start the slimming.
    logging.info("starting repeat based slimming")
    # do repeat mods.
    track_upped = 0
    track_remed = 0
    track_ogedg = len(BG.edges())
    idxs = np.zeros(1)
    if repcnts != dict():
        # create repeat distrib: average repeat count near each contig's ends.
        repavgs = np.zeros(len(repcnts), dtype=np.dtype([('name', 'S256'), ('avg', np.float)]))
        i = 0
        for name in repcnts:
            # save the name.
            repavgs[i]['name'] = name
            # skip no repeat info.
            if name not in repcnts or repcnts[name] == None:
                repavgs[i]['avg'] = 0
                i += 1
                continue
            # take the average over ins_size + 6 (std_dev)
            d = args.ins_size + (6 * args.std_dev)
            if repcnts[name].shape[0] < d:
                repavgs[i]['avg'] = np.average(repcnts[name])
            else:
                # only the first and last d positions of the contig matter
                # (Python 2: list concatenation of two range() lists).
                r = range(0, d) + range(len(repcnts[name]) - d, len(repcnts[name]))
                repavgs[i]['avg'] = np.average(repcnts[name][r])
            i += 1
        # compute the cutoff threshold at the args.pthresh percentile.
        score = stats.scoreatpercentile(repavgs[:]['avg'], args.pthresh)
        idxs = repavgs[:]['avg'] > score
        # look at each bundle and see if the repeats necessitates attention.
        for p, q in BG.edges():
            # get index of pairs.
            idp = np.where(repavgs[:]['name'] == p)[0]
            idq = np.where(repavgs[:]['name'] == q)[0]
            # skip if both not high.
            if idxs[idp] == False and idxs[idq] == False:
                continue
            # get score.
            scp = repavgs[idp]['avg']
            scq = repavgs[idq]['avg']
            # check if this bundle needs attention.
            if max(scp, scq) > score:
                track_upped += 1
                # it gets its minumm bundle size upped.
                for i in range(len(BG[p][q]['bcnts'])):
                    # clear if it doesn't meet criteria.
                    if BG[p][q]['bcnts'][i] < args.bundle_size + args.bup:
                        BG[p][q]['bcnts'][i] = 0
                # remove bundle if no support.
                if np.sum(BG[p][q]['bcnts']) == 0:
                    track_remed += 1
                    BG.remove_edge(p, q)
    else:
        logging.info('no repeat information supplied')
    # add repeat weights.
    # NOTE(review): this loop runs even when repcnts is empty, and the
    # repcnts[p] lookups below sit OUTSIDE the try block — a missing contig
    # would raise KeyError here. Confirm rep_file is always supplied.
    for p, q in BG.edges():
        # create weight.
        BG[p][q]['u'] = [0.0] * 4
        # sum weights.
        for z in MG[p][q]:
            left1 = MG[p][q][z]['left1']
            left2 = MG[p][q][z]['left2']
            right1 = MG[p][q][z]['right1']
            right2 = MG[p][q][z]['right2']
            cntl = np.sum(repcnts[p][left1:left2])
            cntr = np.sum(repcnts[p][right1:right2])
            try:
                # fraction of non-repeat positions on each side.
                propl = 1.0 - (float(cntl) / float(left2 - left1))
                propr = 1.0 - (float(cntr) / float(right2 - right1))
            except:
                continue
            # add average.
            p_k = (propl + propr) / 2.0
            # add it.
            BG[p][q]['u'][MG[p][q][z]['state']] += p_k
    # note the modifications due to filtering.
    logging.info("contigs with repeat regions in %.2f threshold: %i of %i" % (args.pthresh, np.sum(idxs), len(idxs)))
    logging.info("bundles effected by repeats: %i of %i" % (track_upped, track_ogedg))
    logging.info("bundles removed by repeats: %i of %i" % (track_remed, track_ogedg))
    logging.info("bundles removed by neg dist: %i" % (zerod))
    logging.info("total bundles: %i" % (len(BG.edges())))
    # write to disk.
    nx.write_gpickle(BG, paths.bundle_file)
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2017-08-22 00:57
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated migration: removes the ``producto`` field from the
    ``Industria`` model of the ``Producto`` app."""

    dependencies = [
        ('Producto', '0003_auto_20170821_1059'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='industria',
            name='producto',
        ),
    ]
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.