text stringlengths 38 1.54M |
|---|
#!/usr/bin/env python
import unittest
import nagios
from component_health import parse_response
class TestParseResponse(unittest.TestCase):
    """Checks that parse_response maps JSON status strings to nagios codes."""

    def runTest(self):
        # (status string in payload, expected nagios code) pairs.
        cases = [
            ("ok", nagios.OK),
            ("warn", nagios.WARN),
            ("crit", nagios.CRIT),
            ("", nagios.UNKNOWN),
        ]
        for status, expected_code in cases:
            payload = ('{"status": "%s", "summary": "Test Summary",'
                       ' "details": []}' % status)
            self.assertEqual(parse_response(payload),
                             (expected_code, 'Test Summary', []))
        # An empty body is not valid JSON and must raise.
        self.assertRaises(ValueError, parse_response, '')
if __name__ == "__main__":
    # Allow running this test module directly from the command line.
    unittest.main()
|
from unittest import TestCase
from eemeter.uploader import api
import pandas as pd
from datetime import datetime
import pytz
class APITestCase(TestCase):
    """Exercises the private data-shaping helpers in eemeter.uploader.api."""

    def setUp(self):
        # Single-row fixtures carrying the minimum required columns.
        self.minimal_project_df = self._minimal_project_df_fixture()
        self.minimal_consumption_df = self._minimal_consumption_df_fixture()

    def _minimal_project_df_fixture(self):
        # One project with location, weather station and analysis periods.
        rows = [{
            "project_id": "ID_1",
            "zipcode": "01234",
            "weather_station": "012345",
            "latitude": 89.0,
            "longitude": -42.0,
            "baseline_period_end": datetime(2015, 1, 1),
            "reporting_period_start": datetime(2015, 2, 1),
        }]
        return pd.DataFrame(rows)

    def _minimal_consumption_df_fixture(self):
        # One day of estimated electricity consumption for project ID_1.
        rows = [{
            "project_id": "ID_1",
            "start": datetime(2015, 1, 1),
            "end": datetime(2015, 1, 2),
            "fuel_type": "electricity",
            "unit_name": "kWh",
            "value": 0,
            "estimated": True,
        }]
        return pd.DataFrame(rows)

    def test_get_project_attribute_keys_data(self):
        # With no extra columns there are no project attribute keys.
        keys = api._get_project_attribute_keys_data(self.minimal_project_df)
        assert keys == []

    def test_get_project_data(self):
        generator = api._get_project_data(self.minimal_project_df, [])
        project_data, project_attributes_data = next(generator)
        assert project_data["project_id"] == "ID_1"
        assert project_data["zipcode"] == "01234"
        assert project_data["weather_station"] == "012345"
        assert project_data["latitude"] == 89.0
        assert project_data["longitude"] == -42.0
        # Timestamps are serialized as ISO-8601 with a +0000 offset.
        assert project_data["baseline_period_end"] == "2015-01-01T00:00:00+0000"
        assert project_data["reporting_period_start"] == "2015-02-01T00:00:00+0000"
        assert project_attributes_data == []

    def test_get_consumption_records_data(self):
        generator = api._get_consumption_data(self.minimal_consumption_df)
        consumption_metadata, consumption_records = next(generator)
        assert consumption_metadata["fuel_type"] == "E"
        assert consumption_metadata["energy_unit"] == "KWH"
        assert consumption_metadata["project_id"] == "ID_1"
        assert consumption_records[0]["value"] == 0.0
        assert consumption_records[0]["estimated"] == True
        assert consumption_records[0]["start"] == "2015-01-01T00:00:00+0000"
|
class FuelConsumption:
    """Tracks liquid-fuel and oxidizer consumption rates via kRPC streams.

    Rates are finite differences: |delta resource| / |delta mission time|
    between successive calls to the compute_* methods.
    """

    def __init__(self, konnection):
        # kRPC streams polling the current resource amounts by name.
        self.lf_stream = konnection.conn.add_stream(konnection.vessel.resources.amount, 'LiquidFuel')
        self.ox_stream = konnection.conn.add_stream(konnection.vessel.resources.amount, 'Oxidizer')
        self.met_stream = konnection.met_stream
        # Last sampled values/times used to form the finite differences.
        self.last_ox_val = 0
        self.last_lf_val = 0
        self.last_lf_time = 0
        self.last_ox_time = 0

    def compute_lf_consumption(self):
        """Return liquid fuel consumed per second since the previous call."""
        current_lf = self.lf_stream()
        current_time = self.met_stream()
        if current_time == 0:
            # Mission clock not running yet; no meaningful rate.
            return 0
        ret = abs((current_lf - self.last_lf_val) / (current_time - self.last_lf_time))
        self.last_lf_val = current_lf
        self.last_lf_time = current_time
        return ret

    def compute_ox_consumption(self):
        """Return oxidizer consumed per second since the previous call.

        Fix: the original sampled self.lf_stream() here (copy-paste bug),
        so the oxidizer rate was computed from liquid-fuel readings.
        """
        current_ox = self.ox_stream()
        current_time = self.met_stream()
        if current_time == 0:
            return 0
        ret = abs((current_ox - self.last_ox_val) / (current_time - self.last_ox_time))
        self.last_ox_val = current_ox
        self.last_ox_time = current_time
        return ret
# -*- coding: utf-8 -*-
"""WebSocket Address
=======================
The :mod:`darc.sites.ws` module is customised to
handle WebSocket addresses.
"""
import darc.typing as typing
from darc.error import LinkNoReturn
from darc.link import Link
from darc.proxy.ws import save_ws
from darc.sites._abc import BaseSite
class WebSocket(BaseSite):
    """Site handler for WebSocket (``ws://`` / ``wss://``) addresses."""

    @staticmethod
    def crawler(timestamp: typing.Datetime, session: typing.Session, link: Link) -> typing.NoReturn:  # pylint: disable=unused-argument
        """Crawler hook for WebSocket addresses.

        Records the address via :func:`darc.proxy.ws.save_ws`, then signals
        the crawler to skip it by raising.

        Args:
            timestamp: Timestamp of the worker node reference.
            session (:class:`requests.Session`): Session object with proxy settings.
            link: Link object to be crawled.

        Raises:
            LinkNoReturn: This link has no return response.
        """
        save_ws(link)
        raise LinkNoReturn(link)

    @staticmethod
    def loader(timestamp: typing.Datetime, driver: typing.Driver, link: Link) -> typing.NoReturn:  # pylint: disable=unused-argument
        """Not implemented -- WebSocket addresses cannot be loaded in a browser driver.

        Raises:
            LinkNoReturn: This hook is not implemented.
        """
        raise LinkNoReturn(link)
|
# -*- coding: utf-8 -*-
"""
Created on Mon Mar 19 13:36:31 2018
@author: Han
"""
import pandas as pd
import numpy as np
import os
'''return model'''
def CorrelationTest(factordataset, factorlist, stock, time, M):
    """Rolling-window absolute-correlation statistics for a factor panel.

    For each window of M distinct dates, computes per-date absolute
    correlation matrices of the *factorlist* columns, then aggregates the
    window with per-pair mean and median.

    Args (NOTE(review): inferred from usage -- confirm with caller):
        factordataset: CSV path or pandas DataFrame of factor exposures.
        factorlist: column names of the factors to correlate.
        stock: name of the stock-identifier column (dropped before use).
        time: name of the date column.
        M: window length, counted in distinct dates.

    Returns:
        dict mapping window-end date -> DataFrame of mean/median |corr|.
    """
    if type(factordataset) == str:
        # CSV input: parse the date column on read; drop the exported index.
        df = pd.read_csv(factordataset, parse_dates=[str(time)])
        del df['Unnamed: 0']
    elif type(factordataset) == pd.DataFrame:
        df = factordataset
        df[str(time)] = pd.to_datetime(df[str(time)])
    # Stock identifiers are not needed for the correlation computation.
    del df[str(stock)]

    def corrcoef(df, factorlist):
        # Correlation matrix of the factor columns for one date group.
        temp = df[factorlist]
        temp = pd.DataFrame(np.corrcoef(temp.T))
        temp.columns = factorlist
        temp.index = factorlist
        return(temp)

    # Per-date absolute correlation matrices, stacked by date.
    temp = df.groupby(str(time)).apply(lambda x: corrcoef(x, factorlist))
    abstemp = abs(temp)
    abstemp = abstemp.reset_index()
    datelist = list(abstemp[str(time)].drop_duplicates().sort_values())
    corrall = dict()
    for i in range(0, (len(datelist) - M)):
        # Window (datelist[i], datelist[i+M]]: aggregate the matrices in it.
        temp = abstemp[(abstemp[str(time)] > datelist[i]) & (abstemp[str(time)] <= datelist[i + M])]
        del temp[str(time)]
        # 'level_1' is the factor-name level created by reset_index above.
        temp3 = temp.groupby('level_1').median()
        namelist = list(temp3.columns)
        temp3.columns = [i + '_median' for i in temp3.columns]
        temp2 = temp.groupby('level_1').mean()
        temp2 = temp2[namelist]
        temp2.columns = [i + '_mean' for i in temp2.columns]
        temp = pd.merge(temp2, temp3, right_index=True, left_index=True)
        temp = temp.loc[namelist].reset_index()
        corrall[datelist[i + M]] = temp
    return corrall
if __name__ == '__main__' :
    # Example run against the preprocessed factor CSV shipped with the project.
    factordataset = os.path.join(os.path.abspath('.'),'Data','Factor_preprocessing.csv')
    # 12-date rolling window over the standard Barra-style factor columns.
    test = CorrelationTest(factordataset,['beta', 'BP', 'earningsfactor', 'leveragefactor', 'RSTR','Non-Linear Size', 'residualvolatilityfactor', 'Size'],'Stkcd','Trddt',12)
|
import io
import math
import time
import sys
import random
import signal
import subprocess
import pprint
import socket
import threading
import os
from errno import ESRCH
from os import kill, path, unlink, path, listdir, remove
from rpc_commands_lib import Commands_Rpc
from time import sleep
from uuid import uuid4
# Bytes per MiB; RPC responses report sizes in bytes, tests reason in MiB.
MEGABYTE = 1024 * 1024
# PID of the most recently spawned fio process (-1 = none); set by run_fio().
current_fio_pid = -1
# ## Objective
# The purpose of these tests is to verify the possibility of using lvol configuration in SPDK.
#
# ## Methodology
# Configuration in test is to be done using example stub application.
# All management is done using RPC calls, including logical volumes management.
# All tests are performed using malloc backends.
# One exception to malloc backends are tests for logical volume
# tasting - these require persistent memory like NVMe backend.
#
# Tests will be executed as scenarios - sets of smaller test steps
# in which return codes from RPC calls are validated.
# Some configuration calls may also be validated by use of
# "get_*" RPC calls, which provide additional information for verifying
# results.
#
# Tests with thin provisioned lvol bdevs, snapshots and clones are using nbd devices.
# Before writing/reading to lvol bdev, bdev is installed with rpc nbd_start_disk.
# After finishing writing/reading, rpc nbd_stop_disk is used.
def is_process_alive(pid):
    """Return 0 if a process with *pid* can be signalled, 1 otherwise.

    NOTE: the return convention is inverted relative to the name
    (0 == alive) because callers accumulate the result as a fail count.

    Fix: catch OSError specifically instead of a blanket Exception --
    os.kill only raises OSError subclasses (ProcessLookupError,
    PermissionError) for an invalid/foreign pid.
    """
    try:
        # Signal 0 performs existence/permission checking only.
        os.kill(pid, 0)
    except OSError:
        return 1
    return 0
def get_fio_cmd(nbd_disk, offset, size, rw, pattern, extra_params=""):
    """Build a fio command line for the given device region and workload.

    When *pattern* is truthy, verification options checking that pattern
    are appended; *extra_params* is spliced in verbatim.
    """
    verify_opts = ""
    if pattern:
        verify_opts = ("--do_verify=1 --verify=pattern --verify_pattern=%s"
                       " --verify_state_save=0" % pattern)
    template = ("fio --name=fio_test --filename=%(file)s --offset=%(offset)s --size=%(size)s"
                " --rw=%(rw)s --direct=1 %(extra_params)s %(pattern)s")
    return template % {"file": nbd_disk, "offset": offset, "size": size,
                       "rw": rw, "pattern": verify_opts,
                       "extra_params": extra_params}
def run_fio(fio_cmd, expected_ret_value):
    """Run *fio_cmd* in a shell and compare its exit status to expectation.

    Returns 0 when the exit status equals *expected_ret_value*, 1 otherwise.
    Side effect: records the spawned PID in the module-global
    current_fio_pid so a signal handler can find it.
    """
    global current_fio_pid
    try:
        child = subprocess.Popen([fio_cmd], shell=True)
        current_fio_pid = child.pid
        child.wait()
        status = child.returncode
    except Exception as e:
        print("ERROR: Fio test ended with unexpected exception.")
        status = 1
    if status == expected_ret_value:
        return 0
    if status == 0:
        print("ERROR: Fio test ended with unexpected success")
    else:
        print("ERROR: Fio test ended with unexpected failure")
    return 1
class FioThread(threading.Thread):
    """Runs one fio job in a background thread.

    After the thread finishes, ``self.rv`` holds 0 when the fio exit status
    matched ``expected_ret_value`` and 1 otherwise.
    """

    def __init__(self, nbd_disk, offset, size, rw, pattern, expected_ret_value,
                 extra_params=""):
        super(FioThread, self).__init__()
        # Build the full fio command up front; run() only executes it.
        self.fio_cmd = get_fio_cmd(nbd_disk, offset, size, rw, pattern,
                                   extra_params=extra_params)
        self.rv = 1  # pessimistic default until run() completes
        self.expected_ret_value = expected_ret_value

    def run(self):
        """Thread body: execute fio and record the comparison result."""
        print("INFO: Starting fio")
        self.rv = run_fio(self.fio_cmd, self.expected_ret_value)
        print("INFO: Fio test finished")
def test_counter():
    """Return the number of test-case methods defined on TestCases."""
    return sum(1 for attr in dir(TestCases) if 'test_case' in attr)
def case_message(func):
def inner(*args, **kwargs):
test_name = {
# bdev_lvol_delete_lvstore - positive tests
254: 'destroy_after_bdev_lvol_resize_positive',
255: 'delete_lvol_store_persistent_positive',
551: 'delete_lvol_bdev',
552: 'bdev_lvol_delete_lvstore_with_clones',
553: 'unregister_lvol_bdev',
# logical volume clear_method test
850: 'clear_method_none',
851: 'clear_method_unmap',
}
num = int(func.__name__.strip('test_case')[:])
print("************************************")
print("START TEST CASE {name}".format(name=test_name[num]))
print("************************************")
fail_count = func(*args, **kwargs)
print("************************************")
if not fail_count:
print("END TEST CASE {name} PASS".format(name=test_name[num]))
else:
print("END TEST CASE {name} FAIL".format(name=test_name[num]))
print("************************************")
return fail_count
return inner
class TestCases(object):
    """RPC-driven SPDK logical-volume (lvol) test scenarios.

    Each ``test_caseNNN`` method returns a fail count (0 == pass) and is
    wrapped by the ``case_message`` decorator, which prints banners.
    All SPDK interaction goes through ``self.c`` (an RPC command wrapper).
    """

    def __init__(self, rpc_py, total_size, block_size, base_dir_path, app_path):
        # rpc_py: path/command of the rpc.py helper used for all RPC calls.
        self.c = Commands_Rpc(rpc_py)
        self.total_size = total_size    # malloc bdev size (MiB)
        self.block_size = block_size    # bdev block size (bytes)
        self.cluster_size = None        # lvol store cluster size (None = default)
        self.path = base_dir_path
        self.app_path = app_path
        self.lvs_name = "lvs_test"
        self.lbd_name = "lbd_test"
        self.vhost_config_path = path.join(path.dirname(sys.argv[0]), 'vhost.conf')

    def _gen_lvs_uuid(self):
        # Random UUID for a not-yet-existing lvol store.
        return str(uuid4())

    def _gen_lvb_uuid(self):
        # Random "<uuid>_<number>" name for a not-yet-existing lvol bdev.
        return "_".join([str(uuid4()), str(random.randrange(9999999999))])

    def compare_two_disks(self, disk1, disk2, expected_ret_value):
        # Compare two devices with cmp(1); return 0 when the outcome matches
        # expected_ret_value (0 == identical), 1 otherwise.
        cmp_cmd = "cmp %s %s" % (disk1, disk2)
        try:
            process = subprocess.check_output(cmp_cmd, stderr=subprocess.STDOUT, shell=True)
            rv = 0
        except subprocess.CalledProcessError as ex:
            # Non-zero exit from cmp: the disks differ.
            rv = 1
        except Exception as e:
            print("ERROR: Cmp ended with unexpected exception.")
            rv = 1
        if expected_ret_value == rv:
            return 0
        elif rv == 0:
            print("ERROR: Cmp ended with unexpected success")
        else:
            print("ERROR: Cmp ended with unexpected failure")
        return 1

    def run_fio_test(self, nbd_disk, offset, size, rw, pattern, expected_ret_value=0):
        # Thin wrapper: build the fio command and run it synchronously.
        fio_cmd = get_fio_cmd(nbd_disk, offset, size, rw, pattern)
        return run_fio(fio_cmd, expected_ret_value)

    def _stop_vhost(self, pid_path):
        # Terminate the vhost app whose PID is stored at pid_path.
        # Returns 0 on clean shutdown, 1 otherwise.
        with io.open(pid_path, 'r') as vhost_pid:
            pid = int(vhost_pid.readline())
            if pid:
                try:
                    kill(pid, signal.SIGTERM)
                    for count in range(30):
                        sleep(1)
                        kill(pid, 0)  # probe; raises ESRCH once the process exits
                except OSError as err:
                    if err.errno == ESRCH:
                        pass
                    else:
                        return 1
                else:
                    # Still alive after 30 probes -> failure.
                    return 1
            else:
                return 1
        return 0

    def _start_vhost(self, vhost_path, pid_path):
        # Launch the vhost app in the background, then wait for its PID file
        # and RPC Unix socket; returns 0 on success, 1 on timeout.
        subprocess.call("{app} -f "
                        "{pid} &".format(app=vhost_path,
                                         pid=pid_path), shell=True)
        for timeo in range(10):
            if timeo == 9:
                print("ERROR: Timeout on waiting for app start")
                return 1
            if not path.exists(pid_path):
                print("Info: Waiting for PID file...")
                sleep(1)
                continue
            else:
                break
        # Wait for RPC to open
        sock = socket.socket(socket.AF_UNIX)
        for timeo in range(30):
            if timeo == 29:
                print("ERROR: Timeout on waiting for RPC start")
                return 1
            try:
                sock.connect("/var/tmp/spdk.sock")
                break
            except socket.error as e:
                print("Info: Waiting for RPC Unix socket...")
                sleep(1)
                continue
            else:
                # NOTE(review): unreachable -- the try block breaks on success.
                sock.close()
                break
        with io.open(pid_path, 'r') as vhost_pid:
            pid = int(vhost_pid.readline())
            if not pid:
                return 1
        return 0

    def get_lvs_size(self, lvs_name="lvs_test"):
        # Free space of the lvol store, in MiB.
        lvs = self.c.bdev_lvol_get_lvstores(lvs_name)[0]
        return int(int(lvs['free_clusters'] * lvs['cluster_size']) / MEGABYTE)

    def get_lvs_divided_size(self, split_num, lvs_name="lvs_test"):
        # Actual size of lvol bdevs on creation is rounded up to multiple of cluster size.
        # In order to avoid over provisioning, this function returns
        # lvol store size in MiB divided by split_num - rounded down to multiple of cluster size.
        lvs = self.c.bdev_lvol_get_lvstores(lvs_name)[0]
        return int(int(lvs['free_clusters'] / split_num) * lvs['cluster_size'] / MEGABYTE)

    def get_lvs_cluster_size(self, lvs_name="lvs_test"):
        # Cluster size of the lvol store, in MiB.
        lvs = self.c.bdev_lvol_get_lvstores(lvs_name)[0]
        return int(int(lvs['cluster_size']) / MEGABYTE)

    @case_message
    def test_case254(self):
        """
        destroy_after_bdev_lvol_resize_positive

        Positive test for destroying a logical volume after resizing.
        Call bdev_lvol_delete_lvstore with correct logical_volumes name.
        """
        # Create malloc bdev
        base_name = self.c.bdev_malloc_create(self.total_size,
                                              self.block_size)
        # Construct lvol store on created malloc bdev
        uuid_store = self.c.bdev_lvol_create_lvstore(base_name,
                                                     self.lvs_name)
        # Check correct uuid values in response bdev_lvol_get_lvstores command
        fail_count = self.c.check_bdev_lvol_get_lvstores(base_name, uuid_store,
                                                         self.cluster_size)
        size = self.get_lvs_divided_size(4)
        # bdev_lvol_create on correct lvs_uuid and size is
        # equal to one quarter of size malloc bdev
        uuid_bdev = self.c.bdev_lvol_create(uuid_store,
                                            self.lbd_name,
                                            size)
        # check size of the lvol bdev
        fail_count += self.c.check_bdev_get_bdevs_methods(uuid_bdev, size)
        sz = size + 4
        # Resize_lvol_bdev on correct lvs_uuid and size is
        # equal to one quarter of size malloc bdev plus 4 MB
        self.c.bdev_lvol_resize(uuid_bdev, sz)
        # check size of the lvol bdev by command RPC : bdev_get_bdevs
        fail_count += self.c.check_bdev_get_bdevs_methods(uuid_bdev, sz)
        # Resize_lvol_bdev on correct lvs_uuid and size is
        # equal half of size malloc bdev
        sz = size * 2
        self.c.bdev_lvol_resize(uuid_bdev, sz)
        # check size of the lvol bdev by command RPC : bdev_get_bdevs
        fail_count += self.c.check_bdev_get_bdevs_methods(uuid_bdev, sz)
        # Resize_lvol_bdev on correct lvs_uuid and size is
        # equal to three quarters of size malloc bdev
        sz = size * 3
        self.c.bdev_lvol_resize(uuid_bdev, sz)
        # check size of the lvol bdev by command RPC : bdev_get_bdevs
        fail_count += self.c.check_bdev_get_bdevs_methods(uuid_bdev, sz)
        # Resize_lvol_bdev on correct lvs_uuid and size is
        # equal to size of malloc bdev minus 4 MB
        sz = (size * 4) - 4
        self.c.bdev_lvol_resize(uuid_bdev, sz)
        # check size of the lvol bdev by command RPC : bdev_get_bdevs
        fail_count += self.c.check_bdev_get_bdevs_methods(uuid_bdev, sz)
        # Resize_lvol_bdev on the correct lvs_uuid and size is equal 0 MiB
        sz = 0
        self.c.bdev_lvol_resize(uuid_bdev, sz)
        # check size of the lvol bdev by command RPC : bdev_get_bdevs
        fail_count += self.c.check_bdev_get_bdevs_methods(uuid_bdev, sz)
        # Destroy lvol store
        self.c.bdev_lvol_delete_lvstore(uuid_store)
        if self.c.check_bdev_lvol_get_lvstores("", "", "") == 1:
            fail_count += 1
        self.c.bdev_malloc_delete(base_name)
        # Expected result:
        # - lvol bdev should change size after resize operations
        # - calls successful, return code = 0
        # - no other operation fails
        # - bdev_lvol_get_lvstores: response should be of no value after destroyed lvol store
        return fail_count

    @case_message
    def test_case255(self):
        """
        delete_lvol_store_persistent_positive

        Positive test for removing lvol store persistently
        """
        base_path = path.dirname(sys.argv[0])
        base_name = "aio_bdev0"
        aio_bdev0 = path.join(base_path, "aio_bdev_0")
        # Construct aio bdev
        self.c.bdev_aio_create(aio_bdev0, base_name, 4096)
        # Create lvol store on created aio bdev
        uuid_store = self.c.bdev_lvol_create_lvstore(base_name,
                                                     self.lvs_name)
        fail_count = self.c.check_bdev_lvol_get_lvstores(base_name, uuid_store,
                                                         self.cluster_size)
        # Destroy lvol store
        if self.c.bdev_lvol_delete_lvstore(self.lvs_name) != 0:
            fail_count += 1
        # Delete aio bdev
        self.c.bdev_aio_delete(base_name)
        # Create aio bdev on the same file
        self.c.bdev_aio_create(aio_bdev0, base_name, 4096)
        # Wait 1 second to allow time for lvolstore tasting
        sleep(1)
        # check if destroyed lvol store does not exist on aio bdev
        ret_value = self.c.check_bdev_lvol_get_lvstores(base_name, uuid_store,
                                                        self.cluster_size)
        if ret_value == 0:
            fail_count += 1
        self.c.bdev_aio_delete(base_name)
        # Expected result:
        # - bdev_lvol_get_lvstores should not report any existing lvol stores in configuration
        #   after deleting and adding NVMe bdev
        # - no other operation fails
        return fail_count

    @case_message
    def test_case551(self):
        """
        bdev_lvol_delete_ordering

        Test for destroying lvol bdevs in particular order.
        Check destroying wrong one is not possible and returns error.
        """
        fail_count = 0
        snapshot_name = "snapshot"
        clone_name = "clone"
        # Create malloc bdev
        base_name = self.c.bdev_malloc_create(self.total_size,
                                              self.block_size)
        # Construct_lvol_store on correct, existing malloc bdev
        uuid_store = self.c.bdev_lvol_create_lvstore(base_name,
                                                     self.lvs_name,
                                                     self.cluster_size)
        # Check correct uuid values in response bdev_lvol_get_lvstores command
        fail_count = self.c.check_bdev_lvol_get_lvstores(base_name, uuid_store,
                                                         self.cluster_size)
        lvs = self.c.bdev_lvol_get_lvstores()
        size = int(int(lvs[0]['free_clusters'] * lvs[0]['cluster_size']) / 4 / MEGABYTE)
        # Construct thin provisioned lvol bdev
        uuid_bdev0 = self.c.bdev_lvol_create(uuid_store,
                                             self.lbd_name, size, thin=True)
        lvol_bdev = self.c.get_lvol_bdev_with_name(uuid_bdev0)
        # Create snapshot of thin provisioned lvol bdev
        fail_count += self.c.bdev_lvol_snapshot(lvol_bdev['name'], snapshot_name)
        snapshot_bdev = self.c.get_lvol_bdev_with_name(self.lvs_name + "/" + snapshot_name)
        # Create clone of snapshot and check if it ends with success
        fail_count += self.c.bdev_lvol_clone(self.lvs_name + "/" + snapshot_name, clone_name)
        clone_bdev = self.c.get_lvol_bdev_with_name(self.lvs_name + "/" + clone_name)
        # Try to destroy snapshot with clones and check if it fails
        ret_value = self.c.bdev_lvol_delete(snapshot_bdev['name'])
        if ret_value == 0:
            print("ERROR: Delete snapshot should fail but didn't")
            fail_count += 1
        # Destroy clone and then snapshot
        fail_count += self.c.bdev_lvol_delete(lvol_bdev['name'])
        fail_count += self.c.bdev_lvol_delete(clone_bdev['name'])
        fail_count += self.c.bdev_lvol_delete(snapshot_bdev['name'])
        # Destroy lvol store
        fail_count += self.c.bdev_lvol_delete_lvstore(uuid_store)
        # Check response bdev_lvol_get_lvstores command
        if self.c.check_bdev_lvol_get_lvstores("", "", "") == 1:
            fail_count += 1
        # Delete malloc bdev
        self.c.bdev_malloc_delete(base_name)
        # Expected result:
        # - bdev_lvol_get_lvstores: response should be of no value after destroyed lvol store
        # - no other operation fails
        return fail_count

    @case_message
    def test_case552(self):
        """
        bdev_lvol_delete_lvstore_with_clones

        Test for destroying lvol store with clones present,
        without removing them first.
        """
        fail_count = 0
        snapshot_name = "snapshot"
        snapshot_name2 = "snapshot2"
        clone_name = "clone"
        # Create malloc bdev
        base_name = self.c.bdev_malloc_create(self.total_size,
                                              self.block_size)
        # Construct_lvol_store on correct, existing malloc bdev
        uuid_store = self.c.bdev_lvol_create_lvstore(base_name,
                                                     self.lvs_name,
                                                     self.cluster_size)
        # Check correct uuid values in response bdev_lvol_get_lvstores command
        fail_count = self.c.check_bdev_lvol_get_lvstores(base_name, uuid_store,
                                                         self.cluster_size)
        lvs = self.c.bdev_lvol_get_lvstores()
        size = int(int(lvs[0]['free_clusters'] * lvs[0]['cluster_size']) / 4 / MEGABYTE)
        # Create lvol bdev, snapshot it, then clone it and then snapshot the clone
        uuid_bdev0 = self.c.bdev_lvol_create(uuid_store, self.lbd_name, size, thin=True)
        lvol_bdev = self.c.get_lvol_bdev_with_name(uuid_bdev0)
        fail_count += self.c.bdev_lvol_snapshot(lvol_bdev['name'], snapshot_name)
        snapshot_bdev = self.c.get_lvol_bdev_with_name(self.lvs_name + "/" + snapshot_name)
        fail_count += self.c.bdev_lvol_clone(self.lvs_name + "/" + snapshot_name, clone_name)
        clone_bdev = self.c.get_lvol_bdev_with_name(self.lvs_name + "/" + clone_name)
        fail_count += self.c.bdev_lvol_snapshot(clone_bdev['name'], snapshot_name2)
        snapshot_bdev2 = self.c.get_lvol_bdev_with_name(self.lvs_name + "/" + snapshot_name2)
        # Try to destroy snapshot with 2 clones and check if it fails
        ret_value = self.c.bdev_lvol_delete(snapshot_bdev['name'])
        if ret_value == 0:
            print("ERROR: Delete snapshot should fail but didn't")
            fail_count += 1
        # Destroy lvol store without deleting lvol bdevs
        fail_count += self.c.bdev_lvol_delete_lvstore(uuid_store)
        # Check response bdev_lvol_get_lvstores command
        if self.c.check_bdev_lvol_get_lvstores("", "", "") == 1:
            fail_count += 1
        # Delete malloc bdev
        self.c.bdev_malloc_delete(base_name)
        # Expected result:
        # - bdev_lvol_get_lvstores: response should be of no value after destroyed lvol store
        # - no other operation fails
        return fail_count

    @case_message
    def test_case553(self):
        """
        unregister_lvol_bdev

        Test for unregistering the lvol bdevs.
        Removing malloc bdev under an lvol store triggers unregister of
        all lvol bdevs. Verify it with clones present.
        """
        fail_count = 0
        snapshot_name = "snapshot"
        snapshot_name2 = "snapshot2"
        clone_name = "clone"
        # Create malloc bdev
        base_name = self.c.bdev_malloc_create(self.total_size,
                                              self.block_size)
        # Construct_lvol_store on correct, existing malloc bdev
        uuid_store = self.c.bdev_lvol_create_lvstore(base_name,
                                                     self.lvs_name,
                                                     self.cluster_size)
        # Check correct uuid values in response bdev_lvol_get_lvstores command
        fail_count = self.c.check_bdev_lvol_get_lvstores(base_name, uuid_store,
                                                         self.cluster_size)
        lvs = self.c.bdev_lvol_get_lvstores()
        size = int(int(lvs[0]['free_clusters'] * lvs[0]['cluster_size']) / 4 / MEGABYTE)
        # Create lvol bdev, snapshot it, then clone it and then snapshot the clone
        uuid_bdev0 = self.c.bdev_lvol_create(uuid_store, self.lbd_name, size, thin=True)
        lvol_bdev = self.c.get_lvol_bdev_with_name(uuid_bdev0)
        fail_count += self.c.bdev_lvol_snapshot(lvol_bdev['name'], snapshot_name)
        snapshot_bdev = self.c.get_lvol_bdev_with_name(self.lvs_name + "/" + snapshot_name)
        fail_count += self.c.bdev_lvol_clone(self.lvs_name + "/" + snapshot_name, clone_name)
        clone_bdev = self.c.get_lvol_bdev_with_name(self.lvs_name + "/" + clone_name)
        fail_count += self.c.bdev_lvol_snapshot(clone_bdev['name'], snapshot_name2)
        snapshot_bdev2 = self.c.get_lvol_bdev_with_name(self.lvs_name + "/" + snapshot_name2)
        # Delete malloc bdev
        self.c.bdev_malloc_delete(base_name)
        # Check response bdev_lvol_get_lvstores command
        if self.c.check_bdev_lvol_get_lvstores("", "", "") == 1:
            fail_count += 1
        # Expected result:
        # - bdev_lvol_get_lvstores: response should be of no value after destroyed lvol store
        # - no other operation fails
        return fail_count

    @case_message
    def test_case850(self):
        """
        clear_method_none

        Test for lvol bdev with clear_method equal to none.
        """
        # Create malloc bdev
        base_name = self.c.bdev_malloc_create(self.total_size,
                                              self.block_size)
        # Construct lvol store on created malloc bdev
        lvs_uuid = self.c.bdev_lvol_create_lvstore(base_name,
                                                   self.lvs_name)
        # Check correct uuid values in response bdev_lvol_get_lvstores command
        fail_count = self.c.check_bdev_lvol_get_lvstores(base_name, lvs_uuid,
                                                         self.cluster_size)
        lvs = self.c.bdev_lvol_get_lvstores(self.lvs_name)[0]
        # Construct lvol bdev on lvol store
        lbd_size = int(lvs['cluster_size'] / MEGABYTE)
        bdev_uuid = self.c.bdev_lvol_create(lvs_uuid,
                                            self.lbd_name,
                                            lbd_size,
                                            clear_method='none')
        lvol_bdev = self.c.get_lvol_bdev_with_name(bdev_uuid)
        nbd_name = "/dev/nbd0"
        fail_count += self.c.nbd_start_disk(bdev_uuid, nbd_name)
        # Write pattern to lvol bdev starting from offset 0.
        fail_count += self.run_fio_test(nbd_name, 0, lvs['cluster_size'],
                                        "write", "0xdd")
        fail_count += self.c.nbd_stop_disk(nbd_name)
        # Delete lvol bdev
        fail_count += self.c.bdev_lvol_delete(bdev_uuid)
        # Delete lvol store. We need to do this so that we can attach the underlying malloc
        # bdev to nbd to examine its contents.
        fail_count += self.c.bdev_lvol_delete_lvstore(lvs_uuid)
        fail_count += self.c.nbd_start_disk(base_name, nbd_name)
        metadata_pages = 1 + lvs['total_data_clusters'] + (math.ceil(5 + math.ceil(lvs['total_data_clusters'] / 8) / 4096)) * 3
        last_metadata_lba = int(metadata_pages * 4096 / self.block_size)
        offset_metadata_end = int(last_metadata_lba * self.block_size)
        last_cluster_of_metadata = math.ceil(metadata_pages / lvs['cluster_size'] / 4096)
        offset = last_cluster_of_metadata * lvs['cluster_size']
        size_metadata_end = offset - offset_metadata_end
        # Check if data on area between end of metadata
        # and first cluster of lvol bdev remained unchanged
        fail_count += self.run_fio_test("/dev/nbd0", offset_metadata_end,
                                        size_metadata_end, "read", "0x00")
        # Check if data on first lvol bdevs remains unchanged.
        fail_count += self.run_fio_test("/dev/nbd0", offset, lvs['cluster_size'], "read", "0xdd")
        fail_count += self.c.nbd_stop_disk(nbd_name)
        self.c.bdev_malloc_delete(base_name)
        # Expected result:
        # - calls successful, return code = 0
        # - get_bdevs: no change
        # - no other operation fails
        return fail_count

    @case_message
    def test_case851(self):
        """
        clear_method_unmap

        Test lvol bdev with clear_method equal to unmap.
        """
        # Create malloc bdev
        base_name = self.c.bdev_malloc_create(self.total_size,
                                              self.block_size)
        nbd_name = "/dev/nbd0"
        fail_count = self.c.nbd_start_disk(base_name, nbd_name)
        # Write data to malloc bdev starting from offset 0.
        fail_count += self.run_fio_test(nbd_name, 0, self.total_size * MEGABYTE,
                                        "write", "0xdd")
        fail_count += self.c.nbd_stop_disk(nbd_name)
        # Construct lvol store on created malloc bdev
        lvs_uuid = self.c.bdev_lvol_create_lvstore(base_name,
                                                   self.lvs_name,
                                                   clear_method='none')
        # Check correct uuid values in response bdev_lvol_get_lvstores command
        fail_count = self.c.check_bdev_lvol_get_lvstores(base_name, lvs_uuid,
                                                         self.cluster_size)
        lvs = self.c.bdev_lvol_get_lvstores(self.lvs_name)[0]
        # Construct lvol bdev on lvol store
        lbd_size = int(lvs['cluster_size'] / MEGABYTE)
        bdev_uuid = self.c.bdev_lvol_create(lvs_uuid,
                                            self.lbd_name,
                                            lbd_size,
                                            clear_method='unmap')
        # Check that data on lvol bdev remains unchanged
        fail_count += self.c.nbd_start_disk(bdev_uuid, nbd_name)
        fail_count += self.run_fio_test(nbd_name, 0, lvs['cluster_size'],
                                        "read", "0xdd")
        fail_count += self.c.nbd_stop_disk(nbd_name)
        # Delete lvol bdev
        fail_count += self.c.bdev_lvol_delete(bdev_uuid)
        # Delete lvol store
        fail_count += self.c.bdev_lvol_delete_lvstore(lvs_uuid)
        fail_count += self.c.nbd_start_disk(base_name, nbd_name)
        metadata_pages = 1 + lvs['total_data_clusters'] + \
            (math.ceil(5 + math.ceil(lvs['total_data_clusters'] / 8) / 4096)) * 3
        last_metadata_lba = int(metadata_pages * 4096 / self.block_size)
        offset_metadata_end = int(last_metadata_lba * self.block_size)
        last_cluster_of_metadata = math.ceil(metadata_pages / lvs['cluster_size'] / 4096)
        offset = last_cluster_of_metadata * lvs['cluster_size']
        size_metadata_end = offset - offset_metadata_end
        # Check if data on area between end of metadata
        # and first cluster of lvol bdev remained unchanged
        fail_count += self.run_fio_test("/dev/nbd0", offset_metadata_end,
                                        size_metadata_end, "read", "0xdd")
        # Check if data on lvol bdev was zeroed.
        # Malloc bdev should zero any data that is unmapped.
        fail_count += self.run_fio_test("/dev/nbd0", offset, lvs['cluster_size'], "read", "0x00")
        self.c.bdev_malloc_delete(base_name)
        # Expected result:
        # - calls successful, return code = 0
        # - get_bdevs: no change
        # - no other operation fails
        return fail_count
|
from pathlib import *
from filetools import *
def getTumorType(projectPath):
    """Tumor type is the 4th-from-last component of the project path."""
    parts = projectPath.strip("/").split("/")
    return parts[-4]
def getLabName(projectPath):
    """Lab name is the 2nd-from-last component of the project path."""
    parts = projectPath.strip("/").split("/")
    return parts[-2]
def getInstitutionName(projectPath):
    """Institution name is the 3rd-from-last component of the project path."""
    parts = projectPath.strip("/").split("/")
    return parts[-3]
def getProjectNumber(projectPath):
    """Project number is the last component of the project path."""
    parts = projectPath.strip("/").split("/")
    return parts[-1]
def getStudyId(projectPath):
    """Read cancer_study_identifier from the project's meta_study.txt."""
    # pathlib.Path overloads "/" for path concatenation.
    meta = parseMetaData(Path(projectPath) / "meta_study.txt")
    return meta["cancer_study_identifier"]
def parseMetaData(fname):
    """Parse a meta file of 'key: value' lines into a dict.

    Accepts anything smartOpen does (Path, str path, open file object).
    Assumes each value is separated from its key by ": " (colon + one
    space); the +2 offset skips both characters.

    Raises:
        ValueError: if a non-empty line contains no colon.

    Fix: the original used Python 2 ``print >>sys.stderr`` statements
    (a SyntaxError under Python 3) and never imported sys.
    """
    import sys
    fp = smartOpen(fname)
    data = dict()
    for line in fp:
        line = line.strip()
        pos = line.find(":")
        if pos > -1:
            data[line[:pos]] = line[pos + 2:]
        else:
            print(fname, file=sys.stderr)
            print("[", line, "]", file=sys.stderr)
            raise ValueError("Invalid meta data line %s" % (line))
    return data
def smartOpen(pathType, mode="r"):
    """Return an open file object for a Path, a string path, or an open file.

    Fix: the original tested ``isinstance(pathType, file)`` -- the ``file``
    builtin does not exist in Python 3, so passing an already-open handle
    raised NameError. ``io.IOBase`` covers all open file objects. Also
    generalized ``PosixPath`` to ``Path`` so Windows paths work too
    (backward compatible: every PosixPath is a Path).

    Raises:
        ValueError: for any other argument type.
    """
    import io
    if isinstance(pathType, Path):
        return pathType.open(mode=mode)
    if isinstance(pathType, str):
        return open(pathType, mode=mode)
    if isinstance(pathType, io.IOBase):
        # Already an open file object; hand it back untouched.
        return pathType
    raise ValueError("Invalid filepath type <%s>" % (type(pathType)))
|
from django.http import HttpResponse
from django.shortcuts import render, redirect
from .forms import LoginForm, RegisterForm
from django.contrib.auth import authenticate, login
from django.contrib.auth import get_user_model
import views
from django.http import HttpResponseRedirect
def gotobooks(request):
    """Send the client to the book listing page."""
    books_url = '/books/'
    return HttpResponseRedirect(books_url)
def home_page(request):
    """Render the static home page template."""
    return render(request, 'home_page.html')
def login_page(request):
    """Render the login form; authenticate and redirect home on success.

    Fix: the failure message claimed the *password* was wrong even when the
    username did not match any account -- authenticate() returns None for
    either case, so report the credential pair instead.
    """
    login_form = LoginForm(request.POST or None)
    context = {'form': login_form}
    if login_form.is_valid():
        username = login_form.cleaned_data.get('username')
        password = login_form.cleaned_data.get('password')
        user = authenticate(request, username=username, password=password)
        if user is not None:
            login(request, user)
            return redirect('home')
        else:
            context['error'] = "Invalid username or password"
    # Invalid form or failed authentication falls through to re-render.
    return render(request, 'login.html', context)
User = get_user_model()
def register_page(request):
    """Show the registration form and create a new account on valid submit."""
    register_form = RegisterForm(request.POST or None)
    context = {'form': register_form}
    if register_form.is_valid():
        cleaned = register_form.cleaned_data
        User.objects.create_user(cleaned.get('username'),
                                 cleaned.get('email'),
                                 cleaned.get('password'))
        return redirect('home')
    return render(request, 'register.html', context)
"""
Alexander Eriksson
Windows 10
"""
def Palindrom(User_Input):
    """Return True when the input reads the same forwards and backwards.

    The comparison is case-insensitive and ignores the punctuation and
    whitespace characters listed in not_valid.
    """
    not_valid = "!\"#€%&/()=? :,'"  # characters excluded from the comparison
    cleaned = User_Input.lower()
    for ch in not_valid:
        cleaned = cleaned.replace(ch, "")
    # A palindrome equals its own reversal.
    return cleaned == cleaned[::-1]
# --- Script entry: prompt the user and report the palindrome check. ---
User_Input = input("Skriv in ditt ord eller mening:")  # ask the user for a word or sentence
if (Palindrom(User_Input)):  # run the check defined above
    print ("Det är en Palindrom!")  # printed when the check returns True
else:
    print ("Det är inte en Palindrom :(")  # printed when the check returns False
|
import os
from cryptography.fernet import Fernet
import clipboard
import random
import getpass
import string
import hashlib
# All application state lives under %AppData%\pwmanager.
appdata = os.environ.get('AppData')  # None outside Windows — paths below assume Windows
pw_path = appdata + '/pwmanager/pw.txt'  # Fernet-encrypted password store
key_path = appdata + '/pwmanager/key.key'  # Fernet key material
master_path = appdata + '/pwmanager/master.file'  # SHA-512 hex digest of the master password
counter = 0  # consecutive invalid menu choices (see MainMenu)
counter2 = 0  # duplicate-service confirmation state (see AddPw)
def write_key():
    """Generate and persist a new Fernet key at ``key_path``.

    A no-op when the key file already exists, so an existing store
    never loses its key.
    """
    if not os.path.exists(key_path):  # guard clause replaces `if exists: pass else:`
        key = Fernet.generate_key()
        with open(key_path, 'wb') as key_file:
            key_file.write(key)
def load_key():
    """Return the Fernet key bytes stored at ``key_path``."""
    # Context manager closes the handle; the original leaked it
    # (`return open(...).read()` never closed the file explicitly).
    with open(key_path, 'rb') as key_file:
        return key_file.read()
def master_password():
    """Authenticate against — or create — the master password.

    Existing install (``master_path`` present): prompt via getpass until
    the SHA-512 digest of the entry matches the stored hash; typing "q"
    after a failed match quits.  Fresh install: create the pwmanager
    folder, store the digest of a newly chosen master password and write
    a stub help file.

    The original duplicated the whole verify loop for the first and
    subsequent attempts; only the prompt text differed, so the two
    branches are collapsed here.  Also fixes the "Try agian" typo and a
    dead ``f.close`` (missing call parentheses) in the create path.
    """
    if os.path.exists(master_path):
        prompt = 'Enter master password or (q)uit: '
        while True:
            pwin = getpass.getpass(prompt)
            with open(master_path, 'r') as f:
                mpw = f.read()
            if hashlib.sha512(str.encode(pwin)).hexdigest() == mpw:
                print('\nWelcome to PWMANAGER v0.0.1!')
                return
            # As in the original, "q" is only honoured after the hash
            # comparison fails (a master password of "q" still works).
            if pwin == "q":
                quit()
            prompt = 'Password wrong! Try again: '
    else:
        try:
            os.mkdir(appdata + '/pwmanager')
        except FileExistsError:
            pass  # folder already exists — just (re)create the files below
        with open(master_path, 'w') as f:
            pwin = input('Create master password or (q)uit: ')
            f.write(hashlib.sha512(str.encode(pwin)).hexdigest())
        with open(appdata + '/pwmanager/help.txt', 'w') as f:
            f.write("there is no help")
        print('\nWelcome to PWMANAGER v0.0.1!')
def encrypt(filename, key):
    """Encrypt *filename* in place using the Fernet *key*."""
    cipher = Fernet(key)
    with open(filename, 'rb') as handle:
        plaintext = handle.read()
    ciphertext = cipher.encrypt(plaintext)
    with open(filename, 'wb') as handle:
        handle.write(ciphertext)
def decrypt(filename, key):
    """Decrypt *filename* in place using the Fernet *key*."""
    cipher = Fernet(key)
    with open(filename, 'rb') as handle:
        ciphertext = handle.read()
    plaintext = cipher.decrypt(ciphertext)
    with open(filename, 'wb') as handle:
        handle.write(plaintext)
def CreatePath():
    """Ensure the pwmanager folder and an (initially empty) password file exist."""
    if not os.path.exists(pw_path):
        try:
            os.mkdir(appdata + '/pwmanager')
        except FileExistsError:
            pass  # folder exists but the file doesn't — fall through and create it
        # The original wrote a bare `os.close` inside the with-block: an
        # attribute access, never called, i.e. a no-op.  The with-block
        # alone creates and closes the empty file.
        with open(pw_path, 'w'):
            pass
def help():
    """Return the contents of the bundled help file.

    NOTE: intentionally keeps the original name, which shadows the
    builtin ``help`` at module level — MainMenu() calls it by this name.
    """
    # with-block closes the handle; the original leaked it.
    with open(appdata + '/pwmanager/help.txt') as help_file:
        return help_file.read()
def MainMenu():
    """Top-level interactive menu; dispatches to the action functions.

    Uses the module-global ``counter`` to switch to a sterner prompt
    after an unrecognised choice.  Control never returns to the caller:
    every branch either recurses (directly or via the action function)
    or quits.
    """
    global counter
    if counter <= 0:
        # First attempt (or previous input was valid): full prompt.
        MenuIn = input(
            '\nDo you want to (r)ead all, (s)earch for, (e)dit or (a)dd a password? Else (q)uit or get (h)elp.\n\n')
    elif counter > 0:
        # Previous input was invalid: short corrective prompt.
        MenuIn = input(
            '\nPlease only answer with \"r\", \"s\", \"a\", \"e\" or \"q\"!\n\n')
    if MenuIn == "a":
        counter = 0
        AddPw()
    elif MenuIn == "r":
        counter = 0
        ReadAll()
    elif MenuIn == "s":
        counter = 0
        PrintServices()
        search()
    elif MenuIn == "e":
        counter = 0
        edit()
    elif MenuIn == "h":
        counter = 0
        print(help())
        MainMenu()
    elif MenuIn == "q":
        quit()
    else:
        counter += 1  # unknown input — recurse and show the short prompt
        MainMenu()
def AddPw():
    """Interactively append a Service/Username/Password record to the store.

    The store is decrypted for the duration of the call and re-encrypted
    before handing control back to MainMenu().  Typing "cancel" as the
    service aborts; the literal password "random" generates one instead.
    NOTE(review): indentation was reconstructed — the duplicate scan is
    read as a for/else (add-block runs once after the scan); confirm
    against the original file.
    """
    decrypt(pw_path, key)
    global counter2  # becomes 1 once the user confirms a possible duplicate
    with open(pw_path, 'a') as pwfile:
        print()
        ServiceIn = input("Enter Service: ")
        if ServiceIn == "cancel":
            encrypt(pw_path, key)
            MainMenu()
        elif ServiceIn == "":
            print("\nService can't be blank!")
            AddPw()
        else:
            pass
        # Scan the decrypted store for a record that looks like ServiceIn.
        FileContent = open(pw_path).read().splitlines()
        for index, line in enumerate(FileContent):
            if counter2 <= 0:
                if "Service" in line and ServiceIn in line:
                    print("\nService maybe already saved, please double check:\n")
                    # Records are 3 consecutive lines starting at the Service line.
                    print(FileContent[index+0:index+3])
                    still_save = input(
                        "\nDo you still want to register a new password? (y/n)\n")
                    if still_save == "y":
                        counter2 += 1
                        pass
                    elif still_save == "n":
                        encrypt(pw_path, key)
                        MainMenu()
                    else:
                        print("Please only answer with \"y\" or \"n\"\n")
                else:
                    pass
        else:
            # for/else: runs once the scan completes without break.
            username = input("Enter username: ")
            passwordin = input("Enter password: ")
            if passwordin == 'random':
                # Generated passwords draw from digits + punctuation, then
                # the loops below try to guarantee one letter / digit / symbol.
                chars = string.digits + '!@#$%^&*()'
                length = int(input('Length of password:'))
                if length < 3:
                    print('Password must at least be 3 characters long!')
                    encrypt(pw_path, key)
                    MainMenu()
                total = length
                password = ''
                for i in range(length-2):
                    password += random.choice(chars)
                    total -= 1
                # NOTE: `counter` here is a new local — it does not touch the
                # module-global menu counter (no `global` declaration).
                counter = len(string.ascii_letters)
                for letter in string.ascii_letters:
                    counter -= 1
                    if letter in password:
                        break
                    elif counter == 1:
                        password += random.choice(string.ascii_letters)
                        total -= 1
                # NOTE(review): counts down from len(ascii_letters) while
                # iterating the 10 digits — counter can never reach 1 here,
                # so the digit guarantee only triggers via the break. TODO confirm intent.
                counter = len(string.ascii_letters)
                for digit in string.digits:
                    counter -= 1
                    if digit in password:
                        break
                    elif counter == 1:
                        password += random.choice(string.digits)
                        total -= 1
                counter = len('!@#$%^&*()')
                for char in '!@#$%^&*()':
                    counter -= 1
                    if char in password:
                        break
                    elif counter == 1:
                        password += random.choice('!@#$%^&*()')
                        total -= 1
                # Pad back up to the requested length.
                while total != 0:
                    password += random.choice(chars)
                    total -= 1
                print()
                print(f'Your new password is "{password}".')
                clipboard.copy(password)
                print()
                srvice = "Service: " + ServiceIn + "\n"
                usrnm = "Username: " + username + "\n"
                pswrd = "Password: " + password + "\n"
                pwfile.write("------------------------------------" + "\n")
                pwfile.write(srvice)
                pwfile.write(usrnm)
                pwfile.write(pswrd)
                pwfile.close()
            else:
                print()
                srvice = "Service: " + ServiceIn + "\n"
                usrnm = "Username: " + username + "\n"
                pswrd = "Password: " + passwordin + "\n"
                pwfile.write("------------------------------------" + "\n")
                pwfile.write(srvice)
                pwfile.write(usrnm)
                pwfile.write(pswrd)
                pwfile.close()
    counter2 = 0
    encrypt(pw_path, key)
    MainMenu()
# An in-app edit/delete flow was started and abandoned; the deliberate
# fallback is to open the decrypted store in Notepad.
def edit():
    """Open the decrypted store in Notepad for manual editing, then re-encrypt.

    The ~35 lines of commented-out (never-working) in-app delete code
    that used to live here have been removed.
    """
    decrypt(pw_path, key)
    os.system('notepad ' + pw_path)  # blocks until the user closes Notepad
    encrypt(pw_path, key)
    MainMenu()
def ReadAll():
    """Print the entire decrypted store, re-encrypt it and return to the menu."""
    decrypt(pw_path, key)
    # The with-block closes the handle; the original also called
    # pw.close() redundantly inside it.
    with open(pw_path, 'r') as pw:
        print(pw.read())
    encrypt(pw_path, key)
    MainMenu()
def PrintServices():
    """List the service names currently stored in the password file."""
    decrypt(pw_path, key)
    print("\nAvailable Services:\n")
    with open(pw_path, 'r') as pw:
        for entry in pw:
            if 'Service' in entry:
                print(entry.replace('Service:', '').strip())
    encrypt(pw_path, key)
def search():
    """Look up a service, print its record and copy the password.

    Decrypts the store, scans for lines containing both 'Service:' and
    the search term, prints the 3-line record and puts the password on
    the clipboard (never on screen).  Blank input or a miss re-encrypts
    and recurses; "cancel" goes back to the menu.
    """
    decrypt(pw_path, key)
    pw = open(pw_path).read().splitlines()
    SearchIn = input("\nEnter Service: ")
    if SearchIn == "":
        print("\nSearch can't be blank!")
        encrypt(pw_path, key)
        search()  # NOTE(review): recursive retry; in practice control never returns here
    elif SearchIn == "cancel":
        encrypt(pw_path, key)
        MainMenu()
    else:
        pass
    print()
    found = False
    for index, line in enumerate(pw):
        if 'Service:' in line and SearchIn in line:
            # Record layout: Service / Username / Password on consecutive lines.
            print(pw[index+0:index+3])
            password_itself = (pw[index+2]).replace('Password: ', '')
            clipboard.copy(password_itself)  # password goes to the clipboard
            found = True
    if not found:
        print("Password for " + SearchIn + " was not found.")
        encrypt(pw_path, key)
        search()
    encrypt(pw_path, key)
    MainMenu()
# ---- start-up sequence (runs at import/module level) ----
master_password()  # authenticate, or create the master password on first run
write_key()        # ensure a Fernet key exists
key = load_key()   # module-global key used by every encrypt()/decrypt() call
CreatePath()       # ensure the password file exists
try:
    # Round-trip check: if the store was left *decrypted* by a crash,
    # decrypt() raises and the except-branch re-encrypts it.
    decrypt(pw_path, key)
    encrypt(pw_path, key)
except:  # NOTE(review): bare except — presumably meant the Fernet InvalidToken error; confirm
    encrypt(pw_path, key)
    print("\nYour passwords have not been encrypted since the last time you used pwmanager.",
          "This error is most likely caused by an unexpected shutdown of the program during an unfinished action.")
    # time.sleep(5)
MainMenu()
|
import importlib.util

# Detect whether Blender's Python API (bpy) is importable without
# actually importing it yet.
blender_loader = importlib.util.find_spec('bpy')

# Include the bl_info at the top level always
# (Blender's add-on scanner reads this dict even outside the bpy runtime).
bl_info = {
    "name": "Yakuza GMD File Import/Export",
    "author": "Samuel Stark (TheTurboTurnip)",
    "version": (0, 2, 2),
    "blender": (2, 80, 0),
    "location": "File > Import-Export",
    "description": "Import-Export Yakuza GMD Files (tested with Kenzan, Y3, Y4, Y5, Y0, YK1)",
    "warning": "",
    "doc_url": "",
    "category": "Import-Export",
}

# Only pull in the add-on implementation when bpy is available, so this
# module can still be imported from plain Python (e.g. for tooling).
if blender_loader:
    from .blender.addon import *
import numpy as np

R = 10        # circle radius (and half-width of the bounding square)
EXP = 6
N = 10**EXP   # number of random sample points

np.random.seed(1)  # reproducible draws
pt_x = np.random.uniform(-R, R, N)
pt_y = np.random.uniform(-R, R, N)

# A point lands inside the circle iff x^2 + y^2 <= R^2.  Comparing squared
# distances vectorises the whole test and avoids N Python-level calls to
# np.linalg.norm; the boolean result is identical.
inside = pt_x**2 + pt_y**2 <= R**2
hits = np.cumsum(inside)  # hits[i] == points inside among the first i+1 samples

# Same progress trace as the original loop: running estimate every 1% of
# the samples (i = 0, N//100, 2*N//100, ...).
for i in range(0, N, N // 100):
    print(4 * hits[i] / (i + 1))

ctr = int(hits[-1])  # total points inside the circle
PI = 4 * ctr / N     # area ratio circle/square = pi/4
print('PI estimation by Monte Carlo = %f' %PI)
|
# --------------------------------------------------------------------------
#
# Copyright (c) Microsoft Corporation. All rights reserved.
#
# The MIT License (MIT)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the ""Software""), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
# --------------------------------------------------------------------------
import io
import json
from azure.core.pipeline.transport import HttpRequest
from os.path import dirname, pardir, join, realpath
import pytest
cwd = dirname(realpath(__file__))  # directory containing this test module
class TestSendRequest(object):
    """Integration tests for the generated async clients' ``_send_request``
    passthrough, run against the autorest test server on ``localhost:3000``.
    """

    @pytest.mark.asyncio
    async def test_send_request_with_body_get_model_deserialize(self):
        """GET a polymorphic body and deserialize it with the generated model.

        (The original also built an unused client outside the async-with;
        removed here so only one client is ever constructed.)
        """
        from bodycomplex.aio import AutoRestComplexTestService
        from bodycomplex.models import Siamese
        request = HttpRequest("GET", "/complex/inheritance/valid",
            headers={
                'Accept': 'application/json'
            },
        )
        async with AutoRestComplexTestService(base_url="http://localhost:3000") as client:
            response = await client._send_request(request)
            deserialized = Siamese.deserialize(response)
            assert 2 == deserialized.id
            assert "Siameeee" == deserialized.name
            assert -1 == deserialized.hates[1].id
            assert "Tomato" == deserialized.hates[1].name

    @pytest.mark.asyncio
    async def test_send_request_with_body_get_direct_json(self):
        """Stream the raw response bytes and parse the JSON by hand
        (no model deserialization; the unused Siamese import is gone)."""
        from bodycomplex.aio import AutoRestComplexTestService
        request = HttpRequest("GET", "/complex/inheritance/valid",
            headers={
                'Accept': 'application/json'
            },
        )
        async with AutoRestComplexTestService(base_url="http://localhost:3000") as client:
            response = await client._send_request(request, stream=True)
            chunks = []
            async for chunk in response.stream_download(None):
                chunks.append(chunk)
            data = b''.join(chunks).decode('utf-8')
            json_response = json.loads(data)
            assert 2 == json_response['id']
            assert "Siameeee" == json_response['name']
            assert -1 == json_response['hates'][1]['id']
            assert "Tomato" == json_response['hates'][1]['name']

    @pytest.mark.asyncio
    async def test_send_request_with_body_put_json_dumps(self):
        """PUT a hand-built dict as the JSON body."""
        from bodycomplex.aio import AutoRestComplexTestService

        siamese_body = {
            "id": 2,
            "name": "Siameeee",
            "color": "green",
            "hates":
                [
                    {
                        "id": 1,
                        "name": "Potato",
                        "food": "tomato"
                    },
                    {
                        "id": -1,
                        "name": "Tomato",
                        "food": "french fries"
                    }
                ],
            "breed": "persian"
        }
        request = HttpRequest("PUT", "/complex/inheritance/valid",
            headers={
                'Content-Type': 'application/json'
            }
        )
        request.set_json_body(siamese_body)
        async with AutoRestComplexTestService(base_url="http://localhost:3000") as client:
            response = await client._send_request(request)
            assert response.status_code == 200

    @pytest.mark.asyncio
    async def test_send_request_with_body_serialize(self):
        """PUT a model instance serialized via the generated serializer."""
        from bodycomplex.aio import AutoRestComplexTestService
        from bodycomplex.models import Siamese, Dog

        siamese = Siamese(
            id=2,
            name="Siameeee",
            color="green",
            hates=[
                Dog(
                    id=1,
                    name="Potato",
                    food="tomato"
                ),
                Dog(
                    id=-1,
                    name="Tomato",
                    food="french fries"
                )
            ],
            breed="persian"
        )
        request = HttpRequest("PUT", "/complex/inheritance/valid",
            headers={
                'Content-Type': 'application/json'
            }
        )
        request.set_json_body(siamese.serialize())
        async with AutoRestComplexTestService(base_url="http://localhost:3000") as client:
            response = await client._send_request(request)
            assert response.status_code == 200

    @pytest.mark.asyncio
    async def test_send_request_get_stream(self):
        """Stream a binary download in blocks and compare against the
        sample file shipped with the test server."""
        from bodyfile.aio import AutoRestSwaggerBATFileService

        client = AutoRestSwaggerBATFileService(base_url="http://localhost:3000", connection_data_block_size=1000)
        file_length = 0
        with io.BytesIO() as file_handle:
            request = HttpRequest("GET", "/files/stream/nonempty",
                headers={
                    'Accept': 'image/png, application/json'
                },
            )
            response = await client._send_request(request, stream=True)
            assert response.status_code == 200
            stream = response.stream_download(None)  # want to make pipeline client an optional param in azure-core
            total = len(stream)
            # Not released until we start iterating.
            assert not stream.response.internal_response._released
            async for data in stream:
                assert 0 < len(data) <= stream.block_size
                file_length += len(data)
                print("Downloading... {}%".format(int(file_length*100/total)))
                file_handle.write(data)
            assert file_length != 0
            # The reference image lives in the test server package.
            sample_file = realpath(
                join(cwd, pardir, pardir, pardir, pardir, pardir,
                     "node_modules", "@microsoft.azure", "autorest.testserver", "routes", "sample.png"))
            with open(sample_file, 'rb') as data:
                sample_data = hash(data.read())
            assert sample_data == hash(file_handle.getvalue())
        await client.close()

    @pytest.mark.asyncio
    async def test_send_request_put_stream(self):
        """PUT a file-like object as a streamed octet-stream body."""
        from bodyformdata.aio import AutoRestSwaggerBATFormDataService

        client = AutoRestSwaggerBATFormDataService(
            base_url="http://localhost:3000",
        )
        test_string = "Upload file test case"
        test_bytes = bytearray(test_string, encoding='utf-8')
        with io.BytesIO(test_bytes) as stream_data:
            request = HttpRequest("PUT", '/formdata/stream/uploadfile',
                headers={
                    'Content-Type': 'application/octet-stream'
                },
                data=stream_data,
            )
            response = await client._send_request(request, stream=True)
            assert response.status_code == 200
        await client.close()

    @pytest.mark.asyncio
    async def test_send_request_full_url(self):
        """An absolute request URL must override the client's base_url."""
        from bodycomplex.aio import AutoRestComplexTestService
        from bodycomplex.models import Siamese

        request = HttpRequest("GET", "http://localhost:3000/complex/inheritance/valid",
            headers={
                'Accept': 'application/json'
            },
        )
        # Deliberately bogus base_url: the full URL on the request wins.
        async with AutoRestComplexTestService(base_url="http://fakeUrl") as client:
            response = await client._send_request(request)
            deserialized = Siamese.deserialize(response)
            assert 2 == deserialized.id
            assert "Siameeee" == deserialized.name
            assert -1 == deserialized.hates[1].id
            assert "Tomato" == deserialized.hates[1].name
|
from ScenarioHelper import *
def main():
CreateScenaFile(
"m9082.bin", # FileName
"m9082", # MapName
"m9082", # Location
0x00C3, # MapIndex
"ed7356",
0x00000000, # Flags
("", "", "", "", "", ""), # include
0x00, # PlaceNameNumber
0x2A, # PreInitFunctionIndex
b'\x00\xff\xff', # Unknown_51
# Information
[0, 0, -1000, 0, 0, 0, 24000, 500, 30, 45, 0, 360, 0, 0, 0, 0, 0, 1, 195, 0, 0, 0, 1],
)
BuildStringList((
"m9082", # 0
"亚里欧斯", # 1
"显示台词用模型", # 2
"亚里欧斯带领的魔兽", # 3
"亚里欧斯带领的魔兽", # 4
"表现效果用模型", # 5
"bm9069", # 6
))
ATBonus("ATBonus_1D8", 100, 5, 0, 5, 0, 5, 0, 2, 5, 0, 0, 0, 2, 0, 0, 0)
MonsterBattlePostion("MonsterBattlePostion_298", 8, 12, 180)
MonsterBattlePostion("MonsterBattlePostion_29C", 3, 14, 180)
MonsterBattlePostion("MonsterBattlePostion_2A0", 13, 14, 180)
MonsterBattlePostion("MonsterBattlePostion_2A4", 0, 0, 180)
MonsterBattlePostion("MonsterBattlePostion_2A8", 0, 0, 180)
MonsterBattlePostion("MonsterBattlePostion_2AC", 0, 0, 180)
MonsterBattlePostion("MonsterBattlePostion_2B0", 0, 0, 180)
MonsterBattlePostion("MonsterBattlePostion_2B4", 0, 0, 180)
MonsterBattlePostion("MonsterBattlePostion_278", 7, 4, 0)
MonsterBattlePostion("MonsterBattlePostion_27C", 10, 11, 225)
MonsterBattlePostion("MonsterBattlePostion_280", 4, 7, 90)
MonsterBattlePostion("MonsterBattlePostion_284", 12, 7, 270)
MonsterBattlePostion("MonsterBattlePostion_288", 4, 11, 135)
MonsterBattlePostion("MonsterBattlePostion_28C", 11, 4, 315)
MonsterBattlePostion("MonsterBattlePostion_290", 7, 12, 180)
MonsterBattlePostion("MonsterBattlePostion_294", 5, 5, 45)
# monster count: 0
# event battle count: 1
BattleInfo(
"BattleInfo_2B8", 0x0042, 255, 6, 45, 3, 3, 30, 0, "bm9069", 0x00000000, 100, 0, 0, 0,
(
("ms02401.dat", "ms85401.dat", "ms85501.dat", 0, 0, 0, 0, 0, "MonsterBattlePostion_298", "MonsterBattlePostion_278", "ed7527", "ed7453", "ATBonus_1D8"),
(),
(),
(),
)
)
AddCharChip((
"apl/ch51744.itc", # 00
))
DeclNpc(0, 12000, 211500, 180, 389, 0x0, 0, 0, 0, 255, 255, 0, 2, 255, 0)
DeclNpc(0, 0, 0, 0, 508, 0x0, 0, 0, 0, 255, 255, 255, 255, 255, 0)
DeclNpc(0, 0, 0, 0, 453, 0x0, 0, 0, 0, 255, 255, 255, 255, 255, 0)
DeclNpc(0, 0, 0, 0, 453, 0x0, 0, 0, 0, 255, 255, 255, 255, 255, 0)
DeclNpc(0, 13100, 204699, 305, 508, 0x0, 0, 0, 0, 255, 255, 255, 255, 255, 0)
DeclEvent(0x0000, 0, 9, 0.0, 185.0, 11.0, 225.0, [0.20000000298023224, -0.0, 0.0, 0.0, -0.0, 0.1666666716337204, -0.0, 0.0, 0.0, -0.0, 0.20000000298023224, 0.0, -0.0, -30.83333396911621, -2.200000047683716, 1.0])
DeclActor(3500, 0, 155000, 1200, 3500, 1000, 155000, 0x007C, 0, 3, 0x0000)
ChipFrameInfo(940, 0) # 0
ScpFunction((
"Function_0_3AC", # 00, 0
"Function_1_3F8", # 01, 1
"Function_2_517", # 02, 2
"Function_3_747", # 03, 3
"Function_4_82B", # 04, 4
"Function_5_AAF", # 05, 5
"Function_6_C02", # 06, 6
"Function_7_C63", # 07, 7
"Function_8_CC4", # 08, 8
"Function_9_CD7", # 09, 9
"Function_10_5036", # 0A, 10
"Function_11_5061", # 0B, 11
"Function_12_508C", # 0C, 12
"Function_13_50B7", # 0D, 13
"Function_14_50E2", # 0E, 14
"Function_15_510D", # 0F, 15
"Function_16_5131", # 10, 16
"Function_17_5143", # 11, 17
"Function_18_5155", # 12, 18
"Function_19_5167", # 13, 19
"Function_20_5173", # 14, 20
"Function_21_51C1", # 15, 21
"Function_22_520F", # 16, 22
"Function_23_5238", # 17, 23
"Function_24_5282", # 18, 24
"Function_25_52AF", # 19, 25
"Function_26_52DB", # 1A, 26
"Function_27_52FE", # 1B, 27
"Function_28_5347", # 1C, 28
"Function_29_5390", # 1D, 29
"Function_30_53AC", # 1E, 30
"Function_31_5420", # 1F, 31
"Function_32_6DE5", # 20, 32
"Function_33_6E2E", # 21, 33
"Function_34_6E98", # 22, 34
"Function_35_76A5", # 23, 35
"Function_36_76B5", # 24, 36
"Function_37_76C4", # 25, 37
"Function_38_76D6", # 26, 38
"Function_39_76E8", # 27, 39
"Function_40_76F4", # 28, 40
"Function_41_7742", # 29, 41
"Function_42_7790", # 2A, 42
"Function_43_77B7", # 2B, 43
))
def Function_0_3AC(): pass
label("Function_0_3AC")
Jc((scpexpr(EXPR_PUSH_VALUE_INDEX, 0x0), scpexpr(EXPR_PUSH_LONG, 0x65), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_3BD")
Event(0, 4)
label("loc_3BD")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x22, 0)), scpexpr(EXPR_END)), "loc_3D1")
ClearScenarioFlags(0x22, 0)
Event(0, 31)
Jump("loc_3F7")
label("loc_3D1")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x22, 1)), scpexpr(EXPR_END)), "loc_3E5")
ClearScenarioFlags(0x22, 1)
Event(0, 34)
Jump("loc_3F7")
label("loc_3E5")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x1A9, 4)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x22, 2)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_3F7")
Event(0, 8)
label("loc_3F7")
Return()
# Function_0_3AC end
def Function_1_3F8(): pass
label("Function_1_3F8")
OP_F0(0x1, 0x320)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x1A9, 4)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x22, 2)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_414")
OP_50(0x1, (scpexpr(EXPR_PUSH_LONG, 0x0), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
label("loc_414")
OP_1B(0x1, 0x0, 0x5)
ModifyEventFlags(0, 0, 0x80)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x1A9, 3)), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x1A9, 4)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_431")
ModifyEventFlags(1, 0, 0x80)
label("loc_431")
OP_52(0x8, 0x2D, (scpexpr(EXPR_PUSH_LONG, 0x41A), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
OP_52(0x8, 0x2E, (scpexpr(EXPR_PUSH_LONG, 0x41A), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
OP_52(0x8, 0x2F, (scpexpr(EXPR_PUSH_LONG, 0x41A), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
SetMapObjFlags(0x2, 0x1000)
ClearMapObjFlags(0x2, 0x4)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x1A9, 4)), scpexpr(EXPR_END)), "loc_4BE")
SetMapObjFlags(0x2, 0x4)
SetMapObjFrame(0xFF, "magi10_add", 0x1, 0x1)
SetMapObjFrame(0xFF, "magi11_add", 0x1, 0x1)
SetMapObjFrame(0xFF, "point_add", 0x1, 0x1)
SetMapObjFrame(0xFF, "magi_04_add", 0x0, 0x1)
OP_70(0x1, 0x96)
Jump("loc_50A")
label("loc_4BE")
SetMapObjFrame(0xFF, "magi10_add", 0x0, 0x1)
SetMapObjFrame(0xFF, "magi11_add", 0x0, 0x1)
SetMapObjFrame(0xFF, "point_add", 0x0, 0x1)
SetMapObjFrame(0xFF, "magi_04_add", 0x1, 0x1)
OP_70(0x1, 0x3C)
label("loc_50A")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x1A9, 4)), scpexpr(EXPR_END)), "loc_516")
Call(0, 42)
label("loc_516")
Return()
# Function_1_3F8 end
def Function_2_517(): pass
label("Function_2_517")
SetChrFlags(0x8, 0x10)
TalkBegin(0x8)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x1CF, 0)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_6FE")
#C0001
ChrTalk(
0x8,
"……………………………………\x02",
)
CloseMessageWindow()
#C0002
ChrTalk(
0x101,
(
"#00001F……完全失去意识了,\x01",
"似乎没有生命危险。\x02",
)
)
CloseMessageWindow()
#C0003
ChrTalk(
0x104,
(
"#00306F真是个惊人的对手啊……\x01",
"面对我们全体成员,竟然还能战斗到如此程度。\x02",
)
)
CloseMessageWindow()
#C0004
ChrTalk(
0x103,
"#00208F不愧是『风之剑圣』……\x02",
)
CloseMessageWindow()
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x9)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_621")
#C0005
ChrTalk(
0x10A,
"#00603F哼……总算战胜他了。\x02",
)
CloseMessageWindow()
label("loc_621")
#C0006
ChrTalk(
0x101,
(
"#00003F小滴肯定很担心亚里欧斯先生,\x01",
"真想马上把他送到梅尔卡瓦……\x02\x03",
"#00001F……但玛丽亚贝尔小姐和伊安律师\x01",
"还在前方等着我们。\x02\x03",
"#00003F虽然有些对不起他,\x01",
"但这件事还是暂时推后吧。\x02",
)
)
CloseMessageWindow()
#C0007
ChrTalk(
0x102,
"#00100F是啊……我们先走吧。\x02",
)
CloseMessageWindow()
SetScenarioFlags(0x1CF, 0)
Jump("loc_743")
label("loc_6FE")
#C0008
ChrTalk(
0x8,
"……………………………………\x02",
)
CloseMessageWindow()
SetChrName("")
#A0009
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
"似乎完全失去意识了。\x07\x00\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
OP_5A()
label("loc_743")
TalkEnd(0x8)
Return()
# Function_2_517 end
def Function_3_747(): pass
label("Function_3_747")
OP_F4(0x2)
FadeToDark(300, 0, 100)
#A0010
AnonymousTalk(
0x3E7,
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
"有恢复导力的装置。\x07\x00\x02",
)
)
Menu(
0,
-1,
-1,
1,
(
"在这里休息\x01", # 0
"放弃\x01", # 1
)
)
MenuEnd(0x0)
OP_60(0x0)
OP_57(0x0)
Jc((scpexpr(EXPR_GET_RESULT, 0x0), scpexpr(EXPR_PUSH_LONG, 0x0), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_81C")
FadeToBright(100, 0)
Sleep(500)
SoundLoad(13)
OP_74(0x0, 0x1E)
Sound(7, 0, 100, 0)
OP_70(0x0, 0x0)
OP_71(0x0, 0x0, 0x1E, 0x0, 0x0)
OP_79(0x0)
OP_71(0x0, 0x1F, 0x186, 0x0, 0x20)
Sleep(1000)
StopBGM(0xBB8)
FadeToDark(1000, 0, -1)
Sleep(700)
Sound(13, 0, 100, 0)
OP_0D()
OP_32(0xFF, 0xFE, 0x0)
OP_6A(0x0, 0x0)
OP_31(0x1)
Sleep(3500)
OP_70(0x0, 0x0)
OP_1F()
FadeToBright(1000, 0)
OP_57(0x0)
TalkEnd(0xFF)
Return()
label("loc_81C")
FadeToBright(300, 0)
TalkEnd(0xFF)
Return()
# Function_3_747 end
def Function_4_82B(): pass
label("Function_4_82B")
EventBegin(0x0)
FadeToDark(0, 0, -1)
OP_E2(0x3)
LoadEffect(0x1, "event/ev202_00.eff")
OP_68(-340, 13500, 219060, 0)
MoveCamera(29, 41, 0, 0)
OP_6E(600, 0)
SetCameraDistance(12970, 0)
SetChrPos(0x0, 0, 12000, 222000, 180)
SetChrPos(0x1, 0, 12000, 222000, 180)
SetChrPos(0x2, 0, 12000, 222000, 180)
SetChrPos(0x3, 0, 12000, 222000, 180)
OP_A7(0x0, 0xFF, 0xFF, 0xFF, 0x0, 0x0)
OP_A7(0x1, 0xFF, 0xFF, 0xFF, 0x0, 0x0)
OP_A7(0x2, 0xFF, 0xFF, 0xFF, 0x0, 0x0)
OP_A7(0x3, 0xFF, 0xFF, 0xFF, 0x0, 0x0)
FadeToBright(500, 0)
OP_0D()
Sound(920, 0, 100, 0)
PlayEffect(0x1, 0xFF, 0x0, 0x0, 0, 0, 0, 0, 0, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
Sleep(100)
def lambda_93B():
OP_A7(0xFE, 0xFF, 0xFF, 0xFF, 0xFF, 0x1F4)
ExitThread()
QueueWorkItem(0x0, 2, lambda_93B)
def lambda_94C():
OP_95(0xFE, -240, 12000, 218120, 2500, 0x0)
ExitThread()
QueueWorkItem(0x0, 1, lambda_94C)
Sleep(500)
PlayEffect(0x1, 0xFF, 0x1, 0x0, 0, 0, 0, 0, 0, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
Sleep(100)
def lambda_9A3():
OP_A7(0xFE, 0xFF, 0xFF, 0xFF, 0xFF, 0x1F4)
ExitThread()
QueueWorkItem(0x1, 2, lambda_9A3)
def lambda_9B4():
OP_95(0xFE, -1420, 12000, 218280, 2500, 0x0)
ExitThread()
QueueWorkItem(0x1, 1, lambda_9B4)
Sleep(500)
Sound(920, 0, 100, 0)
PlayEffect(0x1, 0xFF, 0x2, 0x0, 0, 0, 0, 0, 0, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
Sleep(100)
def lambda_A11():
OP_A7(0xFE, 0xFF, 0xFF, 0xFF, 0xFF, 0x1F4)
ExitThread()
QueueWorkItem(0x2, 2, lambda_A11)
def lambda_A22():
OP_95(0xFE, 1060, 12000, 218310, 2500, 0x0)
ExitThread()
QueueWorkItem(0x2, 1, lambda_A22)
Sleep(500)
PlayEffect(0x1, 0xFF, 0x3, 0x0, 0, 0, 0, 0, 0, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
Sleep(100)
def lambda_A79():
OP_A7(0xFE, 0xFF, 0xFF, 0xFF, 0xFF, 0x1F4)
ExitThread()
QueueWorkItem(0x3, 2, lambda_A79)
def lambda_A8A():
OP_95(0xFE, -2780, 12000, 218680, 2500, 0x0)
ExitThread()
QueueWorkItem(0x3, 1, lambda_A8A)
WaitChrThread(0x3, 1)
Sleep(500)
OP_E2(0x2)
OP_69(0xFF, 0x0)
EventEnd(0x5)
Return()
# Function_4_82B end
def Function_5_AAF(): pass
label("Function_5_AAF")
EventBegin(0x0)
OP_E2(0x3)
LoadEffect(0x1, "event/evwarp.eff")
Sound(936, 0, 100, 0)
PlayEffect(0x1, 0xFF, 0x0, 0x0, 0, 0, 0, 0, 0, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
def lambda_B08():
OP_A7(0xFE, 0xFF, 0xFF, 0xFF, 0x0, 0x1F4)
ExitThread()
QueueWorkItem(0x0, 2, lambda_B08)
Sleep(100)
PlayEffect(0x1, 0xFF, 0x1, 0x0, 0, 0, 0, 0, 0, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
def lambda_B53():
OP_A7(0xFE, 0xFF, 0xFF, 0xFF, 0x0, 0x1F4)
ExitThread()
QueueWorkItem(0x1, 2, lambda_B53)
Sleep(100)
PlayEffect(0x1, 0xFF, 0x2, 0x0, 0, 0, 0, 0, 0, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
def lambda_B9E():
OP_A7(0xFE, 0xFF, 0xFF, 0xFF, 0x0, 0x1F4)
ExitThread()
QueueWorkItem(0x2, 2, lambda_B9E)
Sleep(100)
PlayEffect(0x1, 0xFF, 0x3, 0x0, 0, 0, 0, 0, 0, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
def lambda_BE9():
OP_A7(0xFE, 0xFF, 0xFF, 0xFF, 0x0, 0x1F4)
ExitThread()
QueueWorkItem(0x3, 2, lambda_BE9)
Sleep(1000)
NewScene("m9008", 102, 0, 0)
IdleLoop()
Return()
# Function_5_AAF end
def Function_6_C02(): pass
label("Function_6_C02")
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF4, 0x4)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_C1A")
LoadChrToIndex("chr/ch03150.itc", 0x23)
label("loc_C1A")
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF4, 0x5)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_C32")
LoadChrToIndex("chr/ch03250.itc", 0x23)
label("loc_C32")
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF4, 0x8)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_C4A")
LoadChrToIndex("chr/ch02950.itc", 0x23)
label("loc_C4A")
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF4, 0x9)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_C62")
LoadChrToIndex("chr/ch00950.itc", 0x23)
label("loc_C62")
Return()
# Function_6_C02 end
def Function_7_C63(): pass
label("Function_7_C63")
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF5, 0x4)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_C7B")
LoadChrToIndex("chr/ch03150.itc", 0x24)
label("loc_C7B")
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF5, 0x5)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_C93")
LoadChrToIndex("chr/ch03250.itc", 0x24)
label("loc_C93")
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF5, 0x8)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_CAB")
LoadChrToIndex("chr/ch02950.itc", 0x24)
label("loc_CAB")
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF5, 0x9)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_CC3")
LoadChrToIndex("chr/ch00950.itc", 0x24)
label("loc_CC3")
Return()
# Function_7_C63 end
def Function_8_CC4(): pass
label("Function_8_CC4")
EventBegin(0x0)
StopBGM(0xFA0)
OP_50(0x1, (scpexpr(EXPR_PUSH_LONG, 0x0), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
EventEnd(0x5)
Return()
# Function_8_CC4 end
def Function_9_CD7(): pass
label("Function_9_CD7")
EventBegin(0x0)
FadeToDark(0, -1, 0)
FadeToDark(500, 0, -1)
OP_0D()
FadeToBright(0, -1)
CreatePortrait(0, 234, 0, 490, 256, 0, 0, 256, 256, 0, 0, 256, 256, 0xFFFFFF, 0x0, "bu01400.itp")
LoadChrToIndex("apl/ch51233.itc", 0x1E)
LoadChrToIndex("chr/ch00050.itc", 0x1F)
LoadChrToIndex("chr/ch00150.itc", 0x20)
LoadChrToIndex("chr/ch00250.itc", 0x21)
LoadChrToIndex("chr/ch00350.itc", 0x22)
Call(0, 6)
Call(0, 7)
LoadChrToIndex("chr/ch02450.itc", 0x25)
LoadChrToIndex("monster/ch85450.itc", 0x26)
LoadChrToIndex("monster/ch60051.itc", 0x27)
LoadChrToIndex("monster/ch85550.itc", 0x28)
LoadChrToIndex("monster/ch60051.itc", 0x29)
LoadChrToIndex("apl/ch51743.itc", 0x2A)
LoadEffect(0x0, "event/ev602_01.eff")
LoadEffect(0x1, "event/eva06_02.eff")
LoadEffect(0x2, "event/eva06_01.eff")
LoadEffect(0x3, "event/ev17013.eff")
SoundLoad(128)
SoundLoad(825)
SoundLoad(832)
SoundLoad(881)
SoundLoad(833)
SoundLoad(4064)
SoundLoad(4077)
SoundLoad(4065)
SoundLoad(4066)
SoundLoad(4067)
SetChrPos(0x101, 0, 25000, 181800, 0)
SetChrPos(0x102, 1100, 25000, 181100, 0)
SetChrPos(0x103, 200, 25000, 180000, 0)
SetChrPos(0x104, -1100, 25000, 180750, 0)
SetChrPos(0xF4, -650, 25000, 179250, 0)
SetChrPos(0xF5, 850, 25000, 179000, 0)
ClearChrFlags(0x4, 0x80)
ClearChrBattleFlags(0x4, 0x8000)
ClearChrFlags(0x5, 0x80)
ClearChrBattleFlags(0x5, 0x8000)
SetChrChipByIndex(0x8, 0x1E)
SetChrSubChip(0x8, 0x0)
ClearChrFlags(0x8, 0x80)
SetChrFlags(0x8, 0x8000)
SetChrPos(0x8, 0, 12000, 210000, 180)
OP_52(0x8, 0x2D, (scpexpr(EXPR_PUSH_LONG, 0x41A), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
OP_52(0x8, 0x2E, (scpexpr(EXPR_PUSH_LONG, 0x41A), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
OP_52(0x8, 0x2F, (scpexpr(EXPR_PUSH_LONG, 0x41A), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
ClearChrFlags(0x9, 0x80)
OP_A7(0x9, 0xFF, 0xFF, 0xFF, 0x0, 0x0)
SetChrPos(0x9, 0, 12000, 198500, 0)
SetChrChipByIndex(0xA, 0x26)
SetChrSubChip(0xA, 0x0)
SetChrFlags(0xA, 0x20)
OP_52(0xA, 0x24, (scpexpr(EXPR_PUSH_LONG, 0xD8), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
OP_52(0xA, 0x7, (scpexpr(EXPR_PUSH_LONG, 0x7D0), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
SetChrPos(0xA, -2500, 12000, 211500, 180)
OP_A7(0xA, 0xFF, 0xFF, 0xFF, 0x0, 0x0)
ClearChrFlags(0xA, 0x80)
ClearChrBattleFlags(0xA, 0x8000)
SetChrChipByIndex(0xB, 0x28)
SetChrSubChip(0xB, 0x0)
SetChrFlags(0xB, 0x20)
OP_52(0xB, 0x24, (scpexpr(EXPR_PUSH_LONG, 0xD8), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
OP_52(0xB, 0x7, (scpexpr(EXPR_PUSH_LONG, 0x7D0), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
SetChrPos(0xB, 2500, 12000, 211500, 180)
OP_A7(0xB, 0xFF, 0xFF, 0xFF, 0x0, 0x0)
ClearChrFlags(0xB, 0x80)
ClearChrBattleFlags(0xB, 0x8000)
BeginChrThread(0xA, 2, 0, 29)
BeginChrThread(0xB, 2, 0, 29)
ClearChrFlags(0xC, 0x80)
OP_68(0, 13000, 180500, 0)
MoveCamera(0, 38, 0, 0)
OP_6E(600, 0)
SetCameraDistance(21000, 0)
Sleep(500)
OP_68(0, 13000, 188000, 4500)
MoveCamera(0, 38, 0, 4500)
OP_6E(600, 4500)
SetCameraDistance(24000, 4500)
FadeToBright(1000, 0)
# Thread body queued on character 0x101 (Lloyd) via the QueueWorkItem call that
# follows this definition. OP_9B appears to be a relative-movement opcode —
# presumably "advance 0x1B58 (7000) units at speed 0x7D0 (2000)"; TODO confirm
# against the ED7 opcode table. ExitThread() terminates the worker thread.
def lambda_FCA():
OP_9B(0x0, 0xFE, 0x0, 0x1B58, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x101, 1, lambda_FCA)
Sleep(50)
# Thread body queued on character 0x103 (Tio) via the QueueWorkItem call that
# follows. Identical movement to the other queued lambdas in this sequence:
# OP_9B with the same arguments — presumably a forward walk of 0x1B58 units at
# speed 0x7D0; TODO confirm opcode semantics. ExitThread() ends the thread.
def lambda_FE2():
OP_9B(0x0, 0xFE, 0x0, 0x1B58, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x103, 1, lambda_FE2)
Sleep(50)
# Thread body queued on character 0x102 (Elie) via the QueueWorkItem call that
# follows. Same OP_9B movement as the sibling lambdas (staggered by 50 ms
# Sleep calls in the surrounding script to create a walking-group effect —
# NOTE(review): staggering inferred from the Sleep(50) spacing; confirm).
def lambda_FFA():
OP_9B(0x0, 0xFE, 0x0, 0x1B58, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x102, 1, lambda_FFA)
Sleep(50)
# Thread body queued on character 0x104 (Randy) via the QueueWorkItem call
# that follows. Same OP_9B relative-movement call as the other lambdas in
# this entrance sequence; TODO confirm OP_9B argument meaning against the
# opcode reference. ExitThread() terminates the worker thread.
def lambda_1012():
OP_9B(0x0, 0xFE, 0x0, 0x1B58, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x104, 1, lambda_1012)
Sleep(50)
# Thread body queued on slot 0xF4 (fifth party member, resolved at runtime)
# via the QueueWorkItem call that follows. Same OP_9B movement as the fixed
# party members so the whole group advances together; semantics of the
# literal arguments hedged — TODO confirm against the ED7 opcode table.
def lambda_102A():
OP_9B(0x0, 0xFE, 0x0, 0x1B58, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0xF4, 1, lambda_102A)
Sleep(50)
# Thread body queued on slot 0xF5 (sixth party member, resolved at runtime)
# via the QueueWorkItem call that follows. Last of the six identical OP_9B
# movement lambdas in this entrance sequence; presumably a forward walk of
# 0x1B58 units at speed 0x7D0 — TODO confirm opcode semantics.
def lambda_1042():
OP_9B(0x0, 0xFE, 0x0, 0x1B58, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0xF5, 1, lambda_1042)
OP_0D()
Sleep(2400)
OP_C9(0x0, 0x80000000)
#N0011
NpcTalk(
0x8,
"男性的声音",
"#4064V#6P#30W#16A你们到了啊。\x02",
)
#Auto
CloseMessageWindow()
OP_C9(0x1, 0x80000000)
OP_63(0x101, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
OP_63(0x102, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
Sleep(50)
OP_63(0x103, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
OP_63(0x104, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
Sleep(50)
OP_63(0xF4, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
OP_63(0xF5, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
Sleep(1000)
WaitChrThread(0xF5, 1)
OP_6F(0x79)
PlayBGM("ed7356", 0)
BeginChrThread(0x101, 0, 0, 10)
Sleep(50)
BeginChrThread(0x102, 0, 0, 11)
Sleep(50)
BeginChrThread(0x103, 0, 0, 12)
Sleep(50)
BeginChrThread(0x104, 0, 0, 13)
Sleep(50)
BeginChrThread(0xF4, 0, 0, 14)
Sleep(50)
BeginChrThread(0xF5, 0, 0, 15)
OP_68(-410, 13300, 205280, 4000)
MoveCamera(47, 16, 0, 4000)
OP_6E(600, 4000)
SetCameraDistance(16180, 4000)
OP_6F(0x79)
WaitChrThread(0x101, 0)
WaitChrThread(0x104, 0)
WaitChrThread(0x103, 0)
WaitChrThread(0x102, 0)
WaitChrThread(0xF4, 0)
WaitChrThread(0xF5, 0)
#C0012
ChrTalk(
0x101,
"#00001F#12P……亚里欧斯先生。\x02",
)
CloseMessageWindow()
#C0013
ChrTalk(
0x104,
(
"#00301F#12P已经不穿那套\x01",
"长官制服了啊……?\x02",
)
)
CloseMessageWindow()
OP_CB(0x0, 0x3, 0xFFFFFFFF, 0x1F4, 0x0, 0x0)
OP_CB(0x0, 0x0, 0xFFFFD8F0, 0x0, 0x1F4, 0x0)
OP_CC(0x0, 0x0, 0x3)
OP_CC(0x0, 0x0, 0x0)
SetMessageWindowPos(14, 280, 35, 3)
Sleep(500)
#A0014
AnonymousTalk(
0x8,
(
"虽然库罗伊斯先生诚意邀请,\x01",
"但对我而言,终究还是太过勉强。\x02\x03",
"既然独立无效宣言已经发布,\x01",
"我也就没资格再穿那套衣服了。\x02\x03",
"如今的我,既不是国防长官也不是游击士。 \x02\x03",
"站在这里的,\x01",
"只是一名飘泊的剑士。\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
OP_5A()
SetMessageWindowPos(14, 280, 60, 3)
OP_CB(0x0, 0x3, 0xFFFFFF, 0x1F4, 0x0, 0x0)
OP_CB(0x0, 0x0, 0x0, 0x0, 0x1F4, 0x0)
OP_CC(0x0, 0x0, 0x3)
OP_CC(0x0, 0x0, 0x0)
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x9)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_138A")
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF4, 0x9)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_1364")
OP_FC(0xFFF4)
Jump("loc_1367")
label("loc_1364")
OP_FC(0xC)
label("loc_1367")
#C0015
ChrTalk(
0x10A,
"#00600F#13P马克莱因……\x02",
)
CloseMessageWindow()
OP_5A()
Jump("loc_13D9")
label("loc_138A")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x8)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_13D9")
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF4, 0x8)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_13B4")
OP_FC(0xFFF4)
Jump("loc_13B7")
label("loc_13B4")
OP_FC(0xC)
label("loc_13B7")
#C0016
ChrTalk(
0x109,
"#10113F#13P亚里欧斯先生……\x02",
)
CloseMessageWindow()
OP_5A()
label("loc_13D9")
#C0017
ChrTalk(
0x102,
"#00108F#12P你为何如此……\x02",
)
CloseMessageWindow()
#C0018
ChrTalk(
0x103,
"#00206F#12P……未免也太古板了。\x02",
)
CloseMessageWindow()
OP_63(0x101, 0x0, 2000, 0x18, 0x1B, 0xFA, 0x0)
Sleep(1500)
OP_64(0x101)
#C0019
ChrTalk(
0x101,
(
"#00004F#12P……哈哈,真没办法啊……\x02\x03",
"#00008F想问的事情实在太多了,\x01",
"一时还无法理清思绪……\x02\x03",
"#00000F不过,我们可以先来\x01",
"『核对答案』吗……?\x02",
)
)
CloseMessageWindow()
#C0020
ChrTalk(
0x8,
(
"#01404F好#5P——我原本就有这个打算。\x02\x03",
"#01400F尽管问吧……\x01",
"除了一件事情之外,\x01",
"我全都可以回答。\x02",
)
)
CloseMessageWindow()
#C0021
ChrTalk(
0x101,
"#00003F#12P那么……\x02",
)
CloseMessageWindow()
ClearScenarioFlags(0x0, 3)
ClearScenarioFlags(0x0, 0)
ClearScenarioFlags(0x0, 1)
ClearScenarioFlags(0x0, 2)
label("loc_154C")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 3)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_358B")
FadeToDark(300, 0, 100)
OP_0D()
MenuCmd(0, 0)
MenuCmd(1, 0, "五年前的『事故』")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 0)), scpexpr(EXPR_END)), "loc_1599")
MenuCmd(1, 0, "与伊安律师之间的关系")
label("loc_1599")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 1)), scpexpr(EXPR_END)), "loc_15B8")
MenuCmd(1, 0, "黑之竞拍会上的琪雅")
label("loc_15B8")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 2)), scpexpr(EXPR_END)), "loc_15D5")
MenuCmd(1, 0, "盖伊身亡的那一天")
label("loc_15D5")
MenuCmd(2, 0, -1, -1, 0)
MenuEnd(0x0)
OP_60(0x0)
FadeToBright(300, 0)
OP_0D()
Switch(
(scpexpr(EXPR_GET_RESULT, 0x0), scpexpr(EXPR_END)),
(0, "loc_160F"),
(1, "loc_2199"),
(2, "loc_2ADB"),
(3, "loc_357E"),
(SWITCH_DEFAULT, "loc_3586"),
)
label("loc_160F")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 0)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_205A")
#C0022
ChrTalk(
0x101,
(
"#00008F#12P……抱歉,要提及\x01",
"您的沉痛往事了……\x02\x03",
"#00001F五年前那起『事故』的内情,\x01",
"可以告诉我们吗……?\x02",
)
)
CloseMessageWindow()
#C0023
ChrTalk(
0x8,
(
"#01403F#5P好……\x01",
"现在已经没有隐瞒的必要了。\x02\x03",
"#01400F五年前,在大道上发生了\x01",
"运输车爆炸事故……\x02\x03",
"正如你们所觉察到的一样,\x01",
"那是因帝国与共和国之间的\x01",
"谍报战而导致的结果。\x02",
)
)
CloseMessageWindow()
#C0024
ChrTalk(
0x102,
"#00106F#12P果然……\x02",
)
CloseMessageWindow()
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x9)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_1880")
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF4, 0x9)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_1773")
OP_FC(0xFFF4)
Jump("loc_1776")
label("loc_1773")
OP_FC(0xC)
label("loc_1776")
#C0025
ChrTalk(
0x10A,
"#00608F#13P………………………………\x02",
)
CloseMessageWindow()
OP_5A()
#C0026
ChrTalk(
0x8,
(
"#01402F#5P呵呵,一科肯定\x01",
"掌握到这一事实了吧?\x02\x03",
"#01403F但由于高层对帝国派\x01",
"和共和国派有所顾虑,于是便\x01",
"理所当然地将这件事压下了……\x02\x03",
"#01400F虽然我对此感到失望,\x01",
"但事到如今,已经没有怨恨了。\x02",
)
)
CloseMessageWindow()
#C0027
ChrTalk(
0x10A,
"#00606F#13P……我很抱歉。\x02",
)
CloseMessageWindow()
OP_5A()
label("loc_1880")
#C0028
ChrTalk(
0x103,
(
"#00208F#12P……因为那起事故,亚里欧斯先生的\x01",
"妻子和小滴才会……\x02",
)
)
CloseMessageWindow()
#C0029
ChrTalk(
0x8,
(
"#01403F#5P是的……纱绫因此丧命,\x01",
"滴则失明了。\x02\x03",
"#01408F自那之后的五年间……\x01",
"两国的谍报机构日趋完善,\x01",
"无谓的破坏工作也渐渐绝迹……\x02\x03",
"#01401F但在长达数十年的暗斗过程中,\x01",
"曾出现过很多和\x01",
"纱绫一样的受害者。\x02\x03",
"#01403F罗伊德,其中也包括你的父母,\x01",
"还有伊安律师的家人。\x02",
)
)
CloseMessageWindow()
OP_63(0x101, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
OP_63(0x102, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
Sleep(50)
OP_63(0x103, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
OP_63(0x104, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
Sleep(50)
OP_63(0xF4, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
OP_63(0xF5, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
Sleep(1000)
OP_82(0x64, 0x0, 0xBB8, 0x12C)
#C0030
ChrTalk(
0x101,
"#00005F#12P#4S!?\x02",
)
CloseMessageWindow()
#C0031
ChrTalk(
0x104,
"#00307F#12P什么……!?\x02",
)
CloseMessageWindow()
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x17C, 6)), scpexpr(EXPR_END)), "loc_1BAA")
#C0032
ChrTalk(
0x102,
(
"#00101F#12P罗、罗伊德的父母\x01",
"不是在某起……!?\x02",
)
)
CloseMessageWindow()
#C0033
ChrTalk(
0x103,
"#00208F#12P在十五年前的某起飞船事故中……\x02",
)
CloseMessageWindow()
#C0034
ChrTalk(
0x101,
(
"#00006F#12P嗯……我以前和你们说过。\x02\x03",
"#00008F但我那时刚刚懂事,\x01",
"几乎没留下什么印象……\x02\x03",
"#00013F也就是说……\x01",
"伊安律师的家人也是在那时……?\x02",
)
)
CloseMessageWindow()
Jump("loc_1CAB")
label("loc_1BAA")
#C0035
ChrTalk(
0x102,
"#00105F#12P罗、罗伊德的父母!?\x02",
)
CloseMessageWindow()
#C0036
ChrTalk(
0x103,
"#00206F#12P……第一次听说……\x02",
)
CloseMessageWindow()
#C0037
ChrTalk(
0x101,
(
"#00006F#12P我的父母……\x01",
"在十五年前飞船刚开始投入运行时,\x01",
"遭遇事故而去世了……\x02\x03",
"#00008F但我那时刚刚懂事,\x01",
"几乎没留下什么印象……\x02\x03",
"#00013F也就是说……\x01",
"伊安律师的家人也是在那时……?\x02",
)
)
CloseMessageWindow()
label("loc_1CAB")
#C0038
ChrTalk(
0x8,
(
"#01403F#5P嗯,他的妻子和孩子\x01",
"也乘坐了那班飞船。\x02\x03",
"我至少还有滴……\x01",
"而他却失去了一切。\x01",
"那种伤痛,非我所能想象。\x02\x03",
"#01400F另外,盖伊和伊安律师就是\x01",
"在当时作为死难者家属而相识的。\x02",
)
)
CloseMessageWindow()
#C0039
ChrTalk(
0x101,
"#00001F#12P……………………………………\x02",
)
CloseMessageWindow()
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x8)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_1DE0")
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF4, 0x8)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_1DB5")
OP_FC(0xFFF4)
Jump("loc_1DB8")
label("loc_1DB5")
OP_FC(0xC)
label("loc_1DB8")
#C0040
ChrTalk(
0x109,
"#10106F#13P……竟、竟有这种事……\x02",
)
CloseMessageWindow()
OP_5A()
label("loc_1DE0")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x9)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_1E3E")
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF4, 0x9)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_1E0A")
OP_FC(0xFFF4)
Jump("loc_1E0D")
label("loc_1E0A")
OP_FC(0xC)
label("loc_1E0D")
#C0041
ChrTalk(
0x10A,
(
"#00606F#13P这是连一科\x01",
"都没有掌握的情报……\x02",
)
)
CloseMessageWindow()
OP_5A()
label("loc_1E3E")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x4)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_1EA6")
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF4, 0x4)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_1E68")
OP_FC(0xFFF4)
Jump("loc_1E6B")
label("loc_1E68")
OP_FC(0xC)
label("loc_1E6B")
#C0042
ChrTalk(
0x105,
(
"#10401F#13P……原来如此,\x01",
"没想到还有这样一番往事……\x02",
)
)
CloseMessageWindow()
OP_5A()
label("loc_1EA6")
#C0043
ChrTalk(
0x8,
(
"#01403F#5P……在五年前的那起事件发生之后,\x01",
"我辞去了警察一职,\x01",
"加入游击士协会。\x02\x03",
"作出这个决定的理由有很多,比如对警察\x01",
"感到失望,筹措滴的住院费用等等……\x02\x03",
"#01408F但实际上,我也许只是\x01",
"想逃避失去纱绫的悲伤罢了。\x02\x03",
"#01400F为此,我才会一头扎进\x01",
"没完没了的游击士工作中。\x02",
)
)
CloseMessageWindow()
#C0044
ChrTalk(
0x101,
"#00008F#12P亚里欧斯先生……\x02",
)
CloseMessageWindow()
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x5)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_2052")
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF4, 0x5)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_1FFC")
OP_FC(0xFFF4)
Jump("loc_1FFF")
label("loc_1FFC")
OP_FC(0xC)
label("loc_1FFF")
#C0045
ChrTalk(
0x106,
(
"#10706F#13P(……过去被『银』暗杀的那些人的家人,\x01",
" 肯定也会如此悲伤吧……)\x02",
)
)
CloseMessageWindow()
OP_5A()
label("loc_2052")
SetScenarioFlags(0x0, 0)
Jump("loc_2194")
label("loc_205A")
#C0046
ChrTalk(
0x8,
(
"#01403F#5P五年前,发生在大路上的\x01",
"那起运输车爆炸事故\x01",
"夺走了纱绫的生命和滴的光明……\x02\x03",
"#01401F那是因帝国与共和国之间的\x01",
"谍报战而导致的结果。\x02\x03",
"另外,十五年前的那起飞船事故\x01",
"也是由于同样的原因而发生的。\x02\x03",
"#01403F那起事故……使盖伊和你的父母,\x01",
"还有伊安律师的家人不幸丧生。\x02",
)
)
CloseMessageWindow()
#C0047
ChrTalk(
0x101,
"#00008F#12P……………………………………\x02",
)
CloseMessageWindow()
label("loc_2194")
Jump("loc_3586")
label("loc_2199")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 1)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_2A1B")
#C0048
ChrTalk(
0x101,
(
"#00006F#12P……有件事情,\x01",
"我一直怀有疑问。\x02\x03",
"#00001F那就是您为何会与迪塔先生\x01",
"他们有所往来。\x02",
)
)
CloseMessageWindow()
OP_63(0x8, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
Sleep(1000)
#C0049
ChrTalk(
0x8,
"#01405F#5P哦……?\x02",
)
CloseMessageWindow()
#C0050
ChrTalk(
0x102,
(
"#00106F#12P……虽然迪塔叔叔和贝尔对\x01",
"经济、金融,以及与库罗伊斯家族\x01",
"存在关联的教团了解甚详……\x02",
)
)
CloseMessageWindow()
#C0051
ChrTalk(
0x104,
(
"#00303F#12P但关于帝国与共和国的暗斗……\x02\x03",
"#00301F如果连这些情况都了如指掌,\x01",
"就未免有些奇怪了。\x02",
)
)
CloseMessageWindow()
#C0052
ChrTalk(
0x103,
(
"#00203F#12P双方原本毫不相干……\x02\x03",
"#00201F然而,迪塔先生就任为总统之后,\x01",
"却任命亚里欧斯先生为国防长官……\x02",
)
)
CloseMessageWindow()
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x9)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_2423")
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF4, 0x9)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_238D")
OP_FC(0xFFF4)
Jump("loc_2390")
label("loc_238D")
OP_FC(0xC)
label("loc_2390")
#C0053
ChrTalk(
0x10A,
(
"#00606F#13P……原来如此,是这样啊。\x02\x03",
"#00601F也就是说,将他们联系起来的人\x01",
"就是伊安律师吧?\x02",
)
)
CloseMessageWindow()
OP_5A()
#C0054
ChrTalk(
0x101,
(
"#00001F#12P嗯……\x01",
"亚里欧斯先生,没错吧?\x02",
)
)
CloseMessageWindow()
Jump("loc_2588")
label("loc_2423")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x4)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_24E5")
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF4, 0x4)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_244D")
OP_FC(0xFFF4)
Jump("loc_2450")
label("loc_244D")
OP_FC(0xC)
label("loc_2450")
#C0055
ChrTalk(
0x105,
(
"#10406F#13P……原来如此,是这样啊。\x02\x03",
"#10401F也就是说,是那个大胡子熊律师\x01",
"在中间牵线搭桥啊。\x02",
)
)
CloseMessageWindow()
OP_5A()
#C0056
ChrTalk(
0x101,
(
"#00001F#12P嗯……\x01",
"亚里欧斯先生,没错吧?\x02",
)
)
CloseMessageWindow()
Jump("loc_2588")
label("loc_24E5")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x8)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_2588")
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF4, 0x8)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_250F")
OP_FC(0xFFF4)
Jump("loc_2512")
label("loc_250F")
OP_FC(0xC)
label("loc_2512")
#C0057
ChrTalk(
0x109,
(
"#10108F#13P难道……\x02\x03",
"#10101F将他们联系起来的人\x01",
"就是伊安律师……?\x02",
)
)
CloseMessageWindow()
OP_5A()
#C0058
ChrTalk(
0x101,
(
"#00001F#12P嗯……\x01",
"亚里欧斯先生,没错吧?\x02",
)
)
CloseMessageWindow()
label("loc_2588")
#C0059
ChrTalk(
0x8,
"#01404F#5P呵呵……正是如此。\x02",
)
CloseMessageWindow()
FadeToDark(1000, 0, -1)
OP_0D()
CreatePortrait(0, 224, 0, 480, 256, 10, 16, 256, 256, 0, 0, 256, 256, 0xFFFFFF, 0x0, "bu02200.itp")
OP_CB(0x0, 0x3, 0xAAFFFFFF, 0x1F4, 0x0, 0x0)
OP_CC(0x0, 0x0, 0x3)
Sleep(500)
SetMessageWindowPos(14, 280, -1, 3)
#A0060
AnonymousTalk(
0x8,
(
"#01402F和你们一样,在从事警察工作时,\x01",
"我和盖伊也屡受伊安律师的关照,\x01",
"他提供的情报给我们带来了很大帮助。\x02\x03",
"在镇压教团据点的作战中,\x01",
"伊安律师也以民间顾问的身份\x01",
"给我们提供了协助。\x02\x03",
"#01403F在我成为游击士之后……\x01",
"仍然频繁与他交换情报。\x02",
)
)
CloseMessageWindow()
CreatePortrait(1, 224, 0, 480, 256, 65296, 16, 256, 256, 0, 0, 256, 256, 0xFFFFFF, 0x0, "bu02800.itp")
OP_CB(0x1, 0x3, 0xAAFFFFFF, 0x1F4, 0x0, 0x0)
OP_CC(0x0, 0x1, 0x3)
Sleep(500)
SetMessageWindowPos(14, 280, -1, 3)
#A0061
AnonymousTalk(
0x8,
(
"#01403F#5P而律师通过IBC的法务工作,\x01",
"与库罗伊斯父女交情颇深。\x02\x03",
"#01401F于是,一切情报与要素\x01",
"都集中、整合在了律师的手中……\x02\x03",
"库罗伊斯先生在律师的诱导之下,\x01",
"通过各种政治工作与『至宝』的力量,\x01",
"成功实现了克洛斯贝尔的独立。\x02\x03",
"#01403F但他却不知道,律师先生和玛丽亚贝尔小姐\x01",
"一直在暗中推进他们的真正计划。\x02",
)
)
CloseMessageWindow()
FadeToBright(800, 0)
OP_CB(0x0, 0x3, 0xFFFFFF, 0x320, 0x0, 0x0)
OP_CB(0x1, 0x3, 0xFFFFFF, 0x320, 0x0, 0x0)
OP_CC(0x0, 0x0, 0x3)
OP_CC(0x0, 0x1, 0x3)
OP_CC(0x1, 0xFF, 0x0)
OP_0D()
Sleep(300)
#C0062
ChrTalk(
0x101,
"#00013F#12P真正计划……\x02",
)
CloseMessageWindow()
#C0063
ChrTalk(
0x102,
"#00108F#12P就是『碧零计划』吗……\x02",
)
CloseMessageWindow()
#C0064
ChrTalk(
0x8,
(
"#01403F#5P不错……律师很快就察觉到了\x01",
"纱绫那起事故背后的真相。\x02\x03",
"于是,他向我说明了情况……\x01",
"而我决定协助他们完成此项计划。\x02\x03",
"#01400F这就是事情的全部经过。\x02",
)
)
CloseMessageWindow()
#C0065
ChrTalk(
0x101,
"#00008F#12P………………………………\x02",
)
CloseMessageWindow()
#C0066
ChrTalk(
0x103,
(
"#00206F#12P一切都在伊安律师和\x01",
"玛丽亚贝尔小姐的掌控之中……\x02",
)
)
CloseMessageWindow()
#C0067
ChrTalk(
0x104,
"#00301F#12P……真是惊人。\x02",
)
CloseMessageWindow()
SetScenarioFlags(0x0, 1)
Jump("loc_2AD6")
label("loc_2A1B")
#C0068
ChrTalk(
0x8,
(
"#01403F#5P将库罗伊斯先生和我这两个\x01",
"毫无关系的人联系起来的,\x01",
"正是伊安律师。\x02\x03",
"伊安律师很快就察觉到了\x01",
"五年前那起事故的真相,\x01",
"并邀请我参与『碧零计划』……\x02\x03",
"#01400F而我也接受了他的邀请。\x02",
)
)
CloseMessageWindow()
label("loc_2AD6")
Jump("loc_3586")
label("loc_2ADB")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 2)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_345B")
#C0069
ChrTalk(
0x101,
(
"#00006F#12P……还有一件事,\x01",
"我也怀有疑问……\x02\x03",
"#00013F把琪雅从『太阳堡垒』的地下\x01",
"带出来的人就是您吧?\x02\x03",
"另外,把她和预计在『黑之竞拍会』拍卖的\x01",
"罗赞贝尔克人偶调包的人应该也是您吧?\x02",
)
)
CloseMessageWindow()
#C0070
ChrTalk(
0x102,
"#00105F#12P说、说起来……\x02",
)
CloseMessageWindow()
#C0071
ChrTalk(
0x103,
(
"#00206F#12P这个问题至今都没有\x01",
"完全解明呢。\x02",
)
)
CloseMessageWindow()
#C0072
ChrTalk(
0x8,
(
"#01404F#5P不错,正是如此。\x02\x03",
"#01402F但这件事情的主导者并非律师,\x01",
"而是玛丽亚贝尔小姐。\x02",
)
)
CloseMessageWindow()
CreatePortrait(0, 0, 8, 480, 264, 0, 0, 512, 256, 0, 0, 480, 256, 0xFFFFFF, 0x0, "c_vis303.itp")
CreatePortrait(1, 0, 8, 480, 264, 0, 0, 512, 256, 0, 0, 480, 256, 0xFFFFFF, 0x0, "c_vis304.itp")
OP_CB(0x0, 0x3, 0xFFFFFFFF, 0x320, 0x0, 0x0)
OP_CC(0x0, 0x0, 0x3)
Sleep(500)
SetMessageWindowPos(14, 280, -1, 3)
#A0073
AnonymousTalk(
0x8,
(
"#01403F她似乎完全掌握了\x01",
"约亚西姆的动向……\x02\x03",
"#01401F凭借她的传送术,我们轻易\x01",
"抵达了最底层的祭坛,\x01",
"并把那孩子从摇篮中释放。\x02",
)
)
CloseMessageWindow()
OP_CB(0x1, 0x4, 0x0, 0x0, 0x0, 0x0)
OP_CB(0x1, 0x3, 0xFFFFFFFF, 0x320, 0x0, 0x0)
OP_CC(0x0, 0x1, 0x3)
OP_CB(0x0, 0x3, 0xFFFFFF, 0x0, 0x0, 0x0)
Sleep(500)
SetMessageWindowPos(14, 280, -1, 3)
#A0074
AnonymousTalk(
0x8,
(
"#01403F之后,我把那孩子和\x01",
"从雷米菲利亚运送过来的\x01",
"罗赞贝尔克人偶调了包。\x02\x03",
"#01400F那个罗赞贝尔克人偶也是\x01",
"玛丽亚贝尔小姐准备的,目的是\x01",
"为了避免让鲁巴彻的人看出端倪。\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
OP_5A()
SetMessageWindowPos(14, 280, -1, 3)
#A0075
AnonymousTalk(
0x102,
"#00106F……竟连那种事都考虑到了……\x02",
)
CloseMessageWindow()
OP_CB(0x1, 0x3, 0xFFFFFF, 0x320, 0x0, 0x0)
OP_CC(0x0, 0x1, 0x3)
OP_CC(0x1, 0xFF, 0x0)
Sleep(300)
#C0076
ChrTalk(
0x101,
(
"#00006F#12P不过……\x01",
"这种行为对于玛丽亚贝尔小姐\x01",
"又有什么意义呢?\x02\x03",
"#00013F如果他们为了实现计划,需要得到琪雅,\x01",
"只要把她看管好不就行了……\x02",
)
)
CloseMessageWindow()
#C0077
ChrTalk(
0x8,
(
"#01403F#5P原因之一,是要把『黑月』牵扯进来,\x01",
"令鲁巴彻颜面扫地,\x01",
"踏出自取灭亡的第一步……\x02\x03",
"#01400F如果那孩子在竞拍会的\x01",
"现场突然醒来,\x01",
"玛丽亚贝尔小姐肯定会采取行动。\x02\x03",
"我想,她应该会在惊讶的客人和\x01",
"马尔克尼的面前抬出IBC的名号,\x01",
"从而接收那个孩子。\x02\x03",
"#01404F如果『黑月』在那时采取行动,\x01",
"情况恐怕会有所不同……\x02\x03",
"不过,我当时也\x01",
"潜伏在会场内。\x02\x03",
"#01402F无论事态如何发展,\x01",
"我们都足以掌控整个局势。\x02",
)
)
CloseMessageWindow()
#C0078
ChrTalk(
0x104,
(
"#00306F#12P该怎么说呢……\x01",
"准备得也太周全了吧。\x02",
)
)
CloseMessageWindow()
#C0079
ChrTalk(
0x103,
"#00211F#12P计划实在太缜密了……\x02",
)
CloseMessageWindow()
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x5)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_315F")
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF4, 0x5)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_311D")
OP_FC(0xFFF4)
Jump("loc_3120")
label("loc_311D")
OP_FC(0xC)
label("loc_3120")
#C0080
ChrTalk(
0x106,
(
"#10708F#13P……我当时确实\x01",
"感觉到还有其他人\x01",
"潜伏在会场……\x02",
)
)
CloseMessageWindow()
label("loc_315F")
#C0081
ChrTalk(
0x8,
(
"#01403F#5P而另一个原因……\x02\x03",
"#01401F就是让『至宝』在那种\x01",
"特殊情况下苏醒,\x01",
"以便确认她的潜在能力。\x02",
)
)
CloseMessageWindow()
OP_63(0x101, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
OP_63(0x102, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
Sleep(50)
OP_63(0x103, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
OP_63(0x104, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
Sleep(50)
OP_63(0xF4, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
OP_63(0xF5, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
Sleep(1000)
#C0082
ChrTalk(
0x103,
"#00205F#12P确认琪雅的潜在能力?\x02",
)
CloseMessageWindow()
#C0083
ChrTalk(
0x102,
"#00101F#12P什、什么意思……?\x02",
)
CloseMessageWindow()
#C0084
ChrTalk(
0x8,
(
"#01403F#5P不清楚,玛丽亚贝尔小姐\x01",
"只说了这么多。\x02\x03",
"#01408F那种外部环境,或许也是让\x01",
"那孩子从长久的沉睡中\x01",
"苏醒的条件之一吧……\x02\x03",
"#01400F不管怎么说,不知是女神的引导,\x01",
"还是单纯的偶然,\x01",
"她在你们面前苏醒了。\x02\x03",
"对于玛丽亚贝尔小姐来说,\x01",
"这自然是出乎意料的状况……\x02\x03",
"#01403F不过,她似乎毫不介意\x01",
"那孩子被你们收养,\x01",
"并与你们一起生活。\x02",
)
)
CloseMessageWindow()
#C0085
ChrTalk(
0x101,
"#00008F#12P………………………………\x02",
)
CloseMessageWindow()
#C0086
ChrTalk(
0x104,
(
"#00306F#12P……想不明白,\x01",
"真是莫名其妙啊……\x02",
)
)
CloseMessageWindow()
#C0087
ChrTalk(
0x102,
(
"#00108F#12P……贝尔……\x01",
"到底有何打算呢……?\x02",
)
)
CloseMessageWindow()
SetScenarioFlags(0x0, 2)
Jump("loc_3579")
label("loc_345B")
#C0088
ChrTalk(
0x8,
(
"#01403F#5P从『太阳堡垒』释放琪雅,\x01",
"并将她带进竞拍会场的行动,\x01",
"都是在玛丽亚贝尔小姐的主导下进行的。\x02\x03",
"#01408F其目的是为了\x01",
"诱导鲁巴彻走向毁灭,\x01",
"控制住局势的发展……\x02\x03",
"#01401F同时,也是为了让那孩子\x01",
"在特殊的情况下苏醒,\x01",
"以便确认她的潜在能力。\x02\x03",
"#01403F至于其它事情,\x01",
"很遗憾,我也不清楚。\x02",
)
)
CloseMessageWindow()
label("loc_3579")
Jump("loc_3586")
label("loc_357E")
SetScenarioFlags(0x0, 3)
Jump("loc_3586")
label("loc_3586")
Jump("loc_154C")
label("loc_358B")
OP_63(0x101, 0x0, 2000, 0x18, 0x1B, 0xFA, 0x0)
Sleep(1500)
OP_64(0x101)
#C0089
ChrTalk(
0x101,
(
"#00006F#12P#30W……那么……\x02\x03",
"#00008F大哥去世那天的真相……\x01",
"……可以告诉我吗?\x02",
)
)
CloseMessageWindow()
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x9)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_363F")
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF4, 0x9)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_3620")
OP_FC(0xFFF4)
Jump("loc_3623")
label("loc_3620")
OP_FC(0xC)
label("loc_3623")
#C0090
ChrTalk(
0x10A,
"#00601F#13P……………\x02",
)
CloseMessageWindow()
OP_5A()
label("loc_363F")
#C0091
ChrTalk(
0x103,
"#00208F#12P……啊………\x02",
)
CloseMessageWindow()
#C0092
ChrTalk(
0x8,
"#01403F#5P#30W………………………………\x02",
)
CloseMessageWindow()
OP_63(0x8, 0x0, 2000, 0x18, 0x1B, 0xFA, 0x0)
Sleep(1500)
OP_64(0x8)
#C0093
ChrTalk(
0x8,
"#01400F#5P#30W好。\x02",
)
CloseMessageWindow()
StopBGM(0xFA0)
FadeToDark(1000, 0, -1)
OP_0D()
WaitBGM()
Sound(883, 0, 60, 0)
Sleep(2300)
Sound(128, 2, 10, 0)
Sleep(150)
OP_25(0x80, 0x14)
Sleep(150)
OP_25(0x80, 0x1E)
Sleep(150)
OP_25(0x80, 0x28)
Sleep(150)
OP_25(0x80, 0x32)
Sleep(500)
SetMessageWindowPos(14, 280, -1, 3)
#A0094
AnonymousTalk(
0x8,
(
"#3C#30W……纱绫去世,我离开\x01",
"警察局之后,又过了两年……\x02\x03",
"在那期间,我参与伊安律师他们的计划,\x01",
"并完成了几项工作……\x02\x03",
"全都是些不可告人……\x01",
"暗藏阴谋的工作。\x02\x03",
"然而,连同协会成员在内,\x01",
"大家都没有察觉到\x01",
"那些情况。\x02\x03",
"唯一例外的人就是我过去的搭档……\x01",
"盖伊·班宁斯。\x02",
)
)
CloseMessageWindow()
PlayBGM("ed7560", 0)
CreatePortrait(0, 224, 0, 480, 256, 0, 16, 256, 256, 0, 0, 256, 256, 0xFFFFFF, 0x0, "bu07800.itp")
OP_CB(0x0, 0x3, 0xEEFFFFFF, 0x1F4, 0x0, 0x0)
OP_CC(0x0, 0x0, 0x3)
Sleep(500)
SetMessageWindowPos(14, 280, -1, 3)
#A0095
AnonymousTalk(
0x8,
(
"#3C#30W盖伊……那家伙凭借着\x01",
"惊人的直觉和顽强的毅力,\x01",
"接近了各种阴谋与秘密的核心。\x02\x03",
"帝国与共和国的暗中斗争……\x02\x03",
"哈尔特曼议长和鲁巴彻,\x01",
"以及D∴G教团残余势力的动向……\x02\x03",
"甚至连深藏于幕后的\x01",
"库罗伊斯家族的计划都……\x02\x03",
"于是──\x02\x03",
"在那个雨天,盖伊把我叫到了\x01",
"当时刚刚开工的兰花塔\x01",
"施工现场……\x02",
)
)
CloseMessageWindow()
OP_CB(0x0, 0x3, 0xFF000000, 0x1F4, 0x0, 0x0)
OP_CC(0x0, 0x0, 0x3)
OP_25(0x80, 0x3C)
Sleep(200)
OP_25(0x80, 0x46)
Sleep(200)
OP_25(0x80, 0x50)
Sleep(200)
OP_25(0x80, 0x5A)
Sleep(200)
OP_25(0x80, 0x64)
CreatePortrait(0, 0, 8, 480, 264, 0, 0, 512, 256, 0, 0, 480, 256, 0xFF000000, 0x0, "c_vis305.itp")
OP_CB(0x0, 0x3, 0xFFFFFFFF, 0x320, 0x0, 0x0)
OP_CC(0x0, 0x0, 0x3)
Sleep(500)
SetMessageWindowPos(14, 280, -1, 3)
#A0096
AnonymousTalk(
0x8,
(
"#3C#30W当然,盖伊并没有\x01",
"掌握计划的详细情况……\x02\x03",
"但他的推测准确得惊人,\x01",
"基本把握了整个计划的全貌。\x02\x03",
"包括库罗伊斯家族利用教团和黑手党,\x01",
"趁势进军政坛……\x02\x03",
"主导袭击克洛斯贝尔市的行动,\x01",
"并伪造出是国外势力所为的假象,\x01",
"以此来煽动民众的独立情绪……\x02\x03",
"甚至连利用库罗伊斯家族的『某种东西』\x01",
"来镇压、控制整个大陆的计划都被他……\x02\x03",
"实在是令人\x01",
"难以置信。\x02\x03",
"随后──\x02",
)
)
CloseMessageWindow()
OP_CB(0x0, 0x3, 0xFF000000, 0x1F4, 0x0, 0x0)
OP_CC(0x0, 0x0, 0x3)
OP_CC(0x1, 0xFF, 0x0)
CreatePortrait(0, 0, 0, 512, 512, 0, 65296, 512, 512, 0, 0, 512, 512, 0xFF000000, 0x0, "c_vis330.itp")
CreatePortrait(1, 0, 0, 512, 512, 0, 0, 512, 512, 0, 0, 512, 512, 0xFFFFFF, 0x0, "c_vis331.itp")
OP_CB(0x0, 0x3, 0xFFFFFFFF, 0x1F4, 0x0, 0x0)
OP_CB(0x0, 0x0, 0x0, 0x0, 0x7D0, 0x0)
OP_CC(0x0, 0x0, 0x0)
OP_CB(0x1, 0x4, 0x0, 0x0, 0x0, 0x0)
OP_CB(0x1, 0x3, 0xFFFFFFFF, 0x1F4, 0x0, 0x0)
OP_CB(0x1, 0x0, 0x0, 0xFFFC5680, 0x7D0, 0x0)
OP_CC(0x0, 0x1, 0x3)
OP_CB(0x0, 0x3, 0xFFFFFF, 0x1F4, 0x0, 0x0)
OP_CC(0x0, 0x1, 0x0)
OP_CB(0x1, 0x3, 0xFF000000, 0x1F4, 0x0, 0x0)
OP_CC(0x0, 0x1, 0x3)
OP_CC(0x1, 0xFF, 0x0)
Sound(534, 0, 80, 0)
Sleep(100)
PlayEffect(0x3, 0x3, 0xC, 0x0, 0, 0, 0, 180, 0, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
Sound(372, 0, 40, 0)
Sleep(200)
Sound(540, 0, 100, 0)
Sound(511, 0, 100, 0)
Sleep(400)
Sound(540, 0, 100, 0)
Sound(372, 0, 40, 0)
Sound(566, 0, 50, 0)
Sleep(200)
CreatePortrait(0, 0, 8, 480, 264, 0, 0, 512, 256, 0, 0, 480, 256, 0xFF000000, 0x0, "c_vis306.itp")
OP_CB(0x0, 0x3, 0xFFFFFFFF, 0x1F4, 0x0, 0x0)
Sleep(50)
BeginChrThread(0x8, 0, 0, 30)
OP_CC(0x0, 0x0, 0x3)
Sleep(500)
SetMessageWindowPos(14, 280, -1, 3)
#A0097
AnonymousTalk(
0x8,
(
"#3C#30W……盖伊并没有接受我的劝告,\x01",
"不答应就此罢手不管……\x02\x03",
"于是我们就在雨中展开了一场殊死搏斗。\x02\x03",
"论武术水平,我要略胜一筹……\x01",
"然而,盖伊凭着坚不可摧的意志,\x01",
"激发出了自身的最大力量。\x02\x03",
"我们交战数十个回合,\x01",
"不断消耗对方的体力,\x01",
"在雨中持续着死战……\x02\x03",
"最后──\x02",
)
)
CloseMessageWindow()
OP_CB(0x0, 0x3, 0xFF000000, 0x320, 0x0, 0x0)
OP_CC(0x0, 0x0, 0x3)
OP_CC(0x1, 0xFF, 0x0)
Sleep(800)
CreatePortrait(0, 0, 8, 480, 264, 0, 0, 512, 256, 0, 0, 480, 256, 0xFF000000, 0x0, "c_vis307.itp")
OP_CB(0x0, 0x3, 0xFFFFFFFF, 0x3E8, 0x0, 0x0)
OP_CC(0x0, 0x0, 0x3)
Sleep(800)
SetMessageWindowPos(14, 280, -1, 3)
#A0098
AnonymousTalk(
0x8,
(
"#3C#30W盖伊他……\x01",
"丧命了。\x02\x03",
"把他的旋棍从现场带走的人\x01",
"自然就是我。\x02\x03",
"因为我不想让别人根据旋棍上的无数刀痕\x01",
"而确定犯人的身份。\x07\x00\x02",
)
)
CloseMessageWindow()
OP_CB(0x0, 0x3, 0xFFFFFF, 0x3E8, 0x0, 0x0)
OP_CC(0x0, 0x0, 0x3)
OP_CC(0x1, 0xFF, 0x0)
StopSound(128, 1000, 100)
FadeToBright(1500, 0)
OP_0D()
Sleep(500)
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x8)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_3FD5")
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF4, 0x8)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_3F7E")
OP_FC(0xFFF4)
Jump("loc_3F81")
label("loc_3F7E")
OP_FC(0xC)
label("loc_3F81")
#C0099
ChrTalk(
0x109,
"#10106F#13P#30W竟有这种事……\x02",
)
CloseMessageWindow()
OP_5A()
#C0100
ChrTalk(
0x103,
"#00208F#12P#30W……………………………\x02",
)
CloseMessageWindow()
Jump("loc_402B")
label("loc_3FD5")
#C0101
ChrTalk(
0x101,
"#00008F#12P#30W……………………………\x02",
)
CloseMessageWindow()
#C0102
ChrTalk(
0x103,
"#00206F#12P#30W……竟有这种事…………\x02",
)
CloseMessageWindow()
label("loc_402B")
OP_63(0x8, 0x0, 2000, 0x18, 0x1B, 0xFA, 0x0)
Sleep(2000)
OP_64(0x8)
Sleep(500)
#C0103
ChrTalk(
0x8,
(
"#01403F#5P#30W……这就是发生在\x01",
"那个雨天的事情。\x02\x03",
"#01408F黑手党的成员随后现身,\x01",
"并将盖伊的徽章取走,\x01",
"这倒是出乎我的预料……\x02\x03",
"#01400F不管怎么说,这样一来,\x01",
"你的疑问已经基本解明了吧?\x02",
)
)
CloseMessageWindow()
#C0104
ChrTalk(
0x101,
"#00003F#12P不。\x02",
)
CloseMessageWindow()
OP_63(0x8, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
Sleep(1000)
#C0105
ChrTalk(
0x8,
"#01405F#5P………………………………\x02",
)
CloseMessageWindow()
#C0106
ChrTalk(
0x101,
(
"#00006F#12P您想必也清楚,\x01",
"大哥的死因是遭到了枪击。\x02\x03",
"#00001F关于这一点,\x01",
"您还没有做出解释吧……?\x02",
)
)
CloseMessageWindow()
#C0107
ChrTalk(
0x8,
(
"#01403F#5P……在当警察的时候,\x01",
"我学会了使用手枪。\x02\x03",
"#01401F由于对手太过难缠,\x01",
"我情急之下就用了枪,\x01",
"这有什么问题吗……?\x02",
)
)
CloseMessageWindow()
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x9)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_42F7")
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF4, 0x9)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_425E")
OP_FC(0xFFF4)
Jump("loc_4261")
label("loc_425E")
OP_FC(0xC)
label("loc_4261")
#C0108
ChrTalk(
0x10A,
(
"#00606F#13P你在说谎,马克莱因。\x02\x03",
"#00601F在那种殊死搏斗中,\x01",
"怎么会有取出\x01",
"其它武器的余暇。\x02\x03",
"更何况是在对手的背后\x01",
"射出致命一击,这更加不可能。\x02",
)
)
CloseMessageWindow()
Jump("loc_4381")
label("loc_42F7")
#C0109
ChrTalk(
0x101,
(
"#00013F#12P您在说谎。\x02\x03",
"#00006F在那种殊死搏斗中,\x01",
"根本就没有取出\x01",
"其它武器的余暇。\x02\x03",
"#00001F想在对手背后射出致命一击,\x01",
"就更是不可能了。\x02",
)
)
CloseMessageWindow()
label("loc_4381")
StopBGM(0xFA0)
#C0110
ChrTalk(
0x8,
"#01401F#5P………………………………\x02",
)
CloseMessageWindow()
#C0111
ChrTalk(
0x104,
"#00303F#12P嗯,说得有理。\x02",
)
CloseMessageWindow()
#C0112
ChrTalk(
0x102,
(
"#00108F#12P到底是谁开枪\x01",
"击中了盖伊先生……\x02",
)
)
CloseMessageWindow()
#C0113
ChrTalk(
0x103,
"#00201F#12P……请您告诉我们。\x02",
)
CloseMessageWindow()
BeginChrThread(0x8, 0, 0, 22)
WaitChrThread(0x8, 0)
OP_63(0x8, 0x0, 2000, 0x18, 0x1B, 0xFA, 0x0)
Sleep(2000)
OP_64(0x8)
WaitBGM()
Sleep(10)
PlayBGM("ed7356", 0)
MoveCamera(43, 13, 0, 20000)
Sleep(500)
#C0114
ChrTalk(
0x8,
(
"#01403F#5P#30W不管你们怎么说,\x01",
"导致盖伊死亡的,\x01",
"不是别人,就是我。\x02\x03",
"#01400F而我……\x01",
"不惜牺牲过去的搭档,\x01",
"也要参与这项计划。\x02\x03",
"到了如今……\x01",
"甚至还利用年幼少女的心\x01",
"而推动此计划。\x02\x03",
"#01403F这一切都是为了纱绫……\x01",
"还有滴的未来。\x02",
)
)
CloseMessageWindow()
#C0115
ChrTalk(
0x101,
"#00001F#12P……亚里欧斯先生……\x02",
)
CloseMessageWindow()
#C0116
ChrTalk(
0x102,
(
"#00108F#12P您觉得……小滴会因为\x01",
"您这种做法而开心吗……?\x02",
)
)
CloseMessageWindow()
#C0117
ChrTalk(
0x8,
(
"#01403F#5P#30W她当然不可能开心。\x02\x03",
"#01401F然而,\x01",
"克洛斯贝尔这片土地上的诅咒\x01",
"令那个孩子失去了母亲和光明。\x02\x03",
"只要克洛斯贝尔一直\x01",
"在大陆中处于这个位置,\x01",
"诅咒就永远不会消失。\x02\x03",
"#01403F除非发生某种\x01",
"超越人间常理的『奇迹』。\x02",
)
)
CloseMessageWindow()
#C0118
ChrTalk(
0x101,
"#00005F#12P……!?\x02",
)
CloseMessageWindow()
BeginChrThread(0x8, 0, 0, 23)
WaitChrThread(0x8, 0)
OP_63(0x101, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
OP_63(0x102, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
Sleep(50)
OP_63(0x103, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
OP_63(0x104, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
Sleep(50)
OP_63(0xF4, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
OP_63(0xF5, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
Sleep(1000)
#C0119
ChrTalk(
0x8,
(
"#01400F#5P#30W三年前,盖伊他……\x01",
"完全没有责备过我,经过一番\x01",
"殊死搏斗之后,丢掉了性命。\x02\x03",
"而琪雅在成为『至宝』之后,\x01",
"治好了滴的眼睛。\x02\x03",
"#01403F我已经……\x01",
"没有回头的理由了。\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
OP_5A()
Fade(250)
SetCameraDistance(15370, 800)
BeginChrThread(0x8, 0, 0, 24)
Sleep(500)
PlayEffect(0x1, 0x1, 0x8, 0x1, 0, 0, 0, 0, 0, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
Sleep(250)
OP_82(0x64, 0x0, 0xBB8, 0x320)
Sound(825, 2, 50, 0)
Sound(832, 2, 100, 0)
Sound(881, 0, 50, 0)
Sound(833, 0, 50, 0)
Sleep(1000)
#C0120
ChrTalk(
0x8,
(
"#01403F#5P#30W……如果无法认同,\x01",
"就凭实力来阻止我吧。\x02\x03",
"#01401F为了给兄长报仇,并夺回重要的人……\x02\x03",
"#01407F你就用他留下的旋棍\x01",
"来开拓前进的\x01",
"道路吧……!\x02",
)
)
CloseMessageWindow()
StopBGM(0xBB8)
#C0121
ChrTalk(
0x101,
(
"#00006F#12P……明白了。\x02\x03",
"#00001F但是,我完全没有\x01",
"为大哥报仇的想法。\x02",
)
)
CloseMessageWindow()
Sleep(200)
BeginChrThread(0x101, 0, 0, 16)
WaitChrThread(0x101, 0)
PlayEffect(0x2, 0x0, 0x101, 0x1, 0, 0, 0, 0, 0, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
Sound(881, 0, 50, 0)
Sound(833, 0, 50, 0)
OP_25(0x339, 0x46)
WaitBGM()
Sleep(10)
PlayBGM("ed7527", 0)
SetCameraDistance(18370, 20000)
Sleep(1000)
#C0122
ChrTalk(
0x101,
(
"#00003F#12P作为继承了\x01",
"盖伊·班宁斯的遗志\x01",
"而成立的小小部门……\x02\x03",
"#00001F作为承载了小滴\x01",
"和大家的期望的\x01",
"『特别任务支援科』……\x02\x03",
"#00007F我们一定会跨越您这道『壁障』,\x01",
"带回琪雅……\x01",
"在真正意义上解决此次事件!\x02",
)
)
CloseMessageWindow()
Sleep(150)
OP_82(0x64, 0x0, 0xBB8, 0x12C)
#C0123
ChrTalk(
0x8,
"#01405F#5P……!\x02",
)
CloseMessageWindow()
#C0124
ChrTalk(
0x104,
(
"#00302F#12P哈哈……\x01",
"不愧是我们的队长!\x02",
)
)
CloseMessageWindow()
#C0125
ChrTalk(
0x102,
(
"#00101F#12P这也是为了在兰花塔\x01",
"等着我们的小滴……\x02",
)
)
CloseMessageWindow()
#C0126
ChrTalk(
0x103,
"#00201F#12P……我们绝对不能退缩……!\x02",
)
CloseMessageWindow()
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x9)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_4B99")
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF4, 0x9)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_4B65")
OP_FC(0xFFF4)
Jump("loc_4B68")
label("loc_4B65")
OP_FC(0xC)
label("loc_4B68")
#C0127
ChrTalk(
0x10A,
(
"#00604F#13P哼……\x01",
"真是一群让人头疼的家伙。\x02",
)
)
CloseMessageWindow()
OP_5A()
label("loc_4B99")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x4)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_4BF7")
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF4, 0x4)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_4BC3")
OP_FC(0xFFF4)
Jump("loc_4BC6")
label("loc_4BC3")
OP_FC(0xC)
label("loc_4BC6")
#C0128
ChrTalk(
0x105,
(
"#10402F#13P呵呵……\x01",
"支援科就是这种感觉呢。\x02",
)
)
CloseMessageWindow()
OP_5A()
label("loc_4BF7")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x8)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_4C4A")
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF4, 0x8)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_4C21")
OP_FC(0xFFF4)
Jump("loc_4C24")
label("loc_4C21")
OP_FC(0xC)
label("loc_4C24")
#C0129
ChrTalk(
0x109,
"#10107F#13P我会全力援护大家的!\x02",
)
CloseMessageWindow()
OP_5A()
label("loc_4C4A")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x5)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_4CD8")
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF4, 0x5)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_4C74")
OP_FC(0xFFF4)
Jump("loc_4C77")
label("loc_4C74")
OP_FC(0xC)
label("loc_4C77")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x8)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_4CB2")
#C0130
ChrTalk(
0x106,
"#10701F#13P我也会……尽己所能!\x02",
)
CloseMessageWindow()
OP_5A()
Jump("loc_4CD8")
label("loc_4CB2")
#C0131
ChrTalk(
0x106,
"#10707F#13P我会全力协助各位的!\x02",
)
CloseMessageWindow()
OP_5A()
label("loc_4CD8")
BeginChrThread(0x102, 0, 0, 17)
BeginChrThread(0x103, 0, 0, 18)
BeginChrThread(0x104, 0, 0, 19)
BeginChrThread(0xF4, 0, 0, 20)
BeginChrThread(0xF5, 0, 0, 21)
WaitChrThread(0x102, 0)
WaitChrThread(0x103, 0)
WaitChrThread(0x104, 0)
WaitChrThread(0xF4, 0)
WaitChrThread(0xF5, 0)
Sleep(500)
OP_C9(0x0, 0x80000000)
#C0132
ChrTalk(
0x8,
"#01404F#4077V#5P#30W#35A呵呵,很好。\x02",
)
#Auto
CloseMessageWindow()
OP_57(0x0)
OP_5A()
BeginChrThread(0x8, 0, 0, 25)
WaitChrThread(0x8, 0)
Sound(817, 0, 100, 0)
BeginChrThread(0xA, 3, 0, 27)
BeginChrThread(0xB, 3, 0, 28)
WaitChrThread(0xA, 3)
WaitChrThread(0xB, 3)
OP_68(180, 13300, 207000, 20000)
MoveCamera(43, 13, 0, 20000)
SetCameraDistance(14120, 20000)
CreatePortrait(0, 65514, 0, 490, 256, 0, 0, 512, 256, 0, 0, 512, 256, 0xFFFFFF, 0x0, "bu01402.itp")
OP_CB(0x0, 0x3, 0xFFFFFFFF, 0x1F4, 0x0, 0x0)
OP_CB(0x0, 0x0, 0xFFFFD8F0, 0x0, 0x1F4, 0x0)
OP_CC(0x0, 0x0, 0x3)
OP_CC(0x0, 0x0, 0x0)
SetMessageWindowPos(14, 280, 35, 3)
Sleep(500)
BeginChrThread(0x8, 0, 0, 26)
#A0133
AnonymousTalk(
0x8,
(
"#4065V#40W#70A八叶一刀流·二型奥义传人,\x01",
"亚里欧斯·马克莱因……\x02\x03",
"#4066V#74A出于一己私欲,背离正义,\x01",
"舍弃道德,坚持一意孤行!\x02\x03",
"#4067V#30A来吧!特别任务支援科!\x02",
)
)
#Auto
WaitChrThread(0x8, 0)
CloseMessageWindow()
OP_C9(0x1, 0x80000000)
OP_57(0x0)
OP_5A()
SetMessageWindowPos(14, 280, 60, 3)
OP_CB(0x0, 0x3, 0xFFFFFF, 0x1F4, 0x0, 0x0)
OP_CB(0x0, 0x0, 0x0, 0x0, 0x1F4, 0x0)
OP_CC(0x0, 0x0, 0x3)
OP_CC(0x0, 0x0, 0x0)
Sleep(300)
SetMessageWindowPos(330, 100, -1, -1)
SetChrName("罗伊德等人")
OP_82(0xC8, 0x0, 0xBB8, 0x1F4)
#A0134
AnonymousTalk(
0xFF,
"#4S#12A哦哦!\x02",
)
#Auto
Sound(2153, 255, 90, 0) #voice#Elie
Sound(2343, 255, 100, 1) #voice#Randy
Sound(2249, 255, 100, 2) #voice#Tio
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x8)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_4F62")
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF4, 0x8)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_4F59")
OP_FC(0x4)
Sound(2478, 255, 100, 4) #voice#Noel
Jump("loc_4F62")
label("loc_4F59")
OP_FC(0x3)
Sound(2478, 255, 100, 3) #voice#Noel
label("loc_4F62")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x4)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_4F95")
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF4, 0x4)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_4F8F")
Sound(2417, 255, 100, 4) #voice#Lazy
Jump("loc_4F95")
label("loc_4F8F")
Sound(2417, 255, 100, 3) #voice#Lazy
label("loc_4F95")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x9)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_4FC8")
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF4, 0x9)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_4FC2")
Sound(2544, 255, 100, 4) #voice#Dudley
Jump("loc_4FC8")
label("loc_4FC2")
Sound(2544, 255, 100, 3) #voice#Dudley
label("loc_4FC8")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x5)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_4FFB")
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF4, 0x5)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_4FF5")
Sound(3174, 255, 100, 4) #voice#Rixia
Jump("loc_4FFB")
label("loc_4FF5")
Sound(3174, 255, 100, 3) #voice#Rixia
label("loc_4FFB")
Sound(2055, 255, 100, 5) #voice#Lloyd
CloseMessageWindow()
OP_57(0x0)
OP_5A()
SetMessageWindowPos(14, 280, 60, 3)
Battle("BattleInfo_2B8", 0x0, 0x0, 0x100, 0x45, 0xFF)
FadeToDark(0, 0, -1)
SetChrFlags(0xA, 0x80)
SetChrFlags(0xB, 0x80)
Call(0, 31)
Return()
# Function_9_CD7 end
# Thread helper: walk the calling actor (0xFE = "self" in this script DSL) to a
# fixed spot on the platform, then turn it to a fixed heading.
# OP_9B presumably configures walk parameters before the OP_96 move, and OP_93
# turns the actor -- TODO confirm exact opcode semantics against the engine docs.
def Function_10_5036(): pass
label("Function_10_5036")
OP_9B(0x0, 0xFE, 0x0, 0x1770, 0xFA0, 0x1)
OP_96(0xFE, 0x0, 0x2EE0, 0x318BC, 0xFA0, 0x0)
OP_93(0xFE, 0x0, 0x1F4)
Return()
# Function_10_5036 end
# Thread helper: same walk-then-turn pattern as Function_10, but to a slightly
# different target position (presumably this party member's formation slot).
def Function_11_5061(): pass
label("Function_11_5061")
OP_9B(0x0, 0xFE, 0x0, 0x157C, 0xFA0, 0x1)
OP_96(0xFE, 0x460, 0x2EE0, 0x313E4, 0xFA0, 0x0)
OP_93(0xFE, 0x0, 0x1F4)
Return()
# Function_11_5061 end
# Thread helper: walk-then-turn to another formation slot (see Function_10).
def Function_12_508C(): pass
label("Function_12_508C")
OP_9B(0x0, 0xFE, 0x0, 0x157C, 0xFA0, 0x1)
OP_96(0xFE, 0x1A4, 0x2EE0, 0x30EB2, 0xFA0, 0x0)
OP_93(0xFE, 0x0, 0x1F4)
Return()
# Function_12_508C end
# Thread helper: walk-then-turn to a formation slot on the negative-X side
# (0xFFFFFB0A is a negative coordinate in two's complement).
def Function_13_50B7(): pass
label("Function_13_50B7")
OP_9B(0x0, 0xFE, 0x0, 0x157C, 0xFA0, 0x1)
OP_96(0xFE, 0xFFFFFB0A, 0x2EE0, 0x3116E, 0xFA0, 0x0)
OP_93(0xFE, 0x0, 0x1F4)
Return()
# Function_13_50B7 end
# Thread helper: walk-then-turn to an outer formation slot (negative X).
def Function_14_50E2(): pass
label("Function_14_50E2")
OP_9B(0x0, 0xFE, 0x0, 0x1996, 0xFA0, 0x1)
OP_96(0xFE, 0xFFFFF614, 0x2EE0, 0x313DA, 0xFA0, 0x0)
OP_93(0xFE, 0x0, 0x1F4)
Return()
# Function_14_50E2 end
# Thread helper: walk to the final formation slot.  Unlike Functions 10-14
# there is no OP_93 turn afterwards -- the actor keeps its facing from the move.
def Function_15_510D(): pass
label("Function_15_510D")
OP_9B(0x0, 0xFE, 0x0, 0x1AC2, 0xFA0, 0x1)
OP_96(0xFE, 0x9EC, 0x2EE0, 0x31312, 0xFA0, 0x0)
Return()
# Function_15_510D end
# Thread helper: after a short delay, play SE 805 and switch the actor to
# sprite chip 0x1F, frame 0 (presumably a battle-ready pose for Lloyd).
def Function_16_5131(): pass
label("Function_16_5131")
Sleep(150)
Sound(805, 0, 100, 0)
SetChrChipByIndex(0xFE, 0x1F)
SetChrSubChip(0xFE, 0x0)
Return()
# Function_16_5131 end
# Thread helper: staggered pose change (300 ms) with SE 531, chip 0x20.
# The different delays across Functions 16-21 stagger the party's
# weapon-draw animations so they don't all trigger on the same frame.
def Function_17_5143(): pass
label("Function_17_5143")
Sleep(300)
Sound(531, 0, 100, 0)
SetChrChipByIndex(0xFE, 0x20)
SetChrSubChip(0xFE, 0x0)
Return()
# Function_17_5143 end
# Thread helper: staggered pose change (450 ms) with SE 805, chip 0x21.
def Function_18_5155(): pass
label("Function_18_5155")
Sleep(450)
Sound(805, 0, 100, 0)
SetChrChipByIndex(0xFE, 0x21)
SetChrSubChip(0xFE, 0x0)
Return()
# Function_18_5155 end
# Thread helper: staggered pose change (450 ms), chip 0x22, no sound effect.
def Function_19_5167(): pass
label("Function_19_5167")
Sleep(450)
SetChrChipByIndex(0xFE, 0x22)
SetChrSubChip(0xFE, 0x0)
Return()
# Function_19_5167 end
# Thread helper: pose change for the actor in party slot 0xF4 (5th member).
# The weapon-draw SE depends on which character occupies the slot:
# member 0x5 (Rixia) gets SE 540; members 0x8/0x9 (Noel/Dudley) get SE 531;
# anyone else draws silently.  Chip 0x23 is the battle pose for this slot.
def Function_20_5173(): pass
label("Function_20_5173")
Sleep(600)
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF4, 0x5)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_5193")
Sound(540, 0, 50, 0)
Jump("loc_51B8")
label("loc_5193")
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF4, 0x8)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_EXEC_OP, "OP_FB(0xF4, 0x9)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_OR), scpexpr(EXPR_END)), "loc_51B8")
Sound(531, 0, 100, 0)
label("loc_51B8")
SetChrChipByIndex(0xFE, 0x23)
SetChrSubChip(0xFE, 0x0)
Return()
# Function_20_5173 end
# Thread helper: same member-dependent pose change as Function_20, but for
# party slot 0xF5 (6th member) with a longer 750 ms stagger and chip 0x24.
def Function_21_51C1(): pass
label("Function_21_51C1")
Sleep(750)
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF5, 0x5)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_51E1")
Sound(540, 0, 50, 0)
Jump("loc_5206")
label("loc_51E1")
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF5, 0x8)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_EXEC_OP, "OP_FB(0xF5, 0x9)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_OR), scpexpr(EXPR_END)), "loc_5206")
Sound(531, 0, 100, 0)
label("loc_5206")
SetChrChipByIndex(0xFE, 0x24)
SetChrSubChip(0xFE, 0x0)
Return()
# Function_21_51C1 end
# Thread helper: start of Arios's (actor 0x8) hand-animated sequence -- switch
# him to chip 0x2A and step sub-chips 0..2 at 125 ms per frame.  The SetChrFlags
# calls presumably pin the sprite so the engine doesn't auto-animate it.
def Function_22_520F(): pass
label("Function_22_520F")
SetChrChipByIndex(0x8, 0x2A)
SetChrFlags(0xFE, 0x1000)
SetChrFlags(0xFE, 0x20)
SetChrFlags(0xFE, 0x2)
SetChrSubChip(0xFE, 0x0)
Sleep(125)
SetChrSubChip(0xFE, 0x1)
Sleep(125)
SetChrSubChip(0xFE, 0x2)
Sleep(500)
Return()
# Function_22_520F end
# Thread helper: alternate entry into Arios's chip-0x2A animation, starting at
# frame 2 and stepping through frame 7 with sound effects (932, 859) keyed to
# specific frames -- presumably his sword-draw motion.
def Function_23_5238(): pass
label("Function_23_5238")
SetChrChipByIndex(0x8, 0x2A)
SetChrFlags(0xFE, 0x1000)
SetChrFlags(0xFE, 0x20)
SetChrFlags(0xFE, 0x2)
SetChrSubChip(0xFE, 0x2)
Sleep(125)
Sound(932, 0, 60, 0)
SetChrSubChip(0xFE, 0x3)
Sleep(125)
SetChrSubChip(0xFE, 0x4)
Sleep(375)
SetChrSubChip(0xFE, 0x5)
Sleep(125)
Sound(859, 0, 100, 0)
SetChrSubChip(0xFE, 0x6)
Sleep(125)
SetChrSubChip(0xFE, 0x7)
Sleep(500)
Return()
# Function_23_5238 end
# Thread helper: continuation of Arios's animation, frames 7 -> 0xB, ending
# with SEs 812/531 on the final frame.  Assumes the actor is already on the
# chip set by Function_22/23 -- the frames continue where those left off.
def Function_24_5282(): pass
label("Function_24_5282")
SetChrSubChip(0xFE, 0x7)
Sleep(125)
SetChrSubChip(0xFE, 0x8)
Sleep(125)
SetChrSubChip(0xFE, 0x9)
Sleep(125)
SetChrSubChip(0xFE, 0xA)
Sleep(125)
Sound(812, 0, 100, 0)
Sound(531, 0, 50, 0)
SetChrSubChip(0xFE, 0xB)
Return()
# Function_24_5282 end
# Thread helper: animation continuation, frames 0xB -> 0xE with SEs 531/859
# -- picks up exactly where Function_24 stopped.
def Function_25_52AF(): pass
label("Function_25_52AF")
SetChrSubChip(0xFE, 0xB)
Sleep(125)
SetChrSubChip(0xFE, 0xC)
Sleep(125)
SetChrSubChip(0xFE, 0xD)
Sleep(125)
Sound(531, 0, 50, 0)
SetChrSubChip(0xFE, 0xE)
Sleep(250)
Sound(859, 0, 60, 0)
Sleep(250)
Return()
# Function_25_52AF end
# Thread helper: final stretch of the animation, frames 0xE -> 0x11 at a
# faster 91 ms cadence with SE 540 -- presumably the pose he holds while
# delivering the pre-battle challenge line.
def Function_26_52DB(): pass
label("Function_26_52DB")
SetChrSubChip(0xFE, 0xE)
Sleep(91)
Sound(540, 0, 40, 0)
SetChrSubChip(0xFE, 0xF)
Sleep(91)
SetChrSubChip(0xFE, 0x10)
Sleep(91)
SetChrSubChip(0xFE, 0x11)
Sleep(364)
Return()
# Function_26_52DB end
# Thread helper: play loaded effect 0x0 in slot 0x5 on the actor, then after
# 500 ms fade the actor via OP_A7 and stop the effect -- looks like a
# teleport/vanish for one of the escort characters; confirm OP_A7 semantics.
def Function_27_52FE(): pass
label("Function_27_52FE")
PlayEffect(0x0, 0x5, 0xFE, 0x1, 0, 0, 0, 0, 0, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
Sleep(500)
OP_A7(0xFE, 0xFF, 0xFF, 0xFF, 0xFF, 0x1F4)
StopEffect(0x5, 0x2)
Return()
# Function_27_52FE end
# Thread helper: identical to Function_27 but using effect slot 0x6, so both
# actors (0xA and 0xB, see the caller) can vanish simultaneously without
# fighting over one effect slot.
def Function_28_5347(): pass
label("Function_28_5347")
PlayEffect(0x0, 0x6, 0xFE, 0x1, 0, 0, 0, 0, 0, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
Sleep(500)
OP_A7(0xFE, 0xFF, 0xFF, 0xFF, 0xFF, 0x1F4)
StopEffect(0x6, 0x2)
Return()
# Function_28_5347 end
# Thread helper: endless loop repeating OP_A1 on the actor (this Jc/Jump pair
# with a constant-1 expression is the decompiler's while-true idiom).
# Presumably a looping ambient animation; the loop is killed externally
# with EndChrThread -- TODO confirm OP_A1's meaning.
def Function_29_5390(): pass
label("Function_29_5390")
Jc((scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_END)), "loc_53AB")
OP_A1(0xFE, 0x4B0, 0x5, 0x0, 0x1, 0x2, 0x3, 0x4)
Jump("Function_29_5390")
label("loc_53AB")
Return()
# Function_29_5390 end
# Thread helper: drive overlay plane 0 through a series of alternating
# positive/negative offsets with shrinking magnitude and duration
# (OP_CB sets the motion, OP_CC waits for it) -- a damped shake that
# settles back to offset 0.  Exact OP_CB channel semantics unverified.
def Function_30_53AC(): pass
label("Function_30_53AC")
OP_CB(0x0, 0x0, 0xFFFFE0C0, 0x0, 0x6E, 0x0)
OP_CC(0x0, 0x0, 0x0)
OP_CB(0x0, 0x0, 0x1770, 0x0, 0x5A, 0x0)
OP_CC(0x0, 0x0, 0x0)
OP_CB(0x0, 0x0, 0xFFFFF060, 0x0, 0x46, 0x0)
OP_CC(0x0, 0x0, 0x0)
OP_CB(0x0, 0x0, 0x7D0, 0x0, 0x32, 0x0)
OP_CC(0x0, 0x0, 0x0)
OP_CB(0x0, 0x0, 0x0, 0x0, 0x1E, 0x0)
OP_CC(0x0, 0x0, 0x0)
Return()
# Function_30_53AC end
# Post-battle event scene: the defeated Arios (actor 0x8) reveals the truth of
# Guy Bannings's death in a portrait-based flashback, then releases the barrier
# and the party proceeds (scene change to m9008).  Called from Function_9 right
# after the boss battle.  Pure linear cutscene -- the order of every opcode is
# load-bearing; only comments have been added here.
def Function_31_5420(): pass
label("Function_31_5420")
EventBegin(0x0)
FadeToDark(0, 0, -1)
OP_E2(0x3)
# Preload the party battle-pose sprites, Arios assets, effects and the
# voice/BGM ids used later in the scene.
LoadChrToIndex("chr/ch00050.itc", 0x1F)
LoadChrToIndex("chr/ch00150.itc", 0x20)
LoadChrToIndex("chr/ch00250.itc", 0x21)
LoadChrToIndex("chr/ch00350.itc", 0x22)
Call(0, 6)
Call(0, 7)
LoadChrToIndex("apl/ch51744.itc", 0x26)
LoadEffect(0x0, "event/ev17084.eff")
LoadEffect(0x1, "event/ev17085.eff")
SoundLoad(128)
SoundLoad(4078)
SoundLoad(4079)
# Place camera and party in their post-battle formation; party members keep
# the battle-pose chips loaded above, Arios stands facing them (heading 180).
OP_68(-80, 13300, 209040, 0)
MoveCamera(358, 19, 0, 0)
OP_6E(600, 0)
SetCameraDistance(19000, 0)
SetCameraDistance(16500, 2500)
SetChrPos(0x101, 0, 12000, 207440, 0)
SetChrPos(0x102, 1120, 12000, 206200, 0)
SetChrPos(0x103, 420, 12000, 204870, 0)
SetChrPos(0x104, -1270, 12000, 205570, 0)
SetChrPos(0xF4, -2540, 11990, 206190, 0)
SetChrPos(0xF5, 2540, 12000, 205990, 0)
ClearChrFlags(0x4, 0x80)
ClearChrBattleFlags(0x4, 0x8000)
ClearChrFlags(0x5, 0x80)
ClearChrBattleFlags(0x5, 0x8000)
SetChrChipByIndex(0x101, 0x1F)
SetChrSubChip(0x101, 0x0)
SetChrChipByIndex(0x102, 0x20)
SetChrSubChip(0x102, 0x0)
SetChrChipByIndex(0x103, 0x21)
SetChrSubChip(0x103, 0x0)
SetChrChipByIndex(0x104, 0x22)
SetChrSubChip(0x104, 0x0)
SetChrChipByIndex(0xF4, 0x23)
SetChrSubChip(0xF4, 0x0)
SetChrChipByIndex(0xF5, 0x24)
SetChrSubChip(0xF5, 0x0)
SetChrChipByIndex(0x8, 0x0)
SetChrSubChip(0x8, 0x0)
ClearChrFlags(0x8, 0x80)
SetChrFlags(0x8, 0x8000)
SetChrFlags(0x8, 0x1000)
SetChrFlags(0x8, 0x2)
SetChrFlags(0x8, 0x800)
SetChrPos(0x8, 0, 12000, 211500, 180)
# Thread 32 runs Arios's looping idle animation while he talks.
BeginChrThread(0x8, 0, 0, 32)
OP_68(0, 13000, 210000, 0)
MoveCamera(0, 15, 0, 0)
OP_6E(600, 0)
SetCameraDistance(17500, 0)
SetCameraDistance(15800, 12000)
FadeToBright(1000, 0)
OP_0D()
# Arios concedes (voiced lines 4078/4079).
OP_C9(0x0, 0x80000000)
#C0135
ChrTalk(
0x8,
(
"#01404F#4078V#5P#80W#30A………呵呵…………\x02\x03",
"#4079V#60A罗伊德……还有其他人……\x01",
"……你们真的变强了呢。\x02",
)
)
#Auto
CloseMessageWindow()
OP_C9(0x1, 0x80000000)
OP_57(0x0)
OP_5A()
# Cut to a closer angle; stop the idle thread and the party answers.
Fade(500)
OP_68(0, 13100, 208700, 0)
MoveCamera(46, 18, 0, 0)
OP_6E(600, 0)
SetCameraDistance(14680, 0)
EndChrThread(0x8, 0x0)
SetChrSubChip(0x8, 0x1)
OP_0D()
Sleep(500)
#C0136
ChrTalk(
0x101,
(
"#00006F#12P#40W……呼……呼……\x02\x03",
"#00008F如果真是如此……那也是因为\x01",
"我们一直都在以亚里欧斯先生为目标……\x02",
)
)
OP_6F(0x79)
CloseMessageWindow()
#C0137
ChrTalk(
0x104,
(
"#00306F#12P#40W的确……如果没有您,\x01",
"我们恐怕也走不到这一步……\x02",
)
)
CloseMessageWindow()
#C0138
ChrTalk(
0x103,
"#00206F#12P#40W……我有同感……\x02",
)
CloseMessageWindow()
#C0139
ChrTalk(
0x102,
(
"#00108F#12P#40W您一直走在遥远的前方,\x01",
"是我们不断追赶并试图跨越的『壁障』……\x02",
)
)
CloseMessageWindow()
#C0140
ChrTalk(
0x8,
(
"#01402F#5P#40W呵呵……真是的……\x02\x03",
"#01404F我哪有接受这种\x01",
"赞誉的资格……\x02",
)
)
CloseMessageWindow()
# Lloyd hesitates (emote balloon via OP_63), then asks who really shot Guy.
OP_63(0x101, 0x0, 2000, 0x18, 0x1B, 0xFA, 0x0)
Sleep(1500)
OP_64(0x101)
Sleep(500)
#C0141
ChrTalk(
0x101,
(
"#00003F#12P#30W……亚里欧斯先生。\x02\x03",
"#00001F那一天,开枪击中大哥的人\x01",
"是伊安律师吧……?\x02",
)
)
CloseMessageWindow()
# Everyone reacts with surprise balloons + SE 28.
OP_63(0x102, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
OP_63(0x103, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
Sleep(50)
OP_63(0x104, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
OP_63(0xF4, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
Sleep(50)
OP_63(0xF5, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
Sleep(1000)
# Optional reaction line: Dudley (member 0x9) if present, else Noel (0x8);
# the OP_FB checks pick which guest slot the member occupies.
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x9)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_5951")
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF4, 0x9)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_592D")
OP_FC(0xFFF4)
Jump("loc_5930")
label("loc_592D")
OP_FC(0xC)
label("loc_5930")
#C0142
ChrTalk(
0x10A,
"#00605F#13P#30W……!\x02",
)
CloseMessageWindow()
OP_5A()
Jump("loc_59A0")
label("loc_5951")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x8)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_59A0")
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF4, 0x8)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_597B")
OP_FC(0xFFF4)
Jump("loc_597E")
label("loc_597B")
OP_FC(0xC)
label("loc_597E")
#C0143
ChrTalk(
0x109,
"#10105F#13P#30W……啊……!\x02",
)
CloseMessageWindow()
OP_5A()
label("loc_59A0")
#C0144
ChrTalk(
0x104,
"#00301F#12P#30W这……\x02",
)
CloseMessageWindow()
#C0145
ChrTalk(
0x8,
(
"#01403F#5P#40W………………………………\x02\x03",
"#01400F……你为何会这样想……?\x02",
)
)
CloseMessageWindow()
# Lloyd lays out his deduction; Arios accepts it.
#C0146
ChrTalk(
0x101,
(
"#00006F#12P#30W这只是简单的排除法……\x02\x03",
"#00008F……从事件的背景来考虑……\x01",
"除了律师之外,嫌疑人就只有\x01",
"迪塔先生和玛丽亚贝尔小姐了……\x02\x03",
"#00001F但迪塔先生似乎\x01",
"并不了解计划的全貌,\x01",
"而玛丽亚贝尔小姐与大哥没有任何接触……\x02\x03",
"#00006F只有伊安律师\x01",
"与大哥来往甚密……\x02\x03",
"而且……他经常去国外出差,\x01",
"需要一定的自卫手段,\x01",
"就算能熟练使用手枪也不足为奇……\x02\x03",
"#00013F……您的意见如何?\x02",
)
)
CloseMessageWindow()
#C0147
ChrTalk(
0x8,
(
"#01403F#5P#40W……六十分吧……\x02\x03",
"#01402F不过……看来是\x01",
"不得不承认你及格了……\x02",
)
)
CloseMessageWindow()
# Transition into the flashback: fade out and ramp up the rain loop
# (channel 128) in volume steps before the thunder hit (SE 884).
StopBGM(0xFA0)
FadeToDark(1000, 0, -1)
OP_0D()
Sleep(1000)
Sound(128, 2, 30, 0)
Sleep(150)
OP_25(0x80, 0x28)
Sleep(150)
OP_25(0x80, 0x32)
Sleep(150)
OP_25(0x80, 0x3C)
Sleep(150)
OP_25(0x80, 0x46)
Sleep(150)
OP_25(0x80, 0x50)
Sleep(150)
OP_25(0x80, 0x5A)
Sleep(150)
OP_25(0x80, 0x64)
Sleep(300)
Sound(884, 0, 100, 0)
Sleep(3000)
WaitBGM()
Sleep(10)
PlayBGM("ed7534", 0)
# Flashback proper: full-screen visuals (c_vis308/317) of Guy and Arios's
# duel, told through AnonymousTalk with repositioned message windows.
CreatePortrait(0, 0, 8, 480, 264, 0, 0, 512, 256, 0, 0, 480, 256, 0xFFFFFF, 0x0, "c_vis308.itp")
CreatePortrait(1, 0, 8, 480, 264, 0, 0, 512, 256, 0, 0, 480, 256, 0xFFFFFF, 0x0, "c_vis317.itp")
OP_CB(0x0, 0x3, 0xFFFFFFFF, 0x3E8, 0x0, 0x0)
OP_CC(0x0, 0x0, 0x3)
Sleep(500)
SetMessageWindowPos(20, 160, -1, -1)
SetChrName("盖伊")
#A0148
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x14),
"#40W呼、呼……\x02\x03",
"……喂,亚里欧斯……\x02\x03",
"我们好像都到极限了……\x01",
"不然就先休战吧?\x02",
)
)
CloseMessageWindow()
SetMessageWindowPos(230, 140, -1, -1)
SetChrName("亚里欧斯")
#A0149
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x14),
"#40W……你在说什么蠢话……\x02\x03",
"既然事情已经败露,\x01",
"我绝不能让你离开此地……\x02\x03",
"如果你还想活着迎接下个月的婚礼,\x01",
"就抱着杀死我的决心攻过来吧……!\x02",
)
)
CloseMessageWindow()
SetMessageWindowPos(20, 160, -1, -1)
SetChrName("盖伊")
#A0150
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x14),
"#40W我怎么可能那么做……\x02\x03",
"那样的话,不就无法邀请\x01",
"你和小滴参加婚礼了吗……\x02",
)
)
CloseMessageWindow()
SetMessageWindowPos(330, 140, -1, -1)
SetChrName("亚里欧斯")
#A0151
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x14),
"……!\x02",
)
)
CloseMessageWindow()
SetMessageWindowPos(20, 160, -1, -1)
SetChrName("盖伊")
#A0152
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x14),
"#40W放心吧……我并没有把\x01",
"你们的计划告诉任何人……\x02\x03",
"原本想找达德利\x01",
"帮忙……\x01",
"但那家伙是个死脑筋。\x02\x03",
"另外,我也没和\x01",
"赛尔盖长官说过呢……\x02",
)
)
CloseMessageWindow()
SetMessageWindowPos(250, 140, -1, -1)
SetChrName("亚里欧斯")
#A0153
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x14),
"#40W你……\x02\x03",
"……你难道就没想过,\x01",
"我听了这些话之后,\x01",
"将会下定杀人灭口的决心吗……?\x02",
)
)
CloseMessageWindow()
SetMessageWindowPos(20, 160, -1, -1)
SetChrName("盖伊")
#A0154
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x14),
"#30W不会啊……\x01",
"因为你一向很没心机。\x02\x03",
"不然也不会独身一人\x01",
"来这种地方赴约了。\x02",
)
)
CloseMessageWindow()
SetMessageWindowPos(330, 140, -1, -1)
SetChrName("亚里欧斯")
#A0155
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x14),
"#30W唔……\x02",
)
)
CloseMessageWindow()
SetMessageWindowPos(20, 160, -1, -1)
SetChrName("盖伊")
#A0156
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x14),
"#30W总之……这件事就先到此为止,\x01",
"我们一起去喝一杯吧?\x02\x03",
"最近这两年,我们一直\x01",
"都没有好好聊过……\x02\x03",
"你总要给我一个向你炫耀\x01",
"弟弟和女朋友的机会吧?\x02",
)
)
CloseMessageWindow()
SetMessageWindowPos(240, 150, -1, -1)
SetChrName("亚里欧斯")
#A0157
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x14),
"#30W呼……你真是老样子啊。\x02\x03",
"你的弟弟……\x01",
"已经十五岁了吧?\x02",
)
)
CloseMessageWindow()
SetMessageWindowPos(20, 160, -1, -1)
SetChrName("盖伊")
#A0158
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x14),
"#30W嗯,他完全不像我,非常聪明呢。\x02\x03",
"我打算送他去\x01",
"高等学校读书……\x02\x03",
"……好啦,先不说这些了。\x01",
"雨还没停,我们不如到『加兰特』——\x02",
)
)
CloseMessageWindow()
# The gunshot (SE 567) cuts Guy off; swap in the second visual (vis317).
Sound(567, 0, 100, 0)
Sleep(200)
OP_CB(0x1, 0x4, 0x0, 0x0, 0x0, 0x0)
OP_CB(0x1, 0x3, 0xFFFFFFFF, 0x1F4, 0x0, 0x0)
Sleep(500)
SetMessageWindowPos(80, 160, -1, -1)
SetChrName("盖伊")
#A0159
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x14),
"#60W啊——\x02",
)
)
CloseMessageWindow()
SetMessageWindowPos(330, 140, -1, -1)
SetChrName("亚里欧斯")
#A0160
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x14),
"#30W!?\x02",
)
)
CloseMessageWindow()
# Reveal of the shooter (c_vis309).
OP_CB(0x0, 0x3, 0xFFFFFF, 0x0, 0x0, 0x0)
OP_CB(0x1, 0x3, 0xFFFFFF, 0x1F4, 0x0, 0x0)
OP_CC(0x0, 0x1, 0x3)
OP_CC(0x1, 0xFF, 0x0)
CreatePortrait(0, 0, 8, 480, 264, 0, 0, 512, 256, 0, 0, 480, 256, 0xFFFFFF, 0x0, "c_vis309.itp")
OP_CB(0x0, 0x3, 0xFFFFFFFF, 0x2EE, 0x0, 0x0)
OP_CC(0x0, 0x0, 0x3)
Sleep(300)
SetMessageWindowPos(330, 160, -1, -1)
SetChrName("亚里欧斯")
#A0161
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x14),
"#30W律师……!?\x02",
)
)
CloseMessageWindow()
SetMessageWindowPos(250, 180, -1, -1)
SetChrName("盖伊")
#A0162
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x14),
"#50W……哈哈……\x02\x03",
"……原来如此……\x01",
"幕后黑手原来是你啊……\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
OP_5A()
OP_CB(0x0, 0x3, 0xFFFFFF, 0x1F4, 0x0, 0x0)
OP_CC(0x0, 0x0, 0x3)
OP_CC(0x1, 0xFF, 0x0)
# Guy collapses (SEs 885/811); Ian's confession over visuals 310/318/319/320.
Sound(885, 0, 80, 0)
Sound(811, 0, 80, 0)
CreatePortrait(0, 0, 8, 480, 264, 0, 0, 512, 256, 0, 0, 480, 256, 0xFFFFFF, 0x0, "c_vis310.itp")
CreatePortrait(1, 0, 8, 480, 264, 0, 0, 512, 256, 0, 0, 480, 256, 0xFFFFFF, 0x0, "c_vis318.itp")
CreatePortrait(2, 0, 8, 480, 264, 0, 0, 512, 256, 0, 0, 480, 256, 0xFFFFFF, 0x0, "c_vis319.itp")
CreatePortrait(3, 0, 8, 480, 264, 0, 0, 512, 256, 0, 0, 480, 256, 0xFFFFFF, 0x0, "c_vis320.itp")
OP_CB(0x0, 0x3, 0xFFFFFFFF, 0x1F4, 0x0, 0x0)
OP_CC(0x0, 0x0, 0x3)
Sleep(1000)
OP_CB(0x1, 0x4, 0x0, 0x0, 0x0, 0x0)
OP_CB(0x1, 0x3, 0xFFFFFFFF, 0x1F4, 0x0, 0x0)
Sleep(1000)
OP_CB(0x0, 0x3, 0xFFFFFF, 0x0, 0x0, 0x0)
OP_CB(0x2, 0x4, 0x0, 0x0, 0x0, 0x0)
OP_CB(0x2, 0x3, 0xFFFFFFFF, 0x1F4, 0x0, 0x0)
Sleep(1000)
OP_CB(0x1, 0x3, 0xFFFFFF, 0x0, 0x0, 0x0)
OP_CB(0x3, 0x4, 0x0, 0x0, 0x0, 0x0)
OP_CB(0x3, 0x3, 0xFFFFFFFF, 0x1F4, 0x0, 0x0)
SetMessageWindowPos(30, 160, -1, -1)
SetChrName("伊安律师")
#A0163
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x14),
"#30W……抱歉,盖伊。\x02\x03",
"考虑到你父母的情况,\x01",
"原本也想过邀你加入……\x02\x03",
"但我相信你一定\x01",
"不会赞同我们。\x02",
)
)
CloseMessageWindow()
SetMessageWindowPos(350, 140, -1, -1)
SetChrName("亚里欧斯")
#A0164
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x14),
"#30W……律师先生………\x02",
)
)
CloseMessageWindow()
SetMessageWindowPos(200, 200, -1, -1)
SetChrName("盖伊")
#A0165
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x14),
"#50W哈哈……那当然……\x02\x03",
"……有律师先生在幕后策划,\x01",
"这个计划……肯定会\x01",
"进展得很顺利……\x02\x03",
"但是……代替我的人……\x01",
"一定会出现的……\x02",
)
)
CloseMessageWindow()
SetMessageWindowPos(40, 160, -1, -1)
SetChrName("伊安律师")
#A0166
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x14),
"#30W嗯……也许吧。\x02",
)
)
CloseMessageWindow()
SetMessageWindowPos(350, 140, -1, -1)
SetChrName("亚里欧斯")
#A0167
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x14),
"#30W盖伊……!\x01",
"……振作点……!\x02",
)
)
CloseMessageWindow()
SetMessageWindowPos(210, 200, -1, -1)
SetChrName("盖伊")
#A0168
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x14),
"#60W咳……啊……\x01",
"……这下可糟了……\x02\x03",
"#80W早知如此……\x01",
"真该先向罗伊德……和塞茜尔……\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
OP_5A()
# End of flashback: fade visuals out, fade the rain loop, back to the present.
SetMessageWindowPos(14, 280, 60, 3)
OP_CB(0x3, 0x3, 0xFFFFFF, 0x3E8, 0x0, 0x0)
OP_CB(0x2, 0x3, 0xFFFFFF, 0x0, 0x0, 0x0)
OP_CC(0x0, 0x3, 0x3)
OP_CC(0x1, 0xFF, 0x0)
Sound(885, 0, 90, 0)
Sleep(100)
Sound(811, 0, 90, 0)
Sound(862, 0, 40, 0)
StopBGM(0x1770)
Sleep(2000)
StopSound(128, 2000, 100)
WaitBGM()
SetCameraDistance(16180, 3000)
FadeToBright(1500, 0)
OP_0D()
OP_6F(0x79)
# Optional somber reaction from Dudley (member 0x9) if present.
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x9)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_6716")
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF4, 0x9)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_66E7")
OP_FC(0xFFF4)
Jump("loc_66EA")
label("loc_66E7")
OP_FC(0xC)
label("loc_66EA")
#C0169
ChrTalk(
0x10A,
"#00608F#13P#30W……………………………\x02",
)
CloseMessageWindow()
OP_5A()
label("loc_6716")
#C0170
ChrTalk(
0x103,
"#00213F#12P#30W………盖伊先生………\x02",
)
CloseMessageWindow()
#C0171
ChrTalk(
0x102,
"#00108F#12P#30W……竟有这种事……\x02",
)
CloseMessageWindow()
#C0172
ChrTalk(
0x104,
"#00308F#12P#30W真是不幸的往事啊……\x02",
)
CloseMessageWindow()
#C0173
ChrTalk(
0x101,
(
"#00006F#12P#30W……多谢您告诉我\x01",
"大哥临终时的状况。\x02",
)
)
CloseMessageWindow()
#C0174
ChrTalk(
0x8,
(
"#01404F#5P#40W……不用道谢……\x02\x03",
"#01400F伊安律师……\x01",
"恐怕是不会动摇的……\x02\x03",
"而且……\x01",
"琪雅的决心似乎也很坚定……\x02\x03",
"#01404F#50W如果想打动他们二人……\x01",
"就拼尽一切,放手一搏吧……\x02",
)
)
CloseMessageWindow()
Sleep(200)
# Arios turns away (OP_A6 on his worker thread) and performs the release
# animation (Function_33), then the barrier device reacts.
Sound(898, 0, 100, 0)
def lambda_6886():
OP_A6(0xFE, 0x0, 0x23, 0x1F4, 0xBB8)
ExitThread()
QueueWorkItem(0x8, 2, lambda_6886)
WaitChrThread(0x8, 2)
BeginChrThread(0x8, 0, 0, 33)
WaitChrThread(0x8, 0)
Sleep(250)
PlayBGM("ed7356", 0)
OP_50(0x4C, (scpexpr(EXPR_PUSH_LONG, 0x164), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Sleep(700)
Sound(202, 0, 100, 0)
Sound(181, 0, 80, 0)
PlayEffect(0x0, 0xFF, 0xFF, 0x0, 0, 12050, 208000, 0, 0, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
Sleep(200)
SetMapObjFrame(0xFF, "magi_04_add", 0x0, 0x1)
Sleep(2000)
# Wide shot: the barrier panels around the platform dissolve one by one
# (staggered bursts of effect 0x1 at fixed world positions).
OP_68(0, 13400, 208000, 0)
MoveCamera(30, 20, 0, 0)
OP_6E(700, 0)
SetCameraDistance(42000, 0)
Fade(500)
SetCameraDistance(44000, 5000)
OP_0D()
Sound(223, 0, 50, 0)
Sound(293, 0, 60, 0)
PlayEffect(0x1, 0xFF, 0xFF, 0x0, -8810, 11000, 195890, 250, -40, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
Sleep(50)
PlayEffect(0x1, 0xFF, 0xFF, 0x0, -18740, 11000, 207720, 240, -40, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
Sleep(50)
PlayEffect(0x1, 0xFF, 0xFF, 0x0, -8810, 11000, 219990, 277, -40, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
Sleep(50)
PlayEffect(0x1, 0xFF, 0xFF, 0x0, 2340, 11000, 226130, 26, -40, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
Sleep(50)
PlayEffect(0x1, 0xFF, 0xFF, 0x0, 14640, 11000, 216150, 34, -40, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
Sleep(50)
PlayEffect(0x1, 0xFF, 0xFF, 0x0, 14650, 11000, 200000, 64, -40, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
Sleep(150)
OP_75(0x2, 0x1, 0x7D0)
PlayEffect(0x1, 0xFF, 0xFF, 0x0, -14190, 4900, 215730, 314, -33, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
Sleep(50)
PlayEffect(0x1, 0xFF, 0xFF, 0x0, 3670, 8000, 223960, 85, -33, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
Sleep(50)
PlayEffect(0x1, 0xFF, 0xFF, 0x0, 16510, 5300, 208790, 89, -33, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
Sleep(150)
PlayEffect(0x1, 0xFF, 0xFF, 0x0, -14840, 11000, 200070, 295, -40, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
Sleep(50)
PlayEffect(0x1, 0xFF, 0xFF, 0x0, -14870, 11000, 216410, 326, -40, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
Sleep(50)
PlayEffect(0x1, 0xFF, 0xFF, 0x0, -2400, 11000, 226170, 334, -40, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
Sleep(50)
PlayEffect(0x1, 0xFF, 0xFF, 0x0, 8990, 11000, 220020, 80, -40, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
Sleep(50)
PlayEffect(0x1, 0xFF, 0xFF, 0x0, 18340, 11000, 208220, 120, -40, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
Sleep(50)
PlayEffect(0x1, 0xFF, 0xFF, 0x0, 8860, 11000, 195800, 110, -40, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
Sleep(150)
PlayEffect(0x1, 0xFF, 0xFF, 0x0, -1580, 4700, 224260, 271, -33, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
Sleep(50)
PlayEffect(0x1, 0xFF, 0xFF, 0x0, 16370, 8500, 219180, 44, -33, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
Sleep(50)
PlayEffect(0x1, 0xFF, 0xFF, 0x0, 6440, 1300, 196290, 113, -13, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
Sleep(50)
PlayEffect(0x1, 0xFF, 0xFF, 0x0, 23440, -900, 210080, 119, -13, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
Sleep(500)
Sleep(100)
Sleep(100)
Sleep(100)
Sleep(100)
Sleep(100)
Sleep(100)
# Wrap up: fade out, stop the rain channel, set progress flags and move on.
FadeToDark(1500, 0, -1)
OP_24(0x80)
OP_0D()
SetScenarioFlags(0x22, 0)
SetScenarioFlags(0x22, 2)
NewScene("m9008", 0, 0, 0)
IdleLoop()
Return()
# Function_31_5420 end
# Thread helper: Arios's looping idle animation -- cycles sub-chips
# 8 -> 0xC and back down at 150 ms per frame, forever (constant-1 Jc idiom;
# the caller kills it with EndChrThread).
def Function_32_6DE5(): pass
label("Function_32_6DE5")
Jc((scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_END)), "loc_6E2D")
SetChrSubChip(0xFE, 0x8)
Sleep(150)
SetChrSubChip(0xFE, 0x9)
Sleep(150)
SetChrSubChip(0xFE, 0xA)
Sleep(150)
SetChrSubChip(0xFE, 0xB)
Sleep(150)
SetChrSubChip(0xFE, 0xC)
Sleep(150)
SetChrSubChip(0xFE, 0xB)
Sleep(150)
SetChrSubChip(0xFE, 0xA)
Sleep(150)
SetChrSubChip(0xFE, 0x9)
Sleep(150)
Jump("Function_32_6DE5")
label("loc_6E2D")
Return()
# Function_32_6DE5 end
# Thread helper: one-shot animation stepping sub-chips 1 -> 7, holding, then
# 0xD -> 0xF with SEs 811/862 -- presumably Arios sheathing his sword when he
# yields (called from Function_31 right before the barrier release).
def Function_33_6E2E(): pass
label("Function_33_6E2E")
SetChrSubChip(0xFE, 0x1)
Sleep(100)
SetChrSubChip(0xFE, 0x2)
Sleep(100)
SetChrSubChip(0xFE, 0x3)
Sleep(100)
SetChrSubChip(0xFE, 0x4)
Sleep(100)
SetChrSubChip(0xFE, 0x5)
Sleep(100)
SetChrSubChip(0xFE, 0x6)
Sleep(100)
SetChrSubChip(0xFE, 0x7)
Sound(811, 0, 40, 0)
Sleep(100)
SetChrSubChip(0xFE, 0x7)
Sleep(300)
SetChrSubChip(0xFE, 0x7)
Sleep(100)
SetChrSubChip(0xFE, 0xD)
Sleep(100)
SetChrSubChip(0xFE, 0xE)
Sleep(100)
Sound(811, 0, 80, 0)
SetChrSubChip(0xFE, 0xF)
Sleep(100)
Sound(862, 0, 30, 0)
Sleep(300)
Return()
# Function_33_6E2E end
def Function_34_6E98(): pass
label("Function_34_6E98")
EventBegin(0x0)
FadeToDark(0, 0, -1)
OP_E2(0x3)
Call(0, 43)
LoadChrToIndex("chr/ch00050.itc", 0x1F)
LoadChrToIndex("chr/ch00150.itc", 0x20)
LoadChrToIndex("chr/ch00250.itc", 0x21)
LoadChrToIndex("chr/ch00350.itc", 0x22)
Call(0, 6)
Call(0, 7)
LoadChrToIndex("chr/ch00056.itc", 0x25)
LoadEffect(0x0, "event/ev17012.eff")
SetChrPos(0x101, -430, 12000, 207440, 0)
SetChrPos(0x102, 470, 12000, 206000, 0)
SetChrPos(0x103, -1370, 12000, 204870, 0)
SetChrPos(0x104, 1370, 12000, 204570, 0)
SetChrPos(0xF4, -2540, 12000, 205690, 0)
SetChrPos(0xF5, 2400, 12000, 205790, 315)
ClearChrFlags(0x4, 0x80)
ClearChrBattleFlags(0x4, 0x8000)
ClearChrFlags(0x5, 0x80)
ClearChrBattleFlags(0x5, 0x8000)
SetChrChipByIndex(0x101, 0x1F)
SetChrSubChip(0x101, 0x0)
SetChrChipByIndex(0x102, 0x20)
SetChrSubChip(0x102, 0x0)
SetChrChipByIndex(0x103, 0x21)
SetChrSubChip(0x103, 0x0)
SetChrChipByIndex(0x104, 0x22)
SetChrSubChip(0x104, 0x0)
SetChrChipByIndex(0xF4, 0x23)
SetChrSubChip(0xF4, 0x0)
SetChrChipByIndex(0xF5, 0x24)
SetChrSubChip(0xF5, 0x0)
SetChrChipByIndex(0x8, 0x0)
SetChrSubChip(0x8, 0xF)
ClearChrFlags(0x8, 0x80)
SetChrFlags(0x8, 0x8000)
SetChrFlags(0x8, 0x1000)
SetChrFlags(0x8, 0x20)
SetChrFlags(0x8, 0x2)
SetChrFlags(0x8, 0x800)
SetChrPos(0x8, 0, 12000, 211500, 180)
SetMapObjFrame(0xFF, "magi_04_add", 0x0, 0x1)
SetMapObjFlags(0x2, 0x4)
OP_68(0, 13050, 222000, 0)
MoveCamera(16, 28, 0, 0)
OP_6E(600, 0)
SetCameraDistance(31500, 0)
SetCameraDistance(29500, 2600)
FadeToBright(1000, 0)
OP_0D()
Sleep(300)
PlayEffect(0x0, 0x0, 0xFF, 0x0, 0, 12000, 222000, 0, 0, 0, 1000, 1000, 1000, 0xFF, 0, 0, 0, 0)
Sound(935, 0, 80, 0)
SetMapObjFrame(0xFF, "magi10_add", 0x1, 0x1)
SetMapObjFrame(0xFF, "magi11_add", 0x1, 0x1)
Sleep(200)
SetMapObjFrame(0xFF, "point_add", 0x1, 0x1)
Sleep(600)
StopEffect(0x0, 0x2)
OP_6F(0x79)
OP_68(0, 12800, 208700, 0)
MoveCamera(46, 18, 0, 0)
OP_6E(600, 0)
SetCameraDistance(18410, 0)
Fade(500)
OP_0D()
Sleep(300)
BeginChrThread(0x101, 0, 0, 36)
BeginChrThread(0x102, 0, 0, 37)
BeginChrThread(0x103, 0, 0, 38)
BeginChrThread(0x104, 0, 0, 39)
BeginChrThread(0xF4, 0, 0, 40)
BeginChrThread(0xF5, 0, 0, 41)
WaitChrThread(0x101, 0)
WaitChrThread(0x102, 0)
WaitChrThread(0x103, 0)
WaitChrThread(0x104, 0)
WaitChrThread(0xF4, 0)
WaitChrThread(0xF5, 0)
Sleep(100)
OP_68(160, 12800, 209170, 2000)
MoveCamera(44, 18, 0, 2000)
OP_6E(600, 2000)
SetCameraDistance(17490, 2000)
BeginChrThread(0x101, 0, 0, 35)
WaitChrThread(0x101, 0)
Sleep(500)
OP_63(0x101, 0x0, 2000, 0x18, 0x1B, 0xFA, 0x0)
Sleep(1500)
OP_64(0x101)
Sleep(500)
OP_6F(0x79)
Jc((scpexpr(EXPR_GET_RESULT, 0x2), scpexpr(EXPR_PUSH_LONG, 0x0), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_7199")
#C0175
ChrTalk(
0x102,
"#00108F#12P罗伊德……\x02",
)
CloseMessageWindow()
Jump("loc_7295")
label("loc_7199")
Jc((scpexpr(EXPR_GET_RESULT, 0x2), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_71D0")
#C0176
ChrTalk(
0x103,
"#00208F#12P……罗伊德前辈……\x02",
)
CloseMessageWindow()
Jump("loc_7295")
label("loc_71D0")
Jc((scpexpr(EXPR_GET_RESULT, 0x2), scpexpr(EXPR_PUSH_LONG, 0x2), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_71FF")
#C0177
ChrTalk(
0x104,
"#00308F#12P罗伊德……\x02",
)
CloseMessageWindow()
Jump("loc_7295")
label("loc_71FF")
Jc((scpexpr(EXPR_GET_RESULT, 0x2), scpexpr(EXPR_PUSH_LONG, 0x3), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_7232")
#C0178
ChrTalk(
0x105,
"#10408F#12P……罗伊德……\x02",
)
CloseMessageWindow()
Jump("loc_7295")
label("loc_7232")
Jc((scpexpr(EXPR_GET_RESULT, 0x2), scpexpr(EXPR_PUSH_LONG, 0x4), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_7265")
#C0179
ChrTalk(
0x109,
"#10108F#12P罗伊德警官……\x02",
)
CloseMessageWindow()
Jump("loc_7295")
label("loc_7265")
Jc((scpexpr(EXPR_GET_RESULT, 0x2), scpexpr(EXPR_PUSH_LONG, 0x5), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_7295")
#C0180
ChrTalk(
0x106,
"#10708F#12P……罗伊德警官。\x02",
)
CloseMessageWindow()
label("loc_7295")
#C0181
ChrTalk(
0x101,
(
"#00004F#11P#30W哈哈……\x02\x03",
"#00008F……如今……\x01",
"终于有种追上大哥的感觉了。\x02\x03",
"#00002F谢谢……\x01",
"多亏有大家帮忙。\x02",
)
)
CloseMessageWindow()
#C0182
ChrTalk(
0x104,
(
"#00304F#12P哈哈……\x01",
"这叫什么话。\x02",
)
)
CloseMessageWindow()
#C0183
ChrTalk(
0x103,
(
"#00204F#12P……我认为是\x01",
"罗伊德前辈靠自己的意志打破了\x01",
"亚里欧斯先生这道『壁障』。\x02\x03",
"#00208F从而让盖伊先生的死因\x01",
"时隔多年之后终于真相大白……\x02",
)
)
CloseMessageWindow()
#C0184
ChrTalk(
0x102,
(
"#00104F#12P是啊……我们只是\x01",
"从旁协助而已。\x02\x03",
"#00108F不过,接下来就\x01",
"不能再说是协助了……\x02",
)
)
CloseMessageWindow()
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x5)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_7447")
#C0185
ChrTalk(
0x106,
"#10703F#12P……是啊……\x02",
)
CloseMessageWindow()
Jump("loc_74A4")
label("loc_7447")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x8)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_7479")
#C0186
ChrTalk(
0x109,
"#10106F#12P……是啊……\x02",
)
CloseMessageWindow()
Jump("loc_74A4")
label("loc_7479")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x4)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_74A4")
#C0187
ChrTalk(
0x105,
"#10406F#12P……是啊。\x02",
)
CloseMessageWindow()
label("loc_74A4")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x9)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_74EC")
#C0188
ChrTalk(
0x104,
(
"#00308F#12P贝尔小姐和伊安律师,\x01",
"还有阿琪……\x02",
)
)
CloseMessageWindow()
OP_5A()
Jump("loc_7585")
label("loc_74EC")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x4)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_753D")
#C0189
ChrTalk(
0x105,
(
"#10408F#12P玛丽亚贝尔小姐和大胡子熊律师,\x01",
"还有琪雅……\x02",
)
)
CloseMessageWindow()
Jump("loc_7585")
label("loc_753D")
Jc((scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x8)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_7585")
#C0190
ChrTalk(
0x109,
(
"#10108F#12P玛丽亚贝尔小姐和伊安律师,\x01",
"还有琪雅……\x02",
)
)
CloseMessageWindow()
label("loc_7585")
#C0191
ChrTalk(
0x101,
"#00006F#11P#30W……嗯……\x02",
)
CloseMessageWindow()
OP_68(350, 12800, 208640, 1000)
MoveCamera(37, 17, 0, 1000)
def lambda_75C5():
OP_93(0xFE, 0xB4, 0x190)
ExitThread()
QueueWorkItem(0x101, 2, lambda_75C5)
Sleep(300)
def lambda_75D5():
TurnDirection(0xFE, 0x101, 500)
ExitThread()
QueueWorkItem(0xF4, 2, lambda_75D5)
OP_6F(0x79)
#C0192
ChrTalk(
0x101,
(
"#00003F#5P最后的『领域』也已经开放了。\x02\x03",
"#00000F总之……\x01",
"我们先回『神域』的终点吧。\x02",
)
)
CloseMessageWindow()
SetCameraDistance(17740, 1000)
FadeToDark(1000, 0, -1)
OP_0D()
ClearChrFlags(0x8, 0x8000)
SetChrPos(0x0, 0, 12000, 202500, 0)
SetChrFlags(0x4, 0x80)
SetChrBattleFlags(0x4, 0x8000)
SetChrFlags(0x5, 0x80)
SetChrBattleFlags(0x5, 0x8000)
OP_69(0xFF, 0x0)
OP_37()
SetScenarioFlags(0x1A9, 4)
OP_29(0xB2, 0x1, 0x8)
ModifyEventFlags(0, 0, 0x80)
OP_50(0x4C, (scpexpr(EXPR_PUSH_LONG, 0xFFFF), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
OP_50(0x1, (scpexpr(EXPR_PUSH_LONG, 0x164), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
ClearScenarioFlags(0x22, 2)
OP_E2(0x2)
Sleep(500)
EventEnd(0x5)
Return()
# Function_34_6E98 end
# Character-thread routine: issues a single OP_9B command for the invoking
# character (0xFE appears to be the "self" id in these scripts) — presumably
# a short movement; TODO confirm against the disassembler's opcode table.
def Function_35_76A5(): pass
label("Function_35_76A5")
OP_9B(0x0, 0xFE, 0x162, 0x320, 0x3E8, 0x0)
Return()
# Function_35_76A5 end
# Character-thread routine: play sound effect 805, then restore the invoking
# character's sprite chip and sub-chip (0xFF presumably means "default chip"
# — confirm).
def Function_36_76B5(): pass
label("Function_36_76B5")
Sound(805, 0, 100, 0)
SetChrChipByIndex(0xFE, 0xFF)
SetChrSubChip(0xFE, 0x0)
Return()
# Function_36_76B5 end
# Character-thread routine: after a 200-tick delay, play sound effect 531
# and restore the invoking character's default sprite chip/sub-chip.
def Function_37_76C4(): pass
label("Function_37_76C4")
Sleep(200)
Sound(531, 0, 100, 0)
SetChrChipByIndex(0xFE, 0xFF)
SetChrSubChip(0xFE, 0x0)
Return()
# Function_37_76C4 end
# Character-thread routine: after a 300-tick delay, play sound effect 805
# and restore the invoking character's default sprite chip/sub-chip.
def Function_38_76D6(): pass
label("Function_38_76D6")
Sleep(300)
Sound(805, 0, 100, 0)
SetChrChipByIndex(0xFE, 0xFF)
SetChrSubChip(0xFE, 0x0)
Return()
# Function_38_76D6 end
# Character-thread routine: after a 100-tick delay, restore the invoking
# character's default sprite chip/sub-chip (no sound, unlike its siblings).
def Function_39_76E8(): pass
label("Function_39_76E8")
Sleep(100)
SetChrChipByIndex(0xFE, 0xFF)
SetChrSubChip(0xFE, 0x0)
Return()
# Function_39_76E8 end
# Character-thread routine for party slot 0xF4: after a 400-tick delay,
# play SE 540 when OP_FB(0xF4, 0x5) matches, else SE 531 when the slot
# matches id 0x8 or 0x9; finally restore the default sprite chip.
# OP_FB presumably tests which character occupies the slot — TODO confirm.
def Function_40_76F4(): pass
label("Function_40_76F4")
Sleep(400)
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF4, 0x5)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_7714")
Sound(540, 0, 50, 0)
Jump("loc_7739")
label("loc_7714")
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF4, 0x8)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_EXEC_OP, "OP_FB(0xF4, 0x9)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_OR), scpexpr(EXPR_END)), "loc_7739")
Sound(531, 0, 100, 0)
label("loc_7739")
SetChrChipByIndex(0xFE, 0xFF)
SetChrSubChip(0xFE, 0x0)
Return()
# Function_40_76F4 end
# Character-thread routine for party slot 0xF5: same shape as the slot 0xF4
# routine above but with a 500-tick delay — SE 540 for OP_FB(0xF5, 0x5),
# SE 531 for ids 0x8/0x9, then restore the default sprite chip.
def Function_41_7742(): pass
label("Function_41_7742")
Sleep(500)
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF5, 0x5)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_END)), "loc_7762")
Sound(540, 0, 50, 0)
Jump("loc_7787")
label("loc_7762")
Jc((scpexpr(EXPR_EXEC_OP, "OP_FB(0xF5, 0x8)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_EXEC_OP, "OP_FB(0xF5, 0x9)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_OR), scpexpr(EXPR_END)), "loc_7787")
Sound(531, 0, 100, 0)
label("loc_7787")
SetChrChipByIndex(0xFE, 0xFF)
SetChrSubChip(0xFE, 0x0)
Return()
# Function_41_7742 end
# Stage NPC 0x8: select chip 0x0 / sub-chip 0x10, make the character
# visible (clear flag 0x80) and apply a set of display/behavior flags.
# NOTE(review): flag 0x800 is set twice — presumably harmless, but it may
# be a copy-paste slip for a different flag; confirm against the original
# binary before changing.
def Function_42_7790(): pass
label("Function_42_7790")
SetChrChipByIndex(0x8, 0x0)
SetChrSubChip(0x8, 0x10)
ClearChrFlags(0x8, 0x80)
SetChrFlags(0x8, 0x800)
SetChrFlags(0x8, 0x8000)
SetChrFlags(0x8, 0x2)
SetChrFlags(0x8, 0x1000)
SetChrFlags(0x8, 0x800)
Return()
# Function_42_7790 end
# Speaker-selection routine: stores an index (0-5) into work register 0x2,
# defaulting to 0.  First it checks scenario flags 0x1AB bits 1-6 (bits
# 4-6 additionally require the matching optional member to be in the party
# via GetPartyIndex); if none match, it falls back on OP_DC(0x1) —
# presumably the currently assigned support member's id (TODO confirm).
# The dialogue dispatch at loc_7199 in Function_34 branches on this value.
def Function_43_77B7(): pass
label("Function_43_77B7")
RunExpression(0x2, (scpexpr(EXPR_PUSH_LONG, 0x0), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x1AB, 1)), scpexpr(EXPR_END)), "loc_77D9")
RunExpression(0x2, (scpexpr(EXPR_PUSH_LONG, 0x0), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Jump("loc_7942")
label("loc_77D9")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x1AB, 2)), scpexpr(EXPR_END)), "loc_77F1")
RunExpression(0x2, (scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Jump("loc_7942")
label("loc_77F1")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x1AB, 3)), scpexpr(EXPR_END)), "loc_7809")
RunExpression(0x2, (scpexpr(EXPR_PUSH_LONG, 0x2), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Jump("loc_7942")
label("loc_7809")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x1AB, 4)), scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x8)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_782C")
RunExpression(0x2, (scpexpr(EXPR_PUSH_LONG, 0x4), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Jump("loc_7942")
label("loc_782C")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x1AB, 5)), scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x4)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_784F")
RunExpression(0x2, (scpexpr(EXPR_PUSH_LONG, 0x3), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Jump("loc_7942")
label("loc_784F")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x1AB, 6)), scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x5)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_7872")
RunExpression(0x2, (scpexpr(EXPR_PUSH_LONG, 0x5), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Jump("loc_7942")
label("loc_7872")
Jc((scpexpr(EXPR_EXEC_OP, "OP_DC(0x1)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_7890")
RunExpression(0x2, (scpexpr(EXPR_PUSH_LONG, 0x0), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Jump("loc_7942")
label("loc_7890")
Jc((scpexpr(EXPR_EXEC_OP, "OP_DC(0x1)"), scpexpr(EXPR_PUSH_LONG, 0x2), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_78AE")
RunExpression(0x2, (scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Jump("loc_7942")
label("loc_78AE")
Jc((scpexpr(EXPR_EXEC_OP, "OP_DC(0x1)"), scpexpr(EXPR_PUSH_LONG, 0x3), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_78CC")
RunExpression(0x2, (scpexpr(EXPR_PUSH_LONG, 0x2), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Jump("loc_7942")
label("loc_78CC")
Jc((scpexpr(EXPR_EXEC_OP, "OP_DC(0x1)"), scpexpr(EXPR_PUSH_LONG, 0x8), scpexpr(EXPR_EQU), scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x8)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_78F5")
RunExpression(0x2, (scpexpr(EXPR_PUSH_LONG, 0x4), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Jump("loc_7942")
label("loc_78F5")
Jc((scpexpr(EXPR_EXEC_OP, "OP_DC(0x1)"), scpexpr(EXPR_PUSH_LONG, 0x4), scpexpr(EXPR_EQU), scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x4)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_791E")
RunExpression(0x2, (scpexpr(EXPR_PUSH_LONG, 0x3), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Jump("loc_7942")
label("loc_791E")
Jc((scpexpr(EXPR_EXEC_OP, "OP_DC(0x1)"), scpexpr(EXPR_PUSH_LONG, 0x5), scpexpr(EXPR_EQU), scpexpr(EXPR_EXEC_OP, "GetPartyIndex(0x5)"), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_NEG), scpexpr(EXPR_GTR), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_7942")
RunExpression(0x2, (scpexpr(EXPR_PUSH_LONG, 0x5), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
label("loc_7942")
Return()
# Function_43_77B7 end
# Decompiler footer: write the rebuilt scenario file, then execute main()
# under the framework's Try wrapper.
SaveToFile()
Try(main)
|
selling_price=float(input("Enter thr selling price:"))
cost_price=float(input("Enter thr cost price:"))
if(selling_price>cost_price):
print("profit")
else:
print("loss") |
#!/usr/bin/env python3
from flask import Blueprint, render_template
# Blueprint mounted at /try; the route below is relative to that prefix.
trymez = Blueprint('trymez', __name__, url_prefix='/try')
@trymez.route('/', methods=['GET'])
def show_try():
    """Render the static landing page for GET /try/."""
    return render_template('trymez.html')
from django.db import models
import datetime
from django.utils import timezone
from decimal import Decimal
from allauth.socialaccount.models import SocialAccount
from django.contrib.auth.models import User
import hashlib
# (value, label) pairs for weekday choice fields; the integer matches
# datetime.date.weekday() (Monday == 0), which the contain_curr_time
# methods below compare against.
DAYS_OF_WEEK = (
    (0, 'Monday'),
    (1, 'Tuesday'),
    (2, 'Wednesday'),
    (3, 'Thursday'),
    (4, 'Friday'),
    (5, 'Saturday'),
    (6, 'Sunday'),
)
class RestaurantInfo(models.Model):
    """
    Model storing basic information of the restaurant:
    name, optional contact details and address, an about-us blurb,
    and an optional logo image.
    """
    name = models.CharField(max_length=50)
    email = models.EmailField(null=True, blank=True)
    phone = models.CharField(max_length=20, null=True, blank=True)
    fax = models.CharField(max_length=20, null=True, blank=True)
    street = models.CharField(max_length=50, null=True, blank=True)
    city = models.CharField(max_length=50, null=True, blank=True)
    state = models.CharField(max_length=50, null=True, blank=True)
    # NOTE(review): an IntegerField drops leading zeros in ZIP codes —
    # a CharField is usually safer; confirm before migrating.
    zipcode = models.IntegerField(null=True, blank=True)
    about_us = models.TextField(null=True, blank=True)
    logo = models.ImageField(upload_to = 'images/RestaurantInfo/', blank=True)
    def __unicode__(self):
        # Python 2 Django: string shown in the admin and shell.
        return self.name
class BusinessHours(models.Model):
    """
    Model to store business hours of the restaurant,
    one row per weekday with an opening and a closing time.
    """
    day = models.IntegerField(choices=DAYS_OF_WEEK)
    open_time = models.TimeField('open time')
    close_time = models.TimeField('close time')
    def contain_curr_time(self, curr_date_time):
        """
        Return True when curr_date_time falls on this row's weekday and
        lies between open_time and close_time (both inclusive).

        NOTE(review): a window that crosses midnight (close_time earlier
        than open_time) can never match — confirm that case never occurs.
        """
        #curr_date_time = timezone.localtime(timezone.now())
        if(self.day == curr_date_time.weekday()):
            return ((self.open_time <= curr_date_time.time()) and (curr_date_time.time() <= self.close_time))
        return False
    def __unicode__(self):
        return "%s" % self.get_day_display()
class Suggestion(models.Model):
    """
    Model to store a suggestion submitted by a customer.
    """
    name = models.CharField(max_length=50)
    email = models.EmailField()
    # NOTE(review): auto_now refreshes the timestamp on *every* save;
    # for a "date posted" field auto_now_add is usually intended — confirm.
    post_date = models.DateTimeField('date posted', auto_now = True)
    content = models.TextField()
    def __unicode__(self):
        return u"%s" % self.id
class MenuCategory(models.Model):
    """
    Model to store categories of menu (e.g, dinner, lunch, etc.)
    with an optional serving window (a weekday range plus a time range).
    """
    name = models.CharField(max_length=50, unique = True)
    description = models.CharField(max_length=255, null=True, blank=True)
    start_day = models.IntegerField('start serving day', choices=DAYS_OF_WEEK, null=True, blank=True)
    end_day = models.IntegerField('end serving day', choices=DAYS_OF_WEEK, null=True, blank=True)
    start_time = models.TimeField('start serving time', null=True, blank=True)
    end_time = models.TimeField('end serving time', null=True, blank=True)
    def contain_curr_time(self, curr_date_time):
        """
        Return True when curr_date_time lies inside the serving window:
        start_day <= weekday <= end_day and start_time <= time <= end_time.

        NOTE(review): all four bounds are nullable; comparing against None
        raises TypeError on Python 3 — confirm callers skip unset windows.
        A window wrapping past Sunday (start_day > end_day) never matches.
        """
        if(self.start_day <= curr_date_time.weekday() and curr_date_time.weekday() <= self.end_day):
            return ((self.start_time <= curr_date_time.time()) and (curr_date_time.time() <= self.end_time))
        return False
    def __unicode__(self):
        return self.name
class FoodCategory(models.Model):
    """
    Model to store categories of food (e.g, noodle, sandwich, etc.).
    """
    name = models.CharField(max_length=50)
    description = models.CharField(max_length=255, null=True, blank=True)
    def __unicode__(self):
        return self.name
class FoodItem(models.Model):
    """
    Model to store a food item and its details (name, blurb, photo).
    Pricing lives on FoodMenu, not here, so one item can appear on
    several menus at different prices.
    """
    name = models.CharField(max_length=50)
    description = models.CharField(max_length=255, blank=True)
    image = models.ImageField(upload_to = 'images/FoodItem/', blank=True)
    def __unicode__(self):
        return self.name
class ItemChoice(models.Model):
    """
    Model to store choices of a food item (e.g. size or preparation
    options), each carrying a price delta on top of the base price.
    """
    name = models.CharField(max_length=50)
    food_item = models.ForeignKey(FoodItem)
    # Amount added to the FoodMenu price when this choice is selected.
    price_add = models.FloatField()
    def __unicode__(self):
        # BUG FIX: the original called self.get_name_display(), but Django
        # only generates get_FOO_display() for fields declared with
        # choices; `name` has none, so unicode() raised AttributeError.
        return u"%s [%s]" % (self.food_item, self.name)
class FoodMenu(models.Model):
    """
    Model to represent the menu of a restaurant: each row places one
    food item inside a food category inside a menu category, at a price.
    """
    food_item = models.ForeignKey(FoodItem)
    food_cat = models.ForeignKey(FoodCategory)
    menu_cat = models.ForeignKey(MenuCategory)
    price = models.DecimalField(max_digits=8, decimal_places=2, default=Decimal('0.00'))
    def __unicode__(self):
        return self.food_item.name + " | " + self.menu_cat.name
class AlbumGallery(models.Model):
    """
    Model to represent a gallery album (a named group of images).
    """
    name = models.CharField(max_length=50, default = "untitled")
    description = models.CharField(max_length=255, null=True, blank=True)
    def __unicode__(self):
        return self.name
class ImageGallery(models.Model):
    """
    Model to represent a gallery image; an image may belong to any
    number of albums (many-to-many).
    """
    name = models.CharField(max_length=50, default = "untitled")
    image = models.ImageField(upload_to = 'images/ImageGallery')
    albums = models.ManyToManyField(AlbumGallery)
    def __unicode__(self):
        return self.name
class LikeFoodMenu(models.Model):
    """
    Model to represent a customer liking a menu entry
    (one row per customer/menu-entry pair).

    NOTE(review): no unique_together constraint, so the same customer
    can "like" the same entry repeatedly — confirm that is intended.
    """
    customer = models.ForeignKey(User)
    food_menu = models.ForeignKey(FoodMenu)
|
# -*- coding: utf-8 -*-
import inject
import logging
import psycopg2
import sys
import crypt
if __name__ == '__main__':
    # Sync OS accounts from the DB:
    # usage: script <host> <port> <user> <dbpassword> <db>
    #
    # BUG FIX: the original tested len(sys.argv) < 5 but then read
    # sys.argv[5], crashing with IndexError when exactly five entries
    # were present; five positional arguments require len(sys.argv) >= 6.
    if len(sys.argv) < 6:
        sys.exit(1)

    host = sys.argv[1]
    port = sys.argv[2]
    user = sys.argv[3]
    dbpassword = sys.argv[4]
    db = sys.argv[5]

    con = psycopg2.connect(host=host, port=port, user=user,
                           password=dbpassword, dbname=db)
    try:
        cur = con.cursor()
        cur.execute('select username from credentials.user_password up, domain.users du where up.user_id = du.id')
        if cur.rowcount <= 0:
            sys.exit()
        # Usernames that are managed by the database.
        existent = [row[0] for row in cur]

        def _unmanaged_lines(path):
            # Keep every entry whose account name is not DB-managed;
            # those lines are written back verbatim below.
            with open(path, 'r') as fh:
                return [line for line in fh if line.split(':')[0] not in existent]

        linesp = _unmanaged_lines('/etc/passwd')
        linesg = _unmanaged_lines('/etc/group')
        liness = _unmanaged_lines('/etc/shadow')

        cur.execute('select username,password from credentials.user_password up, domain.users du where up.user_id = du.id')
        if cur.rowcount <= 0:
            sys.exit()

        # DB-managed users get sequential uid/gid pairs starting at 1000.
        uid = 1000
        gid = 1000
        # Rewrite the account files: preserved system entries first, then
        # one passwd/group/shadow line per DB-managed user.  Context
        # managers replace the original try/finally close chain.
        with open('/etc/passwd', 'w') as f, \
                open('/etc/group', 'w') as f2, \
                open('/etc/shadow', 'w') as f3:
            f.writelines(linesp)
            f2.writelines(linesg)
            f3.writelines(liness)
            for (username, password) in cur:
                f.write('{0}:x:{1}:{2}:{0}:/home/{0}:/bin/bash\n'.format(username, uid, gid))
                f2.write('{0}:x:{1}:{0}\n'.format(username, gid))
                # NOTE(review): crypt.crypt() without an explicit salt picks
                # the strongest available method on modern Pythons but the
                # result varies by platform — confirm the intended scheme.
                f3.write('{0}:{1}:::::::\n'.format(username, crypt.crypt(password)))
                uid = uid + 1
                gid = gid + 1
    finally:
        con.close()
|
from pigtest import PigTestCase, main
class TestExcite(PigTestCase):
    """Pig unit tests for the top_density_songs script."""

    PigScript = 'top_density_songs'

    # Column order of one song record, as the Pig relation expects it.
    _COLUMNS = (
        'track_id', 'analysis_sample_rate', 'artist_7digitalid',
        'artist_familiarity', 'artist_hotness', 'artist_id',
        'artist_latitude', 'artist_location', 'artist_longitude',
        'artist_mbid', 'artist_mbtags', 'artist_mbtags_count',
        'artist_name', 'artist_playmeid', 'artist_terms',
        'artist_terms_freq', 'artist_terms_weight', 'audio_md5',
        'bars_confidence', 'bars_start', 'beats_confidence',
        'beats_start', 'danceability', 'duration', 'end_of_fade_in',
        'energy', 'key', 'key_confidence', 'loudness', 'mode',
        'mode_confidence', 'release', 'release_7digitalid',
        'sections_confidence', 'sections_start', 'segments_confidence',
        'segments_loudness_max', 'segments_loudness_max_time',
        'segments_loudness_max_start', 'segments_pitches',
        'segments_start', 'segments_timbre', 'similar_artists',
        'song_hotness', 'song_id', 'start_of_fade_out',
        'tatums_confidence', 'tatums_start', 'tempo', 'time_signature',
        'time_signature_confidence', 'title', 'track_7digitalid', 'year',
    )

    def generateRecord(self, fields):
        """Build a full song tuple from *fields*; missing keys become None."""
        return tuple(fields.get(column) for column in self._COLUMNS)

    def testFilterDuration(self):
        """Songs with a non-positive duration must be filtered out."""
        rows = [
            {'track_id': '12345', 'duration': 1.2},
            {'track_id': '12346', 'duration': 0},
            {'track_id': '12347', 'duration': 0},
            {'track_id': '12348', 'duration': 100},
        ]
        self.mockAlias('songs', [self.generateRecord(row) for row in rows])
        self.assertAliasEquals('filtered_songs', [
            self.generateRecord(row) for row in rows if row['duration'] > 0
        ])
|
class Solution:
    def mergeSimilarItems(self, items1: list[list[int]], items2: list[list[int]]) -> list[list[int]]:
        """Merge two [value, weight] item lists.

        Weights of items sharing a value are summed across the two lists;
        the result is returned as [value, total_weight] pairs sorted by
        value.

        Fix: the original annotated with typing.List without importing it,
        which raises NameError at class-definition time; builtin generics
        (PEP 585, Python 3.9+) need no import.
        """
        # Seed with items1 (later duplicates within items1 overwrite,
        # matching the original), then accumulate items2 on top.
        totals = {value: weight for value, weight in items1}
        for value, weight in items2:
            totals[value] = totals.get(value, 0) + weight
        return sorted([value, weight] for value, weight in totals.items())
a, b, c = input().split()
if(int(a) %2 == 0 and int(b) %2 == 0 and int(c) %2 == 0):
print("NO")
elif(int(a) %2 != 0 and int(b) %2 != 0 and int(c) %2 != 0):
print("NO")
elif(int(a) %2 == 0 and int(b) %2 != 0 and int(c) %2 != 0):
print("YES")
elif(int(a) %2 == 0 and int(b) %2 == 0 and int(c) %2 != 0):
print("YES")
elif(int(a) %2 == 0 and int(b) %2 != 0 and int(c) %2 == 0):
print("YES")
elif(int(a) %2 != 0 and int(b) %2 == 0 and int(c) %2 == 0):
print("YES")
elif(int(a) %2 != 0 and int(b) %2 != 0 and int(c) %2 == 0):
print("YES")
elif(int(a) %2 != 0 and int(b) %2 == 0 and int(c) %2 != 0):
print("YES")
|
from queue import Queue
def bfs(RG, src, sink):
    """Breadth-first search for an augmenting path in residual graph RG.

    RG maps u -> {v: residual_capacity}.  Returns the shortest path from
    src to sink as a list of (u, v) edges in reverse order (sink end
    first), or -1 when sink is unreachable via positive-capacity edges.

    Fix: `== None` comparisons replaced with identity tests (`is None`),
    the idiomatic and override-safe form.
    """
    parent = {v: None for v in RG}
    dist = {v: None for v in RG}
    que = Queue()
    que.put(src)
    dist[src] = 0
    while not que.empty():
        u = que.get()
        if u == sink:
            break
        for v in RG[u]:
            if RG[u][v] == 0:
                continue  # saturated edge: no residual capacity left
            if dist[v] is None:  # not visited yet
                que.put(v)
                dist[v] = dist[u] + 1
                parent[v] = u
    if dist[sink] is None:
        return -1
    # Walk parent links back from the sink, collecting edges.
    ans = []
    while parent[sink] is not None:
        ans.append((parent[sink], sink))
        sink = parent[sink]
    return ans
def maxFlow(G, src, sink):
    """Edmonds-Karp maximum flow.

    G maps u -> {v: capacity}.  Repeatedly augments along shortest
    residual paths found by bfs() and returns the flow carried by every
    edge of G as a dict {(u, v): flow}.
    """
    # Build the residual network: forward edges carry their capacity,
    # matching backward edges start at zero.
    residual = {node: {} for node in G}
    for u, edges in G.items():
        for v, capacity in edges.items():
            residual[u][v] = capacity
            residual[v][u] = 0
    # Augment until no path with spare capacity remains.
    while True:
        path = bfs(residual, src, sink)
        if path == -1:
            break
        bottleneck = min(residual[u][v] for u, v in path)
        for u, v in path:
            residual[u][v] -= bottleneck
            residual[v][u] += bottleneck
    # Flow pushed through (u, v) equals capacity minus what remains.
    return {(u, v): G[u][v] - residual[u][v] for u in G for v in G[u]}
G = {
0:{1:16,2:13},
1:{3:12},
2:{1:4,4:14},
3:{2:9,5:20},
4:{3:7,5:4},
5:{},
}
"""
G = {
0:{1:7,2:14,3:9},
1:{2:10,5:15},
2:{4:13},
3:{2:3,4:6},
4:{1:8,6:11},
5:{6:18},
6:{}
}"""
print(maxFlow(G,0,5))
|
# Exploratory notebook cells: `data`, `pd` and `plt` are defined earlier
# in the original notebook (outside this chunk).
# filled the empty age with median value of age
data['Age'].fillna(data['Age'].median(), inplace=True)
survived_sex = data[data['Survived']==1]['Sex'].value_counts()
dead_sex = data[data['Survived']==0]['Sex'].value_counts()
#plot the survived male, female and dead male, female
df = pd.DataFrame([survived_sex,dead_sex])
df.index = ['Survived','Dead']
df.plot(kind='bar', figsize=(15,8))
# * It can be clearly seen from the above graph that females survived more than men.
# * Much more than men actually!
# * Should the values be just 0 and 1 for male and female.. or should the difference be more?
# dead and survived based on age of people
figure = plt.figure(figsize=(15,8))
plt.hist([data[data['Survived']==1]['Age'],data[data['Survived']==0]['Age']], color = ['g','r'],
bins = 10,label = ['Survived','Dead'])
plt.xlabel('Age')
plt.ylabel('Number of passengers')
plt.legend()
# * what can be seen here is..
# * those in the range 20-40 are more likely to be dead.
# * those in the range 70-80 are almost always dead
# * 0-20 there is not much diff i think
# * making these as features would be a good idea?
#
# plotting number of survivors based on the fare they gave
figure = plt.figure(figsize=(15,8))
plt.hist([data[data['Survived']==1]['Fare'],data[data['Survived']==0]['Fare']], color = ['g','r'],
bins = 10,label = ['Survived','Dead'])
plt.xlabel('Fare')
plt.ylabel('Number of passengers')
plt.legend()
# * Not exactly sure whether making <50 a feature will be a good idea? Although people less than 50 have high death rate!!
# * But over the complete data set we cannot say anything substantial from the fare alone
# depending upon age the rate of survival
# clearly see that, lower part of reds and above part is green suggesting... lower fares were killed early!
plt.figure(figsize=(15,8))
ax = plt.subplot()
ax.scatter(data[data['Survived']==1]['Age'],data[data['Survived']==1]['Fare'],c='green',s=40)
ax.scatter(data[data['Survived']==0]['Age'],data[data['Survived']==0]['Fare'],c='red',s=40)
ax.set_xlabel('Age')
ax.set_ylabel('Fare')
ax.legend(('survived','dead'),scatterpoints=1,loc='upper right',fontsize=20,)
# * Now i know that individually age between 20-40 are killed more.
# * Also i know individually those with lower fares are also killed more.
# * <font color="red">Should i have individual features of these? or should i combine both into single feature and that will be better predictor?</font>
ax = plt.subplot()
ax.set_ylabel('Survived')
ax.set_xlabel('Pclass')
ax.hist([data[data['Survived']==1]['Pclass'],data[data['Survived']==0]['Pclass']],color = ['g','r'],)
# * So from the above we see pclass3 is mostly dead. Other classes are not giving much info.
# Plotting how fares versus pclass goes?
ax = plt.subplot()
ax.set_ylabel('Average fare')
# we are plotting the mean cause the mean would show overall correlation
# rather than individual data points which may be unclear
data.groupby('Pclass').mean()['Fare'].plot(kind='bar',figsize=(15,8), ax = ax)
survived_embark = data[data['Survived']==1]['Embarked'].value_counts()
dead_embark = data[data['Survived']==0]['Embarked'].value_counts()
df = pd.DataFrame([survived_embark,dead_embark])
df.index = ['Survived','Dead']
df.plot(kind='bar',stacked=True, figsize=(15,8))
|
#!/usr/bin/env python3
# Minimum missed number function
def min_num(o):
    """Return the smallest number missing from the run that starts at min(o).

    Walks the values in sorted order and returns value + 1 for the first
    element whose successor is absent; for a gap-free input this is
    max(o) + 1.  The original performed O(n) list-membership scans per
    element (O(n^2) overall) and carried a tautological `sort[k] in sort`
    test; a set makes each lookup O(1).  Behavior is unchanged, including
    duplicates and the implicit None for empty input.
    """
    ordered = sorted(o)
    present = set(ordered)
    for value in ordered:
        if value + 1 not in present:
            return value + 1
    # Only reachable for empty input; the original also returned None then.
    return None
# input numbers from user
raw = (input('Please enter space-delimited non-negative integer numbers: ')).split(' ')
# BUG FIX: the original removed invalid tokens from the list *while
# iterating over it*, which skips the element following every removed
# one (two bad tokens in a row left the second in place).  A filtering
# comprehension has no such hazard.
inp = [token for token in raw if token.isdigit()]
inp_int = list(map(int, inp))
# Exceptions catching
# str.isdigit() rejects a leading '-', so no negative value can survive
# the filter above; the original's negative-number scan was dead code.
if not inp:
    # Robustness fix: the original crashed with IndexError on inp[0]
    # when every token was invalid (or the input was empty).
    print('no valid numbers entered, please enter again')
    exit()
if len(inp) < 2:
    if int(inp[0]) == 1:
        print('entered 1, it`s minimal non-negative integer number')
        exit()
    elif int(inp[0]) == 0:
        print("entered 0 it`s againt the rules!")
        exit()
    if int(inp[0]) >= 2:
        # The original printed int(inp[0]) - int(inp[0]) + 1, which is
        # always 1: the first missing number below any n >= 2.
        print('missed number is ', 1)
        exit()
# printing missed number after checks
print('missed number is ', min_num(inp_int))
|
# -*- encoding: utf-8 -*-
# author:virualv
# date :8/27/2018
# Python 2 only: demonstrates a byte-string -> unicode -> GBK round-trip.
# Under Python 3, str has no decode() and the first conversion fails.
s = '特斯拉'
# On Python 2 the literal above is a UTF-8 byte string; decode to unicode.
s_to_unicode = s.decode('utf-8')
# Re-encode the unicode text as GBK bytes.
unicode_to_gbk = s_to_unicode.encode('gbk')
print(s_to_unicode)
print(unicode_to_gbk.decode('gbk'))
from django.urls import path
from . import views
# Routes for the hospital-finder app: list view at the root, detail view
# keyed by the hospital primary key.
# NOTE(review): route names mix cases ('find' vs 'Hospital_detail') —
# confirm which names templates reverse before renaming either.
urlpatterns = [
    path('', views.index, name='find'),
    path('detail/<int:hospital_id>', views.detail, name='Hospital_detail'),
]
#!/usr/bin/env python
#
# Test cases for tournament.py
from tournament import *
def testDeleteMatches():
    """Smoke test (Python 2): deleteMatches() must run without raising."""
    deleteMatches()
    print "1. Old matches can be deleted."
def testRegister():
    """After registering one player, countPlayers() should report 1."""
    deleteMatches()
    deletePlayers()
    registerPlayer("Chandra Nalaar")
    # NOTE(review): countPlayers() is stubbed out to a constant, so this
    # check can never fail — restore the real call to make the test useful.
    c = 1 #countPlayers()
    if c != 1:
        raise ValueError(
            "After one player registers, countPlayers() should be 1.")
    print "4. After registering a player, countPlayers() returns 1."
# Ad-hoc connectivity check: run "SELECT 2+2" against the tournament DB
# and print the result.
# NOTE(review): the connection is never closed (conn.close() is left
# commented out below), and psycopg2 has no visible import here —
# presumably it arrives via `from tournament import *`; confirm.
conn=psycopg2.connect(database="tournament")
cur=conn.cursor()
cur.execute("SELECT 2+2;")
val=cur.fetchone()
print(val)
#registerPlayer("Chandra Nalaar")
#conn.close()
#testDeleteMatches()
#testRegister()
#deletePlayers()
#!/usr/bin/env python
from functools import partial
import logging
import os
import pickle
from typing import List, Tuple
import numpy as np
import skimage.io as sio
from divik.cluster import GAPSearch, KMeans
import divik._cli._utils as scr
import divik.core as u
# One (labels, centroids) pair per fitted candidate segmentation.
Segmentations = List[Tuple[u.IntLabels, u.Centroids]]
def get_segmentations(kmeans: GAPSearch) -> Segmentations:
    """Collect a (labels, centroids) pair from every fitted estimator."""
    pairs = []
    for estimator in kmeans.estimators_:
        pairs.append((estimator.labels_, estimator.cluster_centers_))
    return pairs
def make_segmentations_matrix(kmeans: GAPSearch) -> np.ndarray:
    """Stack each estimator's label vector into one column per estimator."""
    columns = [estimator.labels_.reshape(-1, 1)
               for estimator in kmeans.estimators_]
    return np.hstack(columns)
def save(kmeans: GAPSearch, destination: str, xy: np.ndarray=None):
    """Persist a fitted GAPSearch result under *destination*.

    Writes model.pkl (pickled estimator), final_partition.csv/.npy
    (winning labels), segmentations.pkl plus partitions*.csv (labels for
    every candidate cluster count), and — when pixel coordinates *xy*
    are given — a PNG visualization of the final partition.
    """
    logging.info("Saving result.")
    logging.info("Saving model.")
    # All output paths are joined onto the destination directory.
    fname = partial(os.path.join, destination)
    with open(fname('model.pkl'), 'wb') as pkl:
        pickle.dump(kmeans, pkl)
    logging.info("Saving segmentations.")
    np.savetxt(fname('final_partition.csv'), kmeans.labels_.reshape(-1, 1),
               delimiter=', ', fmt='%i')
    np.save(fname('final_partition.npy'), kmeans.labels_.reshape(-1, 1))
    segmentations = get_segmentations(kmeans)
    with open(fname('segmentations.pkl'), 'wb') as pkl:
        pickle.dump(segmentations, pkl)
    partitions = make_segmentations_matrix(kmeans)
    np.savetxt(fname('partitions.csv'), partitions, delimiter=', ', fmt='%i')
    # One per-candidate CSV, named by its cluster count.
    for i in range(partitions.shape[1]):
        np.savetxt(fname('partitions.{0}.csv').format(i + kmeans.min_clusters),
                   partitions[:, i].reshape(-1, 1), delimiter=', ', fmt='%i')
    if xy is not None:
        visualization = u.visualize(kmeans.labels_, xy=xy)
        sio.imsave(fname('partitions.{0}.png').format(kmeans.n_clusters_),
                   visualization)
def main():
    """Script entry point: load data/config, fit GAP-searched KMeans, save."""
    data, config, destination, xy = scr.initialize()
    try:
        single_kmeans = KMeans(**config['kmeans'])
        kmeans = GAPSearch(single_kmeans, **config['gap'])
        kmeans.fit(data)
    except Exception as ex:
        # Log the failure, then re-raise so the process exits non-zero.
        logging.error("Failed with exception.")
        logging.error(repr(ex))
        raise
    save(kmeans, destination, xy)
if __name__ == '__main__':
    main()
|
from time import sleep
import threading
def scheduler(f, args, n):
    """Run f(*args) on a background thread after n milliseconds.

    Returns the started threading.Thread so callers can join() it.
    """
    def _delayed_call():
        # sleep() wants seconds; n is given in milliseconds.
        sleep(n / 1000)
        f(*args)

    thread = threading.Thread(target=_delayed_call)
    thread.start()
    return thread
if __name__ == "__main__":
    # Demo: schedule a banner at 9 s plus one message per second from
    # 1 s to 8 s, then block until every timer thread has finished.
    jobs = []
    jobs.append(scheduler(print, ('''
 ____ ___ _ ____ ____ ___ _ _ _ ____ _ _
| _ \ / _ \ / \ | _ \ | _ \ / _ \| | | | / \ | _ \ / \ | |
| |_) | | | |/ _ \ | | | | | |_) | | | | | | | / _ \ | | | |/ _ \ | |
| _ <| |_| / ___ \| |_| | | _ <| |_| | |___| |___ / ___ \ | |_| / ___ \|_|
|_| \_\\\___/_/ \_\____/ |_| \_\\\___/|_____|_____/_/ \_\ |____/_/ \_(_)
''',), 9000))
    jobs.append(scheduler(print, ("One second has passed.",), 1000))
    jobs.append(scheduler(print, ("Two seconds have passed.",), 2000))
    jobs.append(scheduler(print, ("Three seconds have passed.",), 3000))
    jobs.append(scheduler(print, ("Four seconds have passed.",), 4000))
    jobs.append(scheduler(print, ("Five seconds have passed.",), 5000))
    jobs.append(scheduler(print, ("Six seconds have passed.",), 6000))
    jobs.append(scheduler(print, ("Seven seconds have passed.",), 7000))
    jobs.append(scheduler(print, ("Eight seconds have passed.",), 8000))
    # Wait for every scheduled print before the process exits.
    for job in jobs:
        job.join()
|
import Image, ImageStat
import sys
import pdb
import const
import line_util
# Grayscale cutoffs (0-255): pixels at or below dark_threshold count as
# ink; regions averaging at or above light_threshold count as blank paper.
dark_threshold = 236
light_threshold = 240
def count_lines_without_dark_pixel(im, start_x, start_y, end_x, end_y, thresh):
    """Return how many rows in [start_y, end_y) contain no pixel whose
    first channel is at or below thresh within columns [start_x, end_x).

    The y bounds may be given in either order; they are normalized first.
    """
    start_y, end_y = min(start_y, end_y), max(start_y, end_y)
    misses = 0
    for row in range(start_y, end_y):
        # any() short-circuits at the first dark pixel, exactly like the
        # original's inner break.
        row_has_dark = any(
            im.getpixel((col, row))[0] <= thresh
            for col in range(start_x, end_x)
        )
        if not row_has_dark:
            misses += 1
    return misses
def get_ulc_if_untinted_oval(im, x, y):
    """Return upper left corner of oval bbox if x,y on oval's bottom wall.

    Look back to find trailing oval offering darkness at:
    left wall, up by half oval height and back by half oval width,
    with half oval width of checking;
    right wall, up by half oval height and forward by half oval width,
    with half oval width of checking;
    and top wall, up by oval height with half oval height of checking.

    Returns [left_wall_x, top_wall_y] on success, else [].

    NOTE(review): Python 2 code — expressions like oval_height/2 and
    const.dpi/32 rely on integer division; under Python 3 the bare '/'
    yields floats and the range() calls below raise TypeError.
    """
    # Expected oval size in pixels, from the configured physical size.
    oval_height = int(round(const.target_height_inches * const.dpi))
    oval_width = int(round(const.target_width_inches * const.dpi))
    left_wall = -1
    # Width of the blank margin required just outside each wall.
    exclusion_zone_width = int(round(0.03 * const.dpi))
    mid_oval_y = int(round(y - (oval_height/2)))
    top_oval_y = y - oval_height
    # --- left wall: scan backwards up to one oval width from (x, mid) ---
    for test_x in range(x-oval_width,x):
        p = im.getpixel((test_x,mid_oval_y))
        if p[0]<=dark_threshold:
            # first check: confirm at least one dark pixel
            # on each line from mid_oval to bottom and top of oval,
            # going out from test_x to test_x + mid_oval
            dark_misses = count_lines_without_dark_pixel(
                im,
                test_x,
                top_oval_y,
                test_x + int(round(oval_height/2)),
                y,
                dark_threshold)
            # Tolerate at most one row with no dark pixel.
            if dark_misses > 1:
                continue
            # confirm average intensity in exclusion zone > light_threshold
            xzone_stat = ImageStat.Stat(im.crop((test_x - exclusion_zone_width,
                                                 mid_oval_y,
                                                 test_x - 1,
                                                 mid_oval_y+1)))
            if xzone_stat.mean[0] >= light_threshold:
                left_wall = test_x
                break
    if left_wall < 0:return []
    right_wall = -1
    # now that we've found a left wall, anticipate the right wall
    # at left_wall + oval_width (within +/- dpi/32 pixels of slack)
    for test_x in range(left_wall+oval_width-(const.dpi/32),
                        left_wall+oval_width+(const.dpi/32)):
        p = im.getpixel((test_x,mid_oval_y))
        if p[0] <= dark_threshold:
            # first check: confirm at least one dark pixel
            # on each line from mid_oval to bottom and top of oval,
            # going out from test_x to test_x + mid_oval
            dark_misses = count_lines_without_dark_pixel(
                im,
                test_x - (oval_height/2),
                top_oval_y,
                test_x ,
                y,
                dark_threshold)
            if dark_misses > 1:
                continue
            # confirm average intensity in exclusion zone > light_threshold
            xzone_stat = ImageStat.Stat(im.crop((test_x + 1,
                                                 mid_oval_y,
                                                 test_x + exclusion_zone_width,
                                                 mid_oval_y+1)))
            if xzone_stat.mean[0] >= light_threshold:
                right_wall = test_x
                break
    if left_wall < 0 or right_wall < 0:
        return []
    top_wall = -1
    # --- top wall: scan downwards through the oval's upper half at x ---
    for test_y in range(y - (3*oval_height/2),y-(oval_height/2),1):
        p = im.getpixel((x,test_y))
        if p[0]<dark_threshold:
            # confirm average intensity in exclusion zone > light_threshold
            xzone_stat = ImageStat.Stat(im.crop((x,
                                                 test_y - exclusion_zone_width,
                                                 x + exclusion_zone_width,
                                                 test_y - 1)))
            if xzone_stat.mean[0] >= light_threshold:
                top_wall = test_y
                break
    if left_wall >= 0 and right_wall >= 0 and top_wall >=0:
        return [left_wall,top_wall]
    else:
        return []
def find_bottom_landmark_dash(page, starting_x, starting_y, dpi):
    """ Return x offset of inboard edge of dash at starting_y.
    Unfortunately, a bottom dash might stand alone, so we must start
    looking close to the edge and eliminate the requirement to pass a
    dark pixel before finding a white/black boundary.
    It's not in a dash unless it is dark with white above and below.
    Wait to be in a dash and then return first x not in a dash.
    Returns -1 when no dash edge is found within dpi/4 pixels.
    """
    return_x = -1
    # simplify testing by taking page argument as image if no image attr
    try:
        im = page.image
    except AttributeError:
        im = page
    if starting_x < (im.size[0] // 2):
        # go from starting point rightwards
        incr = 1
    else:
        # go from starting point leftwards
        incr = -1
    # skip pixels until dark pixel is found with white above and below
    ldt = const.line_darkness_threshold
    let = const.line_exit_threshold
    for test_x in range(starting_x, starting_x + (incr * (dpi // 4)), incr):
        p = im.getpixel((test_x, starting_y))
        p_above = im.getpixel((test_x, starting_y - (dpi // 10)))
        p_below = im.getpixel((test_x, starting_y + (dpi // 10)))
        # BUG FIX: original tested `p_below >= let`, comparing the whole
        # pixel tuple to an int (always true in Python 2, TypeError in
        # Python 3); test the red channel, matching p_above's check.
        if p[0] <= ldt and p_above[0] >= let and p_below[0] >= let:
            starting_x = test_x
            break
    # now scan all black pixels, breaking at first white, which is landmark
    for test_x in range(starting_x + incr, starting_x + (incr * (dpi // 4)), incr):
        p = im.getpixel((test_x, starting_y))
        if p[0] >= ldt:
            return_x = test_x
            break
    return return_x
def find_top_landmark_dash(page, starting_x, starting_y, dpi):
    """ Return x offset of inboard edge of dash at starting_y.
    For top dashes, we can start in what would be the second dash
    and insist on traveling through some black before resetting on
    white and looking for black again. Then, if we are more than 1/4"
    from the edge, we can check 1/4" farther to the edge and see if
    there's another black pixel there bounded by white pixels above
    and below, in which case the black pixel at the edge is the location
    we want.
    Returns -1 if no dash boundary could be located.
    """
    return_x = -1
    # simplify testing by taking page argument as image if no image attr
    try:
        im = page.image
    except AttributeError:
        im = page
    if starting_x < (im.size[0] // 2):
        # go from starting point leftwards
        incr = -1
        ending_x = 1
    else:
        # go from starting point rightwards
        incr = 1
        ending_x = im.size[0] - 1
    # we must pass through a dark pix followed by >= dpi/50 light pix,
    # then stop at next dark pix; if that location is > dpi/4,
    # check to see if pix at location - dpi/4 is dark
    # and positions 1/10" above and below are light; if so,
    # subtract dpi/4 from returned x coordinate.
    passed_dark_pix = False
    num_light_pix = 0
    required_light_pix = dpi // 50
    landmark_x = -1
    for test_x in range(starting_x, ending_x, incr):
        pix = im.getpixel((test_x, starting_y))
        dark = (pix[0] <= const.line_darkness_threshold)
        if dark:
            passed_dark_pix = True
            num_light_pix = 0
        elif passed_dark_pix:
            num_light_pix += 1
            if num_light_pix > required_light_pix:
                landmark_x = test_x
                break
    # BUG FIX: if no candidate was found the original fell through and
    # rescanned from x == -1; bail out instead.
    if landmark_x < 0:
        return return_x
    # continue in same direction until dark encountered again
    for test_x in range(landmark_x, ending_x, incr):
        pix = im.getpixel((test_x, starting_y))
        if pix[0] <= const.line_darkness_threshold:
            landmark_x = test_x - incr
            break
    # go with more extreme alternative if it has a dash
    # NOTE(review): test_x here is the loop variable left over from the
    # scan above (== landmark_x + incr when the scan broke) -- confirm
    if landmark_x > (1 + (dpi // 4)) and landmark_x < (im.size[0] - (1 + (dpi // 4))):
        alternate_x = test_x + incr * (1 + (dpi // 4))
        p = im.getpixel((alternate_x, starting_y))
        p_above = im.getpixel((alternate_x, starting_y - (dpi // 10)))
        p_below = im.getpixel((alternate_x, starting_y + (dpi // 10)))
        darkp = (p[0] < const.line_darkness_threshold)
        darkp_above = (p_above[0] < const.line_darkness_threshold)
        darkp_below = (p_below[0] < const.line_darkness_threshold)
        if darkp and ((not darkp_above) or (not darkp_below)):
            landmark_x = alternate_x
    return landmark_x
def find_horizontal_lines(page, starting_x, dpi):
    """Return the y offsets of all horizontal lines found at starting_x.

    Repeatedly delegates to line_util.scan_strips_for_horiz_line_y,
    advancing past each line found, until no further line is reported.
    """
    # simplify testing by taking page argument as image if no image attr
    try:
        im = page.image
    except:
        im = page
    found_lines = []
    y_offset = 300
    while True:
        line_y = line_util.scan_strips_for_horiz_line_y(
            im,
            dpi,
            starting_x,
            starting_y_offset = y_offset,
            height_to_scan=(im.size[1] - y_offset - (dpi/4)))
        if line_y == 0:
            # scanner reports 0 when no further line exists
            break
        found_lines.append(line_y)
        # resume scanning just below the line we found
        y_offset = line_y + (dpi/6)
    return found_lines
def find_potential_contests(lines, min_sep):
    """Two lines must be separated by >= min_sep to be contest bounds.

    Returns (low, high) tuples for each consecutive pair in `lines`
    whose separation is at least min_sep.
    """
    contests = []
    for idx in range(len(lines) - 1):
        lo = lines[idx]
        hi = lines[idx + 1]
        if lo > hi:
            lo, hi = hi, lo
        if (hi - lo) >= min_sep:
            contests.append((lo, hi))
    return contests
def find_untinted_voteops(page,starting_x,starting_y,ending_y,dpi):
    """Given deskewed image and starting x, return list of untinted voteops.
    The Humboldt Diebold images are so compressed that the tint is iffy.
    The SLO Diebold images use untinted vote ovals.
    So an alternative is to use a darkness test followed by a check for
    darkness at the correct related pixels, COUPLED WITH A TEST FOR
    NO SUBSTANTIAL DARKNESS IN TEST ZONES OUTBOARD FROM THE OVAL.
    Scan down deskewed image at starting x.
    When tinted pixel found, call is_bottom_wall_of_oval to determine
    existence of vote oval ending at coords.
    When only one oval is found in a sublist,
    check for additional oval at same y offset
    """
    # expected oval dimensions in pixels at this dpi
    # (oval_width is currently unused here but kept for parity with callers)
    oval_height = int(round(const.target_height_inches * dpi))
    oval_width = int(round(const.target_width_inches * dpi))
    retlist = []
    # simplify testing by taking page argument as image if no image attr
    try:
        im = page.image
    except:
        im = page
    skip = 0
    for y in range(starting_y,ending_y,1):
        # skip counts down rows to ignore after an oval is recorded
        if skip > 0:
            skip -= 1
            continue
        # NOTE(review): dark_threshold is a module-level value defined
        # elsewhere in this file -- confirm it is set before calling
        p = im.getpixel((starting_x,y))
        # on darkened pix, check for and append oval to retlist
        ulc_of_oval = []
        if p[0] < dark_threshold:
            # first check a horizontal line to confirm multiple darks
            crop = im.crop((int(starting_x - 10),
                            int(y),
                            int(starting_x + 10),
                            int(y+1)))
            mean = ImageStat.Stat(crop).mean[0]
            #print "Croplist",starting_x -10,y,starting_x+10,y+1,"mean red",mean
            # a mostly-light strip (mean red > 240) means an isolated speck
            if mean > 240:
                continue
            #print "Checking at", starting_x, y
            ulc_of_oval = get_ulc_if_untinted_oval(im,starting_x,y)
            if len(ulc_of_oval)<1:
                continue
            # you could OCR now and store Choices instead of coords
            retlist.append(ulc_of_oval)
            # if you add an oval, you can skip dpi/6 pixels, because
            # ovals will never be spaced closer than 1/6" apart
            skip = dpi/6
    #print "Retlist",retlist
    #pdb.set_trace()
    if len(retlist)!=1:
        return retlist
    # where only one oval has been located
    # search for a second vote oval horizontal-aligned with the first
    # we assume column width of not more than 2.75",
    # and further assume ovals aligned on quarter inch horiz
    # and further assume horizontally oriented ovals will not be
    # closer than 3/4" apart
    # Use the one and only y coordinate in the single entry retlist
    # Beware of drift due to tilt; may need to test several y's
    for y in range(retlist[0][1] + oval_height-2,
                   retlist[0][1] + oval_height + 2):
        # if another oval has been appended in the loop, break
        if len(retlist) > 1:
            break
        # probe at quarter-inch intervals from 3/4" to 2" out
        for n in range(3,8):
            test_x = starting_x + ((n*dpi)/4)
            p = im.getpixel((test_x,y))
            # on tinted pix, check for and append oval to current sublist;
            # on darkened untinted pix, add new sublist
            ulc_of_oval = []
            if p[0] < dark_threshold:
                # first check a horizontal line to confirm multiple darks
                croplist = (test_x - 10,
                            y,
                            test_x + 10,
                            y+1)
                crop = im.crop(croplist)
                mean = ImageStat.Stat(crop).mean[0]
                if mean > 240:
                    continue
                ulc_of_oval = get_ulc_if_untinted_oval(im,test_x,y)
                if len(ulc_of_oval)<1:
                    continue
                retlist.append(ulc_of_oval)
                break
    return retlist
if __name__ == "__main__":
if len(sys.argv) < 3:
print "usage: python diebold_util.py diebold_image_filename startx"
image = Image.open(sys.argv[1])
startx = int(sys.argv[2])
starting_y_offset = 300
lines = find_horizontal_lines(image,startx,dpi)
pot_contests = find_potential_contests(lines,150)
print pot_contests
for contest_start_y, contest_end_y in pot_contests:
print "Searching targets from y to y",contest_start_y, contest_end_y
vops = find_untinted_voteops(image,
startx,
contest_start_y,
contest_end_y,
dpi)
print "Found",vops
sys.exit(0)
|
from http.server import BaseHTTPRequestHandler, HTTPServer
from sensors import get_all_sensor_data
import json
class S(BaseHTTPRequestHandler):
    """Minimal handler that serves the sensor snapshot as JSON on '/'."""

    def do_HEAD(self):
        # emit status line plus the (text/html) content-type header
        self.send_response(200)
        self.send_header('Content-type', 'text/html')
        self.end_headers()

    def do_GET(self):
        # reuse HEAD to write the status line and headers
        self.do_HEAD()
        if self.path == '/':
            payload = json.dumps(get_all_sensor_data())
            self.wfile.write(payload.encode())
def run(server_class=HTTPServer, handler_class=S, port=8000, host='10.0.0.66'):
    """Start a blocking HTTP server serving sensor data.

    Generalization: the bind address was hard-coded; it is now the
    keyword parameter `host` with the same default, so existing callers
    are unaffected. Blocks forever in serve_forever().
    """
    server_address = (host, port)
    httpd = server_class(server_address, handler_class)
    print ('Starting httpd...')
    httpd.serve_forever()
if __name__ == "__main__":
    # start the sensor HTTP server with default host/port when run directly
    run()
|
#!/usr/bin/env python
# Import flask and template operators
from flask import Flask, render_template
import flask.views
# Import SQLAlchemy
# BUG FIX: the flask.ext.* import namespace was deprecated and removed
# in Flask 1.0; import the extension package directly instead.
from flask_sqlalchemy import SQLAlchemy
from flask_marshmallow import Marshmallow
from werkzeug.utils import import_string
# Define the WSGI application object
app = Flask(__name__)
# Configurations
app.config.from_object('config')
import config as config
# Define the database object which is imported
# by modules and controllers
db = SQLAlchemy(app)
ma = Marshmallow(app)
# Setup the MLTTools library for use by this and controllers
from tools.utilities import MLTTools
tools = MLTTools()
# Define the default methods for the routes
default_methods = ['GET', 'POST']
# Sample HTTP error handling
@app.errorhandler(404)
def not_found(error):
    """Unknown route: reply with the standard error envelope."""
    message = 'Invalid Route'
    return tools.makeResponse(results=None, errors=True, message=message)
@app.errorhandler(500)
def internal_error(error):
    """Unhandled server error: reply with the standard error envelope."""
    message = 'Internal Error'
    return tools.makeResponse(results=None, errors=True, message=message)
@app.route('/', methods=default_methods)
def root():
    """Render the landing page."""
    page = render_template('root/main.html')
    return page, 200
@app.route('/readme', methods=default_methods)
def readMe():
    """Render the README page."""
    page = render_template('root/readme.html')
    return page, 200
# Register the modules that will be loaded as plugins
# (each config.PLUGINS entry is a dotted import path resolved to a
# Blueprint instance by werkzeug's import_string)
for plugin in config.PLUGINS:
    this_plugin = import_string(plugin)
    app.register_blueprint(this_plugin)
@app.after_request
def after_request(response):
    """Attach CORS headers for the local dev frontend to every response."""
    cors_headers = (
        ('Access-Control-Allow-Origin', 'http://localhost:9000'),
        ('Access-Control-Allow-Headers', 'Content-Type,Authorization'),
        ('Access-Control-Allow-Methods', 'GET,PUT,POST,DELETE'),
    )
    for header_name, header_value in cors_headers:
        response.headers.add(header_name, header_value)
    return response
if __name__ == "__main__":
    # run the Flask development server when executed directly
    app.run()
|
import cv2
import time
from deepface import DeepFace
import os
import numpy as np
def capture_image(TIMER_READY=int(2), TIMER_COUNT=int(3)):
    """Show a 'get ready' banner, run an on-screen countdown, then grab
    one frame from the default camera and return it.

    TIMER_READY / TIMER_COUNT are the banner and countdown durations in
    seconds. (The original wrapped everything in a one-iteration
    `while True`; that vacuous loop is dropped -- same execution trace.)
    """
    cap = cv2.VideoCapture(0)
    font = cv2.FONT_HERSHEY_SIMPLEX

    # phase 1: banner shown for TIMER_READY+1 one-second ticks
    remaining = TIMER_READY
    last_tick = time.time()
    while remaining >= 0:
        ret, img = cap.read()
        cv2.putText(img, "GET READY FOR IMAGE",
                    (0,100), font,
                    1, (0, 255, 255),
                    4, cv2.LINE_AA)
        cv2.imshow('a', img)
        cv2.waitKey(125)
        now = time.time()
        if now - last_tick >= 1:
            last_tick = now
            remaining = remaining - 1

    # phase 2: visible countdown drawn on each frame
    remaining = TIMER_COUNT
    last_tick = time.time()
    while remaining >= 0:
        ret, img = cap.read()
        cv2.putText(img, str(remaining),
                    (50, 250), font,
                    7, (0, 255, 255),
                    4, cv2.LINE_AA)
        cv2.imshow('a', img)
        cv2.waitKey(125)
        now = time.time()
        if now - last_tick >= 1:
            last_tick = now
            remaining = remaining - 1

    # grab the final frame; annotate a copy so the returned image is clean
    ret, img = cap.read()
    preview = img.copy()
    cv2.putText(preview, "CLICKED IMAGE",
                (0, 100), font,
                1, (0, 255, 255),
                4, cv2.LINE_AA)
    cv2.imshow('a', preview)
    cv2.waitKey(2000)
    # cv2.imwrite('face_database/camera.jpg', img)

    cap.release()
    cv2.destroyAllWindows()
    return img
def get_face(img=None):
    """Run DeepFace face detection on img with the default backend."""
    # available backends: 'opencv', 'ssd', 'dlib', 'mtcnn', 'retinaface';
    # the first ('opencv') is used, as before
    return DeepFace.detectFace(img, detector_backend='opencv')
def image_capture(TIMER_READY=int(2), TIMER_COUNT=int(3)):
    """Capture a frame, save it, and match it against face_database.

    Returns (1, person_dict) when a database image verifies against the
    captured frame, else (0, person_dict_with_None_values). Database
    filenames are expected to encode metadata as Name_Age_Sex.ext.
    """
    frame = capture_image(TIMER_READY, TIMER_COUNT)
    cv2.imwrite('face_curr/camera.jpg', frame)
    person = {'Name': None,
              'Age': None,
              'Sex': None}
    matched = 0
    for candidate in os.listdir('face_database'):
        verification = DeepFace.verify(
            'face_curr/camera.jpg',
            os.path.join('face_database', candidate),
            model_name = "VGG-Face",
            detector_backend = 'opencv',
            distance_metric = 'euclidean')
        if verification['verified']:
            print('match found')
            # drop the extension, then split the stem on underscores
            parts = (candidate.split('.')[:-1])[0].split('_')
            person['Name'] = parts[0]
            person['Age'] = parts[1]
            person['Sex'] = parts[2]
            print(person)
            matched = 1
            break
    if matched == 0:
        print('new patient')
    # os.remove('face_curr/camera.jpg')
    return matched, person
|
# Generated by Django 2.0.6 on 2018-07-20 19:16
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Initial schema for the challenge app: Challenge, CheckPoint,
    # Comment and MemberChallenge models. Auto-generated by Django;
    # do not edit the operations by hand.

    initial = True

    dependencies = [
        ('team', '0001_initial'),
    ]

    operations = [
        # A challenge belongs to a team; creator is nullable and kept on
        # member deletion (SET_NULL).
        migrations.CreateModel(
            name='Challenge',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=150)),
                ('description', models.TextField()),
                ('start_date', models.DateField()),
                ('end_date', models.DateField()),
                ('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='team.TeamMember')),
                ('team', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='team.Team')),
            ],
        ),
        # Checkpoints are milestones within a challenge.
        migrations.CreateModel(
            name='CheckPoint',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('goal', models.CharField(max_length=200)),
                ('description', models.TextField()),
                ('challenge', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='challenge.Challenge')),
            ],
        ),
        # Comments are attached to a checkpoint by a team member.
        migrations.CreateModel(
            name='Comment',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('text', models.TextField()),
                ('created_date', models.DateTimeField(auto_now=True)),
                ('checkPoint', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='comments', to='challenge.CheckPoint')),
                ('team_member', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='team.TeamMember')),
            ],
        ),
        # Per-member participation record in a challenge.
        migrations.CreateModel(
            name='MemberChallenge',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('progress', models.TextField()),
                ('current_pos', models.IntegerField(default=0)),
                ('challenge', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='challenge.Challenge')),
                ('team_member', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='team.TeamMember')),
            ],
        ),
    ]
|
import argparse
from tools.data_io import save_object, load_object
from tools.utils import get_logger, read_file_contents_list
import numpy as np
from scipy.stats import multivariate_normal
from scipy.spatial.distance import mahalanobis
import pandas as pd
import matplotlib.pyplot as plt
from mpl_toolkits.axes_grid1 import make_axes_locatable
from tools.clinical import ClinicalDataReaderSPORE
import math
# module-level logger shared by all functions in this script
logger = get_logger('Pairwise distance')
def check_if_same_subject(file_name1, file_name2):
    """Return True when both scan file names map to the same subject id."""
    id1 = ClinicalDataReaderSPORE._get_subject_id_from_file_name(file_name1)
    id2 = ClinicalDataReaderSPORE._get_subject_id_from_file_name(file_name2)
    return id1 == id2
def get_idx_pair_of_intra_inter_groups(file_list):
    """Split all unordered index pairs of file_list into two lists.

    Returns (intra_pair_list, inter_pair_list) where each element is an
    [idx1, idx2] pair, grouped by whether the two scans belong to the
    same subject.
    """
    intra_pair_list = []
    inter_pair_list = []
    num_files = len(file_list)
    for idx1, file_name1 in enumerate(file_list):
        for idx2 in range(idx1 + 1, num_files):
            pair = [idx1, idx2]
            if check_if_same_subject(file_name1, file_list[idx2]):
                intra_pair_list.append(pair)
            else:
                inter_pair_list.append(pair)
    logger.info(f'Num of intra-subject pair: {len(intra_pair_list)}')
    logger.info(f'Num of inter-subject pair: {len(inter_pair_list)}')
    return intra_pair_list, inter_pair_list
def main():
    """CLI: histogram inter- vs intra-subject metric distances.

    Reads a CSV indexed by 'Scan', takes the absolute difference of the
    chosen metric column for every scan pair (grouped by subject), and
    saves an overlaid histogram to --out-png.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--in-csv', type=str)
    parser.add_argument('--metric-column', type=str)
    parser.add_argument('--out-png', type=str)
    args = parser.parse_args()
    df = pd.read_csv(args.in_csv, index_col='Scan')
    data_dict = df.to_dict('index')
    file_list = list(data_dict.keys())
    # one metric value per scan, in file_list order
    value_list = np.array([data_dict[file_name][args.metric_column] for file_name in file_list])
    intra_pair_list, inter_pair_list = get_idx_pair_of_intra_inter_groups(file_list)
    intra_pair_dist_list = np.zeros((len(intra_pair_list),), dtype=float)
    inter_pair_dist_list = np.zeros((len(inter_pair_list),), dtype=float)
    # absolute metric difference for each pair
    for idx_pair in range(len(intra_pair_list)):
        pair = intra_pair_list[idx_pair]
        intra_pair_dist_list[idx_pair] = math.fabs(value_list[pair[0]] - value_list[pair[1]])
    for idx_pair in range(len(inter_pair_list)):
        pair = inter_pair_list[idx_pair]
        inter_pair_dist_list[idx_pair] = math.fabs(value_list[pair[0]] - value_list[pair[1]])
    logger.info(f'Inter dist mean: {np.mean(inter_pair_dist_list)}')
    logger.info(f'Intra dist mean: {np.mean(intra_pair_dist_list)}')
    logger.info(f'Ratio: {np.mean(inter_pair_dist_list) / np.mean(intra_pair_dist_list)}')
    data_array_sequence = [inter_pair_dist_list, intra_pair_dist_list]
    fig, ax = plt.subplots(figsize=(18, 12))
    color_list = ['red', 'blue']
    label_list = ['Inter-subject', 'Intra-subject']
    hist_info = ax.hist(
        data_array_sequence,
        bins=10,
        color=color_list,
        label=label_list,
        alpha=0.5,
        rwidth=0.8
    )
    # print(hist_info)
    ax.legend(loc='best')
    ax.set_ylabel('Count')
    # NOTE(review): the label says 'Mahalanobis distance' but the values
    # plotted are absolute differences of one metric column -- confirm
    ax.set_xlabel('Mahalanobis distance')
    logger.info(f'Save plot to {args.out_png}')
    plt.savefig(args.out_png, bbox_inches='tight', pad_inches=0.1)
    plt.close()
# run the CLI entry point when executed as a script
if __name__ == '__main__':
    main()
import os
def main():
    """Entry point: start the ATM menu with an initial balance of 5000."""
    starting_cash = 5000
    bank(starting_cash)
def bank(cash):
    """Interactive ATM-style menu operating on the running balance `cash`.

    Re-implemented as a loop instead of recursion so long sessions cannot
    exhaust the call stack; the visible prompt/response sequence is
    unchanged. Option 4 calls exit() (SystemExit), which skips the
    module-level os.system("pause") just as before. An unrecognized menu
    number prints an error and returns, preserving the original flow.
    Non-numeric input previously crashed with ValueError; it is now
    reported as invalid and re-prompted.
    """
    while True:
        print("Choose a number: ")
        print("1 - Withdraw")
        print("2 - Deposit")
        print("3 - Balance Inquiry")
        print("4 - Exit")
        try:
            choice = int(input("Number: "))
        except ValueError:
            # BUG FIX: non-numeric input used to raise; treat as invalid
            print("ERROR: Invalid input")
            continue
        if (choice == 1):
            try:
                money = float(input("Money to withdraw: "))
            except ValueError:
                print("ERROR: Invalid input")
                continue
            cash -= money
            print("Money successfully withdrawn.")
        elif (choice == 2):
            try:
                money = float(input("Money to deposit: "))
            except ValueError:
                print("ERROR: Invalid input")
                continue
            cash += money
            print("Money successfully deposited.")
        elif (choice == 3):
            print("Balance: Php{0:.02f}".format(cash))
        elif (choice == 4):
            exit()
        else:
            # unknown menu number ends the session (original behavior)
            print("ERROR: Invalid input")
            return
# run the menu, then pause the (Windows) console before it closes;
# the pause is skipped when the user exits via option 4 (SystemExit)
main()
os.system("pause")
from bs4 import BeautifulSoup # 解析网页
from fake_useragent import UserAgent # 随机生成User-agent
import chardet # 有时会遇到编码问题 需要检测网页编码
import re, urllib.request, socket, time, random, csv, json,requests
from requests import RequestException
import xlwings as xw
import pandas as pd
"""
网址:
https://m.weibo.cn/p/searchall?containerid=100103type%3D1%26q%3D%E5%B0%8F%E8%B5%A2%E5%8D%A1%E8%B4%B7
"""
url = 'https://m.weibo.cn/api/container/getIndex?containerid=100103type%3D61%26q%3D%E5%B0%8F%E8%B5%A2%E5%8D%A1%E8%B4%B7%26t%3D0&page_type=searchall'
def get_one_page(url):
    """Fetch one page of the Weibo search API; return body text or None."""
    headers = {
        'User-Agent':'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/47.0.2526.108 Safari/537.36 2345Explorer/8.7.0.16013'
    }
    # any request-level failure is reported as None
    try:
        resp = requests.get(url, headers=headers)
    except RequestException:
        return None
    # only a 200 status counts as a successful fetch
    if resp.status_code == 200:
        return resp.text
    return None
# fields under 'mblog': {'created_at': '07-01', 'text': "xxxx", ...}
created_at = [] # post date
text = [] # post content
source = [] # posting client/source
# fields under 'mblog' -> 'user': { ... }
ID = [] # user id
screen_name = [] # nickname
statuses_count = [] # number of posts by the user
verified = [] # verified-account flag
gender = [] # gender
description = [] # profile signature
followers_count = [] # follower count
follow_count = [] # following count
reposts_count = [] # repost count
comments_count = [] # comment count
attitudes_count = [] # like count
def parse_one_page(data):
    """Extract post and user fields from one API page into module lists."""
    # data['data']['cards'][0]['card_group'] is the list of post dicts
    card_group = data['data']['cards'][0]['card_group']
    for i, card in enumerate(card_group):
        mblog = card['mblog']
        created_at.append(mblog['created_at'])
        # strip HTML tags from the post body
        label_filter = re.compile(r'</?\w+[^>]*>', re.S)
        comment = re.sub(label_filter, '', mblog['text'])
        text.append(comment)
        source.append(mblog['source'])
        reposts_count.append(mblog['reposts_count'])    # repost count
        comments_count.append(mblog['comments_count'])  # comment count
        attitudes_count.append(mblog['attitudes_count'])  # like count
        user = mblog['user']
        ID.append(user['id'])
        screen_name.append(user['screen_name'])
        statuses_count.append(user['statuses_count'])
        verified.append(user['verified'])
        gender.append(user['gender'])
        description.append(user['description'])
        followers_count.append(user['followers_count'])
        follow_count.append(user['follow_count'])
        print("第{}条数据已解析".format(i))
def save_to_excel():
    """Write the collected columns into Sheet1 of the target workbook.

    IMPROVED: the 14 copy-pasted per-column writes are replaced by a
    single loop over (column letter, data list) pairs; output layout is
    identical (header in row 1, data written down each column from row 2).
    """
    saveFileName = 'C:\\Users\\ms\\Desktop\\Weibo.xlsx'
    wb = xw.Book(saveFileName)
    detail_sheet = xw.Sheet("Sheet1")
    colList = ['screen_name','id','gender','verified','description','statuses_count','follow_count','followers_count',
               'created_at','text','source','reposts_count','attitudes_count','comments_count']
    dataList = [screen_name,ID,gender,verified,description,statuses_count,follow_count,followers_count,
                created_at,text,source,reposts_count,attitudes_count,comments_count]
    # header row
    xw.Range('A1').expand('table').value = colList
    colNum = ['A','B','C','D','E','F','G','H','I','J','K','L','M','N']
    # write each data list down its column starting at row 2
    for letter, column_data in zip(colNum, dataList):
        detail_sheet.range(letter + '2').options(transpose=True).value = column_data
    wb.save()
# fetch and parse the first 10 result pages, then export to Excel
for i in range(1,11):
    print("------解析第{}页------".format(i))
    html = get_one_page(url+'&page='+str(i))
    # BUG FIX: json.loads() no longer accepts an `encoding` argument
    # (deprecated since 3.1, removed in Python 3.9); the input is
    # already a str, so the argument was never needed.
    data = json.loads(html)
    parse_one_page(data)
save_to_excel()
import random as rnd

# practice script: list iteration and the random module
price = [20,50,10,30,99,1,2,3]
print(len(price))
# prints the message once per element (values themselves are unused)
for j in range(len(price)):
    print("iterate through the list")
# loop 10 times, printing the index and a random float in [0, 1)
for i in range(10):
    print(i)
    print(rnd.random())
# loop 10 times, printing only the index
for i in range(10):
    print(i)
# step through even numbers from 2 to 8
for i in range(2,10,2):
    print(i)
# random integer between 1 and 50 inclusive
print(rnd.randint(1,50))
from info2soft import config
from info2soft import https
class Gauss (object):
    """Client wrapper for the /gauss sync-rule REST endpoints.

    Each method takes a `body` dict (see the API manual) and returns the
    parsed response from the corresponding https helper call.
    """
    def __init__(self, auth):
        # auth token/object passed through to every https helper call
        self.auth = auth

    def createGaussRule(self, body):
        """Gauss sync rule - create. `body`: dict, see API manual."""
        url = '{0}/gauss/rule'.format(config.get_default('default_api_host'))
        res = https._post(url, body, self.auth)
        return res

    def modifyGaussRule(self, body):
        """Gauss sync rule - modify. `body`: dict, see API manual."""
        url = '{0}/gauss/rule'.format(config.get_default('default_api_host'))
        res = https._put(url, body, self.auth)
        return res

    def deleteGaussRule(self, body):
        """Gauss sync rule - delete. `body`: dict, see API manual."""
        url = '{0}/gauss/rule'.format(config.get_default('default_api_host'))
        res = https._delete(url, body, self.auth)
        return res

    def listGaussRules(self, body):
        """Gauss sync rule - list. `body`: dict, see API manual."""
        url = '{0}/gauss/rule'.format(config.get_default('default_api_host'))
        res = https._get(url, body, self.auth)
        return res

    def listGaussStatus(self, body):
        """Gauss sync rule - status. `body`: dict, see API manual."""
        url = '{0}/gauss/rule/status'.format(config.get_default('default_api_host'))
        res = https._get(url, body, self.auth)
        return res

    def describeGaussTraffic(self, body):
        """Gauss sync rule - traffic graph (mrtg). `body`: dict, see API manual."""
        url = '{0}/gauss/rule/mrtg'.format(config.get_default('default_api_host'))
        res = https._post(url, body, self.auth)
        return res

    def describeGuassRule(self, body, uuid):
        """Gauss sync rule - fetch single rule by uuid.

        uuid: required node uuid; `body`: dict, see API manual.
        NOTE(review): name typo ('Guass') kept for caller compatibility;
        exit() on missing uuid also kept -- consider raising instead.
        """
        if uuid is None:
            exit()
        url = '{0}/gauss/rule/{1}'.format(config.get_default('default_api_host'), uuid)
        res = https._get(url, body, self.auth)
        return res

    def dbCheckGuass(self, body):
        """Database pre-check. `body`: dict, see API manual.

        BUG FIX: URL was missing the '/' before 'gauss' (it produced
        e.g. 'http://hostgauss/rule/db_check'); all sibling endpoints
        include the slash.
        """
        url = '{0}/gauss/rule/db_check'.format(config.get_default('default_api_host'))
        res = https._post(url, body, self.auth)
        return res

    def listGuassRuleLog(self, body):
        """Gauss sync rule - logs. `body`: dict, see API manual.

        BUG FIX: URL was missing the '/' before 'gauss', same defect as
        dbCheckGuass.
        """
        url = '{0}/gauss/rule/log'.format(config.get_default('default_api_host'))
        res = https._post(url, body, self.auth)
        return res
|
import json
import requests
# suppress InsecureRequestWarning noise, since most calls below use verify=False
requests.packages.urllib3.disable_warnings()
#ISE get requires headers. AMP does not.
def iseget(url, headers):
    """GET from ISE (requires headers); return parsed JSON or an "Error: ..." string."""
    try:
        response = requests.get(url, headers=headers, verify=False)
        # Consider any status other than 2xx an error
        if not response.status_code // 100 == 2:
            return "Error: Unexpected response {}".format(response)
        try:
            return response.json()
        # BUG FIX: narrowed from bare except (which also swallowed
        # KeyboardInterrupt/SystemExit) to JSON decode failures only
        except ValueError:
            return "Error: Non JSON response {}".format(response.text)
    except requests.exceptions.RequestException as e:
        # A serious problem happened, like an SSLError or InvalidURL
        return "Error: {}".format(e)
def ampget(url):
    """GET from AMP (no headers needed); return parsed JSON or an "Error: ..." string."""
    try:
        response = requests.get(url)
        # Consider any status other than 2xx an error
        if not response.status_code // 100 == 2:
            return "Error: Unexpected response {}".format(response)
        try:
            return response.json()
        # BUG FIX: narrowed from bare except to JSON decode failures only
        except ValueError:
            return "Error: Non JSON response {}".format(response.text)
    except requests.exceptions.RequestException as e:
        # A serious problem happened, like an SSLError or InvalidURL
        return "Error: {}".format(e)
def smcget(url, headers):
    """GET from SMC (TLS verification disabled); return parsed JSON or an "Error: ..." string."""
    try:
        response = requests.get(url, headers=headers, verify=False)
        # Consider any status other than 2xx an error
        if not response.status_code // 100 == 2:
            return "Error: Unexpected response {}".format(response)
        try:
            return response.json()
        # BUG FIX: narrowed from bare except to JSON decode failures only
        except ValueError:
            return "Error: Non JSON response {}".format(response.text)
    except requests.exceptions.RequestException as e:
        # A serious problem happened, like an SSLError or InvalidURL
        return "Error: {}".format(e)
def smcauth(SMC_hostname, SMC_username, SMC_password):
    """Authenticate to the SMC; return the Set-Cookie header value.

    On failure returns an "Error: ..." string like the other helpers.
    """
    try:
        url = "https://{}/token/v2/authenticate".format(SMC_hostname)
        smcheaders = {
            "Content-Type": "application/x-www-form-urlencoded",
        }
        data = "username="+SMC_username+"&password="+SMC_password
        response = requests.post(url, data, headers=smcheaders, verify=False)
        # Consider any status other than 2xx an error
        if not response.status_code // 100 == 2:
            return "Error: Unexpected response {}".format(response)
        try:
            # BUG FIX: narrowed bare except to KeyError (missing header),
            # dropped the no-op [:] copy, and corrected the misleading
            # "Non JSON response" error message for this non-JSON path
            return response.headers['Set-Cookie']
        except KeyError:
            return "Error: No Set-Cookie header in response {}".format(response.text)
    except requests.exceptions.RequestException as e:
        # A serious problem happened, like an SSLError or InvalidURL
        return "Error: {}".format(e)
def smcpost(url, headers, data):
    """POST JSON-encoded `data` to SMC; return parsed JSON or an "Error: ..." string."""
    try:
        response = requests.post(url, json.dumps(data), headers=headers, verify=False)
        # Consider any status other than 2xx an error
        if not response.status_code // 100 == 2:
            return "Error: Unexpected response {}".format(response)
        try:
            return response.json()
        # BUG FIX: narrowed from bare except to JSON decode failures only
        except ValueError:
            return "Error: Non JSON response {}".format(response.text)
    except requests.exceptions.RequestException as e:
        # A serious problem happened, like an SSLError or InvalidURL
        return "Error: {}".format(e)
def amppatch(url, headers, data):
    """PATCH JSON-encoded `data` to AMP; return parsed JSON or an "Error: ..." string.

    NOTE(review): this helper uses verify=True unlike the others -- confirm
    that is intentional (AMP's public API has a valid certificate).
    """
    try:
        response = requests.patch(url, json.dumps(data), headers=headers, verify=True)
        # Consider any status other than 2xx an error
        if not response.status_code // 100 == 2:
            return "Error: Unexpected response {}".format(response)
        try:
            return response.json()
        # BUG FIX: narrowed from bare except to JSON decode failures only
        except ValueError:
            return "Error: Non JSON response {}".format(response.text)
    except requests.exceptions.RequestException as e:
        # A serious problem happened, like an SSLError or InvalidURL
        return "Error: {}".format(e)
def iseput(url, headers, data):
    """PUT JSON-encoded `data` to ISE; return parsed JSON, "Null", or an "Error: ..." string.

    ISE PUTs often return an empty (non-JSON) body on success, in which
    case "Null" is returned, matching the original behavior.
    """
    try:
        response = requests.put(url, json.dumps(data), headers=headers, verify=False)
        # Consider any status other than 2xx an error
        if not response.status_code // 100 == 2:
            return "Error: Unexpected response {}".format(response)
        try:
            return response.json()
        # BUG FIX: narrowed from bare except to JSON decode failures only
        except ValueError:
            if response.status_code:
                return "Null"
            return "Error: Non JSON response {}".format(response.text)
    except requests.exceptions.RequestException as e:
        # A serious problem happened, like an SSLError or InvalidURL
        return "Error: {}".format(e)
|
#! /usr/bin/env python
import rospy
from std_msgs.msg import Header
from gazebo_msgs.srv import GetModelState, GetModelStateRequest
from snake_control.msg import snake_head_rel_pos
import numpy as np
# ROS node: publish the target's position expressed in the snake head frame.
rospy.init_node('snake_head_pos_pub')
# NOTE(review): Publisher is created without queue_size; rospy warns and
# falls back to synchronous publishing -- confirm intended
pos_pub=rospy.Publisher('/snake_head_pos', snake_head_rel_pos)
rospy.wait_for_service('/gazebo/get_model_state')
get_model_srv = rospy.ServiceProxy('/gazebo/get_model_state', GetModelState)
pos = snake_head_rel_pos()
# query: pose of 'robot' relative to the target link
model = GetModelStateRequest()
model.model_name = 'robot'
model.relative_entity_name = 'target::link'
model2 = GetModelStateRequest()#to get coordinate of snake head
model2.model_name='robot'
model2.relative_entity_name = ''
# r = rospy.Rate(1/(2*np.pi))
r = rospy.Rate(30)
# create training dataset
# pos_log = []
# pos_num = 400
while not rospy.is_shutdown():
    result = get_model_srv(model)
    # NOTE(review): result2 is fetched but never used below -- confirm
    result2 = get_model_srv(model2)
    # negate the pose to get the vector from robot to target in world frame
    x_rel_world = -result.pose.position.x
    y_rel_world = -result.pose.position.y
    z_rel_world = -result.pose.position.z
    qx = result.pose.orientation.x
    qy = result.pose.orientation.y
    qz = result.pose.orientation.z
    qw = result.pose.orientation.w
    # rotation matrix built from the quaternion components
    rotation_matrix = np.array([[1-2*qy*qy-2*qz*qz, 2*qx*qy-2*qz*qw, 2*qx*qz+2*qy*qw],
                                [2*qx*qy+2*qz*qw, 1-2*qx*qx-2*qz*qz,2*qy*qz-2*qx*qw],
                                [2*qx*qz-2*qy*qw, 2*qy*qz+2*qx*qw, 1-2*qx*qx-2*qy*qy]])
    # rotate the world-frame offset into the head frame (R^T @ v)
    pos_rel = np.dot(rotation_matrix.T,np.array([x_rel_world, y_rel_world, z_rel_world]))
    pos.x_rel = pos_rel[0]
    pos.y_rel = pos_rel[1]
    time = rospy.get_rostime()
    rospy.loginfo("%s, %f, %f"%(time.secs, pos.x_rel, pos.y_rel))
    # create training dataset
    # if len(pos_log) < pos_num:
    #     pos_log.append([pos.x_rel, pos.y_rel])
    # elif len(pos_log) == pos_num:
    #     with open('pos_log.txt', 'w') as f:
    #         for dat in pos_log:
    #             f.write(str(dat[0]) + ',' + str(dat[1]) + '\n')
    #         f.close()
    # else:
    #     pass
    pos_pub.publish(pos)
    r.sleep()
|
# -*- coding: utf-8 -*-
"""
Provides methods to process raw instacart data into a single datafile
containing a subset of categories and products from the original file
@author: Fenna ten Haaf
Written for the Econometrics & Operations Research Bachelor Thesis
Erasmus School of Economics
"""
import pandas as pd
# Own modules:
import visualise
import utils
import dataPrep
class instacart_processor:
    """Process the raw instacart_2017_05_01 dataset into a single csv file
    containing a subselection of categories (aisles) and products, in the
    format expected by the other dataPrep methods."""

    def __init__(self,indir,outdir,output_name,
                 cat_subselect = 50, prod_subselect = 10,
                 custExtension = False,
                 n_cust = 300,
                 reset_indexes = True,
                 seed = 1234, n_min=20,
                 cust_id = "i",
                 prod_id_name ="j",
                 basket_id_name = "basket_hash",
                 cat_name = "c"):
        """Process the instacart_2017_05_01 raw dataset into a form to use
        with the other dataPrep methods.

        Parameters
        ----------
        indir, outdir : str
            Input directory with the raw csv files / output directory.
        output_name : str
            Name of the resulting csv file.
        cat_subselect : int or None
            Number of categories (aisles) to randomly keep; None keeps all.
        prod_subselect : int or None
            Number of most-frequent products to keep per category; None keeps all.
        seed, n_min : random seed for the category sample; minimum average
            appearances per product used to filter rare categories.
        cust_id, prod_id_name, basket_id_name, cat_name : column names used
            in the output file.
        """
        print(f"Processing instacart dataset, at {utils.get_time()}")

        #-------------------------INITIALISATION-----------------------
        self.indir = indir
        self.outdir = outdir
        self.output_name = output_name
        self.cat_subselect = cat_subselect
        self.prod_subselect = prod_subselect
        self.seed = seed # for the random sample of categories
        self.n_min = n_min
        self.cust_id = cust_id
        self.prod_id_name = prod_id_name
        self.basket_id_name = basket_id_name
        self.cat_name = cat_name
        self.reset_indexes = reset_indexes

        #---------------------READING IN RAW DATA---------------------
        # We treat the aisles as 'categories', but alternatively we could also
        # just consider the departments. There are 21 departments and 134 aisles
        self.aisles_df = pd.read_csv(f'{indir}/aisles.csv')
        self.departments_df = pd.read_csv(f'{indir}/departments.csv')
        # Order_products links products to basket ids.
        # There is a train set and a prior set that we can use, prior is larger
        # than train as it contains more orders per customer.
        self.orders_df = pd.read_csv(f'{indir}/order_products__prior.csv')
        # The orders file links orders to customer ids:
        self.cust_df = pd.read_csv(f'{indir}/orders.csv')
        # The products file links products to names and aisles:
        self.product_info_df = pd.read_csv(f'{indir}/products.csv')

        #---------------------COMBINING INTO ONE FILE---------------------
        ## Create a dataframe with basket hash and product id
        self.final_df = pd.DataFrame.from_dict({"basket_hash":self.orders_df["order_id"],
                                                "j": self.orders_df["product_id"]})
        ## Add customer IDs by doing a left join
        self.final_df = self.final_df.merge(self.cust_df[["order_id","user_id"]],
                                            how="left", left_on="basket_hash",
                                            right_on = "order_id",)
        ## Add product names, aisle ids and department ids by left join
        self.final_df = self.final_df.merge(self.product_info_df[["product_id",
                                                                  "product_name",
                                                                  "aisle_id",
                                                                  "department_id"]],
                                            how="left", left_on="j",
                                            right_on = "product_id",)
        ## Add aisle names by left join
        self.final_df = self.final_df.merge(self.aisles_df[["aisle_id","aisle"]],
                                            how="left", left_on="aisle_id",
                                            right_on = "aisle_id",)
        ## Add deparment names by left join
        self.final_df = self.final_df.merge(self.departments_df[["department_id",
                                                                 "department"]],
                                            how="left", left_on="department_id",
                                            right_on = "department_id",)
        ## Rename and drop some things, so it is in the right format
        self.final_df = self.final_df.rename(columns={"aisle": cat_name, # could also rename aisle
                                                      "user_id": cust_id,
                                                      "j":"prod_id",
                                                      "product_name":"j"})
        # Explicit keyword: positional axis argument is deprecated in pandas.
        self.final_df = self.final_df.drop(["product_id","order_id"], axis=1)

        #---------------------TAKING SUBSELECTIONS---------------------
        num_unique = len(self.final_df["j"].unique())
        print(f"There are {num_unique} products in the full instacart dataset,"
              f" we will now be taking subselections")
        if cat_subselect is not None:
            self.final_df = self.category_subselect(self.final_df,
                                                    self.cat_subselect,
                                                    self.seed)
        if prod_subselect is not None:
            # BUG FIX: pass the configured amount; the original always fell
            # back to product_subselect's default of 10.
            self.final_df = self.product_subselect(self.final_df,
                                                   self.prod_subselect)

        #---------------------SAVING RESULTS---------------------
        # First rename before saving so that c and j are the integer values
        self.final_df = self.final_df.rename(columns={cat_name: "aisle", # could also rename aisle
                                                      "aisle_id": cat_name,
                                                      "j":"prod_name",
                                                      "prod_id":self.prod_id_name})
        ## RESET INDICES SO THAT IT WORKS WITH THE MAPPING WHICH NEEDS THE INDICES
        ## TO MATCH WITH THE ROWS
        if self.reset_indexes:
            print("resetting indexing")
            self.reset_indexing()
        print("saving to csv")
        utils.save_df_to_csv(self.final_df, self.outdir,
                             self.output_name, add_time = False )
        print(f"Finished and output saved, at {utils.get_time()}")

    def reset_indexing(self):
        """Remap product ids to a contiguous 0..N-1 range (ordered by
        category), because otherwise the training and the mapping won't
        work."""
        temp_df = self.final_df.sort_values(by=self.cat_name)
        unique_prods = temp_df[self.prod_id_name].unique()
        self.final_df = self.final_df.replace(
            {self.prod_id_name: {unique_prods[i] : i
                                 for i in range(len(unique_prods))}})

    def category_subselect(self, final_df, cat_subselect = 30, seed = 1234):
        """Take a random sample of `cat_subselect` unique categories and
        return only the purchases that belong to those categories.

        Categories appearing in too few baskets (fewer than
        prod_subselect * n_min) are excluded before sampling, as are the
        vague 'missing' and 'more household' aisles."""
        print(f"Randomly selecting {cat_subselect} categories (aisles) from"
              " the instacart dataset, but not those that appear in too"
              " few baskets")
        ## Take a subselection of prodcuts from only a few categories
        ## (without subselection, the final file is 2 GB!)
        # Number of distinct baskets in which each category appears:
        cat_counts = final_df.groupby(self.cat_name)[self.basket_id_name].nunique()
        print(cat_counts)
        print(cat_counts.min())
        # We would like on average at least n_min of each product per category
        min_baskets = (self.prod_subselect*self.n_min)
        print(f"making to_remove, min_baskets = {min_baskets}")
        # BUG FIX: take the *index* (the aisle names). The original passed the
        # Series itself to isin(), which compared aisle names against the
        # basket counts, so nothing was ever filtered out.
        to_remove = cat_counts.index[cat_counts < min_baskets]
        smaller_aisles_df = self.aisles_df.loc[~self.aisles_df["aisle"].isin(to_remove)]
        ## We also don't want the category "missing" in our dataframe, because
        ## they are vague
        smaller_aisles_df = smaller_aisles_df.loc[smaller_aisles_df["aisle"] != "missing"]
        smaller_aisles_df = smaller_aisles_df.loc[smaller_aisles_df["aisle"] != "more household"]
        print(f"Taken {len(to_remove)} categories out of consideration,"
              f" as they appeared in fewer than {min_baskets} baskets")
        # Now we can randomly select a sample (random_state is a seed,
        # for consistent results)
        cat_subset = smaller_aisles_df["aisle"].sample(n=cat_subselect,
                                                       random_state = seed)
        final_df = final_df.loc[final_df[self.cat_name].isin(cat_subset)]
        print("Done, the following categories were selected:")
        print(final_df[self.cat_name].unique())
        return final_df

    def product_subselect(self, final_df, prod_subselect = 10):
        """Take a subsample of an instacart dataframe by only keeping the
        products that are in the top `prod_subselect` most frequent products
        for their respective category."""
        # NOTE: if a category holds fewer than n products, all of them are kept.
        print(f"Taking a subset of {prod_subselect} most frequent products"
              " per category")
        product_counts = dataPrep.get_product_counts(final_df)
        product_counts = product_counts.rename(columns={self.basket_id_name: "product_frequency"})
        ## Do a left join to add the product frequencies to the dataframe
        final_df = final_df.merge(product_counts,
                                  how="left", left_on=self.prod_id_name,
                                  right_on = self.prod_id_name)
        ## Collect the ids of the top-n products of every category.
        # BUG FIX: the original accumulated value_counts Series into a
        # DataFrame with the (now removed) DataFrame.append and then passed
        # that DataFrame to Series.isin; we collect the product ids directly.
        ids_to_keep = []
        for category in final_df[self.cat_name].unique():
            print(category)
            in_category = final_df.loc[final_df[self.cat_name] == category,
                                       self.prod_id_name]
            top_products = in_category.value_counts().head(prod_subselect)
            ids_to_keep.extend(top_products.index)
        final_df = final_df.loc[final_df[self.prod_id_name].isin(ids_to_keep)]
        return final_df
if __name__ == "__main__":
    # Toggle to skip the (slow) instacart preprocessing step.
    instacart_data_prep = True
    if instacart_data_prep:
        input_dir = "./instacart_2017_05_01"
        output_dir = "./instacart_2017_05_01"
        # Remember to adjust the filename when changing the parameters below.
        output_file = "instacart_cat50prod10seed1234"
        processor = instacart_processor(input_dir, output_dir, output_file,
                                        cat_subselect=50, prod_subselect=10,
                                        seed=1234)
        instacart_df = processor.final_df
|
from neuralpy.layers import Dense
from neuralpy.activation_functions import ReLU
from neuralpy.loss_functions import MSELoss
from neuralpy.models.model_helper import generate_layer_name, is_valid_layer
def test_generate_layer_name():
    """generate_layer_name builds '<type>_layer_<index+1>' in lowercase."""
    expected_names = {
        'Dense': 'dense_layer_2',
        'ReLU': 'relu_layer_2',
        'Softmax': 'softmax_layer_2',
    }
    for layer_type, expected in expected_names.items():
        assert generate_layer_name(layer_type, 1) == expected
def test_is_valid_layer():
    """Layers and activation functions are valid; loss functions are not."""
    # Use bare truthiness asserts instead of `== True` / `== False`
    # comparisons (flake8 E712; PEP 8 discourages equality with booleans).
    assert is_valid_layer(Dense(n_nodes=3))
    assert is_valid_layer(ReLU())
    assert not is_valid_layer(MSELoss())
import random
import collections
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from tensorflow.keras.layers import Dense
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.models import Sequential
from config import reward_for_eating, reward_for_dying
class Agent(object):
    """Deep-Q-learning agent for a Snake game.

    Wraps a small dense Keras network plus a bounded replay memory.  States
    are 12 binary features (danger / current direction / food direction);
    the network outputs values over the 4 movement actions.
    """

    def __init__(self,
                 weights_path='./weights/weights.h5',
                 load=True,
                 learning_rate=0.0005,
                 gamma=0.9,
                 neurons_each_layer=(12, 50, 300, 50),
                 batch_size=1000):
        # The first entry of neurons_each_layer doubles as the input size.
        self.input_neurons = neurons_each_layer[0]
        self._learning_rate = learning_rate
        self._gamma = gamma
        # Bounded replay buffer: oldest transitions are discarded first.
        self._memory = collections.deque(maxlen=2500)
        self._batch_size = batch_size
        self._weights_path = weights_path
        self._model = self.model(Adam(learning_rate), neurons_each_layer)
        if load:
            self.load_model()

    def model(self, optimizer, neurons_each_layer):
        """Build and compile the dense Q-network (4 softmax outputs, MSE loss)."""
        model = Sequential()
        for no_of_neurons in neurons_each_layer:
            model.add(Dense(no_of_neurons, activation='relu'))
        model.add(Dense(4, activation='softmax'))
        model.compile(loss='mse', optimizer=optimizer)
        return model

    def save_model(self):
        """Persist the network weights to self._weights_path."""
        self._model.save_weights(self._weights_path)

    def load_model(self):
        """Restore weights from disk.

        The dummy fit() call forces Keras to build the layer shapes so that
        load_weights() has a concrete graph to map the stored weights onto.
        """
        self._model.fit(np.array([0] * 12).reshape(1, self.input_neurons),
                        np.array([0., 0., 1., 0.]).reshape(1, 4), epochs=1, verbose=0)
        self._model.load_weights(self._weights_path)

    def remember(self, decision):
        """Append one (prev_state, action, reward, new_state, done) tuple."""
        self._memory.append(decision)

    def train(self, prev_state, action, reward, new_state, done):
        """Single-step Q-update: target = r + gamma * max_a' Q(s', a')."""
        target = reward
        if not done:
            target = reward + self._gamma * \
                np.amax(self._model.predict(np.array(new_state).reshape((1, self.input_neurons)))[0])
        target_f = self._model.predict(np.array(prev_state).reshape((1, self.input_neurons)))
        target_f[0][np.argmax(action)] = target
        # NOTE(review): assumes prev_state is already an ndarray here — confirm callers.
        self._model.fit(prev_state.reshape((1, self.input_neurons)), target_f, epochs=1, verbose=0)

    def long_train(self, memory):
        """Experience replay over a random minibatch drawn from *memory*.

        BUG FIX: the size check previously looked at len(self._memory)
        instead of the *memory* argument that is actually sampled, which
        raises ValueError whenever the two containers differ in length.
        """
        if len(memory) > self._batch_size:
            minibatches = random.sample(memory, self._batch_size)
        else:
            minibatches = memory
        for prev_state, action, reward, new_state, done in minibatches:
            self.train(prev_state, action, reward, new_state, done)

    @staticmethod
    def state(width, height, snake, food):
        """Encode the board as 12 binary features (0/1 ints).

        Order: danger left/right/front/down, current movement direction
        (left/right/down/up), food direction (left/right/down/front).
        Danger means a wall or the snake's own body one cell (20 px) away.

        BUG FIX: the original post-processing loop iterated the *values*
        (`for i in state: state[i] = ...`), using the booleans themselves as
        indices and clobbering elements 0 and 1; the booleans are now
        converted explicitly.
        """
        danger_left = (snake.head_pos()[0] <= 0) or \
                      ((snake.head_pos()[0] - 20, snake.head_pos()[1]) in snake.positions())
        danger_front = (snake.head_pos()[1] <= 0) or \
                       ((snake.head_pos()[0], snake.head_pos()[1] - 20) in snake.positions())
        danger_right = (snake.head_pos()[0] >= width - 20) or \
                       ((snake.head_pos()[0] + 20, snake.head_pos()[1]) in snake.positions())
        danger_down = (snake.head_pos()[1] >= height - 20) or \
                      ((snake.head_pos()[0], snake.head_pos()[1] + 20) in snake.positions())
        food_left = food.position()[0] < snake.head_pos()[0]
        food_right = food.position()[0] > snake.head_pos()[0]
        food_down = food.position()[1] > snake.head_pos()[1]
        food_front = food.position()[1] < snake.head_pos()[1]
        state = [
            danger_left,
            danger_right,
            danger_front,
            danger_down,
            snake.moving_left,
            snake.moving_right,
            snake.moving_down,
            snake.moving_up,
            food_left,
            food_right,
            food_down,
            food_front
        ]
        return np.asarray([1 if flag else 0 for flag in state])

    @staticmethod
    def in_bounds(x, y, width, height):
        """True if (x, y) lies inside the board rectangle."""
        return 0 <= x < width and 0 <= y < height

    @staticmethod
    def look_direction(direction, food, snake, height, width):
        """Ray-cast from the head along *direction* (a (dx, dy) step).

        Returns (food_dir, wall_dir, snake_dir): food_dir is 1 if food is on
        the ray, wall_dir / snake_dir are inverse distances to the wall and
        the nearest body segment (0 if no body segment on the ray).
        """
        food_x = food.position()[0]
        food_y = food.position()[1]
        cur_x, cur_y = snake.head_pos()[0], snake.head_pos()[1]
        cur_x += direction[0]
        cur_y += direction[1]
        distance = 1
        food_dir = snake_dir = 0
        while Agent.in_bounds(cur_x, cur_y, width, height):
            if (food_dir == 0) and (cur_x == food_x and cur_y == food_y):
                food_dir = 1
            if (snake_dir == 0) and ((cur_x, cur_y) in snake.positions()[1:]):
                snake_dir = 1/distance
            cur_x += direction[0]
            cur_y += direction[1]
            distance += 1
        wall_dir = 1/distance
        return food_dir, wall_dir, snake_dir

    @staticmethod
    def reward(snake, food, height, prev_pos):
        """Event reward plus half the change in Euclidean distance to food
        (positive when the snake moved closer)."""
        reward = Agent.initial_reward(snake)
        # Checking if snake came closer to food (y is flipped to screen coords).
        x_food = food.position()[0]
        y_food = height - food.position()[1]
        x_prev = prev_pos[0]
        y_prev = height - prev_pos[1]
        x_next = snake.head_pos()[0]
        y_next = height - snake.head_pos()[1]
        x = np.abs(x_prev - x_food)
        y = np.abs(y_prev - y_food)
        c_prev = np.sqrt(np.square(x) + np.square(y))
        x = np.abs(x_next - x_food)
        y = np.abs(y_next - y_food)
        c_next = np.sqrt(np.square(x) + np.square(y))
        action_value = c_prev - c_next
        reward += action_value
        return reward/2

    @staticmethod
    def rew(food, prev_pos, snake, prev_state):
        """Alternative reward: +/-5 for moving towards/away from the food,
        applied only when any danger/direction flag was set in prev_state."""
        reward = Agent.initial_reward(snake)
        x_food = food.position()[0]
        y_food = food.position()[1]
        x_prev = prev_pos[0]
        y_prev = prev_pos[1]
        x_next = snake.head_pos()[0]
        y_next = snake.head_pos()[1]
        diff_x = np.abs(x_food - x_prev)
        diff_y = np.abs(y_food - y_prev)
        if 1 in prev_state[0:8]:
            next_diff_x = np.abs(x_food - x_next)
            next_diff_y = np.abs(y_food - y_next)
            if next_diff_x < diff_x or next_diff_y < diff_y:
                reward += 5
            else:
                reward -= 5
        return reward

    @staticmethod
    def initial_reward(snake):
        """Base event reward: eating or dying (dying wins if both apply)."""
        reward = 0
        if snake.eaten():
            reward = reward_for_eating
        if snake.done():
            reward = reward_for_dying
        return reward

    def plot(self):
        """Plot the training-loss history of the last fit() call."""
        losses = pd.DataFrame(self._model.history.history)
        losses.plot()
        plt.show()

    def memory(self):
        """Return the replay memory deque."""
        return self._memory

    def get_model(self):
        """Return the underlying Keras model."""
        return self._model
|
from unittest.mock import patch
import pytest
from django.core.exceptions import ValidationError
from django.test import override_settings
from core.validators import dataregistry_path_validator, validate_url_or_path
def test_validate_url_successful():
    """A well-formed absolute URL validates without raising, returning None."""
    assert validate_url_or_path("https://www.google.com") is None
def test_validate_failed():
    """A URL without a top-level domain is rejected with ValueError."""
    bad_url = "https://google"
    with pytest.raises(ValueError) as excinfo:
        validate_url_or_path(bad_url)
    assert "Input URL is invalid" in str(excinfo)
def test_validate_unsupported_url():
    """Schemes other than the supported ones raise NotImplementedError."""
    ftps_url = "ftps://www.link.com"
    with pytest.raises(NotImplementedError) as excinfo:
        validate_url_or_path(ftps_url)
    assert "This type of URL is not supported yet" in str(excinfo)
def test_dataregistry_path_validate_no_dataregistry_path():
    """Without DATAREGISTRY_MEDIA_ROOT configured, file:// paths are rejected."""
    with pytest.raises(ValidationError) as excinfo:
        dataregistry_path_validator("file:///file.json")
    assert "No file found on this address" in str(excinfo)
@override_settings(DATAREGISTRY_MEDIA_ROOT="/tmp/data_registry/")
def test_dataregistry_path_validatator_non_existing_path():
    """A file:// path that does not exist on disk fails validation."""
    with pytest.raises(ValidationError) as excinfo:
        dataregistry_path_validator("file:///file.json")
    assert "No file found on this address" in str(excinfo)
@override_settings(DATAREGISTRY_MEDIA_ROOT="/tmp/data_registry/")
@patch("os.path.islink", return_value=True)
def test_dataregistry_path_validatator_symlink_not_allowed(mock_islink):
    """Symlinked paths are treated like missing files and rejected."""
    with pytest.raises(ValidationError) as excinfo:
        dataregistry_path_validator("file:///file.json")
    assert "No file found on this address" in str(excinfo)
@override_settings(DATAREGISTRY_MEDIA_ROOT="/tmp/data_registry/")
@patch("core.validators.settings.DATAREGISTRY_JAIL", False)
@patch("pathlib.Path.is_file", return_value=True)
def test_dataregistry_path_validatator_no_jail(mock_isfile):
    """With the jail disabled, an existing file anywhere validates (None)."""
    assert dataregistry_path_validator("file:///random/folder/file.json") is None
@override_settings(DATAREGISTRY_MEDIA_ROOT="/tmp/data_registry/")
@patch("pathlib.Path.is_file", return_value=True)
def test_dataregistry_path_validatator_success(mock_isfile):
    """An existing file inside DATAREGISTRY_MEDIA_ROOT validates (None)."""
    assert dataregistry_path_validator("file:///tmp/data_registry/file.json") is None
@override_settings(DATAREGISTRY_MEDIA_ROOT="/tmp/data_registry/")
def test_dataregistry_path_validatator_attribute_error():
    """Non-string input (a list) is rejected the same way as a missing file."""
    bad_input = ["file1.rar", "file2.txt"]
    with pytest.raises(ValidationError) as excinfo:
        dataregistry_path_validator(bad_input)
    assert "No file found on this address" in str(excinfo)
|
import json
import torch.optim as optim
from models.train_loop import train_model
from models.seq_model import SeqModel
# Train the sequence model on the precomputed entity-pair / path vocabularies.
# (Renamed `dir` -> `data_dir`: `dir` shadows the builtin.)
data_dir = "data_/"
# Context managers guarantee the vocabulary files are closed after loading.
with open(data_dir + "entpair2id.json") as fp:
    entpair2id = json.load(fp)
with open(data_dir + "path2id.json") as fp:
    path2id = json.load(fp)
exp_dir = "experiments/"
seq_model_dir = exp_dir + "seqmodel/"
model = SeqModel(ent_size=len(entpair2id), path_size=len(path2id), embed_size=300)
optimizer = optim.Adam(model.parameters(), lr=0.01)
num_epoch = 20
batch_size = 4096
# Append the hyper-parameters and a run separator to the experiment log;
# the `with` block also closes the previously-leaked log handle.
with open("experiments/log_word2vec.txt", "a") as log:
    log.write("epochs" + str(num_epoch) + "\n")
    log.write("batch_size" + str(batch_size) + '\n')
    log.write("optimizer: adam , lr:0.01\n")
    train_model(model, dir=data_dir, optimizer=optimizer, num_epochs=num_epoch,
                entpair2id=entpair2id, path2id=path2id, batch_size=batch_size, dev_ratio=0.01,
                name='linear_relu_2_adam_0.01'+"_"+str(num_epoch)+"_"+str(batch_size), outputdir=seq_model_dir, gpu=True)
    log.write("+++++++++++++++++++++++++\n")
|
import torch
import torch.nn.functional as F
class HingeLoss(object):
    """Hinge loss for GAN training.

    'gen'      -> -mean(logits)            (generator)
    'dis_real' -> mean(relu(1 - logits))   (discriminator, real samples)
    'dis_fake' -> mean(relu(1 + logits))   (discriminator, fake samples)
    """

    def __init__(self):
        pass

    def __call__(self, logits, loss_type):
        assert loss_type in ['gen', 'dis_real', 'dis_fake']
        if loss_type == 'dis_real':
            return F.relu(1.0 - logits).mean()
        if loss_type == 'dis_fake':
            return F.relu(1.0 + logits).mean()
        # Generator: push discriminator logits up.
        return -torch.mean(logits)
|
from typing import List
from off_policy_rl.config.config import Config
from off_policy_rl.utils.bin import Bin
from off_policy_rl.utils.types import Pose
class Environment:
    """Two-bin environment: one bin is picked from, the other is dropped
    into, and the two roles can be swapped."""

    def __init__(self, bins: List[Bin]):
        assert len(bins) == 2
        self.bins = bins
        # Index of the bin currently picked from; the other one receives drops.
        self.pick_bin = Config.start_bin
        self.drop_bin = int(not self.pick_bin)

    def switch_bins(self):
        """Swap the pick/drop roles of the two bins."""
        self.pick_bin, self.drop_bin = int(not self.pick_bin), self.pick_bin

    def get_pick_bin(self) -> Bin:
        """Return the bin currently being picked from."""
        return self.bins[self.pick_bin]

    def get_drop_bin(self) -> Bin:
        """Return the bin currently being dropped into."""
        return self.bins[self.drop_bin]
|
# -*- coding:utf-8 -*-
# Author: Roc-J
class Solution(object):
    def intersect(self, nums1, nums2):
        """Intersection of two lists including duplicate multiplicity.

        :type nums1: List[int]
        :type nums2: List[int]
        :rtype: List[int]

        Note: nums1 is consumed in place (matched items are removed).
        """
        shared = []
        for candidate in nums2:
            if not nums1:
                break  # nothing left to match against
            if candidate in nums1:
                nums1.remove(candidate)
                shared.append(candidate)
        return shared
if __name__ == '__main__':
    # Python 2 print statement; expected output: [1, 2]
    print Solution().intersect([1, 2, 1], [1, 2, 2])
|
import pyautogui
import time
# Disable pyautogui's fail-safe (normally moving the mouse to the top-left
# corner aborts the script) — required because this script parks the cursor
# in that exact corner.
pyautogui.FAILSAFE = False
print("\n██╗      █████╗ ███████╗██╗   ██╗    ██╗████████╗    ██████╗ ██╗   ██╗██████╗ ███████╗\n"
      "██║     ██╔══██╗╚══███╔╝╚██╗ ██╔╝    ██║╚══██╔══╝    ██╔══██╗██║   ██║██╔══██╗██╔════╝\n"
      "██║     ███████║  ███╔╝  ╚████╔╝     ██║   ██║       ██║  ██║██║   ██║██║  ██║█████╗\n"
      "██║     ██╔══██║ ███╔╝    ╚██╔╝      ██║   ██║       ██║  ██║██║   ██║██║  ██║██╔══╝\n"
      "███████╗██║  ██║███████╗   ██║       ██║   ██║       ██████╔╝╚██████╔╝██████╔╝███████╗\n"
      "╚══════╝╚═╝  ╚═╝╚══════╝   ╚═╝       ╚═╝   ╚═╝       ╚═════╝  ╚═════╝ ╚═════╝ ╚══════╝\n")
print("This program was developed by SANTHOSH-SNTS\n\n")
print("[+] This is the life saving hack for (Work From Home) employee, this program will keep your screen alive...\n ")
print("\t[+] LAZY IT DUDE is turned ON \n\t[+] Mouse will be moved that won't let your computer screen down \n\t[+] To quit this program press ctrl+c ")
print("\n(NOTE : Your mouse is in the left top corner of your screen)\n \t60 sec = 1 minute\n \t300 sec = 5 minutes ")
# Ask how often to wiggle the mouse.  input() returns a string; it is
# converted with int() at the call site below.
try:
    counts = input("\n[+] Please enter the number of seconds (eg: 15) : ")
    print("\n[+] Your mouse will be moved for every " + counts + " second\n")
except KeyboardInterrupt:
    # NOTE(review): if ctrl+c lands here, `counts` is never assigned, and the
    # main loop below will raise NameError instead of exiting cleanly — confirm.
    print("\n\n[+] Detected ctrl + c.............................. Quitting lazy IT dude Developed by SANTHOSH")
def mouse(counts):
    """Sleep *counts* seconds, then wiggle the cursor and tap a harmless key.

    Moving the mouse down the left edge and pressing shift keeps the
    screensaver / lock timer from firing.
    """
    time.sleep(counts)
    for step in range(50):
        pyautogui.moveTo(0, step * 5)
    for _ in range(3):
        pyautogui.press('shift')
try:
    times_count = 0
    # Wiggle the mouse every `counts` seconds until the user hits ctrl+c.
    while True:
        # NOTE(review): `counts` comes from the input() prompt above; if that
        # prompt was interrupted, this raises NameError (uncaught) — confirm.
        mouse(int(counts))
        times_count = times_count + 1
        # \r rewrites the same console line with the updated counter.
        print("\r [+] Mouse moved count : " + str(times_count), end="")
except KeyboardInterrupt:
    print("\n\n[+] Detected ctrl + c.............................. Quitting lazy IT dude Developed by SANTHOSH")
|
from app import app,db
from tables import User
from flask import request,abort,jsonify,url_for
#to register a new user
@app.route('/api/users', methods = ['POST'])
def newUser():
    """Register a new user from a JSON body with 'username' and 'password'.

    Returns a plain-text status message in every case.
    """
    username = request.json.get('username')
    password = request.json.get('password')
    # Both fields are required.
    if username is None or password is None:
        return "Username or password is Null" # missing arguments
    # Usernames must be unique.
    if User.query.filter_by(username = username).first() is not None:
        return "Username exists"
    account = User(username = username)
    account.hashPassword(password)
    # Persist the new user.
    db.session.add(account)
    db.session.commit()
    return "User Added Successfully"
#login and check username and password
#login and check username and password
@app.route('/api/login', methods = ['POST'])
def checkLoginDetails():
    """Check a username/password pair posted as JSON.

    Returns a plain-text status message; aborts with 400 when either
    field is missing.
    """
    username = request.json.get('username')
    password = request.json.get('password')
    #checking validity of entered username
    if username is None or password is None:
        abort(400) # missing arguments
    # Single lookup — the original ran the identical query twice.
    user = User.query.filter_by(username=username).first()
    if user is None:
        return "user not found"
    # Truthiness test instead of `== True` comparison.
    if user.verifyPassword(password):
        return "Login Successful"
    return "Wrong Password"
if __name__ == "__main__":
    # Run the Flask development server locally; debug=True enables the
    # reloader and interactive debugger (not for production use).
    app.run(port=5000, debug=True)
|
#!/usr/bin/python
#Date: 1.23.18
# Python 2 script (urllib2, print statements): looks up the machine's public
# IP and geolocates it via ipinfo.io.
import urllib2
import re
import requests
## Get IP ##
# Scrape ipchicken.com with a browser-like User-agent and pull the first
# dotted-quad out of the HTML.
OPENER = urllib2.build_opener()
OPENER.addheaders = [('User-agent', 'Mozilla/5.0')]
MY_IP = OPENER.open('http://ipchicken.com/')
MY_IP = MY_IP.read()
IP = re.findall(r'[0-9]+(?:\.[0-9]+){3}', MY_IP)[0]
print "IP Address: %s" % IP
## Get GeoLOCation - Country ##
GEO_LO = requests.get('http://ipinfo.io/%s/country' % IP)
LOC = str(GEO_LO.content).rstrip()
print "Country: %s" % LOC
## Get GeoLOCation - City ##
# NOTE(review): despite the header, this queries the *region* endpoint.
GEO_REGION = requests.get('http://ipinfo.io/%s/region' % IP)
REGION = str(GEO_REGION.content).rstrip()
print "Region: %s" % REGION
|
# Advent of Code 2017, day 22 (part two): grid infection simulation.
# NOTE(review): defaultdict and re are imported but unused below.
from collections import defaultdict
import re
with open('/Users/joakimkoljonen/src/adventofcode/2017/22.input', 'r') as file:
    input = file.read()
test_input = '''..#
#..
...
'''
#input = test_input
ROUNDS = 10000000
# Node states are tracked as three disjoint sets of (row, col) positions;
# a position in none of them is "clean".
parsed = [line for line in input.split('\n') if line != '']
infected = set()
flagged = set()
weakened = set()
# Coordinates are centred on the middle of the input grid.
middle = len(parsed) // 2
for rowidx, row in enumerate(parsed):
    for colidx, col in enumerate(row):
        if col == '#':
            infected.add((rowidx - middle, colidx - middle))
def getans():
    """Run the ROUNDS-step evolved-virus walk; return the number of bursts
    that caused an infection.

    Rules per step: infected -> turn right, becomes flagged; flagged ->
    reverse, becomes clean; weakened -> keep direction, becomes infected;
    clean -> turn left, becomes weakened.  Then move one cell forward.
    """
    # Hoisted out of the loop: the original rebuilt these dict literals on
    # every one of the 10M iterations.
    RIGHT_TURN = {'l': 'u', 'd': 'l', 'r': 'd', 'u': 'r'}
    REVERSE = {'l': 'r', 'd': 'u', 'r': 'l', 'u': 'd'}
    LEFT_TURN = {'u': 'l', 'l': 'd', 'd': 'r', 'r': 'u'}
    STEP = {'l': (0, -1), 'd': (1, 0), 'r': (0, 1), 'u': (-1, 0)}
    verbose = ROUNDS < 100  # debug tracing only for tiny runs
    pos = (0, 0)
    direction = 'u'
    infections = 0
    for _ in range(ROUNDS):
        if verbose:
            print(pos, direction, infections)
        if pos in infected:
            if verbose:
                print('infected, flagging')
            direction = RIGHT_TURN[direction]
            infected.remove(pos)
            flagged.add(pos)
        elif pos in flagged:
            if verbose:
                print('flagged, cleaning')
            direction = REVERSE[direction]
            flagged.remove(pos)
        elif pos in weakened:
            if verbose:
                print('weakened, infecting')
            weakened.remove(pos)
            infected.add(pos)
            infections += 1
        else:
            if verbose:
                print('clean, weakening')
            direction = LEFT_TURN[direction]
            weakened.add(pos)
        dr, dc = STEP[direction]
        pos = (pos[0] + dr, pos[1] + dc)
    return infections
print(getans())
|
# Convert all of the text columns in train to the categorical data type.
# Select the Utilities column, return the categorical codes, and display the unique value counts for those codes: train['Utilities'].cat.codes.value_counts()
import pandas as pd
# Load the Ames housing data and one-hot encode the fully-populated text
# columns of the training split.
data = pd.read_csv('AmesHousing.txt', delimiter="\t")
train = data[0:1460].copy()
test = data[1460:].copy()
# Columns with zero missing values in train.
train_null_counts = train.isnull().sum()
df_no_mv = train[train_null_counts[train_null_counts==0].index]
# The fully-populated object (text) columns.
text_cols = df_no_mv.select_dtypes(include=['object']).columns
for col in text_cols:
    train[col] = train[col].astype('category')
# One-hot encode each text column and drop the original.
# (Removed the unused `dummy_cols = pd.DataFrame()` local.)
for col in text_cols:
    # NOTE(review): get_dummies without prefix=col can produce colliding
    # column names if two text columns share category labels — confirm.
    col_dummies = pd.get_dummies(train[col])
    train = pd.concat([train, col_dummies], axis=1)
    del train[col]
print(train.head(5))
# coding: utf-8
import random
from gat_games.game_engine.engine import *
from gat_games.game_engine.cardgame import *
# TODOs:
# who wins start next round => must_start_new_cycle BUG?
class TrucoPlayer(Player):
    """Base Truco player: routes turns either to the truco-acceptance
    decision or to the normal play decision, and wraps the game commands."""

    def play(self, context, **kwargs):
        action = kwargs.get('action', 'play')
        if action == 'accept_truco':
            # Pending truco must be answered before anything else.
            if self.has_accepted_truco(context):
                self.accept_truco()
            else:
                self.reject_truco()
            return
        self.upcard_or_truco(context)

    def has_accepted_truco(self, context): pass  # strategy hook

    def upcard_or_truco(self, context): pass  # strategy hook

    def can_truco(self, context):
        """Whether this player may call truco right now (from the context)."""
        return context['round_can_truco']

    def upcard(self, card):
        """Lay *card* on the table."""
        self.game.execute_command(Upcard(self, card=card))

    def truco(self):
        """Raise the stakes."""
        self.game.execute_command(TrucoCommand(self))

    def accept_truco(self):
        self.game.execute_command(AcceptTruco(self))

    def reject_truco(self):
        self.game.execute_command(RejectTruco(self))
class RandomTrucoPlayer(TrucoPlayer):
    """Baseline strategy that accepts trucos, calls truco and picks cards
    uniformly at random."""

    def has_accepted_truco(self, context):
        # Coin flip (same single randint call as before).
        return bool(random.randint(0, 1))

    def upcard_or_truco(self, context):
        # Roll first so the RNG call sequence is unchanged.
        roll = random.randint(0, 10)
        if self.can_truco(context) and roll > 5:
            self.truco()
            return
        hand = context['hand']
        chosen = hand.see(random.randint(0, len(hand) - 1))
        self.upcard(chosen)
class TrucoCard(Card):
    # Suit tie-break weights for manilhas: clubs > hearts > spades > diamonds.
    suit_weights = {SPADES: 2, HEARTS: 3, DIAMONDS: 1, CLUBS: 4}
    # Rank order from weakest to strongest (Truco order, not poker order).
    ranks = (4, 5, 6, 7, Q, J, K, AS, 2, 3)
    # ranks = (Q, J, K, AS, 2, 3)
    def is_manilha(self, center_card):
        """Return True if this card is a manilha: its rank is the one
        immediately above the centre card's rank in `ranks`, wrapping
        around from the last rank back to the first."""
        i1 = self.ranks.index(self.rank)
        i2 = self.ranks.index(center_card.rank)
        return i1 == i2 + 1 or (i2 == len(self.ranks) - 1 and i1 == 0)
class TrucoDeck(Deck):
    # Deck specialised to deal TrucoCard instances.
    Card = TrucoCard
def get_key_by_value(the_dict, value):
    """Return the first key of *the_dict* whose value equals *value*.

    Raises ValueError when no entry has that value.
    """
    keys = list(the_dict)
    values = [the_dict[k] for k in keys]
    return keys[values.index(value)]
class Upcard(PlayerGameCommand):
    """Command: the player in turn lays a card from their hand on the table."""
    def validate(self, game, context):
        card = self.kwargs.get('card')
        if game.player_in_turn != self.player:
            raise InvalidCommandError('Player can not upcard right now')
        if not game.hand(self.player).contains(card):
            raise InvalidCommandError('Invalid card to upcard')
        # A pending truco blocks card play until accepted/rejected.
        if game.state != 'playing':
            raise InvalidCommandError('Need to accept/reject Truco before upcard')
    def execute(self, game):
        card = self.kwargs.get('card')
        # remove() returns the concrete card object held in the hand.
        card = game.hand(self.player).remove(card)
        game.table[str(self.player)] = card
class TrucoCommand(PlayerGameCommand):
    """Command: raise the round's stakes ('truco'), moving the round into
    the 'truco' state until the opponent accepts or rejects."""
    def validate(self, game, context):
        # Only the player in turn or the next one may call truco.
        if game.player_in_turn != self.player and game.next_player() != self.player:
            raise InvalidCommandError('It is not your resposibility to Truco right now')
        # A player may not re-raise their own truco.
        if game.last_player_who_truco == self.player:
            raise InvalidCommandError('You can not truco until another player re-truco')
        # 12 points is the maximum stake ("all-in").
        if game.value >= 12:
            raise InvalidCommandError('Game has already been setted to all-in')
        if game.truco_value >= 12:
            raise InvalidCommandError('Game has already been setted to all-in')
    def execute(self, game):
        # First truco raises 1 -> 3; subsequent re-trucos add 3 each.
        if game.value == 1:
            game.truco_value = 3
        else:
            game.truco_value = game.value + 3
        game.last_player_who_truco = self.player
        game.state = 'truco'
class AcceptTruco(PlayerGameCommand):
    """Command: accept a pending truco, locking in the raised round value."""
    def validate(self, game, context):
        if game.last_player_who_truco == self.player:
            raise InvalidCommandError('You can not accept your own Truco')
        if game.state != 'truco':
            raise InvalidCommandError('No truco to accept')
    def execute(self, game):
        # The proposed stake becomes the round value; play resumes.
        game.value = game.truco_value
        game.truco_value = 0
        game.state = 'playing'
class RejectTruco(PlayerGameCommand):
    """Command: reject a pending truco — the proposer wins the round at the
    pre-raise value."""
    def validate(self, game, context):
        if game.last_player_who_truco == self.player:
            raise InvalidCommandError('You can not reject your own Truco')
        if game.state != 'truco':
            raise InvalidCommandError('No truco to reject')
    def execute(self, game):
        # BUG FIX: the flag must be set on the round (game), where
        # TrucoRound.start_round initialises it and TrucoRound.is_the_end
        # reads it; the original assigned it to this command object instead.
        game.truco_rejected = True
        game.wins[str(game.player_in_turn)] = 3
        game.state = 'playing'
        raise EndGame()
class StartRoundCommand(StartRoundCommand):
    # Deliberately shadows the imported StartRoundCommand, extending it with
    # Truco-specific context: the centre card and each player's dealt hand.
    def read_context(self, game):
        self.kwargs['center_card'] = game.center_card
        for player in game.players:
            self.kwargs[str(player)] = game.hand(player).get_cards()
class TrucoRound(CardGameRound):
    """One round (hand) of Truco: 3 cards each, up to 3 cycles; the first
    player to win 2 cycles takes the round for its current value."""
    Deck = TrucoDeck
    StartGameCommand = StartRoundCommand
    def prepare(self): pass
    def start_round(self):
        # Reset per-round state: cycle wins, stake and truco bookkeeping.
        self.truco_rejected = False
        self.wins = {}
        for player in self.players:
            self.wins[str(player)] = 0
        self.value = 1 # accepted truco update round value
        self.truco_value = 0
        self.last_player_who_truco = None
        # The centre card decides which rank is the manilha this round.
        self.center_card = self.deck.pop()
        self.new_deck()
        self.distribute_cards_to_each_player(3)
        self.table = {}
        self.state = 'playing'
    def get_context(self, player):
        """Extend the base context with the round data visible to players."""
        context = super(TrucoRound, self).get_context(player)
        context['round_value'] = self.value
        context['round_truco_value'] = self.truco_value
        context['round_center_card'] = self.center_card
        context['round_table'] = self.table
        context['round_wins'] = self.wins
        context['round_state'] = self.state
        # May truco iff not re-raising own truco and stakes below 12.
        context['round_can_truco'] = self.player_in_turn != self.last_player_who_truco and self.value < 12 and self.truco_value < 12
        return context
    def before_play(self): pass
    def must_start_new_cycle(self):
        # A cycle ends once every player has a card on the table.
        return len(self.table) == len(self.players)
    def start_new_cycle(self):
        self.table = {}
        # FIXME player_in_turn = self.winner_last_cycle
    def before_player_play(self, player, context): pass
    def play(self):
        if self.must_start_new_cycle():
            self.start_new_cycle()
        self.player_in_turn = self.next_player()
        # A pending truco must be answered before any card is played.
        if self.state == 'truco':
            self.player_play(self.player_in_turn, action='accept_truco')
        else:
            self.player_play(self.player_in_turn)
    def after_player_play(self, player, context, response=None):
        # Once the table is full, score the cycle.
        if len(self.table) == len(self.players):
            cards = list(self.table.values())
            winning_card = self.winning_card(cards, self.center_card)
            indexes = [i for i, x in enumerate(list(self.table.values())) if x == winning_card] # can have a draw
            # draw count one win for each player
            for index in indexes:
                winner = list(self.table.keys())[index]
                self.wins[winner] += 1
    def after_play(self): pass
    def is_the_end(self):
        # draw count one win for each player
        return max(self.wins.values()) >= 2 or self.truco_rejected
    def the_end(self):
        # Map best/worst cycle-win counts back to player names.
        winner_wins = max(self.wins.values())
        loser_wins = min(self.wins.values())
        self.winners = [get_key_by_value(self.wins, winner_wins)]
        self.losers = [get_key_by_value(self.wins, loser_wins)]
    def summary(self):
        s = super(TrucoRound, self).summary()
        s.update({
            'round_value': self.value,
            'wins': self.wins,
        })
        return s
    # Auxiliar methods
    def winning_card(self, cards, center_card):
        """Strongest card on the table: the highest manilha if any is
        present (ties broken by suit weight), otherwise the highest rank."""
        manilhas = []
        for card in cards:
            if card.is_manilha(center_card):
                manilhas.append(card)
        if manilhas:
            manilhas = sorted(manilhas, key=lambda card: card.suit_weight())
            return manilhas[-1]
        else:
            return max(cards)
class Truco(CardGame):
    """Full game of Truco: rounds are played until someone reaches 12 points."""
    Round = TrucoRound
    RandomStrategy = RandomTrucoPlayer
    Player = TrucoPlayer
    def __init__(self, seed, players, **kwargs):
        super(Truco, self).__init__(seed, players, **kwargs)
        # Cumulative points per player name across rounds.
        self.scoreboard = {}
        for player in self.players:
            self.scoreboard[str(player)] = 0
    def prepare(self): pass
    def get_context(self, player):
        return super(Truco, self).get_context(player)
    def before_play(self): pass
    def before_start_round(self, round_game): pass
    def after_end_round(self, round_game):
        # Round winners score the round's (possibly truco-raised) value.
        for winner in round_game.winners:
            self.scoreboard[str(winner)] += round_game.value
    def after_play(self): pass
    def is_the_end(self):
        return max(self.scoreboard.values()) >= 12
    def the_end(self):
        # Despite the names, these hold the best/worst *scores*; the helper
        # maps each score back to a player name.
        index_winner = max(self.scoreboard.values())
        index_loser = min(self.scoreboard.values())
        self.winners = [get_key_by_value(self.scoreboard, index_winner)]
        self.losers = [get_key_by_value(self.scoreboard, index_loser)]
    def summary(self):
        s = super(Truco, self).summary()
        s.update({
            'scoreboard': self.scoreboard
        })
        return s
|
###########################################################################
### Chapter 17 - Projecting Major League Performance ###
# Mathletics: How Gamblers, Managers, and Sports Enthusiasts #
# Use Mathematics in Baseball, Basketball, and Football #
###########################################################################
import pandas as pd
# these are the weights for each one of the 3 past years for HR rates and PA- earlier first, so recent years are weighted more
weights = [3, 4, 5]
# NOTE(review): weights_pa[0] == 0, so the oldest season's PA contributes
# nothing to the playing-time projection — appears deliberate; confirm.
weights_pa = [0, 0.1, 0.5]
# age adjustment
# Players younger than 29 get a bonus per year, older ones a penalty.
player_age = 31
if player_age < 29:
    player_adj = 0.006*(29-player_age)
else:
    player_adj = -0.003*(player_age-29)
## Steps 1 - 2
# Expected columns: per-season HR, PA and the league-average HR rate.
player_data = pd.read_csv("marcel.csv")
## Step 3
# Per-season HR rate per plate appearance.
player_data['player_rate'] = player_data['HR']/player_data['PA']
## Step 4
# PA- and recency-weighted average of the player's HR rate.
player_wavg = sum([player_data['PA'].iloc[i]*player_data['player_rate'].iloc[i]*weights[i] for i in range(len(weights))])/sum([player_data['PA'].iloc[i]*weights[i] for i in range(len(weights))])
## Step 5
# Same weighting applied to the league-average rate.
league_wavg = sum([player_data['PA'].iloc[i]*player_data['LeagueAvgR'].iloc[i]*weights[i] for i in range(len(weights))])/sum([player_data['PA'].iloc[i]*weights[i] for i in range(len(weights))])
## Step 6
# Regression to the mean: 1200 weighted "league" PAs are mixed in.
playerw = sum([player_data['PA'].iloc[i]*weights[i] for i in range(len(weights))])/(sum([player_data['PA'].iloc[i]*weights[i] for i in range(len(weights))])+1200)
leaguew = 1 - playerw
## Step 7
pred_playerHRprob = (player_wavg*playerw) + (league_wavg*leaguew)
# Projected playing time: baseline 200 PA plus weighted recent PAs.
pred_playerPA = 200 + sum([player_data['PA'].iloc[i]*weights_pa[i] for i in range(len(weights_pa))])
## Step 8
predicted_playerHR = pred_playerPA*pred_playerHRprob
## Step 9
# Scale by the age adjustment computed above.
predicted_playerHR_age_adjusted = (1+player_adj) * predicted_playerHR
print("This player's predicted HRs for next season is: ", predicted_playerHR_age_adjusted)
|
# Python 2 script: reads Eventbrite API URLs (one per line, each carrying an
# OAuth token) from input.txt and appends one CSV row per event to ram.csv.
import urllib2,json,codecs,string #importing necessary libraries
# NOTE(review): var0/var10/var12 are initialised once and reassigned only
# conditionally inside the loop, so a row can carry stale values from a
# previous event when e.g. the category is None -- confirm this is intended.
var0="N/A" #initialise the variables with fallback values
var10="N/A"
var12="N/A"
fob=codecs.open('/Users/gudaprudhvihemanth/Desktop/ram.csv','a','utf-8') #create and open a csv file with utf-8 encoding format
fob.write("Event_Name,Event_url,City,Address,End_date,Organiser_name,Organiser url,Category_name,Format") #print the column names
fob.write("\n")
for line in open('/Users/gudaprudhvihemanth/Desktop/input.txt','r').readlines(): #one API URL per line
    data=json.load(urllib2.urlopen(line)) #load response in json format
    for item in data['events']: #walk the events in the json payload
        var1=item['name']['text'] #event name
        var1=var1.replace(',','') #strip commas so the CSV stays well-formed
        var2=item['url'] #event url
        var3=item['venue']['address']['city'] #city in which the event is held
        var4=item['venue']['address']['address_1'] #address part 1 (part 2 below)
        if var4 is None: #None fields are replaced with N/A
            var4="N/A"
        if var3 is None:
            var3="N/A"
        if var4 is not None:
            var4=var4.replace(',','') #strip commas from the address
        var5=item['venue']['address']['address_2']
        if var5 is not None:
            var5=var5.replace(',','')
        if var4 is not None:
            if var5 is not None:
                if var4!=var5:
                    var0=var4+var5 #concatenation of both address parts when they differ
        var6=item['end']['local'].split("T")[0] #end date (date part of the ISO timestamp)
        var7=item['organizer']['name'] #organiser name
        if var7 is None:
            var7="N/A"
        if var7 is not None:
            var7=string.replace(var7,',',' ') #Python 2 string module; replaces commas with spaces
        var8=item['organizer']['url'] #organiser url
        var8=var8.replace(","," ")
        var9=item['category'] #category object (may be None)
        if var9 is not None:
            var10=item['category']['name'] #category name
            var10=string.replace(var10,","," ")
        var11=item['format'] #format object (may be None)
        if var11 is not None:
            var12= item['format']['name'] #format name
            var12=string.replace(var12,',',' ',9)
        if var4==var5: #both address parts equal (e.g. both N/A): use the combined var0
            fob.write(var1+","+var2+","+var3+","+var0+","+var6+","+var7+","+var8+","+var10+","+var12)
            fob.write("\n")
        else:
            fob.write(var1+","+var2+","+var3+","+var4+","+var6+","+var7+","+var8+","+var10+","+var12)
            fob.write("\n")
fob.close() #close the file
def is_postal_code(code):
    """Return True if *code* is a Canadian postal code of the form "A1A 1A1".

    Positions 0, 2, 5 must be uppercase letters, positions 1, 4, 6 digits,
    and position 3 a single space. Anything else (wrong type, wrong length,
    lowercase letters, missing space) returns False.
    """
    # Guard clauses replace the original nested ifs; isinstance() is the
    # idiomatic type check (the original used `type(code) == str`).
    if not isinstance(code, str) or len(code) != 7:
        return False
    for i, ch in enumerate(code):
        if i in (0, 2, 5):
            # Letter positions: must be alphabetic AND uppercase.
            if not (ch.isalpha() and ch.isupper()):
                return False
        elif i in (1, 4, 6):
            # Digit positions.
            if not ch.isdigit():
                return False
        elif ch != " ":
            # Position 3: the separating space.
            return False
    return True
|
from django.shortcuts import render
from django.http import HttpResponse
from background_task import background
from django.contrib.auth.models import User
from django.core.mail import send_mail
from pricetracker import settings
from track.models import Product
from track.views import ProductCreateView
@background(schedule=60)
def run_task():
    # Periodic (every 60s) price check: scrape the current price of every
    # tracked product and e-mail the recipient when it drops to the target.
    all_products=Product.objects.all()
    for p in all_products:
        myurl= p.product_url
        # NOTE(review): AmazonBot is not imported in this module -- as written
        # this line raises NameError; confirm the intended import.
        obj=AmazonBot()
        p.current_price=obj.get_price_only(myurl)#TODO: keep a price history list instead of overwriting a single value
        #current_price through list
        p.current_price=float(p.current_price)
        try:
            if p.current_price<=p.desire_price:
                subject='woohoo..! Price Dropped'
                message=f'Price Dropped for product {p.title}..Grab it now '
                from_email=settings.EMAIL_HOST_USER
                recipients_list=['yashpatel7025@gmail.com']
                send_mail(subject, message, from_email,recipients_list)
        except:
            # NOTE(review): bare except silently swallows every error
            # (including send_mail failures); consider narrowing and logging.
            pass
def background_view(request):
    """Schedule the price-check background task and acknowledge the request.

    BUG FIX: a Django view must return an HttpResponse; the original
    returned None, which makes Django raise
    ValueError("The view ... didn't return an HttpResponse object").
    """
    run_task()
    return HttpResponse("Price tracking task scheduled.")
@background(schedule=20)
def bg_task_run_view():
    # Every 20s: re-run product creation/refresh for all tracked products.
    # The -1 sentinel is passed straight through to
    # ProductCreateView.new_product; its meaning is defined there -- confirm.
    all_products = Product.objects.all()
    for index,p in enumerate(all_products):
        ProductCreateView.new_product(p,-1)
|
#Dimensionality Reduction
'''Ref: https://www.analyticsvidhya.com/blog/2018/08/dimensionality-reduction-techniques-python'''
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt

# Raw string for the Windows path: avoids relying on "\P", "\D" etc. being
# left alone as invalid escape sequences (same characters, no warnings).
train = pd.read_csv(r'D:\Programming Tutorials\Machine Learning\Projects\Datasets\Train_UWu5bXk.txt')
#checking the percentage of missing values in each variable
train.isnull().sum() / len(train) * 100

'''Using Missing value ratio'''
#saving missing values in a variable
missing = train.isnull().sum() / len(train) * 100
#keep features whose missing-value ratio is at most the 20% threshold
features = train.columns
feature = []
for i in range(len(features)):  # generalized: was a hard-coded range(0, 12)
    if missing[i] <= 20:
        feature.append(features[i])

#Imputing missing values
# BUG FIX: the original passed the bound method train['Item_Weight'].median
# (no call parentheses), filling every NaN with a method object instead of
# the median value.
train['Item_Weight'].fillna(train['Item_Weight'].median(), inplace = True)
train['Outlet_Size'].fillna(train['Outlet_Size'].mode()[0], inplace = True)
#check if all the missing values are filled
train.isnull().sum()/len(train) * 100

'''Using Low Variance Filter'''
#variables with a low variance will not affect the target variable
train.var() #inspect the variance of the numerical variables
numeric = train[['Item_Weight', 'Item_Visibility', 'Item_MRP', 'Outlet_Establishment_Year']]
var = numeric.var()
numeric = numeric.columns
variable = []
for i in range(len(var)):
    if var[i] >= 10: #setting the threshold as 10
        # BUG FIX: the original appended numeric[i + 1] -- off by one, which
        # records the wrong column and can raise IndexError on the last one.
        variable.append(numeric[i])

'''Using High Correlation filter'''
df = train.drop('Item_Outlet_Sales', axis = 1)  # keyword axis: positional axis is removed in pandas 2.x
df.corr() #No variables with high correlation

'''Using Random Forest'''
#Uses the built-in feature_importances_; inputs must be numeric, hence get_dummies
from sklearn.ensemble import RandomForestRegressor
df = df.drop(['Item_Identifier', 'Outlet_Identifier'], axis = 1)
model = RandomForestRegressor(random_state = 1, max_depth = 10)
df = pd.get_dummies(df)
model.fit(df, train.Item_Outlet_Sales)
#After fitting the model, plot the feature importance graph
features = df.columns
importances = model.feature_importances_
indices = np.argsort(importances)[-9:] # top 9 features (the original comment said 10)
plt.title('Feature Importances')
plt.barh(range(len(indices)), importances[indices], color = 'b', align = 'center')
plt.yticks(range(len(indices)), [features[i] for i in indices])
plt.xlabel('Relative Importance')
plt.show()
#Alternate way using SelectFromModel of sklearn: selects features by weight
from sklearn.feature_selection import SelectFromModel
feature = SelectFromModel(model)
fit = feature.fit_transform(df, train.Item_Outlet_Sales)

'''Using Factor Analysis'''
from glob import glob
import cv2
from sklearn.decomposition import FactorAnalysis
images = [cv2.imread(file) for file in glob('train/*.png')]
images = np.array(images)
images.shape
image = []
for i in range(len(images)):  # generalized: was a hard-coded range(0, 60000)
    img = images[i].flatten()
    image.append(img)
image = np.array(image)
train = pd.read_csv(r'D:\Programming Tutorials\Machine Learning\Projects\Datasets\Train_UWu5bXk.txt')
feat_cols = ['pixel' + str(i) for i in range(image.shape[1])]
df = pd.DataFrame(image, columns = feat_cols)
# NOTE(review): this CSV is the sales dataset and has no 'label' column;
# presumably an MNIST-style train.csv was intended here -- confirm the source.
df['label'] = train['label']
FA = FactorAnalysis(n_components = 3).fit_transform(df[feat_cols].values)
plt.figure(figsize = (12, 8))
plt.title('Factor Analysis Components')
plt.scatter(FA[:, 0], FA[:, 1])
plt.scatter(FA[:, 1], FA[:, 2])
plt.scatter(FA[:, 2], FA[:, 0])

'''Using Principal Component Analysis'''
#visualizing how much variance has been explained using these n components
rndperm = np.random.permutation(df.shape[0])
plt.gray()
fig = plt.figure(figsize=(20,10))
for i in range(0,15):
    ax = fig.add_subplot(3,5,i+1)
    ax.matshow(df.loc[rndperm[i],feat_cols].values.reshape((28,28*3)).astype(float))
from sklearn.decomposition import PCA
pca = PCA(n_components = 3)
# BUG FIX: explained_variance_ratio_ only exists after fitting the estimator.
pca.fit(df[feat_cols].values)
variance = pca.explained_variance_ratio_
# BUG FIX: range(4) produced 4 x-values for 3 components (shape mismatch).
plt.plot(range(len(variance)), variance)
plt.plot(range(len(variance)), np.cumsum(variance))
plt.title("Component-wise and Cumulative Explained Variance")

'''Using t-SNE'''
from sklearn.manifold import TSNE
# BUG FIX: `features` was reassigned above to the dummy-encoded sales columns,
# which do not exist in this pixel DataFrame; feat_cols is the correct list.
tsne = TSNE(n_components = 3, n_iter = 300).fit_transform(df[feat_cols][:6000].values)
plt.figure(figsize = (12, 8))
plt.title('t-SNE components')
plt.scatter(tsne[:,0], tsne[:,1])
plt.scatter(tsne[:,1], tsne[:,2])
plt.scatter(tsne[:,2], tsne[:,0])
from django.urls import path
from django.urls.conf import include
from . import views
# URL routes for this app: the landing page and the new-user handler.
urlpatterns = [
    path('', views.index),
    path('new_user', views.new_user),
]
import logging
import pytest
from ocs_ci.framework.pytest_customization.marks import tier1, tier2
from ocs_ci.framework.testlib import MCGTest
from ocs_ci.ocs import constants
from ocs_ci.ocs.bucket_utils import (
compare_bucket_object_list,
patch_replication_policy_to_bucket,
sync_object_directory,
write_random_test_objects_to_bucket,
verify_s3_object_integrity,
)
from ocs_ci.ocs.constants import AWSCLI_TEST_OBJ_DIR
from ocs_ci.ocs.resources.pod import cal_md5sum
from ocs_ci.framework.testlib import skipif_ocs_version
logger = logging.getLogger(__name__)
@skipif_ocs_version("<4.9")
class TestReplication(MCGTest):
    """
    Test suite for MCG object replication policies

    Each test builds a pair of buckets from parametrized bucketclass
    configurations (the dicts give the provisioning interface -- OC or CLI --
    and the backing/namespace stores per cloud), attaches a replication
    policy, writes objects, and verifies the objects appear on the target.
    """

    # (source, target) bucketclass pairs covering OC/CLI interfaces and
    # AWS/Azure/GCP stores; the ids list below names each combination.
    @pytest.mark.parametrize(
        argnames=["source_bucketclass", "target_bucketclass"],
        argvalues=[
            pytest.param(
                {
                    "interface": "OC",
                    "backingstore_dict": {"aws": [(1, "eu-central-1")]},
                },
                {"interface": "OC", "backingstore_dict": {"azure": [(1, None)]}},
                marks=[tier1, pytest.mark.polarion_id("OCS-2678")],
            ),
            pytest.param(
                {
                    "interface": "OC",
                    "backingstore_dict": {"gcp": [(1, None)]},
                },
                {
                    "interface": "OC",
                    "backingstore_dict": {"aws": [(1, "eu-central-1")]},
                },
                marks=[tier2],
            ),
            pytest.param(
                {
                    "interface": "CLI",
                    "backingstore_dict": {"azure": [(1, None)]},
                },
                {"interface": "CLI", "backingstore_dict": {"gcp": [(1, None)]}},
                marks=[tier2],
            ),
            pytest.param(
                {
                    "interface": "CLI",
                    "backingstore_dict": {"aws": [(1, "eu-central-1")]},
                },
                {"interface": "CLI", "backingstore_dict": {"azure": [(1, None)]}},
                marks=[tier1, pytest.mark.polarion_id("OCS-2679")],
            ),
            pytest.param(
                {
                    "interface": "OC",
                    "namespace_policy_dict": {
                        "type": "Single",
                        "namespacestore_dict": {"aws": [(1, "eu-central-1")]},
                    },
                },
                {
                    "interface": "OC",
                    "namespace_policy_dict": {
                        "type": "Single",
                        "namespacestore_dict": {"azure": [(1, None)]},
                    },
                },
                marks=[tier2],
            ),
            pytest.param(
                {
                    "interface": "OC",
                    "namespace_policy_dict": {
                        "type": "Single",
                        "namespacestore_dict": {"azure": [(1, None)]},
                    },
                },
                {
                    "interface": "CLI",
                    "backingstore_dict": {"gcp": [(1, None)]},
                },
                marks=[tier1],
            ),
        ],
        ids=[
            "AWStoAZURE-BS-OC",
            "GCPtoAWS-BS-OC",
            "AZUREtoCGP-BS-CLI",
            "AWStoAZURE-BS-CLI",
            "AWStoAZURE-NS-OC",
            "AZUREtoGCP-NS-Hybrid",
        ],
    )
    def test_unidirectional_bucket_replication(
        self,
        awscli_pod_session,
        mcg_obj_session,
        bucket_factory,
        source_bucketclass,
        target_bucketclass,
    ):
        """
        Test unidirectional bucket replication using CLI and YAML by adding objects
        to a backingstore- and namespacestore-backed buckets
        """
        # Target bucket first; the source bucket's replication policy points
        # at it by name.
        target_bucket_name = bucket_factory(bucketclass=target_bucketclass)[0].name
        replication_policy = ("basic-replication-rule", target_bucket_name, None)
        source_bucket_name = bucket_factory(
            1, bucketclass=source_bucketclass, replication_policy=replication_policy
        )[0].name
        full_object_path = f"s3://{source_bucket_name}"
        # NOTE(review): `ls -A1` emits one name per line; splitting on a
        # single space assumes exec_cmd_on_pod normalizes whitespace --
        # confirm against its implementation.
        standard_test_obj_list = awscli_pod_session.exec_cmd_on_pod(
            f"ls -A1 {AWSCLI_TEST_OBJ_DIR}"
        ).split(" ")
        sync_object_directory(
            awscli_pod_session, AWSCLI_TEST_OBJ_DIR, full_object_path, mcg_obj_session
        )
        written_objects = mcg_obj_session.s3_list_all_objects_in_bucket(
            source_bucket_name
        )
        assert set(standard_test_obj_list) == {
            obj.key for obj in written_objects
        }, "Needed uploaded objects could not be found"
        # NOTE(review): the comparison result is not asserted here --
        # presumably compare_bucket_object_list waits/asserts internally;
        # confirm, otherwise a mismatch would pass silently.
        compare_bucket_object_list(
            mcg_obj_session, source_bucket_name, target_bucket_name
        )

    @pytest.mark.parametrize(
        argnames=["source_bucketclass", "target_bucketclass"],
        argvalues=[
            pytest.param(
                {
                    "interface": "OC",
                    "namespace_policy_dict": {
                        "type": "Single",
                        "namespacestore_dict": {"aws": [(1, "eu-central-1")]},
                    },
                },
                {
                    "interface": "CLI",
                    "backingstore_dict": {"azure": [(1, None)]},
                },
                marks=[tier2, pytest.mark.polarion_id("OCS-2682")],
            ),
        ],
        ids=[
            "AZUREtoAWS-NS-Hybrid",
        ],
    )
    def test_unidirectional_namespace_bucket_replication(
        self,
        awscli_pod_session,
        mcg_obj_session,
        cld_mgr,
        bucket_factory,
        source_bucketclass,
        target_bucketclass,
        test_directory_setup,
    ):
        """
        Test unidirectional bucket replication by adding objects directly
        to the underlying storage bucket of namespacestore-backed bucket
        """
        target_bucket_name = bucket_factory(bucketclass=target_bucketclass)[0].name
        replication_policy = ("basic-replication-rule", target_bucket_name, None)
        source_bucket = bucket_factory(
            1, bucketclass=source_bucketclass, replication_policy=replication_policy
        )[0]
        source_bucket_name = source_bucket.name
        # The underlying cloud (ULS) bucket backing the namespacestore; objects
        # are written to it directly with the cloud credentials below.
        source_bucket_uls_name = source_bucket.bucketclass.namespacestores[0].uls_name
        namespacestore_aws_s3_creds = {
            "access_key_id": cld_mgr.aws_client.access_key,
            "access_key": cld_mgr.aws_client.secret_key,
            "endpoint": constants.AWS_S3_ENDPOINT,
            "region": source_bucketclass["namespace_policy_dict"][
                "namespacestore_dict"
            ]["aws"][0][1],
        }
        written_random_objects = write_random_test_objects_to_bucket(
            awscli_pod_session,
            source_bucket_uls_name,
            test_directory_setup.origin_dir,
            amount=5,
            s3_creds=namespacestore_aws_s3_creds,
        )
        listed_obejcts = mcg_obj_session.s3_list_all_objects_in_bucket(
            source_bucket_name
        )
        compare_bucket_object_list(
            mcg_obj_session, source_bucket_name, target_bucket_name
        )
        assert set(written_random_objects) == {
            obj.key for obj in listed_obejcts
        }, "Some of the uploaded objects are missing"

    @pytest.mark.parametrize(
        argnames=["first_bucketclass", "second_bucketclass"],
        argvalues=[
            pytest.param(
                {
                    "interface": "OC",
                    "backingstore_dict": {"aws": [(1, "eu-central-1")]},
                },
                {"interface": "OC", "backingstore_dict": {"azure": [(1, None)]}},
                marks=[tier1, pytest.mark.polarion_id("OCS-2683")],
            ),
        ],
        ids=[
            "AWStoAZURE-BS-OC",
        ],
    )
    def test_bidirectional_bucket_replication(
        self,
        awscli_pod_session,
        mcg_obj_session,
        bucket_factory,
        first_bucketclass,
        second_bucketclass,
        test_directory_setup,
    ):
        """
        Test bidirectional bucket replication using CLI and YAML
        """
        # The second bucket replicates to the first via its creation-time
        # policy; the reverse direction is patched onto the first bucket.
        first_bucket_name = bucket_factory(bucketclass=first_bucketclass)[0].name
        replication_policy = ("basic-replication-rule", first_bucket_name, None)
        second_bucket_name = bucket_factory(
            1, bucketclass=second_bucketclass, replication_policy=replication_policy
        )[0].name
        patch_replication_policy_to_bucket(
            first_bucket_name, "basic-replication-rule-2", second_bucket_name
        )
        standard_test_obj_list = awscli_pod_session.exec_cmd_on_pod(
            f"ls -A1 {AWSCLI_TEST_OBJ_DIR}"
        ).split(" ")
        # Write all downloaded objects to the bucket
        sync_object_directory(
            awscli_pod_session,
            AWSCLI_TEST_OBJ_DIR,
            f"s3://{first_bucket_name}",
            mcg_obj_session,
        )
        first_bucket_set = set(standard_test_obj_list)
        assert first_bucket_set == {
            obj.key
            for obj in mcg_obj_session.s3_list_all_objects_in_bucket(first_bucket_name)
        }, "Needed uploaded objects could not be found"
        compare_bucket_object_list(
            mcg_obj_session, first_bucket_name, second_bucket_name
        )
        # Now write to the second bucket and verify replication back to the
        # first; its expected contents are the union of both writes.
        written_objects = write_random_test_objects_to_bucket(
            awscli_pod_session,
            second_bucket_name,
            test_directory_setup.origin_dir,
            amount=5,
            mcg_obj=mcg_obj_session,
        )
        second_bucket_set = set(written_objects)
        second_bucket_set.update(standard_test_obj_list)
        assert second_bucket_set == {
            obj.key
            for obj in mcg_obj_session.s3_list_all_objects_in_bucket(second_bucket_name)
        }, "Needed uploaded objects could not be found"
        compare_bucket_object_list(
            mcg_obj_session, first_bucket_name, second_bucket_name
        )

    @pytest.mark.parametrize(
        argnames=["source_bucketclass", "target_bucketclass"],
        argvalues=[
            pytest.param(
                {
                    "interface": "OC",
                    "backingstore_dict": {"aws": [(1, "eu-central-1")]},
                },
                {"interface": "OC", "backingstore_dict": {"azure": [(1, None)]}},
                marks=[tier2, pytest.mark.polarion_id("OCS-2680")],
            ),
            pytest.param(
                {
                    "interface": "CLI",
                    "backingstore_dict": {"azure": [(1, None)]},
                },
                {
                    "interface": "CLI",
                    "backingstore_dict": {"aws": [(1, "eu-central-1")]},
                },
                marks=[tier2, pytest.mark.polarion_id("OCS-2684")],
            ),
            pytest.param(
                {
                    "interface": "CLI",
                    "backingstore_dict": {"aws": [(1, "eu-central-1")]},
                },
                {
                    "interface": "OC",
                    "backingstore_dict": {"gcp": [(1, None)]},
                },
                marks=[tier1],
            ),
        ],
        ids=["AWStoAZURE-BC-OC", "AZUREtoAWS-BC-CLI", "AWStoGCP-BC-Hybrid"],
    )
    def test_unidirectional_bucketclass_replication(
        self,
        awscli_pod_session,
        mcg_obj_session,
        bucket_factory,
        source_bucketclass,
        target_bucketclass,
    ):
        """
        Test unidirectional bucketclass replication using CLI and YAML
        """
        # Here the replication policy is set on the bucketclass itself rather
        # than passed to bucket_factory.
        target_bucket_name = bucket_factory(bucketclass=target_bucketclass)[0].name
        source_bucketclass["replication_policy"] = (
            "basic-replication-rule",
            target_bucket_name,
            None,
        )
        source_bucket_name = bucket_factory(1, bucketclass=source_bucketclass)[0].name
        full_object_path = f"s3://{source_bucket_name}"
        standard_test_obj_list = awscli_pod_session.exec_cmd_on_pod(
            f"ls -A1 {AWSCLI_TEST_OBJ_DIR}"
        ).split(" ")
        sync_object_directory(
            awscli_pod_session, AWSCLI_TEST_OBJ_DIR, full_object_path, mcg_obj_session
        )
        written_objects = mcg_obj_session.s3_list_all_objects_in_bucket(
            source_bucket_name
        )
        assert set(standard_test_obj_list) == {
            obj.key for obj in written_objects
        }, "Needed uploaded objects could not be found"
        compare_bucket_object_list(
            mcg_obj_session, source_bucket_name, target_bucket_name
        )

    @pytest.mark.parametrize(
        argnames=["source_bucketclass", "target_bucketclass"],
        argvalues=[
            pytest.param(
                {
                    "interface": "OC",
                    "backingstore_dict": {"aws": [(1, "eu-central-1")]},
                },
                {"interface": "OC", "backingstore_dict": {"aws": [(1, None)]}},
                marks=[tier1, pytest.mark.polarion_id("OCS-2685")],
            ),
        ],
        ids=[
            "AWStoAZURE-BS-OC",
        ],
    )
    def test_unidirectional_bucket_object_change_replication(
        self,
        awscli_pod_session,
        mcg_obj_session,
        bucket_factory,
        source_bucketclass,
        target_bucketclass,
        test_directory_setup,
    ):
        """
        Test unidirectional bucket replication when objects are changed
        """
        target_bucket_name = bucket_factory(bucketclass=target_bucketclass)[0].name
        replication_policy = ("basic-replication-rule", target_bucket_name, None)
        source_bucket = bucket_factory(
            1, bucketclass=source_bucketclass, replication_policy=replication_policy
        )[0]
        source_bucket_name = source_bucket.name
        origin_dir = test_directory_setup.origin_dir
        target_dir = test_directory_setup.result_dir
        # First write: 3 objects, verify replication and content integrity.
        written_random_objects = write_random_test_objects_to_bucket(
            awscli_pod_session,
            source_bucket_name,
            origin_dir,
            amount=3,
            mcg_obj=mcg_obj_session,
        )
        listed_obejcts = mcg_obj_session.s3_list_all_objects_in_bucket(
            source_bucket_name
        )
        compare_bucket_object_list(
            mcg_obj_session, source_bucket_name, target_bucket_name
        )
        assert set(written_random_objects) == {
            obj.key for obj in listed_obejcts
        }, "Some of the uploaded objects are missing"
        # Download the replicated objects from the target bucket for checksum
        # comparison against the originals.
        sync_object_directory(
            awscli_pod_session,
            f"s3://{target_bucket_name}",
            target_dir,
            mcg_obj_session,
        )
        (
            original_obj_sums,
            obj_sums_after_rewrite,
            obj_sums_after_rw_and_replication,
        ) = (
            [],
            [],
            [],
        )
        for i in range(3):
            original_obj_sums.append(
                cal_md5sum(
                    awscli_pod_session, f"{origin_dir}/ObjKey-{i}", raw_path=True
                )
            )
            assert verify_s3_object_integrity(
                f"{origin_dir}/ObjKey-{i}",
                f"{target_dir}/ObjKey-{i}",
                awscli_pod_session,
            ), "The uploaded and downloaded objects have different hashes"
        # Second write: overwrite the same keys (plus one extra object) and
        # verify the changed contents replicate.
        written_random_objects = write_random_test_objects_to_bucket(
            awscli_pod_session,
            source_bucket_name,
            origin_dir,
            amount=4,
            mcg_obj=mcg_obj_session,
        )
        compare_bucket_object_list(
            mcg_obj_session, source_bucket_name, target_bucket_name
        )
        awscli_pod_session.exec_cmd_on_pod(command=f"rm -rf {target_dir}")
        sync_object_directory(
            awscli_pod_session,
            f"s3://{target_bucket_name}",
            target_dir,
            mcg_obj_session,
        )
        for i in range(4):
            obj_sums_after_rewrite.append(
                cal_md5sum(
                    awscli_pod_session, f"{origin_dir}/ObjKey-{i}", raw_path=True
                )
            )
            obj_sums_after_rw_and_replication.append(
                cal_md5sum(
                    awscli_pod_session, f"{target_dir}/ObjKey-{i}", raw_path=True
                )
            )
        # Only the first 3 keys existed in both rounds, so only they are
        # compared against their pre-rewrite checksums.
        for i in range(3):
            assert (
                obj_sums_after_rewrite[i] == obj_sums_after_rw_and_replication[i]
            ), "Object change was not uploaded/downloaded correctly"
            assert (
                original_obj_sums[i] != obj_sums_after_rw_and_replication[i]
            ), "Object change was not replicated"
# Flask-RESTful application entry point: wires the database, CORS, migrations
# and the REST resources for users, items, carts and cart items.
import os
from flask import Flask
from flask_restful import Api
from flask_cors import CORS
from flask_migrate import Migrate
from models.db import db
# NOTE(review): these model names are immediately shadowed by the `resources`
# import below; presumably `models` is imported for its side effects
# (registering the SQLAlchemy models) -- confirm before removing.
from models import user, item, cart, cartItem
from resources import user, item, cart, cartItem
app = Flask(__name__)
CORS(app)
api = Api(app)
DATABASE_URL = os.getenv('DATABASE_URL')
if DATABASE_URL:
    # Production (e.g. Heroku): the platform supplies "postgres://..." URLs,
    # but SQLAlchemy 1.4+ only accepts "postgresql://"; rewrite the scheme.
    app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
    app.config['SQLALCHEMY_DATABASE_URI'] = DATABASE_URL.replace(
        "://", "ql://", 1)
    app.config['SQLALCHEMY_ECHO'] = False
    app.env = 'production'
else:
    # Local development: debug on, SQL echo on, local postgres database.
    app.debug = True
    app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
    app.config['SQLALCHEMY_DATABASE_URI'] = "postgresql://localhost:5432/zerowaste"
    app.config['SQLALCHEMY_ECHO'] = True
db.init_app(app)
migrate = Migrate(app, db)
# REST endpoints: collection routes and per-id detail routes.
api.add_resource(user.Users, "/api/users")
api.add_resource(user.UserInfo, "/api/users/<int:user_id>")
api.add_resource(item.Items, "/api/items")
api.add_resource(item.ItemInfo, "/api/items/<int:item_id>")
api.add_resource(cart.Carts, "/api/carts")
api.add_resource(cart.CartInfo, "/api/carts/<int:user_id>")
api.add_resource(cartItem.CartItems, "/api/cartitems")
api.add_resource(cartItem.CartItemInfo, "/api/cartitems/<int:cart_id>")
if __name__ == '__main__':
    app.run()
class Solution(object):
    """LeetCode 75 "Sort Colors": sort an array of 0/1/2 values in place."""

    def partition(self, lo, hi, pivot):
        """Move every element of self.nums[lo:hi] that is smaller than
        *pivot* to the front of the range; return the index one past the
        last moved element (the partition boundary)."""
        boundary = lo
        scan = lo
        while scan < hi:
            if self.nums[scan] < pivot:
                self.nums[boundary], self.nums[scan] = (
                    self.nums[scan],
                    self.nums[boundary],
                )
                boundary += 1
            scan += 1
        return boundary

    def sortColors(self, nums):
        """
        :type nums: List[int]
        :rtype: None Do not return anything, modify nums in-place instead.
        """
        self.nums = nums
        if len(nums) <= 1:
            return
        # Two passes: first gather the 0s (everything below 1), then the 1s
        # (everything below 2 in the remainder); the 2s end up at the back.
        mid = self.partition(0, len(nums), 1)
        self.partition(mid, len(nums), 2)
# encoding: utf-8
import tool_utils
import acmd.repo
# Re-export the most commonly used helpers at module level.
get_command = tool_utils.get_command
get_argument = tool_utils.get_argument
filter_system = tool_utils.filter_system
def init_default_tools(config=None):
    """Import the built-in acmd tools from the 'acmd.tools' package."""
    acmd.repo.import_tools(__file__, 'acmd.tools', prefix=None, config=config)
|
# 'Hello world'
print('Hello world \n')
# '1__2__4__8__16': join the powers of two with a double underscore,
# then an extra newline (equivalent to sep='__', end='\n\n').
print('__'.join(str(value) for value in (1, 2, 4, 8, 16)), end='\n\n')
# Two tab-padded box shapes drawn with a single literal.
print("#########\n#\t\t#\n#\t\t#\n#\t\t#\n#########\n\n#\t\t#\n#\t\t#\n#########\n#\t\t#\n#\t\t# \n")
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sun Jan 21 01:40:28 2018

@author: nick

Renames files in the current directory from American MM-DD-YYYY dates to
European DD-MM-YYYY dates.
"""
import shutil,os,re

# Match an American-style date anywhere in a filename:
# group 1 = prefix, 2 = month, 4 = day, 6 = year, 8 = suffix.
datePattern = re.compile(r"""^(.*?)
    ((0|1)?\d)-
    ((0|1|2|3)?\d)-
    ((19|20)\d\d)
    (.*?)$
    """,re.VERBOSE)

absWorkingDir = os.path.abspath('.')  # hoisted: invariant across the loop
for amerFilename in os.listdir('.'):
    mo = datePattern.search(amerFilename)
    if mo is None:  # idiom fix: compare with `is None`, not `== None`
        continue
    beforePart = mo.group(1)
    monthPart = mo.group(2)
    dayPart = mo.group(4)
    yearPart = mo.group(6)
    afterPart = mo.group(8)
    # Reassemble the name with day and month swapped.
    euroFilename = beforePart + dayPart + '-' + monthPart + '-' + yearPart + afterPart
    amerFilename = os.path.join(absWorkingDir,amerFilename)
    euroFilename = os.path.join(absWorkingDir,euroFilename)
    print('Renaming "%s" to "%s".....' % (amerFilename,euroFilename))
    shutil.move(amerFilename,euroFilename)
import json
import requests

# Query the e-Sheba train search API and pretty-print the JSON payload.
response = requests.get(
    "https://www.esheba.cnsbd.com/v1/trains?journey_date=2021-02-27&from_station=DA&to_station=KFJ&class=S_CHAIR&adult=1&child=0")
payload = response.json()
pretty = json.dumps(payload, indent=4)
print(pretty)
|
from selenium import webdriver
from webdriver_manager.chrome import ChromeDriverManager
from textformat import timeAndText
import time, config as c
# Check how to remove error messages
def initalizeDriver():
    """Create and return a Chrome WebDriver, headless when configured."""
    timeAndText('Initalizing driver')
    chrome_options = webdriver.ChromeOptions()
    if c.optionHeadless:
        chrome_options.headless = True
    driver = webdriver.Chrome(ChromeDriverManager().install(), options=chrome_options)
    timeAndText('Driver initalized successfully')
    return driver
def openPage(driver, url: str, pageName=None):
    """Navigate *driver* to *url*, optionally logging *pageName*, then pause
    briefly so the page can finish loading."""
    if pageName is not None:
        timeAndText('Opening {} page'.format(pageName))
    driver.get(url)
    time.sleep(3)
from PyQt5.QtCore import *
from PyQt5.QtGui import *
from PyQt5.QtWidgets import *
import copy
class AbstractCodeView(QTableView):
    """Base table view for machine-code listings.

    Subclasses are expected to provide ``ModelClass`` and override the
    ``updateVM``/``caChanged`` hooks -- confirm against the subclasses.
    """

    def init(self): # slot, not constructor!!!
        # Deferred initialisation: builds the model and wires double-click
        # to breakpoint toggling.
        self.code_model = self.ModelClass(parent = self)
        self.setModel(self.code_model) # add model to set size of header
        self.horizontalHeader().setStretchLastSection(True) # for column with source line
        self.setSelectionMode(QAbstractItemView.NoSelection)
        self.doubleClicked.connect(self.addBreakpoint)

    def addBreakpoint(self, index):
        # Delegate breakpoint toggling to the model at the clicked index.
        self.code_model.addBreakpoint(index)

    def setBreakpointSet(self, breaks):
        # Shared breakpoint set, re-attached to new models in resetVM().
        self.breaks = breaks

    def changeFont(self, new_font):
        # Resize the address/word columns to fit the new font's metrics.
        # NOTE(review): QFontMetrics.width() is deprecated in newer Qt;
        # horizontalAdvance() is the replacement.
        self.setFont(new_font)
        font_metrics = QFontMetrics(self.font())
        addr = font_metrics.width("0000")
        word = font_metrics.width("+ 0000 00 00 00")
        h_header = self.horizontalHeader()
        h_header.setStretchLastSection(True) # for column with source line
        h_header.resizeSection(h_header.logicalIndex(0), addr + 20)
        h_header.resizeSection(h_header.logicalIndex(1), word + 20)

    def snapshotMem(self):
        """Takes a copy of mem to do diff it after run"""
        self.snap_mem = copy.deepcopy(self.code_model.words)

    def resetVM(self, vm_data, asm_data):
        # Rebuild the model for a fresh VM state, preserving breakpoints.
        self.code_model = self.ModelClass(vm_data, asm_data, self)
        self.code_model.breaks = self.breaks
        self.setModel(self.code_model)
        self.caChanged()

    def updateVM(self, vm_data): # pure virtual
        pass

    def caChanged(self): # pure virtual
        pass

    def hook(self, item, old, new):
        # Forward VM change notifications to the model; refresh the view
        # when the current address moves.
        self.code_model.hook(item, old, new)
        if item == "cur_addr":
            self.caChanged()
|
# %%
import cv2
import numpy as np
# %%
img = cv2.imread('bookpage.jpg')
# Plain binary threshold on the colour image (low cutoff: the page is dark).
retval, threshold = cv2.threshold(img, 12, 255, cv2.THRESH_BINARY)
# Same threshold after converting to grayscale.
grayscaled = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
retval2, threshold2 = cv2.threshold(grayscaled, 12, 255, cv2.THRESH_BINARY)
# Adaptive (Gaussian) threshold copes with uneven lighting across the page.
gaus = cv2.adaptiveThreshold(
    grayscaled, 255, cv2.ADAPTIVE_THRESH_GAUSSIAN_C, cv2.THRESH_BINARY, 115, 1)
cv2.imshow('original', img)
cv2.imshow('threshold', threshold)
cv2.imshow('threshold2', threshold2)
cv2.imshow('gaussian', gaus)
# %%
cv2.waitKey(0)
# BUG FIX: the original referenced cv2.destroyAllWindows without calling it,
# so the display windows were never closed.
cv2.destroyAllWindows()
|
# is_male = True
# is_tall = False
# if is_male and is_tall:
# print("you are a male")
# elif is_male and not(is_tall):
# print("you are not a male and you are not tall")
# elif not(is_male) and is_tall:
# print("blah blah")
# else:
# print("you are a female")
def is_male(boolean):
    """Return a greeting string when *boolean* is truthy, else None.

    BUG FIX: the original tested ``if is_male:`` -- the function object
    itself, which is always truthy -- instead of the ``boolean`` argument,
    so the function ignored its input entirely.
    """
    if boolean:
        return "you are a male!!!!!"
    return None


results = is_male(True)
print(results)
|
from gw_app.nas_managers.mod_mikrotik import MikrotikTransmitter
from gw_app.nas_managers.core import NasNetworkError, NasFailedResult
from gw_app.nas_managers.structs import SubnetQueue
# Registry of the available gateway (NAS) implementations; consumed by the
# web interface to offer a choice of device types.
NAS_TYPES = (
    ('mktk', MikrotikTransmitter),
)
|
N = int(input())
A = [int(x) for x in input().split()]

# Accumulate the rotation angles modulo 360 and collect every distinct
# heading reached (0 is the starting heading).
headings = {0}
total = 0
for angle in A:
    total = (total + angle) % 360
    headings.add(total)

# The answer is the widest gap between consecutive headings, with 360
# appended so the wrap-around gap back to 0 is included.
ordered = sorted(headings)
ordered.append(360)
print(max(b - a for a, b in zip(ordered, ordered[1:])))
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Initial schema: creates the Course and Lesson models.

    NOTE: the verbose_name values are byte strings holding UTF-8-encoded
    Russian labels (e.g. "название" = "name"); they are left untouched
    because applied migration files must not be edited.
    """

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Course',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(max_length=255, verbose_name=b'\xd0\xbd\xd0\xb0\xd0\xb7\xd0\xb2\xd0\xb0\xd0\xbd\xd0\xb8\xd0\xb5')),
                ('short_description', models.CharField(max_length=255, verbose_name=b'\xd0\xba\xd1\x80\xd0\xb0\xd1\x82\xd0\xba\xd0\xbe\xd0\xb5 \xd0\xbe\xd0\xbf\xd0\xb8\xd1\x81\xd0\xb0\xd0\xbd\xd0\xb8\xd0\xb5')),
                ('description', models.TextField(verbose_name=b'\xd0\xbf\xd0\xbe\xd0\xbb\xd0\xbd\xd0\xbe\xd0\xb5 \xd0\xbe\xd0\xbf\xd0\xb8\xd1\x81\xd0\xb0\xd0\xbd\xd0\xb8\xd0\xb5')),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Lesson',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('subject', models.CharField(max_length=255, verbose_name=b'\xd1\x82\xd0\xb5\xd0\xbc\xd0\xb0')),
                ('description', models.TextField(verbose_name=b'\xd0\xbe\xd0\xbf\xd0\xb8\xd1\x81\xd0\xb0\xd0\xbd\xd0\xb8\xd0\xb5')),
                ('order', models.PositiveIntegerField(verbose_name=b'\xd0\xbd\xd0\xbe\xd0\xbc\xd0\xb5\xd1\x80 \xd0\xbf\xd0\xbe \xd0\xbf\xd0\xbe\xd1\x80\xd1\x8f\xd0\xb4\xd0\xba\xd1\x83')),
                ('course', models.ForeignKey(verbose_name=b'\xd0\xba\xd1\x83\xd1\x80\xd1\x81', to='courses.Course')),
            ],
            options={
            },
            bases=(models.Model,),
        ),
    ]
|
# Draw one sample from a GP prior with an RBF kernel and an identity mean,
# and plot it as a spatial "warp" (observed vs. warped coordinate).
import numpy as np
from scipy.stats import multivariate_normal as mvn
import matplotlib.pyplot as plt
import seaborn as sns
from sklearn.gaussian_process.kernels import RBF
import matplotlib
# Global plot styling (text.usetex requires a working LaTeX install).
font = {"size": 30}
matplotlib.rc("font", **font)
matplotlib.rcParams["text.usetex"] = True
# GP hyperparameters and the 1-D evaluation grid.
lengthscale = 1.0
amplitude = 1.0
noise_stddev = 1e-6
xlims = [-5, 5]
n = 100
X = np.linspace(xlims[0], xlims[1], n)
X = np.expand_dims(X, 1)
## Draw function
# Kernel matrix with a tiny diagonal jitter for numerical stability.
K_XX = amplitude * RBF(length_scale=lengthscale)(X, X) + noise_stddev * np.eye(n)
# Identity mean: the warp is centred on "no deformation".
mean = X.squeeze()
# mean = np.zeros(n)
# One sample path from the GP prior.
Y = mvn(mean, K_XX).rvs()
# import ipdb; ipdb.set_trace()
plt.figure(figsize=(7, 6))
plt.plot(X, Y, linewidth=5)
plt.xlabel("Observed spatial coordinate")
plt.ylabel("Warped spatial coordinate")
plt.title(r"$\sigma^2 = {}, \ell = {}$".format(amplitude, lengthscale))
plt.tight_layout()
plt.savefig("../../plots/mean_function_example.png")
plt.show()
|
def myfun(n):
    """Return a function that multiplies its argument by *n* (a closure)."""
    return lambda a: a * n


# FIX: the original defined myfun twice with an identical body; the duplicate
# added nothing, so it is removed. The calls (and their printed output:
# 22, 22, 33) are preserved exactly, including the re-binding of mydoubler.
mydoubler = myfun(2)
print(mydoubler(11))

mydoubler = myfun(2)
mytr = myfun(3)
print(mydoubler(11))
print(mytr(11))
|
import numpy as np
import torchvision.transforms as transforms
import torch.utils.data as tudata
from dataset.folder import ImageFolderInstance
# import pdb
class ToNumpy(object):
    """Transform that converts a PIL Image (or any array-like) to numpy."""

    def __call__(self, pic):
        """Return *pic* converted to a ``numpy.ndarray``."""
        converted = np.array(pic)
        return converted
class Normalize(object):
    """Transform that centres an image by subtracting a fixed mean.

    ``std`` is accepted for API symmetry with torchvision's Normalize but
    is stored without being applied (mean subtraction only).
    """

    def __init__(self, mean, std = None):
        self.mean = mean
        self.std = std

    def __call__(self, pic):
        """Return *pic* cast to float32 with ``self.mean`` subtracted."""
        centred = pic.astype(np.float32)
        centred -= self.mean
        return centred
def simple_collate(batch):
    """Collate a list of (image, label, ...) samples into two stacked
    numpy arrays: (images, labels)."""
    images = np.array([sample[0] for sample in batch])
    targets = np.array([sample[1] for sample in batch])
    return (images, targets)
def create_idx_to_class(train_dataset):
    """Attach the inverse of ``class_to_idx`` (index -> class name) to the
    dataset as ``idx_to_class``."""
    train_dataset.idx_to_class = {
        idx: name for name, idx in train_dataset.class_to_idx.items()
    }
class ImageNetReader(object):
    """
    Reads images and corresponding into a Tensorflow queue.
    """
    def __init__(self, data_dir, batch_size, input_size, num_workers, shuffle=True):
        # Builds a DataLoader over an ImageFolderInstance dataset that yields
        # numpy batches (via simple_collate) of mean-subtracted images.
        # data_dir: root folder in torchvision ImageFolder layout.
        #todo: replace with imagenet reader
        self.train_dataset = ImageFolderInstance(
            data_dir,
            transforms.Compose([
                # Near-identity crop: scale fixed at 1.0, mild aspect jitter.
                transforms.RandomResizedCrop(input_size,
                                             scale=(1.,1.),ratio=(0.9, 1.1)),
                #don't data-augment for now
                # transforms.RandomResizedCrop(input_size, scale=(0.2,1.)),
                # transforms.RandomGrayscale(p=0.1),
                # transforms.ColorJitter(0.4, 0.4, 0.4, 0.4),
                transforms.RandomHorizontalFlip(),
                ToNumpy(),
                # Per-channel mean in 0-255 RGB order -- presumably the
                # standard ImageNet pixel means; confirm channel order.
                Normalize(mean=np.array([122.675, 116.669, 104.008])),
            ]))
        create_idx_to_class(self.train_dataset)
        # i = 0
        # while True:
        #     print(self.train_dataset[i][1:3])
        #     i += 300
        # print(self.train_dataset[3][1:3])
        # print(self.train_dataset[15][1:3])
        self.train_loader = tudata.DataLoader(
            self.train_dataset,
            batch_size=batch_size, shuffle=shuffle,
            collate_fn=simple_collate,
            num_workers=num_workers, pin_memory=False
        )
        self.num_batches = len(self.train_loader)
        self.total_imgs = len(self.train_loader.sampler)
        # NOTE(review): a single iterator is created once; dequeue() raises
        # StopIteration at the end of the epoch -- confirm callers re-create
        # the reader (or expect exactly one pass).
        self.iter = iter(self.train_loader)
        # i = 0
        # while True:
        #     img, lab = self.dequeue()
        #     if i % 10 == 0:
        #         print(lab)
        #     i += 1
        #     print(i)
        # 1/0

    def dequeue(self):
        """Return the next (images, labels) numpy batch from the loader."""
        return next(self.iter)

    def get_idx_to_class(self):
        """Return the index -> class-name mapping built at construction."""
        return self.train_dataset.idx_to_class
from django.template import Library

register = Library()


@register.simple_tag
def admin_media_prefix():
    """
    Returns the string contained in the setting ADMIN_MEDIA_PREFIX,
    or '' if Django's settings cannot be imported.
    """
    try:
        from django.conf import settings
        return settings.ADMIN_MEDIA_PREFIX
    except ImportError:
        return ''
|
from geoserver.catalog import Catalog

# Connect to Catalog, with REST URL, user and password
# NOTE(review): credentials are hard-coded in source — move them to an
# environment variable or config file before committing/sharing.
cat = Catalog("http://tethys.icimod.org:8080/geoserver/rest/", "admin", "mapserver109#")
# NOTE(review): the literal "[HTML]" suffix looks like a copy/paste artifact
# from the GeoServer REST docs — verify the intended settings endpoint URL.
globalsetting = Catalog("http://tethys.icimod.org:8080/geoserver/rest/settings[HTML]", "admin", "mapserver109#")
# layerlist = cat.get_layers()
# NOTE(review): gsconfig's Catalog does not obviously expose a `status_code`
# attribute — confirm this prints what is intended.
print(globalsetting.status_code)
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# Computes a figure showing the amount of probes Ally, RadarGun, MIDAR and TreeNET will use to
# conduct alias resolution. The amounts are not necessarily the exact amount that will be used,
# they are more like predicted amounts that should however be faithful to experiment. The script
# requires a .fingerprint and .ip file per AS to compute the right quantities.
import numpy as np
import os
import sys
import math
from matplotlib import pyplot as plt
if __name__ == "__main__":
    # Expect three arguments: year, date and the path to a file listing ASes.
    if len(sys.argv) < 4:
        print("Use this command: python ProbeAmount.py [year] [date] [path to AS file]")
        sys.exit()

    yearOfMeasurements = str(sys.argv[1])
    dateOfMeasurements = str(sys.argv[2])
    ASFilePath = str(sys.argv[3])

    # Parses AS file
    if not os.path.isfile(ASFilePath):
        print("AS file does not exist")
        sys.exit()

    with open(ASFilePath) as f:
        ASesRaw = f.read().splitlines()

    # For this particular file, we do not class by type. We remove the :[type] part.
    ASes = []
    for i in range(0, len(ASesRaw)):
        splitted = ASesRaw[i].split(':')
        ASes.append(splitted[0])

    # Computes the required data
    correctlyParsedASes = []
    predictionsAlly = []
    predictionsMIDAR = []
    predictionsTreeNET = []
    dataPath = "/home/jefgrailet/PhD/Campaigns" # TODO: edit this
    for i in range(0, len(ASes)):
        dataFilePath = dataPath + "/" + ASes[i] + "/" + yearOfMeasurements + "/"
        dataFilePath += dateOfMeasurements + "/" + ASes[i] + "_" + dateOfMeasurements

        # Checks existence of the file
        if not os.path.isfile(dataFilePath + ".ip") or not os.path.isfile(dataFilePath + ".fingerprint"):
            print(dataFilePath + ".ip and/or .fingerprint do not exist")
            sys.exit()
        else:
            correctlyParsedASes.append(ASes[i])

        # Parses and counts IP entries
        with open(dataFilePath + ".ip") as f:
            ipEntries = f.read().splitlines()
        nbIPs = len(ipEntries)

        # Parses and counts fingerprints
        with open(dataFilePath + ".fingerprint") as f:
            fingerprints = f.read().splitlines()
        nbFingerprints = len(fingerprints)

        # Computes predicted amounts:
        # Ally probes every IP pair, MIDAR/RadarGun ~30 probes per IP,
        # TreeNET ~6 probes per fingerprint.
        predictionsAlly.append(nbIPs * nbIPs)
        predictionsMIDAR.append(30 * nbIPs)
        predictionsTreeNET.append(6 * nbFingerprints)

    ind = np.arange(len(correctlyParsedASes)) # The x locations
    width = 0.8
    center = 0.5
    padding = 0.1

    # Font for labels and ticks
    hfont = {'fontname':'serif',
             'fontweight':'bold',
             'fontsize':21}
    hfont2 = {'fontname':'serif',
              'fontsize':21}

    plt.figure(figsize=(11,7))

    # The three series share x positions; later bars are painted over the
    # earlier ones (an overlay, not a stacked chart).
    p1 = plt.bar(ind + padding, predictionsAlly, width, color='#F0F0F0')
    p2 = plt.bar(ind + padding, predictionsMIDAR, width, color='#D0D0D0')
    p3 = plt.bar(ind + padding, predictionsTreeNET, width, color='#888888')
    plt.xlabel('AS index', **hfont)
    plt.xlim([0,20])
    plt.xticks(ind + center, range(1,21,1), **hfont2)
    plt.ylabel('Amount of probes', **hfont)
    # NOTE(review): `nonposy` was renamed `nonpositive` in matplotlib 3.3 —
    # confirm the matplotlib version this is pinned to.
    plt.yscale('log', nonposy='clip')
    plt.rc('font', family='serif', size=15)

    plt.legend((p1[0], p2[0], p3[0]),
               ('Ally', 'MIDAR/RadarGun', 'TreeNET'),
               bbox_to_anchor=(0.05, 1.02, 0.90, .102),
               loc=3,
               ncol=4,
               mode="expand",
               borderaxespad=0.)

    # NOTE(review): the output name embeds ASFilePath verbatim; a path with
    # directory separators would make this an invalid/unexpected filename —
    # confirm a bare filename is always passed.
    plt.savefig("ProbeAmount_" + yearOfMeasurements + "_" + dateOfMeasurements + "_" + ASFilePath + ".pdf")
    plt.clf()
|
from traitlets import Bool, Float, Unicode, observe
from jdaviz.core.events import AddDataMessage, RemoveDataMessage, CanvasRotationChangedMessage
from jdaviz.core.registries import tray_registry
from jdaviz.core.template_mixin import PluginTemplateMixin, ViewerSelectMixin
from jdaviz.core.user_api import PluginUserApi
__all__ = ['Compass']
@tray_registry('imviz-compass', label="Compass")
class Compass(PluginTemplateMixin, ViewerSelectMixin):
    """
    See the :ref:`Compass Plugin Documentation <imviz-compass>` for more details.

    Only the following attributes and methods are available through the
    :ref:`public plugin API <plugin-apis>`:

    * :meth:`~jdaviz.core.template_mixin.PluginTemplateMixin.show`
    * :meth:`~jdaviz.core.template_mixin.PluginTemplateMixin.open_in_tray`
    * ``viewer`` (:class:`~jdaviz.core.template_mixin.ViewerSelect`):
      Viewer to show orientation/compass information.
    * ``data_label``: label of the top-layer shown in the compass (read-only)
    """
    template_file = __file__, "compass.vue"
    uses_active_status = Bool(True).tag(sync=True)

    # State synced to the vue template for rendering.
    icon = Unicode("").tag(sync=True)
    data_label = Unicode("").tag(sync=True)
    img_data = Unicode("").tag(sync=True)

    canvas_angle = Float(0).tag(sync=True)  # set by canvas rotation plugin
    canvas_flip_horizontal = Bool(False).tag(sync=True)  # set by canvas rotation plugin

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Redraw on data changes; mirror external canvas-rotation changes.
        self.hub.subscribe(self, AddDataMessage, handler=self._on_viewer_data_changed)
        self.hub.subscribe(self, RemoveDataMessage, handler=self._on_viewer_data_changed)
        self.hub.subscribe(self, CanvasRotationChangedMessage, handler=self._on_canvas_rotation_changed)  # noqa

    @property
    def user_api(self):
        """Public plugin API: the viewer selector plus a read-only data label."""
        return PluginUserApi(self, expose=('viewer',), readonly=('data_label',))

    def _on_viewer_data_changed(self, msg=None):
        """Force a redraw of the selected viewer when its data changes."""
        if self.viewer_selected:
            viewer = self.viewer.selected_obj
            viewer.on_limits_change()  # Force redraw

    def _on_canvas_rotation_changed(self, msg=None):
        """React to rotation changes, but only for the selected viewer."""
        viewer_id = msg.viewer_id
        if viewer_id != self.viewer_selected:
            return
        self._set_compass_rotation()

    def _set_compass_rotation(self):
        """Copy the selected viewer item's rotation/flip into the synced traits."""
        viewer_item = self.app._viewer_item_by_id(self.viewer.selected_id)
        self.canvas_angle = viewer_item.get('canvas_angle', 0)  # noqa
        self.canvas_flip_horizontal = viewer_item.get('canvas_flip_horizontal', False)

    @observe("viewer_selected", "is_active")
    def _compass_with_new_viewer(self, *args, **kwargs):
        """Attach the compass to the newly selected viewer, detach from others."""
        if not hasattr(self, 'viewer'):
            # mixin object not yet initialized
            return
        if not self.is_active:
            return

        # There can be only one!
        for vid, viewer in self.app._viewer_store.items():
            if vid == self.viewer.selected_id:
                viewer.compass = self
                viewer.on_limits_change()  # Force redraw
                self._set_compass_rotation()
            else:
                viewer.compass = None

    def clear_compass(self):
        """Clear the content of the plugin."""
        self.icon = ''
        self.data_label = ''
        self.img_data = ''

    def draw_compass(self, data_label, img_data):
        """Draw compass in the plugin.

        Input is rendered buffer from Matplotlib.
        """
        # Skip while the app is loading or the layer has no icon registered yet.
        if self.app.loading or (icn := self.app.state.layer_icons.get(data_label)) is None:
            return
        self.icon = icn
        self.data_label = data_label
        self.img_data = img_data
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11a1 on 2017-05-12 17:31
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Second migration for `main`: replace the initial lowercase models with
    new CamelCase models.

    NOTE(review): the old tables are simply dropped (DeleteModel below); any
    rows they contained are NOT copied into the new tables.
    """

    dependencies = [
        ('main', '0001_initial'),
    ]

    operations = [
        # New-style models.
        migrations.CreateModel(
            name='ArrivalMessage',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('time', models.DateField(auto_now_add=True)),
                ('userID', models.BigIntegerField()),
                ('bookID', models.BigIntegerField()),
            ],
        ),
        migrations.CreateModel(
            name='Book',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('bookID', models.BigIntegerField()),
                ('ISBN', models.CharField(max_length=20)),
                ('bookName', models.CharField(max_length=20)),
                ('bookType', models.CharField(max_length=10)),
                ('price', models.IntegerField()),
            ],
        ),
        migrations.CreateModel(
            name='FavoredBook',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('userID', models.BigIntegerField()),
                ('bookID', models.BigIntegerField()),
            ],
        ),
        migrations.CreateModel(
            name='StudentUser',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('userID', models.BigIntegerField(unique=True)),
                ('userName', models.CharField(max_length=20)),
                ('password', models.CharField(max_length=20)),
                ('name', models.CharField(max_length=20)),
                ('mailBox', models.EmailField(max_length=254)),
                ('phoneNumber', models.CharField(max_length=20)),
            ],
        ),
        migrations.CreateModel(
            name='SubscribedBook',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('userID', models.BigIntegerField()),
                ('bookID', models.BigIntegerField()),
            ],
        ),
        # Drop the superseded models from 0001_initial.
        migrations.DeleteModel(
            name='allBook',
        ),
        migrations.DeleteModel(
            name='favor',
        ),
        migrations.DeleteModel(
            name='message',
        ),
        migrations.DeleteModel(
            name='student_users',
        ),
        migrations.DeleteModel(
            name='subscribeBooks',
        ),
    ]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
import os
import requests
import cherrypy
from base64 import b64encode
from girder import events
from girder.utility.webroot import Webroot
from girder.utility.model_importer import ModelImporter
from girder.plugins.minerva.rest import \
analysis, dataset, session, \
wms_dataset, geojson_dataset, wms_styles, feature
from girder.plugins.minerva.utility.minerva_utility import decryptCredentials
from girder.plugins.minerva.utility.cookie import getExtraHeaders
class WmsProxy(object):
    """CherryPy-exposed GET proxy for WMS requests.

    Forwards the query string to *url*, optionally attaching HTTP Basic auth
    decrypted from a `minerva_credentials` parameter, and mirrors the
    upstream content type back to the client.
    """
    exposed = True

    def GET(self, url, **params):
        headers = getExtraHeaders()
        if 'minerva_credentials' in params:
            # Credentials travel encrypted; strip them before proxying.
            creds = params['minerva_credentials']
            del params['minerva_credentials']
            # NOTE(review): 'Basic ' + b64encode(...) concatenates str with
            # bytes on Python 3 — this path looks Python 2 only; confirm.
            auth = 'Basic ' + b64encode(decryptCredentials(bytes(creds)))
            headers['Authorization'] = auth
        r = requests.get(url, params=params, headers=headers)
        # Propagate the upstream content type so clients render it correctly.
        cherrypy.response.headers['Content-Type'] = r.headers['content-type']
        return r.content
def validate_settings(event):
    """Validate minerva specific settings.

    NOTE(review): currently a stub — it reads the key/value off the event
    but performs no validation and never raises.
    """
    key = event.info['key']
    val = event.info['value']
def load(info):
    """Girder plugin entry point: install Minerva's webroot, bind events,
    register REST endpoints, and (in test mode) serve debug static routes."""
    # Load the mako template for Minerva and serve it as the root document.
    minerva_mako = os.path.join(os.path.dirname(__file__), "minerva.mako")
    minerva_webroot = Webroot(minerva_mako)
    minerva_webroot.updateHtmlVars(info['serverRoot'].vars)
    minerva_html_vars = {'title': 'Minerva', 'externalJsUrls': []}
    minerva_webroot.updateHtmlVars(minerva_html_vars)

    def add_downstream_plugin_js_urls(downstream_plugin_js_urls):
        """ Allow additional external JS resources to be loaded from downstream plugins. """
        minerva_html_vars.setdefault('externalJsUrls', []).extend(downstream_plugin_js_urls.info)
        minerva_webroot.updateHtmlVars(minerva_html_vars)
    events.bind('minerva.additional_js_urls', 'minerva', add_downstream_plugin_js_urls)

    # Move girder app to /girder, serve minerva app from /
    info['serverRoot'], info['serverRoot'].girder = (minerva_webroot,
                                                     info['serverRoot'])
    info['serverRoot'].api = info['serverRoot'].girder.api

    events.bind('model.setting.validate', 'minerva', validate_settings)

    # REST endpoints for datasets, analyses, sessions, WMS styles/datasets,
    # GeoJSON datasets and feature info.
    info['apiRoot'].minerva_dataset = dataset.Dataset()
    info['apiRoot'].minerva_analysis = analysis.Analysis()
    info['apiRoot'].minerva_session = session.Session()
    info['apiRoot'].minerva_datasets_wms = wms_dataset.WmsDataset()
    info['apiRoot'].minerva_style_wms = wms_styles.Sld()
    info['apiRoot'].minerva_dataset_geojson = geojson_dataset.GeojsonDataset()
    info['apiRoot'].minerva_get_feature_info = feature.FeatureInfo()
    info['serverRoot'].wms_proxy = WmsProxy()

    # If we are started up in testing mode, then serve minerva's sources as well
    # for debugging client tests.
    if '/test' in info['config']:
        info['config']['/src/minerva'] = {
            'tools.staticdir.on': True,
            'tools.staticdir.dir': 'plugins/minerva/web_external'
        }
        info['config']['/test/minerva'] = {
            'tools.staticdir.on': True,
            'tools.staticdir.dir': 'plugins/minerva/plugin_tests/client'
        }
|
from tkinter import *
import random
from tkinter.messagebox import showinfo
# Main window for the arithmetic drill.
top = Tk()
top.title("덧셈 및 뺄셈 학습")  # "Addition and subtraction practice"
class Ed(Frame):
    """Addition/subtraction drill: shows a random question in one entry,
    checks the answer typed into the other, and counts attempts."""

    def __init__(self, top):
        Frame.__init__(self, top)
        self.pack()
        self.calc = Entry(self)   # displays the question, e.g. "3+4"
        self.res = Entry(self)    # the user types the answer here
        self.bt = Button(self, command=self.check, text="Enter", width=20)
        self.cnt = 0              # attempts for the current question
        self.question()
        self.calc.pack(padx=100)
        self.res.pack(padx=100)
        self.bt.pack()
        self.Enter()

    def question(self):
        """Insert a new random question whose result is never negative."""
        a = random.randrange(1, 10)
        b = random.randrange(1, 10)
        o = random.choice(['+', '-'])
        # Swap operands so subtraction never yields a negative answer.
        if o == '-' and a - b < 0:
            self.calc.insert(0, str(b) + o + str(a))
        else:
            self.calc.insert(0, str(a) + o + str(b))

    def check(self, event=None):
        """Check the typed answer against the displayed question.

        Fixed: `event` now defaults to None — the Button `command` calls
        check() with no arguments, which previously raised TypeError; the
        <Return> binding passes an event and still works.
        """
        self.cnt = self.cnt + 1
        # The expression is generated by question(), so eval is safe here.
        result = eval(self.calc.get())
        if int(self.res.get()) == result:
            # Fixed message typo ("Yot" -> "You").
            showinfo(message='You got it! ' + str(self.cnt) + "회 만에 성공")
            self.cnt = 0
            self.calc.delete(0, END)
            self.question()
        else:
            showinfo(message='Try again')
        # Clear the answer box for the next attempt/question.
        self.res.delete(0, END)

    def Enter(self):
        """Bind the Return key so pressing Enter also checks the answer."""
        self.res.bind("<Return>", self.check)
# NOTE(review): Ed.__init__ already calls self.pack(); the extra .pack()
# here is redundant but harmless.
Ed(top).pack()
top.mainloop()
|
from mechanics.events import Trigger
from mechanics.events import DamageEvent, BuffAppliedEvent
from game_objects.attributes import Bonus, Attribute
from game_objects.battlefield_objects import CharAttributes as ca
from mechanics.buffs import Buff
def battle_rage_callback(t,e:DamageEvent):
    """Trigger callback: with probability t.chance, apply an 8-duration
    "rage" buff to the damaged unit, boosting STR/END/AGI and lowering PER."""
    chance = t.chance
    # Use the game's RNG so outcomes stay reproducible/seedable.
    if e.game.random.random() < chance:
        BuffAppliedEvent(
            Buff(8, bonus=Bonus(
                # NOTE(review): "STREINGTH" spelling is taken as-is from the
                # project's CharAttributes enum.
                {ca.STREINGTH: Attribute(1, 5, 0),
                 ca.ENDURANCE: Attribute(1, 5, 0),
                 ca.AGILITY: Attribute(1, 5, 0),
                 ca.PERCEPTION: Attribute(-1, -5, 0)})),
            e.target)
def battle_rage(unit, chance):
    """Create a Trigger giving *unit* a chance-based rage buff whenever it
    takes damage. *chance* must lie in (0, 1]."""
    assert 0 < chance <= 1
    rage_trigger = Trigger(
        DamageEvent,
        platform=unit.game.events_platform,
        conditions=[lambda trigger, event: event.target.uid == unit.uid],
        callbacks=[battle_rage_callback],
    )
    rage_trigger.chance = chance
    return rage_trigger
# calc energy of a certain configuration
import numpy as np
class energyCalc():
    """Bending-energy calculator for a triangle mesh.

    A mesh is given as `verts` (indexable collection of 3D points) and
    `faces` (triples of vertex indices). The bending energy follows a
    discrete mean-curvature formulation: E = 2*kapa * sum_v M_v^2 / A_v.
    """

    def __init__(self):
        # Bending rigidity (prefactor of the energy).
        self.kapa = 1.0

    def createFaceList(self, vert, faces):
        """Return all faces that contain vertex *vert*."""
        faceList = []
        for faceIndex in range(len(faces)):
            if (faces[faceIndex][0] == int(vert) or faces[faceIndex][1] == int(vert) or faces[faceIndex][2] == int(vert)):
                faceList.append(faces[faceIndex])
        return faceList

    def createEdgeList(self, vert, faceList):
        """Return the neighbour vertices of *vert*, one per incident edge."""
        edgeList = []
        for faceIndex in range(len(faceList)):
            for i in range(3):
                if faceList[faceIndex][i] != vert:
                    edgeList.append(faceList[faceIndex][i])
        # Eliminate duplicates: each interior edge is seen from two faces.
        edgeList = list(set(edgeList))
        return edgeList

    def calcPhi(self, vert1, edgeListIndex, faceList, verts1):
        """Angle between the unit normals of the two triangles sharing the
        edge (vert1, edgeListIndex); pi for a boundary edge."""
        i = verts1[vert1]
        j = verts1[edgeListIndex]
        ijVec = np.subtract(i, j)
        # Find the remaining vertices (one per face adjacent to the edge).
        mnlist = []
        for a in range(len(faceList)):
            if faceList[a][0] == edgeListIndex or faceList[a][1] == edgeListIndex or faceList[a][2] == edgeListIndex:
                for b in range(3):
                    if faceList[a][b] != vert1 and faceList[a][b] != edgeListIndex:
                        mnlist.append(faceList[a][b])
        if len(mnlist) < 2:
            # Boundary edge: only one adjacent face.
            phi = np.pi
        else:
            m = verts1[mnlist[0]]
            n = verts1[mnlist[1]]
            inVec = np.subtract(i, n)
            imVec = np.subtract(i, m)
            # Normals of the two faces; guard against degenerate (zero-area)
            # faces to avoid division by zero.
            n1 = np.cross(imVec, ijVec)
            norm1 = np.linalg.norm(n1)
            if norm1 <= 0.0000001:
                norm1 = 1.0
            n1 = n1 / norm1
            n2 = np.cross(ijVec, inVec)
            norm2 = np.linalg.norm(n2)
            if norm2 <= 0.0000002:
                norm2 = 1.0
            n2 = n2 / norm2
            # Clamp the dot product so arccos stays defined under round-off.
            test = np.dot(n1, n2)
            if test >= 1.0:
                test = 0.99999
            if test <= -1.0:
                test = -0.99999
            phi = np.arccos(test)
        return phi

    def calcLength(self, vert, edge, verts):
        """Return the Euclidean length of the edge (vert, edge)."""
        i = verts[vert]
        j = verts[edge]
        ijVec = np.subtract(i, j)
        length = np.linalg.norm(ijVec)
        return length

    def calcM(self, vertex, faces, verts):
        """Mean-curvature contribution of *vertex*: 0.25 * sum_e phi_e * len_e.

        Fixed: the loop previously indexed ``edgeList[0]`` on every
        iteration, so only the first edge was ever measured; it now iterates
        over every incident edge.
        """
        M = 0.0
        faceList = self.createFaceList(vertex, faces)
        edgeList = self.createEdgeList(vertex, faceList)
        for i in range(len(edgeList)):
            # Angle between the two faces sharing this edge...
            phi = self.calcPhi(vertex, edgeList[i], faceList, verts)
            # ...weighted by the edge length.
            length = self.calcLength(vertex, edgeList[i], verts)
            M += phi * length
        M *= 0.25
        return M

    def calcAreaTriangel(self, face, verts):
        """Area of a triangle face, via Heron's formula."""
        A = verts[face[0]]
        B = verts[face[1]]
        C = verts[face[2]]
        a = np.linalg.norm(np.subtract(B, C))
        b = np.linalg.norm(np.subtract(A, C))
        c = np.linalg.norm(np.subtract(A, B))
        s = (a + b + c) * 0.5
        area = np.sqrt(s * (s - a) * (s - b) * (s - c))
        return area

    def calcA(self, vertex, faces, verts):
        """Vertex area: one third of the total area of its incident faces."""
        A = 0.0
        faceList = self.createFaceList(vertex, faces)
        for i in range(len(faceList)):
            A += self.calcAreaTriangel(faceList[i], verts)
        A *= 1.0 / 3.0
        return A

    def calcEbend(self, verts, faces):
        """Total bending energy 2*kapa * sum_v M_v^2 / A_v, skipping vertices
        with (numerically) zero area."""
        Ebend = 0.0
        for i in range(len(verts)):
            M = self.calcM(i, faces, verts)
            A = self.calcA(i, faces, verts)
            if A < 0.0000000000001:
                Ebend += 0.0
            else:
                Ebend += M * M / A
        Ebend *= 2 * self.kapa
        return Ebend
# Draw a 3x4 rectangle of asterisks, one character at a time.
# (The commented-out alternative `print("*" * 4)` builds each row at once.)
for i in range(3):
    # print("*" * 4)  # print one full row at once
    for j in range(4):
        # Fixed: end="9" printed a stray '9' after every star; an empty
        # end keeps the stars on the same line without extra characters.
        print("*", end="")
    print()
|
from collections import deque
def find_miner(grid):
    """Return (row, col) of the miner marker "s", or None if absent."""
    for row_idx, row in enumerate(grid):
        for col_idx, cell in enumerate(row):
            if cell == "s":
                return row_idx, col_idx
def left_coal(grid):
    """Return the number of coal cells ("c") remaining in the grid."""
    return sum(row.count("c") for row in grid)
# Read the square field size, the list of moves, then the field rows.
field_size = int(input())
commands = deque(input().split())
field = [input().split() for _ in range(field_size)]

while commands:
    direction = commands.popleft()
    row, col = find_miner(field)
    # Tentatively vacate the current cell; restored below if the move is invalid.
    field[row][col] = "*"
    if direction == "up" and row-1 in range(len(field)):
        row -= 1
    elif direction == "down" and row+1 in range(len(field)):
        row += 1
    elif direction == "left" and col-1 in range(len(field[0])):
        col -= 1
    elif direction == "right" and col+1 in range(len(field[0])):
        col += 1
    else:
        # Move would leave the field: put the miner back and skip the command.
        field[row][col] = "s"
        continue
    # Stepping on "e" ends the game immediately.
    if field[row][col] == "e":
        print(f"Game over! ({row}, {col})")
        exit()
    # Entering a cell collects any coal in it ("c" is overwritten by "s").
    field[row][col] = "s"
    if not left_coal(field):
        print(f"You collected all coal! ({row}, {col})")
        exit()

# Commands exhausted without winning or losing: report what's left.
row, col = find_miner(field)
remaining_coal = left_coal(field)
print(f"{remaining_coal} pieces of coal left. ({row}, {col})")
|
"""imports"""
from django.contrib import admin
from django.urls import include, path
from . import views
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.conf.urls.static import static
from django.conf import settings
urlpatterns = [
path('admin/', admin.site.urls),
path('', views.homepage, name='landing'),
path('about/', views.about, name='about'),
path('articles/', include('articles.urls')),
path('accounts/', include('accounts.urls')),
]
if settings.DEBUG:
urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
from drink import Drink
from food import Food

# Build a sample food item and print its info summary.
food1 = Food('Sandwich', 5)
food1.calorie_count = 330
print(food1.info())

# Same for a drink.
drink1 = Drink('Coffee', 3)
drink1.volume = 180
print(drink1.info())
|
from nvd3 import multiBarChart,multiBarHorizontalChart
import json
import requests
import gmplot
# Cumulated="https://services6.arcgis.com/bKYAIlQgwHslVRaK/arcgis/rest/services/Cumulative_Date_Grouped_ViewLayer/FeatureServer/0/query?where=1%3D1&outFields=*&outSR=4326&f=json"
# RdataCumulated= requests.get(Cumulated).json()
# CumulatedJson = json.dumps(RdataCumulated, sort_keys=True, indent=4) # Beautify the JSON
# ArcGIS feature service with the daily per-place case counts.
DailyCases="https://services6.arcgis.com/bKYAIlQgwHslVRaK/arcgis/rest/services/VWPlacesCasesHostedView/FeatureServer/0/query?where=1%3D1&outFields=*&outSR=4326&f=json"
# NOTE(review): this HTTP request runs at import time — importing the module
# requires network access and can fail/hang offline.
RdataDaily = requests.get(DailyCases).json()
DailyJson = json.dumps(RdataDaily, sort_keys=True, indent=4) # Beautify the JSON
# Add all the cases together
def Total():
    """Aggregate the latest per-city figures from RdataDaily, write
    Total.json, a Google-Maps heatmap (map.html) and an nvd3 bar chart
    (Visiual.html), and print overall totals."""
    # Collect each distinct city once, together with its coordinates.
    All_city = []
    All_Geo_x = []
    All_Geo_y = []
    for city in range(len(RdataDaily['features'])):
        Name_Eng = RdataDaily['features'][city]['attributes']["Name_Eng"]
        GEOx = RdataDaily['features'][city]['geometry']['x']
        GEOy = RdataDaily['features'][city]['geometry']['y']
        if Name_Eng not in All_city:
            All_city.append(Name_Eng)
            All_Geo_x.append(GEOx)
            All_Geo_y.append(GEOy)

    # Latest report date. This is loop-invariant, so it is computed once here
    # instead of re-scanning every feature for every city (was O(n^2)).
    date2 = []
    for cases in range(len(RdataDaily['features'])):
        date = RdataDaily['features'][cases]['attributes']["Reportdt"]
        if date not in date2:
            date2.append(date)
    date_now = max(date2)

    # Per-city totals for the latest date only.
    All_confirmed = []
    All_Deaths = []
    All_Recovered = []
    for city in range(len(All_city)):
        Confirmed = 0
        Deaths = 0
        Recovered = 0
        for cases in range(len(RdataDaily['features'])):
            check = RdataDaily['features'][cases]['attributes']["Name_Eng"]
            date = RdataDaily['features'][cases]['attributes']["Reportdt"]
            if All_city[city] == check and date == date_now:
                Deaths += RdataDaily['features'][cases]['attributes']["Deaths"]
                Recovered += RdataDaily['features'][cases]['attributes']["Recovered"]
                Confirmed += RdataDaily['features'][cases]['attributes']["Confirmed"]
        All_confirmed.append(Confirmed)
        All_Deaths.append(Deaths)
        All_Recovered.append(Recovered)

    # Build the JSON summary and the grand totals in one pass.
    # (Removed: unused All_Tested list and an unused list of 0.009 radii.)
    rec = 0
    deaths = 0
    conf = 0
    Total_file = open('Total.json', 'w+')
    All_info2 = []
    for cases in range(len(All_city)):
        a = {"city": All_city[cases],
             "Deaths": All_Deaths[cases],
             "Recovered": All_Recovered[cases],
             "Confirmed": All_confirmed[cases]}
        All_info2.append(dict(a))
        conf += All_confirmed[cases]
        rec += All_Recovered[cases]
        deaths += All_Deaths[cases]
    print(f"Total Confirmed: {str(conf)}")
    print(f"Total Recovered: {str(rec)}")
    print(f"Total Deaths: {str(deaths)}")
    Total_file.write(json.dumps(All_info2))
    Total_file.close()

    # Keep only cities with at least one confirmed case for the heatmap.
    New_Geo_x = []
    New_Geo_y = []
    for Infected in range(len(All_info2)):
        infction = All_info2[Infected]['Confirmed']
        if infction != 0:
            New_Geo_x.append(All_Geo_x[Infected])
            New_Geo_y.append(All_Geo_y[Infected])

    gmap5 = gmplot.GoogleMapPlotter(23.68363, 45.76787, 6)
    gmap5.heatmap(New_Geo_y, New_Geo_x, radius=20, gradient=[(0, 0, 255, 0), (0, 255, 0, 0.9), (255, 0, 0, 1)])
    gmap5.draw('map.html')

    # Recovered vs. confirmed per city as a grouped bar chart.
    Chart_all = open('Visiual.html', 'w+')
    chart = multiBarChart(width=3000, height=1000, x_axis_format=None)
    chart.add_serie(name="Recovered 1", y=All_Recovered, x=All_city)
    chart.add_serie(name="Infected 2", y=All_confirmed, x=All_city)
    chart.buildhtml()
    Chart_all.write(chart.htmlcontent)
    Chart_all.close()
# Entry point: aggregate the data and emit Total.json / map.html / Visiual.html.
Total()
|
# Slide 128
# Compute and display the area of a circle with a fixed radius.
radius = 20
area = 3.14 * radius ** 2  # pi approximated as 3.14, per the slide
print("The area for the circle of radius ", radius, "is", area)
|
from tkinter import *
from tkmacosx import Button
from tkinter import filedialog
from tkinter import messagebox
import pandas as pd
import os
import tkinter as tk
import cmath
# Main application window.
root= Tk(className = " Receipt Scrapper")

# Declared canvas — fixed 300x400 dark surface hosting all widgets.
canvas1 = Canvas(root
                 , width = 300
                 , height = 400
                 , bg = '#474749'
                 , relief = 'raised')
canvas1.pack()

# Declared label — application title, placed on the canvas below.
label1 = Label(root
               , text='The Receipt Scrapper'
               , bg = '#474749'
               , fg="white")
label1.config(font=('helvetica', 20))
canvas1.create_window(150, 60, window=label1)
# Function declaration of the browse_button_img
def get_img():
    """Let the user pick an image file and record its full path, basename,
    and containing directory in the module-level globals used elsewhere."""
    global image_path
    global image_name
    global path
    image_path = filedialog.askopenfilename()
    image_name = os.path.basename(image_path)
    path = image_path.split(image_name)[0]
# Creating a photoimage object to use image
# NOTE(review): absolute user-specific path — this breaks on any other
# machine; consider a path relative to the application directory.
photo = PhotoImage(file="/Users/jonathanhald/Documents/SAI/receipt_scanner/Receipt_Scanner/dist/icons/upload.png")
# Resizing image to fit on button
photoimage = photo.subsample(9, 9)

# Button to allow the user to select an image
browse_button_img = Button(root
                           , height = 40
                           , width = 200
                           , image = photoimage
                           , text=" Import Image File"
                           , command=get_img
                           , fg='white'
                           , bg='#1db954'
                           , font=('helvetica', 12, 'bold')
                           ,compound=LEFT)
browse_button_img.pack()
canvas1.create_window(150, 150, window=browse_button_img)
# Function declaration of the convert_excel
def convert_excel():
    """Run tesseract OCR on the selected image and convert the resulting
    text file to an .xlsx spreadsheet next to the image.

    Relies on globals set by get_img (image_name, path) and exports
    new_file_name for open_file.
    """
    global new_file_name
    new_file_name = 'poppy'
    # Change into the image's directory so tesseract finds the input and
    # writes its output there. Fixed: the previous os.system('cd\\ ') and
    # os.system('cd ' + path) calls ran in throwaway subshells and never
    # affected this process — os.chdir is what actually takes effect.
    os.chdir(path)
    os.system('tesseract ' + image_name + ' ' + new_file_name)
    # A multi-character sep forces pandas' python engine so each OCR line is
    # kept as a single column value; then export as an Excel workbook.
    read_file = pd.read_csv(path + new_file_name + '.txt', sep='delimiter', header=None)
    read_file.to_excel(path + new_file_name + '.xlsx', index=None, header=True)
# NOTE(review): absolute user-specific icon path — breaks on other machines.
photo2 = PhotoImage(file="/Users/jonathanhald/Documents/SAI/receipt_scanner/Receipt_Scanner/dist/icons/convert.png")
# Resizing image to fit on button
photoimage2 = photo2.subsample(7, 7)

# Button that takes the selected image file and inputs it into the CNN
# to have the information extracted from it. It will then convert the
# new file with the extracted information into a csv and then excel file.
button_excel = Button(root
                      , height = 40
                      , width = 200
                      , image = photoimage2
                      , text=' Convert to Excel'
                      , command=convert_excel
                      , bg='#1db954'
                      , fg='white'
                      , font=('helvetica', 12, 'bold')
                      , compound=LEFT)
button_excel.pack()
canvas1.create_window(150, 210, window=button_excel)
# Function declaration to open the new excel file that was created
def open_file():
    """Open the generated spreadsheet with macOS's `open` command.

    Requires convert_excel() to have run first (it sets new_file_name).
    """
    os.system(f"open {new_file_name}.xlsx")
# NOTE(review): absolute user-specific icon path — breaks on other machines.
photo3 = PhotoImage(file="/Users/jonathanhald/Documents/SAI/receipt_scanner/Receipt_Scanner/dist/icons/open.png")
# Resizing image to fit on button
photoimage3 = photo3.subsample(17, 17)

# Button to open the newly created excel file
open_file_excel = Button(root
                         ,height = 40
                         ,width = 200
                         ,image = photoimage3
                         ,text=' Open File '
                         ,command=open_file
                         ,bg='#1db954'
                         ,fg='white'
                         ,font=('helvetica', 12, 'bold')
                         ,compound=LEFT)
open_file_excel.pack()
canvas1.create_window(150, 270, window=open_file_excel)
# Function declaration to exit the program
def exit_application():
    """Ask for confirmation and destroy the main window on 'yes'."""
    answer = messagebox.askquestion(
        'Exit Application',
        'Are you sure you want to exit the application',
        icon='warning')
    if answer == 'yes':
        root.destroy()
# Button to exit the program (red, to distinguish it from the action buttons)
exit_button = Button (root
                      , width = 200
                      , height = 40
                      , text=' Exit Application '
                      ,command=exit_application
                      , bg='#ec002d'
                      , fg='white'
                      , font=('helvetica', 12
                      , 'bold'))
canvas1.create_window(150, 330, window=exit_button)

# Start the Tk event loop.
root.mainloop()
|
def sparse_dot_product(dict1, dict2):
    """Return the dot product of two sparse vectors stored as {index: value}.

    Only indices present in both vectors contribute; a missing index is an
    implicit zero.

    Fixed: removed leftover debug prints of both operand lists, and replaced
    the O(n*m) nested scan with a single pass using dict lookup.
    """
    return sum(value * dict2[index]
               for index, value in dict1.items()
               if index in dict2)
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# @author: Longxing Tan, tanlongxing888@163.com
# @date: 2020-01
# paper:
# use UniLM language model, but in time series, the feature dimension is not equally like token embedding of NLP
import tensorflow as tf
from ..layers.attention_layer import *
class BERT(object):
    """Placeholder for a UniLM-style language model adapted to time series.

    NOTE(review): unimplemented stub — per the module header, the feature
    dimension of time series is not a token-embedding lookup as in NLP.
    """
    def __init__(self):
        pass
|
import cv2
import face_recognition
import sys
'''
# PIL做图比较复杂,不使用PIL库
from PIL import Image
ret = Image.open('similar_two_face.jpg')
print(ret)
'''
'''
ret:
<PIL.JpegImagePlugin.JpegImageFile image mode=RGB size=440x571 at 0x7F0D5F9FD810>
<PIL.JpegImagePlugin.JpegImageFile image mode=RGB size=440x571 at 0x7F0D7D201390>
'''
# 1. Read the image contents
face_image = face_recognition.load_image_file(file='images/similar_three_faces.jpg')  # loads the image as per-pixel values
# print(face_image)
# 2. Extract feature vectors describing the facial features
face_encodings = face_recognition.face_encodings(face_image=face_image)  # one 128-dimensional encoding per detected face
# print(face_encodings)  # one array per face, collected in a list
# print(len(face_encodings))
if len(face_encodings) > 2:
    print("超过两张脸,该程序暂不支持多张人脸的比对!")
    sys.exit()
# NOTE(review): if fewer than two faces are detected, face_encodings[1]
# below raises IndexError — confirm inputs always contain two faces.
# 3. Get the coordinates of the faces
face_locations = face_recognition.face_locations(img=face_image)  # face bounding boxes
# print(face_locations)  # a top-left and bottom-right corner fix one face, e.g. [(64, 201, 219, 46), (67, 339, 196, 210)]
# 4. Take the two face encodings separately
face01 = face_encodings[0]
face02 = face_encodings[1]
# print('face01:', face01)  # list-like, [ 1.82563066e-02  2.23452318e-02 -4.11391482e-02 -8.67729932e-02, ...]
# print('face02:', face02)
# 5. Compare the two faces
ret = face_recognition.compare_faces(known_face_encodings=[face01], face_encoding_to_check=face02,
                                     tolerance=0.35)  # ret: list; a smaller tolerance means stricter matching
print(ret)
if ret == [True]:
    print("识别结果是同一个人!")
    flag = "Yes!"
else:
    print("识别结果不是同一个人!")
    flag = "No"
# 6. Open the image with OpenCV and annotate each face
for i in range(len(face_encodings)):
    # (alternative: index from the back)
    # face_encoding = face_encodings[i - 1]
    # face_locations = face_locations[i - 1]
    # index from the front
    face_encoding = face_encodings[i]
    face_location = face_locations[i]
    top, right, bottom, left = face_location  # convenient for cv2 rectangle drawing
    # print(top, right, bottom, left)  # top, right, bottom, left
    '''
    64 201 219 46
    67 339 196 210
    '''
    # 7. Draw the bounding box
    cv2.rectangle(img=face_image, pt1=(left, top), pt2=(right, bottom), color=(0, 255, 0),
                  thickness=1)  # thickness: line width
    # 8. Write the Yes/No label above the box
    cv2.putText(img=face_image, text=flag, org=(left - 10, top - 10), fontFace=cv2.FONT_HERSHEY_SIMPLEX, fontScale=0.8,
                color=(255, 255, 0), thickness=2)
# 9. Convert channel order for rendering
# NOTE(review): face_recognition returns RGB while cv2.imshow expects BGR;
# COLOR_BGR2RGB performs the same channel swap, so the display is correct.
face_image_rgb = cv2.cvtColor(face_image, cv2.COLOR_BGR2RGB)
# 10. Show the image
cv2.imshow('demo', face_image_rgb)
# 11. Block until a key press closes the window
cv2.waitKey(0)
|
from django.db import models
class Articulo(models.Model):
    """Article model: title, body text, image and timestamps."""
    titulo = models.CharField(max_length=150)
    contenido = models.CharField(max_length=150)
    imagen = models.ImageField(upload_to = 'articulos')
    # NOTE(review): "creted" looks like a typo for "created"; renaming the
    # field requires a schema migration, so it is only flagged here.
    creted = models.DateTimeField(auto_now_add=True)
    # NOTE(review): auto_now_add sets this once at creation and never again —
    # an "updated" timestamp usually wants auto_now=True; confirm intent
    # (changing it also requires a migration).
    updated =models.DateTimeField(auto_now_add=True)

    class Meta:
        verbose_name = 'articulo'
        verbose_name_plural = 'articulos'

    def __str__(self) :
        return self.titulo
|
import numpy as np
import collections
import sys
import csv
import keras
import json
from keras import initializers
from keras.models import Sequential, load_model
from keras.layers import Dense, Dropout, Embedding, LSTM, Bidirectional
from keras.preprocessing import sequence
from keras.callbacks import EarlyStopping
# Command-line arguments: input test file and output prediction CSV.
#test_data = "/home/yao/workspace/ML_data/hw4/testing_data.txt"
#prediction_data = "prediction.csv"
test_data = sys.argv[1]
prediction_data = sys.argv[2]

# ---------------------------------------------------------------------------
# Read test data.
# Each line is "<id>,<word> <word> ...". The first line is a header row and
# is dropped after parsing. Files are opened with `with` so handles are
# closed even on error (the original left them open on exception).
# ---------------------------------------------------------------------------
ID_test = []
X_test = []
with open(test_data, 'r') as fin:
    for line in fin:
        row_id, text = line.split(',', 1)
        ID_test.append(row_id)
        # Bug fix: the original sliced off the last character unconditionally
        # to drop the newline, which corrupts a final line that has no
        # trailing newline. rstrip('\n') only removes an actual newline.
        X_test.append(text.rstrip('\n').split(' '))
ID_test = ID_test[1:]  # drop header row
X_test = X_test[1:]

# ---------------------------------------------------------------------------
# Load the word -> index dictionary that was built at training time.
# ---------------------------------------------------------------------------
with open('dic.txt', 'r') as f:
    dic = json.load(f)

# Map each token to its dictionary index; out-of-vocabulary words map to 0.
X_test = [[dic.get(word, 0) for word in sentence] for sentence in X_test]

# Pad/truncate every sequence to a fixed length of 100 tokens.
X_test = sequence.pad_sequences(X_test, maxlen=100)

# ---------------------------------------------------------------------------
# Predict with the pre-trained sentiment model (trained offline:
# Embedding + 2x Bidirectional LSTM + sigmoid head, saved as model.h5).
# ---------------------------------------------------------------------------
model = load_model('model.h5')
prediction = model.predict(X_test)

# Threshold the sigmoid outputs at 0.5 to get binary labels.
ans = [1 if prediction[x] >= 0.5 else 0 for x in range(len(X_test))]
print(len(ans))

# Write the submission CSV: one "id,label" row per test example.
with open(prediction_data, 'w') as fout:
    fout.write('id,label\n')
    for idx, label in enumerate(ans):
        fout.write(str(idx) + ',' + str(label) + '\n')
|
"""Module to do recommendations by user. """
import matplotlib.pyplot as plt
import pandas as pd
import tensorflow as tf
from pyframework.container import Container
from pyframework.helpers.configuration import is_production_env
from pyframework.helpers.lists import array_column
from tensorflow import keras
from tensorflow.keras import layers
from .sentimental_analyser import SentimentalAnalyser
from ..models.restaurant import Restaurant
from ..models.review import Review
def normalize(x, stats):
    """Z-score *x* using the 'mean' and 'std' entries of *stats*."""
    centered = x - stats['mean']
    return centered / stats['std']
def stats_from_dataset(train_dataset):
    """Summarize every feature column of *train_dataset*.

    Drops the 'stars' target column from the summary and returns the
    transposed describe() frame (one row per feature, columns such as
    'mean' and 'std').

    :param train_dataset: pandas DataFrame containing a 'stars' column.
    :return: transposed summary statistics DataFrame.
    """
    summary = train_dataset.describe()
    summary.pop("stars")
    return summary.transpose()
class PrintDot(keras.callbacks.Callback):
    """Training-progress callback: prints one dot per completed batch,
    inserting a line break every 100 batches to keep output readable.
    """

    def on_batch_end(self, batch, logs=None):
        if not batch % 100:
            print('')
        print('.', end='')
class RestaurantRecommender:
    """Predicts per-user restaurant star ratings with a small dense
    regression network, to back recommendations.
    """

    MODEL_NAME = 'tripadvisor_stars.h5'  # File name of the persisted model.

    EPOCHS = 1000  # Number of epochs used to train the network.

    _model = None  # Keras model, loaded from storage or freshly trained.

    def __init__(self):
        self._load_model_from_storage()

    def train(self, cities: list):
        """Train a new model with review data from *cities*.

        :param cities: city identifiers whose restaurant reviews feed the model.
        :return:
        """
        # Get data.
        reviews = self._get_reviews(cities)
        data_set = self.prepare_data_set(reviews)

        # Split train and test data (80/20, fixed seed for reproducibility).
        train_dataset = data_set.sample(frac=0.8, random_state=0)
        test_dataset = data_set.drop(train_dataset.index)

        # Per-feature mean/std computed on the *training* split only,
        # so the test split does not leak into normalization.
        stats = stats_from_dataset(train_dataset)

        # Split objective labels.
        train_labels = train_dataset.pop('stars')
        test_labels = test_dataset.pop('stars')

        # Normalize data.
        normalized_train_dataset = normalize(train_dataset, stats)
        normalized_test_dataset = normalize(test_dataset, stats)

        # Bug fix: a stray `aa = self.predict(normalized_test_dataset)` used
        # to run here against the previously *stored* model (before the new
        # one was built) and discarded the result; removed.

        # Create new model.
        self._model = self.build_model(train_dataset)

        # Fit model. verbose=0 + PrintDot gives compact progress output.
        history = self._model.fit(
            normalized_train_dataset,
            train_labels,
            epochs=self.EPOCHS,
            validation_split=0.2,
            verbose=0,
            callbacks=[PrintDot()]
        )

        # Save model after train it.
        self.save_model()

        # Plot train detail (non-production environments only).
        self._plot_train_history(history)

        # Report accuracy on the held-out split.
        self._test_model(normalized_test_dataset, test_labels)

    def _load_model_from_storage(self):
        """Load the persisted model from storage to avoid retraining.

        :return:
        """
        path = self._model_path()
        self._model = tf.keras.models.load_model(path)

    def save_model(self):
        """Save the entire model to an HDF5 file.

        The '.h5' extension indicates that the model should be saved to HDF5.

        :return:
        """
        path = self._model_path()
        self._model.save(path)

    def prepare_data_set(self, reviews: list):
        """Turn raw review dicts into a numeric DataFrame ready for training.

        Adds a sentiment flag per review, a subjective quality score per
        restaurant, and year/month columns; drops non-numeric fields.

        :param reviews: list of review dicts (mutated in place).
        :return: pandas DataFrame.
        """
        sentimental_analyzer = SentimentalAnalyser()

        # Fill sentiment of each review from its English text.
        sentiments = [sentimental_analyzer.classify(text)
                      for text in array_column(reviews, 'english')]
        for sentiment, review in zip(sentiments, reviews):
            review['sentiment'] = int(sentiment)

        # Fill subjective qualities of restaurants.
        qualities = self._get_quality_per_restaurant(reviews)
        for review in reviews:
            review['entityStars'] = qualities[str(review['entity_id'])]

        # Split visit dates (assumed 'YYYYMM...' strings — format per the
        # slicing below) into numeric year/month columns.
        for review in reviews:
            visit_date = review['visitDate']
            year = visit_date[0:4]
            month = visit_date[4:6]
            review['year'] = int(year)
            review['month'] = int(month)

        # Delete fields the network cannot consume.
        for review in reviews:
            del review['_id']
            del review['text']
            del review['english']
            del review['visitDate']

        data_set = pd.DataFrame(reviews)

        return data_set

    def predict(self, normalized_dataset, target_name='target'):
        """Do predictions on dataset. Appends a new column named
        *target_name* to a copy of the input dataset.

        Returns the new data set; the input is left unmodified.

        :param normalized_dataset: feature frame, already normalized.
        :param target_name: name of the prediction column to add.
        :return: pandas DataFrame copy with predictions.
        """
        predictions = self._model.predict(normalized_dataset).flatten()

        result = normalized_dataset.copy()
        result[target_name] = predictions

        return result

    @staticmethod
    def build_model(data_set):
        """Create a new, untrained regression model sized to *data_set*.

        :param data_set: DataFrame whose columns define the input width.
        :return: compiled Keras model.
        """
        model = keras.Sequential([
            layers.Dense(64, activation='relu', input_shape=[len(data_set.keys())]),
            layers.Dense(64, activation='relu'),
            layers.Dense(1),
        ])

        optimizer = tf.keras.optimizers.RMSprop(0.001)

        model.compile(
            loss='mse',
            optimizer=optimizer,
            metrics=['mae', 'mse']
        )

        return model

    @staticmethod
    def _get_quality_per_restaurant(reviews: list):
        """Calculate subjective quality for each restaurant using sentiments.

        Quality is the fraction of positive reviews scaled to a 0-5 range.

        :param reviews: review dicts carrying 'entity_id' and 'sentiment'.
        :return: dict mapping str(restaurant id) -> quality score.
        """
        result = {}
        restaurants = {doc['entity_id'] for doc in reviews}
        for restaurant in restaurants:
            selected = [item for item in reviews if item['entity_id'] == restaurant]
            positive = [item for item in selected if str(item['sentiment']) == SentimentalAnalyser.POSITIVE]

            # Over 5.
            quality = 5 * (len(positive) / len(selected))

            result[str(restaurant)] = quality

        return result

    @staticmethod
    def _get_reviews(cities: list):
        """Return all reviews for the restaurants of every given city.

        :param cities:
        :return:
        """
        restaurants = []
        restaurant_obj = Restaurant()
        reviews_obj = Review()
        # reviews_obj.translate()

        for city in cities:
            restaurants += restaurant_obj.get_restaurants_on_city(city)
        restaurants_ids = array_column(restaurants, 'id')

        reviews = reviews_obj.get_from_restaurants(restaurants_ids)

        return reviews

    def _test_model(self, normalized_test_dataset, test_labels):
        """Evaluate the model on the held-out test set and report metrics.

        :param normalized_test_dataset: normalized feature frame.
        :param test_labels: true star ratings.
        :return: tuple (loss, mae, mse).
        """
        loss, mae, mse = self._model.evaluate(normalized_test_dataset, test_labels, verbose=2)
        # Bug fix: these metrics were previously computed and silently
        # discarded; now surfaced to the caller and the console.
        print("Test set -> loss: {:.4f}, mae: {:.4f}, mse: {:.4f}".format(loss, mae, mse))
        return loss, mae, mse

    @staticmethod
    def _plot_train_history(history):
        """Plot evolution of error in each epoch of train.

        Skipped entirely in production environments.

        :param history: Keras History object returned by fit().
        :return:
        """
        if not is_production_env():
            hist = pd.DataFrame(history.history)
            hist['epoch'] = history.epoch

            plt.figure()
            plt.xlabel('Epoch')
            plt.ylabel('Mean Abs Error [stars]')
            plt.plot(hist['epoch'], hist['mae'], label='Train Error')
            plt.plot(hist['epoch'], hist['val_mae'], label='Val Error')
            plt.ylim([0, 5])
            plt.legend()

            plt.figure()
            plt.xlabel('Epoch')
            plt.ylabel('Mean Square Error [$stars^2$]')
            plt.plot(hist['epoch'], hist['mse'], label='Train Error')
            plt.plot(hist['epoch'], hist['val_mse'], label='Val Error')
            plt.ylim([0, 5])
            plt.legend()
            plt.show()

    def _model_path(self) -> str:
        """Returns the path where model must be stored.

        :return:
        """
        return Container().data_path() + '/' + self.MODEL_NAME
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.