code
stringlengths 13
6.09M
| order_type
stringclasses 2
values | original_example
dict | step_ids
listlengths 1
5
|
|---|---|---|---|
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
config.read('dwh.cfg')
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
config = configparser.ConfigParser()
config.read('dwh.cfg')
drop_schema = 'DROP SCHEMA IF EXISTS sparkifydb;'
set_search_path = 'SET SEARCH_PATH to sparkifydb;'
staging_events_table_drop = 'DROP TABLE IF EXISTS staging_events;'
staging_songs_table_drop = 'DROP TABLE IF EXISTS staging_songs;'
songplay_table_drop = 'DROP TABLE IF EXISTS songplay;'
user_table_drop = 'DROP TABLE IF EXISTS sparkifydb.users;'
song_table_drop = 'DROP TABLE IF EXISTS sparkifydb.songs;'
artist_table_drop = 'DROP TABLE IF EXISTS sparkifydb.artists;'
time_table_drop = 'DROP TABLE IF EXISTS sparkifydb.time;'
create_sparkify_schema = 'CREATE SCHEMA IF NOT EXISTS sparkifydb;'
staging_events_table_create = """
CREATE TABLE staging_events
(
event_id int identity(0,1) SORTKEY,
artist_name text NULL DISTKEY,
auth text NULL,
firstName text NULL,
gender varchar(5) NULL,
itemInSession bigint NULL,
lastName text NULL,
length double precision NULL,
level text NULL,
location text NULL,
method text NULL,
page text NULL,
registration text NULL,
sessionId bigint NULL,
song text NULL,
status int NULL,
ts text NULL,
userAgent text NULL,
userId bigint
);
"""
staging_songs_table_create = """
CREATE TABLE staging_songs
(
num_songs int,
artist_id varchar(255) DISTKEY,
artist_latitude varchar(255) NULL,
artist_longitude varchar(255) NULL,
artist_location varchar(255) NULL,
artist_name text NOT NULL,
song_id varchar(255) SORTKEY NOT NULL,
title text NOT NULL,
duration double precision NOT NULL,
year int NULL
);
"""
songplay_table_create = """
CREATE TABLE songplay
(
songplay_id int identity(0,1) PRIMARY KEY SORTKEY NOT NULL,
start_time timestamp NOT NULL,
user_id text NOT NULL,
level text,
song_id text NOT NULL,
artist_id text NOT NULL DISTKEY,
session_id text,
location text,
user_agent text);
"""
user_table_create = """
CREATE TABLE users(
user_id bigint PRIMARY KEY SORTKEY NOT NULL ,
first_name text,
last_name text,
gender varchar(10),
level text
)diststyle all;
"""
song_table_create = """
CREATE TABLE songs(
song_id varchar(255) SORTKEY PRIMARY KEY NOT NULL,
artist_id text NOT NULL,
year int,
duration double precision,
level text
)diststyle all;
"""
artist_table_create = """
CREATE TABLE artists(
artist_id text PRIMARY KEY SORTKEY,
artist_name text,
location text,
lattitude text,
longitude text
) diststyle all;
"""
time_table_create = """
CREATE TABLE time(
start_time timestamp PRIMARY KEY SORTKEY,
hour int,
day int,
week int,
month int,
year int,
weekday int
) diststyle all;
"""
staging_events_copy = (
"""copy staging_events from '{}'
credentials 'aws_iam_role={}'
compupdate off
region 'us-west-2'
JSON '{}'
"""
.format(config['S3']['LOG_DATA'], config['IAM_ROLE']['ARN'], config[
'S3']['LOG_JSONPATH']))
staging_songs_copy = (
"""copy staging_songs from '{}'
credentials 'aws_iam_role={}'
compupdate off
region 'us-west-2'
JSON 'auto'
"""
.format(config['S3']['SONG_DATA'], config['IAM_ROLE']['ARN']))
songplay_table_insert = """
INSERT INTO songplay(start_time,user_id,level,song_id,artist_id,session_id,location,user_agent)
SELECT
TIMESTAMP 'epoch' + se.ts/1000 * INTERVAL '1 Second ' AS start_time,
se.userId AS user_id,
se.level AS level,
ss.song_id AS song_id,
ss.artist_id AS artist_id,
se.sessionId AS session_id,
ss.artist_location AS location,
se.userAgent AS user_agent
FROM staging_songs AS ss
JOIN staging_events AS se ON (ss.title=se.song AND ss.artist_name=se.artist_name)
AND
se.page = 'NextSong';
"""
user_table_insert = """
INSERT INTO users(user_id,first_name,last_name,gender,level)
SELECT DISTINCT(s.userId) AS user_id,
s.firstName AS first_name,
s.lastName AS last_name,
s.gender AS gender,
s.level AS level
FROM
staging_events as s
WHERE s.page = 'NextSong'
"""
song_table_insert = """
INSERT INTO songs (song_id,artist_id,year, duration)
SELECT DISTINCT(ss.song_id) AS song_id,
ss.artist_id AS artist_id,
ss.year AS year,
ss.duration AS duration
FROM
staging_songs AS ss
"""
artist_table_insert = """
INSERT INTO artists (artist_id,artist_name,location,lattitude,longitude)
SELECT DISTINCT(s.artist_id) AS artist_id,
s.artist_name AS artist_name,
s.artist_location AS location,
s.artist_latitude AS lattitude,
s.artist_longitude AS longitude
FROM
staging_songs AS s;
"""
time_table_insert = """
INSERT INTO time (start_time,hour,day,week,month,year,weekday)
SELECT DISTINCT(TIMESTAMP 'epoch' + s.ts/1000 * INTERVAL '1 Second ') AS start_time,
EXTRACT(HOUR from start_time) AS hour,
EXTRACT(DAY from start_time) AS day,
EXTRACT(WEEK from start_time) AS week,
EXTRACT(MONTH from start_time) AS month,
EXTRACT(YEAR from start_time) AS year,
EXTRACT(DOW from start_time) AS weekday
FROM
staging_events AS s
WHERE
s.page = 'NextSong';
"""
create_table_queries = [set_search_path, songplay_table_create,
user_table_create, song_table_create, artist_table_create,
time_table_create, staging_events_table_create, staging_songs_table_create]
drop_table_queries = [create_sparkify_schema, set_search_path,
staging_events_table_drop, staging_songs_table_drop,
songplay_table_drop, user_table_drop, song_table_drop,
artist_table_drop, time_table_drop]
copy_table_queries = [set_search_path, staging_events_copy, staging_songs_copy]
insert_table_queries = [set_search_path, user_table_insert,
song_table_insert, artist_table_insert, time_table_insert,
songplay_table_insert]
<|reserved_special_token_1|>
import configparser
config = configparser.ConfigParser()
config.read('dwh.cfg')
drop_schema = 'DROP SCHEMA IF EXISTS sparkifydb;'
set_search_path = 'SET SEARCH_PATH to sparkifydb;'
staging_events_table_drop = 'DROP TABLE IF EXISTS staging_events;'
staging_songs_table_drop = 'DROP TABLE IF EXISTS staging_songs;'
songplay_table_drop = 'DROP TABLE IF EXISTS songplay;'
user_table_drop = 'DROP TABLE IF EXISTS sparkifydb.users;'
song_table_drop = 'DROP TABLE IF EXISTS sparkifydb.songs;'
artist_table_drop = 'DROP TABLE IF EXISTS sparkifydb.artists;'
time_table_drop = 'DROP TABLE IF EXISTS sparkifydb.time;'
create_sparkify_schema = 'CREATE SCHEMA IF NOT EXISTS sparkifydb;'
staging_events_table_create = """
CREATE TABLE staging_events
(
event_id int identity(0,1) SORTKEY,
artist_name text NULL DISTKEY,
auth text NULL,
firstName text NULL,
gender varchar(5) NULL,
itemInSession bigint NULL,
lastName text NULL,
length double precision NULL,
level text NULL,
location text NULL,
method text NULL,
page text NULL,
registration text NULL,
sessionId bigint NULL,
song text NULL,
status int NULL,
ts text NULL,
userAgent text NULL,
userId bigint
);
"""
staging_songs_table_create = """
CREATE TABLE staging_songs
(
num_songs int,
artist_id varchar(255) DISTKEY,
artist_latitude varchar(255) NULL,
artist_longitude varchar(255) NULL,
artist_location varchar(255) NULL,
artist_name text NOT NULL,
song_id varchar(255) SORTKEY NOT NULL,
title text NOT NULL,
duration double precision NOT NULL,
year int NULL
);
"""
songplay_table_create = """
CREATE TABLE songplay
(
songplay_id int identity(0,1) PRIMARY KEY SORTKEY NOT NULL,
start_time timestamp NOT NULL,
user_id text NOT NULL,
level text,
song_id text NOT NULL,
artist_id text NOT NULL DISTKEY,
session_id text,
location text,
user_agent text);
"""
user_table_create = """
CREATE TABLE users(
user_id bigint PRIMARY KEY SORTKEY NOT NULL ,
first_name text,
last_name text,
gender varchar(10),
level text
)diststyle all;
"""
song_table_create = """
CREATE TABLE songs(
song_id varchar(255) SORTKEY PRIMARY KEY NOT NULL,
artist_id text NOT NULL,
year int,
duration double precision,
level text
)diststyle all;
"""
artist_table_create = """
CREATE TABLE artists(
artist_id text PRIMARY KEY SORTKEY,
artist_name text,
location text,
lattitude text,
longitude text
) diststyle all;
"""
time_table_create = """
CREATE TABLE time(
start_time timestamp PRIMARY KEY SORTKEY,
hour int,
day int,
week int,
month int,
year int,
weekday int
) diststyle all;
"""
staging_events_copy = (
"""copy staging_events from '{}'
credentials 'aws_iam_role={}'
compupdate off
region 'us-west-2'
JSON '{}'
"""
.format(config['S3']['LOG_DATA'], config['IAM_ROLE']['ARN'], config[
'S3']['LOG_JSONPATH']))
staging_songs_copy = (
"""copy staging_songs from '{}'
credentials 'aws_iam_role={}'
compupdate off
region 'us-west-2'
JSON 'auto'
"""
.format(config['S3']['SONG_DATA'], config['IAM_ROLE']['ARN']))
songplay_table_insert = """
INSERT INTO songplay(start_time,user_id,level,song_id,artist_id,session_id,location,user_agent)
SELECT
TIMESTAMP 'epoch' + se.ts/1000 * INTERVAL '1 Second ' AS start_time,
se.userId AS user_id,
se.level AS level,
ss.song_id AS song_id,
ss.artist_id AS artist_id,
se.sessionId AS session_id,
ss.artist_location AS location,
se.userAgent AS user_agent
FROM staging_songs AS ss
JOIN staging_events AS se ON (ss.title=se.song AND ss.artist_name=se.artist_name)
AND
se.page = 'NextSong';
"""
user_table_insert = """
INSERT INTO users(user_id,first_name,last_name,gender,level)
SELECT DISTINCT(s.userId) AS user_id,
s.firstName AS first_name,
s.lastName AS last_name,
s.gender AS gender,
s.level AS level
FROM
staging_events as s
WHERE s.page = 'NextSong'
"""
song_table_insert = """
INSERT INTO songs (song_id,artist_id,year, duration)
SELECT DISTINCT(ss.song_id) AS song_id,
ss.artist_id AS artist_id,
ss.year AS year,
ss.duration AS duration
FROM
staging_songs AS ss
"""
artist_table_insert = """
INSERT INTO artists (artist_id,artist_name,location,lattitude,longitude)
SELECT DISTINCT(s.artist_id) AS artist_id,
s.artist_name AS artist_name,
s.artist_location AS location,
s.artist_latitude AS lattitude,
s.artist_longitude AS longitude
FROM
staging_songs AS s;
"""
time_table_insert = """
INSERT INTO time (start_time,hour,day,week,month,year,weekday)
SELECT DISTINCT(TIMESTAMP 'epoch' + s.ts/1000 * INTERVAL '1 Second ') AS start_time,
EXTRACT(HOUR from start_time) AS hour,
EXTRACT(DAY from start_time) AS day,
EXTRACT(WEEK from start_time) AS week,
EXTRACT(MONTH from start_time) AS month,
EXTRACT(YEAR from start_time) AS year,
EXTRACT(DOW from start_time) AS weekday
FROM
staging_events AS s
WHERE
s.page = 'NextSong';
"""
create_table_queries = [set_search_path, songplay_table_create,
user_table_create, song_table_create, artist_table_create,
time_table_create, staging_events_table_create, staging_songs_table_create]
drop_table_queries = [create_sparkify_schema, set_search_path,
staging_events_table_drop, staging_songs_table_drop,
songplay_table_drop, user_table_drop, song_table_drop,
artist_table_drop, time_table_drop]
copy_table_queries = [set_search_path, staging_events_copy, staging_songs_copy]
insert_table_queries = [set_search_path, user_table_insert,
song_table_insert, artist_table_insert, time_table_insert,
songplay_table_insert]
<|reserved_special_token_1|>
import configparser
# CONFIG
config = configparser.ConfigParser()
config.read('dwh.cfg')  # expects [S3] and [IAM_ROLE] sections -- consumed by the COPY statements below

# DROP TABLES
# Idempotent DROP ... IF EXISTS statements so the pipeline can be re-run from scratch.
# NOTE(review): some drops are schema-qualified (sparkifydb.*) and some are not --
# the unqualified ones rely on set_search_path having been executed first.
drop_schema="DROP SCHEMA IF EXISTS sparkifydb;"
set_search_path="SET SEARCH_PATH to sparkifydb;"
staging_events_table_drop = "DROP TABLE IF EXISTS staging_events;"
staging_songs_table_drop = "DROP TABLE IF EXISTS staging_songs;"
songplay_table_drop = "DROP TABLE IF EXISTS songplay;"
user_table_drop = "DROP TABLE IF EXISTS sparkifydb.users;"
song_table_drop ="DROP TABLE IF EXISTS sparkifydb.songs;"
artist_table_drop = "DROP TABLE IF EXISTS sparkifydb.artists;"
time_table_drop = "DROP TABLE IF EXISTS sparkifydb.time;"
#CREATE SCHEMA
# All star-schema tables live in a dedicated sparkifydb schema.
create_sparkify_schema="CREATE SCHEMA IF NOT EXISTS sparkifydb;"
# CREATE TABLES
staging_events_table_create= ("""
CREATE TABLE staging_events
(
event_id int identity(0,1) SORTKEY,
artist_name text NULL DISTKEY,
auth text NULL,
firstName text NULL,
gender varchar(5) NULL,
itemInSession bigint NULL,
lastName text NULL,
length double precision NULL,
level text NULL,
location text NULL,
method text NULL,
page text NULL,
registration text NULL,
sessionId bigint NULL,
song text NULL,
status int NULL,
ts text NULL,
userAgent text NULL,
userId bigint
);
""")
staging_songs_table_create = ("""
CREATE TABLE staging_songs
(
num_songs int,
artist_id varchar(255) DISTKEY,
artist_latitude varchar(255) NULL,
artist_longitude varchar(255) NULL,
artist_location varchar(255) NULL,
artist_name text NOT NULL,
song_id varchar(255) SORTKEY NOT NULL,
title text NOT NULL,
duration double precision NOT NULL,
year int NULL
);
""")
songplay_table_create = ("""
CREATE TABLE songplay
(
songplay_id int identity(0,1) PRIMARY KEY SORTKEY NOT NULL,
start_time timestamp NOT NULL,
user_id text NOT NULL,
level text,
song_id text NOT NULL,
artist_id text NOT NULL DISTKEY,
session_id text,
location text,
user_agent text);
""")
user_table_create = ("""
CREATE TABLE users(
user_id bigint PRIMARY KEY SORTKEY NOT NULL ,
first_name text,
last_name text,
gender varchar(10),
level text
)diststyle all;
""")
song_table_create = ("""
CREATE TABLE songs(
song_id varchar(255) SORTKEY PRIMARY KEY NOT NULL,
artist_id text NOT NULL,
year int,
duration double precision,
level text
)diststyle all;
""")
artist_table_create = ("""
CREATE TABLE artists(
artist_id text PRIMARY KEY SORTKEY,
artist_name text,
location text,
lattitude text,
longitude text
) diststyle all;
""")
time_table_create = ("""
CREATE TABLE time(
start_time timestamp PRIMARY KEY SORTKEY,
hour int,
day int,
week int,
month int,
year int,
weekday int
) diststyle all;
""")
# STAGING TABLES
staging_events_copy = ("""copy staging_events from '{}'
credentials 'aws_iam_role={}'
compupdate off
region 'us-west-2'
JSON '{}'
""").format(config['S3']['LOG_DATA'],config['IAM_ROLE']['ARN'],config['S3']['LOG_JSONPATH'])
staging_songs_copy = ("""copy staging_songs from '{}'
credentials 'aws_iam_role={}'
compupdate off
region 'us-west-2'
JSON 'auto'
""").format(config['S3']['SONG_DATA'],config['IAM_ROLE']['ARN'])
# FINAL TABLES
songplay_table_insert = ("""
INSERT INTO songplay(start_time,user_id,level,song_id,artist_id,session_id,location,user_agent)
SELECT
TIMESTAMP 'epoch' + se.ts/1000 * INTERVAL '1 Second ' AS start_time,
se.userId AS user_id,
se.level AS level,
ss.song_id AS song_id,
ss.artist_id AS artist_id,
se.sessionId AS session_id,
ss.artist_location AS location,
se.userAgent AS user_agent
FROM staging_songs AS ss
JOIN staging_events AS se ON (ss.title=se.song AND ss.artist_name=se.artist_name)
AND
se.page = 'NextSong';
""")
user_table_insert = ("""
INSERT INTO users(user_id,first_name,last_name,gender,level)
SELECT DISTINCT(s.userId) AS user_id,
s.firstName AS first_name,
s.lastName AS last_name,
s.gender AS gender,
s.level AS level
FROM
staging_events as s
WHERE s.page = 'NextSong'
""")
song_table_insert = ("""
INSERT INTO songs (song_id,artist_id,year, duration)
SELECT DISTINCT(ss.song_id) AS song_id,
ss.artist_id AS artist_id,
ss.year AS year,
ss.duration AS duration
FROM
staging_songs AS ss
""")
artist_table_insert = ("""
INSERT INTO artists (artist_id,artist_name,location,lattitude,longitude)
SELECT DISTINCT(s.artist_id) AS artist_id,
s.artist_name AS artist_name,
s.artist_location AS location,
s.artist_latitude AS lattitude,
s.artist_longitude AS longitude
FROM
staging_songs AS s;
""")
time_table_insert = ("""
INSERT INTO time (start_time,hour,day,week,month,year,weekday)
SELECT DISTINCT(TIMESTAMP 'epoch' + s.ts/1000 * INTERVAL '1 Second ') AS start_time,
EXTRACT(HOUR from start_time) AS hour,
EXTRACT(DAY from start_time) AS day,
EXTRACT(WEEK from start_time) AS week,
EXTRACT(MONTH from start_time) AS month,
EXTRACT(YEAR from start_time) AS year,
EXTRACT(DOW from start_time) AS weekday
FROM
staging_events AS s
WHERE
s.page = 'NextSong';
""")
# QUERY LISTS
# Ordered batches consumed by the create/drop/etl driver scripts.  Every list
# begins with set_search_path so unqualified table names resolve to sparkifydb.
# NOTE(review): drop_table_queries starts by *creating* the schema (so the drops
# cannot fail on a missing schema) -- intentional but surprising given the name.
# songplay_table_insert runs last because it reads both staging tables.
create_table_queries =[set_search_path,songplay_table_create, user_table_create, song_table_create, artist_table_create, time_table_create,staging_events_table_create,staging_songs_table_create]
drop_table_queries = [create_sparkify_schema,set_search_path,staging_events_table_drop, staging_songs_table_drop, songplay_table_drop, user_table_drop, song_table_drop, artist_table_drop, time_table_drop]
copy_table_queries = [set_search_path,staging_events_copy, staging_songs_copy]
insert_table_queries = [set_search_path,user_table_insert, song_table_insert, artist_table_insert, time_table_insert,songplay_table_insert]
|
flexible
|
{
"blob_id": "652918e09a3506869c939be39b71a06467459f8a",
"index": 5992,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nconfig.read('dwh.cfg')\n<mask token>\n",
"step-3": "<mask token>\nconfig = configparser.ConfigParser()\nconfig.read('dwh.cfg')\ndrop_schema = 'DROP SCHEMA IF EXISTS sparkifydb;'\nset_search_path = 'SET SEARCH_PATH to sparkifydb;'\nstaging_events_table_drop = 'DROP TABLE IF EXISTS staging_events;'\nstaging_songs_table_drop = 'DROP TABLE IF EXISTS staging_songs;'\nsongplay_table_drop = 'DROP TABLE IF EXISTS songplay;'\nuser_table_drop = 'DROP TABLE IF EXISTS sparkifydb.users;'\nsong_table_drop = 'DROP TABLE IF EXISTS sparkifydb.songs;'\nartist_table_drop = 'DROP TABLE IF EXISTS sparkifydb.artists;'\ntime_table_drop = 'DROP TABLE IF EXISTS sparkifydb.time;'\ncreate_sparkify_schema = 'CREATE SCHEMA IF NOT EXISTS sparkifydb;'\nstaging_events_table_create = \"\"\"\nCREATE TABLE staging_events\n(\nevent_id int identity(0,1) SORTKEY,\nartist_name text NULL DISTKEY,\nauth text NULL,\nfirstName text NULL,\ngender varchar(5) NULL,\nitemInSession bigint NULL,\nlastName text NULL,\nlength double precision NULL,\nlevel text NULL,\nlocation text NULL,\nmethod text NULL,\npage text NULL,\nregistration text NULL,\nsessionId bigint NULL,\nsong text NULL,\nstatus int NULL,\nts text NULL,\nuserAgent text NULL,\nuserId bigint \n);\n\"\"\"\nstaging_songs_table_create = \"\"\"\nCREATE TABLE staging_songs\n(\nnum_songs int,\nartist_id varchar(255) DISTKEY,\nartist_latitude varchar(255) NULL,\nartist_longitude varchar(255) NULL,\nartist_location varchar(255) NULL,\nartist_name text NOT NULL,\nsong_id varchar(255) SORTKEY NOT NULL,\ntitle text NOT NULL,\nduration double precision NOT NULL,\nyear int NULL\n);\n\"\"\"\nsongplay_table_create = \"\"\"\nCREATE TABLE songplay\n(\nsongplay_id int identity(0,1) PRIMARY KEY SORTKEY NOT NULL, \nstart_time timestamp NOT NULL, \nuser_id text NOT NULL, \nlevel text, \nsong_id text NOT NULL, \nartist_id text NOT NULL DISTKEY, \nsession_id text, \nlocation text, \nuser_agent text);\n\"\"\"\nuser_table_create = \"\"\"\nCREATE TABLE users(\nuser_id bigint PRIMARY KEY SORTKEY NOT NULL ,\nfirst_name 
text,\nlast_name text, \ngender varchar(10),\nlevel text\n)diststyle all;\n\"\"\"\nsong_table_create = \"\"\"\nCREATE TABLE songs(\nsong_id varchar(255) SORTKEY PRIMARY KEY NOT NULL,\nartist_id text NOT NULL,\nyear int, \nduration double precision,\nlevel text\n)diststyle all;\n\"\"\"\nartist_table_create = \"\"\"\nCREATE TABLE artists(\nartist_id text PRIMARY KEY SORTKEY, \nartist_name text, \nlocation text, \nlattitude text, \nlongitude text\n) diststyle all;\n\n\"\"\"\ntime_table_create = \"\"\"\nCREATE TABLE time(\nstart_time timestamp PRIMARY KEY SORTKEY,\nhour int,\nday int,\nweek int,\nmonth int,\nyear int,\nweekday int\n) diststyle all;\n\"\"\"\nstaging_events_copy = (\n \"\"\"copy staging_events from '{}'\n credentials 'aws_iam_role={}'\n compupdate off \n region 'us-west-2'\n JSON '{}'\n\"\"\"\n .format(config['S3']['LOG_DATA'], config['IAM_ROLE']['ARN'], config[\n 'S3']['LOG_JSONPATH']))\nstaging_songs_copy = (\n \"\"\"copy staging_songs from '{}'\n credentials 'aws_iam_role={}'\n compupdate off \n region 'us-west-2' \n JSON 'auto'\n\"\"\"\n .format(config['S3']['SONG_DATA'], config['IAM_ROLE']['ARN']))\nsongplay_table_insert = \"\"\"\nINSERT INTO songplay(start_time,user_id,level,song_id,artist_id,session_id,location,user_agent)\n\nSELECT\n TIMESTAMP 'epoch' + se.ts/1000 * INTERVAL '1 Second ' AS start_time,\n se.userId AS user_id,\n se.level AS level,\n ss.song_id AS song_id,\n ss.artist_id AS artist_id,\n se.sessionId AS session_id,\n ss.artist_location AS location,\n se.userAgent AS user_agent\nFROM staging_songs AS ss \nJOIN staging_events AS se ON (ss.title=se.song AND ss.artist_name=se.artist_name)\nAND\n se.page = 'NextSong';\n \n\"\"\"\nuser_table_insert = \"\"\"\nINSERT INTO users(user_id,first_name,last_name,gender,level)\n\nSELECT DISTINCT(s.userId) AS user_id,\n s.firstName AS first_name,\n s.lastName AS last_name,\n s.gender AS gender,\n s.level AS level\n\nFROM\n staging_events as s\nWHERE s.page = 'NextSong' \n\n\"\"\"\nsong_table_insert 
= \"\"\"\nINSERT INTO songs (song_id,artist_id,year, duration)\n\nSELECT DISTINCT(ss.song_id) AS song_id,\n ss.artist_id AS artist_id,\n ss.year AS year,\n ss.duration AS duration\nFROM\n staging_songs AS ss\n\n\"\"\"\nartist_table_insert = \"\"\"\nINSERT INTO artists (artist_id,artist_name,location,lattitude,longitude)\n\nSELECT DISTINCT(s.artist_id) AS artist_id,\n s.artist_name AS artist_name,\n s.artist_location AS location,\n s.artist_latitude AS lattitude,\n s.artist_longitude AS longitude\nFROM\n staging_songs AS s;\n\"\"\"\ntime_table_insert = \"\"\"\nINSERT INTO time (start_time,hour,day,week,month,year,weekday)\n\nSELECT DISTINCT(TIMESTAMP 'epoch' + s.ts/1000 * INTERVAL '1 Second ') AS start_time,\n EXTRACT(HOUR from start_time) AS hour,\n EXTRACT(DAY from start_time) AS day,\n EXTRACT(WEEK from start_time) AS week,\n EXTRACT(MONTH from start_time) AS month,\n EXTRACT(YEAR from start_time) AS year,\n EXTRACT(DOW from start_time) AS weekday\nFROM \n staging_events AS s\nWHERE \n s.page = 'NextSong'; \n\n\"\"\"\ncreate_table_queries = [set_search_path, songplay_table_create,\n user_table_create, song_table_create, artist_table_create,\n time_table_create, staging_events_table_create, staging_songs_table_create]\ndrop_table_queries = [create_sparkify_schema, set_search_path,\n staging_events_table_drop, staging_songs_table_drop,\n songplay_table_drop, user_table_drop, song_table_drop,\n artist_table_drop, time_table_drop]\ncopy_table_queries = [set_search_path, staging_events_copy, staging_songs_copy]\ninsert_table_queries = [set_search_path, user_table_insert,\n song_table_insert, artist_table_insert, time_table_insert,\n songplay_table_insert]\n",
"step-4": "import configparser\nconfig = configparser.ConfigParser()\nconfig.read('dwh.cfg')\ndrop_schema = 'DROP SCHEMA IF EXISTS sparkifydb;'\nset_search_path = 'SET SEARCH_PATH to sparkifydb;'\nstaging_events_table_drop = 'DROP TABLE IF EXISTS staging_events;'\nstaging_songs_table_drop = 'DROP TABLE IF EXISTS staging_songs;'\nsongplay_table_drop = 'DROP TABLE IF EXISTS songplay;'\nuser_table_drop = 'DROP TABLE IF EXISTS sparkifydb.users;'\nsong_table_drop = 'DROP TABLE IF EXISTS sparkifydb.songs;'\nartist_table_drop = 'DROP TABLE IF EXISTS sparkifydb.artists;'\ntime_table_drop = 'DROP TABLE IF EXISTS sparkifydb.time;'\ncreate_sparkify_schema = 'CREATE SCHEMA IF NOT EXISTS sparkifydb;'\nstaging_events_table_create = \"\"\"\nCREATE TABLE staging_events\n(\nevent_id int identity(0,1) SORTKEY,\nartist_name text NULL DISTKEY,\nauth text NULL,\nfirstName text NULL,\ngender varchar(5) NULL,\nitemInSession bigint NULL,\nlastName text NULL,\nlength double precision NULL,\nlevel text NULL,\nlocation text NULL,\nmethod text NULL,\npage text NULL,\nregistration text NULL,\nsessionId bigint NULL,\nsong text NULL,\nstatus int NULL,\nts text NULL,\nuserAgent text NULL,\nuserId bigint \n);\n\"\"\"\nstaging_songs_table_create = \"\"\"\nCREATE TABLE staging_songs\n(\nnum_songs int,\nartist_id varchar(255) DISTKEY,\nartist_latitude varchar(255) NULL,\nartist_longitude varchar(255) NULL,\nartist_location varchar(255) NULL,\nartist_name text NOT NULL,\nsong_id varchar(255) SORTKEY NOT NULL,\ntitle text NOT NULL,\nduration double precision NOT NULL,\nyear int NULL\n);\n\"\"\"\nsongplay_table_create = \"\"\"\nCREATE TABLE songplay\n(\nsongplay_id int identity(0,1) PRIMARY KEY SORTKEY NOT NULL, \nstart_time timestamp NOT NULL, \nuser_id text NOT NULL, \nlevel text, \nsong_id text NOT NULL, \nartist_id text NOT NULL DISTKEY, \nsession_id text, \nlocation text, \nuser_agent text);\n\"\"\"\nuser_table_create = \"\"\"\nCREATE TABLE users(\nuser_id bigint PRIMARY KEY SORTKEY NOT NULL 
,\nfirst_name text,\nlast_name text, \ngender varchar(10),\nlevel text\n)diststyle all;\n\"\"\"\nsong_table_create = \"\"\"\nCREATE TABLE songs(\nsong_id varchar(255) SORTKEY PRIMARY KEY NOT NULL,\nartist_id text NOT NULL,\nyear int, \nduration double precision,\nlevel text\n)diststyle all;\n\"\"\"\nartist_table_create = \"\"\"\nCREATE TABLE artists(\nartist_id text PRIMARY KEY SORTKEY, \nartist_name text, \nlocation text, \nlattitude text, \nlongitude text\n) diststyle all;\n\n\"\"\"\ntime_table_create = \"\"\"\nCREATE TABLE time(\nstart_time timestamp PRIMARY KEY SORTKEY,\nhour int,\nday int,\nweek int,\nmonth int,\nyear int,\nweekday int\n) diststyle all;\n\"\"\"\nstaging_events_copy = (\n \"\"\"copy staging_events from '{}'\n credentials 'aws_iam_role={}'\n compupdate off \n region 'us-west-2'\n JSON '{}'\n\"\"\"\n .format(config['S3']['LOG_DATA'], config['IAM_ROLE']['ARN'], config[\n 'S3']['LOG_JSONPATH']))\nstaging_songs_copy = (\n \"\"\"copy staging_songs from '{}'\n credentials 'aws_iam_role={}'\n compupdate off \n region 'us-west-2' \n JSON 'auto'\n\"\"\"\n .format(config['S3']['SONG_DATA'], config['IAM_ROLE']['ARN']))\nsongplay_table_insert = \"\"\"\nINSERT INTO songplay(start_time,user_id,level,song_id,artist_id,session_id,location,user_agent)\n\nSELECT\n TIMESTAMP 'epoch' + se.ts/1000 * INTERVAL '1 Second ' AS start_time,\n se.userId AS user_id,\n se.level AS level,\n ss.song_id AS song_id,\n ss.artist_id AS artist_id,\n se.sessionId AS session_id,\n ss.artist_location AS location,\n se.userAgent AS user_agent\nFROM staging_songs AS ss \nJOIN staging_events AS se ON (ss.title=se.song AND ss.artist_name=se.artist_name)\nAND\n se.page = 'NextSong';\n \n\"\"\"\nuser_table_insert = \"\"\"\nINSERT INTO users(user_id,first_name,last_name,gender,level)\n\nSELECT DISTINCT(s.userId) AS user_id,\n s.firstName AS first_name,\n s.lastName AS last_name,\n s.gender AS gender,\n s.level AS level\n\nFROM\n staging_events as s\nWHERE s.page = 'NextSong' 
\n\n\"\"\"\nsong_table_insert = \"\"\"\nINSERT INTO songs (song_id,artist_id,year, duration)\n\nSELECT DISTINCT(ss.song_id) AS song_id,\n ss.artist_id AS artist_id,\n ss.year AS year,\n ss.duration AS duration\nFROM\n staging_songs AS ss\n\n\"\"\"\nartist_table_insert = \"\"\"\nINSERT INTO artists (artist_id,artist_name,location,lattitude,longitude)\n\nSELECT DISTINCT(s.artist_id) AS artist_id,\n s.artist_name AS artist_name,\n s.artist_location AS location,\n s.artist_latitude AS lattitude,\n s.artist_longitude AS longitude\nFROM\n staging_songs AS s;\n\"\"\"\ntime_table_insert = \"\"\"\nINSERT INTO time (start_time,hour,day,week,month,year,weekday)\n\nSELECT DISTINCT(TIMESTAMP 'epoch' + s.ts/1000 * INTERVAL '1 Second ') AS start_time,\n EXTRACT(HOUR from start_time) AS hour,\n EXTRACT(DAY from start_time) AS day,\n EXTRACT(WEEK from start_time) AS week,\n EXTRACT(MONTH from start_time) AS month,\n EXTRACT(YEAR from start_time) AS year,\n EXTRACT(DOW from start_time) AS weekday\nFROM \n staging_events AS s\nWHERE \n s.page = 'NextSong'; \n\n\"\"\"\ncreate_table_queries = [set_search_path, songplay_table_create,\n user_table_create, song_table_create, artist_table_create,\n time_table_create, staging_events_table_create, staging_songs_table_create]\ndrop_table_queries = [create_sparkify_schema, set_search_path,\n staging_events_table_drop, staging_songs_table_drop,\n songplay_table_drop, user_table_drop, song_table_drop,\n artist_table_drop, time_table_drop]\ncopy_table_queries = [set_search_path, staging_events_copy, staging_songs_copy]\ninsert_table_queries = [set_search_path, user_table_insert,\n song_table_insert, artist_table_insert, time_table_insert,\n songplay_table_insert]\n",
"step-5": "import configparser\n\n\n# CONFIG\nconfig = configparser.ConfigParser()\nconfig.read('dwh.cfg')\n\n# DROP TABLES\ndrop_schema=\"DROP SCHEMA IF EXISTS sparkifydb;\"\nset_search_path=\"SET SEARCH_PATH to sparkifydb;\"\nstaging_events_table_drop = \"DROP TABLE IF EXISTS staging_events;\"\nstaging_songs_table_drop = \"DROP TABLE IF EXISTS staging_songs;\"\nsongplay_table_drop = \"DROP TABLE IF EXISTS songplay;\"\nuser_table_drop = \"DROP TABLE IF EXISTS sparkifydb.users;\"\nsong_table_drop =\"DROP TABLE IF EXISTS sparkifydb.songs;\"\nartist_table_drop = \"DROP TABLE IF EXISTS sparkifydb.artists;\"\ntime_table_drop = \"DROP TABLE IF EXISTS sparkifydb.time;\"\n\n#CREATE SCHEMA\n\ncreate_sparkify_schema=\"CREATE SCHEMA IF NOT EXISTS sparkifydb;\"\n\n# CREATE TABLES\n\nstaging_events_table_create= (\"\"\"\nCREATE TABLE staging_events\n(\nevent_id int identity(0,1) SORTKEY,\nartist_name text NULL DISTKEY,\nauth text NULL,\nfirstName text NULL,\ngender varchar(5) NULL,\nitemInSession bigint NULL,\nlastName text NULL,\nlength double precision NULL,\nlevel text NULL,\nlocation text NULL,\nmethod text NULL,\npage text NULL,\nregistration text NULL,\nsessionId bigint NULL,\nsong text NULL,\nstatus int NULL,\nts text NULL,\nuserAgent text NULL,\nuserId bigint \n);\n\"\"\")\n\nstaging_songs_table_create = (\"\"\"\nCREATE TABLE staging_songs\n(\nnum_songs int,\nartist_id varchar(255) DISTKEY,\nartist_latitude varchar(255) NULL,\nartist_longitude varchar(255) NULL,\nartist_location varchar(255) NULL,\nartist_name text NOT NULL,\nsong_id varchar(255) SORTKEY NOT NULL,\ntitle text NOT NULL,\nduration double precision NOT NULL,\nyear int NULL\n);\n\"\"\")\n\nsongplay_table_create = (\"\"\"\nCREATE TABLE songplay\n(\nsongplay_id int identity(0,1) PRIMARY KEY SORTKEY NOT NULL, \nstart_time timestamp NOT NULL, \nuser_id text NOT NULL, \nlevel text, \nsong_id text NOT NULL, \nartist_id text NOT NULL DISTKEY, \nsession_id text, \nlocation text, \nuser_agent 
text);\n\"\"\")\n\nuser_table_create = (\"\"\"\nCREATE TABLE users(\nuser_id bigint PRIMARY KEY SORTKEY NOT NULL ,\nfirst_name text,\nlast_name text, \ngender varchar(10),\nlevel text\n)diststyle all;\n\"\"\")\n\nsong_table_create = (\"\"\"\nCREATE TABLE songs(\nsong_id varchar(255) SORTKEY PRIMARY KEY NOT NULL,\nartist_id text NOT NULL,\nyear int, \nduration double precision,\nlevel text\n)diststyle all;\n\"\"\")\n\nartist_table_create = (\"\"\"\nCREATE TABLE artists(\nartist_id text PRIMARY KEY SORTKEY, \nartist_name text, \nlocation text, \nlattitude text, \nlongitude text\n) diststyle all;\n\n\"\"\")\n\ntime_table_create = (\"\"\"\nCREATE TABLE time(\nstart_time timestamp PRIMARY KEY SORTKEY,\nhour int,\nday int,\nweek int,\nmonth int,\nyear int,\nweekday int\n) diststyle all;\n\"\"\")\n\n# STAGING TABLES\n\nstaging_events_copy = (\"\"\"copy staging_events from '{}'\n credentials 'aws_iam_role={}'\n compupdate off \n region 'us-west-2'\n JSON '{}'\n\"\"\").format(config['S3']['LOG_DATA'],config['IAM_ROLE']['ARN'],config['S3']['LOG_JSONPATH'])\n\nstaging_songs_copy = (\"\"\"copy staging_songs from '{}'\n credentials 'aws_iam_role={}'\n compupdate off \n region 'us-west-2' \n JSON 'auto'\n\"\"\").format(config['S3']['SONG_DATA'],config['IAM_ROLE']['ARN'])\n\n# FINAL TABLES\n\nsongplay_table_insert = (\"\"\"\nINSERT INTO songplay(start_time,user_id,level,song_id,artist_id,session_id,location,user_agent)\n\nSELECT\n TIMESTAMP 'epoch' + se.ts/1000 * INTERVAL '1 Second ' AS start_time,\n se.userId AS user_id,\n se.level AS level,\n ss.song_id AS song_id,\n ss.artist_id AS artist_id,\n se.sessionId AS session_id,\n ss.artist_location AS location,\n se.userAgent AS user_agent\nFROM staging_songs AS ss \nJOIN staging_events AS se ON (ss.title=se.song AND ss.artist_name=se.artist_name)\nAND\n se.page = 'NextSong';\n \n\"\"\")\n\nuser_table_insert = (\"\"\"\nINSERT INTO users(user_id,first_name,last_name,gender,level)\n\nSELECT DISTINCT(s.userId) AS user_id,\n s.firstName 
AS first_name,\n s.lastName AS last_name,\n s.gender AS gender,\n s.level AS level\n\nFROM\n staging_events as s\nWHERE s.page = 'NextSong' \n\n\"\"\")\n\nsong_table_insert = (\"\"\"\nINSERT INTO songs (song_id,artist_id,year, duration)\n\nSELECT DISTINCT(ss.song_id) AS song_id,\n ss.artist_id AS artist_id,\n ss.year AS year,\n ss.duration AS duration\nFROM\n staging_songs AS ss\n\n\"\"\")\n\nartist_table_insert = (\"\"\"\nINSERT INTO artists (artist_id,artist_name,location,lattitude,longitude)\n\nSELECT DISTINCT(s.artist_id) AS artist_id,\n s.artist_name AS artist_name,\n s.artist_location AS location,\n s.artist_latitude AS lattitude,\n s.artist_longitude AS longitude\nFROM\n staging_songs AS s;\n\"\"\")\n\ntime_table_insert = (\"\"\"\nINSERT INTO time (start_time,hour,day,week,month,year,weekday)\n\nSELECT DISTINCT(TIMESTAMP 'epoch' + s.ts/1000 * INTERVAL '1 Second ') AS start_time,\n EXTRACT(HOUR from start_time) AS hour,\n EXTRACT(DAY from start_time) AS day,\n EXTRACT(WEEK from start_time) AS week,\n EXTRACT(MONTH from start_time) AS month,\n EXTRACT(YEAR from start_time) AS year,\n EXTRACT(DOW from start_time) AS weekday\nFROM \n staging_events AS s\nWHERE \n s.page = 'NextSong'; \n\n\"\"\")\n\n# QUERY LISTS\n\ncreate_table_queries =[set_search_path,songplay_table_create, user_table_create, song_table_create, artist_table_create, time_table_create,staging_events_table_create,staging_songs_table_create]\n\ndrop_table_queries = [create_sparkify_schema,set_search_path,staging_events_table_drop, staging_songs_table_drop, songplay_table_drop, user_table_drop, song_table_drop, artist_table_drop, time_table_drop]\n\ncopy_table_queries = [set_search_path,staging_events_copy, staging_songs_copy]\n\ninsert_table_queries = [set_search_path,user_table_insert, song_table_insert, artist_table_insert, time_table_insert,songplay_table_insert]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
class Solution(object):
<|reserved_special_token_0|>
<|reserved_special_token_1|>
class Solution(object):

    def addBinary(self, a, b):
        """Return the binary sum of two binary strings.

        :type a: str
        :type b: str
        :rtype: str
        """
        # Align both operands to a common width with leading zeros.
        width = max(len(a), len(b))
        a, b = a.zfill(width), b.zfill(width)
        digits = []
        carry = 0
        # Add column by column from the least-significant digit.
        for pos in reversed(range(width)):
            carry, bit = divmod(int(a[pos]) + int(b[pos]) + carry, 2)
            digits.append(str(bit))
        # A surviving carry becomes one extra high-order digit.
        if carry:
            digits.append('1')
        # Digits were collected least-significant first.
        return ''.join(reversed(digits))
<|reserved_special_token_1|>
# Given two binary strings, return their sum (also a binary string).
#
# For example,
# a = "11"
# b = "1"
# Return "100".
#
# Show Company Tags
# Show Tags
# Show Similar Problems
class Solution(object):
    def addBinary(self, a, b):
        """
        Grade-school addition of two binary strings, digit by digit.

        :type a: str
        :type b: str
        :rtype: str
        """
        # Left-pad the shorter operand with '0' so both strings align.
        max_len = max(len(a), len(b))
        a = a.zfill(max_len)
        b = b.zfill(max_len)
        carry = 0
        res = ''
        # Walk from the least-significant (rightmost) digit to the most.
        for i in range(max_len - 1, -1, -1):
            sums = int(a[i]) + int(b[i]) + carry
            if sums < 2:
                # Sum is 0 or 1: emit as-is, no carry.
                res += str(sums)
                carry = 0
            elif sums == 2:
                # Sum is 2 (binary '10'): emit 0, carry 1.
                res += '0'
                carry = 1
            else:
                # Sum is 3 (binary '11'): emit 1, carry 1.
                res += '1'
                carry = 1
        # A leftover carry adds one more high-order digit.
        if carry == 1:
            res += '1'
        # Digits were emitted least-significant first; reverse for the answer.
        return res[::-1]
|
flexible
|
{
"blob_id": "9655cba5b459ae8b6812bcebc31cc46e19e52386",
"index": 2741,
"step-1": "<mask token>\n",
"step-2": "class Solution(object):\n <mask token>\n",
"step-3": "class Solution(object):\n\n def addBinary(self, a, b):\n \"\"\"\n :type a: str\n :type b: str\n :rtype: str\n \"\"\"\n max_len = max(len(a), len(b))\n a = a.zfill(max_len)\n b = b.zfill(max_len)\n carry = 0\n res = ''\n for i in range(max_len - 1, -1, -1):\n sums = int(a[i]) + int(b[i]) + carry\n if sums < 2:\n res += str(sums)\n carry = 0\n elif sums == 2:\n res += '0'\n carry = 1\n else:\n res += '1'\n carry = 1\n if carry == 1:\n res += '1'\n return res[::-1]\n",
"step-4": "# Given two binary strings, return their sum (also a binary string).\n#\n# For example,\n# a = \"11\"\n# b = \"1\"\n# Return \"100\".\n#\n# Show Company Tags\n# Show Tags\n# Show Similar Problems\n\n\nclass Solution(object):\n def addBinary(self, a, b):\n \"\"\"\n :type a: str\n :type b: str\n :rtype: str\n \"\"\"\n max_len = max(len(a), len(b))\n a = a.zfill(max_len)\n b = b.zfill(max_len)\n carry = 0\n res = ''\n for i in range(max_len - 1, -1, -1):\n sums = int(a[i]) + int(b[i]) + carry\n if sums < 2:\n res += str(sums)\n carry = 0\n elif sums == 2:\n res += '0'\n carry = 1\n else:\n res += '1'\n carry = 1\n if carry == 1:\n res += '1'\n return res[::-1]\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
#!/bin/python3
import sys
# import numpy as np
def _get_change_making_matrix(set_of_coins, r):
matrix = [[0 for _ in range(r + 1)] for _ in range(len(set_of_coins) + 1)]
# matrix = np.array(matrix)
for i in range(1,len(set_of_coins) + 1):
matrix[i][0] = i
return matrix
def change_making(coins, target):
    """Count the ways to make `target` units from unlimited coins.

    Classic unbounded coin-change *counting* DP: matrix[c][t] is the number
    of distinct combinations that sum to t using only the first c
    denominations.  NOTE: the original docstring claimed this returned the
    *fewest* number of coins; it actually returns the number of
    combinations, which is what the surrounding script prints.

    coins  -- list or tuple of available denominations (each usable any
              number of times)
    target -- the amount to make change for; target == 0 yields 1 (the
              empty combination)
    """
    # Build the table inline so this function is self-contained.  Row 0
    # (no coins) stays all zeros; column 0 is 1 for the "take no coins" way.
    matrix = [[0] * (target + 1) for _ in range(len(coins) + 1)]
    for row in range(1, len(coins) + 1):
        matrix[row][0] = 1
    for coin in range(1, len(coins) + 1):
        value = coins[coin - 1]
        for sub_target in range(1, target + 1):
            if value == sub_target:
                # One new way: spend exactly this coin; plus all ways
                # that avoid this denomination entirely.
                matrix[coin][sub_target] = 1 + matrix[coin - 1][sub_target]
            elif value > sub_target:
                # Coin too large to use: inherit the count without it.
                matrix[coin][sub_target] = matrix[coin - 1][sub_target]
            else:
                # Either skip this denomination, or use one more of it.
                matrix[coin][sub_target] = (matrix[coin - 1][sub_target]
                                            + matrix[coin][sub_target - value])
    return matrix[-1][-1]
# Read the two input lines: "<target> <coin_count>" then the coin values.
input1 = input()
input2 = input()
# Sample input kept for manual testing (expected answer: 5).
# input1 = "10 4"
# input2 = "2 5 3 6"
# n = amount to make change for, m = number of denominations (m is unused
# beyond parsing; the coin list length is taken from input2 directly).
n, m = input1.strip().split(' ')
n, m = [int(n), int(m)]
c = list(map(int, input2.strip().split(' ')))
# Print the number of ways of making change for 'n' units using coins having the values given by 'c'
ways = change_making(c, n)
print(ways)
|
normal
|
{
"blob_id": "f15bc62fad2c47fed2e9e5d269284ebe7487b789",
"index": 2297,
"step-1": "<mask token>\n\n\ndef _get_change_making_matrix(set_of_coins, r):\n matrix = [[(0) for _ in range(r + 1)] for _ in range(len(set_of_coins) + 1)\n ]\n for i in range(1, len(set_of_coins) + 1):\n matrix[i][0] = i\n return matrix\n\n\ndef change_making(coins, target):\n \"\"\"This function assumes that all coins are available infinitely.\n n is the number that we need to obtain with the fewest number of coins.\n coins is a list or tuple with the available denominations.\"\"\"\n matrix = _get_change_making_matrix(coins, target)\n for coin in range(1, len(coins) + 1):\n for sub_target in range(1, target + 1):\n if coins[coin - 1] == sub_target:\n matrix[coin][sub_target] = 1 + matrix[coin - 1][sub_target]\n elif coins[coin - 1] > sub_target:\n matrix[coin][sub_target] = matrix[coin - 1][sub_target]\n else:\n matrix[coin][sub_target] = matrix[coin - 1][sub_target\n ] + matrix[coin][sub_target - coins[coin - 1]]\n return matrix[-1][-1]\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef _get_change_making_matrix(set_of_coins, r):\n matrix = [[(0) for _ in range(r + 1)] for _ in range(len(set_of_coins) + 1)\n ]\n for i in range(1, len(set_of_coins) + 1):\n matrix[i][0] = i\n return matrix\n\n\ndef change_making(coins, target):\n \"\"\"This function assumes that all coins are available infinitely.\n n is the number that we need to obtain with the fewest number of coins.\n coins is a list or tuple with the available denominations.\"\"\"\n matrix = _get_change_making_matrix(coins, target)\n for coin in range(1, len(coins) + 1):\n for sub_target in range(1, target + 1):\n if coins[coin - 1] == sub_target:\n matrix[coin][sub_target] = 1 + matrix[coin - 1][sub_target]\n elif coins[coin - 1] > sub_target:\n matrix[coin][sub_target] = matrix[coin - 1][sub_target]\n else:\n matrix[coin][sub_target] = matrix[coin - 1][sub_target\n ] + matrix[coin][sub_target - coins[coin - 1]]\n return matrix[-1][-1]\n\n\n<mask token>\nprint(ways)\n",
"step-3": "<mask token>\n\n\ndef _get_change_making_matrix(set_of_coins, r):\n matrix = [[(0) for _ in range(r + 1)] for _ in range(len(set_of_coins) + 1)\n ]\n for i in range(1, len(set_of_coins) + 1):\n matrix[i][0] = i\n return matrix\n\n\ndef change_making(coins, target):\n \"\"\"This function assumes that all coins are available infinitely.\n n is the number that we need to obtain with the fewest number of coins.\n coins is a list or tuple with the available denominations.\"\"\"\n matrix = _get_change_making_matrix(coins, target)\n for coin in range(1, len(coins) + 1):\n for sub_target in range(1, target + 1):\n if coins[coin - 1] == sub_target:\n matrix[coin][sub_target] = 1 + matrix[coin - 1][sub_target]\n elif coins[coin - 1] > sub_target:\n matrix[coin][sub_target] = matrix[coin - 1][sub_target]\n else:\n matrix[coin][sub_target] = matrix[coin - 1][sub_target\n ] + matrix[coin][sub_target - coins[coin - 1]]\n return matrix[-1][-1]\n\n\ninput1 = input()\ninput2 = input()\nn, m = input1.strip().split(' ')\nn, m = [int(n), int(m)]\nc = list(map(int, input2.strip().split(' ')))\nways = change_making(c, n)\nprint(ways)\n",
"step-4": "import sys\n\n\ndef _get_change_making_matrix(set_of_coins, r):\n matrix = [[(0) for _ in range(r + 1)] for _ in range(len(set_of_coins) + 1)\n ]\n for i in range(1, len(set_of_coins) + 1):\n matrix[i][0] = i\n return matrix\n\n\ndef change_making(coins, target):\n \"\"\"This function assumes that all coins are available infinitely.\n n is the number that we need to obtain with the fewest number of coins.\n coins is a list or tuple with the available denominations.\"\"\"\n matrix = _get_change_making_matrix(coins, target)\n for coin in range(1, len(coins) + 1):\n for sub_target in range(1, target + 1):\n if coins[coin - 1] == sub_target:\n matrix[coin][sub_target] = 1 + matrix[coin - 1][sub_target]\n elif coins[coin - 1] > sub_target:\n matrix[coin][sub_target] = matrix[coin - 1][sub_target]\n else:\n matrix[coin][sub_target] = matrix[coin - 1][sub_target\n ] + matrix[coin][sub_target - coins[coin - 1]]\n return matrix[-1][-1]\n\n\ninput1 = input()\ninput2 = input()\nn, m = input1.strip().split(' ')\nn, m = [int(n), int(m)]\nc = list(map(int, input2.strip().split(' ')))\nways = change_making(c, n)\nprint(ways)\n",
"step-5": "#!/bin/python3\n\nimport sys\n# import numpy as np\n\n\ndef _get_change_making_matrix(set_of_coins, r):\n matrix = [[0 for _ in range(r + 1)] for _ in range(len(set_of_coins) + 1)]\n # matrix = np.array(matrix)\n for i in range(1,len(set_of_coins) + 1):\n matrix[i][0] = i\n\n return matrix\n\n\ndef change_making(coins, target):\n \"\"\"This function assumes that all coins are available infinitely.\n n is the number that we need to obtain with the fewest number of coins.\n coins is a list or tuple with the available denominations.\"\"\"\n matrix = _get_change_making_matrix(coins, target)\n\n for coin in range(1, len(coins) + 1):\n\n for sub_target in range(1, target + 1):\n\n # Just use the coin coins[c - 1].\n if coins[coin - 1] == sub_target:\n matrix[coin][sub_target] = 1+matrix[coin-1][sub_target]\n\n # coins[c - 1] cannot be included.\n # We use the previous solution for making r,\n # excluding coins[c - 1].\n elif coins[coin - 1] > sub_target:\n matrix[coin][sub_target] = matrix[coin - 1][sub_target]\n\n # We can use coins[c - 1].\n # We need to decide which one of the following solutions is the best:\n # 1. Using the previous solution for making r (without using coins[c - 1]).\n # 2. Using the previous solution for making r - coins[c - 1] (without using coins[c - 1]) plus this 1 extra coin.\n else:\n matrix[coin][sub_target] = (matrix[coin - 1][sub_target]) + (\n matrix[coin][sub_target - coins[coin - 1]])\n\n return matrix[-1][-1]\n\n\ninput1 = input()\ninput2 = input()\n\n# input1 = \"10 4\"\n# input2 = \"2 5 3 6\"\n\nn, m = input1.strip().split(' ')\nn, m = [int(n), int(m)]\nc = list(map(int, input2.strip().split(' ')))\n# Print the number of ways of making change for 'n' units using coins having the values given by 'c'\nways = change_making(c, n)\nprint(ways)\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
#Embedded file name: c:/depot/games/branches/release/EVE-TRANQUILITY/eve/client/script/paperDoll/SkinRaytracing.py
import trinity
import blue
import telemetry
import ctypes
import math
import time
import geo2
import struct
import itertools
import weakref
import uthread
import paperDoll as PD
import log
import random
mylog = log.Channel('optix', 'python')
def LogInfo(text, *args):
    """Append each extra argument (space-separated) and log at info level."""
    message = text
    if args:
        message = message + ' ' + ' '.join(str(arg) for arg in args)
    mylog.Log(message, log.LGINFO)
def LogWarn(text, *args):
    """Append each extra argument (space-separated) and log at warning level."""
    parts = [text] + [str(arg) for arg in args]
    mylog.Log(' '.join(parts), log.LGWARN)
class SkinRaytracingTools():
    """Static helpers that bridge Trinity (the EVE renderer) and OptiX.

    Covers: pushing camera matrices into an OptiX context, converting
    Trinity textures/cubemaps into OptiX texture samplers ("interop"),
    copying effect parameters, and packing light data into OptiX buffers.
    All methods are stateless @staticmethods.
    """
    __guid__ = 'paperDoll.SkinRaytracingTools'

    @staticmethod
    def SetOptixMatrixFromTrinity(optix, matrixName, ratio = None):
        """Upload the current camera's clip-to-world matrix under
        `matrixName` plus the view transform as 'viewTransform'.

        ratio -- optional aspect-ratio override; defaults to Trinity's.
        Returns the view transform that was uploaded.
        """
        proj = trinity.TriProjection()
        view = trinity.TriView()
        view.transform = trinity.GetViewTransform()
        proj.PerspectiveFov(trinity.GetFieldOfView(), trinity.GetAspectRatio() if ratio is None else ratio, trinity.GetFrontClip(), trinity.GetBackClip())
        # clip -> view -> world, composed by inverting the forward transforms.
        projToView = geo2.MatrixInverse(proj.transform)
        viewToWorld = geo2.MatrixInverse(view.transform)
        projToWorld = geo2.MatrixMultiply(projToView, viewToWorld)
        r0 = projToWorld[0]
        r1 = projToWorld[1]
        r2 = projToWorld[2]
        r3 = projToWorld[3]
        mat = trinity.TriMatrix(r0[0], r0[1], r0[2], r0[3], r1[0], r1[1], r1[2], r1[3], r2[0], r2[1], r2[2], r2[3], r3[0], r3[1], r3[2], r3[3])
        optix.SetMatrix4x4(matrixName, mat)
        r0 = view.transform[0]
        r1 = view.transform[1]
        r2 = view.transform[2]
        r3 = view.transform[3]
        mat = trinity.TriMatrix(r0[0], r0[1], r0[2], r0[3], r1[0], r1[1], r1[2], r1[3], r2[0], r2[1], r2[2], r2[3], r3[0], r3[1], r3[2], r3[3])
        optix.SetMatrix4x4('viewTransform', mat)
        return mat
    @staticmethod
    def CreateSamplerForTexture(name, map, waitForFinish):
        """Render texture `map` into a B8G8R8A8 render target and wrap the
        result in an OptiX sampler.

        Returns (sampler, resource) - the resource must be kept alive by
        the caller for as long as the sampler is used.
        """
        rt = trinity.Tr2RenderTarget(map.width, map.height, 1, trinity.PIXEL_FORMAT.B8G8R8A8_UNORM)
        job = trinity.CreateRenderJob()
        job.PushRenderTarget(rt)
        job.PushDepthStencil(None)
        job.SetStdRndStates(trinity.RM_FULLSCREEN)
        job.RenderTexture(map)
        job.PopDepthStencil()
        job.PopRenderTarget()
        job.ScheduleOnce()
        if waitForFinish:
            job.WaitForFinish()
        sampler = trinity.Tr2OptixTextureSampler()
        # NOTE(review): 'if True' is a debug toggle left in the decompiled
        # source - the live branch copies the RT into a texture resource
        # rather than sampling the render target directly.
        if True:
            res = trinity.TriTextureRes()
            res.CreateAndCopyFromRenderTarget(rt)
            sampler.CreateFromTexture(res)
        else:
            sampler.CreateFromRenderTarget(rt)
        sampler.SetNormalizedIndexingMode(True)
        if True:
            return (sampler, res)
        else:
            return (sampler, rt)
    @staticmethod
    def ConvertCubeToTextures(cube):
        """Bake the six faces of cube map `cube` into individual 2D textures.

        Renders a background-only EveSpaceScene once per face direction.
        Returns (textures, names) where names is the face suffix list
        ['PX', 'NX', ...] in the same order as the textures.
        """
        names = ['PX',
         'NX',
         'PY',
         'NY',
         'PZ',
         'NZ']
        # Look directions and matching up-vectors for each cube face.
        viewVec = [(1, 0, 0),
         (-1, 0, 0),
         (0, 1, 0),
         (0, -1, 0),
         (0, 0, 1),
         (0, 0, -1)]
        upVec = [(0, 1, 0),
         (0, 1, 0),
         (0, 0, 1),
         (0, 0, -1),
         (0, 1, 0),
         (0, 1, 0)]
        spaceScene = trinity.EveSpaceScene()
        spaceScene.envMap1ResPath = str(cube.resourcePath)
        spaceScene.envMapScaling = (1, 1, -1)
        spaceScene.backgroundRenderingEnabled = True
        spaceScene.backgroundEffect = trinity.Load('res:/dx9/scene/starfield/bakeNebula.red')
        blue.resMan.Wait()
        # Ensure the bake effect has a NebulaBrightness parameter and point
        # its NebulaMap resource at the cube being converted.
        node = PD.FindParameterByName(spaceScene.backgroundEffect, 'NebulaBrightness')
        if node is None:
            node = trinity.Tr2FloatParameter()
            node.name = 'NebulaBrightness'
            spaceScene.backgroundEffect.parameters.append(node)
        if node is not None:
            node.value = 100
        node = PD.FindResourceByName(spaceScene.backgroundEffect, 'NebulaMap')
        if node is None:
            node = trinity.TriTexture2DParam()
            node.name = 'NebulaMap'
            spaceScene.backgroundEffect.resources.append(node)
        node.SetResource(cube.resource)
        blue.resMan.Wait()
        mipmapped = []
        useTexture = True
        for i in xrange(len(names)):
            name = names[i]
            rt = PD.SkinLightmapRenderer.CreateRenderTarget(cube.resource.width, cube.resource.height, trinity.PIXEL_FORMAT.B8G8R8A8_UNORM, useRT=True)
            job = trinity.CreateRenderJob(name=name)
            job.PushRenderTarget(rt)
            job.PushDepthStencil(None)
            # Per-face debug clear colors (red/green/blue pairs per axis).
            job.Clear([(1, 0, 0),
             (0.2, 0, 0),
             (0, 1, 0),
             (0, 0.2, 0),
             (0, 0, 1),
             (0, 0, 0.2)][i], None)
            # 90-degree FOV, square aspect: exactly one cube face.
            proj = trinity.TriProjection()
            proj.PerspectiveFov(math.pi * 0.5, 1, 0.1, 1000)
            view = trinity.TriView()
            view.SetLookAtPosition((0, 0, 0), viewVec[i], upVec[i])
            viewport = trinity.TriViewport(0, 0, cube.resource.width, cube.resource.height, 0.0, 1.0)
            job.SetView(view)
            job.SetProjection(proj)
            job.SetViewport(viewport)
            job.Update(spaceScene)
            job.RenderScene(spaceScene)
            job.PopDepthStencil()
            job.PopRenderTarget()
            if useTexture:
                tex = trinity.TriTextureRes(cube.resource.width, cube.resource.height, 1, trinity.PIXEL_FORMAT.B8G8R8A8_UNORM)
            # NOTE(review): another leftover debug toggle; the recurring
            # branch is dead code in this build.
            if True:
                job.ScheduleOnce()
                job.WaitForFinish()
                if useTexture:
                    mipmapped.append(tex)
                else:
                    mipmapped.append(rt)
            else:
                job.ScheduleRecurring()
        return (mipmapped, names)
    @staticmethod
    def FindAllTextureResourcesFromEffect(effect, scope):
        """Collect 2D texture resources from `effect` and register cube maps
        as six per-face samplers on `scope`.

        Returns (textures, samplers): textures maps parameter name to
        resource; samplers holds objects that must be kept alive.
        """
        textures = {}
        samplers = []
        cubemaps = []
        if effect is not None:
            for r in effect.resources:
                if type(r) == trinity.TriTexture2DParameter and r.resource is not None:
                    textures[r.name] = r.resource
                elif type(r) == trinity.TriTextureCubeParameter and r.resource is not None:
                    # Each cube map is converted once; face samplers are
                    # registered as e.g. 'EnvMapPX', 'EnvMapNX', ...
                    if r.name in cubemaps:
                        continue
                    LogInfo('', r.name, ': Converting to individual textures')
                    cubemaps.append(r.name)
                    mipmaps, names = SkinRaytracingTools.ConvertCubeToTextures(r)
                    for i in range(len(names)):
                        if i < len(mipmaps):
                            sampler = trinity.Tr2OptixTextureSampler()
                            sampler.CreateFromTexture(mipmaps[i])
                            sampler.SetNormalizedIndexingMode(True)
                            scope.SetSampler(r.name + names[i], sampler)
                            LogInfo('No-Copy Cube Side Interop for ' + r.name + names[i])
                            samplers.append(mipmaps[i])
                            samplers.append(sampler)
        return (textures, samplers)
    @staticmethod
    def FindAllTextureResources(dynamic, scope):
        """Walk all mesh areas of a skinned object / ship / station and
        gather their effects' texture resources (see
        FindAllTextureResourcesFromEffect).
        """
        textures = {}
        samplers = []
        cubemaps = []
        def ProcessMesh(mesh):
            # Opaque, decal and transparent areas all contribute textures.
            for area in itertools.chain(mesh.opaqueAreas, mesh.decalAreas, mesh.transparentAreas):
                newTextures, newSamplers = SkinRaytracingTools.FindAllTextureResourcesFromEffect(area.effect, scope)
                textures.update(newTextures)
                samplers.extend(newSamplers)
        if type(dynamic) == trinity.Tr2IntSkinnedObject:
            for mesh in dynamic.visualModel.meshes:
                ProcessMesh(mesh)
        elif type(dynamic) == trinity.EveShip2:
            ProcessMesh(dynamic.highDetailMesh.object)
        elif type(dynamic) == trinity.EveStation2:
            ProcessMesh(dynamic.highDetailMesh.object)
        return (textures, samplers)
    @staticmethod
    def InteropTexture(name, texture, waitForFinish, scope):
        """Expose a Trinity texture to OptiX under sampler `name` on `scope`.

        B8G8R8A8 textures are shared without a copy; cube textures are
        skipped; everything else goes through a render-target copy.
        Returns the (sampler, resource) pair to keep alive, or None for
        unsupported cube textures.
        """
        if texture.format == trinity.PIXEL_FORMAT.B8G8R8A8_UNORM:
            sampler = trinity.Tr2OptixTextureSampler()
            sampler.CreateFromTexture(texture)
            sampler.SetNormalizedIndexingMode(True)
            scope.SetSampler(name, sampler)
            LogInfo('No-Copy Interop for', name)
            return (sampler, None)
        if texture.type == trinity.TRIRTYPE_CUBETEXTURE:
            LogInfo('Copy-Interop for cubes not supported, skipping', name)
            return
        sampler_rt = SkinRaytracingTools.CreateSamplerForTexture(name, texture, waitForFinish)
        if sampler_rt is None or len(sampler_rt) < 1:
            LogInfo('InteropTexture failed for', name)
        else:
            scope.SetSampler(name, sampler_rt[0])
            LogInfo('Interop for', name)
        return sampler_rt
    @staticmethod
    def InteropAllTexturesFromEffect(optix, effect, waitForFinish, nameTranslation = None, scope = None, cache = None):
        """Interop every texture of `effect` into `scope` (default: the
        optix context itself).

        nameTranslation -- optional dict renaming effect parameter names.
        cache -- optional dict keyed by texture resource, reused across
        effects to avoid duplicate interop.
        Returns the list of objects to keep alive.
        """
        if scope is None:
            scope = optix
        textures, samplers = SkinRaytracingTools.FindAllTextureResourcesFromEffect(effect, scope)
        for name, texture in textures.iteritems():
            # Spotlight maps are handled elsewhere; skip them here.
            if 'spotlight' in name.lower():
                continue
            if nameTranslation is not None:
                name = nameTranslation.get(name, name)
            if cache is not None and texture in cache:
                sampler = cache[texture]
                scope.SetSampler(name, sampler[0])
                LogInfo('Interop cache for', name)
            else:
                sampler = SkinRaytracingTools.InteropTexture(name, texture, waitForFinish, scope)
                if sampler and cache is not None:
                    cache[texture] = sampler
            if sampler is not None:
                samplers.append(sampler)
        return samplers
    @staticmethod
    def InteropAllTextures(optix, dynamic, waitForFinish, nameTranslation = None, scope = None):
        """Interop every texture used by a whole object (see
        FindAllTextureResources); uncached variant of
        InteropAllTexturesFromEffect.
        """
        if scope is None:
            scope = optix
        textures, samplers = SkinRaytracingTools.FindAllTextureResources(dynamic, scope)
        for name, texture in textures.iteritems():
            if 'spotlight' in name.lower():
                continue
            if nameTranslation is not None:
                name = nameTranslation.get(name, name)
            sampler = SkinRaytracingTools.InteropTexture(name, texture, waitForFinish, scope)
            if sampler is not None:
                samplers.append(sampler)
        return samplers
    @staticmethod
    def SafeLinearize(values):
        """Gamma-2.2-linearize an RGBA tuple while preserving values above
        1.0: channels are normalized by their peak (min 1) before pow and
        rescaled after.  Alpha passes through unchanged.
        """
        peak = max(1, max(values[0], max(values[1], values[2])))
        return (peak * math.pow(values[0] / peak, 2.2),
         peak * math.pow(values[1] / peak, 2.2),
         peak * math.pow(values[2] / peak, 2.2),
         values[3])
    @staticmethod
    def CopyParametersToContext(effect, instance, linearNames = None):
        """Copy float4 / float parameters from a Trinity effect onto an
        OptiX scope; names listed in `linearNames` get gamma-linearized.
        """
        for p in effect.parameters:
            if type(p) is trinity.Tr2Vector4Parameter:
                value = SkinRaytracingTools.SafeLinearize(p.value) if linearNames is not None and p.name in linearNames else p.value
                instance.SetFloat4(p.name, value[0], value[1], value[2], value[3])
            elif type(p) is trinity.TriFloatParameter or type(p) is trinity.Tr2FloatParameter:
                # Scalars are padded into a float4 with zeros.
                instance.SetFloat4(p.name, p.value, 0, 0, 0)
    @staticmethod
    def CreateBufferForLights(lights, leaveEmpty = False, preserveAlpha = False):
        """Pack scene lights into a 64-byte-stride OptiX user buffer.

        Layout per light (16 floats): position.xyz, radius, linearized
        rgb color, falloff (or raw alpha when preserveAlpha), cone
        direction.xyz, cos(outer), cos(inner), 3x padding.
        leaveEmpty -- allocate for len(lights) but write no entries.
        """
        bufEveLights = trinity.Tr2OptixBuffer()
        bufEveLights.CreateUserData(64, len(lights), trinity.OPTIX_BUFFER_OUTPUT, False)
        bufEveLights.MapUser()
        buffer = ''
        if leaveEmpty:
            lights = []
        for light in lights:
            innerAngle = light.coneAlphaInner
            outerAngle = light.coneAlphaOuter
            # Keep at least 1 degree between inner and outer cone angles.
            if innerAngle + 1.0 > outerAngle:
                innerAngle = outerAngle - 1.0
            innerAngle = math.cos(innerAngle * 3.1415927 / 180.0)
            outerAngle = math.cos(outerAngle * 3.1415927 / 180.0)
            coneDir = geo2.Vec3Normalize((light.coneDirection[0], light.coneDirection[1], light.coneDirection[2]))
            # NOTE(review): redundant - struct is already imported at module
            # scope; kept byte-identical here.
            import struct
            buffer += struct.pack('16f', light.position[0], light.position[1], light.position[2], light.radius, math.pow(light.color[0], 2.2), math.pow(light.color[1], 2.2), math.pow(light.color[2], 2.2), light.falloff if not preserveAlpha else light.color[3], coneDir[0], coneDir[1], coneDir[2], outerAngle, innerAngle, 0, 0, 0)
        bufEveLights.SetUserDataFromStruct(buffer)
        bufEveLights.UnmapUser()
        return bufEveLights
    @staticmethod
    def CreateUInt1Buffer(optix, name):
        """Create a 1x1 uint input/output buffer, zero it, and register it
        on the context under `name` (used for device-side counters).
        """
        buffer = trinity.Tr2OptixBuffer()
        buffer.CreateUInt1(1, 1, trinity.OPTIX_BUFFER_INPUT_OUTPUT)
        buffer.Map()
        buffer.SetUserDataI(0, 0)
        buffer.Unmap()
        optix.SetBuffer(name, buffer)
        return buffer
    @staticmethod
    def matEqual(m1, m2):
        # Element-wise equality for TriMatrix (no __eq__ on the native type).
        return m1._11 == m2._11 and m1._12 == m2._12 and m1._13 == m2._13 and m1._14 == m2._14 and m1._21 == m2._21 and m1._22 == m2._22 and m1._23 == m2._23 and m1._24 == m2._24 and m1._31 == m2._31 and m1._32 == m2._32 and m1._33 == m2._33 and m1._34 == m2._34 and m1._41 == m2._41 and m1._42 == m2._42 and m1._43 == m2._43 and m1._44 == m2._44
    @staticmethod
    def FuncWrapper(weakSelf, func):
        # Invoke func(target) only while the weakly-referenced target is
        # still alive; lets render-job callbacks avoid keeping owners alive.
        if weakSelf():
            func(weakSelf())
class OitHelper():
    """Owns the order-independent-transparency (OIT) buffers of an OptiX
    context: a device-side allocation counter and the fragment pool.
    """

    def __init__(self, optix):
        # Device counter from which OIT fragment slots are allocated.
        self.oitAllocatorBuffer = SkinRaytracingTools.CreateUInt1Buffer(optix, 'oit_allocator')
        # Fragment pool: 1M entries of (64 + 112) bytes each.
        pool = trinity.Tr2OptixBuffer()
        pool.CreateUserData(64 + 112, 1048576, trinity.OPTIX_BUFFER_INPUT_OUTPUT, True)
        optix.SetBuffer('oit_pool', pool)
        self.oitPoolBuffer = pool

    def ResetAllocationCount(self):
        """Zero the device-side allocation counter before a launch."""
        allocator = self.oitAllocatorBuffer
        allocator.Map()
        allocator.SetUserDataI(0, 0)
        allocator.Unmap()

    def GetAllocationCount(self):
        """Read back how many OIT slots the last launch consumed."""
        allocator = self.oitAllocatorBuffer
        allocator.Map()
        value = allocator.GetUserDataI(0)
        allocator.Unmap()
        return value
class RayCountHelper():
    """Wraps the 'ray_count' device counter used for rays/sec statistics."""

    def __init__(self, optix):
        self.rayCountBuffer = SkinRaytracingTools.CreateUInt1Buffer(optix, 'ray_count')

    def ResetCount(self):
        """Zero the ray counter before a launch."""
        counter = self.rayCountBuffer
        counter.Map()
        counter.SetUserDataI(0, 0)
        counter.Unmap()

    def GetCount(self):
        """Read back the number of rays traced by the last launch."""
        counter = self.rayCountBuffer
        counter.Map()
        value = counter.GetUserDataI(0)
        counter.Unmap()
        return value
class CaptureHelper():
    """Owns a B8G8R8A8 render target used to capture screenshots of the
    raytraced output.
    """

    def __init__(self, width, height):
        self.capture = trinity.Tr2RenderTarget(width, height, 1, trinity.PIXEL_FORMAT.B8G8R8A8_UNORM)

    def SaveSurfaceToFile(self, filename):
        """Write the captured surface to disk and log the destination."""
        trinity.SaveRenderTarget(filename, self.capture)
        LogInfo('Saved to', filename)

    def CreateRenderSteps(self, rj, blitfx):
        """Append steps to render job `rj` that blit `blitfx` into the
        capture target (push target/depth, blit, pop both).
        """
        step = rj.PushRenderTarget(self.capture)
        step.name = 'Begin screenshot capture'
        step = rj.PushDepthStencil(None)
        step.name = ' push depth'
        step = rj.RenderEffect(blitfx)
        step.name = ' Blit to screenshot'
        step = rj.PopDepthStencil()
        step.name = ' pop depth'
        step = rj.PopRenderTarget()
        step.name = 'End screenshot capture'
class FullScreenBlitter():
    """Loads and wires the fullscreen post-processing effects used to
    present the OptiX output: gamma blit, highpass filter, and a
    horizontal + vertical gaussian blur for the bloom.
    """

    def __init__(self, width, height):
        # Each effect is loaded from its .fx path; a missing resource
        # aborts construction early (later attributes then stay unset).
        self.effect = trinity.Tr2Effect()
        self.effect.effectFilePath = 'res:/graphics/effect/optix/shaders/gammaBlit.fx'
        if self.effect.effectResource is None:
            LogWarn('Failed to load effect 1')
            return
        self.highpassEffect = trinity.Tr2Effect()
        self.highpassEffect.effectFilePath = 'res:/graphics/effect/optix/shaders/highpassFilter.fx'
        if self.highpassEffect.effectResource is None:
            LogWarn('Failed to load effect 1')
            return
        self.gaussianHorizEffect = trinity.Tr2Effect()
        self.gaussianHorizEffect.effectFilePath = 'res:/graphics/effect/optix/shaders/gaussianBlur.fx'
        if self.gaussianHorizEffect.effectResource is None:
            LogWarn('Failed to load effect 3')
            return
        self.gaussianVertEffect = trinity.Tr2Effect()
        self.gaussianVertEffect.effectFilePath = 'res:/graphics/effect/optix/shaders/gaussianBlur.fx'
        if self.gaussianVertEffect.effectResource is None:
            LogWarn('Failed to load effect 3')
            return
        # Block until all four effect resources finish async loading.
        for effect in [self.effect,
         self.highpassEffect,
         self.gaussianHorizEffect,
         self.gaussianVertEffect]:
            while effect.effectResource.isLoading:
                PD.Yield()
        # Shared 'Color' parameter: scales accumulated output by 1/framecount
        # (see UpdateFrameCount); appended to every effect.
        self.blitcolor = trinity.Tr2Vector4Parameter()
        self.blitcolor.name = 'Color'
        for effect in [self.effect,
         self.highpassEffect,
         self.gaussianHorizEffect,
         self.gaussianVertEffect]:
            effect.PopulateParameters()
            effect.RebuildCachedData()
            effect.parameters.append(self.blitcolor)
        # Pixel-size constants for the blit/highpass shaders.
        sizesParam = trinity.Tr2Vector4Parameter()
        sizesParam.name = 'InvSize'
        sizesParam.value = (1.0 / width,
         1.0 / height,
         0,
         0)
        for effect in [self.effect, self.highpassEffect]:
            effect.parameters.append(sizesParam)
        # Separable blur: horizontal pass steps in x only ...
        sizesHorizParam = trinity.Tr2Vector4Parameter()
        sizesHorizParam.name = 'invTexelSize'
        sizesHorizParam.value = (1.0 / width,
         0.0,
         0,
         0)
        self.gaussianHorizEffect.parameters.append(sizesHorizParam)
        # ... and the vertical pass steps in y only.
        sizesVertParam = trinity.Tr2Vector4Parameter()
        sizesVertParam.name = 'invTexelSize'
        sizesVertParam.value = (0.0,
         1.0 / height,
         0,
         0)
        self.gaussianVertEffect.parameters.append(sizesVertParam)

    def SetTexture(self, optixOutputTexture, highpassTexture, filteredTexture):
        """Wire the intermediate textures into the post chain:
        optix output -> blit + highpass; highpass -> horizontal blur;
        filtered -> vertical blur; highpass also feeds the final blit's
        'BloomTexture' input.
        """
        tex = trinity.TriTexture2DParameter()
        tex.name = 'Texture'
        tex.SetResource(optixOutputTexture)
        for effect in [self.effect, self.highpassEffect]:
            effect.resources.append(tex)
        tex = trinity.TriTexture2DParameter()
        tex.name = 'Texture'
        tex.SetResource(highpassTexture)
        self.gaussianHorizEffect.resources.append(tex)
        tex = trinity.TriTexture2DParameter()
        tex.name = 'Texture'
        tex.SetResource(filteredTexture)
        self.gaussianVertEffect.resources.append(tex)
        tex = trinity.TriTexture2DParameter()
        tex.name = 'BloomTexture'
        tex.SetResource(highpassTexture)
        self.effect.resources.append(tex)

    def UpdateFrameCount(self, framecount):
        """Set the shared 'Color' scale to 1/framecount so the accumulated
        progressive-render buffer is averaged on blit.
        """
        invFC = 1.0 / framecount if framecount > 0 else 1.0
        self.blitcolor.value = (invFC,
         invFC,
         invFC,
         invFC)
class FullOptixRenderer():
__guid__ = 'paperDoll.FullOptixRenderer'
instance = None
    def AddCallback(self, func, name, rj):
        """Append a python-callback step named `name` to render job `rj`.

        The callback holds only a weak reference to self (via FuncWrapper)
        so a scheduled job does not keep the renderer alive.
        """
        cb = trinity.TriStepPythonCB()
        weakSelf = weakref.ref(self)
        cb.SetCallback(lambda : SkinRaytracingTools.FuncWrapper(weakSelf, func))
        cb.name = name
        rj.steps.append(cb)
    def GetFrameCount(self):
        """Return the number of frames accumulated since the last camera move."""
        return self.framecount
    def SaveScreenshot(self, filename):
        """Save the current capture render target to `filename`."""
        self.capture.SaveSurfaceToFile(filename)
    def AddRenderPreviewStep(self, renderJob):
        """Append steps that blit the raytraced output fullscreen into the
        current back buffer (gamma blit effect, no depth).
        """
        renderJob.SetStdRndStates(trinity.RM_FULLSCREEN).name = ' [optix] fullscreen quad'
        renderJob.PushDepthStencil(None).name = ' [optix] push depth'
        renderJob.RenderEffect(self.blitfx.effect).name = ' [optix] Blit to screenshot'
        renderJob.PopDepthStencil().name = ' [optix] pop depth'
    def RefreshMatrices(self):
        """Re-upload the skinned object's bone matrices to OptiX, redo the
        skinning/tesselation pass and reapply render settings.
        """
        model = self.skinnedObject
        self.optix.RefreshMatrices(model, self.skinnedOptix)
        self.RunSkinningAndTesselation()
        self.ApplySettings()
        print 'Refreshed'
    @staticmethod
    def RaytraceFrame(selfRef):
        """Per-frame callback: launch one progressive-render iteration.

        Static with an explicit `selfRef` because it is invoked through a
        weakref-based render-job callback (see AddCallback).
        """
        start = time.time()
        VP = SkinRaytracingTools.SetOptixMatrixFromTrinity(selfRef.optix, 'clipToWorld', selfRef.width / float(selfRef.height))
        if not SkinRaytracingTools.matEqual(VP, selfRef.previousVP):
            # Camera moved: restart progressive accumulation and refresh
            # the depth-of-field focal distance (auto-focus on the nearest
            # eyeball unless overridden in settings).
            selfRef.previousVP = VP
            selfRef.outputBuffer.Clear()
            selfRef.framecount = 0
            model = selfRef.skinnedObject
            pos1 = model.GetBonePosition(model.GetBoneIndex('fj_eyeballLeft'))
            pos2 = model.GetBonePosition(model.GetBoneIndex('fj_eyeballRight'))
            dist1 = geo2.Vec3Distance(pos1, trinity.GetViewPosition())
            dist2 = geo2.Vec3Distance(pos2, trinity.GetViewPosition())
            autodof = min(dist1, dist2)
            dof = selfRef.settings.get('lens_focal_distance', autodof)
            print 'Auto-depth-of-field is at', autodof, ', actual focal distance is', dof
            selfRef.optix.SetFloat3('depthOfField', dof - trinity.GetFrontClip(), selfRef.settings['lens_radius'], 0)
        else:
            selfRef.framecount += 1
        selfRef.optix.SetUInt('frameIteration', selfRef.framecount)
        # Reset device-side counters, then launch entry point 0.
        selfRef.oit.ResetAllocationCount()
        selfRef.rayCounter.ResetCount()
        time1 = time.time()
        selfRef.optix.Run(0, selfRef.width, selfRef.height)
        time2 = time.time()
        sec = time2 - time1
        raycount = selfRef.rayCounter.GetCount()
        raysec = 0
        if sec > 0:
            raysec = raycount / float(sec)
        time3 = time.time()
        # Every 32 frames: push the accumulated buffer to the display
        # texture and print timing / rays-per-second statistics.
        if selfRef.framecount % 32 == 0:
            stop = time.time()
            print selfRef.oit.GetAllocationCount(), 'oit allocations'
            selfRef.blitfx.UpdateFrameCount(selfRef.framecount)
            selfRef.outputBuffer.CopyToTexture(selfRef.outputTexture)
            print 'time %05.3f / %05.3f / %05.3f / %05.3f msec' % (float(time1 - start) * 1000,
             float(time2 - time1) * 1000,
             float(time3 - time2) * 1000,
             float(stop - time3) * 1000),
            print '%d rays in %05.3f ms / %10d Krays/sec / %d rays per pixel' % (raycount,
             sec * 1000,
             raysec / 1000,
             selfRef.framecount)
    @telemetry.ZONE_METHOD
    def OnBeforeOptixPositionsUV(self):
        """Pre-render hook: switch scattering meshes to the world-position
        UV-unwrap effect and restrict the scene to the skinned object.

        Saves the original mesh list in self.savedMeshes so OnAfterOptix
        can restore it.
        """
        PD.SkinLightmapRenderer.DoChangeEffect('oxPosWorldUVEffect', self.oxMeshes)
        if self.skinnedObject is not None and self.skinnedObject.visualModel is not None:
            self.savedMeshes = self.skinnedObject.visualModel.meshes[:]
        filteredMeshes = [ ref.object for ref in self.oxMeshes.iterkeys() if ref.object is not None ]
        PD.SkinLightmapRenderer.DoCopyMeshesToVisual(self.skinnedObject, filteredMeshes)
        self.scene.filterList.removeAt(-1)
        self.scene.filterList.append(self.skinnedObject)
        self.scene.useFilterList = True
    @telemetry.ZONE_METHOD
    def OnBeforeOptixNormalsUV(self):
        """Pre-render hook: switch scattering meshes to the world-normal
        UV-unwrap effect (counterpart of OnBeforeOptixPositionsUV).
        """
        PD.SkinLightmapRenderer.DoChangeEffect('oxNormalWorldUVEffect', self.oxMeshes)
    def OnAfterOptix(self):
        """Post-render hook: restore the original shaders and mesh list
        saved by OnBeforeOptixPositionsUV and drop the scene filter.
        """
        PD.SkinLightmapRenderer.DoRestoreShaders(meshes=self.oxMeshes)
        PD.SkinLightmapRenderer.DoCopyMeshesToVisual(self.skinnedObject, self.savedMeshes)
        del self.savedMeshes
        self.scene.useFilterList = False
        self.scene.filterList.removeAt(-1)
    def _InitUVUnwrap(self):
        """Bake UV-space world-position, world-normal and stretch maps for
        all scattering meshes of the skinned object.

        Runs a one-shot render job that renders the character three times
        in UV space; results land in self.oxWorldPosMapUV,
        self.oxWorldNormalMapUV and self.stretchMap.
        """
        self.oxMeshes = {}
        self.scatterFX = set()
        # Unwrap resolution; the stretch map is baked at half size.
        self.unwrapSize = 1024
        posUV = PD.SkinLightmapRenderer.PreloadEffect(PD.SkinLightmapRenderer.OPTIX_POSWORLD_UV_EFFECT)
        normalUV = PD.SkinLightmapRenderer.PreloadEffect(PD.SkinLightmapRenderer.OPTIX_NORMALWORLD_UV_EFFECT)
        deriv = PD.SkinLightmapRenderer.PreloadEffect(PD.SkinLightmapRenderer.STRETCHMAP_RENDERER_EFFECT)
        self.oxDepth = trinity.Tr2DepthStencil(self.unwrapSize, self.unwrapSize, trinity.DEPTH_STENCIL_FORMAT.D24S8, 1, 0)
        # Collect the scattering meshes and prepare their optix bake effects.
        for mesh in self.skinnedObject.visualModel.meshes:
            if PD.SkinLightmapRenderer.IsScattering(mesh):
                m = PD.SkinLightmapRenderer.Mesh()
                m.ExtractOrigEffect(mesh)
                m.CreateOptixEffects(includeStretchMap=True)
                PD.AddWeakBlue(self, 'oxMeshes', mesh, m)
                fx = PD.GetEffectsFromMesh(mesh)
                for f in fx:
                    self.scatterFX.add(f)
        self.oxWorldPosMapUV = PD.SkinLightmapRenderer.CreateRenderTarget(self.unwrapSize, self.unwrapSize, trinity.PIXEL_FORMAT.R32G32B32A32_FLOAT, useRT=True)
        self.oxWorldNormalMapUV = PD.SkinLightmapRenderer.CreateRenderTarget(self.unwrapSize, self.unwrapSize, trinity.PIXEL_FORMAT.R32G32B32A32_FLOAT, useRT=True)
        self.stretchMap = PD.SkinLightmapRenderer.CreateRenderTarget(self.unwrapSize / 2, self.unwrapSize / 2, trinity.PIXEL_FORMAT.R32G32B32A32_FLOAT, useRT=True)
        # Pass 1: world positions in UV space.
        rj = trinity.CreateRenderJob('Optix UV Unwrap')
        rj.PushRenderTarget(self.oxWorldPosMapUV)
        rj.PushDepthStencil(self.oxDepth)
        rj.Clear((0, 0, 0, 0), 1.0)
        rj.SetStdRndStates(trinity.RM_FULLSCREEN)
        vp = trinity.TriViewport()
        vp.x = 0
        vp.y = 0
        vp.width = self.unwrapSize
        vp.height = self.unwrapSize
        rj.SetViewport(vp)
        PD.SkinLightmapRenderer.AddCallback(self, FullOptixRenderer.OnBeforeOptixPositionsUV, 'onBeforeOptixPositionsUV', rj)
        rj.RenderScene(self.scene).name = 'Optix WorldPos (UV space)'
        # Pass 2: world normals into the second target.
        PD.SkinLightmapRenderer.AddCallback(self, lambda weakSelf: PD.SkinLightmapRenderer.DoChangeEffect('oxNormalWorldUVEffect', meshes=weakSelf.oxMeshes), '', rj)
        rj.SetRenderTarget(self.oxWorldNormalMapUV)
        rj.Clear((0, 0, 0, 0), 1.0)
        rj.RenderScene(self.scene).name = 'Optix Normals (UV space)'
        # Pass 3: stretch map at half resolution.
        rj.SetRenderTarget(self.stretchMap)
        rj.Clear((0, 0, 0, 0), 1.0)
        vp2 = trinity.TriViewport()
        vp2.x = 0
        vp2.y = 0
        vp2.width = self.unwrapSize / 2
        vp2.height = self.unwrapSize / 2
        rj.SetViewport(vp2)
        PD.SkinLightmapRenderer.AddCallback(self, lambda weakSelf: PD.SkinLightmapRenderer.DoChangeEffect('stretchmapRenderEffect', meshes=weakSelf.oxMeshes), '', rj)
        rj.RenderScene(self.scene).name = 'Stretchmap'
        PD.SkinLightmapRenderer.AddCallback(self, FullOptixRenderer.OnAfterOptix, 'onAfterOptix', rj)
        rj.PopRenderTarget()
        rj.PopDepthStencil()
        rj.ScheduleOnce()
        rj.WaitForFinish()
        # Debug dump of the baked maps (disabled).
        if False:
            PD.SkinLightmapRenderer.SaveTarget(self.oxWorldPosMapUV, 'c:/depot/oxworldposuv2.dds', isRT=True)
            PD.SkinLightmapRenderer.SaveTarget(self.oxWorldNormalMapUV, 'c:/depot/oxworldnormaluv2.dds', isRT=True)
            PD.SkinLightmapRenderer.SaveTarget(self.stretchMap, 'c:/depot/stretchmap2.dds', isRT=True)
            print '** MAPS SAVED **'
    def RunSkinningAndTesselation(self):
        """Pre-skin (and, for detailed hair, tesselate) every OptiX geometry batch.

        For each supported vertex stride (72 and 64 bytes per vertex) the matching
        skinning kernel pair is loaded and run over every batch of that stride;
        the output buffer then replaces the batch geometry's vertex buffer so the
        raytracer sees the posed mesh.  Fur-shell batches are skipped entirely.
        Entry point 0 runs the plain skinning kernel, entry point 1 the
        tesselating variant (which outputs 4 triangles per input triangle).
        Batch tuple layout (from optix.CreateFromSkinnedModel): [0]=geometry,
        [1]=trinity effect, [2]=vertex buffer, [3]=index buffer, [5]=first index,
        [6]=triangle count, [7]=matrix buffer, [8]=bytes per vertex.
        """
        print '*** Tesselation phase ***'
        batchTypes = self.skinnedOptix[0]
        optix = self.optix
        ptx = {}
        ptx[72] = self.path + 'eve_skinning_kernel72.ptx'
        ptx[64] = self.path + 'eve_skinning_kernel64.ptx'
        for bytes, ptxfile in ptx.iteritems():
            LogInfo('Processing ', bytes, 'bytes/vertex')
            skinningProgram = trinity.Tr2OptixProgram(ptxfile, 'kernel_no_tesselation')
            skinningProgramTesselate = trinity.Tr2OptixProgram(ptxfile, 'kernel_tesselation')
            optix.SetEntryPointCount(2)
            optix.SetRayGenerationProgram(0, skinningProgram)
            optix.SetRayGenerationProgram(1, skinningProgramTesselate)
            for batchType in range(len(batchTypes)):
                batches = batchTypes[batchType]
                out = []

                def needsTesselation(fx):
                    # Only the detailed hair shader requires the tesselation kernel.
                    return 'skinnedavatarhair_detailed.fx' in fx.effectFilePath.lower()

                # Pass 1: allocate one output vertex buffer per eligible batch.
                # None placeholders keep `out` index-aligned with `batches`.
                for batch in batches:
                    if 'furshell' in batch[1].effectFilePath.lower():
                        out.append(None)
                        continue
                    tesselate = needsTesselation(batch[1])
                    triangle_count = batch[6]
                    bytes_per_vertex = batch[8]
                    if bytes_per_vertex != bytes:
                        out.append(None)
                        continue
                    vertex_buffer_output = trinity.Tr2OptixBuffer()
                    # Tesselation emits 4x the triangles, hence 4x the vertices.
                    vertex_buffer_output.CreateUserData(bytes_per_vertex, triangle_count * 3 * 4 if tesselate else triangle_count * 3, trinity.OPTIX_BUFFER_INPUT_OUTPUT, True)
                    out.append(vertex_buffer_output)
                # Pass 2: run the kernel per batch and swap the skinned buffer in.
                for i, batch in enumerate(batches):
                    if 'furshell' in batch[1].effectFilePath.lower():
                        continue
                    triangle_count = batch[6]
                    tesselate = needsTesselation(batch[1])
                    bytes_per_vertex = batch[8]
                    if bytes_per_vertex != bytes:
                        continue
                    if tesselate:
                        LogInfo('Tesselating geometry ', batch, ' of type ', batchType)
                    else:
                        LogInfo('Skinning geometry ', batch, ' of type ', batchType)
                    optix.SetBuffer('vertex_buffer', batch[2])
                    optix.SetBuffer('index_buffer', batch[3])
                    optix.SetBuffer('vertex_buffer_output', out[i])
                    optix.SetUInt('first_index_index', batch[5])
                    optix.SetBuffer('matrix_buffer', batch[7])
                    # Entry point 1 = tesselating kernel, 0 = plain skinning.
                    program = int(tesselate)
                    optix.Run(program, triangle_count, 1)
                    batch[0].SetBuffer('vertex_buffer', out[i])
                    if tesselate:
                        batch[0].SetPrimitiveCount(triangle_count * 4)
        # Restore the main ray-generation program on both entry points.
        optix.SetRayGenerationProgram(0, self.raygen)
        optix.SetRayGenerationProgram(1, self.raygen)
    def RemoveBadGeometry(self, model):
        """Hide geometry the raytracer cannot handle and record the beard effect.

        Detects a beard decal area (flagging self.haveBeard / self.beardFx and
        hiding it -- beard geometry is regenerated later as GPU splines) and
        strips eyeshadow transparent areas.  The `if False:` branches are
        disabled debug paths kept for reference.
        """
        self.haveBeard = False
        self.beardFx = None
        for mesh in model.visualModel.meshes:
            for area in mesh.decalAreas:
                if PD.IsBeard(area):
                    self.haveBeard = True
                    self.beardFx = area.effect
                    area.debugIsHidden = True
                    break
        for mesh in model.visualModel.meshes:
            for area in mesh.transparentAreas:
                lname = area.name.lower()
                if lname.startswith('eyeshadow_'):
                    # NOTE(review): removeAt(-1) drops the *last* transparent
                    # area, not necessarily the matched one -- confirm intended.
                    mesh.transparentAreas.removeAt(-1)
                    break
        if False:
            # Disabled debug path: strip all non-eye areas.
            for mesh in model.visualModel.meshes:
                for area in mesh.opaqueAreas:
                    lname = area.name.lower()
                    if 'eye' not in lname or 'eyewet' in lname or 'eyelash' in lname:
                        mesh.opaqueAreas.removeAt(-1)
                        break
                for area in mesh.transparentAreas:
                    lname = area.name.lower()
                    if 'eye' not in lname or 'eyewet' in lname or 'eyelash' in lname:
                        mesh.transparentAreas.removeAt(-1)
                        break
        if False:
            # Disabled debug path: raytrace only hair meshes.
            print 'raytracing', len(model.visualModel.meshes), 'meshes'
            for mesh in model.visualModel.meshes:
                lname = mesh.name.lower()
                if not lname.startswith('hair'):
                    print 'removing', lname
                    mesh.opaqueAreas.removeAt(-1)
                    mesh.decalAreas.removeAt(-1)
                    mesh.transparentAreas.removeAt(-1)
                elif False:
                    print 'removing', lname
                    for a in mesh.opaqueAreas:
                        print 'opaque', a.name
                    for a in mesh.decalAreas:
                        print 'decal', a.name
                    for a in mesh.transparentAreas:
                        print 'transp', a.name
                    mesh.opaqueAreas.removeAt(-1)
                    mesh.decalAreas.removeAt(-1)
                    mesh.transparentAreas.removeAt(-1)
                else:
                    print 'keeping', lname
def TransferBeardParameters(self, optix):
if self.haveBeard:
LogInfo('Beard found')
beardLength = self.settings['beardLength']
optix.SetFloat3('beardOptions', beardLength[0], beardLength[1], self.settings['beardGravity'])
floatMap = {'FurLength': 'beard_fur_length',
'UVScale': 'beard_uv_scale',
'AlphaMultiplier': 'beard_alpha_multiplier',
'CombStrength': 'beard_comb_strength',
'FurGrainRotation': 'beard_fur_grain_rotation',
'MirrorGrain': 'beard_mirror_grain',
'FurParallax': 'beard_fur_parallax'}
float3Map = {'gravityOffset': 'beard_gravity_offset',
'MaterialDiffuseColor': 'beard_diffuse_color'}
for param in self.beardFx.parameters:
optixName = floatMap.get(param.name, None)
if optixName is not None:
optix.SetFloat(optixName, param.value)
else:
optixName = float3Map.get(param.name, None)
if optixName is not None:
optix.SetFloat3(optixName, param.value[0], param.value[1], param.value[2])
    def GenerateBeardGeometry(self, optix, path, any_hit_shadow):
        """Generate beard hair splines on the GPU and wrap them as OptiX geometry.

        Returns a Tr2OptixGeometryInstance holding bezier-curve hair geometry,
        or None when no beard was found.  `any_hit_shadow` becomes the curves'
        shadow-ray (ray type 1) program.
        """
        if not self.haveBeard:
            return None
        LogInfo('generating beard splines')
        SkinRaytracingTools.SetOptixMatrixFromTrinity(optix, 'clipToWorld')
        beardProgram = trinity.Tr2OptixProgram(path + 'eve_beard_kernel.ptx', 'kernel')
        curveOutputBuffer = trinity.Tr2OptixBuffer()
        curveCount = 512
        # 80 bytes per curve record, curveCount x curveCount curves.
        curveOutputBuffer.CreateUserData(80, curveCount * curveCount, trinity.OPTIX_BUFFER_INPUT_OUTPUT, True)
        optix.SetBuffer('output', curveOutputBuffer)
        # Temporarily run the generation kernel with a single ray type; the
        # original ray type count is restored after the Run below.
        rayTypeCount = optix.GetRayTypeCount()
        optix.SetRayTypeCount(1)
        optix.SetEntryPointCount(2)
        optix.SetRayGenerationProgram(0, beardProgram)
        optix.SetRayGenerationProgram(1, beardProgram)
        optix.SetEntryPointCount(1)
        LogInfo('beard: about to Run')
        optix.Run(0, curveCount, curveCount)
        LogInfo('beard: Run done')
        optix.SetRayTypeCount(rayTypeCount)
        hairGeometry = trinity.Tr2OptixGeometry()
        hairGeometry.InitializeFromProgram(path + 'bezier_curves.ptx', 'intersect', 'bounds')
        subdivideDepth = 2
        # Each generated curve is pre-subdivided into 2**subdivideDepth primitives.
        hairGeometry.SetPrimitiveCount(curveCount * curveCount * (1 << subdivideDepth))
        optix.SetUInt('presubdivide_depth', subdivideDepth)
        optix.SetBuffer('curves', curveOutputBuffer)
        LogInfo('beard: geometry setup done')
        beardInstance = trinity.Tr2OptixGeometryInstance()
        beardInstance.SetGeometry(hairGeometry)
        closest_hit_BeardShader = trinity.Tr2OptixProgram(path + 'eve_beard_shader.ptx', 'closest_hit_BeardShader')
        beardMaterial = trinity.Tr2OptixMaterial()
        beardMaterial.SetClosestHit(0, closest_hit_BeardShader)
        beardMaterial.SetAnyHit(1, any_hit_shadow)
        beardInstance.SetMaterial(beardMaterial)
        LogInfo('beard: geometry instance setup done')
        return beardInstance
    def _DoInit(self, scene = None):
        """Build the complete OptiX raytracing pipeline for the character scene.

        Locates the Tr2IntSkinnedObject, strips geometry the raytracer cannot
        handle, creates the OptiX context and all material programs, skins and
        tesselates the geometry, wires interop textures and lights, compiles and
        validates the context, then schedules the recurring 'FullOptixRenderer'
        render job that progressively accumulates frames.  Logs a warning and
        returns early on any missing prerequisite.
        """
        model = None
        if scene is None:
            scene = PD.SkinLightmapRenderer.Scene()
        self.scene = scene
        self.previousVP = trinity.TriMatrix()
        self.framecount = 1
        self.useOIT = True
        if scene is None:
            LogWarn('No scene!')
            return
        # Find the single skinned character in the scene's dynamics.
        for dynamic in scene.dynamics:
            if dynamic.__typename__ == 'Tr2IntSkinnedObject':
                model = dynamic
                break
        else:
            LogWarn('No Tr2IntSkinnedObject found')
            return
        if model is None:
            LogWarn('No Tr2IntSkinnedObject found')
            return
        self.skinnedObject = model
        if self.skinnedObject.visualModel is None:
            LogWarn('skinnedObject has no visualMeshes')
            return
        # Output size defaults to the swapchain render target unless overridden
        # via the 'outputWidth'/'outputHeight' settings.
        bg = trinity.renderContext.GetDefaultBackBuffer()
        step = trinity.renderJobs.FindStepByName('SET_SWAPCHAIN_RT')
        if step is not None:
            bg = step.renderTarget
        self.width = self.settings.get('outputWidth', bg.width)
        self.height = self.settings.get('outputHeight', bg.height)
        self.blitfx = FullScreenBlitter(self.width, self.height)
        self.RemoveBadGeometry(model)
        outputTexture = trinity.TriTextureRes(self.width, self.height, 1, trinity.PIXEL_FORMAT.R32G32B32A32_FLOAT)
        self.outputTexture = outputTexture
        self.capture = CaptureHelper(self.width, self.height)
        self._InitUVUnwrap()
        # Make sure a previous instance of this job is no longer scheduled.
        for steps in trinity.renderJobs.recurring:
            if steps.name == 'FullOptixRenderer':
                steps.UnscheduleRecurring()
        start = time.clock()
        optix = trinity.Tr2Optix()
        self.optix = optix
        optix.SetInteropDevice()
        optix.SetRayTypeCount(4)
        optix.SetEntryPointCount(1)
        if False:
            optix.EnableAllExceptions()
        optix.SetPrintEnabled(True)
        optix.SetPrintBufferSize(16384)
        # Ray type ids consumed by the .ptx programs.
        optix.SetUInt('radiance_ray_type', 0)
        optix.SetUInt('shadow_ray_type', 1)
        optix.SetUInt('translucency_ray_type', 2)
        # NOTE(review): 'translucency_ray_type' is assigned twice; this second
        # call overwrites the value with 3 -- one of the two probably meant a
        # different variable name.  Confirm against the kernel sources.
        optix.SetUInt('translucency_ray_type', 3)
        optix.SetFloat('scene_epsilon', 0.001)
        optix.SetUInt('frameIteration', 0)
        self.outputBuffer = trinity.Tr2OptixBuffer()
        self.outputBuffer.CreateFloat4(self.width, self.height, trinity.OPTIX_BUFFER_INPUT_OUTPUT)
        optix.SetBuffer('output_buffer', self.outputBuffer)
        self.ApplySettings()
        path = str(blue.paths.ResolvePath('res:/graphics/effect/optix/NCC/'))
        self.path = path
        LogInfo('Getting files from', path)
        # `everything` holds python references to all OptiX objects so they are
        # kept alive for the lifetime of the context (stored in self.everything).
        everything = []
        any_hit_shadow = trinity.Tr2OptixProgram(path + 'eve_shadow.ptx', 'any_hit_shadow')
        any_hit_shadow_blend = trinity.Tr2OptixProgram(path + 'eve_shadow.ptx', 'any_hit_shadow_blend')
        shader_diffuse_only_feeler = trinity.Tr2OptixProgram(path + 'eve_bounce.ptx', 'closest_hit_DiffuseOnlyFeeler2')
        any_hit_cutout = trinity.Tr2OptixProgram(path + 'eve_cutout.ptx', 'any_hit_CutoutMask')
        any_hit_diffuse_feeler_blend = trinity.Tr2OptixProgram(path + 'eve_shadow.ptx', 'any_hit_diffuse_feeler_blend')
        everything.append(any_hit_shadow)
        everything.append(any_hit_shadow_blend)
        everything.append(shader_diffuse_only_feeler)
        everything.append(any_hit_cutout)
        # Ray indices used when wiring materials (ray 2 is not wired here).
        mainRay = 0
        shadowRay = 1
        bounceRay = 3

        def MakeMaterialWithShader(shader):
            # Standard opaque material: shader on the main ray, shared shadow
            # and diffuse-bounce programs on the other rays.
            material = trinity.Tr2OptixMaterial()
            material.SetClosestHit(mainRay, shader)
            material.SetAnyHit(shadowRay, any_hit_shadow)
            material.SetClosestHit(bounceRay, shader_diffuse_only_feeler)
            everything.append(material)
            return (material, shader)

        def MakeMaterial(ptxFile, shaderName):
            # Load a closest-hit program and wrap it in a standard material.
            shader = trinity.Tr2OptixProgram(path + ptxFile + '.ptx', shaderName)
            everything.append(shader)
            return MakeMaterialWithShader(shader)

        def MakeDecal(material):
            # Decal variant: alpha-cutout any-hit programs on every ray.
            material.SetAnyHit(mainRay, any_hit_cutout)
            material.SetAnyHit(shadowRay, any_hit_shadow_blend)
            material.SetAnyHit(bounceRay, any_hit_cutout)

        skin_single_material, skin_single_shade = MakeMaterial('eve_skin', 'closest_hit_ShadeSinglePassSkin_Single2')
        skin_single_material_scatter = MakeMaterial('eve_skin', 'closest_hit_ShadeSinglePassSkin_Single_Scatter2')[0]
        skin_single_material_decal = MakeMaterialWithShader(skin_single_shade)[0]
        MakeDecal(skin_single_material_decal)
        glasses_shade = trinity.Tr2OptixProgram(path + 'eve_glasses.ptx', 'glasses_shade')
        glasses_shadow = trinity.Tr2OptixProgram(path + 'eve_glasses.ptx', 'glasses_shadow')
        glass_material = trinity.Tr2OptixMaterial()
        glass_material.SetAnyHit(mainRay, glasses_shade)
        glass_material.SetAnyHit(shadowRay, glasses_shadow)
        glass_material.SetClosestHit(bounceRay, shader_diffuse_only_feeler)
        everything.append(glasses_shade)
        everything.append(glasses_shadow)
        vizNames = ['closest_hit_VizNormal',
         'closest_hit_VizUV',
         'closest_hit_VizConstantColor',
         'closest_hit_VizDiffuse']
        vizualizer, vizualizer_shade = MakeMaterial('eve_basic', vizNames[0])
        vizualizer_decal = MakeMaterialWithShader(vizualizer_shade)[0]
        MakeDecal(vizualizer_decal)
        skin_double_material, skin_double_shade = MakeMaterial('eve_skin', 'closest_hit_ShadeSinglePassSkin_Double2')
        skin_double_material_decal = MakeMaterialWithShader(skin_double_shade)[0]
        MakeDecal(skin_double_material_decal)
        skin_double_material_transparent = MakeMaterial('eve_skin', 'closest_hit_ShadeSinglePassSkin_Double2_Blend')[0]
        skin_double_material_transparent.SetAnyHit(mainRay, any_hit_cutout)
        skin_double_material_transparent.SetAnyHit(shadowRay, any_hit_shadow_blend)
        skin_double_material_transparent.SetAnyHit(bounceRay, any_hit_cutout)
        avatar_brdf_material, avatar_brdf_shade = MakeMaterial('eve_brdf', 'closest_hit_ShadeAvatarBRDF_Single2')
        avatar_brdf_material_decal = MakeMaterialWithShader(avatar_brdf_shade)[0]
        MakeDecal(avatar_brdf_material_decal)
        avatar_brdf_double_material, avatar_brdf_double_shade = MakeMaterial('eve_brdf', 'closest_hit_ShadeAvatarBRDF_Double2')
        avatar_brdf_double_material_decal = MakeMaterialWithShader(avatar_brdf_double_shade)[0]
        MakeDecal(avatar_brdf_double_material_decal)
        # Hair uses order-independent transparency (OIT) when enabled above.
        avatar_hair_material = trinity.Tr2OptixMaterial()
        avatar_hair_shade = trinity.Tr2OptixProgram(path + 'eve_hair.ptx', 'closest_hit_ShadeAvatarHair2' if self.useOIT else 'closest_hit_ShadeAvatarHair2_Blend')
        avatar_hair_material.SetClosestHit(mainRay, avatar_hair_shade)
        if self.useOIT:
            avatar_hair_oit = trinity.Tr2OptixProgram(path + 'eve_hair.ptx', 'any_hit_HairOIT')
            avatar_hair_material.SetAnyHit(mainRay, avatar_hair_oit)
        avatar_hair_material.SetAnyHit(shadowRay, any_hit_shadow_blend)
        avatar_hair_material.SetClosestHit(bounceRay, shader_diffuse_only_feeler)
        everything.append(avatar_hair_shade)
        everything.append(avatar_hair_material)
        avatar_hair_material_decal = trinity.Tr2OptixMaterial()
        avatar_hair_material_decal.SetClosestHit(mainRay, avatar_hair_shade)
        avatar_hair_material_decal.SetAnyHit(mainRay, avatar_hair_oit if self.useOIT else any_hit_cutout)
        avatar_hair_material_decal.SetAnyHit(shadowRay, any_hit_shadow_blend)
        avatar_hair_material_decal.SetClosestHit(bounceRay, shader_diffuse_only_feeler)
        avatar_hair_material_decal.SetAnyHit(bounceRay, any_hit_cutout)
        everything.append(avatar_hair_material_decal)
        eye_shade = trinity.Tr2OptixProgram(path + 'eve_eyes.ptx', 'closest_hit_ShadeEye')
        eye_material = trinity.Tr2OptixMaterial()
        eye_material.SetClosestHit(mainRay, eye_shade)
        eye_material.SetAnyHit(shadowRay, any_hit_shadow)
        everything.append(eye_shade)
        everything.append(eye_material)
        eye_wetness_shade = trinity.Tr2OptixProgram(path + 'eve_eyes.ptx', 'closest_hit_ShadeEyeWetness')
        eye_wetness_material = trinity.Tr2OptixMaterial()
        eye_wetness_material.SetClosestHit(mainRay, eye_wetness_shade)
        eye_wetness_material.SetAnyHit(shadowRay, any_hit_shadow)
        everything.append(eye_wetness_shade)
        everything.append(eye_wetness_material)
        portrait_basic_material, portrait_basic_shade = MakeMaterial('eve_basic', 'closest_hit_ShadePortraitBasic')
        portrait_basic_material_decal = MakeMaterialWithShader(portrait_basic_shade)[0]
        MakeDecal(portrait_basic_material_decal)
        LogInfo('global setup OK', time.clock() - start, 'seconds')

        def MakeSamplerFromMap(texture, name):
            # Wrap an existing render target as an OptiX sampler (no-copy interop).
            sampler = trinity.Tr2OptixTextureSampler()
            sampler.CreateFromSurface(texture)
            sampler.SetNormalizedIndexingMode(True)
            optix.SetSampler(name, sampler)
            LogInfo('No-Copy Interop for ', name)
            everything.append(sampler)

        # UV-space maps produced earlier by _InitUVUnwrap.
        MakeSamplerFromMap(self.oxWorldPosMapUV, 'world_pos_uv_buffer')
        MakeSamplerFromMap(self.oxWorldNormalMapUV, 'world_normal_uv_buffer')
        MakeSamplerFromMap(self.stretchMap, 'stretchmap_buffer')
        useHdrProbe = False
        if useHdrProbe:
            optix.SetSamplerFromProbe('hdr_probe_sampler', 'c:/depot/optix/data/Japan_subway2_FINAL.hdr')
        start = time.clock()
        # Build OptiX geometry for both supported vertex strides (72 and 64 B).
        self.skinnedOptix = optix.CreateFromSkinnedModel(model, 72, path + 'triangle72.ptx', 'mesh_intersect', 'mesh_bounds', 64, path + 'triangle64.ptx', 'mesh_intersect', 'mesh_bounds')
        optixBatches = self.skinnedOptix[0]
        self.TransferBeardParameters(optix)
        group = trinity.Tr2OptixGeometryGroup()
        groupChildren = []
        self.rayCounter = RayCountHelper(self.optix)
        self.oit = OitHelper(self.optix)
        self.raygen = trinity.Tr2OptixProgram(path + 'raygen.ptx', 'ray_request')
        self.RunSkinningAndTesselation()
        start = time.clock()
        samplers = SkinRaytracingTools.InteropAllTextures(optix, model, waitForFinish=True)
        everything.append(samplers)
        backdrop = trinity.TriTexture2DParameter()
        backdrop.resourcePath = self.settings['backgroundBitmap']
        skinmap = trinity.TriTexture2DParameter()
        skinmap.resourcePath = 'res:/Graphics/Character/female/paperdoll/head/head_generic/SkinMap.png'
        blue.resMan.Wait()
        everything.append(SkinRaytracingTools.InteropTexture('BackgroundEnvMap', backdrop.resource, waitForFinish=True, scope=optix))
        everything.append(SkinRaytracingTools.InteropTexture('SkinMap', skinmap.resource, waitForFinish=True, scope=optix))
        LogInfo('texture interop OK', time.clock() - start, 'seconds')
        splines = self.GenerateBeardGeometry(optix, path, any_hit_shadow)
        if splines is not None:
            groupChildren.append(splines)
        print '*** Raytracing phase ***'

        def SetAlphaRef(instance, batchType):
            # Decal batches (type 1) use a high alpha cutoff, transparent
            # batches (type 2) a near-zero one.
            if batchType == 1:
                instance.SetFloat4('alphaRef', 0.75, 0, 0, 0)
            elif batchType == 2:
                instance.SetFloat4('alphaRef', 0.01, 0, 0, 0)

        haveGlasses = False
        # Assign a material to every geometry batch based on its effect path.
        for batchType in range(len(optixBatches)):
            isOpaque = batchType == 0
            batches = optixBatches[batchType]
            for batch in batches:
                if 'furshell' in batch[1].effectFilePath.lower():
                    continue
                instance = trinity.Tr2OptixGeometryInstance()
                everything.append(instance)
                instance.SetGeometry(batch[0])
                # Random color used only by the constant-color debug visualizer.
                r = random.random()
                g = random.random()
                b = random.random()
                instance.SetFloat4('viz_constant_color', r, g, b, 1.0)
                fxpath = batch[1].effectFilePath.lower()
                if False:
                    instance.SetMaterial(vizualizer if isOpaque else vizualizer_decal)
                elif 'glassshader' in fxpath:
                    instance.SetMaterial(glass_material)
                    if not haveGlasses:
                        haveGlasses = True
                elif 'skinnedavatarbrdfsinglepassskin_single.fx' in fxpath:
                    if batch[1] in self.scatterFX:
                        instance.SetMaterial(skin_single_material_scatter)
                    else:
                        instance.SetMaterial(skin_single_material if isOpaque else skin_single_material_decal)
                    SetAlphaRef(instance, batchType)
                elif 'skinnedavatarbrdfsinglepassskin_double.fx' in fxpath:
                    instance.SetMaterial([skin_double_material, skin_double_material_decal, skin_double_material_transparent][batchType])
                    SetAlphaRef(instance, batchType)
                elif 'skinnedavatarbrdflinear.fx' in fxpath:
                    instance.SetMaterial(avatar_brdf_material if isOpaque else avatar_brdf_material_decal)
                elif 'skinnedavatarbrdfdoublelinear.fx' in fxpath:
                    instance.SetMaterial(avatar_brdf_double_material if isOpaque else avatar_brdf_double_material_decal)
                elif 'skinnedavatarhair_detailed.fx' in fxpath:
                    instance.SetMaterial(avatar_hair_material if isOpaque else avatar_hair_material_decal)
                    instance.SetFloat4('alphaRef', 0.01, 0, 0, 0)
                    instance.SetUInt('enableCulling', 0)
                elif 'eyeshader.fx' in fxpath:
                    instance.SetMaterial(eye_material)
                elif 'eyewetnessshader.fx' in fxpath:
                    instance.SetMaterial(eye_wetness_material)
                elif 'portraitbasic.fx' in fxpath:
                    instance.SetMaterial(portrait_basic_material if isOpaque else portrait_basic_material_decal)
                else:
                    # Unknown effect: fall back to the debug visualizer.
                    instance.SetMaterial(vizualizer if isOpaque else vizualizer_decal)
                SkinRaytracingTools.CopyParametersToContext(batch[1], instance)
                groupChildren.append(instance)
        group.SetChildCount(len(groupChildren))
        for x in xrange(len(groupChildren)):
            group.SetChild(x, groupChildren[x])
        everything.append(group)
        group.SetAcceleration('Bvh', 'Bvh')
        LogInfo('scene interop OK', time.clock() - start, 'seconds')
        start = time.clock()
        bufEveLights = SkinRaytracingTools.CreateBufferForLights(scene.lights, useHdrProbe)
        optix.SetBuffer('trinity_lights', bufEveLights)
        LogInfo('lights interop OK', time.clock() - start, 'seconds')
        start = time.clock()
        optix.SetGeometryGroup('top_scene', group)
        optix.SetGeometryGroup('shadow_casters', group)
        optix.SetRayGenerationProgram(0, self.raygen)
        optix.SetEntryPointCount(1)
        miss = None
        if not useHdrProbe:
            miss = trinity.Tr2OptixProgram(path + 'eve_miss.ptx', 'miss')
        else:
            miss = trinity.Tr2OptixProgram(path + 'eve_miss_probe.ptx', 'miss')
        # NOTE(review): only ray type 3 (bounce) gets a miss program here;
        # presumably the other ray types handle misses in-kernel -- confirm.
        optix.SetMissProgram(3, miss)
        optix.SetFloat3('bg_color', 1.0, 0, 0)
        everything.append(miss)
        if False:
            exception = trinity.Tr2OptixProgram(path + 'eve_miss.ptx', 'exception')
            optix.SetExceptionProgram(0, exception)
            everything.append(exception)
        optix.SetStackSize(4096)
        self.everything = everything
        SkinRaytracingTools.SetOptixMatrixFromTrinity(optix, 'clipToWorld', self.width / float(self.height))
        LogInfo('general setup OK', time.clock() - start, 'seconds')
        optix.ReportObjectCounts()
        start = time.clock()
        optix.Compile()
        LogInfo('compile OK', time.clock() - start, 'seconds')
        start = time.clock()
        optix.Validate()
        LogInfo('validate OK', time.clock() - start, 'seconds')
        start = time.clock()
        # A zero-sized launch forces the BVH build before the first real frame.
        optix.Run(0, 0, 0)
        LogInfo('BVH OK', time.clock() - start, 'seconds')
        start = time.clock()
        self.blitfx.SetTexture(outputTexture, outputTexture, outputTexture)
        # Recurring job: raytrace, copy CUDA output to texture, blit to screen.
        rj = trinity.CreateRenderJob('FullOptixRenderer')
        rj.PushRenderTarget(self.outputRT)
        rj.PushDepthStencil(None)
        self.AddCallback(FullOptixRenderer.RaytraceFrame, 'Raytrace Frame', rj)
        rj.CopyRtToTexture(outputTexture).name = 'cuda -> outputTexture'
        rj.PopDepthStencil()
        rj.PopRenderTarget()
        rj.SetStdRndStates(trinity.RM_FULLSCREEN).name = 'fullscreen quad'
        rj.RenderEffect(self.blitfx.effect).name = ' blit'
        self.capture.CreateRenderSteps(rj, self.blitfx.effect)
        rj.steps.append(trinity.TriStepRenderFps())
        rj.ScheduleRecurring(insertFront=False)
        self.renderJob = rj
        LogInfo('final setup OK', time.clock() - start, 'seconds')
        # Hide the source model and suspend the regular paper-doll pipeline.
        model.display = False
        self.EnablePaperDollJobs(False)
@staticmethod
def EnablePaperDollJobs(enable):
if False:
for job in trinity.renderJobs.recurring:
if 'paperdollrenderjob' in job.name.lower():
for step in job.steps:
step.enabled = enable
if enable:
trinity.device.tickInterval = 10
else:
trinity.device.tickInterval = 0
def ApplySettings(self):
self.optix.SetFloat('light_size', self.settings['light_size'])
self.optix.SetFloat3('depthOfField', 1.0, self.settings['lens_radius'], 0)
self.optix.SetFloat('HairShadows', self.settings['HairShadows'])
self.optix.SetFloat('EnvMapBoost', self.settings['EnvMapBoost'] / 3.1415927)
self.previousVP.Identity()
    def SetLensRadius(self, lens_radius):
        """Set the depth-of-field lens radius and re-apply settings to OptiX."""
        self.settings['lens_radius'] = lens_radius
        self.ApplySettings()
def SetLensFocalDistance(self, lens_focal_distance):
if lens_focal_distance <= 0:
self.settings.pop('lens_focal_distance', 0)
else:
self.settings['lens_focal_distance'] = lens_focal_distance
self.ApplySettings()
    def SetLightSize(self, light_size):
        """Set the area-light size (soft shadow radius) and re-apply settings."""
        self.settings['light_size'] = light_size
        self.ApplySettings()
    def SetHairShadowsEnabled(self, enabled):
        """Enable/disable hair shadowing (stored as a float flag for the kernel)."""
        self.settings['HairShadows'] = float(enabled)
        self.ApplySettings()
    def SetBackgroundIntensity(self, intensity):
        """Set the environment-map intensity multiplier and re-apply settings."""
        self.settings['EnvMapBoost'] = intensity
        self.ApplySettings()
def __init__(self, scene = None, backgroundBitmap = None, memento = None, beardLength = (0.01, 0.01), beardGravity = 0.0005, outputWidth = None, outputHeight = None, asyncSetup = True, listenForUpdate = True):
LogInfo('init', self)
blue.motherLode.maxMemUsage = 0
blue.resMan.ClearAllCachedObjects()
self.framecount = 0
self.listenForUpdate = listenForUpdate
if memento is not None:
self.settings = memento
else:
self.settings = {}
self.settings['light_size'] = 0.125
self.settings['lens_radius'] = 0.001
self.settings['HairShadows'] = 1.0
self.settings['EnvMapBoost'] = 1.0
self.settings['backgroundBitmap'] = backgroundBitmap if backgroundBitmap is not None else 'res:/texture/global/red_blue_ramp.dds'
self.settings['beardLength'] = beardLength
self.settings['beardGravity'] = beardGravity
if outputWidth is not None:
self.settings['outputWidth'] = outputWidth
if outputHeight is not None:
self.settings['outputHeight'] = outputHeight
if asyncSetup:
uthread.new(self._DoInit, scene=scene)
else:
self._DoInit(scene=scene)
    def GetMemento(self):
        """Return the settings dict, suitable for passing as `memento` to __init__."""
        return self.settings
    def __del__(self):
        """Unschedule the render job and release OptiX resources.

        Cleanup only runs when _DoInit completed (renderJob exists).  The
        explicit dels drop the python references that keep OptiX objects
        alive before the leak-check report.
        """
        LogInfo('deleting', self)
        if hasattr(self, 'renderJob'):
            self.renderJob.UnscheduleRecurring()
            self.renderJob = None
            del self.raygen
            del self.rayCounter
            del self.oit
            del self.outputBuffer
            del self.skinnedOptix
            del self.everything
            LogInfo('Post-cleanup leak check:')
            self.optix.ReportObjectCounts()
            self.EnablePaperDollJobs(True)
    @staticmethod
    def Pause():
        """Stop the singleton's recurring raytrace job without destroying it."""
        if FullOptixRenderer.instance is not None:
            FullOptixRenderer.instance.renderJob.UnscheduleRecurring()
    @staticmethod
    def NotifyUpdate():
        """Rebuild the singleton renderer, preserving its settings.

        Called when the scene/character changed; the old instance is released
        before the replacement is constructed so GPU resources are freed first.
        """
        if FullOptixRenderer.instance is not None and FullOptixRenderer.instance.listenForUpdate:
            LogInfo('NotifyUpdate, restarting', FullOptixRenderer.instance)
            memento = FullOptixRenderer.instance.GetMemento()
            # Drop the old instance first so its __del__ cleanup runs.
            FullOptixRenderer.instance = None
            FullOptixRenderer.instance = FullOptixRenderer(memento=memento)
class ShipOptixRenderer():
    """OptiX raytracer for ship/station scenes (counterpart of FullOptixRenderer)."""
    __guid__ = 'paperDoll.ShipOptixRenderer'
    # Module-wide singleton slot, mirroring FullOptixRenderer.instance.
    instance = None
def AddCallback(self, func, name, rj):
cb = trinity.TriStepPythonCB()
weakSelf = weakref.ref(self)
cb.SetCallback(lambda : SkinRaytracingTools.FuncWrapper(weakSelf, func))
cb.name = name
rj.steps.append(cb)
    def GetFrameCount(self):
        """Return the number of progressively accumulated frames so far."""
        return self.framecount
    def SaveScreenshot(self, filename):
        """Write the most recently captured frame to `filename` via the capture helper."""
        self.capture.SaveSurfaceToFile(filename)
    def AddRenderPreviewStep(self, renderJob):
        """Append steps to `renderJob` that blit the raytraced output full-screen."""
        renderJob.SetStdRndStates(trinity.RM_FULLSCREEN).name = ' [optix] fullscreen quad'
        renderJob.PushDepthStencil(None).name = ' [optix] push depth'
        renderJob.RenderEffect(self.blitfx.effect).name = ' [optix] Blit to screenshot'
        renderJob.PopDepthStencil().name = ' [optix] pop depth'
    @staticmethod
    def RaytraceFrame(selfRef):
        """Per-frame callback: advance the progressive raytrace by one iteration.

        When the view-projection matrix changed since the previous frame the
        accumulation buffer is cleared and depth of field re-derived; otherwise
        the frame counter advances so the kernel keeps accumulating.  Written
        as a staticmethod taking `selfRef` so the job callback needs no bound
        method (see AddCallback's weakref wrapper).
        """
        start = time.time()
        VP = SkinRaytracingTools.SetOptixMatrixFromTrinity(selfRef.optix, 'clipToWorld', selfRef.width / float(selfRef.height))
        if not SkinRaytracingTools.matEqual(VP, selfRef.previousVP):
            # Camera moved: restart progressive accumulation.
            selfRef.previousVP = VP
            selfRef.outputBuffer.Clear()
            selfRef.framecount = 0
            # NOTE(review): pos1 and pos2 are both the origin, so autodof is
            # just the camera's distance to the origin -- confirm intended.
            pos1 = (0, 0, 0)
            pos2 = pos1
            dist1 = geo2.Vec3Distance(pos1, trinity.GetViewPosition())
            dist2 = geo2.Vec3Distance(pos2, trinity.GetViewPosition())
            autodof = min(dist1, dist2)
            dof = selfRef.settings.get('lens_focal_distance', autodof)
            LogInfo('Auto-depth-of-field is at', autodof, ', actual focal distance is', dof)
            selfRef.optix.SetFloat3('depthOfField', dof - trinity.GetFrontClip(), selfRef.settings['lens_radius'], 0)
        else:
            selfRef.framecount += 1
        selfRef.optix.SetUInt('frameIteration', selfRef.framecount)
        selfRef.oit.ResetAllocationCount()
        selfRef.rayCounter.ResetCount()
        time1 = time.time()
        selfRef.optix.Run(0, selfRef.width, selfRef.height)
        time2 = time.time()
        traceTime = time2 - time1
        raycount = selfRef.rayCounter.GetCount()
        raysec = 0
        if traceTime > 0:
            raysec = raycount / float(traceTime)
        time3 = time.time()
        if selfRef.framecount % 32 == 0:
            # Periodic report of order-independent-transparency allocations.
            oit = selfRef.oit.GetAllocationCount()
            if oit > 0:
                print oit, 'oit allocations'
        selfRef.blitfx.UpdateFrameCount(selfRef.framecount)
        selfRef.outputBuffer.CopyToTexture(selfRef.outputTexture)
        stop = time.time()
        message = 'time: call %05.3f / trace %05.3f / read %05.3f ms' % (float(time1 - start) * 1000, float(time2 - time1) * 1000, float(stop - time3) * 1000)
        message += '// traced %d rays in %05.3f ms / %10d Krays/sec / %d frames' % (raycount,
         traceTime * 1000,
         raysec / 1000,
         selfRef.framecount)
        LogInfo(message)
    def ConvertCubeMapToSH(self, optix, ptxPath, cubeResPath):
        """Project an environment cube map into 9 spherical-harmonic coefficients.

        Binds a 9x1 float4 buffer as 'sh_buffer' in `optix`, wires each cube
        face as a no-copy interop sampler, and runs the cubemapsh kernel over
        the face resolution.  The coefficients stay in self.shBuffer for the
        shaders to read.
        """
        self.shBuffer = trinity.Tr2OptixBuffer()
        self.shBuffer.CreateFloat4(9, 1, trinity.OPTIX_BUFFER_INPUT_OUTPUT)
        optix.SetBuffer('sh_buffer', self.shBuffer)
        self.shBuffer.Clear()
        program = trinity.Tr2OptixProgram(ptxPath + 'cubemapsh.ptx', 'kernel')
        optix.SetRayGenerationProgram(0, program)
        optix.ReportObjectCounts()
        cube = trinity.TriTextureCubeParameter()
        cube.resourcePath = cubeResPath
        cube.name = 'Nebula'
        blue.resMan.Wait()
        # Bind each cube face texture as its own sampler (no-copy interop).
        mipmaps, names = SkinRaytracingTools.ConvertCubeToTextures(cube)
        for i in range(len(names)):
            if i < len(mipmaps):
                sampler = trinity.Tr2OptixTextureSampler()
                sampler.CreateFromTexture(mipmaps[i])
                sampler.SetNormalizedIndexingMode(True)
                optix.SetSampler(cube.name + names[i], sampler)
                LogInfo('No-Copy Cube Side Interop for ' + cube.name + names[i])
        optix.Run(0, cube.resource.width, cube.resource.width)
        if False:
            # Disabled debug dump of the nine SH coefficients.
            names = ['Y00',
             'Y1m1',
             'Y10',
             'Y11',
             'Y2m2',
             'Y2m1',
             'Y20',
             'Y21',
             'Y22']
            self.shBuffer.Map()
            ofs = 0
            for name in names:
                print name, ': (',
                print self.shBuffer.GetUserDataF(ofs), ',',
                ofs = ofs + 4
                print self.shBuffer.GetUserDataF(ofs), ',',
                ofs = ofs + 4
                print self.shBuffer.GetUserDataF(ofs), ')'
                ofs = ofs + 4
            self.shBuffer.Unmap()
def CachedCreateMaterial(self, path, effect):
material = self.materialCache.get(effect, None)
if material is not None:
return material
shader = None
if effect in ('tripleglowv3', 'doubleglowv3', 'singleglowv3'):
shader = trinity.Tr2OptixProgram(path + 'v3ship_glow.ptx', 'closest_hit_' + effect)
elif effect in ('singleheatv3',):
shader = trinity.Tr2OptixProgram(path + 'v3ship_heat.ptx', 'closest_hit_' + effect)
elif effect in ('tripleglowoilv3',):
shader = trinity.Tr2OptixProgram(path + 'v3ship_glow_oil.ptx', 'closest_hit_' + effect)
elif effect == 'skinned_tripleglowv3':
shader = trinity.Tr2OptixProgram(path + 'v3ship_glow.ptx', 'closest_hit_tripleglowv3')
if shader is None:
return
material = trinity.Tr2OptixMaterial()
material.SetClosestHit(0, shader)
material.SetAnyHit(1, self.any_hit_shadow)
return material
def _DoInit(self, scene = None):
if scene is None:
scene = trinity.device.scene
self.scene = scene
self.previousVP = trinity.TriMatrix()
self.framecount = 1
self.materialCache = {}
self.useOIT = True
if scene is None:
LogWarn('No scene!')
return
bg = trinity.renderContext.GetDefaultBackBuffer()
step = trinity.renderJobs.FindStepByName('SET_SWAPCHAIN_RT')
if step is not None:
bg = step.renderTarget
self.width = self.settings.get('outputWidth', bg.width)
self.height = self.settings.get('outputHeight', bg.height)
self.blitfx = FullScreenBlitter(self.width, self.height)
bloomScale = 4
if False:
self.highpassRT = PD.SkinLightmapRenderer.CreateRenderTarget(self.width / bloomScale, self.height / bloomScale, trinity.PIXEL_FORMAT.R32G32B32A32_FLOAT)
self.filteredRT = PD.SkinLightmapRenderer.CreateRenderTarget(self.width / bloomScale, self.height / bloomScale, trinity.PIXEL_FORMAT.R32G32B32A32_FLOAT)
outputTexture = trinity.TriTextureRes(self.width, self.height, 1, trinity.PIXEL_FORMAT.R32G32B32A32_FLOAT)
self.outputTexture = outputTexture
self.capture = CaptureHelper(self.width, self.height)
for steps in trinity.renderJobs.recurring:
if steps.name == 'ShipOptixRenderer':
steps.UnscheduleRecurring()
path = str(blue.paths.ResolvePath('res:/graphics/effect/optix/ship/'))
self.path = path
LogInfo('Getting files from', path)
start = time.clock()
optix = trinity.Tr2Optix()
self.optix = optix
optix.SetInteropDevice()
optix.SetRayTypeCount(4)
optix.SetEntryPointCount(1)
if False:
optix.EnableAllExceptions()
if False:
optix.SetPrintEnabled(True)
optix.SetPrintBufferSize(16384)
optix.SetFloat('scene_epsilon', 0.01)
optix.SetUInt('frameIteration', 0)
nebula = PD.FindResourceByName(scene.backgroundEffect, 'NebulaMap') if scene.backgroundEffect is not None else None
if nebula is not None:
LogInfo('Converting to SH ', nebula.resourcePath)
self.ConvertCubeMapToSH(optix, path, nebula.resourcePath)
else:
self.shBuffer = trinity.Tr2OptixBuffer()
self.shBuffer.CreateFloat4(9, 1, trinity.OPTIX_BUFFER_INPUT_OUTPUT)
optix.SetBuffer('sh_buffer', self.shBuffer)
self.shBuffer.Clear()
self.outputBuffer = trinity.Tr2OptixBuffer()
self.outputBuffer.CreateFloat4(self.width, self.height, trinity.OPTIX_BUFFER_INPUT_OUTPUT)
optix.SetBuffer('output_buffer', self.outputBuffer)
self.ApplySettings()
everything = []
mainRay = 0
shadowRay = 1
bounceRay = 3
def MakeMaterialWithShader(shader):
return (material, shader)
def MakeMaterial(ptxFile, shaderName):
everything.append(shader)
return MakeMaterialWithShader(shader)
LogInfo('global setup OK', time.clock() - start, 'seconds')
useHdrProbe = False
start = time.clock()
self.rayCounter = RayCountHelper(self.optix)
self.oit = OitHelper(self.optix)
self.raygen = trinity.Tr2OptixProgram(path + 'raygen.ptx', 'ray_request')
shader = trinity.Tr2OptixProgram(path + 'vizualizer.ptx', 'closest_hit_VizGreen')
viz_material = trinity.Tr2OptixMaterial()
viz_material.SetClosestHit(0, shader)
everything.append(viz_material)
if False:
any_hit_shadow = trinity.Tr2OptixProgram(path + 'shadow.ptx', 'any_hit_shadow')
viz_material.SetAnyHit(1, any_hit_shadow)
self.any_hit_shadow = any_hit_shadow
else:
self.any_hit_shadow = None
start = time.clock()
nameTranslation = {'GlowNormalSpecularMap': 'NormalMap'}
def GroupByVertexBuffer(optixBatches):
output = []
for batchType in range(len(optixBatches)):
batches = optixBatches[batchType]
vbDict = {}
for batch in batches:
vb = batch[2]
list = vbDict.get(vb, None)
if list is not None:
list.append(batch)
else:
vbDict[vb] = [batch]
list = []
for vb in vbDict.iterkeys():
list.append(vbDict[vb])
output.append(list)
return output
cache = {}
programs = {'skinned_tripleglowv3_48': 'triangle48',
'singlev3_48': 'triangle48',
'singleheatv3_48': 'triangle48',
'tripleglowv3_40': 'triangle40',
'singleheatv3_40': 'triangle40',
'singlefresnelreflectionwithglow_56': 'triangle56',
'doublefresnelreflectionwithglow_56': 'triangle56',
'tripleglowoilv3_80': 'triangle80'}
if False:
nullintersect = trinity.Tr2OptixProgram(path + 'nullgeometry.ptx', 'intersect')
nullbounds = trinity.Tr2OptixProgram(path + 'nullgeometry.ptx', 'bounds')
everything.append(nullintersect)
everything.append(nullbounds)
mylogOK = set({})
mylogFail = set({})
linearNames = set({})
linearNames.add('MaterialDiffuseColor')
linearNames.add('MaterialReflectionColor')
linearNames.add('MaskDiffuseColor')
linearNames.add('MaskReflectionColor')
linearNames.add('SubMaskDiffuseColor')
linearNames.add('SubMaskReflectionColor')
linearNames.add('GlowColor')
topScene = trinity.Tr2OptixGroup()
interopSamplerCache = {}
for dynamic in scene.objects:
if dynamic.__typename__ not in ('EveShip2', 'EveStation2'):
continue
model = dynamic
if model.highDetailMesh is None or model.highDetailMesh.object is None:
LogWarn('ship has no high detail meshes')
continue
skinnedOptix = optix.CreateFromEveSpaceObject2(model, 0, '', '', '')
everything.append(skinnedOptix)
optixBatches = skinnedOptix[0]
self.objectsToRefresh[model] = skinnedOptix
sorted = GroupByVertexBuffer(optixBatches)
groups = []
for batchType in range(len(optixBatches)):
isOpaque = batchType == 0
vbBatches = sorted[batchType]
for batches in vbBatches:
groupChildren = []
for batch in batches:
effect = batch[1].effectFilePath.lower()
effect = effect[effect.rfind('/') + 1:]
effect = effect[:effect.rfind('.fx')]
ptx = programs.get(effect + '_' + str(batch[8]), '')
if ptx == '':
mylogFail.add(effect)
batch[0].SetIntersectProgram(nullintersect)
batch[0].SetBoundsProgram(nullbounds)
continue
mylogOK.add(effect)
intersect, bounds = cache.get(ptx, (None, None))
if intersect is None:
intersect = trinity.Tr2OptixProgram(path + ptx + '.ptx', 'intersect')
bounds = trinity.Tr2OptixProgram(path + ptx + '.ptx', 'bounds')
cache[ptx] = (intersect, bounds)
batch[0].SetIntersectProgram(intersect)
batch[0].SetBoundsProgram(bounds)
batchGeometryInstance = trinity.Tr2OptixGeometryInstance()
everything.append(batchGeometryInstance)
batchGeometryInstance.SetGeometry(batch[0])
if True:
material = self.CachedCreateMaterial(path, effect)
if material is None:
material = viz_material
else:
material = viz_material
batchGeometryInstance.SetMaterial(material)
SkinRaytracingTools.CopyParametersToContext(batch[1], batchGeometryInstance, linearNames)
groupChildren.append(batchGeometryInstance)
samplers = SkinRaytracingTools.InteropAllTexturesFromEffect(optix, batch[1], waitForFinish=True, nameTranslation=nameTranslation, scope=batchGeometryInstance, cache=interopSamplerCache)
everything.append(samplers)
group = trinity.Tr2OptixGeometryGroup()
group.SetChildCount(len(groupChildren))
for x in xrange(len(groupChildren)):
group.SetChild(x, groupChildren[x])
group.SetAcceleration('Bvh', 'Bvh')
self.objectsToMarkDirty.append(group)
groups.append(group)
everything.append(cache)
baseOffset = topScene.GetChildCount()
topScene.SetChildCount(baseOffset + len(groups))
for x in xrange(len(groups)):
topScene.SetChild(baseOffset + x, groups[x])
everything.append(groups)
if False:
sphereGeometry = trinity.Tr2OptixGeometry()
sphereGeometry.InitializeFromProgram(path + 'sphere_program.ptx', 'intersect', 'bounds')
sphereGeometry.SetPrimitiveCount(1)
everything.append(sphereGeometry)
sphereInstance = trinity.Tr2OptixGeometryInstance()
sphereInstance.SetGeometry(sphereGeometry)
sphereInstance.SetMaterial(viz_material)
sphereInstance.SetFloat4('pos_r', 0, 0, 0, 100)
sphereInstance.SetFloat4('color_watt', 1, 0, 0, 1)
everything.append(sphereInstance)
group = trinity.Tr2OptixGeometryGroup()
group.SetChildCount(1)
group.SetChild(0, sphereInstance)
group.SetAcceleration('Bvh', 'Bvh')
topScene.SetChildCount(topScene.GetChildCount() + 1)
topScene.SetChild(topScene.GetChildCount() - 1, group)
everything.append(topScene)
topScene.SetAcceleration('Bvh', 'Bvh')
self.objectsToMarkDirty.append(topScene)
optix.SetGroup('top_scene', topScene)
optix.SetGroup('shadow_casters', topScene)
if len(mylogOK) > 0:
LogInfo('Converted succesfully:', str(mylogOK))
else:
LogWarn('No effects converted succesfully!')
if len(mylogFail) > 0:
LogWarn('Failed to convert:', str(mylogFail))
if type(scene) == trinity.EveSpaceScene:
c = SkinRaytracingTools.SafeLinearize(scene.sunDiffuseColor)
optix.SetFloat4('SunDiffuseColor', c[0], c[1], c[2], c[3])
c = scene.sunDirection
optix.SetFloat4('SunDirWorld', -c[0], -c[1], -c[2], 0)
c = SkinRaytracingTools.SafeLinearize(scene.ambientColor)
optix.SetFloat4('SceneAmbientColor', c[0], c[1], c[2], c[3])
c = SkinRaytracingTools.SafeLinearize(scene.fogColor)
optix.SetFloat4('SceneFogColor', c[0], c[1], c[2], c[3])
LogInfo('scene interop OK', time.clock() - start, 'seconds')
start = time.clock()
light = trinity.Tr2InteriorLightSource()
if True:
wattage = 2000000
light.color = (1,
1,
1,
wattage)
light.radius = 50
light.position = (200, 500, -300)
else:
wattage = 10000000
light.color = (1,
1,
1,
wattage)
light.radius = 1000
light.position = (0, 0, 0)
bufEveLights = SkinRaytracingTools.CreateBufferForLights([], useHdrProbe, preserveAlpha=True)
optix.SetBuffer('trinity_lights', bufEveLights)
LogInfo('lights interop OK', time.clock() - start, 'seconds')
if False:
sphereGeometry = trinity.Tr2OptixGeometry()
sphereGeometry.InitializeFromProgram(path + 'sphere_program.ptx', 'intersect', 'bounds')
sphereGeometry.SetPrimitiveCount(1)
sphereMaterial = trinity.Tr2OptixMaterial()
sphereShader = trinity.Tr2OptixProgram(path + 'sphere_program.ptx', 'closest_hit_radiance')
sphereMaterial.SetClosestHit(0, sphereShader)
sphereInstance = trinity.Tr2OptixGeometryInstance()
sphereInstance.SetGeometry(sphereGeometry)
sphereInstance.SetMaterial(sphereMaterial)
sphereInstance.SetFloat4('pos_r', light.position[0], light.position[1], light.position[2], light.radius)
sphereInstance.SetFloat4('color_watt', light.color[0], light.color[1], light.color[2], light.color[3])
n = topScene.GetChildCount()
topScene.SetChildCount(n + 1)
sphereGroup = trinity.Tr2OptixGeometryGroup()
sphereGroup.SetChildCount(1)
sphereGroup.SetChild(0, sphereInstance)
sphereGroup.SetAcceleration('Bvh', 'Bvh')
topScene.SetChild(n, sphereGroup)
start = time.clock()
optix.SetRayGenerationProgram(0, self.raygen)
optix.SetEntryPointCount(1)
miss = None
if not useHdrProbe:
miss = trinity.Tr2OptixProgram(path + 'eve_miss.ptx', 'miss')
else:
miss = trinity.Tr2OptixProgram(path + 'eve_miss_probe.ptx', 'miss')
optix.SetMissProgram(3, miss)
optix.SetFloat3('bg_color', 1.0, 0, 0)
everything.append(miss)
if False:
exception = trinity.Tr2OptixProgram(path + 'eve_miss.ptx', 'exception')
optix.SetExceptionProgram(0, exception)
everything.append(exception)
optix.SetStackSize(4096)
self.everything = everything
SkinRaytracingTools.SetOptixMatrixFromTrinity(optix, 'clipToWorld', self.width / float(self.height))
LogInfo('general setup OK', time.clock() - start, 'seconds')
optix.ReportObjectCounts()
start = time.clock()
optix.Compile()
LogInfo('compile OK', time.clock() - start, 'seconds')
start = time.clock()
optix.Validate()
LogInfo('validate OK', time.clock() - start, 'seconds')
start = time.clock()
optix.Run(0, 0, 0)
LogInfo('BVH OK', time.clock() - start, 'seconds')
start = time.clock()
if False:
self.blitfx.SetTexture(outputTexture, self.highpassRT, self.filteredRT)
else:
self.blitfx.SetTexture(outputTexture, outputTexture, outputTexture)
rj = trinity.CreateRenderJob('ShipOptixRenderer')
self.AddCallback(ShipOptixRenderer.RaytraceFrame, 'Raytrace Frame', rj)
rj.SetStdRndStates(trinity.RM_FULLSCREEN).name = 'fullscreen state'
if False:
rj.SetRenderTarget(self.highpassRT.wrappedRenderTarget).name = ' SetRT highpassRT'
rj.RenderEffect(self.blitfx.highpassEffect).name = ' high pass'
rj.SetRenderTarget(self.filteredRT.wrappedRenderTarget).name = ' SetRT filteredRT'
rj.RenderEffect(self.blitfx.gaussianHorizEffect).name = ' horizontal blur'
rj.SetRenderTarget(self.highpassRT.wrappedRenderTarget).name = ' SetRT highpassRT'
rj.RenderEffect(self.blitfx.gaussianVertEffect).name = ' vertical blur'
rj.SetStdRndStates(trinity.RM_FULLSCREEN).name = 'fullscreen state'
rj.RenderEffect(self.blitfx.effect).name = ' blit'
tp2 = None
for job in trinity.renderJobs.recurring:
if job.name == 'TrinityPanel:View1':
tp2 = job
if tp2 is None:
rj.ScheduleRecurring(insertFront=False)
else:
final = None
for step in tp2.steps:
if step.name == 'SET_FINAL_RT':
final = step
break
if final is not None:
tp2.steps.insert(tp2.steps.index(final), trinity.TriStepRunJob(rj))
else:
tp2.steps.append(trinity.TriStepRunJob(rj))
self.renderJob = rj
LogInfo('final setup OK', time.clock() - start, 'seconds')
FullOptixRenderer.EnablePaperDollJobs(False)
def ApplySettings(self):
self.optix.SetFloat('light_size', self.settings['light_size'])
self.optix.SetFloat3('depthOfField', 1.0, self.settings['lens_radius'], 0)
self.optix.SetFloat('HairShadows', self.settings['HairShadows'])
self.optix.SetFloat('EnvMapBoost', self.settings['EnvMapBoost'] / 3.1415927)
self.previousVP.Identity()
    def SetLensRadius(self, lens_radius):
        """Set the depth-of-field lens radius and push settings to optix."""
        self.settings['lens_radius'] = lens_radius
        self.ApplySettings()
def SetLensFocalDistance(self, lens_focal_distance):
if lens_focal_distance <= 0:
self.settings.pop('lens_focal_distance', 0)
else:
self.settings['lens_focal_distance'] = lens_focal_distance
self.ApplySettings()
    def SetLightSize(self, light_size):
        """Set the 'light_size' shader value (light radius; presumably drives
        shadow softness -- confirm against the PTX) and re-apply settings."""
        self.settings['light_size'] = light_size
        self.ApplySettings()
    def SetHairShadowsEnabled(self, enabled):
        """Enable/disable hair shadow rays; stored as a float flag for the
        shader (the context only takes floats here)."""
        self.settings['HairShadows'] = float(enabled)
        self.ApplySettings()
    def SetBackgroundIntensity(self, intensity):
        """Set the environment-map boost factor (ApplySettings divides it by
        pi before uploading it as 'EnvMapBoost')."""
        self.settings['EnvMapBoost'] = intensity
        self.ApplySettings()
def __init__(self, scene = None, backgroundBitmap = None, memento = None, beardLength = (0.01, 0.01), beardGravity = 0.0005, outputWidth = None, outputHeight = None, asyncSetup = True, listenForUpdate = True):
LogInfo('init', self)
blue.motherLode.maxMemUsage = 0
blue.resMan.ClearAllCachedObjects()
self.framecount = 0
self.listenForUpdate = listenForUpdate
self.everything = None
self.objectsToRefresh = {}
self.objectsToMarkDirty = []
if memento is not None:
self.settings = memento
else:
self.settings = {}
self.settings['light_size'] = 0.125
self.settings['lens_radius'] = 0.001
self.settings['HairShadows'] = 1.0
self.settings['EnvMapBoost'] = 1.0
self.settings['backgroundBitmap'] = backgroundBitmap if backgroundBitmap is not None else 'res:/texture/global/red_blue_ramp.dds'
self.settings['beardLength'] = beardLength
self.settings['beardGravity'] = beardGravity
if outputWidth is not None:
self.settings['outputWidth'] = outputWidth
if outputHeight is not None:
self.settings['outputHeight'] = outputHeight
if asyncSetup:
uthread.new(self._DoInit, scene=scene)
else:
self._DoInit(scene=scene)
    def GetMemento(self):
        """Return the live settings dict, suitable for re-creating the renderer
        via __init__(memento=...). Note: not a copy -- mutations are shared."""
        return self.settings
def __del__(self):
LogInfo('deleting', self)
if hasattr(self, 'renderJob'):
self.renderJob.UnscheduleRecurring()
self.renderJob = None
del self.any_hit_shadow
del self.raygen
del self.rayCounter
del self.oit
del self.shBuffer
del self.outputBuffer
del self.everything
del self.objectsToRefresh
del self.objectsToMarkDirty
self.optix.ClearObjects()
LogInfo('Post-cleanup leak check:')
self.optix.ReportObjectCounts()
FullOptixRenderer.EnablePaperDollJobs(True)
def RefreshMatrices(self):
for ship, optixList in self.objectsToRefresh.iteritems():
self.optix.RefreshMatrices(ship, optixList)
for dirty in self.objectsToMarkDirty:
dirty.MarkDirty()
self.ApplySettings()
LogInfo('Refreshed')
@staticmethod
def Pause():
if FullOptixRenderer.instance is not None:
FullOptixRenderer.instance.renderJob.UnscheduleRecurring()
@staticmethod
def NotifyUpdate():
if FullOptixRenderer.instance is not None and FullOptixRenderer.instance.listenForUpdate:
LogInfo('NotifyUpdate, restarting', FullOptixRenderer.instance)
memento = FullOptixRenderer.instance.GetMemento()
FullOptixRenderer.instance = None
FullOptixRenderer.instance = FullOptixRenderer(memento=memento)
|
normal
|
{
"blob_id": "3c01ca27a5eef877b606b93b04ffe6f73168cd6b",
"index": 9090,
"step-1": "#Embedded file name: c:/depot/games/branches/release/EVE-TRANQUILITY/eve/client/script/paperDoll/SkinRaytracing.py\nimport trinity\nimport blue\nimport telemetry\nimport ctypes\nimport math\nimport time\nimport geo2\nimport struct\nimport itertools\nimport weakref\nimport uthread\nimport paperDoll as PD\nimport log\nimport random\nmylog = log.Channel('optix', 'python')\n\ndef LogInfo(text, *args):\n for arg in args:\n text += ' ' + str(arg)\n\n mylog.Log(text, log.LGINFO)\n\n\ndef LogWarn(text, *args):\n for arg in args:\n text = text + ' ' + str(arg)\n\n mylog.Log(text, log.LGWARN)\n\n\nclass SkinRaytracingTools():\n __guid__ = 'paperDoll.SkinRaytracingTools'\n\n @staticmethod\n def SetOptixMatrixFromTrinity(optix, matrixName, ratio = None):\n proj = trinity.TriProjection()\n view = trinity.TriView()\n view.transform = trinity.GetViewTransform()\n proj.PerspectiveFov(trinity.GetFieldOfView(), trinity.GetAspectRatio() if ratio is None else ratio, trinity.GetFrontClip(), trinity.GetBackClip())\n projToView = geo2.MatrixInverse(proj.transform)\n viewToWorld = geo2.MatrixInverse(view.transform)\n projToWorld = geo2.MatrixMultiply(projToView, viewToWorld)\n r0 = projToWorld[0]\n r1 = projToWorld[1]\n r2 = projToWorld[2]\n r3 = projToWorld[3]\n mat = trinity.TriMatrix(r0[0], r0[1], r0[2], r0[3], r1[0], r1[1], r1[2], r1[3], r2[0], r2[1], r2[2], r2[3], r3[0], r3[1], r3[2], r3[3])\n optix.SetMatrix4x4(matrixName, mat)\n r0 = view.transform[0]\n r1 = view.transform[1]\n r2 = view.transform[2]\n r3 = view.transform[3]\n mat = trinity.TriMatrix(r0[0], r0[1], r0[2], r0[3], r1[0], r1[1], r1[2], r1[3], r2[0], r2[1], r2[2], r2[3], r3[0], r3[1], r3[2], r3[3])\n optix.SetMatrix4x4('viewTransform', mat)\n return mat\n\n @staticmethod\n def CreateSamplerForTexture(name, map, waitForFinish):\n rt = trinity.Tr2RenderTarget(map.width, map.height, 1, trinity.PIXEL_FORMAT.B8G8R8A8_UNORM)\n job = trinity.CreateRenderJob()\n job.PushRenderTarget(rt)\n job.PushDepthStencil(None)\n 
job.SetStdRndStates(trinity.RM_FULLSCREEN)\n job.RenderTexture(map)\n job.PopDepthStencil()\n job.PopRenderTarget()\n job.ScheduleOnce()\n if waitForFinish:\n job.WaitForFinish()\n sampler = trinity.Tr2OptixTextureSampler()\n if True:\n res = trinity.TriTextureRes()\n res.CreateAndCopyFromRenderTarget(rt)\n sampler.CreateFromTexture(res)\n else:\n sampler.CreateFromRenderTarget(rt)\n sampler.SetNormalizedIndexingMode(True)\n if True:\n return (sampler, res)\n else:\n return (sampler, rt)\n\n @staticmethod\n def ConvertCubeToTextures(cube):\n names = ['PX',\n 'NX',\n 'PY',\n 'NY',\n 'PZ',\n 'NZ']\n viewVec = [(1, 0, 0),\n (-1, 0, 0),\n (0, 1, 0),\n (0, -1, 0),\n (0, 0, 1),\n (0, 0, -1)]\n upVec = [(0, 1, 0),\n (0, 1, 0),\n (0, 0, 1),\n (0, 0, -1),\n (0, 1, 0),\n (0, 1, 0)]\n spaceScene = trinity.EveSpaceScene()\n spaceScene.envMap1ResPath = str(cube.resourcePath)\n spaceScene.envMapScaling = (1, 1, -1)\n spaceScene.backgroundRenderingEnabled = True\n spaceScene.backgroundEffect = trinity.Load('res:/dx9/scene/starfield/bakeNebula.red')\n blue.resMan.Wait()\n node = PD.FindParameterByName(spaceScene.backgroundEffect, 'NebulaBrightness')\n if node is None:\n node = trinity.Tr2FloatParameter()\n node.name = 'NebulaBrightness'\n spaceScene.backgroundEffect.parameters.append(node)\n if node is not None:\n node.value = 100\n node = PD.FindResourceByName(spaceScene.backgroundEffect, 'NebulaMap')\n if node is None:\n node = trinity.TriTexture2DParam()\n node.name = 'NebulaMap'\n spaceScene.backgroundEffect.resources.append(node)\n node.SetResource(cube.resource)\n blue.resMan.Wait()\n mipmapped = []\n useTexture = True\n for i in xrange(len(names)):\n name = names[i]\n rt = PD.SkinLightmapRenderer.CreateRenderTarget(cube.resource.width, cube.resource.height, trinity.PIXEL_FORMAT.B8G8R8A8_UNORM, useRT=True)\n job = trinity.CreateRenderJob(name=name)\n job.PushRenderTarget(rt)\n job.PushDepthStencil(None)\n job.Clear([(1, 0, 0),\n (0.2, 0, 0),\n (0, 1, 0),\n (0, 0.2, 0),\n (0, 
0, 1),\n (0, 0, 0.2)][i], None)\n proj = trinity.TriProjection()\n proj.PerspectiveFov(math.pi * 0.5, 1, 0.1, 1000)\n view = trinity.TriView()\n view.SetLookAtPosition((0, 0, 0), viewVec[i], upVec[i])\n viewport = trinity.TriViewport(0, 0, cube.resource.width, cube.resource.height, 0.0, 1.0)\n job.SetView(view)\n job.SetProjection(proj)\n job.SetViewport(viewport)\n job.Update(spaceScene)\n job.RenderScene(spaceScene)\n job.PopDepthStencil()\n job.PopRenderTarget()\n if useTexture:\n tex = trinity.TriTextureRes(cube.resource.width, cube.resource.height, 1, trinity.PIXEL_FORMAT.B8G8R8A8_UNORM)\n if True:\n job.ScheduleOnce()\n job.WaitForFinish()\n if useTexture:\n mipmapped.append(tex)\n else:\n mipmapped.append(rt)\n else:\n job.ScheduleRecurring()\n\n return (mipmapped, names)\n\n @staticmethod\n def FindAllTextureResourcesFromEffect(effect, scope):\n textures = {}\n samplers = []\n cubemaps = []\n if effect is not None:\n for r in effect.resources:\n if type(r) == trinity.TriTexture2DParameter and r.resource is not None:\n textures[r.name] = r.resource\n elif type(r) == trinity.TriTextureCubeParameter and r.resource is not None:\n if r.name in cubemaps:\n continue\n LogInfo('', r.name, ': Converting to individual textures')\n cubemaps.append(r.name)\n mipmaps, names = SkinRaytracingTools.ConvertCubeToTextures(r)\n for i in range(len(names)):\n if i < len(mipmaps):\n sampler = trinity.Tr2OptixTextureSampler()\n sampler.CreateFromTexture(mipmaps[i])\n sampler.SetNormalizedIndexingMode(True)\n scope.SetSampler(r.name + names[i], sampler)\n LogInfo('No-Copy Cube Side Interop for ' + r.name + names[i])\n samplers.append(mipmaps[i])\n samplers.append(sampler)\n\n return (textures, samplers)\n\n @staticmethod\n def FindAllTextureResources(dynamic, scope):\n textures = {}\n samplers = []\n cubemaps = []\n\n def ProcessMesh(mesh):\n for area in itertools.chain(mesh.opaqueAreas, mesh.decalAreas, mesh.transparentAreas):\n newTextures, newSamplers = 
SkinRaytracingTools.FindAllTextureResourcesFromEffect(area.effect, scope)\n textures.update(newTextures)\n samplers.extend(newSamplers)\n\n if type(dynamic) == trinity.Tr2IntSkinnedObject:\n for mesh in dynamic.visualModel.meshes:\n ProcessMesh(mesh)\n\n elif type(dynamic) == trinity.EveShip2:\n ProcessMesh(dynamic.highDetailMesh.object)\n elif type(dynamic) == trinity.EveStation2:\n ProcessMesh(dynamic.highDetailMesh.object)\n return (textures, samplers)\n\n @staticmethod\n def InteropTexture(name, texture, waitForFinish, scope):\n if texture.format == trinity.PIXEL_FORMAT.B8G8R8A8_UNORM:\n sampler = trinity.Tr2OptixTextureSampler()\n sampler.CreateFromTexture(texture)\n sampler.SetNormalizedIndexingMode(True)\n scope.SetSampler(name, sampler)\n LogInfo('No-Copy Interop for', name)\n return (sampler, None)\n if texture.type == trinity.TRIRTYPE_CUBETEXTURE:\n LogInfo('Copy-Interop for cubes not supported, skipping', name)\n return\n sampler_rt = SkinRaytracingTools.CreateSamplerForTexture(name, texture, waitForFinish)\n if sampler_rt is None or len(sampler_rt) < 1:\n LogInfo('InteropTexture failed for', name)\n else:\n scope.SetSampler(name, sampler_rt[0])\n LogInfo('Interop for', name)\n return sampler_rt\n\n @staticmethod\n def InteropAllTexturesFromEffect(optix, effect, waitForFinish, nameTranslation = None, scope = None, cache = None):\n if scope is None:\n scope = optix\n textures, samplers = SkinRaytracingTools.FindAllTextureResourcesFromEffect(effect, scope)\n for name, texture in textures.iteritems():\n if 'spotlight' in name.lower():\n continue\n if nameTranslation is not None:\n name = nameTranslation.get(name, name)\n if cache is not None and texture in cache:\n sampler = cache[texture]\n scope.SetSampler(name, sampler[0])\n LogInfo('Interop cache for', name)\n else:\n sampler = SkinRaytracingTools.InteropTexture(name, texture, waitForFinish, scope)\n if sampler and cache is not None:\n cache[texture] = sampler\n if sampler is not None:\n 
samplers.append(sampler)\n\n return samplers\n\n @staticmethod\n def InteropAllTextures(optix, dynamic, waitForFinish, nameTranslation = None, scope = None):\n if scope is None:\n scope = optix\n textures, samplers = SkinRaytracingTools.FindAllTextureResources(dynamic, scope)\n for name, texture in textures.iteritems():\n if 'spotlight' in name.lower():\n continue\n if nameTranslation is not None:\n name = nameTranslation.get(name, name)\n sampler = SkinRaytracingTools.InteropTexture(name, texture, waitForFinish, scope)\n if sampler is not None:\n samplers.append(sampler)\n\n return samplers\n\n @staticmethod\n def SafeLinearize(values):\n peak = max(1, max(values[0], max(values[1], values[2])))\n return (peak * math.pow(values[0] / peak, 2.2),\n peak * math.pow(values[1] / peak, 2.2),\n peak * math.pow(values[2] / peak, 2.2),\n values[3])\n\n @staticmethod\n def CopyParametersToContext(effect, instance, linearNames = None):\n for p in effect.parameters:\n if type(p) is trinity.Tr2Vector4Parameter:\n value = SkinRaytracingTools.SafeLinearize(p.value) if linearNames is not None and p.name in linearNames else p.value\n instance.SetFloat4(p.name, value[0], value[1], value[2], value[3])\n elif type(p) is trinity.TriFloatParameter or type(p) is trinity.Tr2FloatParameter:\n instance.SetFloat4(p.name, p.value, 0, 0, 0)\n\n @staticmethod\n def CreateBufferForLights(lights, leaveEmpty = False, preserveAlpha = False):\n bufEveLights = trinity.Tr2OptixBuffer()\n bufEveLights.CreateUserData(64, len(lights), trinity.OPTIX_BUFFER_OUTPUT, False)\n bufEveLights.MapUser()\n buffer = ''\n if leaveEmpty:\n lights = []\n for light in lights:\n innerAngle = light.coneAlphaInner\n outerAngle = light.coneAlphaOuter\n if innerAngle + 1.0 > outerAngle:\n innerAngle = outerAngle - 1.0\n innerAngle = math.cos(innerAngle * 3.1415927 / 180.0)\n outerAngle = math.cos(outerAngle * 3.1415927 / 180.0)\n coneDir = geo2.Vec3Normalize((light.coneDirection[0], light.coneDirection[1], 
light.coneDirection[2]))\n import struct\n buffer += struct.pack('16f', light.position[0], light.position[1], light.position[2], light.radius, math.pow(light.color[0], 2.2), math.pow(light.color[1], 2.2), math.pow(light.color[2], 2.2), light.falloff if not preserveAlpha else light.color[3], coneDir[0], coneDir[1], coneDir[2], outerAngle, innerAngle, 0, 0, 0)\n\n bufEveLights.SetUserDataFromStruct(buffer)\n bufEveLights.UnmapUser()\n return bufEveLights\n\n @staticmethod\n def CreateUInt1Buffer(optix, name):\n buffer = trinity.Tr2OptixBuffer()\n buffer.CreateUInt1(1, 1, trinity.OPTIX_BUFFER_INPUT_OUTPUT)\n buffer.Map()\n buffer.SetUserDataI(0, 0)\n buffer.Unmap()\n optix.SetBuffer(name, buffer)\n return buffer\n\n @staticmethod\n def matEqual(m1, m2):\n return m1._11 == m2._11 and m1._12 == m2._12 and m1._13 == m2._13 and m1._14 == m2._14 and m1._21 == m2._21 and m1._22 == m2._22 and m1._23 == m2._23 and m1._24 == m2._24 and m1._31 == m2._31 and m1._32 == m2._32 and m1._33 == m2._33 and m1._34 == m2._34 and m1._41 == m2._41 and m1._42 == m2._42 and m1._43 == m2._43 and m1._44 == m2._44\n\n @staticmethod\n def FuncWrapper(weakSelf, func):\n if weakSelf():\n func(weakSelf())\n\n\nclass OitHelper():\n\n def __init__(self, optix):\n self.oitAllocatorBuffer = SkinRaytracingTools.CreateUInt1Buffer(optix, 'oit_allocator')\n oitPoolBuffer = trinity.Tr2OptixBuffer()\n oitPoolBuffer.CreateUserData(64 + 112, 1048576, trinity.OPTIX_BUFFER_INPUT_OUTPUT, True)\n optix.SetBuffer('oit_pool', oitPoolBuffer)\n self.oitPoolBuffer = oitPoolBuffer\n\n def ResetAllocationCount(self):\n self.oitAllocatorBuffer.Map()\n self.oitAllocatorBuffer.SetUserDataI(0, 0)\n self.oitAllocatorBuffer.Unmap()\n\n def GetAllocationCount(self):\n self.oitAllocatorBuffer.Map()\n count = self.oitAllocatorBuffer.GetUserDataI(0)\n self.oitAllocatorBuffer.Unmap()\n return count\n\n\nclass RayCountHelper():\n\n def __init__(self, optix):\n self.rayCountBuffer = SkinRaytracingTools.CreateUInt1Buffer(optix, 
'ray_count')\n\n def ResetCount(self):\n self.rayCountBuffer.Map()\n self.rayCountBuffer.SetUserDataI(0, 0)\n self.rayCountBuffer.Unmap()\n\n def GetCount(self):\n self.rayCountBuffer.Map()\n count = self.rayCountBuffer.GetUserDataI(0)\n self.rayCountBuffer.Unmap()\n return count\n\n\nclass CaptureHelper():\n\n def __init__(self, width, height):\n self.capture = trinity.Tr2RenderTarget(width, height, 1, trinity.PIXEL_FORMAT.B8G8R8A8_UNORM)\n\n def SaveSurfaceToFile(self, filename):\n trinity.SaveRenderTarget(filename, self.capture)\n LogInfo('Saved to', filename)\n\n def CreateRenderSteps(self, rj, blitfx):\n rj.PushRenderTarget(self.capture).name = 'Begin screenshot capture'\n rj.PushDepthStencil(None).name = ' push depth'\n rj.RenderEffect(blitfx).name = ' Blit to screenshot'\n rj.PopDepthStencil().name = ' pop depth'\n rj.PopRenderTarget().name = 'End screenshot capture'\n\n\nclass FullScreenBlitter():\n\n def __init__(self, width, height):\n self.effect = trinity.Tr2Effect()\n self.effect.effectFilePath = 'res:/graphics/effect/optix/shaders/gammaBlit.fx'\n if self.effect.effectResource is None:\n LogWarn('Failed to load effect 1')\n return\n self.highpassEffect = trinity.Tr2Effect()\n self.highpassEffect.effectFilePath = 'res:/graphics/effect/optix/shaders/highpassFilter.fx'\n if self.highpassEffect.effectResource is None:\n LogWarn('Failed to load effect 1')\n return\n self.gaussianHorizEffect = trinity.Tr2Effect()\n self.gaussianHorizEffect.effectFilePath = 'res:/graphics/effect/optix/shaders/gaussianBlur.fx'\n if self.gaussianHorizEffect.effectResource is None:\n LogWarn('Failed to load effect 3')\n return\n self.gaussianVertEffect = trinity.Tr2Effect()\n self.gaussianVertEffect.effectFilePath = 'res:/graphics/effect/optix/shaders/gaussianBlur.fx'\n if self.gaussianVertEffect.effectResource is None:\n LogWarn('Failed to load effect 3')\n return\n for effect in [self.effect,\n self.highpassEffect,\n self.gaussianHorizEffect,\n self.gaussianVertEffect]:\n 
while effect.effectResource.isLoading:\n PD.Yield()\n\n self.blitcolor = trinity.Tr2Vector4Parameter()\n self.blitcolor.name = 'Color'\n for effect in [self.effect,\n self.highpassEffect,\n self.gaussianHorizEffect,\n self.gaussianVertEffect]:\n effect.PopulateParameters()\n effect.RebuildCachedData()\n effect.parameters.append(self.blitcolor)\n\n sizesParam = trinity.Tr2Vector4Parameter()\n sizesParam.name = 'InvSize'\n sizesParam.value = (1.0 / width,\n 1.0 / height,\n 0,\n 0)\n for effect in [self.effect, self.highpassEffect]:\n effect.parameters.append(sizesParam)\n\n sizesHorizParam = trinity.Tr2Vector4Parameter()\n sizesHorizParam.name = 'invTexelSize'\n sizesHorizParam.value = (1.0 / width,\n 0.0,\n 0,\n 0)\n self.gaussianHorizEffect.parameters.append(sizesHorizParam)\n sizesVertParam = trinity.Tr2Vector4Parameter()\n sizesVertParam.name = 'invTexelSize'\n sizesVertParam.value = (0.0,\n 1.0 / height,\n 0,\n 0)\n self.gaussianVertEffect.parameters.append(sizesVertParam)\n\n def SetTexture(self, optixOutputTexture, highpassTexture, filteredTexture):\n tex = trinity.TriTexture2DParameter()\n tex.name = 'Texture'\n tex.SetResource(optixOutputTexture)\n for effect in [self.effect, self.highpassEffect]:\n effect.resources.append(tex)\n\n tex = trinity.TriTexture2DParameter()\n tex.name = 'Texture'\n tex.SetResource(highpassTexture)\n self.gaussianHorizEffect.resources.append(tex)\n tex = trinity.TriTexture2DParameter()\n tex.name = 'Texture'\n tex.SetResource(filteredTexture)\n self.gaussianVertEffect.resources.append(tex)\n tex = trinity.TriTexture2DParameter()\n tex.name = 'BloomTexture'\n tex.SetResource(highpassTexture)\n self.effect.resources.append(tex)\n\n def UpdateFrameCount(self, framecount):\n invFC = 1.0 / framecount if framecount > 0 else 1.0\n self.blitcolor.value = (invFC,\n invFC,\n invFC,\n invFC)\n\n\nclass FullOptixRenderer():\n __guid__ = 'paperDoll.FullOptixRenderer'\n instance = None\n\n def AddCallback(self, func, name, rj):\n cb = 
trinity.TriStepPythonCB()\n weakSelf = weakref.ref(self)\n cb.SetCallback(lambda : SkinRaytracingTools.FuncWrapper(weakSelf, func))\n cb.name = name\n rj.steps.append(cb)\n\n def GetFrameCount(self):\n return self.framecount\n\n def SaveScreenshot(self, filename):\n self.capture.SaveSurfaceToFile(filename)\n\n def AddRenderPreviewStep(self, renderJob):\n renderJob.SetStdRndStates(trinity.RM_FULLSCREEN).name = ' [optix] fullscreen quad'\n renderJob.PushDepthStencil(None).name = ' [optix] push depth'\n renderJob.RenderEffect(self.blitfx.effect).name = ' [optix] Blit to screenshot'\n renderJob.PopDepthStencil().name = ' [optix] pop depth'\n\n def RefreshMatrices(self):\n model = self.skinnedObject\n self.optix.RefreshMatrices(model, self.skinnedOptix)\n self.RunSkinningAndTesselation()\n self.ApplySettings()\n print 'Refreshed'\n\n @staticmethod\n def RaytraceFrame(selfRef):\n start = time.time()\n VP = SkinRaytracingTools.SetOptixMatrixFromTrinity(selfRef.optix, 'clipToWorld', selfRef.width / float(selfRef.height))\n if not SkinRaytracingTools.matEqual(VP, selfRef.previousVP):\n selfRef.previousVP = VP\n selfRef.outputBuffer.Clear()\n selfRef.framecount = 0\n model = selfRef.skinnedObject\n pos1 = model.GetBonePosition(model.GetBoneIndex('fj_eyeballLeft'))\n pos2 = model.GetBonePosition(model.GetBoneIndex('fj_eyeballRight'))\n dist1 = geo2.Vec3Distance(pos1, trinity.GetViewPosition())\n dist2 = geo2.Vec3Distance(pos2, trinity.GetViewPosition())\n autodof = min(dist1, dist2)\n dof = selfRef.settings.get('lens_focal_distance', autodof)\n print 'Auto-depth-of-field is at', autodof, ', actual focal distance is', dof\n selfRef.optix.SetFloat3('depthOfField', dof - trinity.GetFrontClip(), selfRef.settings['lens_radius'], 0)\n else:\n selfRef.framecount += 1\n selfRef.optix.SetUInt('frameIteration', selfRef.framecount)\n selfRef.oit.ResetAllocationCount()\n selfRef.rayCounter.ResetCount()\n time1 = time.time()\n selfRef.optix.Run(0, selfRef.width, selfRef.height)\n time2 = 
time.time()\n sec = time2 - time1\n raycount = selfRef.rayCounter.GetCount()\n raysec = 0\n if sec > 0:\n raysec = raycount / float(sec)\n time3 = time.time()\n if selfRef.framecount % 32 == 0:\n stop = time.time()\n print selfRef.oit.GetAllocationCount(), 'oit allocations'\n selfRef.blitfx.UpdateFrameCount(selfRef.framecount)\n selfRef.outputBuffer.CopyToTexture(selfRef.outputTexture)\n print 'time %05.3f / %05.3f / %05.3f / %05.3f msec' % (float(time1 - start) * 1000,\n float(time2 - time1) * 1000,\n float(time3 - time2) * 1000,\n float(stop - time3) * 1000),\n print '%d rays in %05.3f ms / %10d Krays/sec / %d rays per pixel' % (raycount,\n sec * 1000,\n raysec / 1000,\n selfRef.framecount)\n\n @telemetry.ZONE_METHOD\n def OnBeforeOptixPositionsUV(self):\n PD.SkinLightmapRenderer.DoChangeEffect('oxPosWorldUVEffect', self.oxMeshes)\n if self.skinnedObject is not None and self.skinnedObject.visualModel is not None:\n self.savedMeshes = self.skinnedObject.visualModel.meshes[:]\n filteredMeshes = [ ref.object for ref in self.oxMeshes.iterkeys() if ref.object is not None ]\n PD.SkinLightmapRenderer.DoCopyMeshesToVisual(self.skinnedObject, filteredMeshes)\n self.scene.filterList.removeAt(-1)\n self.scene.filterList.append(self.skinnedObject)\n self.scene.useFilterList = True\n\n @telemetry.ZONE_METHOD\n def OnBeforeOptixNormalsUV(self):\n PD.SkinLightmapRenderer.DoChangeEffect('oxNormalWorldUVEffect', self.oxMeshes)\n\n def OnAfterOptix(self):\n PD.SkinLightmapRenderer.DoRestoreShaders(meshes=self.oxMeshes)\n PD.SkinLightmapRenderer.DoCopyMeshesToVisual(self.skinnedObject, self.savedMeshes)\n del self.savedMeshes\n self.scene.useFilterList = False\n self.scene.filterList.removeAt(-1)\n\n def _InitUVUnwrap(self):\n self.oxMeshes = {}\n self.scatterFX = set()\n self.unwrapSize = 1024\n posUV = PD.SkinLightmapRenderer.PreloadEffect(PD.SkinLightmapRenderer.OPTIX_POSWORLD_UV_EFFECT)\n normalUV = 
PD.SkinLightmapRenderer.PreloadEffect(PD.SkinLightmapRenderer.OPTIX_NORMALWORLD_UV_EFFECT)\n deriv = PD.SkinLightmapRenderer.PreloadEffect(PD.SkinLightmapRenderer.STRETCHMAP_RENDERER_EFFECT)\n self.oxDepth = trinity.Tr2DepthStencil(self.unwrapSize, self.unwrapSize, trinity.DEPTH_STENCIL_FORMAT.D24S8, 1, 0)\n for mesh in self.skinnedObject.visualModel.meshes:\n if PD.SkinLightmapRenderer.IsScattering(mesh):\n m = PD.SkinLightmapRenderer.Mesh()\n m.ExtractOrigEffect(mesh)\n m.CreateOptixEffects(includeStretchMap=True)\n PD.AddWeakBlue(self, 'oxMeshes', mesh, m)\n fx = PD.GetEffectsFromMesh(mesh)\n for f in fx:\n self.scatterFX.add(f)\n\n self.oxWorldPosMapUV = PD.SkinLightmapRenderer.CreateRenderTarget(self.unwrapSize, self.unwrapSize, trinity.PIXEL_FORMAT.R32G32B32A32_FLOAT, useRT=True)\n self.oxWorldNormalMapUV = PD.SkinLightmapRenderer.CreateRenderTarget(self.unwrapSize, self.unwrapSize, trinity.PIXEL_FORMAT.R32G32B32A32_FLOAT, useRT=True)\n self.stretchMap = PD.SkinLightmapRenderer.CreateRenderTarget(self.unwrapSize / 2, self.unwrapSize / 2, trinity.PIXEL_FORMAT.R32G32B32A32_FLOAT, useRT=True)\n rj = trinity.CreateRenderJob('Optix UV Unwrap')\n rj.PushRenderTarget(self.oxWorldPosMapUV)\n rj.PushDepthStencil(self.oxDepth)\n rj.Clear((0, 0, 0, 0), 1.0)\n rj.SetStdRndStates(trinity.RM_FULLSCREEN)\n vp = trinity.TriViewport()\n vp.x = 0\n vp.y = 0\n vp.width = self.unwrapSize\n vp.height = self.unwrapSize\n rj.SetViewport(vp)\n PD.SkinLightmapRenderer.AddCallback(self, FullOptixRenderer.OnBeforeOptixPositionsUV, 'onBeforeOptixPositionsUV', rj)\n rj.RenderScene(self.scene).name = 'Optix WorldPos (UV space)'\n PD.SkinLightmapRenderer.AddCallback(self, lambda weakSelf: PD.SkinLightmapRenderer.DoChangeEffect('oxNormalWorldUVEffect', meshes=weakSelf.oxMeshes), '', rj)\n rj.SetRenderTarget(self.oxWorldNormalMapUV)\n rj.Clear((0, 0, 0, 0), 1.0)\n rj.RenderScene(self.scene).name = 'Optix Normals (UV space)'\n rj.SetRenderTarget(self.stretchMap)\n rj.Clear((0, 0, 0, 0), 
1.0)\n vp2 = trinity.TriViewport()\n vp2.x = 0\n vp2.y = 0\n vp2.width = self.unwrapSize / 2\n vp2.height = self.unwrapSize / 2\n rj.SetViewport(vp2)\n PD.SkinLightmapRenderer.AddCallback(self, lambda weakSelf: PD.SkinLightmapRenderer.DoChangeEffect('stretchmapRenderEffect', meshes=weakSelf.oxMeshes), '', rj)\n rj.RenderScene(self.scene).name = 'Stretchmap'\n PD.SkinLightmapRenderer.AddCallback(self, FullOptixRenderer.OnAfterOptix, 'onAfterOptix', rj)\n rj.PopRenderTarget()\n rj.PopDepthStencil()\n rj.ScheduleOnce()\n rj.WaitForFinish()\n if False:\n PD.SkinLightmapRenderer.SaveTarget(self.oxWorldPosMapUV, 'c:/depot/oxworldposuv2.dds', isRT=True)\n PD.SkinLightmapRenderer.SaveTarget(self.oxWorldNormalMapUV, 'c:/depot/oxworldnormaluv2.dds', isRT=True)\n PD.SkinLightmapRenderer.SaveTarget(self.stretchMap, 'c:/depot/stretchmap2.dds', isRT=True)\n print '** MAPS SAVED **'\n\n def RunSkinningAndTesselation(self):\n print '*** Tesselation phase ***'\n batchTypes = self.skinnedOptix[0]\n optix = self.optix\n ptx = {}\n ptx[72] = self.path + 'eve_skinning_kernel72.ptx'\n ptx[64] = self.path + 'eve_skinning_kernel64.ptx'\n for bytes, ptxfile in ptx.iteritems():\n LogInfo('Processing ', bytes, 'bytes/vertex')\n skinningProgram = trinity.Tr2OptixProgram(ptxfile, 'kernel_no_tesselation')\n skinningProgramTesselate = trinity.Tr2OptixProgram(ptxfile, 'kernel_tesselation')\n optix.SetEntryPointCount(2)\n optix.SetRayGenerationProgram(0, skinningProgram)\n optix.SetRayGenerationProgram(1, skinningProgramTesselate)\n for batchType in range(len(batchTypes)):\n batches = batchTypes[batchType]\n out = []\n\n def needsTesselation(fx):\n return 'skinnedavatarhair_detailed.fx' in fx.effectFilePath.lower()\n\n for batch in batches:\n if 'furshell' in batch[1].effectFilePath.lower():\n out.append(None)\n continue\n tesselate = needsTesselation(batch[1])\n triangle_count = batch[6]\n bytes_per_vertex = batch[8]\n if bytes_per_vertex != bytes:\n out.append(None)\n continue\n 
vertex_buffer_output = trinity.Tr2OptixBuffer()\n vertex_buffer_output.CreateUserData(bytes_per_vertex, triangle_count * 3 * 4 if tesselate else triangle_count * 3, trinity.OPTIX_BUFFER_INPUT_OUTPUT, True)\n out.append(vertex_buffer_output)\n\n for i, batch in enumerate(batches):\n if 'furshell' in batch[1].effectFilePath.lower():\n continue\n triangle_count = batch[6]\n tesselate = needsTesselation(batch[1])\n bytes_per_vertex = batch[8]\n if bytes_per_vertex != bytes:\n continue\n if tesselate:\n LogInfo('Tesselating geometry ', batch, ' of type ', batchType)\n else:\n LogInfo('Skinning geometry ', batch, ' of type ', batchType)\n optix.SetBuffer('vertex_buffer', batch[2])\n optix.SetBuffer('index_buffer', batch[3])\n optix.SetBuffer('vertex_buffer_output', out[i])\n optix.SetUInt('first_index_index', batch[5])\n optix.SetBuffer('matrix_buffer', batch[7])\n program = int(tesselate)\n optix.Run(program, triangle_count, 1)\n batch[0].SetBuffer('vertex_buffer', out[i])\n if tesselate:\n batch[0].SetPrimitiveCount(triangle_count * 4)\n\n optix.SetRayGenerationProgram(0, self.raygen)\n optix.SetRayGenerationProgram(1, self.raygen)\n\n def RemoveBadGeometry(self, model):\n self.haveBeard = False\n self.beardFx = None\n for mesh in model.visualModel.meshes:\n for area in mesh.decalAreas:\n if PD.IsBeard(area):\n self.haveBeard = True\n self.beardFx = area.effect\n area.debugIsHidden = True\n break\n\n for mesh in model.visualModel.meshes:\n for area in mesh.transparentAreas:\n lname = area.name.lower()\n if lname.startswith('eyeshadow_'):\n mesh.transparentAreas.removeAt(-1)\n break\n\n if False:\n for mesh in model.visualModel.meshes:\n for area in mesh.opaqueAreas:\n lname = area.name.lower()\n if 'eye' not in lname or 'eyewet' in lname or 'eyelash' in lname:\n mesh.opaqueAreas.removeAt(-1)\n break\n\n for area in mesh.transparentAreas:\n lname = area.name.lower()\n if 'eye' not in lname or 'eyewet' in lname or 'eyelash' in lname:\n 
mesh.transparentAreas.removeAt(-1)\n break\n\n if False:\n print 'raytracing', len(model.visualModel.meshes), 'meshes'\n for mesh in model.visualModel.meshes:\n lname = mesh.name.lower()\n if not lname.startswith('hair'):\n print 'removing', lname\n mesh.opaqueAreas.removeAt(-1)\n mesh.decalAreas.removeAt(-1)\n mesh.transparentAreas.removeAt(-1)\n elif False:\n print 'removing', lname\n for a in mesh.opaqueAreas:\n print 'opaque', a.name\n\n for a in mesh.decalAreas:\n print 'decal', a.name\n\n for a in mesh.transparentAreas:\n print 'transp', a.name\n\n mesh.opaqueAreas.removeAt(-1)\n mesh.decalAreas.removeAt(-1)\n mesh.transparentAreas.removeAt(-1)\n else:\n print 'keeping', lname\n\n def TransferBeardParameters(self, optix):\n if self.haveBeard:\n LogInfo('Beard found')\n beardLength = self.settings['beardLength']\n optix.SetFloat3('beardOptions', beardLength[0], beardLength[1], self.settings['beardGravity'])\n floatMap = {'FurLength': 'beard_fur_length',\n 'UVScale': 'beard_uv_scale',\n 'AlphaMultiplier': 'beard_alpha_multiplier',\n 'CombStrength': 'beard_comb_strength',\n 'FurGrainRotation': 'beard_fur_grain_rotation',\n 'MirrorGrain': 'beard_mirror_grain',\n 'FurParallax': 'beard_fur_parallax'}\n float3Map = {'gravityOffset': 'beard_gravity_offset',\n 'MaterialDiffuseColor': 'beard_diffuse_color'}\n for param in self.beardFx.parameters:\n optixName = floatMap.get(param.name, None)\n if optixName is not None:\n optix.SetFloat(optixName, param.value)\n else:\n optixName = float3Map.get(param.name, None)\n if optixName is not None:\n optix.SetFloat3(optixName, param.value[0], param.value[1], param.value[2])\n\n def GenerateBeardGeometry(self, optix, path, any_hit_shadow):\n if not self.haveBeard:\n return None\n LogInfo('generating beard splines')\n SkinRaytracingTools.SetOptixMatrixFromTrinity(optix, 'clipToWorld')\n beardProgram = trinity.Tr2OptixProgram(path + 'eve_beard_kernel.ptx', 'kernel')\n curveOutputBuffer = trinity.Tr2OptixBuffer()\n curveCount = 
512\n curveOutputBuffer.CreateUserData(80, curveCount * curveCount, trinity.OPTIX_BUFFER_INPUT_OUTPUT, True)\n optix.SetBuffer('output', curveOutputBuffer)\n rayTypeCount = optix.GetRayTypeCount()\n optix.SetRayTypeCount(1)\n optix.SetEntryPointCount(2)\n optix.SetRayGenerationProgram(0, beardProgram)\n optix.SetRayGenerationProgram(1, beardProgram)\n optix.SetEntryPointCount(1)\n LogInfo('beard: about to Run')\n optix.Run(0, curveCount, curveCount)\n LogInfo('beard: Run done')\n optix.SetRayTypeCount(rayTypeCount)\n hairGeometry = trinity.Tr2OptixGeometry()\n hairGeometry.InitializeFromProgram(path + 'bezier_curves.ptx', 'intersect', 'bounds')\n subdivideDepth = 2\n hairGeometry.SetPrimitiveCount(curveCount * curveCount * (1 << subdivideDepth))\n optix.SetUInt('presubdivide_depth', subdivideDepth)\n optix.SetBuffer('curves', curveOutputBuffer)\n LogInfo('beard: geometry setup done')\n beardInstance = trinity.Tr2OptixGeometryInstance()\n beardInstance.SetGeometry(hairGeometry)\n closest_hit_BeardShader = trinity.Tr2OptixProgram(path + 'eve_beard_shader.ptx', 'closest_hit_BeardShader')\n beardMaterial = trinity.Tr2OptixMaterial()\n beardMaterial.SetClosestHit(0, closest_hit_BeardShader)\n beardMaterial.SetAnyHit(1, any_hit_shadow)\n beardInstance.SetMaterial(beardMaterial)\n LogInfo('beard: geometry instance setup done')\n return beardInstance\n\n def _DoInit(self, scene = None):\n model = None\n if scene is None:\n scene = PD.SkinLightmapRenderer.Scene()\n self.scene = scene\n self.previousVP = trinity.TriMatrix()\n self.framecount = 1\n self.useOIT = True\n if scene is None:\n LogWarn('No scene!')\n return\n for dynamic in scene.dynamics:\n if dynamic.__typename__ == 'Tr2IntSkinnedObject':\n model = dynamic\n break\n else:\n LogWarn('No Tr2IntSkinnedObject found')\n return\n\n if model is None:\n LogWarn('No Tr2IntSkinnedObject found')\n return\n self.skinnedObject = model\n if self.skinnedObject.visualModel is None:\n LogWarn('skinnedObject has no 
visualMeshes')\n return\n bg = trinity.renderContext.GetDefaultBackBuffer()\n step = trinity.renderJobs.FindStepByName('SET_SWAPCHAIN_RT')\n if step is not None:\n bg = step.renderTarget\n self.width = self.settings.get('outputWidth', bg.width)\n self.height = self.settings.get('outputHeight', bg.height)\n self.blitfx = FullScreenBlitter(self.width, self.height)\n self.RemoveBadGeometry(model)\n outputTexture = trinity.TriTextureRes(self.width, self.height, 1, trinity.PIXEL_FORMAT.R32G32B32A32_FLOAT)\n self.outputTexture = outputTexture\n self.capture = CaptureHelper(self.width, self.height)\n self._InitUVUnwrap()\n for steps in trinity.renderJobs.recurring:\n if steps.name == 'FullOptixRenderer':\n steps.UnscheduleRecurring()\n\n start = time.clock()\n optix = trinity.Tr2Optix()\n self.optix = optix\n optix.SetInteropDevice()\n optix.SetRayTypeCount(4)\n optix.SetEntryPointCount(1)\n if False:\n optix.EnableAllExceptions()\n optix.SetPrintEnabled(True)\n optix.SetPrintBufferSize(16384)\n optix.SetUInt('radiance_ray_type', 0)\n optix.SetUInt('shadow_ray_type', 1)\n optix.SetUInt('translucency_ray_type', 2)\n optix.SetUInt('translucency_ray_type', 3)\n optix.SetFloat('scene_epsilon', 0.001)\n optix.SetUInt('frameIteration', 0)\n self.outputBuffer = trinity.Tr2OptixBuffer()\n self.outputBuffer.CreateFloat4(self.width, self.height, trinity.OPTIX_BUFFER_INPUT_OUTPUT)\n optix.SetBuffer('output_buffer', self.outputBuffer)\n self.ApplySettings()\n path = str(blue.paths.ResolvePath('res:/graphics/effect/optix/NCC/'))\n self.path = path\n LogInfo('Getting files from', path)\n everything = []\n any_hit_shadow = trinity.Tr2OptixProgram(path + 'eve_shadow.ptx', 'any_hit_shadow')\n any_hit_shadow_blend = trinity.Tr2OptixProgram(path + 'eve_shadow.ptx', 'any_hit_shadow_blend')\n shader_diffuse_only_feeler = trinity.Tr2OptixProgram(path + 'eve_bounce.ptx', 'closest_hit_DiffuseOnlyFeeler2')\n any_hit_cutout = trinity.Tr2OptixProgram(path + 'eve_cutout.ptx', 'any_hit_CutoutMask')\n 
any_hit_diffuse_feeler_blend = trinity.Tr2OptixProgram(path + 'eve_shadow.ptx', 'any_hit_diffuse_feeler_blend')\n everything.append(any_hit_shadow)\n everything.append(any_hit_shadow_blend)\n everything.append(shader_diffuse_only_feeler)\n everything.append(any_hit_cutout)\n mainRay = 0\n shadowRay = 1\n bounceRay = 3\n\n def MakeMaterialWithShader(shader):\n material = trinity.Tr2OptixMaterial()\n material.SetClosestHit(mainRay, shader)\n material.SetAnyHit(shadowRay, any_hit_shadow)\n material.SetClosestHit(bounceRay, shader_diffuse_only_feeler)\n everything.append(material)\n return (material, shader)\n\n def MakeMaterial(ptxFile, shaderName):\n shader = trinity.Tr2OptixProgram(path + ptxFile + '.ptx', shaderName)\n everything.append(shader)\n return MakeMaterialWithShader(shader)\n\n def MakeDecal(material):\n material.SetAnyHit(mainRay, any_hit_cutout)\n material.SetAnyHit(shadowRay, any_hit_shadow_blend)\n material.SetAnyHit(bounceRay, any_hit_cutout)\n\n skin_single_material, skin_single_shade = MakeMaterial('eve_skin', 'closest_hit_ShadeSinglePassSkin_Single2')\n skin_single_material_scatter = MakeMaterial('eve_skin', 'closest_hit_ShadeSinglePassSkin_Single_Scatter2')[0]\n skin_single_material_decal = MakeMaterialWithShader(skin_single_shade)[0]\n MakeDecal(skin_single_material_decal)\n glasses_shade = trinity.Tr2OptixProgram(path + 'eve_glasses.ptx', 'glasses_shade')\n glasses_shadow = trinity.Tr2OptixProgram(path + 'eve_glasses.ptx', 'glasses_shadow')\n glass_material = trinity.Tr2OptixMaterial()\n glass_material.SetAnyHit(mainRay, glasses_shade)\n glass_material.SetAnyHit(shadowRay, glasses_shadow)\n glass_material.SetClosestHit(bounceRay, shader_diffuse_only_feeler)\n everything.append(glasses_shade)\n everything.append(glasses_shadow)\n vizNames = ['closest_hit_VizNormal',\n 'closest_hit_VizUV',\n 'closest_hit_VizConstantColor',\n 'closest_hit_VizDiffuse']\n vizualizer, vizualizer_shade = MakeMaterial('eve_basic', vizNames[0])\n vizualizer_decal = 
MakeMaterialWithShader(vizualizer_shade)[0]\n MakeDecal(vizualizer_decal)\n skin_double_material, skin_double_shade = MakeMaterial('eve_skin', 'closest_hit_ShadeSinglePassSkin_Double2')\n skin_double_material_decal = MakeMaterialWithShader(skin_double_shade)[0]\n MakeDecal(skin_double_material_decal)\n skin_double_material_transparent = MakeMaterial('eve_skin', 'closest_hit_ShadeSinglePassSkin_Double2_Blend')[0]\n skin_double_material_transparent.SetAnyHit(mainRay, any_hit_cutout)\n skin_double_material_transparent.SetAnyHit(shadowRay, any_hit_shadow_blend)\n skin_double_material_transparent.SetAnyHit(bounceRay, any_hit_cutout)\n avatar_brdf_material, avatar_brdf_shade = MakeMaterial('eve_brdf', 'closest_hit_ShadeAvatarBRDF_Single2')\n avatar_brdf_material_decal = MakeMaterialWithShader(avatar_brdf_shade)[0]\n MakeDecal(avatar_brdf_material_decal)\n avatar_brdf_double_material, avatar_brdf_double_shade = MakeMaterial('eve_brdf', 'closest_hit_ShadeAvatarBRDF_Double2')\n avatar_brdf_double_material_decal = MakeMaterialWithShader(avatar_brdf_double_shade)[0]\n MakeDecal(avatar_brdf_double_material_decal)\n avatar_hair_material = trinity.Tr2OptixMaterial()\n avatar_hair_shade = trinity.Tr2OptixProgram(path + 'eve_hair.ptx', 'closest_hit_ShadeAvatarHair2' if self.useOIT else 'closest_hit_ShadeAvatarHair2_Blend')\n avatar_hair_material.SetClosestHit(mainRay, avatar_hair_shade)\n if self.useOIT:\n avatar_hair_oit = trinity.Tr2OptixProgram(path + 'eve_hair.ptx', 'any_hit_HairOIT')\n avatar_hair_material.SetAnyHit(mainRay, avatar_hair_oit)\n avatar_hair_material.SetAnyHit(shadowRay, any_hit_shadow_blend)\n avatar_hair_material.SetClosestHit(bounceRay, shader_diffuse_only_feeler)\n everything.append(avatar_hair_shade)\n everything.append(avatar_hair_material)\n avatar_hair_material_decal = trinity.Tr2OptixMaterial()\n avatar_hair_material_decal.SetClosestHit(mainRay, avatar_hair_shade)\n avatar_hair_material_decal.SetAnyHit(mainRay, avatar_hair_oit if self.useOIT else 
any_hit_cutout)\n avatar_hair_material_decal.SetAnyHit(shadowRay, any_hit_shadow_blend)\n avatar_hair_material_decal.SetClosestHit(bounceRay, shader_diffuse_only_feeler)\n avatar_hair_material_decal.SetAnyHit(bounceRay, any_hit_cutout)\n everything.append(avatar_hair_material_decal)\n eye_shade = trinity.Tr2OptixProgram(path + 'eve_eyes.ptx', 'closest_hit_ShadeEye')\n eye_material = trinity.Tr2OptixMaterial()\n eye_material.SetClosestHit(mainRay, eye_shade)\n eye_material.SetAnyHit(shadowRay, any_hit_shadow)\n everything.append(eye_shade)\n everything.append(eye_material)\n eye_wetness_shade = trinity.Tr2OptixProgram(path + 'eve_eyes.ptx', 'closest_hit_ShadeEyeWetness')\n eye_wetness_material = trinity.Tr2OptixMaterial()\n eye_wetness_material.SetClosestHit(mainRay, eye_wetness_shade)\n eye_wetness_material.SetAnyHit(shadowRay, any_hit_shadow)\n everything.append(eye_wetness_shade)\n everything.append(eye_wetness_material)\n portrait_basic_material, portrait_basic_shade = MakeMaterial('eve_basic', 'closest_hit_ShadePortraitBasic')\n portrait_basic_material_decal = MakeMaterialWithShader(portrait_basic_shade)[0]\n MakeDecal(portrait_basic_material_decal)\n LogInfo('global setup OK', time.clock() - start, 'seconds')\n\n def MakeSamplerFromMap(texture, name):\n sampler = trinity.Tr2OptixTextureSampler()\n sampler.CreateFromSurface(texture)\n sampler.SetNormalizedIndexingMode(True)\n optix.SetSampler(name, sampler)\n LogInfo('No-Copy Interop for ', name)\n everything.append(sampler)\n\n MakeSamplerFromMap(self.oxWorldPosMapUV, 'world_pos_uv_buffer')\n MakeSamplerFromMap(self.oxWorldNormalMapUV, 'world_normal_uv_buffer')\n MakeSamplerFromMap(self.stretchMap, 'stretchmap_buffer')\n useHdrProbe = False\n if useHdrProbe:\n optix.SetSamplerFromProbe('hdr_probe_sampler', 'c:/depot/optix/data/Japan_subway2_FINAL.hdr')\n start = time.clock()\n self.skinnedOptix = optix.CreateFromSkinnedModel(model, 72, path + 'triangle72.ptx', 'mesh_intersect', 'mesh_bounds', 64, path + 
'triangle64.ptx', 'mesh_intersect', 'mesh_bounds')\n optixBatches = self.skinnedOptix[0]\n self.TransferBeardParameters(optix)\n group = trinity.Tr2OptixGeometryGroup()\n groupChildren = []\n self.rayCounter = RayCountHelper(self.optix)\n self.oit = OitHelper(self.optix)\n self.raygen = trinity.Tr2OptixProgram(path + 'raygen.ptx', 'ray_request')\n self.RunSkinningAndTesselation()\n start = time.clock()\n samplers = SkinRaytracingTools.InteropAllTextures(optix, model, waitForFinish=True)\n everything.append(samplers)\n backdrop = trinity.TriTexture2DParameter()\n backdrop.resourcePath = self.settings['backgroundBitmap']\n skinmap = trinity.TriTexture2DParameter()\n skinmap.resourcePath = 'res:/Graphics/Character/female/paperdoll/head/head_generic/SkinMap.png'\n blue.resMan.Wait()\n everything.append(SkinRaytracingTools.InteropTexture('BackgroundEnvMap', backdrop.resource, waitForFinish=True, scope=optix))\n everything.append(SkinRaytracingTools.InteropTexture('SkinMap', skinmap.resource, waitForFinish=True, scope=optix))\n LogInfo('texture interop OK', time.clock() - start, 'seconds')\n splines = self.GenerateBeardGeometry(optix, path, any_hit_shadow)\n if splines is not None:\n groupChildren.append(splines)\n print '*** Raytracing phase ***'\n\n def SetAlphaRef(instance, batchType):\n if batchType == 1:\n instance.SetFloat4('alphaRef', 0.75, 0, 0, 0)\n elif batchType == 2:\n instance.SetFloat4('alphaRef', 0.01, 0, 0, 0)\n\n haveGlasses = False\n for batchType in range(len(optixBatches)):\n isOpaque = batchType == 0\n batches = optixBatches[batchType]\n for batch in batches:\n if 'furshell' in batch[1].effectFilePath.lower():\n continue\n instance = trinity.Tr2OptixGeometryInstance()\n everything.append(instance)\n instance.SetGeometry(batch[0])\n r = random.random()\n g = random.random()\n b = random.random()\n instance.SetFloat4('viz_constant_color', r, g, b, 1.0)\n fxpath = batch[1].effectFilePath.lower()\n if False:\n instance.SetMaterial(vizualizer if isOpaque 
else vizualizer_decal)\n elif 'glassshader' in fxpath:\n instance.SetMaterial(glass_material)\n if not haveGlasses:\n haveGlasses = True\n elif 'skinnedavatarbrdfsinglepassskin_single.fx' in fxpath:\n if batch[1] in self.scatterFX:\n instance.SetMaterial(skin_single_material_scatter)\n else:\n instance.SetMaterial(skin_single_material if isOpaque else skin_single_material_decal)\n SetAlphaRef(instance, batchType)\n elif 'skinnedavatarbrdfsinglepassskin_double.fx' in fxpath:\n instance.SetMaterial([skin_double_material, skin_double_material_decal, skin_double_material_transparent][batchType])\n SetAlphaRef(instance, batchType)\n elif 'skinnedavatarbrdflinear.fx' in fxpath:\n instance.SetMaterial(avatar_brdf_material if isOpaque else avatar_brdf_material_decal)\n elif 'skinnedavatarbrdfdoublelinear.fx' in fxpath:\n instance.SetMaterial(avatar_brdf_double_material if isOpaque else avatar_brdf_double_material_decal)\n elif 'skinnedavatarhair_detailed.fx' in fxpath:\n instance.SetMaterial(avatar_hair_material if isOpaque else avatar_hair_material_decal)\n instance.SetFloat4('alphaRef', 0.01, 0, 0, 0)\n instance.SetUInt('enableCulling', 0)\n elif 'eyeshader.fx' in fxpath:\n instance.SetMaterial(eye_material)\n elif 'eyewetnessshader.fx' in fxpath:\n instance.SetMaterial(eye_wetness_material)\n elif 'portraitbasic.fx' in fxpath:\n instance.SetMaterial(portrait_basic_material if isOpaque else portrait_basic_material_decal)\n else:\n instance.SetMaterial(vizualizer if isOpaque else vizualizer_decal)\n SkinRaytracingTools.CopyParametersToContext(batch[1], instance)\n groupChildren.append(instance)\n\n group.SetChildCount(len(groupChildren))\n for x in xrange(len(groupChildren)):\n group.SetChild(x, groupChildren[x])\n\n everything.append(group)\n group.SetAcceleration('Bvh', 'Bvh')\n LogInfo('scene interop OK', time.clock() - start, 'seconds')\n start = time.clock()\n bufEveLights = SkinRaytracingTools.CreateBufferForLights(scene.lights, useHdrProbe)\n 
optix.SetBuffer('trinity_lights', bufEveLights)\n LogInfo('lights interop OK', time.clock() - start, 'seconds')\n start = time.clock()\n optix.SetGeometryGroup('top_scene', group)\n optix.SetGeometryGroup('shadow_casters', group)\n optix.SetRayGenerationProgram(0, self.raygen)\n optix.SetEntryPointCount(1)\n miss = None\n if not useHdrProbe:\n miss = trinity.Tr2OptixProgram(path + 'eve_miss.ptx', 'miss')\n else:\n miss = trinity.Tr2OptixProgram(path + 'eve_miss_probe.ptx', 'miss')\n optix.SetMissProgram(3, miss)\n optix.SetFloat3('bg_color', 1.0, 0, 0)\n everything.append(miss)\n if False:\n exception = trinity.Tr2OptixProgram(path + 'eve_miss.ptx', 'exception')\n optix.SetExceptionProgram(0, exception)\n everything.append(exception)\n optix.SetStackSize(4096)\n self.everything = everything\n SkinRaytracingTools.SetOptixMatrixFromTrinity(optix, 'clipToWorld', self.width / float(self.height))\n LogInfo('general setup OK', time.clock() - start, 'seconds')\n optix.ReportObjectCounts()\n start = time.clock()\n optix.Compile()\n LogInfo('compile OK', time.clock() - start, 'seconds')\n start = time.clock()\n optix.Validate()\n LogInfo('validate OK', time.clock() - start, 'seconds')\n start = time.clock()\n optix.Run(0, 0, 0)\n LogInfo('BVH OK', time.clock() - start, 'seconds')\n start = time.clock()\n self.blitfx.SetTexture(outputTexture, outputTexture, outputTexture)\n rj = trinity.CreateRenderJob('FullOptixRenderer')\n rj.PushRenderTarget(self.outputRT)\n rj.PushDepthStencil(None)\n self.AddCallback(FullOptixRenderer.RaytraceFrame, 'Raytrace Frame', rj)\n rj.CopyRtToTexture(outputTexture).name = 'cuda -> outputTexture'\n rj.PopDepthStencil()\n rj.PopRenderTarget()\n rj.SetStdRndStates(trinity.RM_FULLSCREEN).name = 'fullscreen quad'\n rj.RenderEffect(self.blitfx.effect).name = ' blit'\n self.capture.CreateRenderSteps(rj, self.blitfx.effect)\n rj.steps.append(trinity.TriStepRenderFps())\n rj.ScheduleRecurring(insertFront=False)\n self.renderJob = rj\n LogInfo('final 
setup OK', time.clock() - start, 'seconds')\n model.display = False\n self.EnablePaperDollJobs(False)\n\n @staticmethod\n def EnablePaperDollJobs(enable):\n if False:\n for job in trinity.renderJobs.recurring:\n if 'paperdollrenderjob' in job.name.lower():\n for step in job.steps:\n step.enabled = enable\n\n if enable:\n trinity.device.tickInterval = 10\n else:\n trinity.device.tickInterval = 0\n\n def ApplySettings(self):\n self.optix.SetFloat('light_size', self.settings['light_size'])\n self.optix.SetFloat3('depthOfField', 1.0, self.settings['lens_radius'], 0)\n self.optix.SetFloat('HairShadows', self.settings['HairShadows'])\n self.optix.SetFloat('EnvMapBoost', self.settings['EnvMapBoost'] / 3.1415927)\n self.previousVP.Identity()\n\n def SetLensRadius(self, lens_radius):\n self.settings['lens_radius'] = lens_radius\n self.ApplySettings()\n\n def SetLensFocalDistance(self, lens_focal_distance):\n if lens_focal_distance <= 0:\n self.settings.pop('lens_focal_distance', 0)\n else:\n self.settings['lens_focal_distance'] = lens_focal_distance\n self.ApplySettings()\n\n def SetLightSize(self, light_size):\n self.settings['light_size'] = light_size\n self.ApplySettings()\n\n def SetHairShadowsEnabled(self, enabled):\n self.settings['HairShadows'] = float(enabled)\n self.ApplySettings()\n\n def SetBackgroundIntensity(self, intensity):\n self.settings['EnvMapBoost'] = intensity\n self.ApplySettings()\n\n def __init__(self, scene = None, backgroundBitmap = None, memento = None, beardLength = (0.01, 0.01), beardGravity = 0.0005, outputWidth = None, outputHeight = None, asyncSetup = True, listenForUpdate = True):\n LogInfo('init', self)\n blue.motherLode.maxMemUsage = 0\n blue.resMan.ClearAllCachedObjects()\n self.framecount = 0\n self.listenForUpdate = listenForUpdate\n if memento is not None:\n self.settings = memento\n else:\n self.settings = {}\n self.settings['light_size'] = 0.125\n self.settings['lens_radius'] = 0.001\n self.settings['HairShadows'] = 1.0\n 
self.settings['EnvMapBoost'] = 1.0\n self.settings['backgroundBitmap'] = backgroundBitmap if backgroundBitmap is not None else 'res:/texture/global/red_blue_ramp.dds'\n self.settings['beardLength'] = beardLength\n self.settings['beardGravity'] = beardGravity\n if outputWidth is not None:\n self.settings['outputWidth'] = outputWidth\n if outputHeight is not None:\n self.settings['outputHeight'] = outputHeight\n if asyncSetup:\n uthread.new(self._DoInit, scene=scene)\n else:\n self._DoInit(scene=scene)\n\n def GetMemento(self):\n return self.settings\n\n def __del__(self):\n LogInfo('deleting', self)\n if hasattr(self, 'renderJob'):\n self.renderJob.UnscheduleRecurring()\n self.renderJob = None\n del self.raygen\n del self.rayCounter\n del self.oit\n del self.outputBuffer\n del self.skinnedOptix\n del self.everything\n LogInfo('Post-cleanup leak check:')\n self.optix.ReportObjectCounts()\n self.EnablePaperDollJobs(True)\n\n @staticmethod\n def Pause():\n if FullOptixRenderer.instance is not None:\n FullOptixRenderer.instance.renderJob.UnscheduleRecurring()\n\n @staticmethod\n def NotifyUpdate():\n if FullOptixRenderer.instance is not None and FullOptixRenderer.instance.listenForUpdate:\n LogInfo('NotifyUpdate, restarting', FullOptixRenderer.instance)\n memento = FullOptixRenderer.instance.GetMemento()\n FullOptixRenderer.instance = None\n FullOptixRenderer.instance = FullOptixRenderer(memento=memento)\n\n\nclass ShipOptixRenderer():\n __guid__ = 'paperDoll.ShipOptixRenderer'\n instance = None\n\n def AddCallback(self, func, name, rj):\n cb = trinity.TriStepPythonCB()\n weakSelf = weakref.ref(self)\n cb.SetCallback(lambda : SkinRaytracingTools.FuncWrapper(weakSelf, func))\n cb.name = name\n rj.steps.append(cb)\n\n def GetFrameCount(self):\n return self.framecount\n\n def SaveScreenshot(self, filename):\n self.capture.SaveSurfaceToFile(filename)\n\n def AddRenderPreviewStep(self, renderJob):\n renderJob.SetStdRndStates(trinity.RM_FULLSCREEN).name = ' [optix] fullscreen 
quad'\n renderJob.PushDepthStencil(None).name = ' [optix] push depth'\n renderJob.RenderEffect(self.blitfx.effect).name = ' [optix] Blit to screenshot'\n renderJob.PopDepthStencil().name = ' [optix] pop depth'\n\n @staticmethod\n def RaytraceFrame(selfRef):\n start = time.time()\n VP = SkinRaytracingTools.SetOptixMatrixFromTrinity(selfRef.optix, 'clipToWorld', selfRef.width / float(selfRef.height))\n if not SkinRaytracingTools.matEqual(VP, selfRef.previousVP):\n selfRef.previousVP = VP\n selfRef.outputBuffer.Clear()\n selfRef.framecount = 0\n pos1 = (0, 0, 0)\n pos2 = pos1\n dist1 = geo2.Vec3Distance(pos1, trinity.GetViewPosition())\n dist2 = geo2.Vec3Distance(pos2, trinity.GetViewPosition())\n autodof = min(dist1, dist2)\n dof = selfRef.settings.get('lens_focal_distance', autodof)\n LogInfo('Auto-depth-of-field is at', autodof, ', actual focal distance is', dof)\n selfRef.optix.SetFloat3('depthOfField', dof - trinity.GetFrontClip(), selfRef.settings['lens_radius'], 0)\n else:\n selfRef.framecount += 1\n selfRef.optix.SetUInt('frameIteration', selfRef.framecount)\n selfRef.oit.ResetAllocationCount()\n selfRef.rayCounter.ResetCount()\n time1 = time.time()\n selfRef.optix.Run(0, selfRef.width, selfRef.height)\n time2 = time.time()\n traceTime = time2 - time1\n raycount = selfRef.rayCounter.GetCount()\n raysec = 0\n if traceTime > 0:\n raysec = raycount / float(traceTime)\n time3 = time.time()\n if selfRef.framecount % 32 == 0:\n oit = selfRef.oit.GetAllocationCount()\n if oit > 0:\n print oit, 'oit allocations'\n selfRef.blitfx.UpdateFrameCount(selfRef.framecount)\n selfRef.outputBuffer.CopyToTexture(selfRef.outputTexture)\n stop = time.time()\n message = 'time: call %05.3f / trace %05.3f / read %05.3f ms' % (float(time1 - start) * 1000, float(time2 - time1) * 1000, float(stop - time3) * 1000)\n message += '// traced %d rays in %05.3f ms / %10d Krays/sec / %d frames' % (raycount,\n traceTime * 1000,\n raysec / 1000,\n selfRef.framecount)\n LogInfo(message)\n\n def 
ConvertCubeMapToSH(self, optix, ptxPath, cubeResPath):\n self.shBuffer = trinity.Tr2OptixBuffer()\n self.shBuffer.CreateFloat4(9, 1, trinity.OPTIX_BUFFER_INPUT_OUTPUT)\n optix.SetBuffer('sh_buffer', self.shBuffer)\n self.shBuffer.Clear()\n program = trinity.Tr2OptixProgram(ptxPath + 'cubemapsh.ptx', 'kernel')\n optix.SetRayGenerationProgram(0, program)\n optix.ReportObjectCounts()\n cube = trinity.TriTextureCubeParameter()\n cube.resourcePath = cubeResPath\n cube.name = 'Nebula'\n blue.resMan.Wait()\n mipmaps, names = SkinRaytracingTools.ConvertCubeToTextures(cube)\n for i in range(len(names)):\n if i < len(mipmaps):\n sampler = trinity.Tr2OptixTextureSampler()\n sampler.CreateFromTexture(mipmaps[i])\n sampler.SetNormalizedIndexingMode(True)\n optix.SetSampler(cube.name + names[i], sampler)\n LogInfo('No-Copy Cube Side Interop for ' + cube.name + names[i])\n\n optix.Run(0, cube.resource.width, cube.resource.width)\n if False:\n names = ['Y00',\n 'Y1m1',\n 'Y10',\n 'Y11',\n 'Y2m2',\n 'Y2m1',\n 'Y20',\n 'Y21',\n 'Y22']\n self.shBuffer.Map()\n ofs = 0\n for name in names:\n print name, ': (',\n print self.shBuffer.GetUserDataF(ofs), ',',\n ofs = ofs + 4\n print self.shBuffer.GetUserDataF(ofs), ',',\n ofs = ofs + 4\n print self.shBuffer.GetUserDataF(ofs), ')'\n ofs = ofs + 4\n\n self.shBuffer.Unmap()\n\n def CachedCreateMaterial(self, path, effect):\n material = self.materialCache.get(effect, None)\n if material is not None:\n return material\n shader = None\n if effect in ('tripleglowv3', 'doubleglowv3', 'singleglowv3'):\n shader = trinity.Tr2OptixProgram(path + 'v3ship_glow.ptx', 'closest_hit_' + effect)\n elif effect in ('singleheatv3',):\n shader = trinity.Tr2OptixProgram(path + 'v3ship_heat.ptx', 'closest_hit_' + effect)\n elif effect in ('tripleglowoilv3',):\n shader = trinity.Tr2OptixProgram(path + 'v3ship_glow_oil.ptx', 'closest_hit_' + effect)\n elif effect == 'skinned_tripleglowv3':\n shader = trinity.Tr2OptixProgram(path + 'v3ship_glow.ptx', 
'closest_hit_tripleglowv3')\n if shader is None:\n return\n material = trinity.Tr2OptixMaterial()\n material.SetClosestHit(0, shader)\n material.SetAnyHit(1, self.any_hit_shadow)\n return material\n\n def _DoInit(self, scene = None):\n if scene is None:\n scene = trinity.device.scene\n self.scene = scene\n self.previousVP = trinity.TriMatrix()\n self.framecount = 1\n self.materialCache = {}\n self.useOIT = True\n if scene is None:\n LogWarn('No scene!')\n return\n bg = trinity.renderContext.GetDefaultBackBuffer()\n step = trinity.renderJobs.FindStepByName('SET_SWAPCHAIN_RT')\n if step is not None:\n bg = step.renderTarget\n self.width = self.settings.get('outputWidth', bg.width)\n self.height = self.settings.get('outputHeight', bg.height)\n self.blitfx = FullScreenBlitter(self.width, self.height)\n bloomScale = 4\n if False:\n self.highpassRT = PD.SkinLightmapRenderer.CreateRenderTarget(self.width / bloomScale, self.height / bloomScale, trinity.PIXEL_FORMAT.R32G32B32A32_FLOAT)\n self.filteredRT = PD.SkinLightmapRenderer.CreateRenderTarget(self.width / bloomScale, self.height / bloomScale, trinity.PIXEL_FORMAT.R32G32B32A32_FLOAT)\n outputTexture = trinity.TriTextureRes(self.width, self.height, 1, trinity.PIXEL_FORMAT.R32G32B32A32_FLOAT)\n self.outputTexture = outputTexture\n self.capture = CaptureHelper(self.width, self.height)\n for steps in trinity.renderJobs.recurring:\n if steps.name == 'ShipOptixRenderer':\n steps.UnscheduleRecurring()\n\n path = str(blue.paths.ResolvePath('res:/graphics/effect/optix/ship/'))\n self.path = path\n LogInfo('Getting files from', path)\n start = time.clock()\n optix = trinity.Tr2Optix()\n self.optix = optix\n optix.SetInteropDevice()\n optix.SetRayTypeCount(4)\n optix.SetEntryPointCount(1)\n if False:\n optix.EnableAllExceptions()\n if False:\n optix.SetPrintEnabled(True)\n optix.SetPrintBufferSize(16384)\n optix.SetFloat('scene_epsilon', 0.01)\n optix.SetUInt('frameIteration', 0)\n nebula = 
PD.FindResourceByName(scene.backgroundEffect, 'NebulaMap') if scene.backgroundEffect is not None else None\n if nebula is not None:\n LogInfo('Converting to SH ', nebula.resourcePath)\n self.ConvertCubeMapToSH(optix, path, nebula.resourcePath)\n else:\n self.shBuffer = trinity.Tr2OptixBuffer()\n self.shBuffer.CreateFloat4(9, 1, trinity.OPTIX_BUFFER_INPUT_OUTPUT)\n optix.SetBuffer('sh_buffer', self.shBuffer)\n self.shBuffer.Clear()\n self.outputBuffer = trinity.Tr2OptixBuffer()\n self.outputBuffer.CreateFloat4(self.width, self.height, trinity.OPTIX_BUFFER_INPUT_OUTPUT)\n optix.SetBuffer('output_buffer', self.outputBuffer)\n self.ApplySettings()\n everything = []\n mainRay = 0\n shadowRay = 1\n bounceRay = 3\n\n def MakeMaterialWithShader(shader):\n return (material, shader)\n\n def MakeMaterial(ptxFile, shaderName):\n everything.append(shader)\n return MakeMaterialWithShader(shader)\n\n LogInfo('global setup OK', time.clock() - start, 'seconds')\n useHdrProbe = False\n start = time.clock()\n self.rayCounter = RayCountHelper(self.optix)\n self.oit = OitHelper(self.optix)\n self.raygen = trinity.Tr2OptixProgram(path + 'raygen.ptx', 'ray_request')\n shader = trinity.Tr2OptixProgram(path + 'vizualizer.ptx', 'closest_hit_VizGreen')\n viz_material = trinity.Tr2OptixMaterial()\n viz_material.SetClosestHit(0, shader)\n everything.append(viz_material)\n if False:\n any_hit_shadow = trinity.Tr2OptixProgram(path + 'shadow.ptx', 'any_hit_shadow')\n viz_material.SetAnyHit(1, any_hit_shadow)\n self.any_hit_shadow = any_hit_shadow\n else:\n self.any_hit_shadow = None\n start = time.clock()\n nameTranslation = {'GlowNormalSpecularMap': 'NormalMap'}\n\n def GroupByVertexBuffer(optixBatches):\n output = []\n for batchType in range(len(optixBatches)):\n batches = optixBatches[batchType]\n vbDict = {}\n for batch in batches:\n vb = batch[2]\n list = vbDict.get(vb, None)\n if list is not None:\n list.append(batch)\n else:\n vbDict[vb] = [batch]\n\n list = []\n for vb in 
vbDict.iterkeys():\n list.append(vbDict[vb])\n\n output.append(list)\n\n return output\n\n cache = {}\n programs = {'skinned_tripleglowv3_48': 'triangle48',\n 'singlev3_48': 'triangle48',\n 'singleheatv3_48': 'triangle48',\n 'tripleglowv3_40': 'triangle40',\n 'singleheatv3_40': 'triangle40',\n 'singlefresnelreflectionwithglow_56': 'triangle56',\n 'doublefresnelreflectionwithglow_56': 'triangle56',\n 'tripleglowoilv3_80': 'triangle80'}\n if False:\n nullintersect = trinity.Tr2OptixProgram(path + 'nullgeometry.ptx', 'intersect')\n nullbounds = trinity.Tr2OptixProgram(path + 'nullgeometry.ptx', 'bounds')\n everything.append(nullintersect)\n everything.append(nullbounds)\n mylogOK = set({})\n mylogFail = set({})\n linearNames = set({})\n linearNames.add('MaterialDiffuseColor')\n linearNames.add('MaterialReflectionColor')\n linearNames.add('MaskDiffuseColor')\n linearNames.add('MaskReflectionColor')\n linearNames.add('SubMaskDiffuseColor')\n linearNames.add('SubMaskReflectionColor')\n linearNames.add('GlowColor')\n topScene = trinity.Tr2OptixGroup()\n interopSamplerCache = {}\n for dynamic in scene.objects:\n if dynamic.__typename__ not in ('EveShip2', 'EveStation2'):\n continue\n model = dynamic\n if model.highDetailMesh is None or model.highDetailMesh.object is None:\n LogWarn('ship has no high detail meshes')\n continue\n skinnedOptix = optix.CreateFromEveSpaceObject2(model, 0, '', '', '')\n everything.append(skinnedOptix)\n optixBatches = skinnedOptix[0]\n self.objectsToRefresh[model] = skinnedOptix\n sorted = GroupByVertexBuffer(optixBatches)\n groups = []\n for batchType in range(len(optixBatches)):\n isOpaque = batchType == 0\n vbBatches = sorted[batchType]\n for batches in vbBatches:\n groupChildren = []\n for batch in batches:\n effect = batch[1].effectFilePath.lower()\n effect = effect[effect.rfind('/') + 1:]\n effect = effect[:effect.rfind('.fx')]\n ptx = programs.get(effect + '_' + str(batch[8]), '')\n if ptx == '':\n mylogFail.add(effect)\n 
batch[0].SetIntersectProgram(nullintersect)\n batch[0].SetBoundsProgram(nullbounds)\n continue\n mylogOK.add(effect)\n intersect, bounds = cache.get(ptx, (None, None))\n if intersect is None:\n intersect = trinity.Tr2OptixProgram(path + ptx + '.ptx', 'intersect')\n bounds = trinity.Tr2OptixProgram(path + ptx + '.ptx', 'bounds')\n cache[ptx] = (intersect, bounds)\n batch[0].SetIntersectProgram(intersect)\n batch[0].SetBoundsProgram(bounds)\n batchGeometryInstance = trinity.Tr2OptixGeometryInstance()\n everything.append(batchGeometryInstance)\n batchGeometryInstance.SetGeometry(batch[0])\n if True:\n material = self.CachedCreateMaterial(path, effect)\n if material is None:\n material = viz_material\n else:\n material = viz_material\n batchGeometryInstance.SetMaterial(material)\n SkinRaytracingTools.CopyParametersToContext(batch[1], batchGeometryInstance, linearNames)\n groupChildren.append(batchGeometryInstance)\n samplers = SkinRaytracingTools.InteropAllTexturesFromEffect(optix, batch[1], waitForFinish=True, nameTranslation=nameTranslation, scope=batchGeometryInstance, cache=interopSamplerCache)\n everything.append(samplers)\n\n group = trinity.Tr2OptixGeometryGroup()\n group.SetChildCount(len(groupChildren))\n for x in xrange(len(groupChildren)):\n group.SetChild(x, groupChildren[x])\n\n group.SetAcceleration('Bvh', 'Bvh')\n self.objectsToMarkDirty.append(group)\n groups.append(group)\n\n everything.append(cache)\n baseOffset = topScene.GetChildCount()\n topScene.SetChildCount(baseOffset + len(groups))\n for x in xrange(len(groups)):\n topScene.SetChild(baseOffset + x, groups[x])\n\n everything.append(groups)\n\n if False:\n sphereGeometry = trinity.Tr2OptixGeometry()\n sphereGeometry.InitializeFromProgram(path + 'sphere_program.ptx', 'intersect', 'bounds')\n sphereGeometry.SetPrimitiveCount(1)\n everything.append(sphereGeometry)\n sphereInstance = trinity.Tr2OptixGeometryInstance()\n sphereInstance.SetGeometry(sphereGeometry)\n 
sphereInstance.SetMaterial(viz_material)\n sphereInstance.SetFloat4('pos_r', 0, 0, 0, 100)\n sphereInstance.SetFloat4('color_watt', 1, 0, 0, 1)\n everything.append(sphereInstance)\n group = trinity.Tr2OptixGeometryGroup()\n group.SetChildCount(1)\n group.SetChild(0, sphereInstance)\n group.SetAcceleration('Bvh', 'Bvh')\n topScene.SetChildCount(topScene.GetChildCount() + 1)\n topScene.SetChild(topScene.GetChildCount() - 1, group)\n everything.append(topScene)\n topScene.SetAcceleration('Bvh', 'Bvh')\n self.objectsToMarkDirty.append(topScene)\n optix.SetGroup('top_scene', topScene)\n optix.SetGroup('shadow_casters', topScene)\n if len(mylogOK) > 0:\n LogInfo('Converted succesfully:', str(mylogOK))\n else:\n LogWarn('No effects converted succesfully!')\n if len(mylogFail) > 0:\n LogWarn('Failed to convert:', str(mylogFail))\n if type(scene) == trinity.EveSpaceScene:\n c = SkinRaytracingTools.SafeLinearize(scene.sunDiffuseColor)\n optix.SetFloat4('SunDiffuseColor', c[0], c[1], c[2], c[3])\n c = scene.sunDirection\n optix.SetFloat4('SunDirWorld', -c[0], -c[1], -c[2], 0)\n c = SkinRaytracingTools.SafeLinearize(scene.ambientColor)\n optix.SetFloat4('SceneAmbientColor', c[0], c[1], c[2], c[3])\n c = SkinRaytracingTools.SafeLinearize(scene.fogColor)\n optix.SetFloat4('SceneFogColor', c[0], c[1], c[2], c[3])\n LogInfo('scene interop OK', time.clock() - start, 'seconds')\n start = time.clock()\n light = trinity.Tr2InteriorLightSource()\n if True:\n wattage = 2000000\n light.color = (1,\n 1,\n 1,\n wattage)\n light.radius = 50\n light.position = (200, 500, -300)\n else:\n wattage = 10000000\n light.color = (1,\n 1,\n 1,\n wattage)\n light.radius = 1000\n light.position = (0, 0, 0)\n bufEveLights = SkinRaytracingTools.CreateBufferForLights([], useHdrProbe, preserveAlpha=True)\n optix.SetBuffer('trinity_lights', bufEveLights)\n LogInfo('lights interop OK', time.clock() - start, 'seconds')\n if False:\n sphereGeometry = trinity.Tr2OptixGeometry()\n 
sphereGeometry.InitializeFromProgram(path + 'sphere_program.ptx', 'intersect', 'bounds')\n sphereGeometry.SetPrimitiveCount(1)\n sphereMaterial = trinity.Tr2OptixMaterial()\n sphereShader = trinity.Tr2OptixProgram(path + 'sphere_program.ptx', 'closest_hit_radiance')\n sphereMaterial.SetClosestHit(0, sphereShader)\n sphereInstance = trinity.Tr2OptixGeometryInstance()\n sphereInstance.SetGeometry(sphereGeometry)\n sphereInstance.SetMaterial(sphereMaterial)\n sphereInstance.SetFloat4('pos_r', light.position[0], light.position[1], light.position[2], light.radius)\n sphereInstance.SetFloat4('color_watt', light.color[0], light.color[1], light.color[2], light.color[3])\n n = topScene.GetChildCount()\n topScene.SetChildCount(n + 1)\n sphereGroup = trinity.Tr2OptixGeometryGroup()\n sphereGroup.SetChildCount(1)\n sphereGroup.SetChild(0, sphereInstance)\n sphereGroup.SetAcceleration('Bvh', 'Bvh')\n topScene.SetChild(n, sphereGroup)\n start = time.clock()\n optix.SetRayGenerationProgram(0, self.raygen)\n optix.SetEntryPointCount(1)\n miss = None\n if not useHdrProbe:\n miss = trinity.Tr2OptixProgram(path + 'eve_miss.ptx', 'miss')\n else:\n miss = trinity.Tr2OptixProgram(path + 'eve_miss_probe.ptx', 'miss')\n optix.SetMissProgram(3, miss)\n optix.SetFloat3('bg_color', 1.0, 0, 0)\n everything.append(miss)\n if False:\n exception = trinity.Tr2OptixProgram(path + 'eve_miss.ptx', 'exception')\n optix.SetExceptionProgram(0, exception)\n everything.append(exception)\n optix.SetStackSize(4096)\n self.everything = everything\n SkinRaytracingTools.SetOptixMatrixFromTrinity(optix, 'clipToWorld', self.width / float(self.height))\n LogInfo('general setup OK', time.clock() - start, 'seconds')\n optix.ReportObjectCounts()\n start = time.clock()\n optix.Compile()\n LogInfo('compile OK', time.clock() - start, 'seconds')\n start = time.clock()\n optix.Validate()\n LogInfo('validate OK', time.clock() - start, 'seconds')\n start = time.clock()\n optix.Run(0, 0, 0)\n LogInfo('BVH OK', time.clock() 
- start, 'seconds')\n start = time.clock()\n if False:\n self.blitfx.SetTexture(outputTexture, self.highpassRT, self.filteredRT)\n else:\n self.blitfx.SetTexture(outputTexture, outputTexture, outputTexture)\n rj = trinity.CreateRenderJob('ShipOptixRenderer')\n self.AddCallback(ShipOptixRenderer.RaytraceFrame, 'Raytrace Frame', rj)\n rj.SetStdRndStates(trinity.RM_FULLSCREEN).name = 'fullscreen state'\n if False:\n rj.SetRenderTarget(self.highpassRT.wrappedRenderTarget).name = ' SetRT highpassRT'\n rj.RenderEffect(self.blitfx.highpassEffect).name = ' high pass'\n rj.SetRenderTarget(self.filteredRT.wrappedRenderTarget).name = ' SetRT filteredRT'\n rj.RenderEffect(self.blitfx.gaussianHorizEffect).name = ' horizontal blur'\n rj.SetRenderTarget(self.highpassRT.wrappedRenderTarget).name = ' SetRT highpassRT'\n rj.RenderEffect(self.blitfx.gaussianVertEffect).name = ' vertical blur'\n rj.SetStdRndStates(trinity.RM_FULLSCREEN).name = 'fullscreen state'\n rj.RenderEffect(self.blitfx.effect).name = ' blit'\n tp2 = None\n for job in trinity.renderJobs.recurring:\n if job.name == 'TrinityPanel:View1':\n tp2 = job\n\n if tp2 is None:\n rj.ScheduleRecurring(insertFront=False)\n else:\n final = None\n for step in tp2.steps:\n if step.name == 'SET_FINAL_RT':\n final = step\n break\n\n if final is not None:\n tp2.steps.insert(tp2.steps.index(final), trinity.TriStepRunJob(rj))\n else:\n tp2.steps.append(trinity.TriStepRunJob(rj))\n self.renderJob = rj\n LogInfo('final setup OK', time.clock() - start, 'seconds')\n FullOptixRenderer.EnablePaperDollJobs(False)\n\n def ApplySettings(self):\n self.optix.SetFloat('light_size', self.settings['light_size'])\n self.optix.SetFloat3('depthOfField', 1.0, self.settings['lens_radius'], 0)\n self.optix.SetFloat('HairShadows', self.settings['HairShadows'])\n self.optix.SetFloat('EnvMapBoost', self.settings['EnvMapBoost'] / 3.1415927)\n self.previousVP.Identity()\n\n def SetLensRadius(self, lens_radius):\n self.settings['lens_radius'] = lens_radius\n 
self.ApplySettings()\n\n def SetLensFocalDistance(self, lens_focal_distance):\n if lens_focal_distance <= 0:\n self.settings.pop('lens_focal_distance', 0)\n else:\n self.settings['lens_focal_distance'] = lens_focal_distance\n self.ApplySettings()\n\n def SetLightSize(self, light_size):\n self.settings['light_size'] = light_size\n self.ApplySettings()\n\n def SetHairShadowsEnabled(self, enabled):\n self.settings['HairShadows'] = float(enabled)\n self.ApplySettings()\n\n def SetBackgroundIntensity(self, intensity):\n self.settings['EnvMapBoost'] = intensity\n self.ApplySettings()\n\n def __init__(self, scene = None, backgroundBitmap = None, memento = None, beardLength = (0.01, 0.01), beardGravity = 0.0005, outputWidth = None, outputHeight = None, asyncSetup = True, listenForUpdate = True):\n LogInfo('init', self)\n blue.motherLode.maxMemUsage = 0\n blue.resMan.ClearAllCachedObjects()\n self.framecount = 0\n self.listenForUpdate = listenForUpdate\n self.everything = None\n self.objectsToRefresh = {}\n self.objectsToMarkDirty = []\n if memento is not None:\n self.settings = memento\n else:\n self.settings = {}\n self.settings['light_size'] = 0.125\n self.settings['lens_radius'] = 0.001\n self.settings['HairShadows'] = 1.0\n self.settings['EnvMapBoost'] = 1.0\n self.settings['backgroundBitmap'] = backgroundBitmap if backgroundBitmap is not None else 'res:/texture/global/red_blue_ramp.dds'\n self.settings['beardLength'] = beardLength\n self.settings['beardGravity'] = beardGravity\n if outputWidth is not None:\n self.settings['outputWidth'] = outputWidth\n if outputHeight is not None:\n self.settings['outputHeight'] = outputHeight\n if asyncSetup:\n uthread.new(self._DoInit, scene=scene)\n else:\n self._DoInit(scene=scene)\n\n def GetMemento(self):\n return self.settings\n\n def __del__(self):\n LogInfo('deleting', self)\n if hasattr(self, 'renderJob'):\n self.renderJob.UnscheduleRecurring()\n self.renderJob = None\n del self.any_hit_shadow\n del self.raygen\n del 
self.rayCounter\n del self.oit\n del self.shBuffer\n del self.outputBuffer\n del self.everything\n del self.objectsToRefresh\n del self.objectsToMarkDirty\n self.optix.ClearObjects()\n LogInfo('Post-cleanup leak check:')\n self.optix.ReportObjectCounts()\n FullOptixRenderer.EnablePaperDollJobs(True)\n\n def RefreshMatrices(self):\n for ship, optixList in self.objectsToRefresh.iteritems():\n self.optix.RefreshMatrices(ship, optixList)\n\n for dirty in self.objectsToMarkDirty:\n dirty.MarkDirty()\n\n self.ApplySettings()\n LogInfo('Refreshed')\n\n @staticmethod\n def Pause():\n if FullOptixRenderer.instance is not None:\n FullOptixRenderer.instance.renderJob.UnscheduleRecurring()\n\n @staticmethod\n def NotifyUpdate():\n if FullOptixRenderer.instance is not None and FullOptixRenderer.instance.listenForUpdate:\n LogInfo('NotifyUpdate, restarting', FullOptixRenderer.instance)\n memento = FullOptixRenderer.instance.GetMemento()\n FullOptixRenderer.instance = None\n FullOptixRenderer.instance = FullOptixRenderer(memento=memento)",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
from django.db import models
from datetime import datetime
class Message(models.Model):
    """A votable text message with a running score.

    NOTE(review): ``__unicode__`` is the Python 2 string hook; under
    Python 3 Django uses ``__str__`` — confirm the target Python version.
    """
    text = models.CharField(max_length=200)
    votes = models.IntegerField()
    # Callable default: evaluated per-save, not once at class definition.
    date_added = models.DateTimeField(default=datetime.now)
    score = models.BigIntegerField()
    next_vote = models.IntegerField(default=3600)  # default 3600 s = 1 hour (86400 s would be a day)
    def __unicode__(self):
        # Debug-friendly one-line dump of all fields, colon-separated.
        return self.text + ' : '+ str(self.votes) + ' : '+str(self.date_added) + ' : ' + str(self.score) + ' : '+str(self.next_vote) + '\n'
|
normal
|
{
"blob_id": "7159b447ed6fcb2005f63c7b7359970defbc9d43",
"index": 1496,
"step-1": "<mask token>\n\n\nclass Message(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass Message(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __unicode__(self):\n return self.text + ' : ' + str(self.votes) + ' : ' + str(self.\n date_added) + ' : ' + str(self.score) + ' : ' + str(self.next_vote\n ) + '\\n'\n",
"step-3": "<mask token>\n\n\nclass Message(models.Model):\n text = models.CharField(max_length=200)\n votes = models.IntegerField()\n date_added = models.DateTimeField(default=datetime.now)\n score = models.BigIntegerField()\n next_vote = models.IntegerField(default=3600)\n\n def __unicode__(self):\n return self.text + ' : ' + str(self.votes) + ' : ' + str(self.\n date_added) + ' : ' + str(self.score) + ' : ' + str(self.next_vote\n ) + '\\n'\n",
"step-4": "from django.db import models\nfrom datetime import datetime\n\n\nclass Message(models.Model):\n text = models.CharField(max_length=200)\n votes = models.IntegerField()\n date_added = models.DateTimeField(default=datetime.now)\n score = models.BigIntegerField()\n next_vote = models.IntegerField(default=3600)\n\n def __unicode__(self):\n return self.text + ' : ' + str(self.votes) + ' : ' + str(self.\n date_added) + ' : ' + str(self.score) + ' : ' + str(self.next_vote\n ) + '\\n'\n",
"step-5": "from django.db import models\nfrom datetime import datetime\n\nclass Message(models.Model):\n text = models.CharField(max_length=200)\n votes = models.IntegerField()\n date_added = models.DateTimeField(default=datetime.now)\n score = models.BigIntegerField()\n next_vote = models.IntegerField(default=3600) # 86400 seconds in a day\n\n def __unicode__(self):\n return self.text + ' : '+ str(self.votes) + ' : '+str(self.date_added) + ' : ' + str(self.score) + ' : '+str(self.next_vote) + '\\n'\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
CORS(app)
app.config.from_object(Config)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
app = Flask(__name__)
CORS(app)
app.config.from_object(Config)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///ws.db'
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
db = SQLAlchemy(app)
migrate = Migrate(app, db)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
from config import Config
from flask import Flask
from flask_cors import CORS
from flask_migrate import Migrate
from flask_sqlalchemy import SQLAlchemy
app = Flask(__name__)
CORS(app)
app.config.from_object(Config)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///ws.db'
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
db = SQLAlchemy(app)
migrate = Migrate(app, db)
from ws import routes
<|reserved_special_token_1|>
# Flask application bootstrap for the `ws` package.
from config import Config
from flask import Flask
from flask_cors import CORS
from flask_migrate import Migrate
from flask_sqlalchemy import SQLAlchemy
app = Flask(__name__)
CORS(app)  # allow cross-origin requests from the frontend
app.config.from_object(Config)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///ws.db'
# app.config['SQLALCHEMY_DATABASE_URI'] = 'mysql+pymysql://api:uyLmQ5M1AjCvm1R2@localhost/ws'
# NOTE(review): the commented MySQL URI above embeds credentials — move them to config/env.
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False  # disable event system overhead/warning
db = SQLAlchemy(app)
migrate = Migrate(app, db)
# Imported last (circular import): route handlers need the configured `app`/`db`.
from ws import routes
|
flexible
|
{
"blob_id": "f494d8aeee8c72cce8fc14e44ca896bcf30c100a",
"index": 5627,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nCORS(app)\napp.config.from_object(Config)\n<mask token>\n",
"step-3": "<mask token>\napp = Flask(__name__)\nCORS(app)\napp.config.from_object(Config)\napp.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///ws.db'\napp.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False\ndb = SQLAlchemy(app)\nmigrate = Migrate(app, db)\n<mask token>\n",
"step-4": "from config import Config\nfrom flask import Flask\nfrom flask_cors import CORS\nfrom flask_migrate import Migrate\nfrom flask_sqlalchemy import SQLAlchemy\napp = Flask(__name__)\nCORS(app)\napp.config.from_object(Config)\napp.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///ws.db'\napp.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False\ndb = SQLAlchemy(app)\nmigrate = Migrate(app, db)\nfrom ws import routes\n",
"step-5": "from config import Config\nfrom flask import Flask\nfrom flask_cors import CORS\nfrom flask_migrate import Migrate\nfrom flask_sqlalchemy import SQLAlchemy\n\napp = Flask(__name__)\nCORS(app)\napp.config.from_object(Config)\napp.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///ws.db'\n# app.config['SQLALCHEMY_DATABASE_URI'] = 'mysql+pymysql://api:uyLmQ5M1AjCvm1R2@localhost/ws'\napp.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False\n\ndb = SQLAlchemy(app)\nmigrate = Migrate(app, db)\n\nfrom ws import routes\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
#!/usr/bin/python3
# -*- coding: UTF-8 -*-
# Pulse physical pin 40 high for 5 s, then low for 1 s, then release the GPIOs.
import RPi.GPIO as gpio  # import the RPi.GPIO library under the alias `gpio`
import time
gpio.setmode(gpio.BOARD)  # use physical BOARD pin numbering
pin = 40
gpio.setup(pin, gpio.OUT)  # configure pin 40 as an output
gpio.output(pin, gpio.HIGH)  # drive pin 40 high
time.sleep(5)  # hold high for 5 seconds
gpio.output(pin, gpio.LOW)  # drive pin 40 low
time.sleep(1)  # hold low for 1 second
gpio.cleanup()  # release the GPIO pins used by this script
|
normal
|
{
"blob_id": "cfdfc490396546b7af732417b506100357cd9a1f",
"index": 6762,
"step-1": "<mask token>\n",
"step-2": "<mask token>\ngpio.setmode(gpio.BOARD)\n<mask token>\ngpio.setup(pin, gpio.OUT)\ngpio.output(pin, gpio.HIGH)\ntime.sleep(5)\ngpio.output(pin, gpio.LOW)\ntime.sleep(1)\ngpio.cleanup()\n",
"step-3": "<mask token>\ngpio.setmode(gpio.BOARD)\npin = 40\ngpio.setup(pin, gpio.OUT)\ngpio.output(pin, gpio.HIGH)\ntime.sleep(5)\ngpio.output(pin, gpio.LOW)\ntime.sleep(1)\ngpio.cleanup()\n",
"step-4": "import RPi.GPIO as gpio\nimport time\ngpio.setmode(gpio.BOARD)\npin = 40\ngpio.setup(pin, gpio.OUT)\ngpio.output(pin, gpio.HIGH)\ntime.sleep(5)\ngpio.output(pin, gpio.LOW)\ntime.sleep(1)\ngpio.cleanup()\n",
"step-5": "#!/usr/bin/python3\n# -*- coding: UTF-8 -*-\n\nimport RPi.GPIO as gpio # 导入Rpi.GPIO库函数命名为GPIO\nimport time\n\ngpio.setmode(gpio.BOARD) #将GPIO编程方式设置为BOARD模式\n\npin = 40\n\ngpio.setup(pin, gpio.OUT) #控制pin号引脚\n\ngpio.output(pin, gpio.HIGH) #11号引脚输出高电平\ntime.sleep(5) #计时0.5秒\ngpio.output(pin, gpio.LOW) #11号引脚输出低电平\ntime.sleep(1) #计时1秒\n\ngpio.cleanup() #释放使用的GPIO引脚",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import matplotlib.pyplot as plt
from partisan_symmetry_noplot import partisan_symmetry
# Sweep a 99x99 grid of (x, y) parameters and save one heatmap per k.
for k in range(1,100):
    a=[]
    for i in range(1,100):
        a.append([])
        for j in range(1,100):
            # Middle argument fixed at 0.20; x and y sweep 0.05..4.95 in 0.05 steps.
            a[i-1].append(partisan_symmetry([5*i/100,.20,5*j/100],1000,False))
    plt.imshow(a)
    plt.colorbar()
    # Tick labels x/20 match the swept values (5*i/100 == i/20).
    plt.xticks(range(99),[x/20 for x in range(1,100)])
    plt.yticks(range(99),[x/20 for x in range(1,100)])
    # NOTE(review): k never feeds into partisan_symmetry, so all 99 saved figures
    # contain identical data — confirm whether the middle argument should vary with k.
    plt.title("Partisan Symmetry Difference for (x,"+str(k)+",y)")
    plt.savefig("./ps"+str(k)+".png")
    plt.close()
    print("figure",k,"done")
|
normal
|
{
"blob_id": "cfa0937f1c49b52283c562d9ab1cb0542e71b990",
"index": 5970,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor k in range(1, 100):\n a = []\n for i in range(1, 100):\n a.append([])\n for j in range(1, 100):\n a[i - 1].append(partisan_symmetry([5 * i / 100, 0.2, 5 * j / \n 100], 1000, False))\n plt.imshow(a)\n plt.colorbar()\n plt.xticks(range(99), [(x / 20) for x in range(1, 100)])\n plt.yticks(range(99), [(x / 20) for x in range(1, 100)])\n plt.title('Partisan Symmetry Difference for (x,' + str(k) + ',y)')\n plt.savefig('./ps' + str(k) + '.png')\n plt.close()\n print('figure', k, 'done')\n",
"step-3": "import matplotlib.pyplot as plt\nfrom partisan_symmetry_noplot import partisan_symmetry\nfor k in range(1, 100):\n a = []\n for i in range(1, 100):\n a.append([])\n for j in range(1, 100):\n a[i - 1].append(partisan_symmetry([5 * i / 100, 0.2, 5 * j / \n 100], 1000, False))\n plt.imshow(a)\n plt.colorbar()\n plt.xticks(range(99), [(x / 20) for x in range(1, 100)])\n plt.yticks(range(99), [(x / 20) for x in range(1, 100)])\n plt.title('Partisan Symmetry Difference for (x,' + str(k) + ',y)')\n plt.savefig('./ps' + str(k) + '.png')\n plt.close()\n print('figure', k, 'done')\n",
"step-4": "import matplotlib.pyplot as plt\nfrom partisan_symmetry_noplot import partisan_symmetry\nfor k in range(1,100):\n a=[]\n for i in range(1,100):\n a.append([])\n for j in range(1,100):\n a[i-1].append(partisan_symmetry([5*i/100,.20,5*j/100],1000,False))\n\n plt.imshow(a)\n plt.colorbar()\n plt.xticks(range(99),[x/20 for x in range(1,100)])\n plt.yticks(range(99),[x/20 for x in range(1,100)])\n plt.title(\"Partisan Symmetry Difference for (x,\"+str(k)+\",y)\")\n plt.savefig(\"./ps\"+str(k)+\".png\")\n plt.close()\n print(\"figure\",k,\"done\")\n \n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
#!/usr/bin/env python
# Expand '_'-separated DNA short reads into one read per line, then save the
# first line of the expanded file as the answer.
import fileinput  # NOTE(review): unused in this script; kept to avoid breaking external tooling

# Rewrite the matched short-reads file with '_' separators expanded to newlines.
# Context managers guarantee the handles are flushed and closed even on error.
with open('CompleteDNAsequence.txt', 'r') as src, \
        open('CompleteDNAsequence.txt.tmp', 'w') as dst:
    for line in src:
        dst.write(line.replace('_', '\n'))  # '_' becomes a newline

# Save only the first line of the expanded file.
with open('CompleteDNAsequence.txt.tmp') as tmp:
    first_line = tmp.readlines()[:1]  # [] if the file is empty, else [line]
with open('ANSWER.txt', 'w') as out:
    out.writelines(first_line)
|
normal
|
{
"blob_id": "d02ef5fc27cde353e90dda4090905b89b5be5c49",
"index": 2897,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor line in f1:\n f2.write(line.replace('_', '\\n'))\nf1.close()\nf2.close()\n<mask token>\nopen('ANSWER.txt', 'w').writelines(lines[:+1])\n",
"step-3": "<mask token>\nf1 = open('CompleteDNAsequence.txt', 'r')\nf2 = open('CompleteDNAsequence.txt.tmp', 'w')\nfor line in f1:\n f2.write(line.replace('_', '\\n'))\nf1.close()\nf2.close()\nlines = open('CompleteDNAsequence.txt.tmp').readlines()\nopen('ANSWER.txt', 'w').writelines(lines[:+1])\n",
"step-4": "import fileinput\nf1 = open('CompleteDNAsequence.txt', 'r')\nf2 = open('CompleteDNAsequence.txt.tmp', 'w')\nfor line in f1:\n f2.write(line.replace('_', '\\n'))\nf1.close()\nf2.close()\nlines = open('CompleteDNAsequence.txt.tmp').readlines()\nopen('ANSWER.txt', 'w').writelines(lines[:+1])\n",
"step-5": "#!/usr/bin/env python\n\nimport fileinput\n\n#open the file with the matched DNA short reads\n#create a file with the modified version\nf1 = open('CompleteDNAsequence.txt', 'r')\nf2 = open('CompleteDNAsequence.txt.tmp', 'w')\nfor line in f1:\n f2.write(line.replace('_', '\\n')) #replaces _ with tab\nf1.close()\nf2.close()\n\n#opens modified file, reads first line and saves it to new file\nlines = open('CompleteDNAsequence.txt.tmp').readlines()\nopen('ANSWER.txt', 'w').writelines(lines[:+1])\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
class HelloApiHandler(Resource):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class HelloApiHandler(Resource):
def get(self):
return {'resultStatus': 'SUCCESS', 'message': 'Hello Api Handler'}
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class HelloApiHandler(Resource):
def get(self):
return {'resultStatus': 'SUCCESS', 'message': 'Hello Api Handler'}
def post(self):
print(self)
parser = reqparse.RequestParser()
parser.add_argument('type', type=str)
parser.add_argument('message', type=str)
args = parser.parse_args()
print(args)
request_type = args['type']
request_json = args['message']
ret_status = request_type
ret_msg = request_json
if ret_msg:
message = 'Your Message Requested: {}'.format(ret_msg)
else:
message = 'No Msg'
final_ret = {'status': 'Success', 'message': message}
return final_ret
<|reserved_special_token_1|>
from flask_restful import Api, Resource, reqparse
class HelloApiHandler(Resource):
def get(self):
return {'resultStatus': 'SUCCESS', 'message': 'Hello Api Handler'}
def post(self):
print(self)
parser = reqparse.RequestParser()
parser.add_argument('type', type=str)
parser.add_argument('message', type=str)
args = parser.parse_args()
print(args)
request_type = args['type']
request_json = args['message']
ret_status = request_type
ret_msg = request_json
if ret_msg:
message = 'Your Message Requested: {}'.format(ret_msg)
else:
message = 'No Msg'
final_ret = {'status': 'Success', 'message': message}
return final_ret
<|reserved_special_token_1|>
from flask_restful import Api, Resource, reqparse
class HelloApiHandler(Resource):
    """REST resource: GET returns a fixed greeting; POST echoes the posted message."""

    def get(self):
        """Return a constant success payload."""
        return {
            'resultStatus': 'SUCCESS',
            'message': "Hello Api Handler",
        }

    def post(self):
        """Parse 'type' and 'message' from the request and echo the message back."""
        print(self)
        parser = reqparse.RequestParser()
        # Argument names must match the field names the frontend posts.
        parser.add_argument('type', type=str)
        parser.add_argument('message', type=str)
        args = parser.parse_args()
        print(args)

        ret_status = args['type']
        ret_msg = args['message']
        # Empty/missing message falls back to a placeholder.
        message = "Your Message Requested: {}".format(ret_msg) if ret_msg else "No Msg"
        return {"status": "Success", "message": message}
|
flexible
|
{
"blob_id": "80c3d9165c1b592122fabf6382e265465604989c",
"index": 1450,
"step-1": "<mask token>\n\n\nclass HelloApiHandler(Resource):\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass HelloApiHandler(Resource):\n\n def get(self):\n return {'resultStatus': 'SUCCESS', 'message': 'Hello Api Handler'}\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass HelloApiHandler(Resource):\n\n def get(self):\n return {'resultStatus': 'SUCCESS', 'message': 'Hello Api Handler'}\n\n def post(self):\n print(self)\n parser = reqparse.RequestParser()\n parser.add_argument('type', type=str)\n parser.add_argument('message', type=str)\n args = parser.parse_args()\n print(args)\n request_type = args['type']\n request_json = args['message']\n ret_status = request_type\n ret_msg = request_json\n if ret_msg:\n message = 'Your Message Requested: {}'.format(ret_msg)\n else:\n message = 'No Msg'\n final_ret = {'status': 'Success', 'message': message}\n return final_ret\n",
"step-4": "from flask_restful import Api, Resource, reqparse\n\n\nclass HelloApiHandler(Resource):\n\n def get(self):\n return {'resultStatus': 'SUCCESS', 'message': 'Hello Api Handler'}\n\n def post(self):\n print(self)\n parser = reqparse.RequestParser()\n parser.add_argument('type', type=str)\n parser.add_argument('message', type=str)\n args = parser.parse_args()\n print(args)\n request_type = args['type']\n request_json = args['message']\n ret_status = request_type\n ret_msg = request_json\n if ret_msg:\n message = 'Your Message Requested: {}'.format(ret_msg)\n else:\n message = 'No Msg'\n final_ret = {'status': 'Success', 'message': message}\n return final_ret\n",
"step-5": "from flask_restful import Api, Resource, reqparse\n\nclass HelloApiHandler(Resource):\n def get(self):\n return {\n 'resultStatus': 'SUCCESS',\n 'message': \"Hello Api Handler\"\n }\n\n def post(self):\n print(self)\n parser = reqparse.RequestParser()\n parser.add_argument('type', type=str)\n parser.add_argument('message', type=str)\n\n args = parser.parse_args()\n\n print(args)\n # note, the post req from frontend needs to match the strings here (e.g. 'type and 'message')\n\n request_type = args['type']\n request_json = args['message']\n # ret_status, ret_msg = ReturnData(request_type, request_json)\n # currently just returning the req straight\n ret_status = request_type\n ret_msg = request_json\n\n if ret_msg:\n message = \"Your Message Requested: {}\".format(ret_msg)\n else:\n message = \"No Msg\"\n \n final_ret = {\"status\": \"Success\", \"message\": message}\n\n return final_ret",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
# !/usr/bin/env python3
# -*- coding:utf-8 -*-
# @Time : 2021/05/08 20:06
# @Author : Yi
# @FileName: show_slices.py
import os
import pydicom
import glob
import shutil
import random
import numpy as np
import cv2
import skimage.io as io
from data_Parameter import parse_args
import matplotlib.pyplot as plt
def dir_create(path):
    """Create *path* as an empty directory.

    An existing non-empty directory is deleted and recreated; an existing
    empty directory is left untouched; a missing directory is created.

    :param path: directory path to (re)create
    :return: None
    """
    if os.path.exists(path) and os.listdir(path):
        # Wipe a non-empty directory so the caller always starts clean.
        shutil.rmtree(path)
    # exist_ok covers the "already exists and is empty" case in one call,
    # replacing the original's second exists() check.
    os.makedirs(path, exist_ok=True)
def read_dicom(path):
    """Read all DICOM slices of one case into a cubic float32 volume.

    Each slice is z-score normalised and pasted centred in the (x, y) plane
    of a dcm_size x dcm_size x dcm_size array, where dcm_size >= 720.

    :param path: directory of one case; the patient id is the token after
        the first underscore in the directory name
    :return: numpy.ndarray of shape (dcm_size, dcm_size, dcm_size), float32
    """
    print(os.path.basename(path))
    pi = os.path.basename(path).split("_")[1]
    dcm_size = len(glob.glob(path + "/*.dcm"))  # here: number of slice files
    # Slice files follow the fixed naming pattern E<pid>S101I<index>.dcm, 1-based.
    dcms = [
        path + "/E" + pi + "S101I%d.dcm" % dicom_slicei
        for dicom_slicei in range(1, dcm_size + 1)
    ]
    length = int(len(dcms))
    print(length)
    dcm_f = pydicom.read_file(dcms[0]).pixel_array
    # NOTE: dcm_size is reused from here on as the cube edge length (>= 720),
    # no longer the file count.
    dcm_size = max(max(dcm_f.shape), 720)
    # print(dcm_f.shape)
    dcm_img = np.zeros((dcm_size, dcm_size, dcm_size), dtype=np.float32)
    for dcmi in range(len(dcms)):
        cdcm = pydicom.read_file(dcms[dcmi]).pixel_array.astype(np.float32)
        # Per-slice z-score normalisation.
        cdcm -= np.mean(cdcm)
        cdcm /= np.std(cdcm)
        # Paste the slice centred in the (x, y) plane at depth index dcmi.
        dcm_img[
            dcm_size // 2 - cdcm.shape[0] // 2: dcm_size // 2 + cdcm.shape[0] // 2,
            dcm_size // 2 - cdcm.shape[1] // 2: dcm_size // 2 + cdcm.shape[1] // 2,
            dcmi,
        ] = cdcm
    return dcm_img
def show_image(input_dir):
    """Display axial slices 330..349 for one case (the 6th entry) in *input_dir*.

    Special cases noted by the original author: "P556", "P576", "P887"
    (160*640*640 volumes).
    """
    for case_name in os.listdir(input_dir)[5:6]:
        patient_id = case_name.split("_")[1]
        volume = read_dicom(input_dir + "/" + case_name)
        print("Dcm shape: ", volume.shape)
        for slice_idx in range(330, 350):
            figure = plt.figure(num=slice_idx, figsize=(10, 10))
            axis = figure.add_subplot(111)
            image = axis.imshow(volume[:, :, slice_idx], cmap='gray')
            axis.set_title(patient_id + '_' + str(slice_idx))
            plt.colorbar(image)
            plt.show()
def show_image_avail(input_dir):
    """Plot 15 randomly chosen annotated slice arrays (.npy) from *input_dir*."""
    for file_name in random.sample(os.listdir(input_dir), 15):
        slice_array = np.load(input_dir + '/' + file_name)
        figure = plt.figure(figsize=(10, 5))
        axis = figure.add_subplot(111)
        image = axis.imshow(slice_array, cmap='gray')
        axis.set_title(str(file_name))
        plt.colorbar(image)
        plt.show()
def show_mask(input_dir):
    """Show both mask channels (0: outer, 1: lumen) for 10 random .npy files."""
    for fig_id, file_name in enumerate(random.sample(os.listdir(input_dir), 10)):
        mask = np.load(input_dir + '/' + file_name)
        figure = plt.figure(num=fig_id, figsize=(10, 5))
        top = figure.add_subplot(211)
        top.imshow(mask[:, :, 0], cmap='gray')
        top.set_title(str(file_name) + '_outer')
        bottom = figure.add_subplot(212)
        bottom.imshow(mask[:, :, 1], cmap='gray')
        bottom.set_title(str(file_name) + '_luman')
        plt.show()
def show_mask_circle(input_dir):
    """Show the single-channel ("circle") mask for 10 random .npy files."""
    for file_name in random.sample(os.listdir(input_dir), 10):
        mask = np.load(input_dir + '/' + file_name)
        figure = plt.figure(figsize=(10, 5))
        axis = figure.add_subplot(111)
        image = axis.imshow(mask[:, :], cmap='gray')
        axis.set_title(str(file_name) + '_circle')
        plt.colorbar(image)
        plt.show()
def show_image_mask(image_path, mask_path):
    """Show 10 random slices (top) alongside their masks (bottom).

    Assumes *image_path* and *mask_path* hold .npy files with matching names.
    """
    for file_name in random.sample(os.listdir(image_path), 10):
        slice_array = np.load(image_path + '/' + file_name)
        mask_array = np.load(mask_path + '/' + file_name)
        figure = plt.figure(figsize=(10, 5))
        top = figure.add_subplot(211)
        top_image = top.imshow(slice_array, cmap='gray')
        top.set_title(str(file_name))
        plt.colorbar(top_image)
        bottom = figure.add_subplot(212)
        bottom_image = bottom.imshow(mask_array, cmap='gray')
        plt.colorbar(bottom_image)
        plt.show()
def main(args):
    """Drive the visualization helpers from parsed CLI arguments.

    Currently only shows random circle masks from the ICAR positive set;
    the commented-out calls are alternative inspections kept for convenience.

    :param args: namespace from parse_args(); assumed to provide
        `datasets_path` and `circle_mask_save_sep` attributes — TODO confirm
        against data_Parameter.parse_args.
    :return: None
    """
    image_input_dir = args.datasets_path  # raw dataset root (currently unused here)
    # image_avail_dir = args.image_save_sep_position + '/ICAR/positive'
    # image_avail_dir = args.image_save_sep_position + '/ICAR/negative'
    # circle_mask_dir=args.circle_mask_save_sep+'/ICAR/positive'
    circle_mask_dir = args.circle_mask_save_sep + '/ICAR/positive'
    # show_image(image_input_dir)  # show some random case images
    # show_image_avail(image_avail_dir)
    show_mask_circle(circle_mask_dir)
    # show_image_mask(image_avail_dir,circle_mask_dir)
if __name__ == '__main__':
    # Entry point: parse CLI arguments and run the selected visualization.
    args = parse_args()
    main(args)
|
normal
|
{
"blob_id": "4905b820f33619a80a9915d0603bc39e0d0368d9",
"index": 6175,
"step-1": "<mask token>\n\n\ndef dir_create(path):\n \"\"\"创造新的文件夹。\n\n :param path: 文件夹路径\n :return:\n \"\"\"\n if os.path.exists(path) and os.listdir(path) != []:\n shutil.rmtree(path)\n os.makedirs(path)\n if not os.path.exists(path):\n os.makedirs(path)\n\n\ndef read_dicom(path):\n \"\"\"读取一个病例所有的slices,并转成一个720*720*720的numpy.array.\n\n :param path: 一个病例dcm路径\n :return:\n \"\"\"\n print(os.path.basename(path))\n pi = os.path.basename(path).split('_')[1]\n dcm_size = len(glob.glob(path + '/*.dcm'))\n dcms = [(path + '/E' + pi + 'S101I%d.dcm' % dicom_slicei) for\n dicom_slicei in range(1, dcm_size + 1)]\n length = int(len(dcms))\n print(length)\n dcm_f = pydicom.read_file(dcms[0]).pixel_array\n dcm_size = max(max(dcm_f.shape), 720)\n dcm_img = np.zeros((dcm_size, dcm_size, dcm_size), dtype=np.float32)\n for dcmi in range(len(dcms)):\n cdcm = pydicom.read_file(dcms[dcmi]).pixel_array.astype(np.float32)\n cdcm -= np.mean(cdcm)\n cdcm /= np.std(cdcm)\n dcm_img[dcm_size // 2 - cdcm.shape[0] // 2:dcm_size // 2 + cdcm.\n shape[0] // 2, dcm_size // 2 - cdcm.shape[1] // 2:dcm_size // 2 +\n cdcm.shape[1] // 2, dcmi] = cdcm\n return dcm_img\n\n\ndef show_image(input_dir):\n \"\"\"随机展示一个病例一些病理图像。\n\n :param input_dir:\n :return:\n \"\"\"\n for casei in os.listdir(input_dir)[5:6]:\n pi = casei.split('_')[1]\n dcm_img = read_dicom(input_dir + '/' + casei)\n print('Dcm shape: ', dcm_img.shape)\n choices = range(330, 350)\n for i in choices:\n fig = plt.figure(num=i, figsize=(10, 10))\n ax = fig.add_subplot(111)\n img = ax.imshow(dcm_img[:, :, i], cmap='gray')\n ax.set_title(pi + '_' + str(i))\n plt.colorbar(img)\n plt.show()\n\n\ndef show_image_avail(input_dir):\n \"\"\"随机展示一个位置的一些有标注的病例图像。\n\n :param input_dir:\n :return:\n \"\"\"\n choices = random.sample(os.listdir(input_dir), 15)\n for file in choices:\n image_numpy = np.load(input_dir + '/' + file)\n fig = plt.figure(figsize=(10, 5))\n ax1 = fig.add_subplot(111)\n img1 = ax1.imshow(image_numpy, cmap='gray')\n 
ax1.set_title(str(file))\n plt.colorbar(img1)\n plt.show()\n\n\ndef show_mask(input_dir):\n \"\"\"随机展示一个位置标注的mask,2个channels.\n\n :param input_dir:\n :return:\n \"\"\"\n index = 0\n choices = random.sample(os.listdir(input_dir), 10)\n for file in choices:\n mask_numpy = np.load(input_dir + '/' + file)\n fig = plt.figure(num=index, figsize=(10, 5))\n ax1 = fig.add_subplot(211)\n ax1.imshow(mask_numpy[:, :, 0], cmap='gray')\n ax1.set_title(str(file) + '_outer')\n ax2 = fig.add_subplot(212)\n ax2.imshow(mask_numpy[:, :, 1], cmap='gray')\n ax2.set_title(str(file) + '_luman')\n plt.show()\n index += 1\n\n\ndef show_mask_circle(input_dir):\n \"\"\"随机展示一个位置标注的mask环。\n\n :param input_dir:\n :return:\n \"\"\"\n choices = random.sample(os.listdir(input_dir), 10)\n for file in choices:\n mask_numpy = np.load(input_dir + '/' + file)\n fig = plt.figure(figsize=(10, 5))\n ax1 = fig.add_subplot(111)\n img1 = ax1.imshow(mask_numpy[:, :], cmap='gray')\n ax1.set_title(str(file) + '_circle')\n plt.colorbar(img1)\n plt.show()\n\n\n<mask token>\n\n\ndef main(args):\n image_input_dir = args.datasets_path\n circle_mask_dir = args.circle_mask_save_sep + '/ICAR/positive'\n show_mask_circle(circle_mask_dir)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef dir_create(path):\n \"\"\"创造新的文件夹。\n\n :param path: 文件夹路径\n :return:\n \"\"\"\n if os.path.exists(path) and os.listdir(path) != []:\n shutil.rmtree(path)\n os.makedirs(path)\n if not os.path.exists(path):\n os.makedirs(path)\n\n\ndef read_dicom(path):\n \"\"\"读取一个病例所有的slices,并转成一个720*720*720的numpy.array.\n\n :param path: 一个病例dcm路径\n :return:\n \"\"\"\n print(os.path.basename(path))\n pi = os.path.basename(path).split('_')[1]\n dcm_size = len(glob.glob(path + '/*.dcm'))\n dcms = [(path + '/E' + pi + 'S101I%d.dcm' % dicom_slicei) for\n dicom_slicei in range(1, dcm_size + 1)]\n length = int(len(dcms))\n print(length)\n dcm_f = pydicom.read_file(dcms[0]).pixel_array\n dcm_size = max(max(dcm_f.shape), 720)\n dcm_img = np.zeros((dcm_size, dcm_size, dcm_size), dtype=np.float32)\n for dcmi in range(len(dcms)):\n cdcm = pydicom.read_file(dcms[dcmi]).pixel_array.astype(np.float32)\n cdcm -= np.mean(cdcm)\n cdcm /= np.std(cdcm)\n dcm_img[dcm_size // 2 - cdcm.shape[0] // 2:dcm_size // 2 + cdcm.\n shape[0] // 2, dcm_size // 2 - cdcm.shape[1] // 2:dcm_size // 2 +\n cdcm.shape[1] // 2, dcmi] = cdcm\n return dcm_img\n\n\ndef show_image(input_dir):\n \"\"\"随机展示一个病例一些病理图像。\n\n :param input_dir:\n :return:\n \"\"\"\n for casei in os.listdir(input_dir)[5:6]:\n pi = casei.split('_')[1]\n dcm_img = read_dicom(input_dir + '/' + casei)\n print('Dcm shape: ', dcm_img.shape)\n choices = range(330, 350)\n for i in choices:\n fig = plt.figure(num=i, figsize=(10, 10))\n ax = fig.add_subplot(111)\n img = ax.imshow(dcm_img[:, :, i], cmap='gray')\n ax.set_title(pi + '_' + str(i))\n plt.colorbar(img)\n plt.show()\n\n\ndef show_image_avail(input_dir):\n \"\"\"随机展示一个位置的一些有标注的病例图像。\n\n :param input_dir:\n :return:\n \"\"\"\n choices = random.sample(os.listdir(input_dir), 15)\n for file in choices:\n image_numpy = np.load(input_dir + '/' + file)\n fig = plt.figure(figsize=(10, 5))\n ax1 = fig.add_subplot(111)\n img1 = ax1.imshow(image_numpy, cmap='gray')\n 
ax1.set_title(str(file))\n plt.colorbar(img1)\n plt.show()\n\n\ndef show_mask(input_dir):\n \"\"\"随机展示一个位置标注的mask,2个channels.\n\n :param input_dir:\n :return:\n \"\"\"\n index = 0\n choices = random.sample(os.listdir(input_dir), 10)\n for file in choices:\n mask_numpy = np.load(input_dir + '/' + file)\n fig = plt.figure(num=index, figsize=(10, 5))\n ax1 = fig.add_subplot(211)\n ax1.imshow(mask_numpy[:, :, 0], cmap='gray')\n ax1.set_title(str(file) + '_outer')\n ax2 = fig.add_subplot(212)\n ax2.imshow(mask_numpy[:, :, 1], cmap='gray')\n ax2.set_title(str(file) + '_luman')\n plt.show()\n index += 1\n\n\ndef show_mask_circle(input_dir):\n \"\"\"随机展示一个位置标注的mask环。\n\n :param input_dir:\n :return:\n \"\"\"\n choices = random.sample(os.listdir(input_dir), 10)\n for file in choices:\n mask_numpy = np.load(input_dir + '/' + file)\n fig = plt.figure(figsize=(10, 5))\n ax1 = fig.add_subplot(111)\n img1 = ax1.imshow(mask_numpy[:, :], cmap='gray')\n ax1.set_title(str(file) + '_circle')\n plt.colorbar(img1)\n plt.show()\n\n\ndef show_image_mask(image_path, mask_path):\n \"\"\"随机展示一个位置的病例图像及其标注。\n\n :param image_path:\n :param mask_path:\n :return:\n \"\"\"\n files_choice = random.sample(os.listdir(image_path), 10)\n for file_name in files_choice:\n image_numpy = np.load(image_path + '/' + file_name)\n mask_numpy = np.load(mask_path + '/' + file_name)\n fig = plt.figure(figsize=(10, 5))\n ax1 = fig.add_subplot(211)\n img1 = ax1.imshow(image_numpy, cmap='gray')\n ax1.set_title(str(file_name))\n plt.colorbar(img1)\n ax2 = fig.add_subplot(212)\n img2 = ax2.imshow(mask_numpy, cmap='gray')\n plt.colorbar(img2)\n plt.show()\n\n\ndef main(args):\n image_input_dir = args.datasets_path\n circle_mask_dir = args.circle_mask_save_sep + '/ICAR/positive'\n show_mask_circle(circle_mask_dir)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef dir_create(path):\n \"\"\"创造新的文件夹。\n\n :param path: 文件夹路径\n :return:\n \"\"\"\n if os.path.exists(path) and os.listdir(path) != []:\n shutil.rmtree(path)\n os.makedirs(path)\n if not os.path.exists(path):\n os.makedirs(path)\n\n\ndef read_dicom(path):\n \"\"\"读取一个病例所有的slices,并转成一个720*720*720的numpy.array.\n\n :param path: 一个病例dcm路径\n :return:\n \"\"\"\n print(os.path.basename(path))\n pi = os.path.basename(path).split('_')[1]\n dcm_size = len(glob.glob(path + '/*.dcm'))\n dcms = [(path + '/E' + pi + 'S101I%d.dcm' % dicom_slicei) for\n dicom_slicei in range(1, dcm_size + 1)]\n length = int(len(dcms))\n print(length)\n dcm_f = pydicom.read_file(dcms[0]).pixel_array\n dcm_size = max(max(dcm_f.shape), 720)\n dcm_img = np.zeros((dcm_size, dcm_size, dcm_size), dtype=np.float32)\n for dcmi in range(len(dcms)):\n cdcm = pydicom.read_file(dcms[dcmi]).pixel_array.astype(np.float32)\n cdcm -= np.mean(cdcm)\n cdcm /= np.std(cdcm)\n dcm_img[dcm_size // 2 - cdcm.shape[0] // 2:dcm_size // 2 + cdcm.\n shape[0] // 2, dcm_size // 2 - cdcm.shape[1] // 2:dcm_size // 2 +\n cdcm.shape[1] // 2, dcmi] = cdcm\n return dcm_img\n\n\ndef show_image(input_dir):\n \"\"\"随机展示一个病例一些病理图像。\n\n :param input_dir:\n :return:\n \"\"\"\n for casei in os.listdir(input_dir)[5:6]:\n pi = casei.split('_')[1]\n dcm_img = read_dicom(input_dir + '/' + casei)\n print('Dcm shape: ', dcm_img.shape)\n choices = range(330, 350)\n for i in choices:\n fig = plt.figure(num=i, figsize=(10, 10))\n ax = fig.add_subplot(111)\n img = ax.imshow(dcm_img[:, :, i], cmap='gray')\n ax.set_title(pi + '_' + str(i))\n plt.colorbar(img)\n plt.show()\n\n\ndef show_image_avail(input_dir):\n \"\"\"随机展示一个位置的一些有标注的病例图像。\n\n :param input_dir:\n :return:\n \"\"\"\n choices = random.sample(os.listdir(input_dir), 15)\n for file in choices:\n image_numpy = np.load(input_dir + '/' + file)\n fig = plt.figure(figsize=(10, 5))\n ax1 = fig.add_subplot(111)\n img1 = ax1.imshow(image_numpy, cmap='gray')\n 
ax1.set_title(str(file))\n plt.colorbar(img1)\n plt.show()\n\n\ndef show_mask(input_dir):\n \"\"\"随机展示一个位置标注的mask,2个channels.\n\n :param input_dir:\n :return:\n \"\"\"\n index = 0\n choices = random.sample(os.listdir(input_dir), 10)\n for file in choices:\n mask_numpy = np.load(input_dir + '/' + file)\n fig = plt.figure(num=index, figsize=(10, 5))\n ax1 = fig.add_subplot(211)\n ax1.imshow(mask_numpy[:, :, 0], cmap='gray')\n ax1.set_title(str(file) + '_outer')\n ax2 = fig.add_subplot(212)\n ax2.imshow(mask_numpy[:, :, 1], cmap='gray')\n ax2.set_title(str(file) + '_luman')\n plt.show()\n index += 1\n\n\ndef show_mask_circle(input_dir):\n \"\"\"随机展示一个位置标注的mask环。\n\n :param input_dir:\n :return:\n \"\"\"\n choices = random.sample(os.listdir(input_dir), 10)\n for file in choices:\n mask_numpy = np.load(input_dir + '/' + file)\n fig = plt.figure(figsize=(10, 5))\n ax1 = fig.add_subplot(111)\n img1 = ax1.imshow(mask_numpy[:, :], cmap='gray')\n ax1.set_title(str(file) + '_circle')\n plt.colorbar(img1)\n plt.show()\n\n\ndef show_image_mask(image_path, mask_path):\n \"\"\"随机展示一个位置的病例图像及其标注。\n\n :param image_path:\n :param mask_path:\n :return:\n \"\"\"\n files_choice = random.sample(os.listdir(image_path), 10)\n for file_name in files_choice:\n image_numpy = np.load(image_path + '/' + file_name)\n mask_numpy = np.load(mask_path + '/' + file_name)\n fig = plt.figure(figsize=(10, 5))\n ax1 = fig.add_subplot(211)\n img1 = ax1.imshow(image_numpy, cmap='gray')\n ax1.set_title(str(file_name))\n plt.colorbar(img1)\n ax2 = fig.add_subplot(212)\n img2 = ax2.imshow(mask_numpy, cmap='gray')\n plt.colorbar(img2)\n plt.show()\n\n\ndef main(args):\n image_input_dir = args.datasets_path\n circle_mask_dir = args.circle_mask_save_sep + '/ICAR/positive'\n show_mask_circle(circle_mask_dir)\n\n\nif __name__ == '__main__':\n args = parse_args()\n main(args)\n",
"step-4": "import os\nimport pydicom\nimport glob\nimport shutil\nimport random\nimport numpy as np\nimport cv2\nimport skimage.io as io\nfrom data_Parameter import parse_args\nimport matplotlib.pyplot as plt\n\n\ndef dir_create(path):\n \"\"\"创造新的文件夹。\n\n :param path: 文件夹路径\n :return:\n \"\"\"\n if os.path.exists(path) and os.listdir(path) != []:\n shutil.rmtree(path)\n os.makedirs(path)\n if not os.path.exists(path):\n os.makedirs(path)\n\n\ndef read_dicom(path):\n \"\"\"读取一个病例所有的slices,并转成一个720*720*720的numpy.array.\n\n :param path: 一个病例dcm路径\n :return:\n \"\"\"\n print(os.path.basename(path))\n pi = os.path.basename(path).split('_')[1]\n dcm_size = len(glob.glob(path + '/*.dcm'))\n dcms = [(path + '/E' + pi + 'S101I%d.dcm' % dicom_slicei) for\n dicom_slicei in range(1, dcm_size + 1)]\n length = int(len(dcms))\n print(length)\n dcm_f = pydicom.read_file(dcms[0]).pixel_array\n dcm_size = max(max(dcm_f.shape), 720)\n dcm_img = np.zeros((dcm_size, dcm_size, dcm_size), dtype=np.float32)\n for dcmi in range(len(dcms)):\n cdcm = pydicom.read_file(dcms[dcmi]).pixel_array.astype(np.float32)\n cdcm -= np.mean(cdcm)\n cdcm /= np.std(cdcm)\n dcm_img[dcm_size // 2 - cdcm.shape[0] // 2:dcm_size // 2 + cdcm.\n shape[0] // 2, dcm_size // 2 - cdcm.shape[1] // 2:dcm_size // 2 +\n cdcm.shape[1] // 2, dcmi] = cdcm\n return dcm_img\n\n\ndef show_image(input_dir):\n \"\"\"随机展示一个病例一些病理图像。\n\n :param input_dir:\n :return:\n \"\"\"\n for casei in os.listdir(input_dir)[5:6]:\n pi = casei.split('_')[1]\n dcm_img = read_dicom(input_dir + '/' + casei)\n print('Dcm shape: ', dcm_img.shape)\n choices = range(330, 350)\n for i in choices:\n fig = plt.figure(num=i, figsize=(10, 10))\n ax = fig.add_subplot(111)\n img = ax.imshow(dcm_img[:, :, i], cmap='gray')\n ax.set_title(pi + '_' + str(i))\n plt.colorbar(img)\n plt.show()\n\n\ndef show_image_avail(input_dir):\n \"\"\"随机展示一个位置的一些有标注的病例图像。\n\n :param input_dir:\n :return:\n \"\"\"\n choices = random.sample(os.listdir(input_dir), 15)\n for file 
in choices:\n image_numpy = np.load(input_dir + '/' + file)\n fig = plt.figure(figsize=(10, 5))\n ax1 = fig.add_subplot(111)\n img1 = ax1.imshow(image_numpy, cmap='gray')\n ax1.set_title(str(file))\n plt.colorbar(img1)\n plt.show()\n\n\ndef show_mask(input_dir):\n \"\"\"随机展示一个位置标注的mask,2个channels.\n\n :param input_dir:\n :return:\n \"\"\"\n index = 0\n choices = random.sample(os.listdir(input_dir), 10)\n for file in choices:\n mask_numpy = np.load(input_dir + '/' + file)\n fig = plt.figure(num=index, figsize=(10, 5))\n ax1 = fig.add_subplot(211)\n ax1.imshow(mask_numpy[:, :, 0], cmap='gray')\n ax1.set_title(str(file) + '_outer')\n ax2 = fig.add_subplot(212)\n ax2.imshow(mask_numpy[:, :, 1], cmap='gray')\n ax2.set_title(str(file) + '_luman')\n plt.show()\n index += 1\n\n\ndef show_mask_circle(input_dir):\n \"\"\"随机展示一个位置标注的mask环。\n\n :param input_dir:\n :return:\n \"\"\"\n choices = random.sample(os.listdir(input_dir), 10)\n for file in choices:\n mask_numpy = np.load(input_dir + '/' + file)\n fig = plt.figure(figsize=(10, 5))\n ax1 = fig.add_subplot(111)\n img1 = ax1.imshow(mask_numpy[:, :], cmap='gray')\n ax1.set_title(str(file) + '_circle')\n plt.colorbar(img1)\n plt.show()\n\n\ndef show_image_mask(image_path, mask_path):\n \"\"\"随机展示一个位置的病例图像及其标注。\n\n :param image_path:\n :param mask_path:\n :return:\n \"\"\"\n files_choice = random.sample(os.listdir(image_path), 10)\n for file_name in files_choice:\n image_numpy = np.load(image_path + '/' + file_name)\n mask_numpy = np.load(mask_path + '/' + file_name)\n fig = plt.figure(figsize=(10, 5))\n ax1 = fig.add_subplot(211)\n img1 = ax1.imshow(image_numpy, cmap='gray')\n ax1.set_title(str(file_name))\n plt.colorbar(img1)\n ax2 = fig.add_subplot(212)\n img2 = ax2.imshow(mask_numpy, cmap='gray')\n plt.colorbar(img2)\n plt.show()\n\n\ndef main(args):\n image_input_dir = args.datasets_path\n circle_mask_dir = args.circle_mask_save_sep + '/ICAR/positive'\n show_mask_circle(circle_mask_dir)\n\n\nif __name__ == '__main__':\n 
args = parse_args()\n main(args)\n",
"step-5": "# !/usr/bin/env python3\n# -*- coding:utf-8 -*-\n\n# @Time : 2021/05/08 20:06\n# @Author : Yi\n# @FileName: show_slices.py\n\nimport os\nimport pydicom\nimport glob\nimport shutil\nimport random\nimport numpy as np\nimport cv2\nimport skimage.io as io\n\nfrom data_Parameter import parse_args\nimport matplotlib.pyplot as plt\n\n\ndef dir_create(path):\n \"\"\"创造新的文件夹。\n\n :param path: 文件夹路径\n :return:\n \"\"\"\n if (os.path.exists(path)) and (os.listdir(path) != []):\n shutil.rmtree(path)\n os.makedirs(path)\n if not os.path.exists(path):\n os.makedirs(path)\n\n\ndef read_dicom(path):\n \"\"\"读取一个病例所有的slices,并转成一个720*720*720的numpy.array.\n\n :param path: 一个病例dcm路径\n :return:\n \"\"\"\n print(os.path.basename(path))\n\n pi = os.path.basename(path).split(\"_\")[1]\n dcm_size = len(glob.glob(path + \"/*.dcm\"))\n dcms = [\n path + \"/E\" + pi + \"S101I%d.dcm\" % dicom_slicei\n for dicom_slicei in range(1, dcm_size + 1)\n ]\n\n length = int(len(dcms))\n print(length)\n\n dcm_f = pydicom.read_file(dcms[0]).pixel_array\n dcm_size = max(max(dcm_f.shape), 720)\n # print(dcm_f.shape)\n\n dcm_img = np.zeros((dcm_size, dcm_size, dcm_size), dtype=np.float32)\n\n for dcmi in range(len(dcms)):\n cdcm = pydicom.read_file(dcms[dcmi]).pixel_array.astype(np.float32)\n\n cdcm -= np.mean(cdcm)\n cdcm /= np.std(cdcm)\n\n dcm_img[\n dcm_size // 2 - cdcm.shape[0] // 2: dcm_size // 2 + cdcm.shape[0] // 2,\n dcm_size // 2 - cdcm.shape[1] // 2: dcm_size // 2 + cdcm.shape[1] // 2,\n dcmi,\n ] = cdcm\n\n return dcm_img\n\n\ndef show_image(input_dir):\n \"\"\"随机展示一个病例一些病理图像。\n\n :param input_dir:\n :return:\n \"\"\"\n\n # special cases: \"P556\", \"P576\", \"P887\",160*640*640\n for casei in os.listdir(input_dir)[5:6]:\n pi = casei.split(\"_\")[1]\n dcm_img = read_dicom(input_dir + \"/\" + casei)\n print(\"Dcm shape: \", dcm_img.shape)\n\n # choices = random.sample(list(np.arange(0, 720, 1)), 10)\n # choices.append(316)\n\n choices = range(330,350)\n\n for i in choices:\n fig = 
plt.figure(num=i, figsize=(10, 10))\n ax = fig.add_subplot(111)\n img=ax.imshow(dcm_img[:, :, i], cmap='gray')\n ax.set_title(pi + '_' + str(i))\n plt.colorbar(img)\n plt.show()\n\n\ndef show_image_avail(input_dir):\n \"\"\"随机展示一个位置的一些有标注的病例图像。\n\n :param input_dir:\n :return:\n \"\"\"\n\n choices = random.sample(os.listdir(input_dir), 15)\n for file in choices:\n image_numpy = np.load(input_dir + '/' + file)\n\n fig = plt.figure(figsize=(10, 5))\n ax1 = fig.add_subplot(111)\n img1=ax1.imshow(image_numpy, cmap='gray')\n ax1.set_title(str(file))\n plt.colorbar(img1)\n plt.show()\n\n\ndef show_mask(input_dir):\n \"\"\"随机展示一个位置标注的mask,2个channels.\n\n :param input_dir:\n :return:\n \"\"\"\n\n index = 0\n choices = random.sample(os.listdir(input_dir), 10)\n for file in choices:\n mask_numpy = np.load(input_dir + '/' + file)\n\n fig = plt.figure(num=index, figsize=(10, 5))\n ax1 = fig.add_subplot(211)\n ax1.imshow(mask_numpy[:, :, 0], cmap='gray')\n ax1.set_title(str(file) + '_outer')\n ax2 = fig.add_subplot(212)\n ax2.imshow(mask_numpy[:, :, 1], cmap='gray')\n ax2.set_title(str(file) + '_luman')\n plt.show()\n index += 1\n\n\ndef show_mask_circle(input_dir):\n \"\"\"随机展示一个位置标注的mask环。\n\n :param input_dir:\n :return:\n \"\"\"\n\n choices = random.sample(os.listdir(input_dir), 10)\n for file in choices:\n mask_numpy = np.load(input_dir + '/' + file)\n\n fig = plt.figure(figsize=(10, 5))\n ax1 = fig.add_subplot(111)\n img1=ax1.imshow(mask_numpy[:, :], cmap='gray')\n ax1.set_title(str(file) + '_circle')\n plt.colorbar(img1)\n\n plt.show()\n\n\ndef show_image_mask(image_path,mask_path):\n \"\"\"随机展示一个位置的病例图像及其标注。\n\n :param image_path:\n :param mask_path:\n :return:\n \"\"\"\n\n files_choice=random.sample(os.listdir(image_path),10)\n\n for file_name in files_choice:\n image_numpy=np.load(image_path+'/'+file_name)\n mask_numpy =np.load(mask_path+'/'+file_name)\n\n fig =plt.figure(figsize=(10,5))\n ax1 =fig.add_subplot(211)\n img1=ax1.imshow(image_numpy,cmap='gray')\n 
ax1.set_title(str(file_name))\n plt.colorbar(img1)\n\n ax2=fig.add_subplot(212)\n img2=ax2.imshow(mask_numpy,cmap='gray')\n # ax2.set_title(str(file_name))\n plt.colorbar(img2)\n plt.show()\n\n\ndef main(args):\n image_input_dir = args.datasets_path\n\n # image_avail_dir = args.image_save_sep_position + '/ICAR/positive'\n # image_avail_dir = args.image_save_sep_position + '/ICAR/negative'\n\n # circle_mask_dir=args.circle_mask_save_sep+'/ICAR/positive'\n circle_mask_dir = args.circle_mask_save_sep + '/ICAR/positive'\n\n # show_image(image_input_dir) # 随机展示一些病例图像。\n # show_image_avail(image_avail_dir)\n show_mask_circle(circle_mask_dir)\n\n # show_image_mask(image_avail_dir,circle_mask_dir)\n\n\nif __name__ == '__main__':\n args = parse_args()\n main(args)",
"step-ids": [
7,
8,
9,
10,
11
]
}
|
[
7,
8,
9,
10,
11
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
print(type(list1))
print(list1[0])
print(list1[len(list1) - 1])
<|reserved_special_token_0|>
print(list1)
<|reserved_special_token_0|>
list4
<|reserved_special_token_0|>
list4
<|reserved_special_token_0|>
list4
<|reserved_special_token_0|>
list4
<|reserved_special_token_0|>
list4
<|reserved_special_token_0|>
list4
list1.append('Sam')
list1
list1.remove(6781)
list1
del list1[2]
list1
<|reserved_special_token_1|>
list1 = ['Sam', 'Rocky', 1989, 1890]
print(type(list1))
print(list1[0])
print(list1[len(list1) - 1])
list1[0] = 6781
print(list1)
list4 = list1[2:4]
list4
list4 = list1[::-1]
list4
list4 = list1[::2]
list4
list4 = list1[2:0:-1]
list4
list4 = list1 + ['Hello', 2]
list4
list4 = list1 * 2
list4
list1.append('Sam')
list1
list1.remove(6781)
list1
del list1[2]
list1
<|reserved_special_token_1|>
# joiner = '+'
# seq = ["Sushil","Bahadur","KC"]
# txt = joiner.join(seq)
# txt
# txt = " Sam "
# ljus = txt.ljust(7,"*")
# ljus
# txtstrip = txt.strip().strip('S')
# txtstrip
# txt = "This is my world."
# txtSplit = txt.split(maxsplit=1)
# txtSplit
# name = input("Enter your full name")
# name = name.strip()
# txt = name.split()
# print("First Name:",txt[0])
# print("Last Name:",txt[1])
# txt = "Amet sint ipsum aliquip ea velit minim.\n \
# Consequat esse do laboris nisi proident nisi tempor magna.\n \
# Occaecat occaecat id qui veniam deserunt ullamco laborum consequat sint ullamco.\n \
# Eu Lorem nisi mollit pariatur commodo minim eu reprehenderit magna ipsum consequat."
# print(txt)
# newData = txt.splitlines()
# newData
# Sequence
#2. List
# Demo of Python list basics: indexing, slicing, concatenation, and mutation.
# Bare expressions (e.g. `list4`) echo the value in a REPL/notebook session.
list1 = ["Sam", "Rocky", 1989, 1890]
print(type(list1))
print(list1[0])  # first element
print(list1[len(list1)-1])  # last element
list1[0] = 6781  # lists are mutable: replace the first element
print(list1)
list4 = list1[2:4]  # slice: elements at indices 2 and 3
list4
list4 = list1[::-1]  # reversed copy
list4
list4 = list1[::2]  # every second element
list4
list4 = list1[2:0:-1]  # indices 2 and 1, walking backwards
list4
list4 = list1+['Hello',2]  # concatenation builds a new list
list4
list4 = list1*2  # repetition
list4
list1.append("Sam")  # add to the end
list1
list1.remove(6781)  # remove first occurrence by value
list1
del list1[2]  # remove by index
list1
|
flexible
|
{
"blob_id": "32b22cccac75c87b8638c76c0c6d27db0de4d750",
"index": 8480,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(type(list1))\nprint(list1[0])\nprint(list1[len(list1) - 1])\n<mask token>\nprint(list1)\n<mask token>\nlist4\n<mask token>\nlist4\n<mask token>\nlist4\n<mask token>\nlist4\n<mask token>\nlist4\n<mask token>\nlist4\nlist1.append('Sam')\nlist1\nlist1.remove(6781)\nlist1\ndel list1[2]\nlist1\n",
"step-3": "list1 = ['Sam', 'Rocky', 1989, 1890]\nprint(type(list1))\nprint(list1[0])\nprint(list1[len(list1) - 1])\nlist1[0] = 6781\nprint(list1)\nlist4 = list1[2:4]\nlist4\nlist4 = list1[::-1]\nlist4\nlist4 = list1[::2]\nlist4\nlist4 = list1[2:0:-1]\nlist4\nlist4 = list1 + ['Hello', 2]\nlist4\nlist4 = list1 * 2\nlist4\nlist1.append('Sam')\nlist1\nlist1.remove(6781)\nlist1\ndel list1[2]\nlist1\n",
"step-4": "# joiner = '+'\n# seq = [\"Sushil\",\"Bahadur\",\"KC\"]\n# txt = joiner.join(seq)\n# txt\n# txt = \" Sam \"\n# ljus = txt.ljust(7,\"*\")\n# ljus\n# txtstrip = txt.strip().strip('S')\n# txtstrip\n# txt = \"This is my world.\"\n# txtSplit = txt.split(maxsplit=1)\n# txtSplit\n\n# name = input(\"Enter your full name\")\n# name = name.strip()\n# txt = name.split()\n# print(\"First Name:\",txt[0])\n# print(\"Last Name:\",txt[1])\n\n\n# txt = \"Amet sint ipsum aliquip ea velit minim.\\n \\\n# Consequat esse do laboris nisi proident nisi tempor magna.\\n \\\n# Occaecat occaecat id qui veniam deserunt ullamco laborum consequat sint ullamco.\\n \\\n# Eu Lorem nisi mollit pariatur commodo minim eu reprehenderit magna ipsum consequat.\"\n# print(txt)\n# newData = txt.splitlines()\n# newData\n\n# Sequence\n#2. List\nlist1 = [\"Sam\", \"Rocky\", 1989, 1890]\nprint(type(list1))\nprint(list1[0])\nprint(list1[len(list1)-1])\nlist1[0] = 6781\nprint(list1)\nlist4 = list1[2:4]\nlist4\nlist4 = list1[::-1]\nlist4\nlist4 = list1[::2]\nlist4\nlist4 = list1[2:0:-1]\nlist4\nlist4 = list1+['Hello',2]\nlist4\nlist4 = list1*2\nlist4\nlist1.append(\"Sam\")\nlist1\nlist1.remove(6781)\nlist1\ndel list1[2]\nlist1\n\n\n\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
# Package entry point: run the htmap CLI when executed as `python -m <package>`.
from .cli import cli
if __name__ == "__main__":
    # Forward the CLI's exit status to the interpreter.
    exit(cli.main(prog_name="htmap"))
|
normal
|
{
"blob_id": "069338b188f3cf16357b2502cbb3130b69918bd9",
"index": 286,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif __name__ == '__main__':\n exit(cli.main(prog_name='htmap'))\n",
"step-3": "from .cli import cli\nif __name__ == '__main__':\n exit(cli.main(prog_name='htmap'))\n",
"step-4": "from .cli import cli\n\nif __name__ == \"__main__\":\n exit(cli.main(prog_name=\"htmap\"))\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
class Solution:
<|reserved_special_token_0|>
class Solution:
def countArrangement(self, n: int) ->int:
@cache
def dfs(bm, i):
if i == 0:
return 1
cnt = 0
for num in range(n):
if not bm & 1 << num and ((num + 1) % i == 0 or i % (num +
1) == 0):
cnt += dfs(bm ^ 1 << num, i - 1)
return cnt
return dfs(0, n)
class Solution:
def countArrangement(self, n: int) ->int:
def count(i, nums):
if i == 1:
return 1
return sum(count(i - 1, nums - {num}) for num in nums if num %
i == 0 or i % num == 0)
return count(n, set(range(1, n + 1)))
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Solution:
def countArrangement(self, n: int) ->int:
bitset_total = 2 ** n
dp = [[(0) for _ in range(bitset_total)] for _ in range(n + 1)]
dp[0][0] = 1
for i in range(1, n + 1):
for bm in range(bitset_total):
for num in range(n):
if bm & 1 << num and ((num + 1) % i == 0 or i % (num +
1) == 0):
dp[i][bm] += dp[i - 1][bm ^ 1 << num]
return dp[-1][-1]
class Solution:
def countArrangement(self, n: int) ->int:
@cache
def dfs(bm, i):
if i == 0:
return 1
cnt = 0
for num in range(n):
if not bm & 1 << num and ((num + 1) % i == 0 or i % (num +
1) == 0):
cnt += dfs(bm ^ 1 << num, i - 1)
return cnt
return dfs(0, n)
class Solution:
def countArrangement(self, n: int) ->int:
def count(i, nums):
if i == 1:
return 1
return sum(count(i - 1, nums - {num}) for num in nums if num %
i == 0 or i % num == 0)
return count(n, set(range(1, n + 1)))
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Solution:
<|reserved_special_token_0|>
class Solution:
def countArrangement(self, n: int) ->int:
bitset_total = 2 ** n
dp = [[(0) for _ in range(bitset_total)] for _ in range(n + 1)]
dp[0][0] = 1
for i in range(1, n + 1):
for bm in range(bitset_total):
for num in range(n):
if bm & 1 << num and ((num + 1) % i == 0 or i % (num +
1) == 0):
dp[i][bm] += dp[i - 1][bm ^ 1 << num]
return dp[-1][-1]
class Solution:
def countArrangement(self, n: int) ->int:
@cache
def dfs(bm, i):
if i == 0:
return 1
cnt = 0
for num in range(n):
if not bm & 1 << num and ((num + 1) % i == 0 or i % (num +
1) == 0):
cnt += dfs(bm ^ 1 << num, i - 1)
return cnt
return dfs(0, n)
class Solution:
def countArrangement(self, n: int) ->int:
def count(i, nums):
if i == 1:
return 1
return sum(count(i - 1, nums - {num}) for num in nums if num %
i == 0 or i % num == 0)
return count(n, set(range(1, n + 1)))
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Solution:
def countArrangement(self, n: int) ->int:
cache = {}
def helper(perm):
digits = len(perm)
if digits == 1:
return 1
if perm in cache:
return cache[perm]
cnt = 0
for i in range(digits):
if perm[i] % digits == 0 or digits % perm[i] == 0:
cnt += helper(perm[:i] + perm[i + 1:])
cache[perm] = cnt
return cnt
return helper(tuple(range(1, n + 1)))
class Solution:
def countArrangement(self, n: int) ->int:
bitset_total = 2 ** n
dp = [[(0) for _ in range(bitset_total)] for _ in range(n + 1)]
dp[0][0] = 1
for i in range(1, n + 1):
for bm in range(bitset_total):
for num in range(n):
if bm & 1 << num and ((num + 1) % i == 0 or i % (num +
1) == 0):
dp[i][bm] += dp[i - 1][bm ^ 1 << num]
return dp[-1][-1]
class Solution:
def countArrangement(self, n: int) ->int:
@cache
def dfs(bm, i):
if i == 0:
return 1
cnt = 0
for num in range(n):
if not bm & 1 << num and ((num + 1) % i == 0 or i % (num +
1) == 0):
cnt += dfs(bm ^ 1 << num, i - 1)
return cnt
return dfs(0, n)
class Solution:
def countArrangement(self, n: int) ->int:
def count(i, nums):
if i == 1:
return 1
return sum(count(i - 1, nums - {num}) for num in nums if num %
i == 0 or i % num == 0)
return count(n, set(range(1, n + 1)))
<|reserved_special_token_1|>
from __future__ import annotations
from functools import cache
class Solution:
    def countArrangement(self, n: int) -> int:
        """Count beautiful arrangements of 1..n (LeetCode 526).

        A permutation is beautiful if for every 1-indexed position i,
        perm[i] is divisible by i or i is divisible by perm[i].

        Fills positions from n down to 1; the state is the tuple of numbers
        still unplaced. Renamed the memo dict from `cache` to `memo` so it
        no longer shadows the `cache` imported from functools at file top.

        :param n: number of values to arrange (n >= 1)
        :return: count of beautiful arrangements
        """
        memo = {}  # tuple of unplaced numbers -> count of beautiful completions

        def helper(perm):
            digits = len(perm)  # also the position currently being filled
            if digits == 1:
                # Position 1 accepts any number.
                return 1
            if perm in memo:
                return memo[perm]
            cnt = 0
            for i in range(digits):
                if perm[i] % digits == 0 or digits % perm[i] == 0:
                    cnt += helper(perm[:i] + perm[i + 1:])
            memo[perm] = cnt
            return cnt

        return helper(tuple(range(1, n + 1)))
class Solution:
    def countArrangement(self, n: int) -> int:
        """Count beautiful arrangements via bottom-up bitmask DP.

        table[pos][mask] is the number of ways to fill positions 1..pos with
        exactly the numbers whose bits are set in mask. States whose popcount
        differs from pos stay 0, since only table[0][0] seeds the recurrence.

        :param n: number of values to arrange
        :return: count of beautiful arrangements
        """
        total_masks = 1 << n
        table = [[0] * total_masks for _ in range(n + 1)]
        table[0][0] = 1  # base case: empty prefix, nothing placed
        for pos in range(1, n + 1):
            row, prev = table[pos], table[pos - 1]
            for mask in range(total_masks):
                ways = 0
                for bit in range(n):
                    value = bit + 1
                    # value must be present in mask and divide / be divided by pos
                    if mask >> bit & 1 and (value % pos == 0 or pos % value == 0):
                        ways += prev[mask ^ (1 << bit)]
                row[mask] = ways
        return table[n][total_masks - 1]
# bm is binary mask for visited numbers.
# i is current place we want to fill.
# Idea is to start from the end, and fill places in opposite direction,
# because for big numbers we potentially have less candidates.
# How dfs(bm, pl) works:
# If we reached place 0 and the process was not interrupted so far,
# it means that we found a beautiful arrangement.
# For each number 1, 2, ..., n we try to put this number on place pl:
# we need to check two conditions: first, that this place is still empty
# (using the bitmask), and secondly that one of the two properties of a
# beautiful arrangement holds. In this case we add dfs(bm^1<<i, pl - 1)
# to the final answer.
# Finally, we run dfs(0, n): starting from the last place with an empty bit-mask.
class Solution:
    def countArrangement(self, n: int) -> int:
        """Count beautiful arrangements with a memoized top-down search.

        State is (used-numbers bitmask, position to fill); positions are
        filled from n down to 1 so the base case is position 0.

        :param n: number of values to arrange
        :return: count of beautiful arrangements
        """
        @cache
        def ways(used, pos):
            if pos == 0:
                # Every position was filled legally: one arrangement found.
                return 1
            return sum(
                ways(used | (1 << bit), pos - 1)
                for bit in range(n)
                if not used & (1 << bit)
                and ((bit + 1) % pos == 0 or pos % (bit + 1) == 0)
            )

        return ways(0, n)
# nums is the set of still available numbers.
# Note that my i goes downwards, from n to 1. Because position i = 1
# can hold any number, so I don't even have to check whether the last
# remaining number fits there. Also, position i = 2 happily holds
# every second number and i = 3 happily holds every third number,
# so filling the lowest positions last has a relatively high chance of success.
class Solution:
    def countArrangement(self, n: int) -> int:
        """Count beautiful arrangements by recursive set-based placement.

        Positions are filled from n down to 1; `pool` is the set of numbers
        not yet placed. Position 1 accepts any number, so the last remaining
        value always fits and no check is needed at the base case.

        :param n: number of values to arrange
        :return: count of beautiful arrangements
        """
        def place(pos, pool):
            if pos == 1:
                return 1
            total = 0
            for candidate in pool:
                if candidate % pos == 0 or pos % candidate == 0:
                    total += place(pos - 1, pool - {candidate})
            return total

        return place(n, set(range(1, n + 1)))
|
flexible
|
{
"blob_id": "e6acc7b022001d8419095ad6364a6ae9504ec7aa",
"index": 508,
"step-1": "<mask token>\n\n\nclass Solution:\n <mask token>\n\n\nclass Solution:\n\n def countArrangement(self, n: int) ->int:\n\n @cache\n def dfs(bm, i):\n if i == 0:\n return 1\n cnt = 0\n for num in range(n):\n if not bm & 1 << num and ((num + 1) % i == 0 or i % (num + \n 1) == 0):\n cnt += dfs(bm ^ 1 << num, i - 1)\n return cnt\n return dfs(0, n)\n\n\nclass Solution:\n\n def countArrangement(self, n: int) ->int:\n\n def count(i, nums):\n if i == 1:\n return 1\n return sum(count(i - 1, nums - {num}) for num in nums if num %\n i == 0 or i % num == 0)\n return count(n, set(range(1, n + 1)))\n",
"step-2": "<mask token>\n\n\nclass Solution:\n\n def countArrangement(self, n: int) ->int:\n bitset_total = 2 ** n\n dp = [[(0) for _ in range(bitset_total)] for _ in range(n + 1)]\n dp[0][0] = 1\n for i in range(1, n + 1):\n for bm in range(bitset_total):\n for num in range(n):\n if bm & 1 << num and ((num + 1) % i == 0 or i % (num + \n 1) == 0):\n dp[i][bm] += dp[i - 1][bm ^ 1 << num]\n return dp[-1][-1]\n\n\nclass Solution:\n\n def countArrangement(self, n: int) ->int:\n\n @cache\n def dfs(bm, i):\n if i == 0:\n return 1\n cnt = 0\n for num in range(n):\n if not bm & 1 << num and ((num + 1) % i == 0 or i % (num + \n 1) == 0):\n cnt += dfs(bm ^ 1 << num, i - 1)\n return cnt\n return dfs(0, n)\n\n\nclass Solution:\n\n def countArrangement(self, n: int) ->int:\n\n def count(i, nums):\n if i == 1:\n return 1\n return sum(count(i - 1, nums - {num}) for num in nums if num %\n i == 0 or i % num == 0)\n return count(n, set(range(1, n + 1)))\n",
"step-3": "<mask token>\n\n\nclass Solution:\n <mask token>\n\n\nclass Solution:\n\n def countArrangement(self, n: int) ->int:\n bitset_total = 2 ** n\n dp = [[(0) for _ in range(bitset_total)] for _ in range(n + 1)]\n dp[0][0] = 1\n for i in range(1, n + 1):\n for bm in range(bitset_total):\n for num in range(n):\n if bm & 1 << num and ((num + 1) % i == 0 or i % (num + \n 1) == 0):\n dp[i][bm] += dp[i - 1][bm ^ 1 << num]\n return dp[-1][-1]\n\n\nclass Solution:\n\n def countArrangement(self, n: int) ->int:\n\n @cache\n def dfs(bm, i):\n if i == 0:\n return 1\n cnt = 0\n for num in range(n):\n if not bm & 1 << num and ((num + 1) % i == 0 or i % (num + \n 1) == 0):\n cnt += dfs(bm ^ 1 << num, i - 1)\n return cnt\n return dfs(0, n)\n\n\nclass Solution:\n\n def countArrangement(self, n: int) ->int:\n\n def count(i, nums):\n if i == 1:\n return 1\n return sum(count(i - 1, nums - {num}) for num in nums if num %\n i == 0 or i % num == 0)\n return count(n, set(range(1, n + 1)))\n",
"step-4": "<mask token>\n\n\nclass Solution:\n\n def countArrangement(self, n: int) ->int:\n cache = {}\n\n def helper(perm):\n digits = len(perm)\n if digits == 1:\n return 1\n if perm in cache:\n return cache[perm]\n cnt = 0\n for i in range(digits):\n if perm[i] % digits == 0 or digits % perm[i] == 0:\n cnt += helper(perm[:i] + perm[i + 1:])\n cache[perm] = cnt\n return cnt\n return helper(tuple(range(1, n + 1)))\n\n\nclass Solution:\n\n def countArrangement(self, n: int) ->int:\n bitset_total = 2 ** n\n dp = [[(0) for _ in range(bitset_total)] for _ in range(n + 1)]\n dp[0][0] = 1\n for i in range(1, n + 1):\n for bm in range(bitset_total):\n for num in range(n):\n if bm & 1 << num and ((num + 1) % i == 0 or i % (num + \n 1) == 0):\n dp[i][bm] += dp[i - 1][bm ^ 1 << num]\n return dp[-1][-1]\n\n\nclass Solution:\n\n def countArrangement(self, n: int) ->int:\n\n @cache\n def dfs(bm, i):\n if i == 0:\n return 1\n cnt = 0\n for num in range(n):\n if not bm & 1 << num and ((num + 1) % i == 0 or i % (num + \n 1) == 0):\n cnt += dfs(bm ^ 1 << num, i - 1)\n return cnt\n return dfs(0, n)\n\n\nclass Solution:\n\n def countArrangement(self, n: int) ->int:\n\n def count(i, nums):\n if i == 1:\n return 1\n return sum(count(i - 1, nums - {num}) for num in nums if num %\n i == 0 or i % num == 0)\n return count(n, set(range(1, n + 1)))\n",
"step-5": "from __future__ import annotations\nfrom functools import cache\n\n\nclass Solution:\n def countArrangement(self, n: int) -> int:\n cache = {}\n\n def helper(perm):\n digits = len(perm)\n if digits == 1:\n return 1\n if perm in cache:\n return cache[perm]\n cnt = 0\n for i in range(digits):\n if perm[i] % digits == 0 or digits % perm[i] == 0:\n cnt += helper(perm[:i] + perm[i+1:])\n cache[perm] = cnt\n return cnt\n\n return helper(tuple(range(1, n+1)))\n\n\nclass Solution:\n def countArrangement(self, n: int) -> int:\n # total number of bitset states possible\n bitset_total = 2**n\n dp = [[0 for _ in range(bitset_total)]\n for _ in range(n+1)]\n # all other valid states lead to this base case so mark this as 1\n dp[0][0] = 1\n # iterate over all positions\n for i in range(1, n+1):\n # iterate over all subsets\n for bm in range(bitset_total):\n # iterate over all numbers\n for num in range(n):\n # if number is not visited and satisfies condition in question\n # & (各桁が両方とも1なら1になる)\n # 1 << x (1を左にxシフトさせて右をゼロで埋める)\n # ^ (XOR: 各桁の片方が1なら1になる)\n if ((bm & (1 << num)) and\n (((num+1) % i == 0) or\n (i % (num+1) == 0))):\n dp[i][bm] += dp[i-1][bm ^ (1 << num)]\n return dp[-1][-1]\n\n\n# bm is binary mask for visited numbers.\n# i is current place we want to fill. \n# Idea is to start from the end, and fill places in opposite direction,\n# because for big numbers we potentially have less candidates.\n# how dfs(bm, pl) will work:\n# If we reached place 0 and procces was not interrupted so far,\n# it means that we find beautiful arrangement.\n# For each number 1, 2, ..., n we try to put this number on place pl:\n# and we need to check two conditions: first, that this place is still empty,\n# using bitmask and secondly that one of the two properties for beutiful arrangement\n# holds. 
In this case we add dfs(bm^1<<i, pl - 1) to final answer.\n# Finally, we run dfs(0, n): from the last place and with empty bit-mask.\nclass Solution:\n def countArrangement(self, n: int) -> int:\n @cache\n def dfs(bm, i):\n if i == 0:\n return 1\n\n cnt = 0\n for num in range(n):\n if not bm & 1 << num\\\n and ((num+1) % i == 0 or i % (num+1) == 0):\n cnt += dfs(bm ^ 1 << num, i-1)\n return cnt\n\n return dfs(0, n)\n\n\n# nums is the set of still available numbers.\n# Note that my i goes downwards, from n to 1. Because position i = 1\n# can hold any number, so I don't even have to check whether the last\n# remaining number fits there. Also, position i = 2 happily holds\n# every second number and i = 3 happily holds every third number,\n# so filling the lowest positions last has a relatively high chance of success.\nclass Solution:\n def countArrangement(self, n: int) -> int:\n def count(i, nums):\n if i == 1:\n return 1\n return sum(count(i-1, nums-{num})\n for num in nums\n if num % i == 0 or i % num == 0)\n return count(n, set(range(1, n+1)))\n",
"step-ids": [
5,
6,
7,
8,
10
]
}
|
[
5,
6,
7,
8,
10
] |
#!/usr/bin/env python
import os, time, sys
fifoname = '/dev/pi-blaster' # must open same name
def child():
    """Write 'Spam NNN' lines into the named pipe forever.

    Sleeps 0,1,2,3,4 seconds cyclically between writes.  The os.open()
    call blocks until a reader attaches to the fifo.
    """
    pipeout = os.open(fifoname, os.O_WRONLY)  # open fifo pipe file as fd
    zzz = 0
    while True:
        time.sleep(zzz)
        # os.write requires bytes on Python 3; encode the formatted line
        # (on Python 2, str.encode() is a no-op str, so this works there too).
        os.write(pipeout, ('Spam %03d\n' % zzz).encode())
        zzz = (zzz + 1) % 5
def parent():
    """Consume lines from the named pipe forever, echoing each with pid/time.

    open() blocks until a writer attaches; each readline() blocks until a
    full line has been sent.
    """
    # Line buffering (1) instead of unbuffered (0): Python 3 rejects
    # unbuffered text-mode files, and line-at-a-time is what we read anyway.
    pipein = open(fifoname, 'r', 1)
    while True:
        line = pipein.readline()[:-1]  # blocks until data sent; strip '\n'
        print('Parent %d got "%s" at %s' % (os.getpid(), line, time.time()))
# NOTE(review): the original __main__ guard below is intentionally disabled,
# so this script unconditionally runs as the pipe reader (parent).  Re-enable
# the block to restore role selection (parent with no args, child otherwise)
# and the creation of the fifo when it does not yet exist.
#if __name__ == '__main__':
#    if not os.path.exists(fifoname):
#        os.mkfifo(fifoname)   # create a named pipe file
#    if len(sys.argv) == 1:
#        parent( )             # run as parent if no args
#    else:                     # else run as child process
parent( )
|
normal
|
{
"blob_id": "7502e28197cb40044303a0a2163546f42375aeb6",
"index": 6119,
"step-1": "#!/usr/bin/env python\nimport os, time, sys\nfifoname = '/dev/pi-blaster' # must open same name\n\ndef child( ):\n pipeout = os.open(fifoname, os.O_WRONLY) # open fifo pipe file as fd\n zzz = 0\n while 1:\n time.sleep(zzz)\n os.write(pipeout, 'Spam %03d\\n' % zzz)\n zzz = (zzz+1) % 5\n\ndef parent( ):\n pipein = open(fifoname, 'r', 0) # open fifo as stdio object\n while 1:\n line = pipein.readline( )[:-1] # blocks until data sent\n print 'Parent %d got \"%s\" at %s' % (os.getpid(), line, time.time( ))\n\n#if _ _name_ _ == '_ _main_ _':\n# if not os.path.exists(fifoname):\n# os.mkfifo(fifoname) # create a named pipe file\n# if len(sys.argv) == 1:\n# parent( ) # run as parent if no args\n# else: # else run as child process\nparent( )\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Module that defines a controller for database's operations over business rules
"""
# built-in dependencies
import functools
import typing
# external dependencies
import sqlalchemy
from sqlalchemy.orm import sessionmaker
# project dependencies
from database.table import ResourceTable
__authors__ = ["Gabriel Castro", "Gustavo Possebon", "Henrique Kops"]
__date__ = "24/10/2020"
class _DatabaseResourceTableController:
    """
    Controller for resource table access
    """

    def __init__(self):
        # SQLAlchemy engine over the local SQLite file plus a session factory
        # bound to it; each method opens (and always closes) its own session.
        self.engine = sqlalchemy.create_engine("sqlite:///db.sqlite3")
        self.session = sessionmaker(bind=self.engine)

    def register_peer(self, peer_id: str, peer_ip: str, peer_port: int,
                      resource_name: str, resource_path: str, resource_hash: str) -> None:
        """
        Register 'peer x resource' relationship at database

        :param peer_id: Peer's id
        :param peer_ip: Peer's ip
        :param peer_port: Peer's listen port
        :param resource_name: Resource's name
        :param resource_path: Resource's path
        :param resource_hash: Resource's MD5
        """
        session = self.session()
        try:
            record = ResourceTable()
            record.peerId = peer_id
            record.peerIp = peer_ip
            record.peerPort = peer_port
            record.resourceName = resource_name
            record.resourcePath = resource_path
            record.resourceHash = resource_hash
            session.add(record)
            session.commit()
        finally:
            session.close()

    def get_available_peer(self, resource_name: str) -> typing.List:
        """
        Get peer's ip and port and resource's path, name and hash
        that contains same resource name

        :param resource_name: Name of the resource to be searched at database
        :return: List containing matching peer's and resource's info
        """
        session = self.session()
        try:
            columns = (ResourceTable.peerIp, ResourceTable.peerPort,
                       ResourceTable.resourcePath, ResourceTable.resourceName,
                       ResourceTable.resourceHash)
            matches = (session.query(*columns)
                       .filter(ResourceTable.resourceName == resource_name)
                       .group_by(ResourceTable.peerId)
                       .all())
            # One row per peer; hand back the first match, or [] when none.
            return matches[0] if matches else []
        finally:
            session.close()

    def get_all_resources(self) -> typing.List:
        """
        Get every register of peer's ip and port and resource's path, name and hash

        :return: List of every 'peer x resource' info
        """
        session = self.session()
        try:
            columns = (ResourceTable.peerIp, ResourceTable.peerPort,
                       ResourceTable.resourcePath, ResourceTable.resourceName,
                       ResourceTable.resourceHash)
            return (session.query(*columns)
                    .group_by(ResourceTable.peerId, ResourceTable.resourceHash)
                    .all())
        finally:
            session.close()

    def drop_peer(self, peer_id: str) -> None:
        """
        Delete every record that contains same peer's id

        :param peer_id: Peer's ip to be used as filter
        """
        session = self.session()
        try:
            (session.query(ResourceTable)
             .filter(ResourceTable.peerId == peer_id)
             .delete())
            session.commit()
        finally:
            session.close()
@functools.lru_cache()
def get_database_resource_table_controller() -> _DatabaseResourceTableController:
    """
    Singleton for DatabaseResourceTableController class

    lru_cache on a zero-argument function guarantees every caller receives
    the same controller instance.

    :return: Same instance for DatabaseResourceTableController class
    """
    # Fixed: the return annotation was the list literal
    # [_DatabaseResourceTableController], not the class itself.
    return _DatabaseResourceTableController()
|
normal
|
{
"blob_id": "c024e12fe06e47187c25a9f384ceed566bf94645",
"index": 6909,
"step-1": "<mask token>\n\n\nclass _DatabaseResourceTableController:\n <mask token>\n <mask token>\n\n def register_peer(self, peer_id: str, peer_ip: str, peer_port: int,\n resource_name: str, resource_path: str, resource_hash: str) ->None:\n \"\"\"\n Register 'peer x resource' relationship at database\n\n :param peer_id: Peer's id\n :param peer_ip: Peer's ip\n :param peer_port: Peer's listen port\n :param resource_name: Resource's name\n :param resource_path: Resource's path\n :param resource_hash: Resource's MD5\n \"\"\"\n session = self.session()\n try:\n new_resource = ResourceTable()\n new_resource.peerId = peer_id\n new_resource.peerIp = peer_ip\n new_resource.peerPort = peer_port\n new_resource.resourceName = resource_name\n new_resource.resourcePath = resource_path\n new_resource.resourceHash = resource_hash\n session.add(new_resource)\n session.commit()\n finally:\n session.close()\n\n def get_available_peer(self, resource_name: str) ->typing.List:\n \"\"\"\n Get peer's ip and port and resource's path, name and hash\n that contains same resource name\n\n :param resource_name: Name of the resource to be searched at database\n :return: List containing matching peer's and resource's info\n \"\"\"\n session = self.session()\n try:\n available_peers = session.query(ResourceTable.peerIp,\n ResourceTable.peerPort, ResourceTable.resourcePath,\n ResourceTable.resourceName, ResourceTable.resourceHash).filter(\n ResourceTable.resourceName == resource_name).group_by(\n ResourceTable.peerId).all()\n if available_peers:\n return available_peers[0]\n else:\n return []\n finally:\n session.close()\n\n def get_all_resources(self) ->typing.List:\n \"\"\"\n Get every register of peer's ip and port and resource's path, name and hash\n\n :return: List of every 'peer x resource' info\n \"\"\"\n session = self.session()\n try:\n available_peers = session.query(ResourceTable.peerIp,\n ResourceTable.peerPort, ResourceTable.resourcePath,\n ResourceTable.resourceName, 
ResourceTable.resourceHash\n ).group_by(ResourceTable.peerId, ResourceTable.resourceHash\n ).all()\n return available_peers\n finally:\n session.close()\n\n def drop_peer(self, peer_id: str) ->None:\n \"\"\"\n Delete every record that contains same peer's id\n\n :param peer_id: Peer's ip to be used as filter\n \"\"\"\n session = self.session()\n try:\n session.query(ResourceTable).filter(ResourceTable.peerId == peer_id\n ).delete()\n session.commit()\n finally:\n session.close()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass _DatabaseResourceTableController:\n <mask token>\n\n def __init__(self):\n self.engine = sqlalchemy.create_engine('sqlite:///db.sqlite3')\n self.session = sessionmaker(bind=self.engine)\n\n def register_peer(self, peer_id: str, peer_ip: str, peer_port: int,\n resource_name: str, resource_path: str, resource_hash: str) ->None:\n \"\"\"\n Register 'peer x resource' relationship at database\n\n :param peer_id: Peer's id\n :param peer_ip: Peer's ip\n :param peer_port: Peer's listen port\n :param resource_name: Resource's name\n :param resource_path: Resource's path\n :param resource_hash: Resource's MD5\n \"\"\"\n session = self.session()\n try:\n new_resource = ResourceTable()\n new_resource.peerId = peer_id\n new_resource.peerIp = peer_ip\n new_resource.peerPort = peer_port\n new_resource.resourceName = resource_name\n new_resource.resourcePath = resource_path\n new_resource.resourceHash = resource_hash\n session.add(new_resource)\n session.commit()\n finally:\n session.close()\n\n def get_available_peer(self, resource_name: str) ->typing.List:\n \"\"\"\n Get peer's ip and port and resource's path, name and hash\n that contains same resource name\n\n :param resource_name: Name of the resource to be searched at database\n :return: List containing matching peer's and resource's info\n \"\"\"\n session = self.session()\n try:\n available_peers = session.query(ResourceTable.peerIp,\n ResourceTable.peerPort, ResourceTable.resourcePath,\n ResourceTable.resourceName, ResourceTable.resourceHash).filter(\n ResourceTable.resourceName == resource_name).group_by(\n ResourceTable.peerId).all()\n if available_peers:\n return available_peers[0]\n else:\n return []\n finally:\n session.close()\n\n def get_all_resources(self) ->typing.List:\n \"\"\"\n Get every register of peer's ip and port and resource's path, name and hash\n\n :return: List of every 'peer x resource' info\n \"\"\"\n session = self.session()\n try:\n available_peers = 
session.query(ResourceTable.peerIp,\n ResourceTable.peerPort, ResourceTable.resourcePath,\n ResourceTable.resourceName, ResourceTable.resourceHash\n ).group_by(ResourceTable.peerId, ResourceTable.resourceHash\n ).all()\n return available_peers\n finally:\n session.close()\n\n def drop_peer(self, peer_id: str) ->None:\n \"\"\"\n Delete every record that contains same peer's id\n\n :param peer_id: Peer's ip to be used as filter\n \"\"\"\n session = self.session()\n try:\n session.query(ResourceTable).filter(ResourceTable.peerId == peer_id\n ).delete()\n session.commit()\n finally:\n session.close()\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass _DatabaseResourceTableController:\n \"\"\"\n Controller for resource table access\n \"\"\"\n\n def __init__(self):\n self.engine = sqlalchemy.create_engine('sqlite:///db.sqlite3')\n self.session = sessionmaker(bind=self.engine)\n\n def register_peer(self, peer_id: str, peer_ip: str, peer_port: int,\n resource_name: str, resource_path: str, resource_hash: str) ->None:\n \"\"\"\n Register 'peer x resource' relationship at database\n\n :param peer_id: Peer's id\n :param peer_ip: Peer's ip\n :param peer_port: Peer's listen port\n :param resource_name: Resource's name\n :param resource_path: Resource's path\n :param resource_hash: Resource's MD5\n \"\"\"\n session = self.session()\n try:\n new_resource = ResourceTable()\n new_resource.peerId = peer_id\n new_resource.peerIp = peer_ip\n new_resource.peerPort = peer_port\n new_resource.resourceName = resource_name\n new_resource.resourcePath = resource_path\n new_resource.resourceHash = resource_hash\n session.add(new_resource)\n session.commit()\n finally:\n session.close()\n\n def get_available_peer(self, resource_name: str) ->typing.List:\n \"\"\"\n Get peer's ip and port and resource's path, name and hash\n that contains same resource name\n\n :param resource_name: Name of the resource to be searched at database\n :return: List containing matching peer's and resource's info\n \"\"\"\n session = self.session()\n try:\n available_peers = session.query(ResourceTable.peerIp,\n ResourceTable.peerPort, ResourceTable.resourcePath,\n ResourceTable.resourceName, ResourceTable.resourceHash).filter(\n ResourceTable.resourceName == resource_name).group_by(\n ResourceTable.peerId).all()\n if available_peers:\n return available_peers[0]\n else:\n return []\n finally:\n session.close()\n\n def get_all_resources(self) ->typing.List:\n \"\"\"\n Get every register of peer's ip and port and resource's path, name and hash\n\n :return: List of every 'peer x resource' info\n \"\"\"\n session = 
self.session()\n try:\n available_peers = session.query(ResourceTable.peerIp,\n ResourceTable.peerPort, ResourceTable.resourcePath,\n ResourceTable.resourceName, ResourceTable.resourceHash\n ).group_by(ResourceTable.peerId, ResourceTable.resourceHash\n ).all()\n return available_peers\n finally:\n session.close()\n\n def drop_peer(self, peer_id: str) ->None:\n \"\"\"\n Delete every record that contains same peer's id\n\n :param peer_id: Peer's ip to be used as filter\n \"\"\"\n session = self.session()\n try:\n session.query(ResourceTable).filter(ResourceTable.peerId == peer_id\n ).delete()\n session.commit()\n finally:\n session.close()\n\n\n<mask token>\n",
"step-4": "<mask token>\n__authors__ = ['Gabriel Castro', 'Gustavo Possebon', 'Henrique Kops']\n__date__ = '24/10/2020'\n\n\nclass _DatabaseResourceTableController:\n \"\"\"\n Controller for resource table access\n \"\"\"\n\n def __init__(self):\n self.engine = sqlalchemy.create_engine('sqlite:///db.sqlite3')\n self.session = sessionmaker(bind=self.engine)\n\n def register_peer(self, peer_id: str, peer_ip: str, peer_port: int,\n resource_name: str, resource_path: str, resource_hash: str) ->None:\n \"\"\"\n Register 'peer x resource' relationship at database\n\n :param peer_id: Peer's id\n :param peer_ip: Peer's ip\n :param peer_port: Peer's listen port\n :param resource_name: Resource's name\n :param resource_path: Resource's path\n :param resource_hash: Resource's MD5\n \"\"\"\n session = self.session()\n try:\n new_resource = ResourceTable()\n new_resource.peerId = peer_id\n new_resource.peerIp = peer_ip\n new_resource.peerPort = peer_port\n new_resource.resourceName = resource_name\n new_resource.resourcePath = resource_path\n new_resource.resourceHash = resource_hash\n session.add(new_resource)\n session.commit()\n finally:\n session.close()\n\n def get_available_peer(self, resource_name: str) ->typing.List:\n \"\"\"\n Get peer's ip and port and resource's path, name and hash\n that contains same resource name\n\n :param resource_name: Name of the resource to be searched at database\n :return: List containing matching peer's and resource's info\n \"\"\"\n session = self.session()\n try:\n available_peers = session.query(ResourceTable.peerIp,\n ResourceTable.peerPort, ResourceTable.resourcePath,\n ResourceTable.resourceName, ResourceTable.resourceHash).filter(\n ResourceTable.resourceName == resource_name).group_by(\n ResourceTable.peerId).all()\n if available_peers:\n return available_peers[0]\n else:\n return []\n finally:\n session.close()\n\n def get_all_resources(self) ->typing.List:\n \"\"\"\n Get every register of peer's ip and port and resource's path, 
name and hash\n\n :return: List of every 'peer x resource' info\n \"\"\"\n session = self.session()\n try:\n available_peers = session.query(ResourceTable.peerIp,\n ResourceTable.peerPort, ResourceTable.resourcePath,\n ResourceTable.resourceName, ResourceTable.resourceHash\n ).group_by(ResourceTable.peerId, ResourceTable.resourceHash\n ).all()\n return available_peers\n finally:\n session.close()\n\n def drop_peer(self, peer_id: str) ->None:\n \"\"\"\n Delete every record that contains same peer's id\n\n :param peer_id: Peer's ip to be used as filter\n \"\"\"\n session = self.session()\n try:\n session.query(ResourceTable).filter(ResourceTable.peerId == peer_id\n ).delete()\n session.commit()\n finally:\n session.close()\n\n\n@functools.lru_cache()\ndef get_database_resource_table_controller() ->[\n _DatabaseResourceTableController]:\n \"\"\"\n Singleton for DatabaseResourceTableController class\n\n :return: Same instance for DatabaseResourceTableController class\n \"\"\"\n return _DatabaseResourceTableController()\n",
"step-5": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n\n\"\"\"\nModule that defines a controller for database's operations over business rules\n\"\"\"\n\n# built-in dependencies\nimport functools\nimport typing\n\n# external dependencies\nimport sqlalchemy\nfrom sqlalchemy.orm import sessionmaker\n\n# project dependencies\nfrom database.table import ResourceTable\n\n__authors__ = [\"Gabriel Castro\", \"Gustavo Possebon\", \"Henrique Kops\"]\n__date__ = \"24/10/2020\"\n\n\nclass _DatabaseResourceTableController:\n \"\"\"\n Controller for resource table access\n \"\"\"\n\n def __init__(self):\n # sqlalchemy\n self.engine = sqlalchemy.create_engine(\"sqlite:///db.sqlite3\")\n self.session = sessionmaker(bind=self.engine)\n\n def register_peer(self, peer_id: str, peer_ip: str, peer_port: int,\n resource_name: str, resource_path: str, resource_hash: str) -> None:\n \"\"\"\n Register 'peer x resource' relationship at database\n\n :param peer_id: Peer's id\n :param peer_ip: Peer's ip\n :param peer_port: Peer's listen port\n :param resource_name: Resource's name\n :param resource_path: Resource's path\n :param resource_hash: Resource's MD5\n \"\"\"\n\n session = self.session()\n\n try:\n new_resource = ResourceTable()\n\n new_resource.peerId = peer_id\n new_resource.peerIp = peer_ip\n new_resource.peerPort = peer_port\n new_resource.resourceName = resource_name\n new_resource.resourcePath = resource_path\n new_resource.resourceHash = resource_hash\n\n session.add(new_resource)\n session.commit()\n\n finally:\n session.close()\n\n def get_available_peer(self, resource_name: str) -> typing.List:\n \"\"\"\n Get peer's ip and port and resource's path, name and hash\n that contains same resource name\n\n :param resource_name: Name of the resource to be searched at database\n :return: List containing matching peer's and resource's info\n \"\"\"\n\n session = self.session()\n\n try:\n available_peers = session\\\n .query(\n ResourceTable.peerIp,\n ResourceTable.peerPort,\n 
ResourceTable.resourcePath,\n ResourceTable.resourceName,\n ResourceTable.resourceHash\n )\\\n .filter(ResourceTable.resourceName == resource_name)\\\n .group_by(ResourceTable.peerId)\\\n .all()\n\n if available_peers:\n return available_peers[0]\n\n else:\n return []\n\n finally:\n session.close()\n\n def get_all_resources(self) -> typing.List:\n \"\"\"\n Get every register of peer's ip and port and resource's path, name and hash\n\n :return: List of every 'peer x resource' info\n \"\"\"\n\n session = self.session()\n\n try:\n available_peers = session\\\n .query(\n ResourceTable.peerIp,\n ResourceTable.peerPort,\n ResourceTable.resourcePath,\n ResourceTable.resourceName,\n ResourceTable.resourceHash\n )\\\n .group_by(ResourceTable.peerId, ResourceTable.resourceHash)\\\n .all()\n\n return available_peers\n\n finally:\n session.close()\n\n def drop_peer(self, peer_id: str) -> None:\n \"\"\"\n Delete every record that contains same peer's id\n\n :param peer_id: Peer's ip to be used as filter\n \"\"\"\n\n session = self.session()\n try:\n session\\\n .query(ResourceTable)\\\n .filter(ResourceTable.peerId == peer_id)\\\n .delete()\n session.commit()\n\n finally:\n session.close()\n\n\n@functools.lru_cache()\ndef get_database_resource_table_controller() -> [_DatabaseResourceTableController]:\n \"\"\"\n Singleton for DatabaseResourceTableController class\n\n :return: Same instance for DatabaseResourceTableController class\n \"\"\"\n\n return _DatabaseResourceTableController()\n",
"step-ids": [
5,
6,
7,
9,
11
]
}
|
[
5,
6,
7,
9,
11
] |
import requests
if __name__ == "__main__":
    # Individual Datacake webhook URL.
    # Change this to the webhook URL of your Datacake device/product.
    datacake_url = "https://api.datacake.co/integrations/api/ae6dd531-4cf6-4966-b5c9-6c43939aae90/"

    # Serial number is included in the payload so Datacake can route the
    # data to the right device.
    serial = "python0001"

    # Demo measurement values, assembled into the JSON body up front.
    payload = {
        "number_of_persons_a": 234,
        "number_of_persons_b": 345,
        "additional_payload": "bla bla",
        "some_data": 23.456,
        "a_boolean": True,
        "serial": serial,
    }

    # Push everything to Datacake in a single POST and show the response.
    response = requests.post(datacake_url, json=payload)
    print(response)
|
normal
|
{
"blob_id": "00af9627242648a5a16a34a18bfc117945f1bc08",
"index": 4936,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif __name__ == '__main__':\n datacake_url = (\n 'https://api.datacake.co/integrations/api/ae6dd531-4cf6-4966-b5c9-6c43939aae90/'\n )\n serial = 'python0001'\n number_of_persons_a = 234\n number_of_persons_b = 345\n additional_payload = 'bla bla'\n some_data = 23.456\n a_boolean = True\n r = requests.post(datacake_url, json={'number_of_persons_a':\n number_of_persons_a, 'number_of_persons_b': number_of_persons_b,\n 'additional_payload': additional_payload, 'some_data': some_data,\n 'a_boolean': a_boolean, 'serial': serial})\n print(r)\n",
"step-3": "import requests\nif __name__ == '__main__':\n datacake_url = (\n 'https://api.datacake.co/integrations/api/ae6dd531-4cf6-4966-b5c9-6c43939aae90/'\n )\n serial = 'python0001'\n number_of_persons_a = 234\n number_of_persons_b = 345\n additional_payload = 'bla bla'\n some_data = 23.456\n a_boolean = True\n r = requests.post(datacake_url, json={'number_of_persons_a':\n number_of_persons_a, 'number_of_persons_b': number_of_persons_b,\n 'additional_payload': additional_payload, 'some_data': some_data,\n 'a_boolean': a_boolean, 'serial': serial})\n print(r)\n",
"step-4": "import requests\n\nif __name__ == \"__main__\":\n\n # individual datacake webhook url\n # Change this to the webhook url of your datacake device/product\n datacake_url = \"https://api.datacake.co/integrations/api/ae6dd531-4cf6-4966-b5c9-6c43939aae90/\"\n\n # Serial number\n # Include Serial Number in Payload so Datacake can route information\n # based on serial of device\n serial = \"python0001\"\n\n # Just some random demo data\n number_of_persons_a = 234\n number_of_persons_b = 345\n additional_payload = \"bla bla\"\n some_data = 23.456\n a_boolean = True\n\n # create api call\n r = requests.post(datacake_url, json={\n \"number_of_persons_a\": number_of_persons_a,\n \"number_of_persons_b\": number_of_persons_b,\n \"additional_payload\": additional_payload,\n \"some_data\": some_data,\n \"a_boolean\": a_boolean,\n \"serial\": serial\n })\n\n print(r)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
##########################################################################################
## Scene Classification ##
## Authors : Chris Andrew, Santhoshini Reddy, Nikath Yasmeen, Sai Hima, Sriya Ragini ##
################################################################### ##
## Description: This project was developed as part of the DIP course at IIIT Sri City ##
## All code is available for free usage for educational purposes ##
## Authors do not authorize commercial use of the source code ##
##########################################################################################
# The following module shuffles the data to enable 10 fold cross-validation analysis
################ Imports ################
from random import shuffle
################ Global ################
path = "data/"
filename = "data"
################ Source ################
# ------------------------------------
data = list()
train_data = list()
train_class = list()
# ------------------------------------
# Load every row of the feature CSV as a list of floats.
with open(path + filename + ".csv", 'r') as f:
    for line in f:
        l = line.strip().split(',')
        # list(...) is required on Python 3, where map() returns a lazy
        # iterator that would break the slicing (l[0:-1]) done below.
        data.append(list(map(float, l)))
# ------------------------------------
# Shuffle repeatedly so consecutive rows of the original file end up well
# separated before the 10-fold cross-validation split.
for i in range(100):
    shuffle(data)
# ------------------------------------
# Split each row into its feature vector and its integer class label.
for l in data:
    train_data.append(l[0:-1])
    train_class.append(int(l[-1]))
# ------------------------------------
# Write the randomized rows back out as "<feature>,<feature>,...,<label>".
with open(path + filename + "_r.csv", 'w') as f:
    for i in range(len(train_data)):
        for entry in train_data[i]:
            f.write(str(entry) + ',')
        f.write(str(train_class[i]) + '\n')
# ------------------------------------------------------------------------------------------------------------------------------------------
# ------------------------------------------------------------------------------------------------------------------------------------------
# ------------------------------------------------------------------------------------------------------------------------------------------
|
normal
|
{
"blob_id": "b8b20d6c977a6c1df6a592188c6e799f12da6a23",
"index": 9734,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor line in f:\n l = line.strip()\n l = l.split(',')\n l = map(float, l)\n data.append(l)\nf.close()\nfor i in range(100):\n shuffle(data)\nfor l in data:\n train_data.append(l[0:-1])\n train_class.append(int(l[-1]))\n<mask token>\nfor i in range(len(train_data)):\n for entry in train_data[i]:\n f.write(str(entry) + ',')\n f.write(str(train_class[i]) + '\\n')\nf.close()\n",
"step-3": "<mask token>\npath = 'data/'\nfilename = 'data'\nf = open(path + filename + '.csv', 'r')\ndata = list()\ntrain_data = list()\ntrain_class = list()\nfor line in f:\n l = line.strip()\n l = l.split(',')\n l = map(float, l)\n data.append(l)\nf.close()\nfor i in range(100):\n shuffle(data)\nfor l in data:\n train_data.append(l[0:-1])\n train_class.append(int(l[-1]))\nf = open(path + filename + '_r.csv', 'w')\nfor i in range(len(train_data)):\n for entry in train_data[i]:\n f.write(str(entry) + ',')\n f.write(str(train_class[i]) + '\\n')\nf.close()\n",
"step-4": "from random import shuffle\npath = 'data/'\nfilename = 'data'\nf = open(path + filename + '.csv', 'r')\ndata = list()\ntrain_data = list()\ntrain_class = list()\nfor line in f:\n l = line.strip()\n l = l.split(',')\n l = map(float, l)\n data.append(l)\nf.close()\nfor i in range(100):\n shuffle(data)\nfor l in data:\n train_data.append(l[0:-1])\n train_class.append(int(l[-1]))\nf = open(path + filename + '_r.csv', 'w')\nfor i in range(len(train_data)):\n for entry in train_data[i]:\n f.write(str(entry) + ',')\n f.write(str(train_class[i]) + '\\n')\nf.close()\n",
"step-5": "##########################################################################################\n## Scene Classification ##\n## Authors : Chris Andrew, Santhoshini Reddy, Nikath Yasmeen, Sai Hima, Sriya Ragini ##\n################################################################### ##\n## Description: This project was developed as part of the DIP course at IIIT Sri City ##\n## All code is available for free usage for educational purposes ##\n## Authors do not authorize commercial use of the source code ##\n##########################################################################################\n\n# The following module shuffles the data to enable 10 fold cross-validation analysis\n\n################ Imports ################\nfrom random import shuffle\n################ Global ################\npath = \"data/\"\nfilename = \"data\"\n################ Source ################\n# ------------------------------------\nf = open(path+filename+\".csv\",'r')\ndata = list()\ntrain_data = list()\ntrain_class = list()\n# ------------------------------------\nfor line in f:\n l = line.strip()\n l = l.split(',')\n l = map(float , l)\n data.append(l)\n # ------------------------------------\nf.close()\n# ------------------------------------\nfor i in range(100):\n shuffle(data)\n# ------------------------------------\nfor l in data:\n train_data.append(l[0:-1])\n train_class.append(int(l[-1]))\n# ------------------------------------\nf = open(path+filename+\"_r.csv\",'w')\nfor i in range(len(train_data)):\n for entry in train_data[i]:\n f.write(str(entry)+',')\n # ------------------------------------\n f.write(str(train_class[i])+'\\n')\n # ------------------------------------\nf.close()\n# ------------------------------------------------------------------------------------------------------------------------------------------\n# ------------------------------------------------------------------------------------------------------------------------------------------\n# 
------------------------------------------------------------------------------------------------------------------------------------------\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
#!/usr/bin/env python
# Command-line client for the PaperCut print-management system: prints a
# file, lists printers, and reports the user's balance.
# NOTE: Python 2 source (print statements, raw_input, ConfigParser).
import argparse
import keyring
import papercut
import ConfigParser
import getpass
import time
import os
# Read the optional ~/.papercut config file for a saved username.
config = ConfigParser.ConfigParser()
config.read([os.path.expanduser('~/.papercut')])
try:
    username = config.get('papercut','username')
except ConfigParser.NoSectionError:
    # No [papercut] section in the config; may prompt interactively below.
    username = None
p = argparse.ArgumentParser(description='Print some documents')
p.add_argument('--print', '-p', help='a filename to be printed', dest='printjob')
p.add_argument('--printer', '-r', help='a printer name to print to')
# NOTE(review): help text reads "user' printing" — likely meant "user's printing".
p.add_argument('--balance', '-b', nargs='?', const=True, help='display the user\' printing balance')
p.add_argument('--list', '-l', nargs='?', const=True, help='list available printers')
p.add_argument('--user', '-u', help='username')
p.add_argument('--password-options', '-o', choices=['save','prompt'], help='save: prompt for password and save to keyring,\n prompt: prompt for password')
args = p.parse_args()
# Prompt for a username when neither the config file nor --user supplied one.
# NOTE(review): args.user is tested here but never assigned to `username`,
# so --user appears to have no effect — confirm intended behavior.
if not username and not args.user:
    username = raw_input('enter username: ')
# Look up a previously saved password for this user in the OS keyring.
password = keyring.get_password('papercut', username)
def list_printers(sessID):
    """Fetch the available printers, print them as a numbered table,
    and return the raw list from the PaperCut API."""
    printers = papercut.listPrinters(sessID)
    print "\nAvailable printers:"
    # printer[1] is the name later passed to printFile; printer[0] is shown
    # alongside (presumably a server/queue identifier — unverified).
    for i,printer in enumerate(printers):
        print i,"\t",printer[1], "." * (50 - len(printer[1])), printer[0]
    return printers
def get_balance(sessID):
    """Print the user's current balance; the API reports it in cents."""
    print '\nYour balance is now: $ %.2f' % (int(papercut.getBalance(sessID)) / 100.0)
# Prompt for the password when none is saved or the user asked to re-enter it.
if args.password_options or not password:
    password = getpass.getpass()
if args.password_options == 'save':
    keyring.set_password('papercut', username, password)
    print "password saved in keyring"
# Log in only when an action that needs a session was actually requested.
if args.list or args.balance or args.printjob or args.printer:
    sessID = papercut.login(username, password)
    if sessID:
        print '\nLogged in to PaperCut with session ID',sessID
        if args.list: list_printers(sessID)
        if args.balance: get_balance(sessID)
        if args.printjob:
            # No printer specified: show the list and ask the user to pick.
            if not args.printer:
                printers = list_printers(sessID)
                args.printer = raw_input('select printer: ')
            # A numeric choice is treated as an index into the listing;
            # anything else is used verbatim as a printer name.
            # NOTE(review): if a number is passed via --printer on the command
            # line, `printers` is undefined here (NameError) — only the
            # interactive branch above assigns it.
            try:
                printerIndex = int(args.printer)
                args.printer = printers[printerIndex][1]
            except ValueError:
                pass
            printJobID = papercut.printFile(args.printjob, args.printer, sessID)
            print '\nJob sent to printer', args.printer
            # Poll until the job leaves the 'Submitting' state...
            status = papercut.getPrintStatus(printJobID)
            while(status['status'] == 'Submitting'):
                time.sleep(0.1)
                status = papercut.getPrintStatus(printJobID)
            print "\nJob queued for printing."
            # ...then poll until the job reports completion.
            while(not status['complete']):
                time.sleep(0.1)
                status = papercut.getPrintStatus(printJobID)
            print "\nComplete!"
            print "\nThis job cost $", status['cost']
# print status
            get_balance(sessID)
    else:
        print '\nDid not successfully log in to PaperCut'
|
normal
|
{
"blob_id": "33cc8814d9397bcb0041728407efef80a136f151",
"index": 505,
"step-1": "#!/usr/bin/env python\nimport argparse\nimport keyring\nimport papercut\nimport ConfigParser\nimport getpass\nimport time\nimport os\n\nconfig = ConfigParser.ConfigParser()\nconfig.read([os.path.expanduser('~/.papercut')])\ntry:\n username = config.get('papercut','username')\nexcept ConfigParser.NoSectionError:\n username = None\n\np = argparse.ArgumentParser(description='Print some documents')\np.add_argument('--print', '-p', help='a filename to be printed', dest='printjob')\np.add_argument('--printer', '-r', help='a printer name to print to')\np.add_argument('--balance', '-b', nargs='?', const=True, help='display the user\\' printing balance')\np.add_argument('--list', '-l', nargs='?', const=True, help='list available printers')\np.add_argument('--user', '-u', help='username')\np.add_argument('--password-options', '-o', choices=['save','prompt'], help='save: prompt for password and save to keyring,\\n prompt: prompt for password')\n\nargs = p.parse_args()\n\nif not username and not args.user:\n username = raw_input('enter username: ')\n\npassword = keyring.get_password('papercut', username)\n\ndef list_printers(sessID):\n printers = papercut.listPrinters(sessID)\n print \"\\nAvailable printers:\"\n for i,printer in enumerate(printers):\n print i,\"\\t\",printer[1], \".\" * (50 - len(printer[1])), printer[0]\n return printers\n\ndef get_balance(sessID):\n print '\\nYour balance is now: $ %.2f' % (int(papercut.getBalance(sessID)) / 100.0)\n\n\n\nif args.password_options or not password:\n password = getpass.getpass()\n \nif args.password_options == 'save':\n keyring.set_password('papercut', username, password)\n print \"password saved in keyring\"\n\nif args.list or args.balance or args.printjob or args.printer:\n sessID = papercut.login(username, password)\n if sessID:\n print '\\nLogged in to PaperCut with session ID',sessID\n if args.list: list_printers(sessID)\n if args.balance: get_balance(sessID)\n if args.printjob:\n if not args.printer:\n 
printers = list_printers(sessID)\n args.printer = raw_input('select printer: ')\n try:\n printerIndex = int(args.printer)\n args.printer = printers[printerIndex][1]\n except ValueError:\n pass\n printJobID = papercut.printFile(args.printjob, args.printer, sessID)\n print '\\nJob sent to printer', args.printer\n \n status = papercut.getPrintStatus(printJobID)\n while(status['status'] == 'Submitting'):\n time.sleep(0.1)\n status = papercut.getPrintStatus(printJobID)\n print \"\\nJob queued for printing.\"\n\n while(not status['complete']):\n time.sleep(0.1)\n status = papercut.getPrintStatus(printJobID)\n print \"\\nComplete!\"\n print \"\\nThis job cost $\", status['cost']\n# print status\n get_balance(sessID)\n \n else:\n print '\\nDid not successfully log in to PaperCut'\n \n\n\n \n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
#!/usr/bin/env python
import rospy
from mark1.srv import WordCount, WordCountResponse
# Running set of every distinct word seen across all service requests.
# NOTE(review): set('',) is just an empty set — set() would be clearer.
s= set('',)
def count_words(request):
    """Service handler: record the request's words in `s` and return how
    many whitespace-separated words the request contained."""
    s.update(set( request.words.split() ))
    print s
    return WordCountResponse( len( request.words.split()))
rospy.init_node('mark_service_server')
# Advertise the 'Word_count' service, dispatching each call to count_words.
service = rospy.Service('Word_count', WordCount, count_words)
rospy.spin()
|
normal
|
{
"blob_id": "e90e4d2c777554999ab72d725d7e57bdfd508d3a",
"index": 3966,
"step-1": "#!/usr/bin/env python\nimport rospy\nfrom mark1.srv import WordCount, WordCountResponse\n\ns= set('',)\n\ndef count_words(request):\n s.update(set( request.words.split() ))\n print s\n return WordCountResponse( len( request.words.split()))\n\nrospy.init_node('mark_service_server')\n\nservice = rospy.Service('Word_count', WordCount, count_words)\n\nrospy.spin()\n\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
from flask_wtf import FlaskForm
from wtforms import StringField, SelectField,SubmitField, PasswordField, RadioField, MultipleFileField, SubmitField, TextAreaField
from wtforms.fields.html5 import EmailField, TelField, DateField
from wtforms.validators import DataRequired, Email, Length, InputRequired
class SignUpForm(FlaskForm):
    """Registration form for a new user (admin or student)."""
    id = StringField('ID*', validators=[DataRequired()])
    fname = StringField('Full Name*', validators=[DataRequired()])
    email = EmailField('Email Id*',validators=[DataRequired(), Email()])
    password = PasswordField('Password*', validators=[DataRequired()])
    # Length(min=10, max=10) constrains the string length to exactly 10.
    contactno = TelField('Mobile No*.', validators=[DataRequired(), Length(min=10, max=10)])
    design = SelectField(u'Designation*', choices=[('admin', 'Admin'), ('stud', 'Student')], validators=[DataRequired()])
    submit = SubmitField('Sign Up >>')
class LoginForm(FlaskForm):
    """Login form: credentials plus the role to log in as."""
    email = EmailField('Email Id*',validators=[DataRequired(), Email()])
    password = PasswordField('Password*', validators=[DataRequired()])
    design = SelectField(u'Designation*', choices=[('admin', 'Admin'), ('stud', 'Student')], validators=[DataRequired()])
    submit = SubmitField('Login >>')
class ForgotForm(FlaskForm):
    """Forgot-password form: identifies the account to reset by email and role."""
    email = EmailField('Email Id*',validators=[DataRequired(), Email()])
    design = SelectField(u'Designation*', choices=[('admin', 'Admin'), ('stud', 'Student')], validators=[DataRequired()])
    submit = SubmitField('Change your Password')
class changepassword(FlaskForm):
    """Form to enter the replacement password after a reset request."""
    password = PasswordField('Enter Password', validators=[DataRequired()])
    submit = SubmitField('Change Password')
class ComplaintForm(FlaskForm):
    """Student complaint submission: who, when, academic context, category,
    and a free-text description capped at 100 characters."""
    fname = StringField('Full Name *', validators=[DataRequired()])
    email = EmailField('Email Id*',validators=[DataRequired(), Email()])
    date = DateField('Date', validators=[DataRequired()])
    degree = SelectField(u'Degree*', choices=[('bachelors', 'Bachelors'), ('masters', 'Masters')], validators=[DataRequired()])
    semester = SelectField(u'Semester*', choices=[('first', 'First'), ('second', 'Second'), ('third', 'Third'), ('fourth', 'Fourth'), ('fifth', 'Fifth'), ('sixth', 'Sixth'), ('seventh', 'Seventh'), ('eighth', 'Eighth')], validators=[DataRequired()])
    complaintcategory = SelectField(u'Complain Category*', choices=[('infrastructure', 'Infrastructure'), ('accounts', 'Accounts'), ('academics', 'Academics'), ('management', 'Management'), ('faculty', 'Faculty'), ('library', 'Library')], validators=[DataRequired()])
    message = TextAreaField('Enter Complain Details', validators=[DataRequired(), Length(max=100)])
    #file = MultipleFileField(u'Upload File')
    submit = SubmitField('Submit')
class complaint_status(FlaskForm):
    """Admin-side form to move a complaint through its workflow states."""
    status = SelectField(u'Complaint Status', choices=[('Pending', 'Pending'), ('Under Review', 'Under Review'), ('Resolved', 'Resolved')])
    submit = SubmitField('Update')
|
normal
|
{
"blob_id": "32ed07a89a6f929a6c4b78fd79e687b85e01015b",
"index": 535,
"step-1": "<mask token>\n\n\nclass ForgotForm(FlaskForm):\n email = EmailField('Email Id*', validators=[DataRequired(), Email()])\n design = SelectField(u'Designation*', choices=[('admin', 'Admin'), (\n 'stud', 'Student')], validators=[DataRequired()])\n submit = SubmitField('Change your Password')\n\n\nclass changepassword(FlaskForm):\n password = PasswordField('Enter Password', validators=[DataRequired()])\n submit = SubmitField('Change Password')\n\n\nclass ComplaintForm(FlaskForm):\n fname = StringField('Full Name *', validators=[DataRequired()])\n email = EmailField('Email Id*', validators=[DataRequired(), Email()])\n date = DateField('Date', validators=[DataRequired()])\n degree = SelectField(u'Degree*', choices=[('bachelors', 'Bachelors'), (\n 'masters', 'Masters')], validators=[DataRequired()])\n semester = SelectField(u'Semester*', choices=[('first', 'First'), (\n 'second', 'Second'), ('third', 'Third'), ('fourth', 'Fourth'), (\n 'fifth', 'Fifth'), ('sixth', 'Sixth'), ('seventh', 'Seventh'), (\n 'eighth', 'Eighth')], validators=[DataRequired()])\n complaintcategory = SelectField(u'Complain Category*', choices=[(\n 'infrastructure', 'Infrastructure'), ('accounts', 'Accounts'), (\n 'academics', 'Academics'), ('management', 'Management'), ('faculty',\n 'Faculty'), ('library', 'Library')], validators=[DataRequired()])\n message = TextAreaField('Enter Complain Details', validators=[\n DataRequired(), Length(max=100)])\n submit = SubmitField('Submit')\n\n\nclass complaint_status(FlaskForm):\n status = SelectField(u'Complaint Status', choices=[('Pending',\n 'Pending'), ('Under Review', 'Under Review'), ('Resolved', 'Resolved')]\n )\n submit = SubmitField('Update')\n",
"step-2": "<mask token>\n\n\nclass LoginForm(FlaskForm):\n email = EmailField('Email Id*', validators=[DataRequired(), Email()])\n password = PasswordField('Password*', validators=[DataRequired()])\n design = SelectField(u'Designation*', choices=[('admin', 'Admin'), (\n 'stud', 'Student')], validators=[DataRequired()])\n submit = SubmitField('Login >>')\n\n\nclass ForgotForm(FlaskForm):\n email = EmailField('Email Id*', validators=[DataRequired(), Email()])\n design = SelectField(u'Designation*', choices=[('admin', 'Admin'), (\n 'stud', 'Student')], validators=[DataRequired()])\n submit = SubmitField('Change your Password')\n\n\nclass changepassword(FlaskForm):\n password = PasswordField('Enter Password', validators=[DataRequired()])\n submit = SubmitField('Change Password')\n\n\nclass ComplaintForm(FlaskForm):\n fname = StringField('Full Name *', validators=[DataRequired()])\n email = EmailField('Email Id*', validators=[DataRequired(), Email()])\n date = DateField('Date', validators=[DataRequired()])\n degree = SelectField(u'Degree*', choices=[('bachelors', 'Bachelors'), (\n 'masters', 'Masters')], validators=[DataRequired()])\n semester = SelectField(u'Semester*', choices=[('first', 'First'), (\n 'second', 'Second'), ('third', 'Third'), ('fourth', 'Fourth'), (\n 'fifth', 'Fifth'), ('sixth', 'Sixth'), ('seventh', 'Seventh'), (\n 'eighth', 'Eighth')], validators=[DataRequired()])\n complaintcategory = SelectField(u'Complain Category*', choices=[(\n 'infrastructure', 'Infrastructure'), ('accounts', 'Accounts'), (\n 'academics', 'Academics'), ('management', 'Management'), ('faculty',\n 'Faculty'), ('library', 'Library')], validators=[DataRequired()])\n message = TextAreaField('Enter Complain Details', validators=[\n DataRequired(), Length(max=100)])\n submit = SubmitField('Submit')\n\n\nclass complaint_status(FlaskForm):\n status = SelectField(u'Complaint Status', choices=[('Pending',\n 'Pending'), ('Under Review', 'Under Review'), ('Resolved', 'Resolved')]\n )\n 
submit = SubmitField('Update')\n",
"step-3": "<mask token>\n\n\nclass SignUpForm(FlaskForm):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\nclass LoginForm(FlaskForm):\n email = EmailField('Email Id*', validators=[DataRequired(), Email()])\n password = PasswordField('Password*', validators=[DataRequired()])\n design = SelectField(u'Designation*', choices=[('admin', 'Admin'), (\n 'stud', 'Student')], validators=[DataRequired()])\n submit = SubmitField('Login >>')\n\n\nclass ForgotForm(FlaskForm):\n email = EmailField('Email Id*', validators=[DataRequired(), Email()])\n design = SelectField(u'Designation*', choices=[('admin', 'Admin'), (\n 'stud', 'Student')], validators=[DataRequired()])\n submit = SubmitField('Change your Password')\n\n\nclass changepassword(FlaskForm):\n password = PasswordField('Enter Password', validators=[DataRequired()])\n submit = SubmitField('Change Password')\n\n\nclass ComplaintForm(FlaskForm):\n fname = StringField('Full Name *', validators=[DataRequired()])\n email = EmailField('Email Id*', validators=[DataRequired(), Email()])\n date = DateField('Date', validators=[DataRequired()])\n degree = SelectField(u'Degree*', choices=[('bachelors', 'Bachelors'), (\n 'masters', 'Masters')], validators=[DataRequired()])\n semester = SelectField(u'Semester*', choices=[('first', 'First'), (\n 'second', 'Second'), ('third', 'Third'), ('fourth', 'Fourth'), (\n 'fifth', 'Fifth'), ('sixth', 'Sixth'), ('seventh', 'Seventh'), (\n 'eighth', 'Eighth')], validators=[DataRequired()])\n complaintcategory = SelectField(u'Complain Category*', choices=[(\n 'infrastructure', 'Infrastructure'), ('accounts', 'Accounts'), (\n 'academics', 'Academics'), ('management', 'Management'), ('faculty',\n 'Faculty'), ('library', 'Library')], validators=[DataRequired()])\n message = TextAreaField('Enter Complain Details', validators=[\n DataRequired(), Length(max=100)])\n submit = SubmitField('Submit')\n\n\nclass complaint_status(FlaskForm):\n status 
= SelectField(u'Complaint Status', choices=[('Pending',\n 'Pending'), ('Under Review', 'Under Review'), ('Resolved', 'Resolved')]\n )\n submit = SubmitField('Update')\n",
"step-4": "<mask token>\n\n\nclass SignUpForm(FlaskForm):\n id = StringField('ID*', validators=[DataRequired()])\n fname = StringField('Full Name*', validators=[DataRequired()])\n email = EmailField('Email Id*', validators=[DataRequired(), Email()])\n password = PasswordField('Password*', validators=[DataRequired()])\n contactno = TelField('Mobile No*.', validators=[DataRequired(), Length(\n min=10, max=10)])\n design = SelectField(u'Designation*', choices=[('admin', 'Admin'), (\n 'stud', 'Student')], validators=[DataRequired()])\n submit = SubmitField('Sign Up >>')\n\n\nclass LoginForm(FlaskForm):\n email = EmailField('Email Id*', validators=[DataRequired(), Email()])\n password = PasswordField('Password*', validators=[DataRequired()])\n design = SelectField(u'Designation*', choices=[('admin', 'Admin'), (\n 'stud', 'Student')], validators=[DataRequired()])\n submit = SubmitField('Login >>')\n\n\nclass ForgotForm(FlaskForm):\n email = EmailField('Email Id*', validators=[DataRequired(), Email()])\n design = SelectField(u'Designation*', choices=[('admin', 'Admin'), (\n 'stud', 'Student')], validators=[DataRequired()])\n submit = SubmitField('Change your Password')\n\n\nclass changepassword(FlaskForm):\n password = PasswordField('Enter Password', validators=[DataRequired()])\n submit = SubmitField('Change Password')\n\n\nclass ComplaintForm(FlaskForm):\n fname = StringField('Full Name *', validators=[DataRequired()])\n email = EmailField('Email Id*', validators=[DataRequired(), Email()])\n date = DateField('Date', validators=[DataRequired()])\n degree = SelectField(u'Degree*', choices=[('bachelors', 'Bachelors'), (\n 'masters', 'Masters')], validators=[DataRequired()])\n semester = SelectField(u'Semester*', choices=[('first', 'First'), (\n 'second', 'Second'), ('third', 'Third'), ('fourth', 'Fourth'), (\n 'fifth', 'Fifth'), ('sixth', 'Sixth'), ('seventh', 'Seventh'), (\n 'eighth', 'Eighth')], validators=[DataRequired()])\n complaintcategory = SelectField(u'Complain 
Category*', choices=[(\n 'infrastructure', 'Infrastructure'), ('accounts', 'Accounts'), (\n 'academics', 'Academics'), ('management', 'Management'), ('faculty',\n 'Faculty'), ('library', 'Library')], validators=[DataRequired()])\n message = TextAreaField('Enter Complain Details', validators=[\n DataRequired(), Length(max=100)])\n submit = SubmitField('Submit')\n\n\nclass complaint_status(FlaskForm):\n status = SelectField(u'Complaint Status', choices=[('Pending',\n 'Pending'), ('Under Review', 'Under Review'), ('Resolved', 'Resolved')]\n )\n submit = SubmitField('Update')\n",
"step-5": "from flask_wtf import FlaskForm\nfrom wtforms import StringField, SelectField,SubmitField, PasswordField, RadioField, MultipleFileField, SubmitField, TextAreaField\nfrom wtforms.fields.html5 import EmailField, TelField, DateField\nfrom wtforms.validators import DataRequired, Email, Length, InputRequired\n\nclass SignUpForm(FlaskForm):\n id = StringField('ID*', validators=[DataRequired()])\n fname = StringField('Full Name*', validators=[DataRequired()])\n email = EmailField('Email Id*',validators=[DataRequired(), Email()])\n password = PasswordField('Password*', validators=[DataRequired()])\n contactno = TelField('Mobile No*.', validators=[DataRequired(), Length(min=10, max=10)])\n design = SelectField(u'Designation*', choices=[('admin', 'Admin'), ('stud', 'Student')], validators=[DataRequired()])\n submit = SubmitField('Sign Up >>')\n\n\nclass LoginForm(FlaskForm):\n email = EmailField('Email Id*',validators=[DataRequired(), Email()])\n password = PasswordField('Password*', validators=[DataRequired()])\n design = SelectField(u'Designation*', choices=[('admin', 'Admin'), ('stud', 'Student')], validators=[DataRequired()])\n submit = SubmitField('Login >>')\n\n\nclass ForgotForm(FlaskForm):\n email = EmailField('Email Id*',validators=[DataRequired(), Email()])\n design = SelectField(u'Designation*', choices=[('admin', 'Admin'), ('stud', 'Student')], validators=[DataRequired()])\n submit = SubmitField('Change your Password')\n\n\nclass changepassword(FlaskForm):\n password = PasswordField('Enter Password', validators=[DataRequired()])\n submit = SubmitField('Change Password')\n\n\nclass ComplaintForm(FlaskForm):\n fname = StringField('Full Name *', validators=[DataRequired()])\n email = EmailField('Email Id*',validators=[DataRequired(), Email()])\n date = DateField('Date', validators=[DataRequired()])\n degree = SelectField(u'Degree*', choices=[('bachelors', 'Bachelors'), ('masters', 'Masters')], validators=[DataRequired()])\n semester = 
SelectField(u'Semester*', choices=[('first', 'First'), ('second', 'Second'), ('third', 'Third'), ('fourth', 'Fourth'), ('fifth', 'Fifth'), ('sixth', 'Sixth'), ('seventh', 'Seventh'), ('eighth', 'Eighth')], validators=[DataRequired()])\n complaintcategory = SelectField(u'Complain Category*', choices=[('infrastructure', 'Infrastructure'), ('accounts', 'Accounts'), ('academics', 'Academics'), ('management', 'Management'), ('faculty', 'Faculty'), ('library', 'Library')], validators=[DataRequired()])\n message = TextAreaField('Enter Complain Details', validators=[DataRequired(), Length(max=100)])\n #file = MultipleFileField(u'Upload File')\n submit = SubmitField('Submit')\n\n\n\n\nclass complaint_status(FlaskForm):\n status = SelectField(u'Complaint Status', choices=[('Pending', 'Pending'), ('Under Review', 'Under Review'), ('Resolved', 'Resolved')])\n submit = SubmitField('Update')\n",
"step-ids": [
8,
10,
11,
12,
14
]
}
|
[
8,
10,
11,
12,
14
] |
#!/usr/bin/env python3
import json
import sqlite3
import sys
from scorelib import *
#from .scorelib import *
from collections import defaultdict
def __map2list(mp):
    """Flatten a 1-based {position: value} mapping into a list.

    Positions absent from ``mp`` are left as ``None``; an empty mapping
    yields an empty list.
    """
    if not mp:
        return []
    flattened = [None] * max(mp)
    for position, value in mp.items():
        flattened[position - 1] = value
    return flattened
def __translate_keys(translation_schema):
    """Build a ``default`` hook for json.dump.

    ``translation_schema`` maps a class to an {attribute -> output key}
    rename table. Objects of a listed class are serialized with their
    attributes renamed accordingly; any other object falls back to its
    raw ``__dict__``.
    """
    def translate(obj):
        rename = translation_schema.get(type(obj))
        if rename is None:
            return obj.__dict__
        return {rename.get(attr, attr): value
                for attr, value in obj.__dict__.items()}
    return translate
def __to_bool(val):
    """Translate a 'Y'/'N' database flag into True/False.

    Any other value (including NULL/None) maps to None.
    """
    if val == 'Y':
        return True
    if val == 'N':
        return False
    return None
def search(substr):
    """Dump, as JSON on stdout, every print by composers matching ``substr``.

    Looks up all persons in scorelib.dat whose name contains ``substr``
    (via SQLite LIKE), then for each of their scores assembles the full
    object graph (voices, composers, editions, editors, prints) and emits
    a mapping of composer name -> list of print records.
    """
    connection = sqlite3.connect('scorelib.dat')
    # composer name -> list of print records (idiomatic defaultdict(list)).
    result = defaultdict(list)
    for person_id, person_name in connection.execute(r"SELECT id, name FROM person WHERE name LIKE '%' || ? || '%'", (substr, )):
        root_composer = person_name
        for (score_id, score_name, score_genre, score_incipit, score_key, score_year) in connection.execute(r"SELECT score.id, score.name, score.genre, score.incipit, score.key, score.year FROM score JOIN score_author a on score.id = a.score WHERE a.composer = ?", (person_id, )):
            # Voices carry a 1-based `number`; collect keyed by it, then
            # flatten with __map2list so gaps become None.
            voicesMap = {}
            for voice_name, voice_range, voice_number in connection.execute(r"SELECT name, range, number FROM voice WHERE score = ?", (score_id, )):
                voicesMap[voice_number] = Voice(voice_name, voice_range)
            composers = []
            for c_name, c_born, c_died in connection.execute(r"SELECT person.name, person.born, person.died FROM score_author JOIN person ON score_author.composer = person.id WHERE score_author.score = ?", (score_id,)):
                composers.append(Person(c_name, c_born, c_died))
            composition = Composition(score_name, score_incipit, score_key, score_genre, score_year, __map2list(voicesMap), composers)
            for edition_id, edition_name, edition_year in connection.execute(r"SELECT id, name, year FROM edition WHERE score = ?", (score_id,)):
                editors = []
                for e_name, e_born, e_died in connection.execute(r"SELECT person.name, person.born, person.died FROM edition_author JOIN person ON edition_author.editor = person.id WHERE edition_author.edition = ?", (edition_id,)):
                    editors.append(Person(e_name, e_born, e_died))
                edition = Edition(composition, editors, edition_name)
                for print_id, print_part in connection.execute(r"SELECT id, partiture FROM print WHERE edition = ?", (edition_id, )):
                    # Renamed from `print`, which shadowed the builtin.
                    print_record = Print(edition, print_id, __to_bool(print_part))
                    result[root_composer].append({"Print Number": print_record.print_id,
                                                  "Composer": composition.authors,
                                                  "Title": composition.name,
                                                  "Genre": composition.genre,
                                                  "Key": composition.key,
                                                  "Composition Year": composition.year,
                                                  "Edition": edition.name,
                                                  "Voices": __map2list(voicesMap),
                                                  "Editor": edition.authors,
                                                  "Partiture": print_record.partiture,
                                                  "Incipit": composition.incipit})
    # All cursors have been fully consumed; release the database handle.
    connection.close()
    json.dump(result,
              sys.stdout,
              default=__translate_keys({Print: {"print_id": "Print Number", "partiture": "Partiture", "edition": "Edition"},
                                        Edition: {"authors": "Editors", "name": "Name", "composition": "Composition"},
                                        Composition: {"name": "Name", "incipit": "Incipit", "key": "Key", "genre": "Genre", "year": "Composition Year", "voices": "Voices", "authors": "Composer"},
                                        Voice: {"name": "Name", "range": "Range"},
                                        Person: {"name": "Name", "born": "Born", "died": "Died"}}),
              indent=4,
              ensure_ascii=False)
    return
def main(args):
text = ' '.join(args).strip()
if text == '':
json.dump({}, sys.stdout)
return
search(text)
main(sys.argv[1:])
|
normal
|
{
"blob_id": "9f6e5c219f7b668720b5379dde912ff22ef434d1",
"index": 9072,
"step-1": "<mask token>\n\n\ndef __map2list(mp):\n if len(mp.keys()) == 0:\n return []\n lst = [None] * max(mp.keys())\n for idx in mp.keys():\n lst[idx - 1] = mp[idx]\n return lst\n\n\ndef __translate_keys(translation_schema):\n\n def f(obj):\n schema = translation_schema.get(type(obj))\n if schema is None:\n return obj.__dict__\n res = {}\n for key in obj.__dict__:\n res[schema.get(key, key)] = obj.__dict__[key]\n return res\n return f\n\n\ndef __to_bool(val):\n if val == 'Y':\n return True\n elif val == 'N':\n return False\n else:\n return None\n\n\ndef search(substr):\n connection = sqlite3.connect('scorelib.dat')\n result = defaultdict(lambda : [])\n for person_id, person_name in connection.execute(\n \"SELECT id, name FROM person WHERE name LIKE '%' || ? || '%'\", (\n substr,)):\n root_composer = person_name\n for score_id, score_name, score_genre, score_incipit, score_key, score_year in connection.execute(\n 'SELECT score.id, score.name, score.genre, score.incipit, score.key, score.year FROM score JOIN score_author a on score.id = a.score WHERE a.composer = ?'\n , (person_id,)):\n voicesMap = {}\n for voice_name, voice_range, voice_number in connection.execute(\n 'SELECT name, range, number FROM voice WHERE score = ?', (\n score_id,)):\n voicesMap[voice_number] = Voice(voice_name, voice_range)\n composers = []\n for c_name, c_born, c_died in connection.execute(\n 'SELECT person.name, person.born, person.died FROM score_author JOIN person ON score_author.composer = person.id WHERE score_author.score = ?'\n , (score_id,)):\n composers.append(Person(c_name, c_born, c_died))\n composition = Composition(score_name, score_incipit, score_key,\n score_genre, score_year, __map2list(voicesMap), composers)\n for edition_id, edition_name, edition_year in connection.execute(\n 'SELECT id, name, year FROM edition WHERE score = ?', (\n score_id,)):\n editors = []\n for e_name, e_born, e_died in connection.execute(\n 'SELECT person.name, person.born, person.died FROM 
edition_author JOIN person ON edition_author.editor = person.id WHERE edition_author.edition = ?'\n , (edition_id,)):\n editors.append(Person(e_name, e_born, e_died))\n edition = Edition(composition, editors, edition_name)\n for print_id, print_part in connection.execute(\n 'SELECT id, partiture FROM print WHERE edition = ?', (\n edition_id,)):\n print = Print(edition, print_id, __to_bool(print_part))\n result[root_composer].append({'Print Number': print.\n print_id, 'Composer': composition.authors, 'Title':\n composition.name, 'Genre': composition.genre, 'Key':\n composition.key, 'Composition Year': composition.\n year, 'Edition': edition.name, 'Voices': __map2list\n (voicesMap), 'Editor': edition.authors, 'Partiture':\n print.partiture, 'Incipit': composition.incipit})\n json.dump(result, sys.stdout, default=__translate_keys({Print: {\n 'print_id': 'Print Number', 'partiture': 'Partiture', 'edition':\n 'Edition'}, Edition: {'authors': 'Editors', 'name': 'Name',\n 'composition': 'Composition'}, Composition: {'name': 'Name',\n 'incipit': 'Incipit', 'key': 'Key', 'genre': 'Genre', 'year':\n 'Composition Year', 'voices': 'Voices', 'authors': 'Composer'},\n Voice: {'name': 'Name', 'range': 'Range'}, Person: {'name': 'Name',\n 'born': 'Born', 'died': 'Died'}}), indent=4, ensure_ascii=False)\n return\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef __map2list(mp):\n if len(mp.keys()) == 0:\n return []\n lst = [None] * max(mp.keys())\n for idx in mp.keys():\n lst[idx - 1] = mp[idx]\n return lst\n\n\ndef __translate_keys(translation_schema):\n\n def f(obj):\n schema = translation_schema.get(type(obj))\n if schema is None:\n return obj.__dict__\n res = {}\n for key in obj.__dict__:\n res[schema.get(key, key)] = obj.__dict__[key]\n return res\n return f\n\n\ndef __to_bool(val):\n if val == 'Y':\n return True\n elif val == 'N':\n return False\n else:\n return None\n\n\ndef search(substr):\n connection = sqlite3.connect('scorelib.dat')\n result = defaultdict(lambda : [])\n for person_id, person_name in connection.execute(\n \"SELECT id, name FROM person WHERE name LIKE '%' || ? || '%'\", (\n substr,)):\n root_composer = person_name\n for score_id, score_name, score_genre, score_incipit, score_key, score_year in connection.execute(\n 'SELECT score.id, score.name, score.genre, score.incipit, score.key, score.year FROM score JOIN score_author a on score.id = a.score WHERE a.composer = ?'\n , (person_id,)):\n voicesMap = {}\n for voice_name, voice_range, voice_number in connection.execute(\n 'SELECT name, range, number FROM voice WHERE score = ?', (\n score_id,)):\n voicesMap[voice_number] = Voice(voice_name, voice_range)\n composers = []\n for c_name, c_born, c_died in connection.execute(\n 'SELECT person.name, person.born, person.died FROM score_author JOIN person ON score_author.composer = person.id WHERE score_author.score = ?'\n , (score_id,)):\n composers.append(Person(c_name, c_born, c_died))\n composition = Composition(score_name, score_incipit, score_key,\n score_genre, score_year, __map2list(voicesMap), composers)\n for edition_id, edition_name, edition_year in connection.execute(\n 'SELECT id, name, year FROM edition WHERE score = ?', (\n score_id,)):\n editors = []\n for e_name, e_born, e_died in connection.execute(\n 'SELECT person.name, person.born, person.died FROM 
edition_author JOIN person ON edition_author.editor = person.id WHERE edition_author.edition = ?'\n , (edition_id,)):\n editors.append(Person(e_name, e_born, e_died))\n edition = Edition(composition, editors, edition_name)\n for print_id, print_part in connection.execute(\n 'SELECT id, partiture FROM print WHERE edition = ?', (\n edition_id,)):\n print = Print(edition, print_id, __to_bool(print_part))\n result[root_composer].append({'Print Number': print.\n print_id, 'Composer': composition.authors, 'Title':\n composition.name, 'Genre': composition.genre, 'Key':\n composition.key, 'Composition Year': composition.\n year, 'Edition': edition.name, 'Voices': __map2list\n (voicesMap), 'Editor': edition.authors, 'Partiture':\n print.partiture, 'Incipit': composition.incipit})\n json.dump(result, sys.stdout, default=__translate_keys({Print: {\n 'print_id': 'Print Number', 'partiture': 'Partiture', 'edition':\n 'Edition'}, Edition: {'authors': 'Editors', 'name': 'Name',\n 'composition': 'Composition'}, Composition: {'name': 'Name',\n 'incipit': 'Incipit', 'key': 'Key', 'genre': 'Genre', 'year':\n 'Composition Year', 'voices': 'Voices', 'authors': 'Composer'},\n Voice: {'name': 'Name', 'range': 'Range'}, Person: {'name': 'Name',\n 'born': 'Born', 'died': 'Died'}}), indent=4, ensure_ascii=False)\n return\n\n\ndef main(args):\n text = ' '.join(args).strip()\n if text == '':\n json.dump({}, sys.stdout)\n return\n search(text)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef __map2list(mp):\n if len(mp.keys()) == 0:\n return []\n lst = [None] * max(mp.keys())\n for idx in mp.keys():\n lst[idx - 1] = mp[idx]\n return lst\n\n\ndef __translate_keys(translation_schema):\n\n def f(obj):\n schema = translation_schema.get(type(obj))\n if schema is None:\n return obj.__dict__\n res = {}\n for key in obj.__dict__:\n res[schema.get(key, key)] = obj.__dict__[key]\n return res\n return f\n\n\ndef __to_bool(val):\n if val == 'Y':\n return True\n elif val == 'N':\n return False\n else:\n return None\n\n\ndef search(substr):\n connection = sqlite3.connect('scorelib.dat')\n result = defaultdict(lambda : [])\n for person_id, person_name in connection.execute(\n \"SELECT id, name FROM person WHERE name LIKE '%' || ? || '%'\", (\n substr,)):\n root_composer = person_name\n for score_id, score_name, score_genre, score_incipit, score_key, score_year in connection.execute(\n 'SELECT score.id, score.name, score.genre, score.incipit, score.key, score.year FROM score JOIN score_author a on score.id = a.score WHERE a.composer = ?'\n , (person_id,)):\n voicesMap = {}\n for voice_name, voice_range, voice_number in connection.execute(\n 'SELECT name, range, number FROM voice WHERE score = ?', (\n score_id,)):\n voicesMap[voice_number] = Voice(voice_name, voice_range)\n composers = []\n for c_name, c_born, c_died in connection.execute(\n 'SELECT person.name, person.born, person.died FROM score_author JOIN person ON score_author.composer = person.id WHERE score_author.score = ?'\n , (score_id,)):\n composers.append(Person(c_name, c_born, c_died))\n composition = Composition(score_name, score_incipit, score_key,\n score_genre, score_year, __map2list(voicesMap), composers)\n for edition_id, edition_name, edition_year in connection.execute(\n 'SELECT id, name, year FROM edition WHERE score = ?', (\n score_id,)):\n editors = []\n for e_name, e_born, e_died in connection.execute(\n 'SELECT person.name, person.born, person.died FROM 
edition_author JOIN person ON edition_author.editor = person.id WHERE edition_author.edition = ?'\n , (edition_id,)):\n editors.append(Person(e_name, e_born, e_died))\n edition = Edition(composition, editors, edition_name)\n for print_id, print_part in connection.execute(\n 'SELECT id, partiture FROM print WHERE edition = ?', (\n edition_id,)):\n print = Print(edition, print_id, __to_bool(print_part))\n result[root_composer].append({'Print Number': print.\n print_id, 'Composer': composition.authors, 'Title':\n composition.name, 'Genre': composition.genre, 'Key':\n composition.key, 'Composition Year': composition.\n year, 'Edition': edition.name, 'Voices': __map2list\n (voicesMap), 'Editor': edition.authors, 'Partiture':\n print.partiture, 'Incipit': composition.incipit})\n json.dump(result, sys.stdout, default=__translate_keys({Print: {\n 'print_id': 'Print Number', 'partiture': 'Partiture', 'edition':\n 'Edition'}, Edition: {'authors': 'Editors', 'name': 'Name',\n 'composition': 'Composition'}, Composition: {'name': 'Name',\n 'incipit': 'Incipit', 'key': 'Key', 'genre': 'Genre', 'year':\n 'Composition Year', 'voices': 'Voices', 'authors': 'Composer'},\n Voice: {'name': 'Name', 'range': 'Range'}, Person: {'name': 'Name',\n 'born': 'Born', 'died': 'Died'}}), indent=4, ensure_ascii=False)\n return\n\n\ndef main(args):\n text = ' '.join(args).strip()\n if text == '':\n json.dump({}, sys.stdout)\n return\n search(text)\n\n\nmain(sys.argv[1:])\n",
"step-4": "import json\nimport sqlite3\nimport sys\nfrom scorelib import *\nfrom collections import defaultdict\n\n\ndef __map2list(mp):\n if len(mp.keys()) == 0:\n return []\n lst = [None] * max(mp.keys())\n for idx in mp.keys():\n lst[idx - 1] = mp[idx]\n return lst\n\n\ndef __translate_keys(translation_schema):\n\n def f(obj):\n schema = translation_schema.get(type(obj))\n if schema is None:\n return obj.__dict__\n res = {}\n for key in obj.__dict__:\n res[schema.get(key, key)] = obj.__dict__[key]\n return res\n return f\n\n\ndef __to_bool(val):\n if val == 'Y':\n return True\n elif val == 'N':\n return False\n else:\n return None\n\n\ndef search(substr):\n connection = sqlite3.connect('scorelib.dat')\n result = defaultdict(lambda : [])\n for person_id, person_name in connection.execute(\n \"SELECT id, name FROM person WHERE name LIKE '%' || ? || '%'\", (\n substr,)):\n root_composer = person_name\n for score_id, score_name, score_genre, score_incipit, score_key, score_year in connection.execute(\n 'SELECT score.id, score.name, score.genre, score.incipit, score.key, score.year FROM score JOIN score_author a on score.id = a.score WHERE a.composer = ?'\n , (person_id,)):\n voicesMap = {}\n for voice_name, voice_range, voice_number in connection.execute(\n 'SELECT name, range, number FROM voice WHERE score = ?', (\n score_id,)):\n voicesMap[voice_number] = Voice(voice_name, voice_range)\n composers = []\n for c_name, c_born, c_died in connection.execute(\n 'SELECT person.name, person.born, person.died FROM score_author JOIN person ON score_author.composer = person.id WHERE score_author.score = ?'\n , (score_id,)):\n composers.append(Person(c_name, c_born, c_died))\n composition = Composition(score_name, score_incipit, score_key,\n score_genre, score_year, __map2list(voicesMap), composers)\n for edition_id, edition_name, edition_year in connection.execute(\n 'SELECT id, name, year FROM edition WHERE score = ?', (\n score_id,)):\n editors = []\n for e_name, e_born, 
e_died in connection.execute(\n 'SELECT person.name, person.born, person.died FROM edition_author JOIN person ON edition_author.editor = person.id WHERE edition_author.edition = ?'\n , (edition_id,)):\n editors.append(Person(e_name, e_born, e_died))\n edition = Edition(composition, editors, edition_name)\n for print_id, print_part in connection.execute(\n 'SELECT id, partiture FROM print WHERE edition = ?', (\n edition_id,)):\n print = Print(edition, print_id, __to_bool(print_part))\n result[root_composer].append({'Print Number': print.\n print_id, 'Composer': composition.authors, 'Title':\n composition.name, 'Genre': composition.genre, 'Key':\n composition.key, 'Composition Year': composition.\n year, 'Edition': edition.name, 'Voices': __map2list\n (voicesMap), 'Editor': edition.authors, 'Partiture':\n print.partiture, 'Incipit': composition.incipit})\n json.dump(result, sys.stdout, default=__translate_keys({Print: {\n 'print_id': 'Print Number', 'partiture': 'Partiture', 'edition':\n 'Edition'}, Edition: {'authors': 'Editors', 'name': 'Name',\n 'composition': 'Composition'}, Composition: {'name': 'Name',\n 'incipit': 'Incipit', 'key': 'Key', 'genre': 'Genre', 'year':\n 'Composition Year', 'voices': 'Voices', 'authors': 'Composer'},\n Voice: {'name': 'Name', 'range': 'Range'}, Person: {'name': 'Name',\n 'born': 'Born', 'died': 'Died'}}), indent=4, ensure_ascii=False)\n return\n\n\ndef main(args):\n text = ' '.join(args).strip()\n if text == '':\n json.dump({}, sys.stdout)\n return\n search(text)\n\n\nmain(sys.argv[1:])\n",
"step-5": "#!/usr/bin/env python3\r\nimport json\r\nimport sqlite3\r\nimport sys\r\nfrom scorelib import *\r\n#from .scorelib import *\r\nfrom collections import defaultdict\r\n\r\n\r\ndef __map2list(mp):\r\n if len(mp.keys()) == 0:\r\n return []\r\n lst = [None] * max(mp.keys())\r\n for idx in mp.keys():\r\n lst[idx-1] = mp[idx]\r\n return lst\r\n\r\ndef __translate_keys(translation_schema):\r\n def f(obj):\r\n schema = translation_schema.get(type(obj))\r\n if schema is None:\r\n return obj.__dict__\r\n res = {}\r\n for key in obj.__dict__:\r\n res[schema.get(key, key)] = obj.__dict__[key]\r\n return res\r\n return f\r\n\r\n\r\ndef __to_bool(val):\r\n if val == 'Y':\r\n return True\r\n elif val == 'N':\r\n return False\r\n else:\r\n return None\r\n\r\n\r\ndef search(substr):\r\n connection = sqlite3.connect('scorelib.dat')\r\n result = defaultdict(lambda: [])\r\n for person_id, person_name in connection.execute(r\"SELECT id, name FROM person WHERE name LIKE '%' || ? || '%'\", (substr, )):\r\n root_composer = person_name\r\n for (score_id, score_name, score_genre, score_incipit, score_key, score_year) in connection.execute(r\"SELECT score.id, score.name, score.genre, score.incipit, score.key, score.year FROM score JOIN score_author a on score.id = a.score WHERE a.composer = ?\", (person_id, )):\r\n voicesMap = {}\r\n for voice_name, voice_range, voice_number in connection.execute(r\"SELECT name, range, number FROM voice WHERE score = ?\", (score_id, )):\r\n voicesMap[voice_number] = Voice(voice_name, voice_range)\r\n composers = []\r\n for c_name, c_born, c_died in connection.execute(r\"SELECT person.name, person.born, person.died FROM score_author JOIN person ON score_author.composer = person.id WHERE score_author.score = ?\", (score_id,)):\r\n composers.append(Person(c_name, c_born, c_died))\r\n composition = Composition(score_name, score_incipit, score_key, score_genre, score_year, __map2list(voicesMap), composers)\r\n for edition_id, edition_name, edition_year 
in connection.execute(r\"SELECT id, name, year FROM edition WHERE score = ?\", (score_id,)):\r\n editors = []\r\n for e_name, e_born, e_died in connection.execute(r\"SELECT person.name, person.born, person.died FROM edition_author JOIN person ON edition_author.editor = person.id WHERE edition_author.edition = ?\", (edition_id,)):\r\n editors.append(Person(e_name, e_born, e_died))\r\n edition = Edition(composition, editors, edition_name)\r\n for print_id, print_part in connection.execute(r\"SELECT id, partiture FROM print WHERE edition = ?\", (edition_id, )):\r\n print = Print(edition, print_id, __to_bool(print_part))\r\n result[root_composer].append({\"Print Number\": print.print_id,\r\n \"Composer\": composition.authors,\r\n \"Title\": composition.name,\r\n \"Genre\": composition.genre,\r\n \"Key\": composition.key,\r\n \"Composition Year\": composition.year,\r\n \"Edition\": edition.name,\r\n \"Voices\": __map2list(voicesMap),\r\n \"Editor\": edition.authors,\r\n \"Partiture\": print.partiture,\r\n \"Incipit\": composition.incipit})\r\n json.dump(result,\r\n sys.stdout,\r\n default=__translate_keys({Print: {\"print_id\": \"Print Number\", \"partiture\": \"Partiture\", \"edition\": \"Edition\"},\r\n Edition: {\"authors\": \"Editors\", \"name\": \"Name\", \"composition\": \"Composition\"},\r\n Composition: {\"name\": \"Name\", \"incipit\": \"Incipit\", \"key\": \"Key\", \"genre\": \"Genre\", \"year\": \"Composition Year\", \"voices\": \"Voices\", \"authors\": \"Composer\"},\r\n Voice: {\"name\": \"Name\", \"range\": \"Range\"},\r\n Person: {\"name\": \"Name\", \"born\": \"Born\", \"died\": \"Died\"}}),\r\n indent=4,\r\n ensure_ascii=False)\r\n return\r\n\r\n\r\ndef main(args):\r\n text = ' '.join(args).strip()\r\n if text == '':\r\n json.dump({}, sys.stdout)\r\n return\r\n search(text)\r\n\r\n\r\nmain(sys.argv[1:])",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
<|reserved_special_token_0|>
class GetDefaultUsers(APIView):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class GetSpecificUser(APIView):
permission_classes = [permissions.IsAuthenticated]
def post(self, request, id=None, *args, **kwargs):
try:
queryset = users.objects.get(id=id)
except user.DoesNotExist:
return JsonResponse({'error': 'user does not exits'}, status=400)
try:
profile_queryset = profile.objects.get(user=queryset)
except profile.DoesNotExist:
return JsonResponse({'error': 'user does not have a profile'},
status=400)
serializer_class = UserSerializer(queryset)
serializer_class_profile = ProfileSerializer(profile_queryset)
return Response({'user': serializer_class.data, 'profile':
serializer_class_profile.data}, status=200)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class GetDefaultUsers(APIView):
<|reserved_special_token_0|>
def post(self, request, *args, **kwargs):
user = self.request.user
userers = users.objects.all()[:5]
users_to_pass = []
for user_now in userers:
user_id = user.id
check_if_already_followed = Follow.objects.filter(user_id=
user_now.id).filter(follower_id=user.id)
if len(check_if_already_followed) == 0:
users_to_pass.append(user_now)
serilizer_class_many = UserSerializer(users_to_pass, many=True)
serilizer_class = UserSerializer(user)
return Response({'users': serilizer_class_many.data, 'user':
serilizer_class.data})
class GetSpecificUser(APIView):
permission_classes = [permissions.IsAuthenticated]
def post(self, request, id=None, *args, **kwargs):
try:
queryset = users.objects.get(id=id)
except user.DoesNotExist:
return JsonResponse({'error': 'user does not exits'}, status=400)
try:
profile_queryset = profile.objects.get(user=queryset)
except profile.DoesNotExist:
return JsonResponse({'error': 'user does not have a profile'},
status=400)
serializer_class = UserSerializer(queryset)
serializer_class_profile = ProfileSerializer(profile_queryset)
return Response({'user': serializer_class.data, 'profile':
serializer_class_profile.data}, status=200)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class GetDefaultUsers(APIView):
permission_classes = [permissions.IsAuthenticated]
def post(self, request, *args, **kwargs):
user = self.request.user
userers = users.objects.all()[:5]
users_to_pass = []
for user_now in userers:
user_id = user.id
check_if_already_followed = Follow.objects.filter(user_id=
user_now.id).filter(follower_id=user.id)
if len(check_if_already_followed) == 0:
users_to_pass.append(user_now)
serilizer_class_many = UserSerializer(users_to_pass, many=True)
serilizer_class = UserSerializer(user)
return Response({'users': serilizer_class_many.data, 'user':
serilizer_class.data})
class GetSpecificUser(APIView):
permission_classes = [permissions.IsAuthenticated]
def post(self, request, id=None, *args, **kwargs):
try:
queryset = users.objects.get(id=id)
except user.DoesNotExist:
return JsonResponse({'error': 'user does not exits'}, status=400)
try:
profile_queryset = profile.objects.get(user=queryset)
except profile.DoesNotExist:
return JsonResponse({'error': 'user does not have a profile'},
status=400)
serializer_class = UserSerializer(queryset)
serializer_class_profile = ProfileSerializer(profile_queryset)
return Response({'user': serializer_class.data, 'profile':
serializer_class_profile.data}, status=200)
<|reserved_special_token_1|>
from rest_framework.views import APIView
from .serializers import UserSerializer
from rest_framework import permissions
from .models import users
from rest_framework.response import Response
from django.http import JsonResponse
from rest_framework import viewsets
from profiles.models import profile
from profiles.serializers import ProfileSerializer
from follows.models import Follow
class GetDefaultUsers(APIView):
permission_classes = [permissions.IsAuthenticated]
def post(self, request, *args, **kwargs):
user = self.request.user
userers = users.objects.all()[:5]
users_to_pass = []
for user_now in userers:
user_id = user.id
check_if_already_followed = Follow.objects.filter(user_id=
user_now.id).filter(follower_id=user.id)
if len(check_if_already_followed) == 0:
users_to_pass.append(user_now)
serilizer_class_many = UserSerializer(users_to_pass, many=True)
serilizer_class = UserSerializer(user)
return Response({'users': serilizer_class_many.data, 'user':
serilizer_class.data})
class GetSpecificUser(APIView):
permission_classes = [permissions.IsAuthenticated]
def post(self, request, id=None, *args, **kwargs):
try:
queryset = users.objects.get(id=id)
except user.DoesNotExist:
return JsonResponse({'error': 'user does not exits'}, status=400)
try:
profile_queryset = profile.objects.get(user=queryset)
except profile.DoesNotExist:
return JsonResponse({'error': 'user does not have a profile'},
status=400)
serializer_class = UserSerializer(queryset)
serializer_class_profile = ProfileSerializer(profile_queryset)
return Response({'user': serializer_class.data, 'profile':
serializer_class_profile.data}, status=200)
<|reserved_special_token_1|>
from rest_framework.views import APIView
from .serializers import UserSerializer
from rest_framework import permissions
from .models import users
from rest_framework.response import Response
from django.http import JsonResponse
from rest_framework import viewsets
from profiles.models import profile
from profiles.serializers import ProfileSerializer
from follows.models import Follow
class GetDefaultUsers(APIView):
permission_classes =[
permissions.IsAuthenticated
]
def post(self,request, *args, **kwargs):
user = self.request.user
userers = users.objects.all()[:5]
users_to_pass = []
for user_now in userers:
user_id = user.id
check_if_already_followed = Follow.objects.filter(user_id = user_now.id).filter(follower_id = user.id)
if len(check_if_already_followed) == 0:
users_to_pass.append(user_now)
serilizer_class_many = UserSerializer(users_to_pass, many=True)
serilizer_class = UserSerializer(user)
return Response({
'users':serilizer_class_many.data,
"user":serilizer_class.data
})
class GetSpecificUser(APIView):
permission_classes =[
permissions.IsAuthenticated
]
def post(self, request,id=None, *args, **kwargs):
try:
queryset = users.objects.get(id=id)
except user.DoesNotExist:
return JsonResponse({'error':"user does not exits"}, status = 400)
try:
profile_queryset = profile.objects.get(user = queryset)
except profile.DoesNotExist:
return JsonResponse({'error':"user does not have a profile"}, status = 400)
serializer_class = UserSerializer(queryset)
serializer_class_profile = ProfileSerializer(profile_queryset)
return Response(
{'user':serializer_class.data,
'profile':serializer_class_profile.data
},
status=200)
|
flexible
|
{
"blob_id": "c5a7f269f579bd1960afa4f700b5c3436ac6d91a",
"index": 2733,
"step-1": "<mask token>\n\n\nclass GetDefaultUsers(APIView):\n <mask token>\n <mask token>\n\n\nclass GetSpecificUser(APIView):\n permission_classes = [permissions.IsAuthenticated]\n\n def post(self, request, id=None, *args, **kwargs):\n try:\n queryset = users.objects.get(id=id)\n except user.DoesNotExist:\n return JsonResponse({'error': 'user does not exits'}, status=400)\n try:\n profile_queryset = profile.objects.get(user=queryset)\n except profile.DoesNotExist:\n return JsonResponse({'error': 'user does not have a profile'},\n status=400)\n serializer_class = UserSerializer(queryset)\n serializer_class_profile = ProfileSerializer(profile_queryset)\n return Response({'user': serializer_class.data, 'profile':\n serializer_class_profile.data}, status=200)\n",
"step-2": "<mask token>\n\n\nclass GetDefaultUsers(APIView):\n <mask token>\n\n def post(self, request, *args, **kwargs):\n user = self.request.user\n userers = users.objects.all()[:5]\n users_to_pass = []\n for user_now in userers:\n user_id = user.id\n check_if_already_followed = Follow.objects.filter(user_id=\n user_now.id).filter(follower_id=user.id)\n if len(check_if_already_followed) == 0:\n users_to_pass.append(user_now)\n serilizer_class_many = UserSerializer(users_to_pass, many=True)\n serilizer_class = UserSerializer(user)\n return Response({'users': serilizer_class_many.data, 'user':\n serilizer_class.data})\n\n\nclass GetSpecificUser(APIView):\n permission_classes = [permissions.IsAuthenticated]\n\n def post(self, request, id=None, *args, **kwargs):\n try:\n queryset = users.objects.get(id=id)\n except user.DoesNotExist:\n return JsonResponse({'error': 'user does not exits'}, status=400)\n try:\n profile_queryset = profile.objects.get(user=queryset)\n except profile.DoesNotExist:\n return JsonResponse({'error': 'user does not have a profile'},\n status=400)\n serializer_class = UserSerializer(queryset)\n serializer_class_profile = ProfileSerializer(profile_queryset)\n return Response({'user': serializer_class.data, 'profile':\n serializer_class_profile.data}, status=200)\n",
"step-3": "<mask token>\n\n\nclass GetDefaultUsers(APIView):\n permission_classes = [permissions.IsAuthenticated]\n\n def post(self, request, *args, **kwargs):\n user = self.request.user\n userers = users.objects.all()[:5]\n users_to_pass = []\n for user_now in userers:\n user_id = user.id\n check_if_already_followed = Follow.objects.filter(user_id=\n user_now.id).filter(follower_id=user.id)\n if len(check_if_already_followed) == 0:\n users_to_pass.append(user_now)\n serilizer_class_many = UserSerializer(users_to_pass, many=True)\n serilizer_class = UserSerializer(user)\n return Response({'users': serilizer_class_many.data, 'user':\n serilizer_class.data})\n\n\nclass GetSpecificUser(APIView):\n permission_classes = [permissions.IsAuthenticated]\n\n def post(self, request, id=None, *args, **kwargs):\n try:\n queryset = users.objects.get(id=id)\n except user.DoesNotExist:\n return JsonResponse({'error': 'user does not exits'}, status=400)\n try:\n profile_queryset = profile.objects.get(user=queryset)\n except profile.DoesNotExist:\n return JsonResponse({'error': 'user does not have a profile'},\n status=400)\n serializer_class = UserSerializer(queryset)\n serializer_class_profile = ProfileSerializer(profile_queryset)\n return Response({'user': serializer_class.data, 'profile':\n serializer_class_profile.data}, status=200)\n",
"step-4": "from rest_framework.views import APIView\nfrom .serializers import UserSerializer\nfrom rest_framework import permissions\nfrom .models import users\nfrom rest_framework.response import Response\nfrom django.http import JsonResponse\nfrom rest_framework import viewsets\nfrom profiles.models import profile\nfrom profiles.serializers import ProfileSerializer\nfrom follows.models import Follow\n\n\nclass GetDefaultUsers(APIView):\n permission_classes = [permissions.IsAuthenticated]\n\n def post(self, request, *args, **kwargs):\n user = self.request.user\n userers = users.objects.all()[:5]\n users_to_pass = []\n for user_now in userers:\n user_id = user.id\n check_if_already_followed = Follow.objects.filter(user_id=\n user_now.id).filter(follower_id=user.id)\n if len(check_if_already_followed) == 0:\n users_to_pass.append(user_now)\n serilizer_class_many = UserSerializer(users_to_pass, many=True)\n serilizer_class = UserSerializer(user)\n return Response({'users': serilizer_class_many.data, 'user':\n serilizer_class.data})\n\n\nclass GetSpecificUser(APIView):\n permission_classes = [permissions.IsAuthenticated]\n\n def post(self, request, id=None, *args, **kwargs):\n try:\n queryset = users.objects.get(id=id)\n except user.DoesNotExist:\n return JsonResponse({'error': 'user does not exits'}, status=400)\n try:\n profile_queryset = profile.objects.get(user=queryset)\n except profile.DoesNotExist:\n return JsonResponse({'error': 'user does not have a profile'},\n status=400)\n serializer_class = UserSerializer(queryset)\n serializer_class_profile = ProfileSerializer(profile_queryset)\n return Response({'user': serializer_class.data, 'profile':\n serializer_class_profile.data}, status=200)\n",
"step-5": "from rest_framework.views import APIView\nfrom .serializers import UserSerializer\nfrom rest_framework import permissions\nfrom .models import users\nfrom rest_framework.response import Response\nfrom django.http import JsonResponse\nfrom rest_framework import viewsets\nfrom profiles.models import profile\nfrom profiles.serializers import ProfileSerializer\nfrom follows.models import Follow\n\n\nclass GetDefaultUsers(APIView):\n permission_classes =[\n permissions.IsAuthenticated\n ]\n \n def post(self,request, *args, **kwargs):\n user = self.request.user\n userers = users.objects.all()[:5]\n users_to_pass = []\n for user_now in userers:\n user_id = user.id\n check_if_already_followed = Follow.objects.filter(user_id = user_now.id).filter(follower_id = user.id)\n if len(check_if_already_followed) == 0:\n users_to_pass.append(user_now)\n \n serilizer_class_many = UserSerializer(users_to_pass, many=True)\n serilizer_class = UserSerializer(user)\n return Response({\n 'users':serilizer_class_many.data,\n \"user\":serilizer_class.data\n })\n \nclass GetSpecificUser(APIView):\n permission_classes =[\n permissions.IsAuthenticated\n ]\n def post(self, request,id=None, *args, **kwargs):\n try:\n queryset = users.objects.get(id=id)\n except user.DoesNotExist:\n return JsonResponse({'error':\"user does not exits\"}, status = 400)\n try:\n profile_queryset = profile.objects.get(user = queryset)\n except profile.DoesNotExist:\n return JsonResponse({'error':\"user does not have a profile\"}, status = 400)\n \n serializer_class = UserSerializer(queryset)\n serializer_class_profile = ProfileSerializer(profile_queryset)\n \n return Response(\n {'user':serializer_class.data,\n 'profile':serializer_class_profile.data \n },\n status=200)\n \n ",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
admin.site.register(username)
<|reserved_special_token_1|>
from django.contrib import admin
from get_my_tweets.models import username
# Expose the ``username`` model in the Django admin interface so records
# can be created and edited through the admin site.
admin.site.register(username)
|
flexible
|
{
"blob_id": "84ece5d1a9e38b83a5b60052fc3ab089c498d2fc",
"index": 9147,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nadmin.site.register(username)\n",
"step-3": "from django.contrib import admin\nfrom get_my_tweets.models import username\nadmin.site.register(username)\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
#!/bin/python3
# TODO: implement the stack O(N) version
'''
Naive: O(N^3) or sum_{k=1...N}( O(N^2 (N-K)) )
for each size N
for each window of size N in the array
traverse the window to find the max
Naive with heap: O(N^2 log N)
for each size N O(N)
traverse array and accumulate window of size N O(N log N)
find max O(1)
DP:
Notice that min(W, p), the min size for window of size W and at position p, is
equal to min(min(W - 1, p), min(W - 1, p + 1)). Therefore, DP with these
tables can reduce the size of the problem to O(W^2) ~= O(N^2). Is this good
enough? No.
Domination windows:
Let us say that i dominates a contiguous range of n values if it's lower than
all n of its neighboring values. This means that i will show up as a min window
when considering window sizes of up to size n. We want to find the largest i
such that it domaintes other numbers in a window of size n. Now how to find this
efficiently? If we iterate through each i and compare it to its n neighbors,
that will also be O(N^2) time.
Start with lowest number and 1-dimensional flood fill. This will take O(N^2)
time in the worst case though.
However, you don't actually have to perform the flood fill. Instead, we can just
use the coordinates of lower numbers and perform something like binary search
to find the closest coordinates to a given coordinate in O(log N) time.
Overall this means that we iterate through each number, starting from the
lowest, and perform O(log N) time binary searches to find the boundaries over
which this element i dominates. Total time is O(N log N).
'''
import math
import os
import random
import re
import sys
from collections import defaultdict
from heapq import heappush, heappop
from bisect import insort_left
# Complete the riddle function below.
def riddle(lst):
    '''
    For every window size w in 1..len(lst), return the maximum over all
    windows of size w of the minimum element in that window.

    O(N) monotonic-stack algorithm (this resolves the TODO above; the old
    version used sorted insertion + binary search in O(N log N)):

    Each value v "dominates" the maximal window in which it is the minimum.
    That window is bounded by the nearest strictly smaller element on each
    side, found for all positions in one left-to-right and one right-to-left
    pass with a monotonically increasing index stack. A value dominating a
    window of width w is a candidate answer for every window size <= w, so
    after recording the best candidate per exact width we take a suffix
    maximum from the largest width down.

    Ties: the stack pops on >=, so equal values see through each other and
    each member of an equal run gets the full run's span, which is correct
    because the run's minimum equals each member.

    :param lst: sequence of comparable numbers (may be empty)
    :returns: list of answers, index 0 holding the answer for window size 1
    '''
    n = len(lst)
    if n == 0:
        return []

    # left[i]/right[i]: index of the nearest strictly smaller element on
    # each side of i (-1 / n when none exists).
    left = [-1] * n
    right = [n] * n

    stack = []
    for i, v in enumerate(lst):
        while stack and lst[stack[-1]] >= v:
            stack.pop()
        left[i] = stack[-1] if stack else -1
        stack.append(i)

    stack = []
    for i in range(n - 1, -1, -1):
        while stack and lst[stack[-1]] >= lst[i]:
            stack.pop()
        right[i] = stack[-1] if stack else n
        stack.append(i)

    # Best dominating value for each exact window width.
    best = [-float('inf')] * (n + 1)
    for i in range(n):
        w = right[i] - left[i] - 1
        best[w] = max(best[w], lst[i])

    # A window of size w can reuse any candidate that dominates size > w.
    for w in range(n - 1, 0, -1):
        best[w] = max(best[w], best[w + 1])

    return best[1:]
def bsearch(lst, target):
    '''
    Binary search over the sorted list *lst*, returning the index at which
    *target* should be inserted. When an equal element is found, the slot
    just to its right is returned (not that it should matter for callers
    that never search for an element already present).
    '''
    lo, hi = 0, len(lst)
    while lo < hi:
        pivot = (lo + hi) // 2
        value = lst[pivot]
        if value < target:
            lo = pivot + 1
        elif value > target:
            hi = pivot
        else:
            # Exact hit: insert to the right of the matching element.
            return pivot + 1
    return lo
def riddle_dp(arr):
    '''
    Quadratic DP reference solution; too slow for the large test cases.
    See `riddle` for the fast version.

    Builds min(w, s) -- the minimum of the window of size w starting at
    s -- from min(w-1, s) and min(w-1, s+1), then takes the max per size.
    '''
    n = len(arr)
    # Seed the size-1 windows straight from the input values.
    window_min = {(1, s): value for s, value in enumerate(arr)}
    for size in range(2, n + 1):
        for start in range(n - size + 1):
            window_min[size, start] = min(window_min[size - 1, start],
                                          window_min[size - 1, start + 1])
    return [max(window_min[size, start] for start in range(n - size + 1))
            for size in range(1, n + 1)]
if __name__ == '__main__':
    # HackerRank harness: read stdin, solve, write the answers to the
    # file named by the OUTPUT_PATH environment variable.
    out_file = open(os.environ['OUTPUT_PATH'], 'w')
    _declared_len = int(input())  # element count; the split below determines the actual size
    values = [int(token) for token in input().rstrip().split()]
    answer = riddle(values)
    out_file.write(' '.join(str(v) for v in answer))
    out_file.write('\n')
    out_file.close()
|
normal
|
{
"blob_id": "dce7fd0c9ed8e1d433f9131a8d137c8dcca4ac56",
"index": 8307,
"step-1": "<mask token>\n\n\ndef riddle(lst):\n \"\"\"\n Holy fuck.\n\n Better summary than above of what's happening:\n\n Define an value `v` in the list to dominate a range of size `n`, including `v`\n itself, if `v` is smaller than all other numbers in this contiguous range.\n Define `v`'s \"dominating window\" to be the largest such range. If `v` has a\n dominating window of size `n`, then it must show up as a value when we take\n minimums of size `w`. Therefore, to find the maximum of all such minimum\n windows, we only need to find the maximum `v` which dominates a range of size\n `n` or greater, for each `n` between 1 and `N`.\n\n To do this, the naive algorithm is to, for each number, flood fill in each\n direction until you hit a number smaller than itself. However, we can instead\n start with the smallest number, and keep a list of indices which we have\n already processed, that we know is smaller than the number we're processing.\n Using binary search, we can find the interval indices in which the current\n index lies, and find the bounding interval in O(log N) time. Repeat for each\n of `n` numbers for a total time complexity of O(N log N).\n\n Finally, for each window size `w`, find the maximum `v` that dominates a range\n of size `n` or larger.\n\n It seems like this is not the best solution though. 
There is a O(N) solution\n using stacks.\n \"\"\"\n max_by_w_size = {w: (-float('inf')) for w in range(1, len(lst) + 1)}\n bounding_indices = [-1, len(lst)]\n sorted_lst = sorted(enumerate(lst), key=lambda x: x[1])\n for i, value in sorted_lst:\n r_index = bsearch(bounding_indices, i)\n l_index = r_index - 1\n l_point = bounding_indices[l_index]\n r_point = bounding_indices[r_index]\n w = r_point - (l_point + 1)\n assert w > 0\n max_by_w_size[w] = max(max_by_w_size[w], value)\n insort_left(bounding_indices, i)\n m = -float('inf')\n maxes = []\n for w in reversed(range(1, len(lst) + 1)):\n m = max(m, max_by_w_size[w])\n maxes.append(m)\n return reversed(maxes)\n\n\ndef bsearch(lst, target):\n i, j = 0, len(lst)\n while i < j:\n mid = (i + j) // 2\n if lst[mid] == target:\n return mid + 1\n elif lst[mid] < target:\n i = mid + 1\n else:\n j = mid\n return i\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef riddle(lst):\n \"\"\"\n Holy fuck.\n\n Better summary than above of what's happening:\n\n Define an value `v` in the list to dominate a range of size `n`, including `v`\n itself, if `v` is smaller than all other numbers in this contiguous range.\n Define `v`'s \"dominating window\" to be the largest such range. If `v` has a\n dominating window of size `n`, then it must show up as a value when we take\n minimums of size `w`. Therefore, to find the maximum of all such minimum\n windows, we only need to find the maximum `v` which dominates a range of size\n `n` or greater, for each `n` between 1 and `N`.\n\n To do this, the naive algorithm is to, for each number, flood fill in each\n direction until you hit a number smaller than itself. However, we can instead\n start with the smallest number, and keep a list of indices which we have\n already processed, that we know is smaller than the number we're processing.\n Using binary search, we can find the interval indices in which the current\n index lies, and find the bounding interval in O(log N) time. Repeat for each\n of `n` numbers for a total time complexity of O(N log N).\n\n Finally, for each window size `w`, find the maximum `v` that dominates a range\n of size `n` or larger.\n\n It seems like this is not the best solution though. 
There is a O(N) solution\n using stacks.\n \"\"\"\n max_by_w_size = {w: (-float('inf')) for w in range(1, len(lst) + 1)}\n bounding_indices = [-1, len(lst)]\n sorted_lst = sorted(enumerate(lst), key=lambda x: x[1])\n for i, value in sorted_lst:\n r_index = bsearch(bounding_indices, i)\n l_index = r_index - 1\n l_point = bounding_indices[l_index]\n r_point = bounding_indices[r_index]\n w = r_point - (l_point + 1)\n assert w > 0\n max_by_w_size[w] = max(max_by_w_size[w], value)\n insort_left(bounding_indices, i)\n m = -float('inf')\n maxes = []\n for w in reversed(range(1, len(lst) + 1)):\n m = max(m, max_by_w_size[w])\n maxes.append(m)\n return reversed(maxes)\n\n\ndef bsearch(lst, target):\n i, j = 0, len(lst)\n while i < j:\n mid = (i + j) // 2\n if lst[mid] == target:\n return mid + 1\n elif lst[mid] < target:\n i = mid + 1\n else:\n j = mid\n return i\n\n\ndef riddle_dp(arr):\n \"\"\"\n Too slow to pass large test cases. See `riddle`.\n \"\"\"\n N = len(arr)\n min_w = {}\n for i, el in enumerate(arr):\n min_w[1, i] = el\n for w in range(2, len(arr) + 1):\n for i in range(N - w + 1):\n min_w[w, i] = min(min_w[w - 1, i], min_w[w - 1, i + 1])\n return [max(min_w[w, i] for i in range(N - w + 1)) for w in range(1, \n len(arr) + 1)]\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef riddle(lst):\n \"\"\"\n Holy fuck.\n\n Better summary than above of what's happening:\n\n Define an value `v` in the list to dominate a range of size `n`, including `v`\n itself, if `v` is smaller than all other numbers in this contiguous range.\n Define `v`'s \"dominating window\" to be the largest such range. If `v` has a\n dominating window of size `n`, then it must show up as a value when we take\n minimums of size `w`. Therefore, to find the maximum of all such minimum\n windows, we only need to find the maximum `v` which dominates a range of size\n `n` or greater, for each `n` between 1 and `N`.\n\n To do this, the naive algorithm is to, for each number, flood fill in each\n direction until you hit a number smaller than itself. However, we can instead\n start with the smallest number, and keep a list of indices which we have\n already processed, that we know is smaller than the number we're processing.\n Using binary search, we can find the interval indices in which the current\n index lies, and find the bounding interval in O(log N) time. Repeat for each\n of `n` numbers for a total time complexity of O(N log N).\n\n Finally, for each window size `w`, find the maximum `v` that dominates a range\n of size `n` or larger.\n\n It seems like this is not the best solution though. 
There is a O(N) solution\n using stacks.\n \"\"\"\n max_by_w_size = {w: (-float('inf')) for w in range(1, len(lst) + 1)}\n bounding_indices = [-1, len(lst)]\n sorted_lst = sorted(enumerate(lst), key=lambda x: x[1])\n for i, value in sorted_lst:\n r_index = bsearch(bounding_indices, i)\n l_index = r_index - 1\n l_point = bounding_indices[l_index]\n r_point = bounding_indices[r_index]\n w = r_point - (l_point + 1)\n assert w > 0\n max_by_w_size[w] = max(max_by_w_size[w], value)\n insort_left(bounding_indices, i)\n m = -float('inf')\n maxes = []\n for w in reversed(range(1, len(lst) + 1)):\n m = max(m, max_by_w_size[w])\n maxes.append(m)\n return reversed(maxes)\n\n\ndef bsearch(lst, target):\n i, j = 0, len(lst)\n while i < j:\n mid = (i + j) // 2\n if lst[mid] == target:\n return mid + 1\n elif lst[mid] < target:\n i = mid + 1\n else:\n j = mid\n return i\n\n\ndef riddle_dp(arr):\n \"\"\"\n Too slow to pass large test cases. See `riddle`.\n \"\"\"\n N = len(arr)\n min_w = {}\n for i, el in enumerate(arr):\n min_w[1, i] = el\n for w in range(2, len(arr) + 1):\n for i in range(N - w + 1):\n min_w[w, i] = min(min_w[w - 1, i], min_w[w - 1, i + 1])\n return [max(min_w[w, i] for i in range(N - w + 1)) for w in range(1, \n len(arr) + 1)]\n\n\nif __name__ == '__main__':\n fptr = open(os.environ['OUTPUT_PATH'], 'w')\n n = int(input())\n arr = list(map(int, input().rstrip().split()))\n res = riddle(arr)\n fptr.write(' '.join(map(str, res)))\n fptr.write('\\n')\n fptr.close()\n",
"step-4": "<mask token>\nimport math\nimport os\nimport random\nimport re\nimport sys\nfrom collections import defaultdict\nfrom heapq import heappush, heappop\nfrom bisect import insort_left\n\n\ndef riddle(lst):\n \"\"\"\n Holy fuck.\n\n Better summary than above of what's happening:\n\n Define an value `v` in the list to dominate a range of size `n`, including `v`\n itself, if `v` is smaller than all other numbers in this contiguous range.\n Define `v`'s \"dominating window\" to be the largest such range. If `v` has a\n dominating window of size `n`, then it must show up as a value when we take\n minimums of size `w`. Therefore, to find the maximum of all such minimum\n windows, we only need to find the maximum `v` which dominates a range of size\n `n` or greater, for each `n` between 1 and `N`.\n\n To do this, the naive algorithm is to, for each number, flood fill in each\n direction until you hit a number smaller than itself. However, we can instead\n start with the smallest number, and keep a list of indices which we have\n already processed, that we know is smaller than the number we're processing.\n Using binary search, we can find the interval indices in which the current\n index lies, and find the bounding interval in O(log N) time. Repeat for each\n of `n` numbers for a total time complexity of O(N log N).\n\n Finally, for each window size `w`, find the maximum `v` that dominates a range\n of size `n` or larger.\n\n It seems like this is not the best solution though. 
There is a O(N) solution\n using stacks.\n \"\"\"\n max_by_w_size = {w: (-float('inf')) for w in range(1, len(lst) + 1)}\n bounding_indices = [-1, len(lst)]\n sorted_lst = sorted(enumerate(lst), key=lambda x: x[1])\n for i, value in sorted_lst:\n r_index = bsearch(bounding_indices, i)\n l_index = r_index - 1\n l_point = bounding_indices[l_index]\n r_point = bounding_indices[r_index]\n w = r_point - (l_point + 1)\n assert w > 0\n max_by_w_size[w] = max(max_by_w_size[w], value)\n insort_left(bounding_indices, i)\n m = -float('inf')\n maxes = []\n for w in reversed(range(1, len(lst) + 1)):\n m = max(m, max_by_w_size[w])\n maxes.append(m)\n return reversed(maxes)\n\n\ndef bsearch(lst, target):\n i, j = 0, len(lst)\n while i < j:\n mid = (i + j) // 2\n if lst[mid] == target:\n return mid + 1\n elif lst[mid] < target:\n i = mid + 1\n else:\n j = mid\n return i\n\n\ndef riddle_dp(arr):\n \"\"\"\n Too slow to pass large test cases. See `riddle`.\n \"\"\"\n N = len(arr)\n min_w = {}\n for i, el in enumerate(arr):\n min_w[1, i] = el\n for w in range(2, len(arr) + 1):\n for i in range(N - w + 1):\n min_w[w, i] = min(min_w[w - 1, i], min_w[w - 1, i + 1])\n return [max(min_w[w, i] for i in range(N - w + 1)) for w in range(1, \n len(arr) + 1)]\n\n\nif __name__ == '__main__':\n fptr = open(os.environ['OUTPUT_PATH'], 'w')\n n = int(input())\n arr = list(map(int, input().rstrip().split()))\n res = riddle(arr)\n fptr.write(' '.join(map(str, res)))\n fptr.write('\\n')\n fptr.close()\n",
"step-5": "#!/bin/python3\n\n# TODO: implement the stack O(N) version\n\n'''\nNaive: O(N^3) or sum_{k=1...N}( O(N^2 (N-K)) )\n for each size N\n for each window of size N in the array\n traverse the window to find the max\n\nNaive with heap: O(N^2 log N)\n for each size N O(N)\n traverse array and accumulate window of size N O(N log N)\n find max O(1)\n\nDP:\nNotice that min(W, p), the min size for window of size W and at position p, is\nequal to min(min(W - 1, p), min(W - 1, p + 1)). Therefore, DP with these\ntables can reduce the size of the problem to O(W^2) ~= O(N^2). Is this good\nenough? No.\n\nDomination windows:\nLet us say that i dominates a contiguous range of n values if it's lower than\nall n of its neighboring values. This means that i will show up as a min window\nwhen considering window sizes of up to size n. We want to find the largest i\nsuch that it domaintes other numbers in a window of size n. Now how to find this\nefficiently? If we iterate through each i and compare it to its n neighbors,\nthat will also be O(N^2) time.\n\nStart with lowest number and 1-dimensional flood fill. This will take O(N^2)\ntime in the worst case though.\n\nHowever, you don't actually have to perform the flood fill. Instead, we can just\nuse the coordinates of lower numbers and perform something like binary search\nto find the closest coordinates to a given coordinate in O(log N) time.\n\nOverall this means that we iterate through each number, starting from the\nlowest, and perform O(log N) time binary searches to find the boundaries over\nwhich this element i dominates. 
Total time is O(N log N).\n'''\n\nimport math\nimport os\nimport random\nimport re\nimport sys\n\nfrom collections import defaultdict\nfrom heapq import heappush, heappop\nfrom bisect import insort_left\n\n# Complete the riddle function below.\ndef riddle(lst):\n '''\n Holy fuck.\n\n Better summary than above of what's happening:\n\n Define an value `v` in the list to dominate a range of size `n`, including `v`\n itself, if `v` is smaller than all other numbers in this contiguous range.\n Define `v`'s \"dominating window\" to be the largest such range. If `v` has a\n dominating window of size `n`, then it must show up as a value when we take\n minimums of size `w`. Therefore, to find the maximum of all such minimum\n windows, we only need to find the maximum `v` which dominates a range of size\n `n` or greater, for each `n` between 1 and `N`.\n\n To do this, the naive algorithm is to, for each number, flood fill in each\n direction until you hit a number smaller than itself. However, we can instead\n start with the smallest number, and keep a list of indices which we have\n already processed, that we know is smaller than the number we're processing.\n Using binary search, we can find the interval indices in which the current\n index lies, and find the bounding interval in O(log N) time. Repeat for each\n of `n` numbers for a total time complexity of O(N log N).\n\n Finally, for each window size `w`, find the maximum `v` that dominates a range\n of size `n` or larger.\n\n It seems like this is not the best solution though. 
There is a O(N) solution\n using stacks.\n '''\n max_by_w_size = { w: -float('inf') for w in range(1, len(lst) + 1) }\n # note that bounding_indices are indexes into len(lst), not values themselves\n bounding_indices = [-1, len(lst)]\n sorted_lst = sorted(enumerate(lst), key=lambda x: x[1])\n for i, value in sorted_lst:\n # note that l_index and r_index are indices to the bounding indices\n r_index = bsearch(bounding_indices, i)\n l_index = r_index - 1\n l_point = bounding_indices[l_index]\n r_point = bounding_indices[r_index]\n # (l_point + 1, r_point) defines a \"dominating window\" for `value`\n w = r_point - (l_point + 1)\n assert w > 0\n max_by_w_size[w] = max(max_by_w_size[w], value)\n insort_left(bounding_indices, i)\n\n m = -float('inf')\n maxes = []\n for w in reversed(range(1, len(lst) + 1)):\n m = max(m, max_by_w_size[w])\n maxes.append(m)\n return reversed(maxes)\n\ndef bsearch(lst, target):\n i, j = 0, len(lst)\n while i < j:\n mid = (i + j) // 2\n if lst[mid] == target:\n return mid + 1 # insert on the right side of the same number, not that it should matter?\n elif lst[mid] < target:\n i = mid + 1\n else:\n j = mid\n return i\n\ndef riddle_dp(arr):\n '''\n Too slow to pass large test cases. See `riddle`.\n '''\n N = len(arr)\n min_w = {} # dict of (win_size, win_position) to minimum\n for i, el in enumerate(arr):\n min_w[(1, i)] = el\n for w in range(2, len(arr) + 1):\n for i in range(N - w + 1):\n # print('w, i', w, i)\n min_w[(w, i)] = min(min_w[(w - 1, i)], min_w[(w - 1, i + 1)])\n # print('min_w', min_w)\n return [max(min_w[(w, i)] for i in range(N - w + 1)) for w in range(1, len(arr) + 1)]\n\nif __name__ == '__main__':\n fptr = open(os.environ['OUTPUT_PATH'], 'w')\n\n n = int(input())\n\n arr = list(map(int, input().rstrip().split()))\n\n res = riddle(arr)\n\n fptr.write(' '.join(map(str, res)))\n fptr.write('\\n')\n\n fptr.close()\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
<|reserved_special_token_0|>
class TestCommand(ExternalNotificationsPatchTestCase):
<|reserved_special_token_0|>
@patch('intake.management.commands.send_followups.is_the_weekend')
@patch('intake.management.commands.send_followups.FollowupsService')
def test_doesnt_do_anything_on_the_weekend(self, FollowupsService,
is_the_weekend):
is_the_weekend.return_value = True
command = send_followups.Command()
command.stdout = Mock()
command.handle()
FollowupsService.assert_not_called()
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class TestCommand(ExternalNotificationsPatchTestCase):
<|reserved_special_token_0|>
@patch('intake.management.commands.send_followups.is_the_weekend')
@patch('intake.management.commands.send_followups.FollowupsService')
def test_doesnt_do_anything_on_the_weekend(self, FollowupsService,
is_the_weekend):
is_the_weekend.return_value = True
command = send_followups.Command()
command.stdout = Mock()
command.handle()
FollowupsService.assert_not_called()
@patch('intake.management.commands.send_followups.is_the_weekend')
def test_expected_weekday_run(self, is_the_weekend):
is_the_weekend.return_value = False
org = Organization.objects.get(slug='ebclc')
dates = sorted([mock.get_old_date() for i in range(464, 469)])
for date, pk in zip(dates, range(464, 469)):
factories.FormSubmissionWithOrgsFactory.create(id=pk,
date_received=date, organizations=[org], answers=
get_answers_for_orgs([org], contact_preferences=[
'prefers_email', 'prefers_sms'], phone='4445551111', email=
'test@test.com'))
command = send_followups.Command()
command.stdout = Mock()
with self.assertLogs('project.services.logging_service', logging.INFO
) as logs:
command.handle()
self.assertEqual(len(self.notifications.email_followup.send.
mock_calls), 4)
assertInLogsCount(logs, {'event_name=app_followup_sent': 4})
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class TestCommand(ExternalNotificationsPatchTestCase):
fixtures = ['counties', 'organizations']
@patch('intake.management.commands.send_followups.is_the_weekend')
@patch('intake.management.commands.send_followups.FollowupsService')
def test_doesnt_do_anything_on_the_weekend(self, FollowupsService,
is_the_weekend):
is_the_weekend.return_value = True
command = send_followups.Command()
command.stdout = Mock()
command.handle()
FollowupsService.assert_not_called()
@patch('intake.management.commands.send_followups.is_the_weekend')
def test_expected_weekday_run(self, is_the_weekend):
is_the_weekend.return_value = False
org = Organization.objects.get(slug='ebclc')
dates = sorted([mock.get_old_date() for i in range(464, 469)])
for date, pk in zip(dates, range(464, 469)):
factories.FormSubmissionWithOrgsFactory.create(id=pk,
date_received=date, organizations=[org], answers=
get_answers_for_orgs([org], contact_preferences=[
'prefers_email', 'prefers_sms'], phone='4445551111', email=
'test@test.com'))
command = send_followups.Command()
command.stdout = Mock()
with self.assertLogs('project.services.logging_service', logging.INFO
) as logs:
command.handle()
self.assertEqual(len(self.notifications.email_followup.send.
mock_calls), 4)
assertInLogsCount(logs, {'event_name=app_followup_sent': 4})
<|reserved_special_token_1|>
import logging
from unittest.mock import patch, Mock
from intake.tests.base_testcases import ExternalNotificationsPatchTestCase
from intake.tests import mock, factories
from intake.tests.mock_org_answers import get_answers_for_orgs
from intake.management.commands import send_followups
from user_accounts.models import Organization
from project.tests.assertions import assertInLogsCount
class TestCommand(ExternalNotificationsPatchTestCase):
fixtures = ['counties', 'organizations']
@patch('intake.management.commands.send_followups.is_the_weekend')
@patch('intake.management.commands.send_followups.FollowupsService')
def test_doesnt_do_anything_on_the_weekend(self, FollowupsService,
is_the_weekend):
is_the_weekend.return_value = True
command = send_followups.Command()
command.stdout = Mock()
command.handle()
FollowupsService.assert_not_called()
@patch('intake.management.commands.send_followups.is_the_weekend')
def test_expected_weekday_run(self, is_the_weekend):
is_the_weekend.return_value = False
org = Organization.objects.get(slug='ebclc')
dates = sorted([mock.get_old_date() for i in range(464, 469)])
for date, pk in zip(dates, range(464, 469)):
factories.FormSubmissionWithOrgsFactory.create(id=pk,
date_received=date, organizations=[org], answers=
get_answers_for_orgs([org], contact_preferences=[
'prefers_email', 'prefers_sms'], phone='4445551111', email=
'test@test.com'))
command = send_followups.Command()
command.stdout = Mock()
with self.assertLogs('project.services.logging_service', logging.INFO
) as logs:
command.handle()
self.assertEqual(len(self.notifications.email_followup.send.
mock_calls), 4)
assertInLogsCount(logs, {'event_name=app_followup_sent': 4})
<|reserved_special_token_1|>
import logging
from unittest.mock import patch, Mock
from intake.tests.base_testcases import ExternalNotificationsPatchTestCase
from intake.tests import mock, factories
from intake.tests.mock_org_answers import get_answers_for_orgs
from intake.management.commands import send_followups
from user_accounts.models import Organization
from project.tests.assertions import assertInLogsCount
class TestCommand(ExternalNotificationsPatchTestCase):
    """Tests for the ``send_followups`` management command."""

    # Reference data loaded into the test database before each test.
    fixtures = [
        'counties', 'organizations']

    # NOTE: @patch decorators apply bottom-up, so the innermost patch
    # (FollowupsService) is the first mock argument after ``self``.
    @patch('intake.management.commands.send_followups.is_the_weekend')
    @patch('intake.management.commands.send_followups.FollowupsService')
    def test_doesnt_do_anything_on_the_weekend(
            self, FollowupsService, is_the_weekend):
        """The command must be a complete no-op on weekends."""
        is_the_weekend.return_value = True
        command = send_followups.Command()
        command.stdout = Mock()
        command.handle()
        # The followups service was never even constructed.
        FollowupsService.assert_not_called()

    @patch('intake.management.commands.send_followups.is_the_weekend')
    def test_expected_weekday_run(self, is_the_weekend):
        """On a weekday, followups are sent for eligible old submissions."""
        is_the_weekend.return_value = False
        org = Organization.objects.get(slug='ebclc')
        # Five old submissions with fixed primary keys 464-468, assigned
        # dates in ascending order.
        dates = sorted([mock.get_old_date() for i in range(464, 469)])
        for date, pk in zip(dates, range(464, 469)):
            factories.FormSubmissionWithOrgsFactory.create(
                id=pk,
                date_received=date,
                organizations=[org],
                answers=get_answers_for_orgs(
                    [org],
                    contact_preferences=[
                        'prefers_email',
                        'prefers_sms'],
                    phone='4445551111',
                    email='test@test.com',
                ))
        command = send_followups.Command()
        command.stdout = Mock()
        with self.assertLogs(
                'project.services.logging_service', logging.INFO) as logs:
            command.handle()
        # NOTE(review): expects 4 of the 5 submissions to get a followup —
        # presumably the most recent one is excluded; confirm against the
        # FollowupsService eligibility rules.
        self.assertEqual(
            len(self.notifications.email_followup.send.mock_calls), 4)
        assertInLogsCount(logs, {'event_name=app_followup_sent': 4})
|
flexible
|
{
"blob_id": "5cb67e5fcedafca4ce124e4094cbd8e1e9d95bb4",
"index": 3740,
"step-1": "<mask token>\n\n\nclass TestCommand(ExternalNotificationsPatchTestCase):\n <mask token>\n\n @patch('intake.management.commands.send_followups.is_the_weekend')\n @patch('intake.management.commands.send_followups.FollowupsService')\n def test_doesnt_do_anything_on_the_weekend(self, FollowupsService,\n is_the_weekend):\n is_the_weekend.return_value = True\n command = send_followups.Command()\n command.stdout = Mock()\n command.handle()\n FollowupsService.assert_not_called()\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass TestCommand(ExternalNotificationsPatchTestCase):\n <mask token>\n\n @patch('intake.management.commands.send_followups.is_the_weekend')\n @patch('intake.management.commands.send_followups.FollowupsService')\n def test_doesnt_do_anything_on_the_weekend(self, FollowupsService,\n is_the_weekend):\n is_the_weekend.return_value = True\n command = send_followups.Command()\n command.stdout = Mock()\n command.handle()\n FollowupsService.assert_not_called()\n\n @patch('intake.management.commands.send_followups.is_the_weekend')\n def test_expected_weekday_run(self, is_the_weekend):\n is_the_weekend.return_value = False\n org = Organization.objects.get(slug='ebclc')\n dates = sorted([mock.get_old_date() for i in range(464, 469)])\n for date, pk in zip(dates, range(464, 469)):\n factories.FormSubmissionWithOrgsFactory.create(id=pk,\n date_received=date, organizations=[org], answers=\n get_answers_for_orgs([org], contact_preferences=[\n 'prefers_email', 'prefers_sms'], phone='4445551111', email=\n 'test@test.com'))\n command = send_followups.Command()\n command.stdout = Mock()\n with self.assertLogs('project.services.logging_service', logging.INFO\n ) as logs:\n command.handle()\n self.assertEqual(len(self.notifications.email_followup.send.\n mock_calls), 4)\n assertInLogsCount(logs, {'event_name=app_followup_sent': 4})\n",
"step-3": "<mask token>\n\n\nclass TestCommand(ExternalNotificationsPatchTestCase):\n fixtures = ['counties', 'organizations']\n\n @patch('intake.management.commands.send_followups.is_the_weekend')\n @patch('intake.management.commands.send_followups.FollowupsService')\n def test_doesnt_do_anything_on_the_weekend(self, FollowupsService,\n is_the_weekend):\n is_the_weekend.return_value = True\n command = send_followups.Command()\n command.stdout = Mock()\n command.handle()\n FollowupsService.assert_not_called()\n\n @patch('intake.management.commands.send_followups.is_the_weekend')\n def test_expected_weekday_run(self, is_the_weekend):\n is_the_weekend.return_value = False\n org = Organization.objects.get(slug='ebclc')\n dates = sorted([mock.get_old_date() for i in range(464, 469)])\n for date, pk in zip(dates, range(464, 469)):\n factories.FormSubmissionWithOrgsFactory.create(id=pk,\n date_received=date, organizations=[org], answers=\n get_answers_for_orgs([org], contact_preferences=[\n 'prefers_email', 'prefers_sms'], phone='4445551111', email=\n 'test@test.com'))\n command = send_followups.Command()\n command.stdout = Mock()\n with self.assertLogs('project.services.logging_service', logging.INFO\n ) as logs:\n command.handle()\n self.assertEqual(len(self.notifications.email_followup.send.\n mock_calls), 4)\n assertInLogsCount(logs, {'event_name=app_followup_sent': 4})\n",
"step-4": "import logging\nfrom unittest.mock import patch, Mock\nfrom intake.tests.base_testcases import ExternalNotificationsPatchTestCase\nfrom intake.tests import mock, factories\nfrom intake.tests.mock_org_answers import get_answers_for_orgs\nfrom intake.management.commands import send_followups\nfrom user_accounts.models import Organization\nfrom project.tests.assertions import assertInLogsCount\n\n\nclass TestCommand(ExternalNotificationsPatchTestCase):\n fixtures = ['counties', 'organizations']\n\n @patch('intake.management.commands.send_followups.is_the_weekend')\n @patch('intake.management.commands.send_followups.FollowupsService')\n def test_doesnt_do_anything_on_the_weekend(self, FollowupsService,\n is_the_weekend):\n is_the_weekend.return_value = True\n command = send_followups.Command()\n command.stdout = Mock()\n command.handle()\n FollowupsService.assert_not_called()\n\n @patch('intake.management.commands.send_followups.is_the_weekend')\n def test_expected_weekday_run(self, is_the_weekend):\n is_the_weekend.return_value = False\n org = Organization.objects.get(slug='ebclc')\n dates = sorted([mock.get_old_date() for i in range(464, 469)])\n for date, pk in zip(dates, range(464, 469)):\n factories.FormSubmissionWithOrgsFactory.create(id=pk,\n date_received=date, organizations=[org], answers=\n get_answers_for_orgs([org], contact_preferences=[\n 'prefers_email', 'prefers_sms'], phone='4445551111', email=\n 'test@test.com'))\n command = send_followups.Command()\n command.stdout = Mock()\n with self.assertLogs('project.services.logging_service', logging.INFO\n ) as logs:\n command.handle()\n self.assertEqual(len(self.notifications.email_followup.send.\n mock_calls), 4)\n assertInLogsCount(logs, {'event_name=app_followup_sent': 4})\n",
"step-5": "import logging\nfrom unittest.mock import patch, Mock\nfrom intake.tests.base_testcases import ExternalNotificationsPatchTestCase\nfrom intake.tests import mock, factories\nfrom intake.tests.mock_org_answers import get_answers_for_orgs\nfrom intake.management.commands import send_followups\nfrom user_accounts.models import Organization\nfrom project.tests.assertions import assertInLogsCount\n\n\nclass TestCommand(ExternalNotificationsPatchTestCase):\n\n fixtures = [\n 'counties', 'organizations']\n\n @patch('intake.management.commands.send_followups.is_the_weekend')\n @patch('intake.management.commands.send_followups.FollowupsService')\n def test_doesnt_do_anything_on_the_weekend(\n self, FollowupsService, is_the_weekend):\n is_the_weekend.return_value = True\n command = send_followups.Command()\n command.stdout = Mock()\n command.handle()\n FollowupsService.assert_not_called()\n\n @patch('intake.management.commands.send_followups.is_the_weekend')\n def test_expected_weekday_run(self, is_the_weekend):\n is_the_weekend.return_value = False\n org = Organization.objects.get(slug='ebclc')\n dates = sorted([mock.get_old_date() for i in range(464, 469)])\n for date, pk in zip(dates, range(464, 469)):\n factories.FormSubmissionWithOrgsFactory.create(\n id=pk,\n date_received=date,\n organizations=[org],\n answers=get_answers_for_orgs(\n [org],\n contact_preferences=[\n 'prefers_email',\n 'prefers_sms'],\n phone='4445551111',\n email='test@test.com',\n ))\n command = send_followups.Command()\n command.stdout = Mock()\n with self.assertLogs(\n 'project.services.logging_service', logging.INFO) as logs:\n command.handle()\n self.assertEqual(\n len(self.notifications.email_followup.send.mock_calls), 4)\n assertInLogsCount(logs, {'event_name=app_followup_sent': 4})\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
T = List[int]
C = Callable[[int], None]
<|reserved_special_token_1|>
from typing import List, Callable
T = List[int]
C = Callable[[int], None]
<|reserved_special_token_1|>
from typing import List, Callable
#: A list of int
T = List[int]
C = Callable[[int], None] # a generic alias not having a doccomment
|
flexible
|
{
"blob_id": "aaee69d339cf1c14e54366633155ee57026e6487",
"index": 2071,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nT = List[int]\nC = Callable[[int], None]\n",
"step-3": "from typing import List, Callable\nT = List[int]\nC = Callable[[int], None]\n",
"step-4": "from typing import List, Callable\n\n#: A list of int\nT = List[int]\n\nC = Callable[[int], None] # a generic alias not having a doccomment\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from __future__ import absolute_import
import sys
from apscheduler.executors.base import BaseExecutor, run_job
try:
import gevent
except ImportError: # pragma: nocover
raise ImportError('GeventExecutor requires gevent installed')
class GeventExecutor(BaseExecutor):
"""
Runs jobs as greenlets.
Plugin alias: ``gevent``
"""
def _do_submit_job(self, job, run_times):
def callback(greenlet):
try:
events = greenlet.get()
except BaseException:
self._run_job_error(job.id, *sys.exc_info()[1:])
else:
self._run_job_success(job.id, events)
gevent.spawn(run_job, job, job._jobstore_alias, run_times, self._logger.name).\
link(callback)
|
normal
|
{
"blob_id": "afcadc11d23fb921eb6f8038a908de02ee763ca4",
"index": 693,
"step-1": "<mask token>\n\n\nclass GeventExecutor(BaseExecutor):\n <mask token>\n\n def _do_submit_job(self, job, run_times):\n\n def callback(greenlet):\n try:\n events = greenlet.get()\n except BaseException:\n self._run_job_error(job.id, *sys.exc_info()[1:])\n else:\n self._run_job_success(job.id, events)\n gevent.spawn(run_job, job, job._jobstore_alias, run_times, self.\n _logger.name).link(callback)\n",
"step-2": "<mask token>\n\n\nclass GeventExecutor(BaseExecutor):\n \"\"\"\n Runs jobs as greenlets.\n\n Plugin alias: ``gevent``\n \"\"\"\n\n def _do_submit_job(self, job, run_times):\n\n def callback(greenlet):\n try:\n events = greenlet.get()\n except BaseException:\n self._run_job_error(job.id, *sys.exc_info()[1:])\n else:\n self._run_job_success(job.id, events)\n gevent.spawn(run_job, job, job._jobstore_alias, run_times, self.\n _logger.name).link(callback)\n",
"step-3": "<mask token>\ntry:\n import gevent\nexcept ImportError:\n raise ImportError('GeventExecutor requires gevent installed')\n\n\nclass GeventExecutor(BaseExecutor):\n \"\"\"\n Runs jobs as greenlets.\n\n Plugin alias: ``gevent``\n \"\"\"\n\n def _do_submit_job(self, job, run_times):\n\n def callback(greenlet):\n try:\n events = greenlet.get()\n except BaseException:\n self._run_job_error(job.id, *sys.exc_info()[1:])\n else:\n self._run_job_success(job.id, events)\n gevent.spawn(run_job, job, job._jobstore_alias, run_times, self.\n _logger.name).link(callback)\n",
"step-4": "from __future__ import absolute_import\nimport sys\nfrom apscheduler.executors.base import BaseExecutor, run_job\ntry:\n import gevent\nexcept ImportError:\n raise ImportError('GeventExecutor requires gevent installed')\n\n\nclass GeventExecutor(BaseExecutor):\n \"\"\"\n Runs jobs as greenlets.\n\n Plugin alias: ``gevent``\n \"\"\"\n\n def _do_submit_job(self, job, run_times):\n\n def callback(greenlet):\n try:\n events = greenlet.get()\n except BaseException:\n self._run_job_error(job.id, *sys.exc_info()[1:])\n else:\n self._run_job_success(job.id, events)\n gevent.spawn(run_job, job, job._jobstore_alias, run_times, self.\n _logger.name).link(callback)\n",
"step-5": "from __future__ import absolute_import\nimport sys\n\nfrom apscheduler.executors.base import BaseExecutor, run_job\n\n\ntry:\n import gevent\nexcept ImportError: # pragma: nocover\n raise ImportError('GeventExecutor requires gevent installed')\n\n\nclass GeventExecutor(BaseExecutor):\n \"\"\"\n Runs jobs as greenlets.\n\n Plugin alias: ``gevent``\n \"\"\"\n\n def _do_submit_job(self, job, run_times):\n def callback(greenlet):\n try:\n events = greenlet.get()\n except BaseException:\n self._run_job_error(job.id, *sys.exc_info()[1:])\n else:\n self._run_job_success(job.id, events)\n\n gevent.spawn(run_job, job, job._jobstore_alias, run_times, self._logger.name).\\\n link(callback)\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
<|reserved_special_token_0|>
def boundarytester(playerinput):
if playerinput[1][0] > 14 or playerinput[1][0] < 0 or playerinput[1][1
] > 14 or playerinput[1][1] < 0:
return False
if playerinput[2] == 'h':
if playerinput[1][1] + len(list(playerinput[0])) - 1 > 14:
return False
if playerinput[2] == 'v':
if playerinput[1][0] + len(list(playerinput[0])) - 1 > 14:
return False
return True
def moveconverter(playerinput, board):
word = list(playerinput[0])
if playerinput[2] == 'v':
p = [(x, playerinput[1][1]) for x in range(playerinput[1][0],
playerinput[1][0] + len(word))]
bmask = board[p[0][0]:p[-1][0] + 1, p[0][1]] == 52
letters = np.array(word)[bmask].tolist()
positions = np.array(p)[bmask].tolist()
elif playerinput[2] == 'h':
p = [(playerinput[1][0], x) for x in range(playerinput[1][1],
playerinput[1][1] + len(word))]
bmask = board[p[0][0], p[0][1]:p[-1][1] + 1] == 52
letters = np.array(word)[bmask].tolist()
positions = np.array(p)[bmask].tolist()
return letters, positions
<|reserved_special_token_0|>
def wordsmade(letters, positions, mainboard):
l1 = dict(zip(string.ascii_uppercase, list(range(0, 26, 1))))
l2 = dict(zip(string.ascii_lowercase, list(range(26, 52, 1))))
letternumberkey = {**l1, **l2}
letternumberkey[' '] = 52
board = mainboard.copy()
for i in range(len(letters)):
board[positions[i][0], positions[i][1]] = letternumberkey[letters[i]]
wordsp = []
for i in positions:
ph = np.array(list(zip([i[0]] * 15, list(range(0, 15)))))[board[i[0
], :] < 52].tolist()
r = list(map(tuple, ph))
if len(r) > 1:
for j in range(r.index(tuple(i)), 0, -1):
if r[j][1] - r[j - 1][1] > 1:
r = r[j:]
break
for j in range(r.index(tuple(i)), len(r), 1):
try:
if r[j + 1][1] - r[j][1] > 1:
r = r[:j + 1]
break
except IndexError:
pass
if len(r) > 1:
wordsp.append(r)
pv = np.array(list(zip(list(range(0, 15)), [i[1]] * 15)))[board[:,
i[1]] < 52].tolist()
s = list(map(tuple, pv))
if len(s) > 1:
for j in range(s.index(tuple(i)), 0, -1):
if s[j][0] - s[j - 1][0] > 1:
s = s[j:]
break
for j in range(s.index(tuple(i)), len(s), 1):
try:
if s[j + 1][0] - s[j][0] > 1:
s = s[:j + 1]
break
except IndexError:
pass
if len(s) > 1:
wordsp.append(s)
wordspq = []
for i in wordsp:
wordspq.append(tuple(i))
return list(set(wordspq))
def validword(words, filename='wordlist/sowpods.txt'):
with open(filename, 'r') as f:
rd = f.read()
rd = rd.split('\n')
rd = set(rd)
for j in words:
if j not in rd:
return False, j
return True, True
<|reserved_special_token_0|>
def overlaptester(playerinput, board):
l_1 = dict(zip(list(range(0, 26, 1)), string.ascii_uppercase))
l_2 = dict(zip(list(range(26, 52, 1)), string.ascii_lowercase))
numberletterkey = {**l_1, **l_2}
numberletterkey[52] = ' '
word = playerinput[0]
if playerinput[2] == 'v':
p = [(x, playerinput[1][1]) for x in range(playerinput[1][0],
playerinput[1][0] + len(word))]
bmask = board[p[0][0]:p[-1][0] + 1, p[0][1]] != 52
overlapletters = ''.join(np.array(list(word))[bmask].tolist()).lower()
existingletters = ''.join(list(map(lambda x: numberletterkey[x],
board[p[0][0]:p[-1][0] + 1, p[0][1]][bmask]))).lower()
if overlapletters != existingletters:
return False
elif playerinput[2] == 'h':
p = [(playerinput[1][0], x) for x in range(playerinput[1][1],
playerinput[1][1] + len(word))]
bmask = board[p[0][0], p[0][1]:p[-1][1] + 1] != 52
overlapletters = ''.join(np.array(list(word))[bmask].tolist()).lower()
existingletters = ''.join(list(map(lambda x: numberletterkey[x],
board[p[0][0], p[0][1]:p[-1][1] + 1][bmask]))).lower()
if overlapletters != existingletters:
return False
return True
def mainrules(playerinput, board, rack, validity=True, filename=
'wordlist/sowpods.txt'):
l_1 = dict(zip(list(range(0, 26, 1)), string.ascii_uppercase))
l_2 = dict(zip(list(range(26, 52, 1)), string.ascii_lowercase))
numberletterkey = {**l_1, **l_2}
numberletterkey[52] = ' '
l1 = dict(zip(string.ascii_uppercase, list(range(0, 26, 1))))
l2 = dict(zip(string.ascii_lowercase, list(range(26, 52, 1))))
letternumberkey = {**l1, **l2}
letternumberkey[' '] = 52
if not boundarytester(playerinput):
return False, False, 'Your word extends outside the board.'
if not overlaptester(playerinput, board):
return (False, False,
'The word you want to put requires different letters at places where there already are letters.'
)
move = moveconverter(playerinput, board)
words = wordsmade(move[0], move[1], board)
if not island_words_tester(move[1], board):
return (False, False,
'The first move has to contain the middle square(6,6). Rest must be connected via at least 1 letter to the words on the board.'
)
internal_board = board.copy()
for i in range(len(move[0])):
internal_board[move[1][i][0], move[1][i][1]] = letternumberkey[move
[0][i]]
actual_words = []
for i in words:
w = []
for j in i:
w.append(numberletterkey[internal_board[j[0], j[1]]])
actual_words.append(''.join(w).lower())
if validity:
if not validword(actual_words, filename)[0]:
return (False, False,
'Validity mode is on. One of the words you formed is not valid: '
+ validword(actual_words, filename)[1])
if not racksufficiency(move[0], rack)[0]:
return False, False, racksufficiency(move[0], rack)[1]
return True, move, words
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def boundarytester(playerinput):
if playerinput[1][0] > 14 or playerinput[1][0] < 0 or playerinput[1][1
] > 14 or playerinput[1][1] < 0:
return False
if playerinput[2] == 'h':
if playerinput[1][1] + len(list(playerinput[0])) - 1 > 14:
return False
if playerinput[2] == 'v':
if playerinput[1][0] + len(list(playerinput[0])) - 1 > 14:
return False
return True
def moveconverter(playerinput, board):
word = list(playerinput[0])
if playerinput[2] == 'v':
p = [(x, playerinput[1][1]) for x in range(playerinput[1][0],
playerinput[1][0] + len(word))]
bmask = board[p[0][0]:p[-1][0] + 1, p[0][1]] == 52
letters = np.array(word)[bmask].tolist()
positions = np.array(p)[bmask].tolist()
elif playerinput[2] == 'h':
p = [(playerinput[1][0], x) for x in range(playerinput[1][1],
playerinput[1][1] + len(word))]
bmask = board[p[0][0], p[0][1]:p[-1][1] + 1] == 52
letters = np.array(word)[bmask].tolist()
positions = np.array(p)[bmask].tolist()
return letters, positions
<|reserved_special_token_0|>
def wordsmade(letters, positions, mainboard):
l1 = dict(zip(string.ascii_uppercase, list(range(0, 26, 1))))
l2 = dict(zip(string.ascii_lowercase, list(range(26, 52, 1))))
letternumberkey = {**l1, **l2}
letternumberkey[' '] = 52
board = mainboard.copy()
for i in range(len(letters)):
board[positions[i][0], positions[i][1]] = letternumberkey[letters[i]]
wordsp = []
for i in positions:
ph = np.array(list(zip([i[0]] * 15, list(range(0, 15)))))[board[i[0
], :] < 52].tolist()
r = list(map(tuple, ph))
if len(r) > 1:
for j in range(r.index(tuple(i)), 0, -1):
if r[j][1] - r[j - 1][1] > 1:
r = r[j:]
break
for j in range(r.index(tuple(i)), len(r), 1):
try:
if r[j + 1][1] - r[j][1] > 1:
r = r[:j + 1]
break
except IndexError:
pass
if len(r) > 1:
wordsp.append(r)
pv = np.array(list(zip(list(range(0, 15)), [i[1]] * 15)))[board[:,
i[1]] < 52].tolist()
s = list(map(tuple, pv))
if len(s) > 1:
for j in range(s.index(tuple(i)), 0, -1):
if s[j][0] - s[j - 1][0] > 1:
s = s[j:]
break
for j in range(s.index(tuple(i)), len(s), 1):
try:
if s[j + 1][0] - s[j][0] > 1:
s = s[:j + 1]
break
except IndexError:
pass
if len(s) > 1:
wordsp.append(s)
wordspq = []
for i in wordsp:
wordspq.append(tuple(i))
return list(set(wordspq))
def validword(words, filename='wordlist/sowpods.txt'):
with open(filename, 'r') as f:
rd = f.read()
rd = rd.split('\n')
rd = set(rd)
for j in words:
if j not in rd:
return False, j
return True, True
def racksufficiency(letters, rack):
rackblanks = [x for x in rack if x == ' ']
blanks = [x for x in letters if x != x.upper()]
if len(blanks) > len(rackblanks):
return (False,
'You entered special tiles (lower case letters) more than you have.'
)
for i in [x for x in letters if x == x.upper()]:
if i not in [x for x in letters if x != ' ']:
return (False,
'You do not have the tiles to play the word you want to.')
return True, True
def overlaptester(playerinput, board):
l_1 = dict(zip(list(range(0, 26, 1)), string.ascii_uppercase))
l_2 = dict(zip(list(range(26, 52, 1)), string.ascii_lowercase))
numberletterkey = {**l_1, **l_2}
numberletterkey[52] = ' '
word = playerinput[0]
if playerinput[2] == 'v':
p = [(x, playerinput[1][1]) for x in range(playerinput[1][0],
playerinput[1][0] + len(word))]
bmask = board[p[0][0]:p[-1][0] + 1, p[0][1]] != 52
overlapletters = ''.join(np.array(list(word))[bmask].tolist()).lower()
existingletters = ''.join(list(map(lambda x: numberletterkey[x],
board[p[0][0]:p[-1][0] + 1, p[0][1]][bmask]))).lower()
if overlapletters != existingletters:
return False
elif playerinput[2] == 'h':
p = [(playerinput[1][0], x) for x in range(playerinput[1][1],
playerinput[1][1] + len(word))]
bmask = board[p[0][0], p[0][1]:p[-1][1] + 1] != 52
overlapletters = ''.join(np.array(list(word))[bmask].tolist()).lower()
existingletters = ''.join(list(map(lambda x: numberletterkey[x],
board[p[0][0], p[0][1]:p[-1][1] + 1][bmask]))).lower()
if overlapletters != existingletters:
return False
return True
def mainrules(playerinput, board, rack, validity=True, filename=
'wordlist/sowpods.txt'):
l_1 = dict(zip(list(range(0, 26, 1)), string.ascii_uppercase))
l_2 = dict(zip(list(range(26, 52, 1)), string.ascii_lowercase))
numberletterkey = {**l_1, **l_2}
numberletterkey[52] = ' '
l1 = dict(zip(string.ascii_uppercase, list(range(0, 26, 1))))
l2 = dict(zip(string.ascii_lowercase, list(range(26, 52, 1))))
letternumberkey = {**l1, **l2}
letternumberkey[' '] = 52
if not boundarytester(playerinput):
return False, False, 'Your word extends outside the board.'
if not overlaptester(playerinput, board):
return (False, False,
'The word you want to put requires different letters at places where there already are letters.'
)
move = moveconverter(playerinput, board)
words = wordsmade(move[0], move[1], board)
if not island_words_tester(move[1], board):
return (False, False,
'The first move has to contain the middle square(6,6). Rest must be connected via at least 1 letter to the words on the board.'
)
internal_board = board.copy()
for i in range(len(move[0])):
internal_board[move[1][i][0], move[1][i][1]] = letternumberkey[move
[0][i]]
actual_words = []
for i in words:
w = []
for j in i:
w.append(numberletterkey[internal_board[j[0], j[1]]])
actual_words.append(''.join(w).lower())
if validity:
if not validword(actual_words, filename)[0]:
return (False, False,
'Validity mode is on. One of the words you formed is not valid: '
+ validword(actual_words, filename)[1])
if not racksufficiency(move[0], rack)[0]:
return False, False, racksufficiency(move[0], rack)[1]
return True, move, words
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def boundarytester(playerinput):
if playerinput[1][0] > 14 or playerinput[1][0] < 0 or playerinput[1][1
] > 14 or playerinput[1][1] < 0:
return False
if playerinput[2] == 'h':
if playerinput[1][1] + len(list(playerinput[0])) - 1 > 14:
return False
if playerinput[2] == 'v':
if playerinput[1][0] + len(list(playerinput[0])) - 1 > 14:
return False
return True
def moveconverter(playerinput, board):
word = list(playerinput[0])
if playerinput[2] == 'v':
p = [(x, playerinput[1][1]) for x in range(playerinput[1][0],
playerinput[1][0] + len(word))]
bmask = board[p[0][0]:p[-1][0] + 1, p[0][1]] == 52
letters = np.array(word)[bmask].tolist()
positions = np.array(p)[bmask].tolist()
elif playerinput[2] == 'h':
p = [(playerinput[1][0], x) for x in range(playerinput[1][1],
playerinput[1][1] + len(word))]
bmask = board[p[0][0], p[0][1]:p[-1][1] + 1] == 52
letters = np.array(word)[bmask].tolist()
positions = np.array(p)[bmask].tolist()
return letters, positions
def island_words_tester(positions, board):
test = False
for i in positions:
if i[0] == 7 and i[1] == 7:
test = True
else:
adjacent_positions = [(i[0] - 1, i[1]), (i[0] + 1, i[1]), (i[0],
i[1] - 1), (i[0], i[1] + 1)]
adjacent_positions = [x for x in adjacent_positions if (x[0] >=
0 and x[0] < 15) and (x[1] >= 0 and x[1] < 15)]
for j in adjacent_positions:
if board[j[0], j[1]] < 52:
test = True
return test
def wordsmade(letters, positions, mainboard):
l1 = dict(zip(string.ascii_uppercase, list(range(0, 26, 1))))
l2 = dict(zip(string.ascii_lowercase, list(range(26, 52, 1))))
letternumberkey = {**l1, **l2}
letternumberkey[' '] = 52
board = mainboard.copy()
for i in range(len(letters)):
board[positions[i][0], positions[i][1]] = letternumberkey[letters[i]]
wordsp = []
for i in positions:
ph = np.array(list(zip([i[0]] * 15, list(range(0, 15)))))[board[i[0
], :] < 52].tolist()
r = list(map(tuple, ph))
if len(r) > 1:
for j in range(r.index(tuple(i)), 0, -1):
if r[j][1] - r[j - 1][1] > 1:
r = r[j:]
break
for j in range(r.index(tuple(i)), len(r), 1):
try:
if r[j + 1][1] - r[j][1] > 1:
r = r[:j + 1]
break
except IndexError:
pass
if len(r) > 1:
wordsp.append(r)
pv = np.array(list(zip(list(range(0, 15)), [i[1]] * 15)))[board[:,
i[1]] < 52].tolist()
s = list(map(tuple, pv))
if len(s) > 1:
for j in range(s.index(tuple(i)), 0, -1):
if s[j][0] - s[j - 1][0] > 1:
s = s[j:]
break
for j in range(s.index(tuple(i)), len(s), 1):
try:
if s[j + 1][0] - s[j][0] > 1:
s = s[:j + 1]
break
except IndexError:
pass
if len(s) > 1:
wordsp.append(s)
wordspq = []
for i in wordsp:
wordspq.append(tuple(i))
return list(set(wordspq))
def validword(words, filename='wordlist/sowpods.txt'):
with open(filename, 'r') as f:
rd = f.read()
rd = rd.split('\n')
rd = set(rd)
for j in words:
if j not in rd:
return False, j
return True, True
def racksufficiency(letters, rack):
rackblanks = [x for x in rack if x == ' ']
blanks = [x for x in letters if x != x.upper()]
if len(blanks) > len(rackblanks):
return (False,
'You entered special tiles (lower case letters) more than you have.'
)
for i in [x for x in letters if x == x.upper()]:
if i not in [x for x in letters if x != ' ']:
return (False,
'You do not have the tiles to play the word you want to.')
return True, True
def overlaptester(playerinput, board):
l_1 = dict(zip(list(range(0, 26, 1)), string.ascii_uppercase))
l_2 = dict(zip(list(range(26, 52, 1)), string.ascii_lowercase))
numberletterkey = {**l_1, **l_2}
numberletterkey[52] = ' '
word = playerinput[0]
if playerinput[2] == 'v':
p = [(x, playerinput[1][1]) for x in range(playerinput[1][0],
playerinput[1][0] + len(word))]
bmask = board[p[0][0]:p[-1][0] + 1, p[0][1]] != 52
overlapletters = ''.join(np.array(list(word))[bmask].tolist()).lower()
existingletters = ''.join(list(map(lambda x: numberletterkey[x],
board[p[0][0]:p[-1][0] + 1, p[0][1]][bmask]))).lower()
if overlapletters != existingletters:
return False
elif playerinput[2] == 'h':
p = [(playerinput[1][0], x) for x in range(playerinput[1][1],
playerinput[1][1] + len(word))]
bmask = board[p[0][0], p[0][1]:p[-1][1] + 1] != 52
overlapletters = ''.join(np.array(list(word))[bmask].tolist()).lower()
existingletters = ''.join(list(map(lambda x: numberletterkey[x],
board[p[0][0], p[0][1]:p[-1][1] + 1][bmask]))).lower()
if overlapletters != existingletters:
return False
return True
def mainrules(playerinput, board, rack, validity=True, filename=
'wordlist/sowpods.txt'):
l_1 = dict(zip(list(range(0, 26, 1)), string.ascii_uppercase))
l_2 = dict(zip(list(range(26, 52, 1)), string.ascii_lowercase))
numberletterkey = {**l_1, **l_2}
numberletterkey[52] = ' '
l1 = dict(zip(string.ascii_uppercase, list(range(0, 26, 1))))
l2 = dict(zip(string.ascii_lowercase, list(range(26, 52, 1))))
letternumberkey = {**l1, **l2}
letternumberkey[' '] = 52
if not boundarytester(playerinput):
return False, False, 'Your word extends outside the board.'
if not overlaptester(playerinput, board):
return (False, False,
'The word you want to put requires different letters at places where there already are letters.'
)
move = moveconverter(playerinput, board)
words = wordsmade(move[0], move[1], board)
if not island_words_tester(move[1], board):
return (False, False,
'The first move has to contain the middle square(6,6). Rest must be connected via at least 1 letter to the words on the board.'
)
internal_board = board.copy()
for i in range(len(move[0])):
internal_board[move[1][i][0], move[1][i][1]] = letternumberkey[move
[0][i]]
actual_words = []
for i in words:
w = []
for j in i:
w.append(numberletterkey[internal_board[j[0], j[1]]])
actual_words.append(''.join(w).lower())
if validity:
if not validword(actual_words, filename)[0]:
return (False, False,
'Validity mode is on. One of the words you formed is not valid: '
+ validword(actual_words, filename)[1])
if not racksufficiency(move[0], rack)[0]:
return False, False, racksufficiency(move[0], rack)[1]
return True, move, words
<|reserved_special_token_1|>
import numpy as np
import string
def boundarytester(playerinput):
    """Return True when the proposed word lies entirely on the 15x15 board.

    ``playerinput`` is [word, (row, col), direction] with direction 'h' or 'v'.
    """
    row, col = playerinput[1][0], playerinput[1][1]
    span = len(list(playerinput[0])) - 1  # extra squares beyond the start tile
    # the starting square itself must be on the board
    if not (0 <= row <= 14 and 0 <= col <= 14):
        return False
    # horizontal words extend rightward, vertical words extend downward
    if playerinput[2] == 'h' and col + span > 14:
        return False
    if playerinput[2] == 'v' and row + span > 14:
        return False
    return True
def moveconverter(playerinput, board):
    """Translate a player's move into the NEW tiles it lays down.

    Squares already occupied on the board (value != 52) are filtered out, so
    the result describes only the tiles the player must place from the rack.
    Returns (letters, positions) where positions is a list of [row, col].
    """
    tiles = list(playerinput[0])
    row0, col0 = playerinput[1][0], playerinput[1][1]
    if playerinput[2] == 'v':
        coords = [(r, col0) for r in range(row0, row0 + len(tiles))]
        # boolean mask of squares still empty along the word's path
        empty = board[coords[0][0]:coords[-1][0] + 1, coords[0][1]] == 52
    elif playerinput[2] == 'h':
        coords = [(row0, c) for c in range(col0, col0 + len(tiles))]
        empty = board[coords[0][0], coords[0][1]:coords[-1][1] + 1] == 52
    letters = np.array(tiles)[empty].tolist()
    positions = np.array(coords)[empty].tolist()
    return letters, positions
def island_words_tester(positions, board):
    """Check that the placement is anchored to the game.

    A placement is valid when it covers the centre square (7, 7) -- the
    first-move rule -- or when at least one placed tile is orthogonally
    adjacent to an existing tile on the board (value < 52).
    """
    for r, c in positions:
        if (r, c) == (7, 7):
            return True
        # orthogonal neighbours clipped to the 15x15 grid
        for nr, nc in ((r - 1, c), (r + 1, c), (r, c - 1), (r, c + 1)):
            if 0 <= nr < 15 and 0 <= nc < 15 and board[nr, nc] < 52:
                return True
    return False
def wordsmade(letters, positions, mainboard):
    """Return the words formed by placing *letters* at *positions*.

    Each word is reported as a tuple of (row, col) board positions; only
    runs of two or more tiles count as words.  The board itself is not
    modified -- the letters are laid on a scratch copy (52 = empty square).
    """
    # letter -> board code table (A-Z: 0-25, a-z blanks: 26-51, ' ': 52)
    codes = dict(zip(string.ascii_uppercase, range(26)))
    codes.update(zip(string.ascii_lowercase, range(26, 52)))
    codes[' '] = 52
    board = mainboard.copy()
    for letter, pos in zip(letters, positions):
        board[pos[0], pos[1]] = codes[letter]
    found = []
    for pos in positions:
        row, col = pos[0], pos[1]
        # horizontal word: expand to the unbroken occupied run through this tile
        c_lo = col
        while c_lo > 0 and board[row, c_lo - 1] < 52:
            c_lo -= 1
        c_hi = col
        while c_hi < 14 and board[row, c_hi + 1] < 52:
            c_hi += 1
        if c_hi > c_lo:
            found.append(tuple((row, c) for c in range(c_lo, c_hi + 1)))
        # vertical word: same expansion along the column
        r_lo = row
        while r_lo > 0 and board[r_lo - 1, col] < 52:
            r_lo -= 1
        r_hi = row
        while r_hi < 14 and board[r_hi + 1, col] < 52:
            r_hi += 1
        if r_hi > r_lo:
            found.append(tuple((r, col) for r in range(r_lo, r_hi + 1)))
    # the same word is discovered once per tile it contains; de-duplicate
    return list(set(found))
def validword(words, filename='wordlist/sowpods.txt'):
    """Check every word in *words* against the newline-separated word list.

    Returns (True, True) when all words are present, otherwise
    (False, offending_word) for the first word that is missing.
    """
    with open(filename, 'r') as handle:
        wordlist = set(handle.read().split('\n'))
    for candidate in words:
        if candidate not in wordlist:
            return False, candidate
    return True, True
def racksufficiency(letters, rack):
    """Verify the player's rack can supply every tile in *letters*.

    Lower-case letters stand for blank tiles (held in the rack as ' ');
    upper-case letters must be matched, with multiplicity, by rack tiles.
    Returns (True, True) on success, otherwise (False, reason).
    """
    blanks_needed = sum(1 for x in letters if x != x.upper())
    blanks_available = sum(1 for x in rack if x == ' ')
    if blanks_needed > blanks_available:
        return (False,
            'You entered special tiles (lower case letters) more than you have.'
            )
    # Bug fix: the original compared the requested letters against the
    # *letters* list itself (`i not in [x for x in letters ...]`), which is
    # always false, so the rack was never actually checked.  Consume rack
    # tiles one by one so multiplicity is respected.
    pool = [x for x in rack if x != ' ']
    for tile in (x for x in letters if x == x.upper()):
        if tile in pool:
            pool.remove(tile)
        else:
            return (False,
                'You do not have the tiles to play the word you want to.')
    return True, True
def overlaptester(playerinput, board):
    """Return False when the word disagrees with tiles already on the board.

    Wherever the word crosses an occupied square (board value != 52), the
    letter in the word must match the existing tile, case-insensitively.
    """
    # board code -> letter table (0-25: A-Z, 26-51: a-z blanks, 52: empty)
    codekey = {i: ch for i, ch in enumerate(string.ascii_uppercase)}
    codekey.update({i + 26: ch for i, ch in enumerate(string.ascii_lowercase)})
    codekey[52] = ' '
    word = playerinput[0]
    r0, c0 = playerinput[1][0], playerinput[1][1]
    if playerinput[2] == 'v':
        line = board[r0:r0 + len(word), c0]
    elif playerinput[2] == 'h':
        line = board[r0, c0:c0 + len(word)]
    else:
        return True
    occupied = line != 52
    wanted = ''.join(np.array(list(word))[occupied].tolist()).lower()
    present = ''.join(codekey[v] for v in line[occupied]).lower()
    return wanted == present
def mainrules(playerinput, board, rack, validity=True, filename=
    'wordlist/sowpods.txt'):
    """Run every rule check on a proposed move.

    Returns a 3-tuple:
      * (False, False, reason) when any rule is violated;
      * (True, (letters, positions), words) when the move is legal, where
        ``words`` lists the words the move forms, each as a tuple of
        (row, col) board positions.
    """
    # number <-> letter translation tables (52 encodes an empty square)
    l_1 = dict(zip(list(range(0, 26, 1)), string.ascii_uppercase))
    l_2 = dict(zip(list(range(26, 52, 1)), string.ascii_lowercase))
    numberletterkey = {**l_1, **l_2}
    numberletterkey[52] = ' '
    l1 = dict(zip(string.ascii_uppercase, list(range(0, 26, 1))))
    l2 = dict(zip(string.ascii_lowercase, list(range(26, 52, 1))))
    letternumberkey = {**l1, **l2}
    letternumberkey[' '] = 52
    if not boundarytester(playerinput):
        return False, False, 'Your word extends outside the board.'
    if not overlaptester(playerinput, board):
        return (False, False,
            'The word you want to put requires different letters at places where there already are letters.'
            )
    move = moveconverter(playerinput, board)
    words = wordsmade(move[0], move[1], board)
    if not island_words_tester(move[1], board):
        # Bug fix: the centre square enforced by island_words_tester is
        # (7,7); the old message incorrectly said (6,6).
        return (False, False,
            'The first move has to contain the middle square(7,7). Rest must be connected via at least 1 letter to the words on the board.'
            )
    # lay the new tiles on a scratch copy so the formed words can be spelled
    internal_board = board.copy()
    for i in range(len(move[0])):
        internal_board[move[1][i][0], move[1][i][1]] = letternumberkey[move
            [0][i]]
    actual_words = []
    for word_positions in words:
        actual_words.append(''.join(numberletterkey[internal_board[r, c]]
            for r, c in word_positions).lower())
    if validity:
        # call validword once -- it re-reads the word-list file on every call
        valid, offender = validword(actual_words, filename)
        if not valid:
            return (False, False,
                'Validity mode is on. One of the words you formed is not valid: '
                 + offender)
    # likewise avoid running the rack check twice
    sufficient, reason = racksufficiency(move[0], rack)
    if not sufficient:
        return False, False, reason
    return True, move, words
<|reserved_special_token_1|>
# player input is: Word made, starting tile position of the word made, horizontal or vertical
# example: playerinput = ['STRING', (0, 1), 'v']
import numpy as np
import string
def boundarytester(playerinput): # to check whether the player is placing the tiles within the confines of the board
if playerinput[1][0] > 14 or playerinput[1][0] < 0 or playerinput[1][1] > 14 or playerinput[1][1] < 0:
return False
if playerinput[2] == 'h':
if (playerinput[1][1] + len(list(playerinput[0])) - 1) > 14:
return False
if playerinput[2] == 'v':
if (playerinput[1][0] + len(list(playerinput[0])) - 1) > 14:
return False
return True
def moveconverter(playerinput, board): # converting player input to internal lingo for a move
word = list(playerinput[0])
if playerinput[2] == 'v':
# p is the list of positions of the tiles in the entered word
p = [(x, playerinput[1][1]) for x in range(playerinput[1][0], playerinput[1][0] + len(word))]
# bmask is a boolean mask to find out which positions are not occupied on the board at the location of the word
bmask = board[p[0][0]:p[-1][0] + 1, p[0][1]] == 52
letters = np.array(word)[bmask].tolist()
positions = np.array(p)[bmask].tolist()
elif playerinput[2] == 'h':
p = [(playerinput[1][0], x) for x in range(playerinput[1][1], playerinput[1][1] + len(word))]
bmask = board[p[0][0], p[0][1]:p[-1][1] + 1] == 52
letters = np.array(word)[bmask].tolist()
positions = np.array(p)[bmask].tolist()
return letters, positions
def island_words_tester(positions, board): # to check if the entered word is in continuation with at least 1 existing word on the board
test = False
for i in positions:
if i[0] == 7 and i[1] == 7: # to ensure the first move involves the centre tile
test = True
else:
adjacent_positions = [(i[0] - 1, i[1]), (i[0] + 1, i[1]),(i[0], i[1] - 1), (i[0], i[1] + 1)] # checking for adjacency
adjacent_positions = [x for x in adjacent_positions if ((x[0] >= 0) and (x[0] < 15)) and ((x[1] >= 0) and (x[1] < 15))]
for j in adjacent_positions:
if board[j[0], j[1]] < 52:
test = True
return test
# This function takes the positions of the letters placed on the board, and returns a list of the words made
# by the placement of the letters. The format of the returned list is that it is a list of tuples, each a word
# represented by the positions of the corresponding letters.
def wordsmade(letters, positions, mainboard):
# prepping stuff
l1 = dict(zip(string.ascii_uppercase, list(range(0, 26, 1))))
l2 = dict(zip(string.ascii_lowercase, list(range(26, 52, 1))))
letternumberkey = {**l1, **l2}
letternumberkey[' '] = 52
board = mainboard.copy()
# prepped and ready!
for i in range(len(letters)):
board[positions[i][0], positions[i][1]] = letternumberkey[letters[i]] # locally modify the board
wordsp = []
for i in positions:
# horizontally looking for words made
ph = np.array(list(zip([i[0]] * 15, list(range(0, 15)))))[board[i[0], :] < 52].tolist()
# a list of all occupied places on the board
r = list(map(tuple, ph))
# trimming the list so that any places after an unoccupied place from the placed letters are removed
if len(r) > 1:
for j in range(r.index(tuple(i)), 0, -1):
if (r[j][1] - r[j - 1][1]) > 1:
r = r[j:]
break
for j in range(r.index(tuple(i)), len(r), 1):
try:
if (r[j + 1][1] - r[j][1]) > 1:
r = r[:j + 1]
break
except IndexError: # if the +1 causes the index to exceed the limit
pass
if len(r) > 1:
wordsp.append(r)
# vertically looking for words made
pv = np.array(list(zip(list(range(0, 15)), [i[1]] * 15)))[board[:, i[1]] < 52].tolist()
s = list(map(tuple, pv))
if len(s) > 1:
for j in range(s.index(tuple(i)), 0, -1):
if (s[j][0] - s[j - 1][0]) > 1:
s = s[j:]
break
for j in range(s.index(tuple(i)), len(s), 1):
try:
if (s[j + 1][0] - s[j][0]) > 1:
s = s[:j + 1]
break
except IndexError:
pass
if len(s) > 1:
wordsp.append(s)
wordspq = []
for i in wordsp:
wordspq.append(tuple(i))
return list(set(wordspq)) # set is used here to remove redundant words.
def validword(words, filename='wordlist/sowpods.txt'): # checks if the word is present in the wordlist
with open(filename, 'r') as f:
rd = f.read()
rd = rd.split('\n')
rd = set(rd)
for j in words:
if j not in rd:
return False, j
return True, True
def racksufficiency(letters, rack): # check if the desired move can be achieved based on the rack of the player
rackblanks = [x for x in rack if x == ' ']
blanks = [x for x in letters if x != x.upper()]
if len(blanks) > len(rackblanks):
return False, 'You entered special tiles (lower case letters) more than you have.'
for i in [x for x in letters if x == x.upper()]:
if i not in [x for x in letters if x != ' ']:
return False, 'You do not have the tiles to play the word you want to.'
return True, True
def overlaptester(playerinput, board):  # if the desired word conflicts with letters already on board
    """Return True when the proposed word agrees (case-insensitively) with
    every letter already occupying a board square it would cover.

    *playerinput* is [word, (row, col), 'h' | 'v']; *board* is the 15x15
    numeric board where 52 encodes an empty square.
    """
    # number -> letter translation table (52 encodes an empty square)
    l_1 = dict(zip(list(range(0, 26, 1)), string.ascii_uppercase))
    l_2 = dict(zip(list(range(26, 52, 1)), string.ascii_lowercase))
    numberletterkey = {**l_1, **l_2}
    numberletterkey[52] = ' '
    word = playerinput[0]
    row, col = playerinput[1]
    if playerinput[2] == 'v':
        cells = [(row + offset, col) for offset in range(len(word))]
    elif playerinput[2] == 'h':
        cells = [(row, col + offset) for offset in range(len(word))]
    else:
        return True
    # Compare cell-by-cell: an occupied square must already hold the same
    # letter (ignoring case) as the word wants to place there.
    for letter, (r, c) in zip(word, cells):
        existing = board[r, c]
        if existing != 52 and numberletterkey[existing].lower() != letter.lower():
            return False
    return True
def mainrules(playerinput, board, rack, validity=True, filename='wordlist/sowpods.txt'):  # applies all the checks on the input
    """Run every rule check on a proposed move.

    Returns (True, move, words) when the move is legal, otherwise
    (False, False, reason_string) describing the first failed check.
    """
    # prepping: letter <-> number translation tables (52 encodes an empty square)
    l_1 = dict(zip(list(range(0, 26, 1)), string.ascii_uppercase))
    l_2 = dict(zip(list(range(26, 52, 1)), string.ascii_lowercase))
    numberletterkey = {**l_1, **l_2}
    numberletterkey[52] = ' '
    l1 = dict(zip(string.ascii_uppercase, list(range(0, 26, 1))))
    l2 = dict(zip(string.ascii_lowercase, list(range(26, 52, 1))))
    letternumberkey = {**l1, **l2}
    letternumberkey[' '] = 52
    # prepped and ready!
    if not boundarytester(playerinput):
        return False, False, "Your word extends outside the board."
    if not overlaptester(playerinput, board):
        return False, False, "The word you want to put requires different letters at places where there already are letters."
    move = moveconverter(playerinput, board)
    words = wordsmade(move[0], move[1], board)
    if not island_words_tester(move[1], board):
        # island_words_tester anchors the first move on (7,7); the original
        # message wrongly told the player (6,6).
        return False, False, "The first move has to contain the middle square(7,7). Rest must be connected via at least 1 letter to the words on the board."
    internal_board = board.copy()  # to modify the board locally
    for i in range(len(move[0])):
        internal_board[move[1][i][0], move[1][i][1]] = letternumberkey[move[0][i]]
    actual_words = []  # the words in actuality (no position/letters-represented-by-numbers)
    for positions in words:
        w = []
        for j in positions:
            w.append(numberletterkey[internal_board[j[0], j[1]]])
        actual_words.append(''.join(w).lower())
    if validity:
        # Call validword once instead of twice: each call re-reads the word
        # list file from disk.
        valid, offender = validword(actual_words, filename)
        if not valid:
            return False, False, "Validity mode is on. One of the words you formed is not valid: " + offender
    sufficient, reason = racksufficiency(move[0], rack)
    if not sufficient:
        return False, False, reason
    return True, move, words
|
flexible
|
{
"blob_id": "2cb0f2fbf3ceddb2f1ee65614506dbfb3b5c8089",
"index": 4736,
"step-1": "<mask token>\n\n\ndef boundarytester(playerinput):\n if playerinput[1][0] > 14 or playerinput[1][0] < 0 or playerinput[1][1\n ] > 14 or playerinput[1][1] < 0:\n return False\n if playerinput[2] == 'h':\n if playerinput[1][1] + len(list(playerinput[0])) - 1 > 14:\n return False\n if playerinput[2] == 'v':\n if playerinput[1][0] + len(list(playerinput[0])) - 1 > 14:\n return False\n return True\n\n\ndef moveconverter(playerinput, board):\n word = list(playerinput[0])\n if playerinput[2] == 'v':\n p = [(x, playerinput[1][1]) for x in range(playerinput[1][0], \n playerinput[1][0] + len(word))]\n bmask = board[p[0][0]:p[-1][0] + 1, p[0][1]] == 52\n letters = np.array(word)[bmask].tolist()\n positions = np.array(p)[bmask].tolist()\n elif playerinput[2] == 'h':\n p = [(playerinput[1][0], x) for x in range(playerinput[1][1], \n playerinput[1][1] + len(word))]\n bmask = board[p[0][0], p[0][1]:p[-1][1] + 1] == 52\n letters = np.array(word)[bmask].tolist()\n positions = np.array(p)[bmask].tolist()\n return letters, positions\n\n\n<mask token>\n\n\ndef wordsmade(letters, positions, mainboard):\n l1 = dict(zip(string.ascii_uppercase, list(range(0, 26, 1))))\n l2 = dict(zip(string.ascii_lowercase, list(range(26, 52, 1))))\n letternumberkey = {**l1, **l2}\n letternumberkey[' '] = 52\n board = mainboard.copy()\n for i in range(len(letters)):\n board[positions[i][0], positions[i][1]] = letternumberkey[letters[i]]\n wordsp = []\n for i in positions:\n ph = np.array(list(zip([i[0]] * 15, list(range(0, 15)))))[board[i[0\n ], :] < 52].tolist()\n r = list(map(tuple, ph))\n if len(r) > 1:\n for j in range(r.index(tuple(i)), 0, -1):\n if r[j][1] - r[j - 1][1] > 1:\n r = r[j:]\n break\n for j in range(r.index(tuple(i)), len(r), 1):\n try:\n if r[j + 1][1] - r[j][1] > 1:\n r = r[:j + 1]\n break\n except IndexError:\n pass\n if len(r) > 1:\n wordsp.append(r)\n pv = np.array(list(zip(list(range(0, 15)), [i[1]] * 15)))[board[:,\n i[1]] < 52].tolist()\n s = list(map(tuple, pv))\n if 
len(s) > 1:\n for j in range(s.index(tuple(i)), 0, -1):\n if s[j][0] - s[j - 1][0] > 1:\n s = s[j:]\n break\n for j in range(s.index(tuple(i)), len(s), 1):\n try:\n if s[j + 1][0] - s[j][0] > 1:\n s = s[:j + 1]\n break\n except IndexError:\n pass\n if len(s) > 1:\n wordsp.append(s)\n wordspq = []\n for i in wordsp:\n wordspq.append(tuple(i))\n return list(set(wordspq))\n\n\ndef validword(words, filename='wordlist/sowpods.txt'):\n with open(filename, 'r') as f:\n rd = f.read()\n rd = rd.split('\\n')\n rd = set(rd)\n for j in words:\n if j not in rd:\n return False, j\n return True, True\n\n\n<mask token>\n\n\ndef overlaptester(playerinput, board):\n l_1 = dict(zip(list(range(0, 26, 1)), string.ascii_uppercase))\n l_2 = dict(zip(list(range(26, 52, 1)), string.ascii_lowercase))\n numberletterkey = {**l_1, **l_2}\n numberletterkey[52] = ' '\n word = playerinput[0]\n if playerinput[2] == 'v':\n p = [(x, playerinput[1][1]) for x in range(playerinput[1][0], \n playerinput[1][0] + len(word))]\n bmask = board[p[0][0]:p[-1][0] + 1, p[0][1]] != 52\n overlapletters = ''.join(np.array(list(word))[bmask].tolist()).lower()\n existingletters = ''.join(list(map(lambda x: numberletterkey[x],\n board[p[0][0]:p[-1][0] + 1, p[0][1]][bmask]))).lower()\n if overlapletters != existingletters:\n return False\n elif playerinput[2] == 'h':\n p = [(playerinput[1][0], x) for x in range(playerinput[1][1], \n playerinput[1][1] + len(word))]\n bmask = board[p[0][0], p[0][1]:p[-1][1] + 1] != 52\n overlapletters = ''.join(np.array(list(word))[bmask].tolist()).lower()\n existingletters = ''.join(list(map(lambda x: numberletterkey[x],\n board[p[0][0], p[0][1]:p[-1][1] + 1][bmask]))).lower()\n if overlapletters != existingletters:\n return False\n return True\n\n\ndef mainrules(playerinput, board, rack, validity=True, filename=\n 'wordlist/sowpods.txt'):\n l_1 = dict(zip(list(range(0, 26, 1)), string.ascii_uppercase))\n l_2 = dict(zip(list(range(26, 52, 1)), string.ascii_lowercase))\n numberletterkey 
= {**l_1, **l_2}\n numberletterkey[52] = ' '\n l1 = dict(zip(string.ascii_uppercase, list(range(0, 26, 1))))\n l2 = dict(zip(string.ascii_lowercase, list(range(26, 52, 1))))\n letternumberkey = {**l1, **l2}\n letternumberkey[' '] = 52\n if not boundarytester(playerinput):\n return False, False, 'Your word extends outside the board.'\n if not overlaptester(playerinput, board):\n return (False, False,\n 'The word you want to put requires different letters at places where there already are letters.'\n )\n move = moveconverter(playerinput, board)\n words = wordsmade(move[0], move[1], board)\n if not island_words_tester(move[1], board):\n return (False, False,\n 'The first move has to contain the middle square(6,6). Rest must be connected via at least 1 letter to the words on the board.'\n )\n internal_board = board.copy()\n for i in range(len(move[0])):\n internal_board[move[1][i][0], move[1][i][1]] = letternumberkey[move\n [0][i]]\n actual_words = []\n for i in words:\n w = []\n for j in i:\n w.append(numberletterkey[internal_board[j[0], j[1]]])\n actual_words.append(''.join(w).lower())\n if validity:\n if not validword(actual_words, filename)[0]:\n return (False, False, \n 'Validity mode is on. One of the words you formed is not valid: '\n + validword(actual_words, filename)[1])\n if not racksufficiency(move[0], rack)[0]:\n return False, False, racksufficiency(move[0], rack)[1]\n return True, move, words\n",
"step-2": "<mask token>\n\n\ndef boundarytester(playerinput):\n if playerinput[1][0] > 14 or playerinput[1][0] < 0 or playerinput[1][1\n ] > 14 or playerinput[1][1] < 0:\n return False\n if playerinput[2] == 'h':\n if playerinput[1][1] + len(list(playerinput[0])) - 1 > 14:\n return False\n if playerinput[2] == 'v':\n if playerinput[1][0] + len(list(playerinput[0])) - 1 > 14:\n return False\n return True\n\n\ndef moveconverter(playerinput, board):\n word = list(playerinput[0])\n if playerinput[2] == 'v':\n p = [(x, playerinput[1][1]) for x in range(playerinput[1][0], \n playerinput[1][0] + len(word))]\n bmask = board[p[0][0]:p[-1][0] + 1, p[0][1]] == 52\n letters = np.array(word)[bmask].tolist()\n positions = np.array(p)[bmask].tolist()\n elif playerinput[2] == 'h':\n p = [(playerinput[1][0], x) for x in range(playerinput[1][1], \n playerinput[1][1] + len(word))]\n bmask = board[p[0][0], p[0][1]:p[-1][1] + 1] == 52\n letters = np.array(word)[bmask].tolist()\n positions = np.array(p)[bmask].tolist()\n return letters, positions\n\n\n<mask token>\n\n\ndef wordsmade(letters, positions, mainboard):\n l1 = dict(zip(string.ascii_uppercase, list(range(0, 26, 1))))\n l2 = dict(zip(string.ascii_lowercase, list(range(26, 52, 1))))\n letternumberkey = {**l1, **l2}\n letternumberkey[' '] = 52\n board = mainboard.copy()\n for i in range(len(letters)):\n board[positions[i][0], positions[i][1]] = letternumberkey[letters[i]]\n wordsp = []\n for i in positions:\n ph = np.array(list(zip([i[0]] * 15, list(range(0, 15)))))[board[i[0\n ], :] < 52].tolist()\n r = list(map(tuple, ph))\n if len(r) > 1:\n for j in range(r.index(tuple(i)), 0, -1):\n if r[j][1] - r[j - 1][1] > 1:\n r = r[j:]\n break\n for j in range(r.index(tuple(i)), len(r), 1):\n try:\n if r[j + 1][1] - r[j][1] > 1:\n r = r[:j + 1]\n break\n except IndexError:\n pass\n if len(r) > 1:\n wordsp.append(r)\n pv = np.array(list(zip(list(range(0, 15)), [i[1]] * 15)))[board[:,\n i[1]] < 52].tolist()\n s = list(map(tuple, pv))\n if 
len(s) > 1:\n for j in range(s.index(tuple(i)), 0, -1):\n if s[j][0] - s[j - 1][0] > 1:\n s = s[j:]\n break\n for j in range(s.index(tuple(i)), len(s), 1):\n try:\n if s[j + 1][0] - s[j][0] > 1:\n s = s[:j + 1]\n break\n except IndexError:\n pass\n if len(s) > 1:\n wordsp.append(s)\n wordspq = []\n for i in wordsp:\n wordspq.append(tuple(i))\n return list(set(wordspq))\n\n\ndef validword(words, filename='wordlist/sowpods.txt'):\n with open(filename, 'r') as f:\n rd = f.read()\n rd = rd.split('\\n')\n rd = set(rd)\n for j in words:\n if j not in rd:\n return False, j\n return True, True\n\n\ndef racksufficiency(letters, rack):\n rackblanks = [x for x in rack if x == ' ']\n blanks = [x for x in letters if x != x.upper()]\n if len(blanks) > len(rackblanks):\n return (False,\n 'You entered special tiles (lower case letters) more than you have.'\n )\n for i in [x for x in letters if x == x.upper()]:\n if i not in [x for x in letters if x != ' ']:\n return (False,\n 'You do not have the tiles to play the word you want to.')\n return True, True\n\n\ndef overlaptester(playerinput, board):\n l_1 = dict(zip(list(range(0, 26, 1)), string.ascii_uppercase))\n l_2 = dict(zip(list(range(26, 52, 1)), string.ascii_lowercase))\n numberletterkey = {**l_1, **l_2}\n numberletterkey[52] = ' '\n word = playerinput[0]\n if playerinput[2] == 'v':\n p = [(x, playerinput[1][1]) for x in range(playerinput[1][0], \n playerinput[1][0] + len(word))]\n bmask = board[p[0][0]:p[-1][0] + 1, p[0][1]] != 52\n overlapletters = ''.join(np.array(list(word))[bmask].tolist()).lower()\n existingletters = ''.join(list(map(lambda x: numberletterkey[x],\n board[p[0][0]:p[-1][0] + 1, p[0][1]][bmask]))).lower()\n if overlapletters != existingletters:\n return False\n elif playerinput[2] == 'h':\n p = [(playerinput[1][0], x) for x in range(playerinput[1][1], \n playerinput[1][1] + len(word))]\n bmask = board[p[0][0], p[0][1]:p[-1][1] + 1] != 52\n overlapletters = 
''.join(np.array(list(word))[bmask].tolist()).lower()\n existingletters = ''.join(list(map(lambda x: numberletterkey[x],\n board[p[0][0], p[0][1]:p[-1][1] + 1][bmask]))).lower()\n if overlapletters != existingletters:\n return False\n return True\n\n\ndef mainrules(playerinput, board, rack, validity=True, filename=\n 'wordlist/sowpods.txt'):\n l_1 = dict(zip(list(range(0, 26, 1)), string.ascii_uppercase))\n l_2 = dict(zip(list(range(26, 52, 1)), string.ascii_lowercase))\n numberletterkey = {**l_1, **l_2}\n numberletterkey[52] = ' '\n l1 = dict(zip(string.ascii_uppercase, list(range(0, 26, 1))))\n l2 = dict(zip(string.ascii_lowercase, list(range(26, 52, 1))))\n letternumberkey = {**l1, **l2}\n letternumberkey[' '] = 52\n if not boundarytester(playerinput):\n return False, False, 'Your word extends outside the board.'\n if not overlaptester(playerinput, board):\n return (False, False,\n 'The word you want to put requires different letters at places where there already are letters.'\n )\n move = moveconverter(playerinput, board)\n words = wordsmade(move[0], move[1], board)\n if not island_words_tester(move[1], board):\n return (False, False,\n 'The first move has to contain the middle square(6,6). Rest must be connected via at least 1 letter to the words on the board.'\n )\n internal_board = board.copy()\n for i in range(len(move[0])):\n internal_board[move[1][i][0], move[1][i][1]] = letternumberkey[move\n [0][i]]\n actual_words = []\n for i in words:\n w = []\n for j in i:\n w.append(numberletterkey[internal_board[j[0], j[1]]])\n actual_words.append(''.join(w).lower())\n if validity:\n if not validword(actual_words, filename)[0]:\n return (False, False, \n 'Validity mode is on. One of the words you formed is not valid: '\n + validword(actual_words, filename)[1])\n if not racksufficiency(move[0], rack)[0]:\n return False, False, racksufficiency(move[0], rack)[1]\n return True, move, words\n",
"step-3": "<mask token>\n\n\ndef boundarytester(playerinput):\n if playerinput[1][0] > 14 or playerinput[1][0] < 0 or playerinput[1][1\n ] > 14 or playerinput[1][1] < 0:\n return False\n if playerinput[2] == 'h':\n if playerinput[1][1] + len(list(playerinput[0])) - 1 > 14:\n return False\n if playerinput[2] == 'v':\n if playerinput[1][0] + len(list(playerinput[0])) - 1 > 14:\n return False\n return True\n\n\ndef moveconverter(playerinput, board):\n word = list(playerinput[0])\n if playerinput[2] == 'v':\n p = [(x, playerinput[1][1]) for x in range(playerinput[1][0], \n playerinput[1][0] + len(word))]\n bmask = board[p[0][0]:p[-1][0] + 1, p[0][1]] == 52\n letters = np.array(word)[bmask].tolist()\n positions = np.array(p)[bmask].tolist()\n elif playerinput[2] == 'h':\n p = [(playerinput[1][0], x) for x in range(playerinput[1][1], \n playerinput[1][1] + len(word))]\n bmask = board[p[0][0], p[0][1]:p[-1][1] + 1] == 52\n letters = np.array(word)[bmask].tolist()\n positions = np.array(p)[bmask].tolist()\n return letters, positions\n\n\ndef island_words_tester(positions, board):\n test = False\n for i in positions:\n if i[0] == 7 and i[1] == 7:\n test = True\n else:\n adjacent_positions = [(i[0] - 1, i[1]), (i[0] + 1, i[1]), (i[0],\n i[1] - 1), (i[0], i[1] + 1)]\n adjacent_positions = [x for x in adjacent_positions if (x[0] >=\n 0 and x[0] < 15) and (x[1] >= 0 and x[1] < 15)]\n for j in adjacent_positions:\n if board[j[0], j[1]] < 52:\n test = True\n return test\n\n\ndef wordsmade(letters, positions, mainboard):\n l1 = dict(zip(string.ascii_uppercase, list(range(0, 26, 1))))\n l2 = dict(zip(string.ascii_lowercase, list(range(26, 52, 1))))\n letternumberkey = {**l1, **l2}\n letternumberkey[' '] = 52\n board = mainboard.copy()\n for i in range(len(letters)):\n board[positions[i][0], positions[i][1]] = letternumberkey[letters[i]]\n wordsp = []\n for i in positions:\n ph = np.array(list(zip([i[0]] * 15, list(range(0, 15)))))[board[i[0\n ], :] < 52].tolist()\n r = 
list(map(tuple, ph))\n if len(r) > 1:\n for j in range(r.index(tuple(i)), 0, -1):\n if r[j][1] - r[j - 1][1] > 1:\n r = r[j:]\n break\n for j in range(r.index(tuple(i)), len(r), 1):\n try:\n if r[j + 1][1] - r[j][1] > 1:\n r = r[:j + 1]\n break\n except IndexError:\n pass\n if len(r) > 1:\n wordsp.append(r)\n pv = np.array(list(zip(list(range(0, 15)), [i[1]] * 15)))[board[:,\n i[1]] < 52].tolist()\n s = list(map(tuple, pv))\n if len(s) > 1:\n for j in range(s.index(tuple(i)), 0, -1):\n if s[j][0] - s[j - 1][0] > 1:\n s = s[j:]\n break\n for j in range(s.index(tuple(i)), len(s), 1):\n try:\n if s[j + 1][0] - s[j][0] > 1:\n s = s[:j + 1]\n break\n except IndexError:\n pass\n if len(s) > 1:\n wordsp.append(s)\n wordspq = []\n for i in wordsp:\n wordspq.append(tuple(i))\n return list(set(wordspq))\n\n\ndef validword(words, filename='wordlist/sowpods.txt'):\n with open(filename, 'r') as f:\n rd = f.read()\n rd = rd.split('\\n')\n rd = set(rd)\n for j in words:\n if j not in rd:\n return False, j\n return True, True\n\n\ndef racksufficiency(letters, rack):\n rackblanks = [x for x in rack if x == ' ']\n blanks = [x for x in letters if x != x.upper()]\n if len(blanks) > len(rackblanks):\n return (False,\n 'You entered special tiles (lower case letters) more than you have.'\n )\n for i in [x for x in letters if x == x.upper()]:\n if i not in [x for x in letters if x != ' ']:\n return (False,\n 'You do not have the tiles to play the word you want to.')\n return True, True\n\n\ndef overlaptester(playerinput, board):\n l_1 = dict(zip(list(range(0, 26, 1)), string.ascii_uppercase))\n l_2 = dict(zip(list(range(26, 52, 1)), string.ascii_lowercase))\n numberletterkey = {**l_1, **l_2}\n numberletterkey[52] = ' '\n word = playerinput[0]\n if playerinput[2] == 'v':\n p = [(x, playerinput[1][1]) for x in range(playerinput[1][0], \n playerinput[1][0] + len(word))]\n bmask = board[p[0][0]:p[-1][0] + 1, p[0][1]] != 52\n overlapletters = 
''.join(np.array(list(word))[bmask].tolist()).lower()\n existingletters = ''.join(list(map(lambda x: numberletterkey[x],\n board[p[0][0]:p[-1][0] + 1, p[0][1]][bmask]))).lower()\n if overlapletters != existingletters:\n return False\n elif playerinput[2] == 'h':\n p = [(playerinput[1][0], x) for x in range(playerinput[1][1], \n playerinput[1][1] + len(word))]\n bmask = board[p[0][0], p[0][1]:p[-1][1] + 1] != 52\n overlapletters = ''.join(np.array(list(word))[bmask].tolist()).lower()\n existingletters = ''.join(list(map(lambda x: numberletterkey[x],\n board[p[0][0], p[0][1]:p[-1][1] + 1][bmask]))).lower()\n if overlapletters != existingletters:\n return False\n return True\n\n\ndef mainrules(playerinput, board, rack, validity=True, filename=\n 'wordlist/sowpods.txt'):\n l_1 = dict(zip(list(range(0, 26, 1)), string.ascii_uppercase))\n l_2 = dict(zip(list(range(26, 52, 1)), string.ascii_lowercase))\n numberletterkey = {**l_1, **l_2}\n numberletterkey[52] = ' '\n l1 = dict(zip(string.ascii_uppercase, list(range(0, 26, 1))))\n l2 = dict(zip(string.ascii_lowercase, list(range(26, 52, 1))))\n letternumberkey = {**l1, **l2}\n letternumberkey[' '] = 52\n if not boundarytester(playerinput):\n return False, False, 'Your word extends outside the board.'\n if not overlaptester(playerinput, board):\n return (False, False,\n 'The word you want to put requires different letters at places where there already are letters.'\n )\n move = moveconverter(playerinput, board)\n words = wordsmade(move[0], move[1], board)\n if not island_words_tester(move[1], board):\n return (False, False,\n 'The first move has to contain the middle square(6,6). 
Rest must be connected via at least 1 letter to the words on the board.'\n )\n internal_board = board.copy()\n for i in range(len(move[0])):\n internal_board[move[1][i][0], move[1][i][1]] = letternumberkey[move\n [0][i]]\n actual_words = []\n for i in words:\n w = []\n for j in i:\n w.append(numberletterkey[internal_board[j[0], j[1]]])\n actual_words.append(''.join(w).lower())\n if validity:\n if not validword(actual_words, filename)[0]:\n return (False, False, \n 'Validity mode is on. One of the words you formed is not valid: '\n + validword(actual_words, filename)[1])\n if not racksufficiency(move[0], rack)[0]:\n return False, False, racksufficiency(move[0], rack)[1]\n return True, move, words\n",
"step-4": "import numpy as np\nimport string\n\n\ndef boundarytester(playerinput):\n if playerinput[1][0] > 14 or playerinput[1][0] < 0 or playerinput[1][1\n ] > 14 or playerinput[1][1] < 0:\n return False\n if playerinput[2] == 'h':\n if playerinput[1][1] + len(list(playerinput[0])) - 1 > 14:\n return False\n if playerinput[2] == 'v':\n if playerinput[1][0] + len(list(playerinput[0])) - 1 > 14:\n return False\n return True\n\n\ndef moveconverter(playerinput, board):\n word = list(playerinput[0])\n if playerinput[2] == 'v':\n p = [(x, playerinput[1][1]) for x in range(playerinput[1][0], \n playerinput[1][0] + len(word))]\n bmask = board[p[0][0]:p[-1][0] + 1, p[0][1]] == 52\n letters = np.array(word)[bmask].tolist()\n positions = np.array(p)[bmask].tolist()\n elif playerinput[2] == 'h':\n p = [(playerinput[1][0], x) for x in range(playerinput[1][1], \n playerinput[1][1] + len(word))]\n bmask = board[p[0][0], p[0][1]:p[-1][1] + 1] == 52\n letters = np.array(word)[bmask].tolist()\n positions = np.array(p)[bmask].tolist()\n return letters, positions\n\n\ndef island_words_tester(positions, board):\n test = False\n for i in positions:\n if i[0] == 7 and i[1] == 7:\n test = True\n else:\n adjacent_positions = [(i[0] - 1, i[1]), (i[0] + 1, i[1]), (i[0],\n i[1] - 1), (i[0], i[1] + 1)]\n adjacent_positions = [x for x in adjacent_positions if (x[0] >=\n 0 and x[0] < 15) and (x[1] >= 0 and x[1] < 15)]\n for j in adjacent_positions:\n if board[j[0], j[1]] < 52:\n test = True\n return test\n\n\ndef wordsmade(letters, positions, mainboard):\n l1 = dict(zip(string.ascii_uppercase, list(range(0, 26, 1))))\n l2 = dict(zip(string.ascii_lowercase, list(range(26, 52, 1))))\n letternumberkey = {**l1, **l2}\n letternumberkey[' '] = 52\n board = mainboard.copy()\n for i in range(len(letters)):\n board[positions[i][0], positions[i][1]] = letternumberkey[letters[i]]\n wordsp = []\n for i in positions:\n ph = np.array(list(zip([i[0]] * 15, list(range(0, 15)))))[board[i[0\n ], :] < 
52].tolist()\n r = list(map(tuple, ph))\n if len(r) > 1:\n for j in range(r.index(tuple(i)), 0, -1):\n if r[j][1] - r[j - 1][1] > 1:\n r = r[j:]\n break\n for j in range(r.index(tuple(i)), len(r), 1):\n try:\n if r[j + 1][1] - r[j][1] > 1:\n r = r[:j + 1]\n break\n except IndexError:\n pass\n if len(r) > 1:\n wordsp.append(r)\n pv = np.array(list(zip(list(range(0, 15)), [i[1]] * 15)))[board[:,\n i[1]] < 52].tolist()\n s = list(map(tuple, pv))\n if len(s) > 1:\n for j in range(s.index(tuple(i)), 0, -1):\n if s[j][0] - s[j - 1][0] > 1:\n s = s[j:]\n break\n for j in range(s.index(tuple(i)), len(s), 1):\n try:\n if s[j + 1][0] - s[j][0] > 1:\n s = s[:j + 1]\n break\n except IndexError:\n pass\n if len(s) > 1:\n wordsp.append(s)\n wordspq = []\n for i in wordsp:\n wordspq.append(tuple(i))\n return list(set(wordspq))\n\n\ndef validword(words, filename='wordlist/sowpods.txt'):\n with open(filename, 'r') as f:\n rd = f.read()\n rd = rd.split('\\n')\n rd = set(rd)\n for j in words:\n if j not in rd:\n return False, j\n return True, True\n\n\ndef racksufficiency(letters, rack):\n rackblanks = [x for x in rack if x == ' ']\n blanks = [x for x in letters if x != x.upper()]\n if len(blanks) > len(rackblanks):\n return (False,\n 'You entered special tiles (lower case letters) more than you have.'\n )\n for i in [x for x in letters if x == x.upper()]:\n if i not in [x for x in letters if x != ' ']:\n return (False,\n 'You do not have the tiles to play the word you want to.')\n return True, True\n\n\ndef overlaptester(playerinput, board):\n l_1 = dict(zip(list(range(0, 26, 1)), string.ascii_uppercase))\n l_2 = dict(zip(list(range(26, 52, 1)), string.ascii_lowercase))\n numberletterkey = {**l_1, **l_2}\n numberletterkey[52] = ' '\n word = playerinput[0]\n if playerinput[2] == 'v':\n p = [(x, playerinput[1][1]) for x in range(playerinput[1][0], \n playerinput[1][0] + len(word))]\n bmask = board[p[0][0]:p[-1][0] + 1, p[0][1]] != 52\n overlapletters = 
''.join(np.array(list(word))[bmask].tolist()).lower()\n existingletters = ''.join(list(map(lambda x: numberletterkey[x],\n board[p[0][0]:p[-1][0] + 1, p[0][1]][bmask]))).lower()\n if overlapletters != existingletters:\n return False\n elif playerinput[2] == 'h':\n p = [(playerinput[1][0], x) for x in range(playerinput[1][1], \n playerinput[1][1] + len(word))]\n bmask = board[p[0][0], p[0][1]:p[-1][1] + 1] != 52\n overlapletters = ''.join(np.array(list(word))[bmask].tolist()).lower()\n existingletters = ''.join(list(map(lambda x: numberletterkey[x],\n board[p[0][0], p[0][1]:p[-1][1] + 1][bmask]))).lower()\n if overlapletters != existingletters:\n return False\n return True\n\n\ndef mainrules(playerinput, board, rack, validity=True, filename=\n 'wordlist/sowpods.txt'):\n l_1 = dict(zip(list(range(0, 26, 1)), string.ascii_uppercase))\n l_2 = dict(zip(list(range(26, 52, 1)), string.ascii_lowercase))\n numberletterkey = {**l_1, **l_2}\n numberletterkey[52] = ' '\n l1 = dict(zip(string.ascii_uppercase, list(range(0, 26, 1))))\n l2 = dict(zip(string.ascii_lowercase, list(range(26, 52, 1))))\n letternumberkey = {**l1, **l2}\n letternumberkey[' '] = 52\n if not boundarytester(playerinput):\n return False, False, 'Your word extends outside the board.'\n if not overlaptester(playerinput, board):\n return (False, False,\n 'The word you want to put requires different letters at places where there already are letters.'\n )\n move = moveconverter(playerinput, board)\n words = wordsmade(move[0], move[1], board)\n if not island_words_tester(move[1], board):\n return (False, False,\n 'The first move has to contain the middle square(6,6). 
Rest must be connected via at least 1 letter to the words on the board.'\n )\n internal_board = board.copy()\n for i in range(len(move[0])):\n internal_board[move[1][i][0], move[1][i][1]] = letternumberkey[move\n [0][i]]\n actual_words = []\n for i in words:\n w = []\n for j in i:\n w.append(numberletterkey[internal_board[j[0], j[1]]])\n actual_words.append(''.join(w).lower())\n if validity:\n if not validword(actual_words, filename)[0]:\n return (False, False, \n 'Validity mode is on. One of the words you formed is not valid: '\n + validword(actual_words, filename)[1])\n if not racksufficiency(move[0], rack)[0]:\n return False, False, racksufficiency(move[0], rack)[1]\n return True, move, words\n",
"step-5": "# player input is: Word made, starting tile position of the word made, horizontal or vertical\n# example: playerinput = ['STRING', (0, 1), 'v']\nimport numpy as np\nimport string\n\n\ndef boundarytester(playerinput): # to check whether the player is placing the tiles within the confines of the board\n if playerinput[1][0] > 14 or playerinput[1][0] < 0 or playerinput[1][1] > 14 or playerinput[1][1] < 0:\n return False\n if playerinput[2] == 'h':\n if (playerinput[1][1] + len(list(playerinput[0])) - 1) > 14:\n return False\n if playerinput[2] == 'v':\n if (playerinput[1][0] + len(list(playerinput[0])) - 1) > 14:\n return False\n return True\n\n\ndef moveconverter(playerinput, board): # converting player input to internal lingo for a move\n word = list(playerinput[0])\n if playerinput[2] == 'v':\n # p is the list of positions of the tiles in the entered word\n p = [(x, playerinput[1][1]) for x in range(playerinput[1][0], playerinput[1][0] + len(word))]\n # bmask is a boolean mask to find out which positions are not occupied on the board at the location of the word\n bmask = board[p[0][0]:p[-1][0] + 1, p[0][1]] == 52\n letters = np.array(word)[bmask].tolist()\n positions = np.array(p)[bmask].tolist()\n\n elif playerinput[2] == 'h':\n p = [(playerinput[1][0], x) for x in range(playerinput[1][1], playerinput[1][1] + len(word))]\n bmask = board[p[0][0], p[0][1]:p[-1][1] + 1] == 52\n letters = np.array(word)[bmask].tolist()\n positions = np.array(p)[bmask].tolist()\n\n return letters, positions\n\n\ndef island_words_tester(positions, board): # to check if the entered word is in continuation with at least 1 existing word on the board\n test = False\n for i in positions:\n if i[0] == 7 and i[1] == 7: # to ensure the first move involves the centre tile\n test = True\n else:\n adjacent_positions = [(i[0] - 1, i[1]), (i[0] + 1, i[1]),(i[0], i[1] - 1), (i[0], i[1] + 1)] # checking for adjacency\n adjacent_positions = [x for x in adjacent_positions if ((x[0] >= 0) and 
(x[0] < 15)) and ((x[1] >= 0) and (x[1] < 15))]\n for j in adjacent_positions:\n if board[j[0], j[1]] < 52:\n test = True\n return test\n\n\n# This function takes the positions of the letters placed on the board, and returns a list of the words made\n# by the placement of the letters. The format of the returned list is that it is a list of tuples, each a word\n# represented by the positions of the corresponding letters.\ndef wordsmade(letters, positions, mainboard):\n # prepping stuff\n l1 = dict(zip(string.ascii_uppercase, list(range(0, 26, 1))))\n l2 = dict(zip(string.ascii_lowercase, list(range(26, 52, 1))))\n letternumberkey = {**l1, **l2}\n letternumberkey[' '] = 52\n board = mainboard.copy()\n # prepped and ready!\n for i in range(len(letters)):\n board[positions[i][0], positions[i][1]] = letternumberkey[letters[i]] # locally modify the board\n wordsp = []\n for i in positions:\n # horizontally looking for words made\n ph = np.array(list(zip([i[0]] * 15, list(range(0, 15)))))[board[i[0], :] < 52].tolist()\n # a list of all occupied places on the board\n r = list(map(tuple, ph))\n # trimming the list so that any places after an unoccupied place from the placed letters are removed\n if len(r) > 1:\n for j in range(r.index(tuple(i)), 0, -1):\n if (r[j][1] - r[j - 1][1]) > 1:\n r = r[j:]\n break\n for j in range(r.index(tuple(i)), len(r), 1):\n try:\n if (r[j + 1][1] - r[j][1]) > 1:\n r = r[:j + 1]\n break\n except IndexError: # if the +1 causes the index to exceed the limit\n pass\n if len(r) > 1:\n wordsp.append(r)\n\n # vertically looking for words made\n pv = np.array(list(zip(list(range(0, 15)), [i[1]] * 15)))[board[:, i[1]] < 52].tolist()\n s = list(map(tuple, pv))\n if len(s) > 1:\n for j in range(s.index(tuple(i)), 0, -1):\n if (s[j][0] - s[j - 1][0]) > 1:\n s = s[j:]\n break\n for j in range(s.index(tuple(i)), len(s), 1):\n try:\n if (s[j + 1][0] - s[j][0]) > 1:\n s = s[:j + 1]\n break\n except IndexError:\n pass\n if len(s) > 1:\n wordsp.append(s)\n 
wordspq = []\n for i in wordsp:\n wordspq.append(tuple(i))\n\n return list(set(wordspq)) # set is used here to remove redundant words.\n\n\ndef validword(words, filename='wordlist/sowpods.txt'): # checks if the word is present in the wordlist\n with open(filename, 'r') as f:\n rd = f.read()\n rd = rd.split('\\n')\n rd = set(rd)\n for j in words:\n if j not in rd:\n return False, j\n return True, True\n\n\ndef racksufficiency(letters, rack): # check if the desired move can be achieved based on the rack of the player\n rackblanks = [x for x in rack if x == ' ']\n blanks = [x for x in letters if x != x.upper()]\n if len(blanks) > len(rackblanks):\n return False, 'You entered special tiles (lower case letters) more than you have.'\n for i in [x for x in letters if x == x.upper()]:\n if i not in [x for x in letters if x != ' ']:\n return False, 'You do not have the tiles to play the word you want to.'\n return True, True\n \n\n\ndef overlaptester(playerinput, board): # if the desired word conflicts with letters already on board\n # prepping stuff\n l_1 = dict(zip(list(range(0, 26, 1)), string.ascii_uppercase))\n l_2 = dict(zip(list(range(26, 52, 1)), string.ascii_lowercase))\n numberletterkey = {**l_1, **l_2}\n numberletterkey[52] = ' '\n # prepped and ready!\n word = playerinput[0]\n if playerinput[2] == 'v':\n p = [(x, playerinput[1][1]) for x in range(playerinput[1][0], playerinput[1][0] + len(word))]\n # bmask is a boolean mask to find out which positions are occupied on the board\n bmask = board[p[0][0]:p[-1][0] + 1, p[0][1]] != 52\n overlapletters = ''.join(np.array(list(word))[bmask].tolist()).lower()\n existingletters = ''.join(\n list(map(lambda x: numberletterkey[x], board[p[0][0]:p[-1][0] + 1, p[0][1]][bmask]))).lower()\n if overlapletters != existingletters:\n return False\n elif playerinput[2] == 'h':\n p = [(playerinput[1][0], x) for x in range(playerinput[1][1], playerinput[1][1] + len(word))]\n bmask = board[p[0][0], p[0][1]:p[-1][1] + 1] != 52\n 
overlapletters = ''.join(np.array(list(word))[bmask].tolist()).lower()\n existingletters = ''.join(\n list(map(lambda x: numberletterkey[x], board[p[0][0], p[0][1]:p[-1][1] + 1][bmask]))).lower()\n if overlapletters != existingletters:\n return False\n return True\n\n\ndef mainrules(playerinput, board, rack, validity=True, filename='wordlist/sowpods.txt'): # applies all the checks on the input\n # prepping\n l_1 = dict(zip(list(range(0, 26, 1)), string.ascii_uppercase))\n l_2 = dict(zip(list(range(26, 52, 1)), string.ascii_lowercase))\n numberletterkey = {**l_1, **l_2}\n numberletterkey[52] = ' '\n l1 = dict(zip(string.ascii_uppercase, list(range(0, 26, 1))))\n l2 = dict(zip(string.ascii_lowercase, list(range(26, 52, 1))))\n letternumberkey = {**l1, **l2}\n letternumberkey[' '] = 52\n # prepped and ready!\n if not boundarytester(playerinput):\n return False, False, \"Your word extends outside the board.\"\n if not overlaptester(playerinput, board):\n return False, False, \"The word you want to put requires different letters at places where there already are letters.\"\n move = moveconverter(playerinput, board)\n words = wordsmade(move[0], move[1], board)\n if not island_words_tester(move[1], board):\n return False, False, \"The first move has to contain the middle square(6,6). Rest must be connected via at least 1 letter to the words on the board.\"\n internal_board = board.copy() # to modify the board locally\n for i in range(len(move[0])):\n internal_board[move[1][i][0], move[1][i][1]] = letternumberkey[move[0][i]]\n actual_words = [] # the words in actuality (no position/letters-represented-by-numbers)\n for i in words:\n w = []\n for j in i:\n w.append(numberletterkey[internal_board[j[0], j[1]]])\n actual_words.append(''.join(w).lower())\n if validity:\n if not validword(actual_words, filename)[0]:\n return False, False, \"Validity mode is on. 
One of the words you formed is not valid: \" + validword(actual_words, filename)[1]\n if not racksufficiency(move[0], rack)[0]:\n return False, False, racksufficiency(move[0], rack)[1]\n return True, move, words\n",
"step-ids": [
6,
7,
8,
9,
10
]
}
|
[
6,
7,
8,
9,
10
] |
from sys import stdin
def read():
    """Return the next line from stdin, stripped of surrounding whitespace."""
    # PEP 8 (E731): use a def instead of assigning a lambda to a name.
    return stdin.readline().strip()
class Trie:
    """A node in a name trie: its own label, an optional parent link, and a
    mapping from child label to child node."""

    def __init__(self, me, parent=None):
        self.children = {}
        self.parent = parent
        self.me = me
def get_answer(trie, count):
    """Print the subtree rooted at *trie*, one node per line, with '--'
    repeated once per depth level as indentation."""
    print("--" * count + trie.me)
    # Re-bind children sorted by key so this traversal (and any later one)
    # visits them in lexicographic order.
    trie.children = dict(sorted(trie.children.items(), key=lambda kv: kv[0]))
    for child in trie.children.values():
        get_answer(child, count + 1)
def main():
    """Read trie records from stdin, build one trie per top-level name,
    then print every trie (and its subtree) in sorted order."""
    trie_dict = {}
    for _ in range(int(read())):
        data = read().split()
        top = data[1]
        if top not in trie_dict:
            trie_dict[top] = Trie(top)
        cur = trie_dict[top]
        for part in data[2:]:
            # Create the child node on first sight, then descend into it.
            if part not in cur.children:
                cur.children[part] = Trie(part)
            cur = cur.children[part]
    for _, root in sorted(trie_dict.items()):
        get_answer(root, 0)


if __name__ == "__main__":
    main()
|
normal
|
{
"blob_id": "c5605f4770d61d435cc1817bad4d5cbe0aaf1d18",
"index": 8824,
"step-1": "<mask token>\n\n\nclass Trie:\n\n def __init__(self, me, parent=None):\n self.me = me\n self.parent = parent\n self.children = {}\n\n\n<mask token>\n\n\ndef main():\n trie_dict = {}\n for i in range(int(read())):\n data = read().split()\n if data[1] not in trie_dict:\n trie_dict[data[1]] = Trie(data[1])\n cur = trie_dict[data[1]]\n for j in range(2, len(data)):\n if data[j] not in cur.children:\n cur.children[data[j]] = Trie(data[j])\n cur = cur.children[data[j]]\n trie_dict = dict(sorted(trie_dict.items(), key=lambda x: x[0]))\n for k in trie_dict.keys():\n get_answer(trie_dict[k], 0)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Trie:\n\n def __init__(self, me, parent=None):\n self.me = me\n self.parent = parent\n self.children = {}\n\n\ndef get_answer(trie, count):\n print('--' * count + trie.me)\n trie.children = dict(sorted(trie.children.items(), key=lambda x: x[0]))\n for k in trie.children.keys():\n get_answer(trie.children[k], count + 1)\n\n\ndef main():\n trie_dict = {}\n for i in range(int(read())):\n data = read().split()\n if data[1] not in trie_dict:\n trie_dict[data[1]] = Trie(data[1])\n cur = trie_dict[data[1]]\n for j in range(2, len(data)):\n if data[j] not in cur.children:\n cur.children[data[j]] = Trie(data[j])\n cur = cur.children[data[j]]\n trie_dict = dict(sorted(trie_dict.items(), key=lambda x: x[0]))\n for k in trie_dict.keys():\n get_answer(trie_dict[k], 0)\n\n\nif __name__ == '__main__':\n main()\n",
"step-3": "<mask token>\nread = lambda : stdin.readline().strip()\n\n\nclass Trie:\n\n def __init__(self, me, parent=None):\n self.me = me\n self.parent = parent\n self.children = {}\n\n\ndef get_answer(trie, count):\n print('--' * count + trie.me)\n trie.children = dict(sorted(trie.children.items(), key=lambda x: x[0]))\n for k in trie.children.keys():\n get_answer(trie.children[k], count + 1)\n\n\ndef main():\n trie_dict = {}\n for i in range(int(read())):\n data = read().split()\n if data[1] not in trie_dict:\n trie_dict[data[1]] = Trie(data[1])\n cur = trie_dict[data[1]]\n for j in range(2, len(data)):\n if data[j] not in cur.children:\n cur.children[data[j]] = Trie(data[j])\n cur = cur.children[data[j]]\n trie_dict = dict(sorted(trie_dict.items(), key=lambda x: x[0]))\n for k in trie_dict.keys():\n get_answer(trie_dict[k], 0)\n\n\nif __name__ == '__main__':\n main()\n",
"step-4": "from sys import stdin\nread = lambda : stdin.readline().strip()\n\n\nclass Trie:\n\n def __init__(self, me, parent=None):\n self.me = me\n self.parent = parent\n self.children = {}\n\n\ndef get_answer(trie, count):\n print('--' * count + trie.me)\n trie.children = dict(sorted(trie.children.items(), key=lambda x: x[0]))\n for k in trie.children.keys():\n get_answer(trie.children[k], count + 1)\n\n\ndef main():\n trie_dict = {}\n for i in range(int(read())):\n data = read().split()\n if data[1] not in trie_dict:\n trie_dict[data[1]] = Trie(data[1])\n cur = trie_dict[data[1]]\n for j in range(2, len(data)):\n if data[j] not in cur.children:\n cur.children[data[j]] = Trie(data[j])\n cur = cur.children[data[j]]\n trie_dict = dict(sorted(trie_dict.items(), key=lambda x: x[0]))\n for k in trie_dict.keys():\n get_answer(trie_dict[k], 0)\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "from sys import stdin\nread = lambda: stdin.readline().strip()\n\n\nclass Trie:\n def __init__(self, me, parent=None):\n self.me = me\n self.parent = parent\n self.children = {}\n\n\ndef get_answer(trie, count):\n print((\"--\" * count) + trie.me)\n\n trie.children = dict(sorted(trie.children.items(), key=lambda x: x[0]))\n for k in trie.children.keys():\n get_answer(trie.children[k], count + 1)\n\n\ndef main():\n trie_dict = {}\n for i in range(int(read())):\n data = read().split()\n if data[1] not in trie_dict:\n trie_dict[data[1]] = Trie(data[1])\n\n cur = trie_dict[data[1]]\n for j in range(2, len(data)):\n # cur에 같은 데이터가 없을 경우\n if data[j] not in cur.children:\n cur.children[data[j]] = Trie(data[j])\n cur = cur.children[data[j]]\n\n trie_dict = dict(sorted(trie_dict.items(), key=lambda x: x[0]))\n for k in trie_dict.keys():\n get_answer(trie_dict[k], 0)\n\n\nif __name__ == \"__main__\":\n main()",
"step-ids": [
3,
5,
6,
7,
8
]
}
|
[
3,
5,
6,
7,
8
] |
from tkinter import *
import mathcalc as c
# Main application window; the single Entry widget doubles as the display.
root= Tk()
root.title("CALCULATOR")
ent=Entry(root,width=35)
ent.grid(row=0,column=0,columnspan=3,padx=10,pady=10)
#ent.grid(row=0,column=0)
ch=''  # pending operator ('+', '-', '*', '/'); set by the operator button callbacks
num=ent.get()  # NOTE(review): read once at startup (always ''), never used again — looks vestigial
def clicked(num):
    """Append *num* to whatever the display entry currently shows."""
    shown = ent.get()
    ent.delete(0, END)
    ent.insert(0, "{}{}".format(shown, num))
def click_clear():
    """Wipe the display entry completely."""
    ent.delete(0, END)
def add():
    """Select addition: remember '+' as the pending operator and echo it."""
    global ch
    ch = '+'
    clicked(ch)


def subtract():
    """Select subtraction: remember '-' as the pending operator and echo it."""
    global ch
    ch = '-'
    clicked(ch)


def multiply():
    """Select multiplication: remember '*' as the pending operator and echo it."""
    global ch
    ch = '*'
    clicked(ch)


def divide():
    """Select division: remember '/' as the pending operator and echo it."""
    global ch
    ch = '/'
    clicked(ch)
def equals():
    """Split the display on the pending operator, compute via mathcalc,
    and show the result in the display."""
    left, right = ent.get().split(ch)
    answer = c.calculate(float(left), float(right), ch)
    ent.delete(0, END)
    ent.insert(0, answer)
#buttons
# Digit buttons append their digit to the display via clicked(); operator
# buttons go through add/subtract/multiply/divide so the pending operator
# is recorded in 'ch' before being echoed.
but1=Button(root,text="1",padx=40,pady=20,command=lambda: clicked(1))
but2=Button(root,text="2",padx=40,pady=20,command=lambda: clicked(2))
but3=Button(root,text="3",padx=40,pady=20,command=lambda: clicked(3))
but4=Button(root,text="4",padx=40,pady=20,command=lambda: clicked(4))
but5=Button(root,text="5",padx=40,pady=20,command=lambda: clicked(5))
but6=Button(root,text="6",padx=40,pady=20,command=lambda: clicked(6))
but7=Button(root,text="7",padx=40,pady=20,command=lambda: clicked(7))
but8=Button(root,text="8",padx=40,pady=20,command=lambda: clicked(8))
but9=Button(root,text="9",padx=40,pady=20,command=lambda: clicked(9))
but0=Button(root,text="0",padx=40,pady=20,command=lambda: clicked(0))
but_plus=Button(root,text="+",padx=39,pady=20,command=add)
but_sub=Button(root,text="-",padx=40,pady=20,command=subtract)
but_mul=Button(root,text="*",padx=40,pady=20,command=multiply)
but_div=Button(root,text="/",padx=40,pady=20,command=divide)
but_eq=Button(root,text="=",padx=89,pady=20,command=equals)
but_clr=Button(root,text="C",padx=89,pady=20,command=click_clear)
#button place
# Grid layout: digits 7-9 / 4-6 / 1-3 on rows 1-3, 0 with '=' beside it on
# row 4, 'C' and the operators on rows 5-6.
but7.grid(row=1,column=0)
but8.grid(row=1,column=1)
but9.grid(row=1,column=2)
but4.grid(row=2,column=0)
but5.grid(row=2,column=1)
but6.grid(row=2,column=2)
but1.grid(row=3,column=0)
but2.grid(row=3,column=1)
but3.grid(row=3,column=2)
but0.grid(row=4,column=0)
but_plus.grid(row=5,column=0)
but_sub.grid(row=6,column=0)
but_mul.grid(row=6,column=1)
but_div.grid(row=6,column=2)
but_eq.grid(row=4,column=1,columnspan=2)
but_clr.grid(row=5,column=1,columnspan=2)
root.mainloop()  # start the Tk event loop (blocks until the window closes)
|
normal
|
{
"blob_id": "bdd9ebfa9a2f14d57efd527ca88032bfb0160a5e",
"index": 7504,
"step-1": "<mask token>\n\n\ndef clicked(num):\n current = ent.get()\n ent.delete(0, END)\n ent.insert(0, str(current) + str(num))\n\n\ndef click_clear():\n ent.delete(0, END)\n\n\ndef add():\n global ch\n ch = '+'\n clicked('+')\n\n\ndef subtract():\n global ch\n ch = '-'\n clicked('-')\n\n\ndef multiply():\n global ch\n ch = '*'\n clicked('*')\n\n\ndef divide():\n global ch\n ch = '/'\n clicked('/')\n\n\ndef equals():\n f_num, s_num = ent.get().split(ch)\n res = c.calculate(float(f_num), float(s_num), ch)\n ent.delete(0, END)\n ent.insert(0, res)\n\n\n<mask token>\n",
"step-2": "<mask token>\nroot.title('CALCULATOR')\n<mask token>\nent.grid(row=0, column=0, columnspan=3, padx=10, pady=10)\n<mask token>\n\n\ndef clicked(num):\n current = ent.get()\n ent.delete(0, END)\n ent.insert(0, str(current) + str(num))\n\n\ndef click_clear():\n ent.delete(0, END)\n\n\ndef add():\n global ch\n ch = '+'\n clicked('+')\n\n\ndef subtract():\n global ch\n ch = '-'\n clicked('-')\n\n\ndef multiply():\n global ch\n ch = '*'\n clicked('*')\n\n\ndef divide():\n global ch\n ch = '/'\n clicked('/')\n\n\ndef equals():\n f_num, s_num = ent.get().split(ch)\n res = c.calculate(float(f_num), float(s_num), ch)\n ent.delete(0, END)\n ent.insert(0, res)\n\n\n<mask token>\nbut7.grid(row=1, column=0)\nbut8.grid(row=1, column=1)\nbut9.grid(row=1, column=2)\nbut4.grid(row=2, column=0)\nbut5.grid(row=2, column=1)\nbut6.grid(row=2, column=2)\nbut1.grid(row=3, column=0)\nbut2.grid(row=3, column=1)\nbut3.grid(row=3, column=2)\nbut0.grid(row=4, column=0)\nbut_plus.grid(row=5, column=0)\nbut_sub.grid(row=6, column=0)\nbut_mul.grid(row=6, column=1)\nbut_div.grid(row=6, column=2)\nbut_eq.grid(row=4, column=1, columnspan=2)\nbut_clr.grid(row=5, column=1, columnspan=2)\nroot.mainloop()\n",
"step-3": "<mask token>\nroot = Tk()\nroot.title('CALCULATOR')\nent = Entry(root, width=35)\nent.grid(row=0, column=0, columnspan=3, padx=10, pady=10)\nch = ''\nnum = ent.get()\n\n\ndef clicked(num):\n current = ent.get()\n ent.delete(0, END)\n ent.insert(0, str(current) + str(num))\n\n\ndef click_clear():\n ent.delete(0, END)\n\n\ndef add():\n global ch\n ch = '+'\n clicked('+')\n\n\ndef subtract():\n global ch\n ch = '-'\n clicked('-')\n\n\ndef multiply():\n global ch\n ch = '*'\n clicked('*')\n\n\ndef divide():\n global ch\n ch = '/'\n clicked('/')\n\n\ndef equals():\n f_num, s_num = ent.get().split(ch)\n res = c.calculate(float(f_num), float(s_num), ch)\n ent.delete(0, END)\n ent.insert(0, res)\n\n\nbut1 = Button(root, text='1', padx=40, pady=20, command=lambda : clicked(1))\nbut2 = Button(root, text='2', padx=40, pady=20, command=lambda : clicked(2))\nbut3 = Button(root, text='3', padx=40, pady=20, command=lambda : clicked(3))\nbut4 = Button(root, text='4', padx=40, pady=20, command=lambda : clicked(4))\nbut5 = Button(root, text='5', padx=40, pady=20, command=lambda : clicked(5))\nbut6 = Button(root, text='6', padx=40, pady=20, command=lambda : clicked(6))\nbut7 = Button(root, text='7', padx=40, pady=20, command=lambda : clicked(7))\nbut8 = Button(root, text='8', padx=40, pady=20, command=lambda : clicked(8))\nbut9 = Button(root, text='9', padx=40, pady=20, command=lambda : clicked(9))\nbut0 = Button(root, text='0', padx=40, pady=20, command=lambda : clicked(0))\nbut_plus = Button(root, text='+', padx=39, pady=20, command=add)\nbut_sub = Button(root, text='-', padx=40, pady=20, command=subtract)\nbut_mul = Button(root, text='*', padx=40, pady=20, command=multiply)\nbut_div = Button(root, text='/', padx=40, pady=20, command=divide)\nbut_eq = Button(root, text='=', padx=89, pady=20, command=equals)\nbut_clr = Button(root, text='C', padx=89, pady=20, command=click_clear)\nbut7.grid(row=1, column=0)\nbut8.grid(row=1, column=1)\nbut9.grid(row=1, 
column=2)\nbut4.grid(row=2, column=0)\nbut5.grid(row=2, column=1)\nbut6.grid(row=2, column=2)\nbut1.grid(row=3, column=0)\nbut2.grid(row=3, column=1)\nbut3.grid(row=3, column=2)\nbut0.grid(row=4, column=0)\nbut_plus.grid(row=5, column=0)\nbut_sub.grid(row=6, column=0)\nbut_mul.grid(row=6, column=1)\nbut_div.grid(row=6, column=2)\nbut_eq.grid(row=4, column=1, columnspan=2)\nbut_clr.grid(row=5, column=1, columnspan=2)\nroot.mainloop()\n",
"step-4": "from tkinter import *\nimport mathcalc as c\nroot = Tk()\nroot.title('CALCULATOR')\nent = Entry(root, width=35)\nent.grid(row=0, column=0, columnspan=3, padx=10, pady=10)\nch = ''\nnum = ent.get()\n\n\ndef clicked(num):\n current = ent.get()\n ent.delete(0, END)\n ent.insert(0, str(current) + str(num))\n\n\ndef click_clear():\n ent.delete(0, END)\n\n\ndef add():\n global ch\n ch = '+'\n clicked('+')\n\n\ndef subtract():\n global ch\n ch = '-'\n clicked('-')\n\n\ndef multiply():\n global ch\n ch = '*'\n clicked('*')\n\n\ndef divide():\n global ch\n ch = '/'\n clicked('/')\n\n\ndef equals():\n f_num, s_num = ent.get().split(ch)\n res = c.calculate(float(f_num), float(s_num), ch)\n ent.delete(0, END)\n ent.insert(0, res)\n\n\nbut1 = Button(root, text='1', padx=40, pady=20, command=lambda : clicked(1))\nbut2 = Button(root, text='2', padx=40, pady=20, command=lambda : clicked(2))\nbut3 = Button(root, text='3', padx=40, pady=20, command=lambda : clicked(3))\nbut4 = Button(root, text='4', padx=40, pady=20, command=lambda : clicked(4))\nbut5 = Button(root, text='5', padx=40, pady=20, command=lambda : clicked(5))\nbut6 = Button(root, text='6', padx=40, pady=20, command=lambda : clicked(6))\nbut7 = Button(root, text='7', padx=40, pady=20, command=lambda : clicked(7))\nbut8 = Button(root, text='8', padx=40, pady=20, command=lambda : clicked(8))\nbut9 = Button(root, text='9', padx=40, pady=20, command=lambda : clicked(9))\nbut0 = Button(root, text='0', padx=40, pady=20, command=lambda : clicked(0))\nbut_plus = Button(root, text='+', padx=39, pady=20, command=add)\nbut_sub = Button(root, text='-', padx=40, pady=20, command=subtract)\nbut_mul = Button(root, text='*', padx=40, pady=20, command=multiply)\nbut_div = Button(root, text='/', padx=40, pady=20, command=divide)\nbut_eq = Button(root, text='=', padx=89, pady=20, command=equals)\nbut_clr = Button(root, text='C', padx=89, pady=20, command=click_clear)\nbut7.grid(row=1, column=0)\nbut8.grid(row=1, 
column=1)\nbut9.grid(row=1, column=2)\nbut4.grid(row=2, column=0)\nbut5.grid(row=2, column=1)\nbut6.grid(row=2, column=2)\nbut1.grid(row=3, column=0)\nbut2.grid(row=3, column=1)\nbut3.grid(row=3, column=2)\nbut0.grid(row=4, column=0)\nbut_plus.grid(row=5, column=0)\nbut_sub.grid(row=6, column=0)\nbut_mul.grid(row=6, column=1)\nbut_div.grid(row=6, column=2)\nbut_eq.grid(row=4, column=1, columnspan=2)\nbut_clr.grid(row=5, column=1, columnspan=2)\nroot.mainloop()\n",
"step-5": "from tkinter import *\r\nimport mathcalc as c \r\nroot= Tk()\r\nroot.title(\"CALCULATOR\")\r\nent=Entry(root,width=35)\r\nent.grid(row=0,column=0,columnspan=3,padx=10,pady=10)\r\n#ent.grid(row=0,column=0)\r\nch=''\r\nnum=ent.get()\r\ndef clicked(num):\r\n\tcurrent=ent.get()\r\n\tent.delete(0,END)\r\n\tent.insert(0,str(current)+str(num))\r\ndef click_clear():\r\n\tent.delete(0,END)\r\n\r\ndef add():\r\n\tglobal ch\r\n\tch='+' \r\n\tclicked('+')\r\n\r\ndef subtract():\r\n\tglobal ch\r\n\tch='-' \r\n\tclicked('-')\r\n\r\ndef multiply():\r\n\tglobal ch\r\n\tch='*' \r\n\tclicked('*')\r\n\r\ndef divide():\r\n\tglobal ch\r\n\tch='/' \r\n\tclicked('/')\r\ndef equals():\r\n\tf_num,s_num=ent.get().split(ch)\r\n\tres=c.calculate(float(f_num),float(s_num),ch)\r\n\tent.delete(0,END)\r\n\tent.insert(0,res)\r\n\r\n#buttons\r\nbut1=Button(root,text=\"1\",padx=40,pady=20,command=lambda: clicked(1))\r\nbut2=Button(root,text=\"2\",padx=40,pady=20,command=lambda: clicked(2))\r\nbut3=Button(root,text=\"3\",padx=40,pady=20,command=lambda: clicked(3))\r\nbut4=Button(root,text=\"4\",padx=40,pady=20,command=lambda: clicked(4))\r\nbut5=Button(root,text=\"5\",padx=40,pady=20,command=lambda: clicked(5))\r\nbut6=Button(root,text=\"6\",padx=40,pady=20,command=lambda: clicked(6))\r\nbut7=Button(root,text=\"7\",padx=40,pady=20,command=lambda: clicked(7))\r\nbut8=Button(root,text=\"8\",padx=40,pady=20,command=lambda: clicked(8))\r\nbut9=Button(root,text=\"9\",padx=40,pady=20,command=lambda: clicked(9))\r\nbut0=Button(root,text=\"0\",padx=40,pady=20,command=lambda: clicked(0))\r\n\r\nbut_plus=Button(root,text=\"+\",padx=39,pady=20,command=add)\r\nbut_sub=Button(root,text=\"-\",padx=40,pady=20,command=subtract)\r\nbut_mul=Button(root,text=\"*\",padx=40,pady=20,command=multiply)\r\nbut_div=Button(root,text=\"/\",padx=40,pady=20,command=divide)\r\nbut_eq=Button(root,text=\"=\",padx=89,pady=20,command=equals)\r\nbut_clr=Button(root,text=\"C\",padx=89,pady=20,command=click_clear)\r\n#button 
place\r\nbut7.grid(row=1,column=0)\r\nbut8.grid(row=1,column=1)\r\nbut9.grid(row=1,column=2)\r\n\r\nbut4.grid(row=2,column=0)\r\nbut5.grid(row=2,column=1)\r\nbut6.grid(row=2,column=2)\r\n\r\nbut1.grid(row=3,column=0)\r\nbut2.grid(row=3,column=1)\r\nbut3.grid(row=3,column=2)\r\n\r\nbut0.grid(row=4,column=0)\r\nbut_plus.grid(row=5,column=0)\r\nbut_sub.grid(row=6,column=0)\r\nbut_mul.grid(row=6,column=1)\r\nbut_div.grid(row=6,column=2)\r\nbut_eq.grid(row=4,column=1,columnspan=2)\r\nbut_clr.grid(row=5,column=1,columnspan=2)\r\nroot.mainloop()\r\n",
"step-ids": [
7,
8,
9,
10,
11
]
}
|
[
7,
8,
9,
10,
11
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
try:
    # The context manager closes the handle even if write() raises,
    # fixing the leak the manual open()/close() pair had on that path
    # (the original only closed the file in the else branch).
    with open('testfile', 'w') as fh:
        fh.write('test')
except IOError:
    print('Error:没有找到文件')
else:
    print('sucess')
<|reserved_special_token_1|>
try:
	fh = open("testfile","w")  # raises IOError when the file cannot be opened for writing
	fh.write("test")
except IOError:
	print("Error:没有找到文件")  # error text is Chinese: "file not found"
else:
	print("sucess")
	fh.close()  # NOTE(review): closed only on success; a failure after open() would leak the handle
|
flexible
|
{
"blob_id": "15e0b396a4726f98ce5ae2620338d7d48985707e",
"index": 9533,
"step-1": "<mask token>\n",
"step-2": "try:\n fh = open('testfile', 'w')\n fh.write('test')\nexcept IOError:\n print('Error:没有找到文件')\nelse:\n print('sucess')\n fh.close()\n",
"step-3": "try:\r\n\tfh = open(\"testfile\",\"w\")\r\n\tfh.write(\"test\")\r\nexcept IOError:\r\n\tprint(\"Error:没有找到文件\")\r\nelse:\r\n\tprint(\"sucess\")\r\n\tfh.close()\r\n\r\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
name = input("Enter your name: ")
print("Hi buddy! Today we will play a game " + name + "!")
print("Are you ready?")
question = input("Are you ready ? Yes or no: ")
print(name + " we are starting!")

# One pool of phrases per slot of the sentence; the player picks an index
# from each pool in turn and the picks are concatenated at the end.
pools = (
    ['My neighbor ', 'My girlfriend ', 'My boyfriend ', 'My dog '],
    ['hates ', 'loves ', 'enjoys ', 'ridicules '],
    ['with me ', 'with my grandma ', 'with our home staff ', 'with our money '],
    ['in every situation ! ', 'until end of the world ! '],
)
chosen = []
for pool in pools:
    num = input("Enter a number: ")
    chosen.append(pool[int(num)])
print(''.join(chosen))
|
normal
|
{
"blob_id": "4ef6002480fcaa514f41227978bae76f6e02c22d",
"index": 6401,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint('Hi buddy! Today we will play a game ' + name + '!')\nprint('Are you ready?')\n<mask token>\nprint(name + ' we are starting!')\n<mask token>\nprint(liste1 + liste2 + liste3 + liste4)\n",
"step-3": "name = input('Enter your name: ')\nprint('Hi buddy! Today we will play a game ' + name + '!')\nprint('Are you ready?')\nquestion = input('Are you ready ? Yes or no: ')\nprint(name + ' we are starting!')\nliste1 = ['My neighbor ', 'My girlfriend ', 'My boyfriend ', 'My dog ']\nnum = input('Enter a number: ')\nliste1 = liste1[int(num)]\nliste2 = ['hates ', 'loves ', 'enjoys ', 'ridicules ']\nnum = input('Enter a number: ')\nliste2 = liste2[int(num)]\nliste3 = ['with me ', 'with my grandma ', 'with our home staff ',\n 'with our money ']\nnum = input('Enter a number: ')\nliste3 = liste3[int(num)]\nliste4 = ['in every situation ! ', 'until end of the world ! ']\nnum = input('Enter a number: ')\nliste4 = liste4[int(num)]\nprint(liste1 + liste2 + liste3 + liste4)\n",
"step-4": "name = input(\"Enter your name: \")\r\nprint(\"Hi buddy! Today we will play a game \" + name + \"!\")\r\n\r\nprint(\"Are you ready?\")\r\n\r\nquestion = input(\"Are you ready ? Yes or no: \")\r\nprint(name + \" we are starting!\")\r\n\r\n\r\nliste1 = ['My neighbor ', 'My girlfriend ', 'My boyfriend ', 'My dog ']\r\nnum = input(\"Enter a number: \")\r\n\r\nliste1 = liste1[int(num)]\r\n\r\nliste2 = ['hates ', 'loves ', 'enjoys ', 'ridicules ']\r\nnum = input(\"Enter a number: \")\r\n\r\nliste2 = liste2[int(num)]\r\n\r\nliste3 = ['with me ', 'with my grandma ', 'with our home staff ', 'with our money ']\r\nnum = input(\"Enter a number: \")\r\n\r\nliste3 = liste3[int(num)]\r\n\r\nliste4 = ['in every situation ! ', 'until end of the world ! ']\r\nnum = input(\"Enter a number: \")\r\n\r\nliste4 = liste4[int(num)]\r\n\r\nprint(liste1 + liste2 + liste3 + liste4)",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
# -*- coding: utf-8 -*-
import random
import gym
import numpy as np
from collections import deque
from keras.models import Sequential
from keras.layers import Dense
from keras.optimizers import Adam
from simulation_utils import box, simulation
from kinematics import pose3D
# Scratch computations: decay constant, random feature block, progress flags.
a = np.log(2) / 25  # decay rate corresponding to a half-life of 25 units
apdataX = np.random.random((5, 35))  # 5x35 block of uniform [0,1) samples
quarter_way_arr = [True, True, True]  # every milestone flag set
mat = np.eye(3)
print(np.linalg.norm(mat))  # Frobenius norm of the 3x3 identity == sqrt(3)
|
normal
|
{
"blob_id": "7e7e96fb9377e4dc59a46a46951f5057ecae419a",
"index": 201,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(np.linalg.norm(mat))\n",
"step-3": "<mask token>\na = np.log(2) / 25\napdataX = np.random.random((5, 35))\nquarter_way_arr = [False, False, False]\nquarter_way_arr[0] = True\nquarter_way_arr[1] = True\nquarter_way_arr[2] = True\nmat = np.eye(3)\nprint(np.linalg.norm(mat))\n",
"step-4": "import random\nimport gym\nimport numpy as np\nfrom collections import deque\nfrom keras.models import Sequential\nfrom keras.layers import Dense\nfrom keras.optimizers import Adam\nfrom simulation_utils import box, simulation\nfrom kinematics import pose3D\na = np.log(2) / 25\napdataX = np.random.random((5, 35))\nquarter_way_arr = [False, False, False]\nquarter_way_arr[0] = True\nquarter_way_arr[1] = True\nquarter_way_arr[2] = True\nmat = np.eye(3)\nprint(np.linalg.norm(mat))\n",
"step-5": "# -*- coding: utf-8 -*-\nimport random\nimport gym\nimport numpy as np\nfrom collections import deque\nfrom keras.models import Sequential\nfrom keras.layers import Dense\nfrom keras.optimizers import Adam\nfrom simulation_utils import box, simulation\nfrom kinematics import pose3D\n\na = np.log(2)/25\n\napdataX = np.random.random((5, 35))\nquarter_way_arr = [False, False, False]\n\nquarter_way_arr[0] = True\nquarter_way_arr[1] = True\nquarter_way_arr[2] = True\n\nmat = np.eye(3)\nprint(np.linalg.norm(mat))\n\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from django.urls import path
from jobscrapper.views import *
# URL routes for the jobscrapper app:
#   ''      -> home_vacancies_view (landing page)
#   'list/' -> vacancies_view (vacancy listing)
urlpatterns = [
    path('', home_vacancies_view, name="vacancy-home"),
    path('list/', vacancies_view, name="vacancy"),
]
|
normal
|
{
"blob_id": "3ee20391d56d8c429ab1bd2f6b0e5b261721e401",
"index": 7965,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nurlpatterns = [path('', home_vacancies_view, name='vacancy-home'), path(\n 'list/', vacancies_view, name='vacancy')]\n",
"step-3": "from django.urls import path\nfrom jobscrapper.views import *\nurlpatterns = [path('', home_vacancies_view, name='vacancy-home'), path(\n 'list/', vacancies_view, name='vacancy')]\n",
"step-4": "from django.urls import path\nfrom jobscrapper.views import *\n\nurlpatterns = [\n path('', home_vacancies_view, name=\"vacancy-home\"),\n path('list/', vacancies_view, name=\"vacancy\"),\n\n]",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
"""
k-element subsets of the set [n]
3-element subsets of the set [6]
123
"""
# Accumulates every generated subset; callers read it after generation.
result = []


def get_subset(A, k, n):
    """Append to ``result`` every k-element subset of {1..n} that extends
    the strictly increasing prefix ``A``."""
    prefix = [item for item in A]
    if len(prefix) == k:
        result.append(prefix)
        return
    start = max(prefix) + 1 if prefix else 1
    for candidate in range(start, n + 1):
        prefix.append(candidate)
        get_subset(prefix, k, n)
        prefix.pop()  # candidate is the last (unique) element, so pop == remove
def subset_algor(n, k):
    """Populate the module-level ``result`` with all k-subsets of {1..n}."""
    get_subset([], k, n)
def main():
    """Generate the 3-subsets of {1..7}, print each with its rank, then the count."""
    subset_algor(7, 3)
    for rank, subset in enumerate(result):
        print(subset, " Rank: ", rank)
    print(len(result))


if __name__ == "__main__":
    main()
|
normal
|
{
"blob_id": "d48353caa07d3bfa003ea9354b411fe0c79591db",
"index": 2725,
"step-1": "<mask token>\n\n\ndef get_subset(A, k, n):\n a_list = [i for i in A]\n if len(a_list) == k:\n result.append(a_list)\n return\n s_num = max(a_list) + 1 if a_list else 1\n for i in range(s_num, n + 1):\n a_list.append(i)\n get_subset(a_list, k, n)\n a_list.remove(i)\n\n\n<mask token>\n\n\ndef main():\n subset_algor(7, 3)\n for i in range(len(result)):\n print(result[i], ' Rank: ', i)\n print(len(result))\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef get_subset(A, k, n):\n a_list = [i for i in A]\n if len(a_list) == k:\n result.append(a_list)\n return\n s_num = max(a_list) + 1 if a_list else 1\n for i in range(s_num, n + 1):\n a_list.append(i)\n get_subset(a_list, k, n)\n a_list.remove(i)\n\n\ndef subset_algor(n, k):\n V = []\n get_subset(V, k, n)\n\n\ndef main():\n subset_algor(7, 3)\n for i in range(len(result)):\n print(result[i], ' Rank: ', i)\n print(len(result))\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef get_subset(A, k, n):\n a_list = [i for i in A]\n if len(a_list) == k:\n result.append(a_list)\n return\n s_num = max(a_list) + 1 if a_list else 1\n for i in range(s_num, n + 1):\n a_list.append(i)\n get_subset(a_list, k, n)\n a_list.remove(i)\n\n\ndef subset_algor(n, k):\n V = []\n get_subset(V, k, n)\n\n\ndef main():\n subset_algor(7, 3)\n for i in range(len(result)):\n print(result[i], ' Rank: ', i)\n print(len(result))\n\n\nif __name__ == '__main__':\n main()\n",
"step-4": "<mask token>\nresult = []\n\n\ndef get_subset(A, k, n):\n a_list = [i for i in A]\n if len(a_list) == k:\n result.append(a_list)\n return\n s_num = max(a_list) + 1 if a_list else 1\n for i in range(s_num, n + 1):\n a_list.append(i)\n get_subset(a_list, k, n)\n a_list.remove(i)\n\n\ndef subset_algor(n, k):\n V = []\n get_subset(V, k, n)\n\n\ndef main():\n subset_algor(7, 3)\n for i in range(len(result)):\n print(result[i], ' Rank: ', i)\n print(len(result))\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "\"\"\"\nk-element subsets of the set [n]\n3-element subsets of the set [6]\n\n123\n\"\"\"\n\nresult = []\n\n\ndef get_subset(A, k, n):\n a_list = [i for i in A]\n if len(a_list) == k:\n result.append(a_list)\n return\n s_num = max(a_list)+1 if a_list else 1\n for i in range(s_num, n+1):\n a_list.append(i)\n get_subset(a_list, k, n)\n a_list.remove(i)\n\n\ndef subset_algor(n, k):\n V = []\n get_subset(V, k, n)\n\n\ndef main():\n # subset_algor(int(input()), int(input()))\n subset_algor(7, 3)\n\n for i in range(len(result)):\n print(result[i], \" Rank: \", i)\n print(len(result))\n\n\nif __name__ == \"__main__\":\n main()\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
<|reserved_special_token_0|>
class NewScrape:
def scrape_main(self):
"""
Top-level function.
Use links from below, scrape a page, sleep for 5s, and restart on the next link.
"""
for i in self.gen_links():
index = str(self.gen_links().index(i))
link = i
self.get_weibo(link, index)
time.sleep(5)
self.retrieve_posts(OUTPUT_FILE_NAME)
print('=' * 10)
print('Congratulations! Your data is stored')
return None
<|reserved_special_token_0|>
def get_weibo(self, link, index):
"""
Scrape a certain weibio search result page on 'zhongsou' and store it in locally.
"""
html_doc = open('{}Temp/weibo.txt'.format(WORKING_DIR), 'w',
encoding='utf8')
r = requests.get(link)
print('accessing web data.')
html_doc.write(r.text)
html_doc.close()
outfile_name = 'zhongsou_results_page_' + index + '.csv'
outfile = open('{}Temp/'.format(WORKING_DIR) + outfile_name, 'w',
encoding='utf8')
html_doc = open('{}Temp/weibo.txt'.format(WORKING_DIR), 'r',
encoding='utf8')
soup = BeautifulSoup(html_doc)
user_link = []
post_txt = []
post_link = []
post_time = []
weibo_items = soup.find_all('div', class_='weibo_item')
for item in weibo_items:
for link in item.find_all('a', target='_blank', class_='sina_weibo'
):
url = link.get('href')
post_link.append(url)
for post in item.find_all('h3', class_='weibo_title'):
for a in post.find_all('a'):
url = a.get('href')
user_link.append(url)
for time in item.find_all('div', class_='weibo_time'):
txt = time.get_text()
post_time.append(txt)
for post in item.find_all('p', class_='weibo_txt'):
txt = post.get_text()
post_txt.append(txt)
data = {'post_text': post_txt, 'post_link': post_link, 'user':
user_link, 'time': post_time}
frame = DataFrame(data)
frame.to_csv(outfile, encoding='utf-8')
print(outfile_name, 'processed complete.')
outfile.close()
html_doc.close()
return None
<|reserved_special_token_0|>
def retrieve_posts(self, outfile_name):
"""(str)->a file
"""
post_text = []
for i in range(50):
frame_2 = pandas.read_csv('{}Temp/zhongsou_results_page_{}.csv'
.format(WORKING_DIR, str(i)))
df2 = DataFrame(frame_2)
for i in df2.post_text:
post_text.append(i)
data = {'post_text': post_text}
frame = DataFrame(data)
frame.to_csv('{}Text_data/{}.txt'.format(WORKING_DIR, outfile_name),
encoding='utf-8')
frame.to_excel('{}Text_data/{}.xlsx'.format(WORKING_DIR,
outfile_name), encoding='utf-8')
print('Done')
return None
class ContinueScrape:
    """Incremental re-scrape: fetch zhongsou weibo result pages until a post
    already present in OLD_MASTER_FILE is seen, then merge old and new data.

    Relies on module-level globals QUERY_LINK, WORKING_DIR, OLD_MASTER_FILE
    and OUTPUT_FILE_NAME being assigned elsewhere in this file.
    """

    def scrape_main(self):
        """
        Top-level driver.

        Walks the generated result-page links, scraping each and sleeping 10s
        between requests; stops early once get_weibo reports 'STOP' (the
        newest already-known post was reached). Then asks the user for the
        highest temp-file index and merges the temp files with the master.
        """
        for i in self.gen_links():
            index = str(self.gen_links().index(i))  # page number doubles as temp-file suffix
            link = i
            cmd = self.get_weibo(link, index)
            if cmd == 'STOP':
                break
            else:
                time.sleep(10)  # be polite to the server between page fetches
                continue
        print('=' * 10)
        print('Scrape is now complete. Help me to organize them.')
        print(
            'View your temp folder, what is the biggest number of the files? \n'
            )
        fn = int(input())  # highest zhongsou_results_page_<n>.csv index in Temp/
        self.retrieve_posts(fn)
        print('=' * 10)
        print('Congratulations! Your data is stored')
        return

    def gen_links(self):
        """Return the 50 paginated search-result URLs (QUERY_LINK + '&b=<page>')."""
        links = []
        for i in range(1, 51):
            i = str(i)
            links.append('{}&b={}'.format(QUERY_LINK, i))
        return links

    def get_weibo(self, link, index):
        """
        Scrape one zhongsou weibo search-result page.

        Downloads *link* to Temp/weibo.txt, parses post text, post link,
        author link and timestamp out of it, and writes them to
        Temp/zhongsou_results_page_<index>.csv.

        Returns 'STOP' when a scraped post equals the first (newest) post of
        OLD_MASTER_FILE — everything after it is already known — else None.
        """
        html_doc = open('{}Temp/weibo.txt'.format(WORKING_DIR), 'w',
            encoding='utf8')
        r = requests.get(link)
        print('Accessing web data.')
        html_doc.write(r.text)
        html_doc.close()
        # Load previously scraped posts; element 0 is used below as the
        # "seen before" sentinel (assumes the master CSV is newest-first —
        # TODO confirm against how the master file is written).
        h_post_text = []
        h_frame = pandas.read_csv(OLD_MASTER_FILE)
        h_df = DataFrame(h_frame)
        for i in h_df.post_text:
            h_post_text.append(i)
        outfile_name = 'zhongsou_results_page_' + index + '.csv'
        outfile = open('{}Temp/'.format(WORKING_DIR) + outfile_name, 'w',
            encoding='utf8')
        html_doc = open('{}Temp/weibo.txt'.format(WORKING_DIR), 'r',
            encoding='utf8')
        soup = BeautifulSoup(html_doc)
        user_link = []
        post_txt = []
        post_link = []
        post_time = []
        cmd = None
        weibo_items = soup.find_all('div', class_='weibo_item')
        for item in weibo_items:
            # NOTE(review): the loop variables below shadow the 'link'
            # parameter and the imported 'time' module inside this method.
            for link in item.find_all('a', target='_blank', class_='sina_weibo'
                ):
                url = link.get('href')
                post_link.append(url)
            for post in item.find_all('h3', class_='weibo_title'):
                for a in post.find_all('a'):
                    url = a.get('href')
                    user_link.append(url)
            for time in item.find_all('div', class_='weibo_time'):
                txt = time.get_text()
                post_time.append(txt)
            for post in item.find_all('p', class_='weibo_txt'):
                txt = post.get_text()
                post_txt.append(txt)
                if txt == h_post_text[0]:
                    # Reached the newest already-known post: drop the
                    # just-appended duplicates and signal the caller to stop.
                    print(txt)
                    print(' ___ exists')
                    print('End of new data.')
                    del post_link[-1]
                    del user_link[-1]
                    del post_time[-1]
                    del post_txt[-1]
                    cmd = 'STOP'
                    break
        data = {'post_text': post_txt, 'post_link': post_link, 'user':
            user_link, 'time': post_time}
        frame = DataFrame(data)
        frame.to_csv(outfile, encoding='utf-8')
        print(outfile_name, 'processed complete.')
        outfile.close()
        html_doc.close()
        return cmd

    def retrieve_posts(self, file_number_total):
        """Merge Temp/zhongsou_results_page_0..<file_number_total>.csv with
        the old master file and write the combined post text as .txt and
        .xlsx under Text_data/ (filename suffixed '_2').
        """
        post_text = []
        for i in range(file_number_total + 1):
            frame_2 = pandas.read_csv('{}Temp/zhongsou_results_page_{}.csv'
                .format(WORKING_DIR, str(i)))
            df2 = DataFrame(frame_2)
            for i in df2.post_text:  # note: inner 'i' shadows the outer loop index
                post_text.append(i)
        frame_1 = pandas.read_csv(OLD_MASTER_FILE)
        df1 = DataFrame(frame_1)
        for i in df1.post_text:
            post_text.append(i)
        data = {'post_text': post_text}
        frame = DataFrame(data)
        frame.to_csv('{}Text_data/{}_2.txt'.format(WORKING_DIR,
            OUTPUT_FILE_NAME), encoding='utf-8')
        frame.to_excel('{}Text_data/{}_2.xlsx'.format(WORKING_DIR,
            OUTPUT_FILE_NAME), encoding='utf-8')
        print('Data gathered.')
        print('Temp files removed')  # NOTE(review): message only — nothing here deletes temp files
        return None
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
global QUERY_LINK
<|reserved_special_token_0|>
global OUTPUT_FILE_NAME
<|reserved_special_token_0|>
global WORKING_DIR
<|reserved_special_token_0|>
global OLD_MASTER_FILE
<|reserved_special_token_0|>
class NewScrape:
def scrape_main(self):
"""
Top-level function.
Use links from below, scrape a page, sleep for 5s, and restart on the next link.
"""
for i in self.gen_links():
index = str(self.gen_links().index(i))
link = i
self.get_weibo(link, index)
time.sleep(5)
self.retrieve_posts(OUTPUT_FILE_NAME)
print('=' * 10)
print('Congratulations! Your data is stored')
return None
def gen_links(self):
links = []
for i in range(1, 51):
i = str(i)
links.append('{}&b={}'.format(QUERY_LINK, i))
return links
def get_weibo(self, link, index):
"""
Scrape a certain weibio search result page on 'zhongsou' and store it in locally.
"""
html_doc = open('{}Temp/weibo.txt'.format(WORKING_DIR), 'w',
encoding='utf8')
r = requests.get(link)
print('accessing web data.')
html_doc.write(r.text)
html_doc.close()
outfile_name = 'zhongsou_results_page_' + index + '.csv'
outfile = open('{}Temp/'.format(WORKING_DIR) + outfile_name, 'w',
encoding='utf8')
html_doc = open('{}Temp/weibo.txt'.format(WORKING_DIR), 'r',
encoding='utf8')
soup = BeautifulSoup(html_doc)
user_link = []
post_txt = []
post_link = []
post_time = []
weibo_items = soup.find_all('div', class_='weibo_item')
for item in weibo_items:
for link in item.find_all('a', target='_blank', class_='sina_weibo'
):
url = link.get('href')
post_link.append(url)
for post in item.find_all('h3', class_='weibo_title'):
for a in post.find_all('a'):
url = a.get('href')
user_link.append(url)
for time in item.find_all('div', class_='weibo_time'):
txt = time.get_text()
post_time.append(txt)
for post in item.find_all('p', class_='weibo_txt'):
txt = post.get_text()
post_txt.append(txt)
data = {'post_text': post_txt, 'post_link': post_link, 'user':
user_link, 'time': post_time}
frame = DataFrame(data)
frame.to_csv(outfile, encoding='utf-8')
print(outfile_name, 'processed complete.')
outfile.close()
html_doc.close()
return None
def clean_temp(self):
filelist = glob.glob('{}Temp/*'.format(WORKING_DIR))
for f in filelist:
os.remove(f)
print('Temp files removed')
return None
def retrieve_posts(self, outfile_name):
"""(str)->a file
"""
post_text = []
for i in range(50):
frame_2 = pandas.read_csv('{}Temp/zhongsou_results_page_{}.csv'
.format(WORKING_DIR, str(i)))
df2 = DataFrame(frame_2)
for i in df2.post_text:
post_text.append(i)
data = {'post_text': post_text}
frame = DataFrame(data)
frame.to_csv('{}Text_data/{}.txt'.format(WORKING_DIR, outfile_name),
encoding='utf-8')
frame.to_excel('{}Text_data/{}.xlsx'.format(WORKING_DIR,
outfile_name), encoding='utf-8')
print('Done')
return None
class ContinueScrape:
def scrape_main(self):
"""
Top-level function.
Use links from below, scrape a page, sleep for 5s, and restart on the next link.
"""
for i in self.gen_links():
index = str(self.gen_links().index(i))
link = i
cmd = self.get_weibo(link, index)
if cmd == 'STOP':
break
else:
time.sleep(10)
continue
print('=' * 10)
print('Scrape is now complete. Help me to organize them.')
print(
'View your temp folder, what is the biggest number of the files? \n'
)
fn = int(input())
self.retrieve_posts(fn)
print('=' * 10)
print('Congratulations! Your data is stored')
return
def gen_links(self):
links = []
for i in range(1, 51):
i = str(i)
links.append('{}&b={}'.format(QUERY_LINK, i))
return links
def get_weibo(self, link, index):
"""
Scrape a certain weibio search result page on 'zhongsou' and store it in locally.
"""
html_doc = open('{}Temp/weibo.txt'.format(WORKING_DIR), 'w',
encoding='utf8')
r = requests.get(link)
print('Accessing web data.')
html_doc.write(r.text)
html_doc.close()
h_post_text = []
h_frame = pandas.read_csv(OLD_MASTER_FILE)
h_df = DataFrame(h_frame)
for i in h_df.post_text:
h_post_text.append(i)
outfile_name = 'zhongsou_results_page_' + index + '.csv'
outfile = open('{}Temp/'.format(WORKING_DIR) + outfile_name, 'w',
encoding='utf8')
html_doc = open('{}Temp/weibo.txt'.format(WORKING_DIR), 'r',
encoding='utf8')
soup = BeautifulSoup(html_doc)
user_link = []
post_txt = []
post_link = []
post_time = []
cmd = None
weibo_items = soup.find_all('div', class_='weibo_item')
for item in weibo_items:
for link in item.find_all('a', target='_blank', class_='sina_weibo'
):
url = link.get('href')
post_link.append(url)
for post in item.find_all('h3', class_='weibo_title'):
for a in post.find_all('a'):
url = a.get('href')
user_link.append(url)
for time in item.find_all('div', class_='weibo_time'):
txt = time.get_text()
post_time.append(txt)
for post in item.find_all('p', class_='weibo_txt'):
txt = post.get_text()
post_txt.append(txt)
if txt == h_post_text[0]:
print(txt)
print(' ___ exists')
print('End of new data.')
del post_link[-1]
del user_link[-1]
del post_time[-1]
del post_txt[-1]
cmd = 'STOP'
break
data = {'post_text': post_txt, 'post_link': post_link, 'user':
user_link, 'time': post_time}
frame = DataFrame(data)
frame.to_csv(outfile, encoding='utf-8')
print(outfile_name, 'processed complete.')
outfile.close()
html_doc.close()
return cmd
def retrieve_posts(self, file_number_total):
"""(int)->a file
"""
post_text = []
for i in range(file_number_total + 1):
frame_2 = pandas.read_csv('{}Temp/zhongsou_results_page_{}.csv'
.format(WORKING_DIR, str(i)))
df2 = DataFrame(frame_2)
for i in df2.post_text:
post_text.append(i)
frame_1 = pandas.read_csv(OLD_MASTER_FILE)
df1 = DataFrame(frame_1)
for i in df1.post_text:
post_text.append(i)
data = {'post_text': post_text}
frame = DataFrame(data)
frame.to_csv('{}Text_data/{}_2.txt'.format(WORKING_DIR,
OUTPUT_FILE_NAME), encoding='utf-8')
frame.to_excel('{}Text_data/{}_2.xlsx'.format(WORKING_DIR,
OUTPUT_FILE_NAME), encoding='utf-8')
print('Data gathered.')
print('Temp files removed')
return None
print('=' * 10)
print(
"""This program will help you collect Weibo language data as generated by the 中搜 search results.
"""
)
print(
"""Use this page to generate a link for your query item:
http://t.zhongsou.com/wb?form_id=1&org=1&sel=0&so=1&v=%D6%D0%CB%D1&w=%CD%F8%D3%EF"""
)
<|reserved_special_token_0|>
if resp == 'Y':
print()
print('=' * 10)
print('Initialize scraping now.')
print('=' * 10)
NewScrape().scrape_main()
elif resp == 'N':
OLD_MASTER_FILE = input(
"""
Where is the old txt file you want to merge later? Please paste full path.
> """
)
print()
print('=' * 10)
print('WARNING: FURTHER ACTIONS NEEDED AT THE END OF SCRAPING.')
print('Initialize scraping now.')
print('=' * 10)
ContinueScrape().scrape_main()
else:
print('Invalid command. Try again.')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
global QUERY_LINK
QUERY_LINK = (
'http://t.zhongsou.com/wb?form_id=1&org=1&sel=0&so=1&v=%D6%D0%CB%D1&w=%B1%C6'
)
global OUTPUT_FILE_NAME
OUTPUT_FILE_NAME = 'scrape'
global WORKING_DIR
WORKING_DIR = '~/Corpora/'
global OLD_MASTER_FILE
OLD_MASTER_FILE = '{}Text_data/'.format(WORKING_DIR) + 'yeshizuile.txt'
class NewScrape:
def scrape_main(self):
"""
Top-level function.
Use links from below, scrape a page, sleep for 5s, and restart on the next link.
"""
for i in self.gen_links():
index = str(self.gen_links().index(i))
link = i
self.get_weibo(link, index)
time.sleep(5)
self.retrieve_posts(OUTPUT_FILE_NAME)
print('=' * 10)
print('Congratulations! Your data is stored')
return None
def gen_links(self):
links = []
for i in range(1, 51):
i = str(i)
links.append('{}&b={}'.format(QUERY_LINK, i))
return links
def get_weibo(self, link, index):
"""
Scrape a certain weibio search result page on 'zhongsou' and store it in locally.
"""
html_doc = open('{}Temp/weibo.txt'.format(WORKING_DIR), 'w',
encoding='utf8')
r = requests.get(link)
print('accessing web data.')
html_doc.write(r.text)
html_doc.close()
outfile_name = 'zhongsou_results_page_' + index + '.csv'
outfile = open('{}Temp/'.format(WORKING_DIR) + outfile_name, 'w',
encoding='utf8')
html_doc = open('{}Temp/weibo.txt'.format(WORKING_DIR), 'r',
encoding='utf8')
soup = BeautifulSoup(html_doc)
user_link = []
post_txt = []
post_link = []
post_time = []
weibo_items = soup.find_all('div', class_='weibo_item')
for item in weibo_items:
for link in item.find_all('a', target='_blank', class_='sina_weibo'
):
url = link.get('href')
post_link.append(url)
for post in item.find_all('h3', class_='weibo_title'):
for a in post.find_all('a'):
url = a.get('href')
user_link.append(url)
for time in item.find_all('div', class_='weibo_time'):
txt = time.get_text()
post_time.append(txt)
for post in item.find_all('p', class_='weibo_txt'):
txt = post.get_text()
post_txt.append(txt)
data = {'post_text': post_txt, 'post_link': post_link, 'user':
user_link, 'time': post_time}
frame = DataFrame(data)
frame.to_csv(outfile, encoding='utf-8')
print(outfile_name, 'processed complete.')
outfile.close()
html_doc.close()
return None
def clean_temp(self):
filelist = glob.glob('{}Temp/*'.format(WORKING_DIR))
for f in filelist:
os.remove(f)
print('Temp files removed')
return None
def retrieve_posts(self, outfile_name):
"""(str)->a file
"""
post_text = []
for i in range(50):
frame_2 = pandas.read_csv('{}Temp/zhongsou_results_page_{}.csv'
.format(WORKING_DIR, str(i)))
df2 = DataFrame(frame_2)
for i in df2.post_text:
post_text.append(i)
data = {'post_text': post_text}
frame = DataFrame(data)
frame.to_csv('{}Text_data/{}.txt'.format(WORKING_DIR, outfile_name),
encoding='utf-8')
frame.to_excel('{}Text_data/{}.xlsx'.format(WORKING_DIR,
outfile_name), encoding='utf-8')
print('Done')
return None
class ContinueScrape:
def scrape_main(self):
"""
Top-level function.
Use links from below, scrape a page, sleep for 5s, and restart on the next link.
"""
for i in self.gen_links():
index = str(self.gen_links().index(i))
link = i
cmd = self.get_weibo(link, index)
if cmd == 'STOP':
break
else:
time.sleep(10)
continue
print('=' * 10)
print('Scrape is now complete. Help me to organize them.')
print(
'View your temp folder, what is the biggest number of the files? \n'
)
fn = int(input())
self.retrieve_posts(fn)
print('=' * 10)
print('Congratulations! Your data is stored')
return
def gen_links(self):
links = []
for i in range(1, 51):
i = str(i)
links.append('{}&b={}'.format(QUERY_LINK, i))
return links
def get_weibo(self, link, index):
"""
Scrape a certain weibio search result page on 'zhongsou' and store it in locally.
"""
html_doc = open('{}Temp/weibo.txt'.format(WORKING_DIR), 'w',
encoding='utf8')
r = requests.get(link)
print('Accessing web data.')
html_doc.write(r.text)
html_doc.close()
h_post_text = []
h_frame = pandas.read_csv(OLD_MASTER_FILE)
h_df = DataFrame(h_frame)
for i in h_df.post_text:
h_post_text.append(i)
outfile_name = 'zhongsou_results_page_' + index + '.csv'
outfile = open('{}Temp/'.format(WORKING_DIR) + outfile_name, 'w',
encoding='utf8')
html_doc = open('{}Temp/weibo.txt'.format(WORKING_DIR), 'r',
encoding='utf8')
soup = BeautifulSoup(html_doc)
user_link = []
post_txt = []
post_link = []
post_time = []
cmd = None
weibo_items = soup.find_all('div', class_='weibo_item')
for item in weibo_items:
for link in item.find_all('a', target='_blank', class_='sina_weibo'
):
url = link.get('href')
post_link.append(url)
for post in item.find_all('h3', class_='weibo_title'):
for a in post.find_all('a'):
url = a.get('href')
user_link.append(url)
for time in item.find_all('div', class_='weibo_time'):
txt = time.get_text()
post_time.append(txt)
for post in item.find_all('p', class_='weibo_txt'):
txt = post.get_text()
post_txt.append(txt)
if txt == h_post_text[0]:
print(txt)
print(' ___ exists')
print('End of new data.')
del post_link[-1]
del user_link[-1]
del post_time[-1]
del post_txt[-1]
cmd = 'STOP'
break
data = {'post_text': post_txt, 'post_link': post_link, 'user':
user_link, 'time': post_time}
frame = DataFrame(data)
frame.to_csv(outfile, encoding='utf-8')
print(outfile_name, 'processed complete.')
outfile.close()
html_doc.close()
return cmd
def retrieve_posts(self, file_number_total):
"""(int)->a file
"""
post_text = []
for i in range(file_number_total + 1):
frame_2 = pandas.read_csv('{}Temp/zhongsou_results_page_{}.csv'
.format(WORKING_DIR, str(i)))
df2 = DataFrame(frame_2)
for i in df2.post_text:
post_text.append(i)
frame_1 = pandas.read_csv(OLD_MASTER_FILE)
df1 = DataFrame(frame_1)
for i in df1.post_text:
post_text.append(i)
data = {'post_text': post_text}
frame = DataFrame(data)
frame.to_csv('{}Text_data/{}_2.txt'.format(WORKING_DIR,
OUTPUT_FILE_NAME), encoding='utf-8')
frame.to_excel('{}Text_data/{}_2.xlsx'.format(WORKING_DIR,
OUTPUT_FILE_NAME), encoding='utf-8')
print('Data gathered.')
print('Temp files removed')
return None
print('=' * 10)
print(
"""This program will help you collect Weibo language data as generated by the 中搜 search results.
"""
)
print(
"""Use this page to generate a link for your query item:
http://t.zhongsou.com/wb?form_id=1&org=1&sel=0&so=1&v=%D6%D0%CB%D1&w=%CD%F8%D3%EF"""
)
QUERY_LINK = input("""
Paste your query link
> """)
OUTPUT_FILE_NAME = input(
"""
What's your query term? (This will be used as file name)
> """)
resp = input("""
Is this your first time running this query? Y/N
> """).upper()
if resp == 'Y':
print()
print('=' * 10)
print('Initialize scraping now.')
print('=' * 10)
NewScrape().scrape_main()
elif resp == 'N':
OLD_MASTER_FILE = input(
"""
Where is the old txt file you want to merge later? Please paste full path.
> """
)
print()
print('=' * 10)
print('WARNING: FURTHER ACTIONS NEEDED AT THE END OF SCRAPING.')
print('Initialize scraping now.')
print('=' * 10)
ContinueScrape().scrape_main()
else:
print('Invalid command. Try again.')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
import requests
from bs4 import BeautifulSoup
from pandas import DataFrame
import time
import pandas
import glob, os
global QUERY_LINK
QUERY_LINK = (
'http://t.zhongsou.com/wb?form_id=1&org=1&sel=0&so=1&v=%D6%D0%CB%D1&w=%B1%C6'
)
global OUTPUT_FILE_NAME
OUTPUT_FILE_NAME = 'scrape'
global WORKING_DIR
WORKING_DIR = '~/Corpora/'
global OLD_MASTER_FILE
OLD_MASTER_FILE = '{}Text_data/'.format(WORKING_DIR) + 'yeshizuile.txt'
class NewScrape:
def scrape_main(self):
"""
Top-level function.
Use links from below, scrape a page, sleep for 5s, and restart on the next link.
"""
for i in self.gen_links():
index = str(self.gen_links().index(i))
link = i
self.get_weibo(link, index)
time.sleep(5)
self.retrieve_posts(OUTPUT_FILE_NAME)
print('=' * 10)
print('Congratulations! Your data is stored')
return None
def gen_links(self):
links = []
for i in range(1, 51):
i = str(i)
links.append('{}&b={}'.format(QUERY_LINK, i))
return links
def get_weibo(self, link, index):
"""
Scrape a certain weibio search result page on 'zhongsou' and store it in locally.
"""
html_doc = open('{}Temp/weibo.txt'.format(WORKING_DIR), 'w',
encoding='utf8')
r = requests.get(link)
print('accessing web data.')
html_doc.write(r.text)
html_doc.close()
outfile_name = 'zhongsou_results_page_' + index + '.csv'
outfile = open('{}Temp/'.format(WORKING_DIR) + outfile_name, 'w',
encoding='utf8')
html_doc = open('{}Temp/weibo.txt'.format(WORKING_DIR), 'r',
encoding='utf8')
soup = BeautifulSoup(html_doc)
user_link = []
post_txt = []
post_link = []
post_time = []
weibo_items = soup.find_all('div', class_='weibo_item')
for item in weibo_items:
for link in item.find_all('a', target='_blank', class_='sina_weibo'
):
url = link.get('href')
post_link.append(url)
for post in item.find_all('h3', class_='weibo_title'):
for a in post.find_all('a'):
url = a.get('href')
user_link.append(url)
for time in item.find_all('div', class_='weibo_time'):
txt = time.get_text()
post_time.append(txt)
for post in item.find_all('p', class_='weibo_txt'):
txt = post.get_text()
post_txt.append(txt)
data = {'post_text': post_txt, 'post_link': post_link, 'user':
user_link, 'time': post_time}
frame = DataFrame(data)
frame.to_csv(outfile, encoding='utf-8')
print(outfile_name, 'processed complete.')
outfile.close()
html_doc.close()
return None
def clean_temp(self):
filelist = glob.glob('{}Temp/*'.format(WORKING_DIR))
for f in filelist:
os.remove(f)
print('Temp files removed')
return None
def retrieve_posts(self, outfile_name):
"""(str)->a file
"""
post_text = []
for i in range(50):
frame_2 = pandas.read_csv('{}Temp/zhongsou_results_page_{}.csv'
.format(WORKING_DIR, str(i)))
df2 = DataFrame(frame_2)
for i in df2.post_text:
post_text.append(i)
data = {'post_text': post_text}
frame = DataFrame(data)
frame.to_csv('{}Text_data/{}.txt'.format(WORKING_DIR, outfile_name),
encoding='utf-8')
frame.to_excel('{}Text_data/{}.xlsx'.format(WORKING_DIR,
outfile_name), encoding='utf-8')
print('Done')
return None
class ContinueScrape:
def scrape_main(self):
"""
Top-level function.
Use links from below, scrape a page, sleep for 5s, and restart on the next link.
"""
for i in self.gen_links():
index = str(self.gen_links().index(i))
link = i
cmd = self.get_weibo(link, index)
if cmd == 'STOP':
break
else:
time.sleep(10)
continue
print('=' * 10)
print('Scrape is now complete. Help me to organize them.')
print(
'View your temp folder, what is the biggest number of the files? \n'
)
fn = int(input())
self.retrieve_posts(fn)
print('=' * 10)
print('Congratulations! Your data is stored')
return
def gen_links(self):
links = []
for i in range(1, 51):
i = str(i)
links.append('{}&b={}'.format(QUERY_LINK, i))
return links
def get_weibo(self, link, index):
"""
Scrape a certain weibio search result page on 'zhongsou' and store it in locally.
"""
html_doc = open('{}Temp/weibo.txt'.format(WORKING_DIR), 'w',
encoding='utf8')
r = requests.get(link)
print('Accessing web data.')
html_doc.write(r.text)
html_doc.close()
h_post_text = []
h_frame = pandas.read_csv(OLD_MASTER_FILE)
h_df = DataFrame(h_frame)
for i in h_df.post_text:
h_post_text.append(i)
outfile_name = 'zhongsou_results_page_' + index + '.csv'
outfile = open('{}Temp/'.format(WORKING_DIR) + outfile_name, 'w',
encoding='utf8')
html_doc = open('{}Temp/weibo.txt'.format(WORKING_DIR), 'r',
encoding='utf8')
soup = BeautifulSoup(html_doc)
user_link = []
post_txt = []
post_link = []
post_time = []
cmd = None
weibo_items = soup.find_all('div', class_='weibo_item')
for item in weibo_items:
for link in item.find_all('a', target='_blank', class_='sina_weibo'
):
url = link.get('href')
post_link.append(url)
for post in item.find_all('h3', class_='weibo_title'):
for a in post.find_all('a'):
url = a.get('href')
user_link.append(url)
for time in item.find_all('div', class_='weibo_time'):
txt = time.get_text()
post_time.append(txt)
for post in item.find_all('p', class_='weibo_txt'):
txt = post.get_text()
post_txt.append(txt)
if txt == h_post_text[0]:
print(txt)
print(' ___ exists')
print('End of new data.')
del post_link[-1]
del user_link[-1]
del post_time[-1]
del post_txt[-1]
cmd = 'STOP'
break
data = {'post_text': post_txt, 'post_link': post_link, 'user':
user_link, 'time': post_time}
frame = DataFrame(data)
frame.to_csv(outfile, encoding='utf-8')
print(outfile_name, 'processed complete.')
outfile.close()
html_doc.close()
return cmd
def retrieve_posts(self, file_number_total):
"""(int)->a file
"""
post_text = []
for i in range(file_number_total + 1):
frame_2 = pandas.read_csv('{}Temp/zhongsou_results_page_{}.csv'
.format(WORKING_DIR, str(i)))
df2 = DataFrame(frame_2)
for i in df2.post_text:
post_text.append(i)
frame_1 = pandas.read_csv(OLD_MASTER_FILE)
df1 = DataFrame(frame_1)
for i in df1.post_text:
post_text.append(i)
data = {'post_text': post_text}
frame = DataFrame(data)
frame.to_csv('{}Text_data/{}_2.txt'.format(WORKING_DIR,
OUTPUT_FILE_NAME), encoding='utf-8')
frame.to_excel('{}Text_data/{}_2.xlsx'.format(WORKING_DIR,
OUTPUT_FILE_NAME), encoding='utf-8')
print('Data gathered.')
print('Temp files removed')
return None
print('=' * 10)
print(
"""This program will help you collect Weibo language data as generated by the 中搜 search results.
"""
)
print(
"""Use this page to generate a link for your query item:
http://t.zhongsou.com/wb?form_id=1&org=1&sel=0&so=1&v=%D6%D0%CB%D1&w=%CD%F8%D3%EF"""
)
QUERY_LINK = input("""
Paste your query link
> """)
OUTPUT_FILE_NAME = input(
"""
What's your query term? (This will be used as file name)
> """)
resp = input("""
Is this your first time running this query? Y/N
> """).upper()
if resp == 'Y':
print()
print('=' * 10)
print('Initialize scraping now.')
print('=' * 10)
NewScrape().scrape_main()
elif resp == 'N':
OLD_MASTER_FILE = input(
"""
Where is the old txt file you want to merge later? Please paste full path.
> """
)
print()
print('=' * 10)
print('WARNING: FURTHER ACTIONS NEEDED AT THE END OF SCRAPING.')
print('Initialize scraping now.')
print('=' * 10)
ContinueScrape().scrape_main()
else:
print('Invalid command. Try again.')
<|reserved_special_token_1|>
'''
Author: Iris Peng. Date: Feb 21, 2016
Usage: Scrape Weibo posts from Zhongsou for the first time for a query
In the terminal, type
$ python3 scrape_weibo.py
and follow the prompts
'''
import requests
from bs4 import BeautifulSoup
from pandas import DataFrame
import time
import pandas
import glob, os
# NOTE(review): `global` at module top level is a no-op — these declarations
# have no effect; the names below are ordinary module globals either way.
global QUERY_LINK
# Default Zhongsou Weibo search URL; overwritten by user input at the bottom
# of this script.
QUERY_LINK = 'http://t.zhongsou.com/wb?form_id=1&org=1&sel=0&so=1&v=%D6%D0%CB%D1&w=%B1%C6'
global OUTPUT_FILE_NAME
OUTPUT_FILE_NAME = 'scrape'  # Default output file stem; overwritten by user input below.
global WORKING_DIR
# Base directory; must contain Temp/ and Text_data/ subdirectories.
# NOTE(review): '~' is NOT expanded by open(); os.path.expanduser may be
# needed for this path to work — confirm.
WORKING_DIR = '~/Corpora/'
global OLD_MASTER_FILE
# Master file from a previous run (default); overwritten by user input when
# the user answers 'N' at the bottom of this script.
OLD_MASTER_FILE = '{}Text_data/'.format(WORKING_DIR) + 'yeshizuile.txt'
class NewScrape():
    """First-time scrape for a query.

    Fetches all 50 Zhongsou result pages for QUERY_LINK, writes one CSV of
    posts per page under <WORKING_DIR>Temp/, then merges every page's
    post_text column into <WORKING_DIR>Text_data/<name>.txt and .xlsx.
    """

    def scrape_main(self):
        '''
        Top-level function.

        Scrape every result page with a 5 s pause between requests, then
        merge all per-page CSVs into the final output files.
        '''
        # enumerate() replaces the original O(n^2)
        # `self.gen_links().index(i)` rebuilt on every iteration.
        for page_number, page_link in enumerate(self.gen_links()):
            self.get_weibo(page_link, str(page_number))
            time.sleep(5)  # be polite to the server between requests
        self.retrieve_posts(OUTPUT_FILE_NAME)
        print('=' * 10)
        print('Congratulations! Your data is stored')
        return None

    def gen_links(self):
        """Return the 50 paginated search-result URLs (&b=1 .. &b=50)."""
        return ['{}&b={}'.format(QUERY_LINK, i) for i in range(1, 51)]

    def get_weibo(self, link, index):
        '''
        Scrape one Zhongsou Weibo search-result page and store it locally.

        link (str): URL of the result page to fetch.
        index (str): page number, used in the temp CSV file name.

        Side effects: writes the raw HTML to Temp/weibo.txt and the parsed
        posts to Temp/zhongsou_results_page_<index>.csv.
        '''
        r = requests.get(link)
        print('accessing web data.')
        # Round-trip through a temp file (kept so the raw HTML of the last
        # fetched page is available for debugging).
        raw_path = '{}Temp/weibo.txt'.format(WORKING_DIR)
        with open(raw_path, 'w', encoding='utf8') as raw_file:
            raw_file.write(r.text)
        with open(raw_path, 'r', encoding='utf8') as raw_file:
            # Explicit parser: the original relied on bs4 guessing one,
            # which warns and may vary between installs.
            soup = BeautifulSoup(raw_file, 'html.parser')
        post_link, user_link, post_time, post_txt = [], [], [], []
        for item in soup.find_all('div', class_='weibo_item'):
            # Permalink of the original Sina Weibo post.
            # (loop variables renamed: the original shadowed the `link`
            # parameter and the imported `time` module here)
            for anchor in item.find_all('a', target='_blank', class_='sina_weibo'):
                post_link.append(anchor.get('href'))
            # Link to the posting user's profile.
            for title in item.find_all('h3', class_='weibo_title'):
                for a in title.find_all('a'):
                    user_link.append(a.get('href'))
            # Human-readable post timestamp.
            for stamp in item.find_all('div', class_='weibo_time'):
                post_time.append(stamp.get_text())
            # The post body itself.
            for body in item.find_all('p', class_='weibo_txt'):
                post_txt.append(body.get_text())
        frame = DataFrame({'post_text': post_txt, 'post_link': post_link,
                           'user': user_link, 'time': post_time})
        outfile_name = 'zhongsou_results_page_' + index + '.csv'
        with open('{}Temp/'.format(WORKING_DIR) + outfile_name, 'w',
                  encoding='utf8') as outfile:
            frame.to_csv(outfile, encoding='utf-8')
        print(outfile_name, 'processed complete.')
        return None

    def clean_temp(self):
        """Delete every file in <WORKING_DIR>Temp/."""
        for f in glob.glob('{}Temp/*'.format(WORKING_DIR)):
            os.remove(f)
        print('Temp files removed')
        return None

    def retrieve_posts(self, outfile_name):
        '''(str)->a file

        Merge the post_text column of temp CSVs 0..49 into
        Text_data/<outfile_name>.txt (CSV-formatted despite the extension)
        and Text_data/<outfile_name>.xlsx.
        '''
        post_text = []
        # range(50) matches the 50 pages produced by gen_links().
        for page in range(50):
            page_frame = pandas.read_csv(
                '{}Temp/zhongsou_results_page_{}.csv'.format(WORKING_DIR, str(page)))
            # (renamed: the original reused `i` for both loops)
            for text in DataFrame(page_frame).post_text:
                post_text.append(text)
        frame = DataFrame({'post_text': post_text})
        frame.to_csv('{}Text_data/{}.txt'.format(WORKING_DIR, outfile_name),
                     encoding='utf-8')
        frame.to_excel('{}Text_data/{}.xlsx'.format(WORKING_DIR, outfile_name),
                       encoding='utf-8')
        print("Done")
        return None
class ContinueScrape():
    """Incremental scrape for a query that has been run before.

    Pages are scraped newest-first; as soon as a post equal to the first
    (most recent) entry of OLD_MASTER_FILE is seen, the page's trailing
    entries are dropped and scraping stops, so only new posts are kept.
    retrieve_posts() then merges the new pages with the old master file.
    """

    def scrape_main(self):
        '''
        Top-level function.

        Iterate over the 50 result-page links, scraping each with a 10 s
        pause.  get_weibo() returns 'STOP' once it meets a post already in
        OLD_MASTER_FILE, which ends the loop early.  The user is then asked
        for the highest-numbered temp file so the partial set can be merged.
        '''
        for i in self.gen_links():
            # NOTE(review): rebuilds the whole link list every iteration
            # just to find the index (O(n^2)); enumerate() would avoid this.
            index = str(self.gen_links().index(i))
            link = i
            cmd = self.get_weibo(link,index)
            if cmd == 'STOP':
                break
            else:
                time.sleep(10)
                continue
        print('='*10)
        print('Scrape is now complete. Help me to organize them.')
        print ('View your temp folder, what is the biggest number of the files? \n')
        fn = int(input())
        self.retrieve_posts(fn)
        print('='*10)
        print('Congratulations! Your data is stored')
        return

    def gen_links(self):
        """Return the 50 paginated search-result URLs (&b=1 .. &b=50)."""
        links = []
        for i in range(1,51):
            i = str(i)
            links.append('{}&b={}'.format(QUERY_LINK,i))
        return links

    def get_weibo(self,link,index):
        '''
        Scrape one Zhongsou Weibo search-result page, store it locally, and
        detect where the previous scrape left off.

        link (str): URL of the result page to fetch.
        index (str): page number, used in the temp CSV file name.

        Returns 'STOP' if a post equal to the newest entry of
        OLD_MASTER_FILE was found on this page (the caller should stop
        paging), otherwise None.
        '''
        html_doc = open('{}Temp/weibo.txt'.format(WORKING_DIR), 'w', encoding='utf8')
        r = requests.get(link)
        print ('Accessing web data.')
        html_doc.write(r.text)
        html_doc.close()
        # Load the scrape history: post_text column of the old master file.
        h_post_text = []
        h_frame = pandas.read_csv(OLD_MASTER_FILE)
        h_df = DataFrame(h_frame)
        for i in h_df.post_text:
            h_post_text.append(i)
        # Per-page CSV output file.
        outfile_name = 'zhongsou_results_page_' + index + '.csv'
        outfile = open('{}Temp/'.format(WORKING_DIR)+ outfile_name,'w', encoding = 'utf8')
        # Re-open the raw HTML and strip it down with BeautifulSoup.
        # NOTE(review): loop variables below shadow the `link` parameter and
        # the imported `time` module; harmless here but fragile.
        html_doc = open('{}Temp/weibo.txt'.format(WORKING_DIR), 'r', encoding='utf8')
        soup = BeautifulSoup(html_doc)
        user_link = []
        post_txt = []
        post_link = []
        post_time = []
        cmd = None  # becomes 'STOP' when an already-seen post is found
        weibo_items = soup.find_all('div', class_='weibo_item')
        for item in weibo_items:
            # Permalink of the original Sina Weibo post.
            for link in item.find_all('a', target='_blank', class_='sina_weibo'):
                url = link.get('href')
                post_link.append(url)
            # Link to the posting user's profile.
            for post in item.find_all('h3', class_='weibo_title'):
                for a in post.find_all('a'):
                    url = a.get('href')
                    user_link.append(url)
            # Human-readable post timestamp.
            for time in item.find_all('div', class_='weibo_time'):
                txt = time.get_text()
                post_time.append(txt)
            # The post body; also where the overlap check happens.
            for post in item.find_all('p', class_='weibo_txt'):
                txt = post.get_text()
                post_txt.append(txt)
                # NOTE(review): an earlier variant matched against the whole
                # history (`txt in h_post_text`) but was marked buggy and
                # reverted; only the newest old post (h_post_text[0]) is
                # compared here — confirm the master file is newest-first.
                if txt == h_post_text[0]:
                    print (txt)
                    print(' ___ exists')
                    print ('End of new data.')
                    # Drop the just-appended duplicate from all four lists,
                    # signal the caller, and stop parsing this item.
                    # (Only breaks the inner loop; the caller's break on
                    # 'STOP' ends the page loop.)
                    del post_link[-1]
                    del user_link[-1]
                    del post_time[-1]
                    del post_txt[-1]
                    cmd = 'STOP'
                    break
        data = {'post_text':post_txt,'post_link':post_link,'user':user_link, 'time':post_time}
        frame = DataFrame(data)
        frame.to_csv(outfile, encoding='utf-8')
        print (outfile_name,'processed complete.')
        outfile.close()
        html_doc.close()
        return cmd

    def retrieve_posts(self,file_number_total):
        '''(int)->a file

        Merge the post_text column of temp CSVs 0..file_number_total with
        the post_text column of OLD_MASTER_FILE, and write the result to
        Text_data/<OUTPUT_FILE_NAME>_2.txt (CSV-formatted despite the
        extension) and Text_data/<OUTPUT_FILE_NAME>_2.xlsx.
        '''
        post_text = []
        # New posts first (pages 0..file_number_total)...
        for i in range(file_number_total+1):
            frame_2 = pandas.read_csv('{}Temp/zhongsou_results_page_{}.csv'.format(WORKING_DIR, str(i)))
            df2 = DataFrame(frame_2)
            for i in df2.post_text:
                post_text.append(i)
        # ...then the old master file's posts, keeping newest-first order.
        frame_1 = pandas.read_csv(OLD_MASTER_FILE)
        df1 = DataFrame(frame_1)
        for i in df1.post_text:
            post_text.append(i)
        data = {'post_text':post_text}
        frame = DataFrame(data)
        frame.to_csv('{}Text_data/{}_2.txt'.format(WORKING_DIR, OUTPUT_FILE_NAME), encoding = 'utf-8')
        frame.to_excel('{}Text_data/{}_2.xlsx'.format(WORKING_DIR, OUTPUT_FILE_NAME), encoding = 'utf-8')
        print("Data gathered.")
        # NOTE(review): temp-file cleanup is disabled, so the message below
        # is misleading — nothing is actually removed here.
        print('Temp files removed')
        return None
# --- Interactive entry point ---------------------------------------------
# Ask the user for a query link and an output name, then dispatch to either
# a fresh scrape (NewScrape) or an incremental scrape that merges with an
# existing master file (ContinueScrape).
BANNER = '=' * 10
print(BANNER)
print('This program will help you collect Weibo language data as generated by the 中搜 search results.\n')
print('Use this page to generate a link for your query item:\n\nhttp://t.zhongsou.com/wb?form_id=1&org=1&sel=0&so=1&v=%D6%D0%CB%D1&w=%CD%F8%D3%EF')
QUERY_LINK = input('\nPaste your query link \n> ')
OUTPUT_FILE_NAME = input("\nWhat's your query term? (This will be used as file name)\n> ")
answer = input('\nIs this your first time running this query? Y/N\n> ').upper()
if answer == 'Y':
    print()
    print(BANNER)
    print('Initialize scraping now.')
    print(BANNER)
    NewScrape().scrape_main()
elif answer == 'N':
    # A previous master file is required so newly scraped posts can be
    # de-duplicated against it and merged at the end.
    OLD_MASTER_FILE = input('\nWhere is the old txt file you want to merge later? Please paste full path. \n> ')
    print()
    print(BANNER)
    print('WARNING: FURTHER ACTIONS NEEDED AT THE END OF SCRAPING.')
    print('Initialize scraping now.')
    print(BANNER)
    ContinueScrape().scrape_main()
else:
    print('Invalid command. Try again.')
|
flexible
|
{
"blob_id": "ed3fbae19c88100690dd5c558c0dc6d36a4849c8",
"index": 1451,
"step-1": "<mask token>\n\n\nclass NewScrape:\n\n def scrape_main(self):\n \"\"\"\n Top-level function.\n Use links from below, scrape a page, sleep for 5s, and restart on the next link.\n \"\"\"\n for i in self.gen_links():\n index = str(self.gen_links().index(i))\n link = i\n self.get_weibo(link, index)\n time.sleep(5)\n self.retrieve_posts(OUTPUT_FILE_NAME)\n print('=' * 10)\n print('Congratulations! Your data is stored')\n return None\n <mask token>\n\n def get_weibo(self, link, index):\n \"\"\"\n Scrape a certain weibio search result page on 'zhongsou' and store it in locally.\n \"\"\"\n html_doc = open('{}Temp/weibo.txt'.format(WORKING_DIR), 'w',\n encoding='utf8')\n r = requests.get(link)\n print('accessing web data.')\n html_doc.write(r.text)\n html_doc.close()\n outfile_name = 'zhongsou_results_page_' + index + '.csv'\n outfile = open('{}Temp/'.format(WORKING_DIR) + outfile_name, 'w',\n encoding='utf8')\n html_doc = open('{}Temp/weibo.txt'.format(WORKING_DIR), 'r',\n encoding='utf8')\n soup = BeautifulSoup(html_doc)\n user_link = []\n post_txt = []\n post_link = []\n post_time = []\n weibo_items = soup.find_all('div', class_='weibo_item')\n for item in weibo_items:\n for link in item.find_all('a', target='_blank', class_='sina_weibo'\n ):\n url = link.get('href')\n post_link.append(url)\n for post in item.find_all('h3', class_='weibo_title'):\n for a in post.find_all('a'):\n url = a.get('href')\n user_link.append(url)\n for time in item.find_all('div', class_='weibo_time'):\n txt = time.get_text()\n post_time.append(txt)\n for post in item.find_all('p', class_='weibo_txt'):\n txt = post.get_text()\n post_txt.append(txt)\n data = {'post_text': post_txt, 'post_link': post_link, 'user':\n user_link, 'time': post_time}\n frame = DataFrame(data)\n frame.to_csv(outfile, encoding='utf-8')\n print(outfile_name, 'processed complete.')\n outfile.close()\n html_doc.close()\n return None\n <mask token>\n\n def retrieve_posts(self, outfile_name):\n \"\"\"(str)->a 
file\n \"\"\"\n post_text = []\n for i in range(50):\n frame_2 = pandas.read_csv('{}Temp/zhongsou_results_page_{}.csv'\n .format(WORKING_DIR, str(i)))\n df2 = DataFrame(frame_2)\n for i in df2.post_text:\n post_text.append(i)\n data = {'post_text': post_text}\n frame = DataFrame(data)\n frame.to_csv('{}Text_data/{}.txt'.format(WORKING_DIR, outfile_name),\n encoding='utf-8')\n frame.to_excel('{}Text_data/{}.xlsx'.format(WORKING_DIR,\n outfile_name), encoding='utf-8')\n print('Done')\n return None\n\n\nclass ContinueScrape:\n\n def scrape_main(self):\n \"\"\"\n Top-level function.\n Use links from below, scrape a page, sleep for 5s, and restart on the next link.\n \"\"\"\n for i in self.gen_links():\n index = str(self.gen_links().index(i))\n link = i\n cmd = self.get_weibo(link, index)\n if cmd == 'STOP':\n break\n else:\n time.sleep(10)\n continue\n print('=' * 10)\n print('Scrape is now complete. Help me to organize them.')\n print(\n 'View your temp folder, what is the biggest number of the files? \\n'\n )\n fn = int(input())\n self.retrieve_posts(fn)\n print('=' * 10)\n print('Congratulations! 
Your data is stored')\n return\n\n def gen_links(self):\n links = []\n for i in range(1, 51):\n i = str(i)\n links.append('{}&b={}'.format(QUERY_LINK, i))\n return links\n\n def get_weibo(self, link, index):\n \"\"\"\n Scrape a certain weibio search result page on 'zhongsou' and store it in locally.\n \"\"\"\n html_doc = open('{}Temp/weibo.txt'.format(WORKING_DIR), 'w',\n encoding='utf8')\n r = requests.get(link)\n print('Accessing web data.')\n html_doc.write(r.text)\n html_doc.close()\n h_post_text = []\n h_frame = pandas.read_csv(OLD_MASTER_FILE)\n h_df = DataFrame(h_frame)\n for i in h_df.post_text:\n h_post_text.append(i)\n outfile_name = 'zhongsou_results_page_' + index + '.csv'\n outfile = open('{}Temp/'.format(WORKING_DIR) + outfile_name, 'w',\n encoding='utf8')\n html_doc = open('{}Temp/weibo.txt'.format(WORKING_DIR), 'r',\n encoding='utf8')\n soup = BeautifulSoup(html_doc)\n user_link = []\n post_txt = []\n post_link = []\n post_time = []\n cmd = None\n weibo_items = soup.find_all('div', class_='weibo_item')\n for item in weibo_items:\n for link in item.find_all('a', target='_blank', class_='sina_weibo'\n ):\n url = link.get('href')\n post_link.append(url)\n for post in item.find_all('h3', class_='weibo_title'):\n for a in post.find_all('a'):\n url = a.get('href')\n user_link.append(url)\n for time in item.find_all('div', class_='weibo_time'):\n txt = time.get_text()\n post_time.append(txt)\n for post in item.find_all('p', class_='weibo_txt'):\n txt = post.get_text()\n post_txt.append(txt)\n if txt == h_post_text[0]:\n print(txt)\n print(' ___ exists')\n print('End of new data.')\n del post_link[-1]\n del user_link[-1]\n del post_time[-1]\n del post_txt[-1]\n cmd = 'STOP'\n break\n data = {'post_text': post_txt, 'post_link': post_link, 'user':\n user_link, 'time': post_time}\n frame = DataFrame(data)\n frame.to_csv(outfile, encoding='utf-8')\n print(outfile_name, 'processed complete.')\n outfile.close()\n html_doc.close()\n return cmd\n\n def 
retrieve_posts(self, file_number_total):\n \"\"\"(int)->a file\n \"\"\"\n post_text = []\n for i in range(file_number_total + 1):\n frame_2 = pandas.read_csv('{}Temp/zhongsou_results_page_{}.csv'\n .format(WORKING_DIR, str(i)))\n df2 = DataFrame(frame_2)\n for i in df2.post_text:\n post_text.append(i)\n frame_1 = pandas.read_csv(OLD_MASTER_FILE)\n df1 = DataFrame(frame_1)\n for i in df1.post_text:\n post_text.append(i)\n data = {'post_text': post_text}\n frame = DataFrame(data)\n frame.to_csv('{}Text_data/{}_2.txt'.format(WORKING_DIR,\n OUTPUT_FILE_NAME), encoding='utf-8')\n frame.to_excel('{}Text_data/{}_2.xlsx'.format(WORKING_DIR,\n OUTPUT_FILE_NAME), encoding='utf-8')\n print('Data gathered.')\n print('Temp files removed')\n return None\n\n\n<mask token>\n",
"step-2": "<mask token>\nglobal QUERY_LINK\n<mask token>\nglobal OUTPUT_FILE_NAME\n<mask token>\nglobal WORKING_DIR\n<mask token>\nglobal OLD_MASTER_FILE\n<mask token>\n\n\nclass NewScrape:\n\n def scrape_main(self):\n \"\"\"\n Top-level function.\n Use links from below, scrape a page, sleep for 5s, and restart on the next link.\n \"\"\"\n for i in self.gen_links():\n index = str(self.gen_links().index(i))\n link = i\n self.get_weibo(link, index)\n time.sleep(5)\n self.retrieve_posts(OUTPUT_FILE_NAME)\n print('=' * 10)\n print('Congratulations! Your data is stored')\n return None\n\n def gen_links(self):\n links = []\n for i in range(1, 51):\n i = str(i)\n links.append('{}&b={}'.format(QUERY_LINK, i))\n return links\n\n def get_weibo(self, link, index):\n \"\"\"\n Scrape a certain weibio search result page on 'zhongsou' and store it in locally.\n \"\"\"\n html_doc = open('{}Temp/weibo.txt'.format(WORKING_DIR), 'w',\n encoding='utf8')\n r = requests.get(link)\n print('accessing web data.')\n html_doc.write(r.text)\n html_doc.close()\n outfile_name = 'zhongsou_results_page_' + index + '.csv'\n outfile = open('{}Temp/'.format(WORKING_DIR) + outfile_name, 'w',\n encoding='utf8')\n html_doc = open('{}Temp/weibo.txt'.format(WORKING_DIR), 'r',\n encoding='utf8')\n soup = BeautifulSoup(html_doc)\n user_link = []\n post_txt = []\n post_link = []\n post_time = []\n weibo_items = soup.find_all('div', class_='weibo_item')\n for item in weibo_items:\n for link in item.find_all('a', target='_blank', class_='sina_weibo'\n ):\n url = link.get('href')\n post_link.append(url)\n for post in item.find_all('h3', class_='weibo_title'):\n for a in post.find_all('a'):\n url = a.get('href')\n user_link.append(url)\n for time in item.find_all('div', class_='weibo_time'):\n txt = time.get_text()\n post_time.append(txt)\n for post in item.find_all('p', class_='weibo_txt'):\n txt = post.get_text()\n post_txt.append(txt)\n data = {'post_text': post_txt, 'post_link': post_link, 'user':\n 
user_link, 'time': post_time}\n frame = DataFrame(data)\n frame.to_csv(outfile, encoding='utf-8')\n print(outfile_name, 'processed complete.')\n outfile.close()\n html_doc.close()\n return None\n\n def clean_temp(self):\n filelist = glob.glob('{}Temp/*'.format(WORKING_DIR))\n for f in filelist:\n os.remove(f)\n print('Temp files removed')\n return None\n\n def retrieve_posts(self, outfile_name):\n \"\"\"(str)->a file\n \"\"\"\n post_text = []\n for i in range(50):\n frame_2 = pandas.read_csv('{}Temp/zhongsou_results_page_{}.csv'\n .format(WORKING_DIR, str(i)))\n df2 = DataFrame(frame_2)\n for i in df2.post_text:\n post_text.append(i)\n data = {'post_text': post_text}\n frame = DataFrame(data)\n frame.to_csv('{}Text_data/{}.txt'.format(WORKING_DIR, outfile_name),\n encoding='utf-8')\n frame.to_excel('{}Text_data/{}.xlsx'.format(WORKING_DIR,\n outfile_name), encoding='utf-8')\n print('Done')\n return None\n\n\nclass ContinueScrape:\n\n def scrape_main(self):\n \"\"\"\n Top-level function.\n Use links from below, scrape a page, sleep for 5s, and restart on the next link.\n \"\"\"\n for i in self.gen_links():\n index = str(self.gen_links().index(i))\n link = i\n cmd = self.get_weibo(link, index)\n if cmd == 'STOP':\n break\n else:\n time.sleep(10)\n continue\n print('=' * 10)\n print('Scrape is now complete. Help me to organize them.')\n print(\n 'View your temp folder, what is the biggest number of the files? \\n'\n )\n fn = int(input())\n self.retrieve_posts(fn)\n print('=' * 10)\n print('Congratulations! 
Your data is stored')\n return\n\n def gen_links(self):\n links = []\n for i in range(1, 51):\n i = str(i)\n links.append('{}&b={}'.format(QUERY_LINK, i))\n return links\n\n def get_weibo(self, link, index):\n \"\"\"\n Scrape a certain weibio search result page on 'zhongsou' and store it in locally.\n \"\"\"\n html_doc = open('{}Temp/weibo.txt'.format(WORKING_DIR), 'w',\n encoding='utf8')\n r = requests.get(link)\n print('Accessing web data.')\n html_doc.write(r.text)\n html_doc.close()\n h_post_text = []\n h_frame = pandas.read_csv(OLD_MASTER_FILE)\n h_df = DataFrame(h_frame)\n for i in h_df.post_text:\n h_post_text.append(i)\n outfile_name = 'zhongsou_results_page_' + index + '.csv'\n outfile = open('{}Temp/'.format(WORKING_DIR) + outfile_name, 'w',\n encoding='utf8')\n html_doc = open('{}Temp/weibo.txt'.format(WORKING_DIR), 'r',\n encoding='utf8')\n soup = BeautifulSoup(html_doc)\n user_link = []\n post_txt = []\n post_link = []\n post_time = []\n cmd = None\n weibo_items = soup.find_all('div', class_='weibo_item')\n for item in weibo_items:\n for link in item.find_all('a', target='_blank', class_='sina_weibo'\n ):\n url = link.get('href')\n post_link.append(url)\n for post in item.find_all('h3', class_='weibo_title'):\n for a in post.find_all('a'):\n url = a.get('href')\n user_link.append(url)\n for time in item.find_all('div', class_='weibo_time'):\n txt = time.get_text()\n post_time.append(txt)\n for post in item.find_all('p', class_='weibo_txt'):\n txt = post.get_text()\n post_txt.append(txt)\n if txt == h_post_text[0]:\n print(txt)\n print(' ___ exists')\n print('End of new data.')\n del post_link[-1]\n del user_link[-1]\n del post_time[-1]\n del post_txt[-1]\n cmd = 'STOP'\n break\n data = {'post_text': post_txt, 'post_link': post_link, 'user':\n user_link, 'time': post_time}\n frame = DataFrame(data)\n frame.to_csv(outfile, encoding='utf-8')\n print(outfile_name, 'processed complete.')\n outfile.close()\n html_doc.close()\n return cmd\n\n def 
retrieve_posts(self, file_number_total):\n \"\"\"(int)->a file\n \"\"\"\n post_text = []\n for i in range(file_number_total + 1):\n frame_2 = pandas.read_csv('{}Temp/zhongsou_results_page_{}.csv'\n .format(WORKING_DIR, str(i)))\n df2 = DataFrame(frame_2)\n for i in df2.post_text:\n post_text.append(i)\n frame_1 = pandas.read_csv(OLD_MASTER_FILE)\n df1 = DataFrame(frame_1)\n for i in df1.post_text:\n post_text.append(i)\n data = {'post_text': post_text}\n frame = DataFrame(data)\n frame.to_csv('{}Text_data/{}_2.txt'.format(WORKING_DIR,\n OUTPUT_FILE_NAME), encoding='utf-8')\n frame.to_excel('{}Text_data/{}_2.xlsx'.format(WORKING_DIR,\n OUTPUT_FILE_NAME), encoding='utf-8')\n print('Data gathered.')\n print('Temp files removed')\n return None\n\n\nprint('=' * 10)\nprint(\n \"\"\"This program will help you collect Weibo language data as generated by the 中搜 search results.\n\"\"\"\n )\nprint(\n \"\"\"Use this page to generate a link for your query item:\n\nhttp://t.zhongsou.com/wb?form_id=1&org=1&sel=0&so=1&v=%D6%D0%CB%D1&w=%CD%F8%D3%EF\"\"\"\n )\n<mask token>\nif resp == 'Y':\n print()\n print('=' * 10)\n print('Initialize scraping now.')\n print('=' * 10)\n NewScrape().scrape_main()\nelif resp == 'N':\n OLD_MASTER_FILE = input(\n \"\"\"\nWhere is the old txt file you want to merge later? Please paste full path. \n> \"\"\"\n )\n print()\n print('=' * 10)\n print('WARNING: FURTHER ACTIONS NEEDED AT THE END OF SCRAPING.')\n print('Initialize scraping now.')\n print('=' * 10)\n ContinueScrape().scrape_main()\nelse:\n print('Invalid command. Try again.')\n",
"step-3": "<mask token>\nglobal QUERY_LINK\nQUERY_LINK = (\n 'http://t.zhongsou.com/wb?form_id=1&org=1&sel=0&so=1&v=%D6%D0%CB%D1&w=%B1%C6'\n )\nglobal OUTPUT_FILE_NAME\nOUTPUT_FILE_NAME = 'scrape'\nglobal WORKING_DIR\nWORKING_DIR = '~/Corpora/'\nglobal OLD_MASTER_FILE\nOLD_MASTER_FILE = '{}Text_data/'.format(WORKING_DIR) + 'yeshizuile.txt'\n\n\nclass NewScrape:\n\n def scrape_main(self):\n \"\"\"\n Top-level function.\n Use links from below, scrape a page, sleep for 5s, and restart on the next link.\n \"\"\"\n for i in self.gen_links():\n index = str(self.gen_links().index(i))\n link = i\n self.get_weibo(link, index)\n time.sleep(5)\n self.retrieve_posts(OUTPUT_FILE_NAME)\n print('=' * 10)\n print('Congratulations! Your data is stored')\n return None\n\n def gen_links(self):\n links = []\n for i in range(1, 51):\n i = str(i)\n links.append('{}&b={}'.format(QUERY_LINK, i))\n return links\n\n def get_weibo(self, link, index):\n \"\"\"\n Scrape a certain weibio search result page on 'zhongsou' and store it in locally.\n \"\"\"\n html_doc = open('{}Temp/weibo.txt'.format(WORKING_DIR), 'w',\n encoding='utf8')\n r = requests.get(link)\n print('accessing web data.')\n html_doc.write(r.text)\n html_doc.close()\n outfile_name = 'zhongsou_results_page_' + index + '.csv'\n outfile = open('{}Temp/'.format(WORKING_DIR) + outfile_name, 'w',\n encoding='utf8')\n html_doc = open('{}Temp/weibo.txt'.format(WORKING_DIR), 'r',\n encoding='utf8')\n soup = BeautifulSoup(html_doc)\n user_link = []\n post_txt = []\n post_link = []\n post_time = []\n weibo_items = soup.find_all('div', class_='weibo_item')\n for item in weibo_items:\n for link in item.find_all('a', target='_blank', class_='sina_weibo'\n ):\n url = link.get('href')\n post_link.append(url)\n for post in item.find_all('h3', class_='weibo_title'):\n for a in post.find_all('a'):\n url = a.get('href')\n user_link.append(url)\n for time in item.find_all('div', class_='weibo_time'):\n txt = time.get_text()\n post_time.append(txt)\n 
for post in item.find_all('p', class_='weibo_txt'):\n txt = post.get_text()\n post_txt.append(txt)\n data = {'post_text': post_txt, 'post_link': post_link, 'user':\n user_link, 'time': post_time}\n frame = DataFrame(data)\n frame.to_csv(outfile, encoding='utf-8')\n print(outfile_name, 'processed complete.')\n outfile.close()\n html_doc.close()\n return None\n\n def clean_temp(self):\n filelist = glob.glob('{}Temp/*'.format(WORKING_DIR))\n for f in filelist:\n os.remove(f)\n print('Temp files removed')\n return None\n\n def retrieve_posts(self, outfile_name):\n \"\"\"(str)->a file\n \"\"\"\n post_text = []\n for i in range(50):\n frame_2 = pandas.read_csv('{}Temp/zhongsou_results_page_{}.csv'\n .format(WORKING_DIR, str(i)))\n df2 = DataFrame(frame_2)\n for i in df2.post_text:\n post_text.append(i)\n data = {'post_text': post_text}\n frame = DataFrame(data)\n frame.to_csv('{}Text_data/{}.txt'.format(WORKING_DIR, outfile_name),\n encoding='utf-8')\n frame.to_excel('{}Text_data/{}.xlsx'.format(WORKING_DIR,\n outfile_name), encoding='utf-8')\n print('Done')\n return None\n\n\nclass ContinueScrape:\n\n def scrape_main(self):\n \"\"\"\n Top-level function.\n Use links from below, scrape a page, sleep for 5s, and restart on the next link.\n \"\"\"\n for i in self.gen_links():\n index = str(self.gen_links().index(i))\n link = i\n cmd = self.get_weibo(link, index)\n if cmd == 'STOP':\n break\n else:\n time.sleep(10)\n continue\n print('=' * 10)\n print('Scrape is now complete. Help me to organize them.')\n print(\n 'View your temp folder, what is the biggest number of the files? \\n'\n )\n fn = int(input())\n self.retrieve_posts(fn)\n print('=' * 10)\n print('Congratulations! 
Your data is stored')\n return\n\n def gen_links(self):\n links = []\n for i in range(1, 51):\n i = str(i)\n links.append('{}&b={}'.format(QUERY_LINK, i))\n return links\n\n def get_weibo(self, link, index):\n \"\"\"\n Scrape a certain weibio search result page on 'zhongsou' and store it in locally.\n \"\"\"\n html_doc = open('{}Temp/weibo.txt'.format(WORKING_DIR), 'w',\n encoding='utf8')\n r = requests.get(link)\n print('Accessing web data.')\n html_doc.write(r.text)\n html_doc.close()\n h_post_text = []\n h_frame = pandas.read_csv(OLD_MASTER_FILE)\n h_df = DataFrame(h_frame)\n for i in h_df.post_text:\n h_post_text.append(i)\n outfile_name = 'zhongsou_results_page_' + index + '.csv'\n outfile = open('{}Temp/'.format(WORKING_DIR) + outfile_name, 'w',\n encoding='utf8')\n html_doc = open('{}Temp/weibo.txt'.format(WORKING_DIR), 'r',\n encoding='utf8')\n soup = BeautifulSoup(html_doc)\n user_link = []\n post_txt = []\n post_link = []\n post_time = []\n cmd = None\n weibo_items = soup.find_all('div', class_='weibo_item')\n for item in weibo_items:\n for link in item.find_all('a', target='_blank', class_='sina_weibo'\n ):\n url = link.get('href')\n post_link.append(url)\n for post in item.find_all('h3', class_='weibo_title'):\n for a in post.find_all('a'):\n url = a.get('href')\n user_link.append(url)\n for time in item.find_all('div', class_='weibo_time'):\n txt = time.get_text()\n post_time.append(txt)\n for post in item.find_all('p', class_='weibo_txt'):\n txt = post.get_text()\n post_txt.append(txt)\n if txt == h_post_text[0]:\n print(txt)\n print(' ___ exists')\n print('End of new data.')\n del post_link[-1]\n del user_link[-1]\n del post_time[-1]\n del post_txt[-1]\n cmd = 'STOP'\n break\n data = {'post_text': post_txt, 'post_link': post_link, 'user':\n user_link, 'time': post_time}\n frame = DataFrame(data)\n frame.to_csv(outfile, encoding='utf-8')\n print(outfile_name, 'processed complete.')\n outfile.close()\n html_doc.close()\n return cmd\n\n def 
retrieve_posts(self, file_number_total):\n \"\"\"(int)->a file\n \"\"\"\n post_text = []\n for i in range(file_number_total + 1):\n frame_2 = pandas.read_csv('{}Temp/zhongsou_results_page_{}.csv'\n .format(WORKING_DIR, str(i)))\n df2 = DataFrame(frame_2)\n for i in df2.post_text:\n post_text.append(i)\n frame_1 = pandas.read_csv(OLD_MASTER_FILE)\n df1 = DataFrame(frame_1)\n for i in df1.post_text:\n post_text.append(i)\n data = {'post_text': post_text}\n frame = DataFrame(data)\n frame.to_csv('{}Text_data/{}_2.txt'.format(WORKING_DIR,\n OUTPUT_FILE_NAME), encoding='utf-8')\n frame.to_excel('{}Text_data/{}_2.xlsx'.format(WORKING_DIR,\n OUTPUT_FILE_NAME), encoding='utf-8')\n print('Data gathered.')\n print('Temp files removed')\n return None\n\n\nprint('=' * 10)\nprint(\n \"\"\"This program will help you collect Weibo language data as generated by the 中搜 search results.\n\"\"\"\n )\nprint(\n \"\"\"Use this page to generate a link for your query item:\n\nhttp://t.zhongsou.com/wb?form_id=1&org=1&sel=0&so=1&v=%D6%D0%CB%D1&w=%CD%F8%D3%EF\"\"\"\n )\nQUERY_LINK = input(\"\"\"\nPaste your query link \n> \"\"\")\nOUTPUT_FILE_NAME = input(\n \"\"\"\nWhat's your query term? (This will be used as file name)\n> \"\"\")\nresp = input(\"\"\"\nIs this your first time running this query? Y/N\n> \"\"\").upper()\nif resp == 'Y':\n print()\n print('=' * 10)\n print('Initialize scraping now.')\n print('=' * 10)\n NewScrape().scrape_main()\nelif resp == 'N':\n OLD_MASTER_FILE = input(\n \"\"\"\nWhere is the old txt file you want to merge later? Please paste full path. \n> \"\"\"\n )\n print()\n print('=' * 10)\n print('WARNING: FURTHER ACTIONS NEEDED AT THE END OF SCRAPING.')\n print('Initialize scraping now.')\n print('=' * 10)\n ContinueScrape().scrape_main()\nelse:\n print('Invalid command. Try again.')\n",
"step-4": "<mask token>\nimport requests\nfrom bs4 import BeautifulSoup\nfrom pandas import DataFrame\nimport time\nimport pandas\nimport glob, os\nglobal QUERY_LINK\nQUERY_LINK = (\n 'http://t.zhongsou.com/wb?form_id=1&org=1&sel=0&so=1&v=%D6%D0%CB%D1&w=%B1%C6'\n )\nglobal OUTPUT_FILE_NAME\nOUTPUT_FILE_NAME = 'scrape'\nglobal WORKING_DIR\nWORKING_DIR = '~/Corpora/'\nglobal OLD_MASTER_FILE\nOLD_MASTER_FILE = '{}Text_data/'.format(WORKING_DIR) + 'yeshizuile.txt'\n\n\nclass NewScrape:\n\n def scrape_main(self):\n \"\"\"\n Top-level function.\n Use links from below, scrape a page, sleep for 5s, and restart on the next link.\n \"\"\"\n for i in self.gen_links():\n index = str(self.gen_links().index(i))\n link = i\n self.get_weibo(link, index)\n time.sleep(5)\n self.retrieve_posts(OUTPUT_FILE_NAME)\n print('=' * 10)\n print('Congratulations! Your data is stored')\n return None\n\n def gen_links(self):\n links = []\n for i in range(1, 51):\n i = str(i)\n links.append('{}&b={}'.format(QUERY_LINK, i))\n return links\n\n def get_weibo(self, link, index):\n \"\"\"\n Scrape a certain weibio search result page on 'zhongsou' and store it in locally.\n \"\"\"\n html_doc = open('{}Temp/weibo.txt'.format(WORKING_DIR), 'w',\n encoding='utf8')\n r = requests.get(link)\n print('accessing web data.')\n html_doc.write(r.text)\n html_doc.close()\n outfile_name = 'zhongsou_results_page_' + index + '.csv'\n outfile = open('{}Temp/'.format(WORKING_DIR) + outfile_name, 'w',\n encoding='utf8')\n html_doc = open('{}Temp/weibo.txt'.format(WORKING_DIR), 'r',\n encoding='utf8')\n soup = BeautifulSoup(html_doc)\n user_link = []\n post_txt = []\n post_link = []\n post_time = []\n weibo_items = soup.find_all('div', class_='weibo_item')\n for item in weibo_items:\n for link in item.find_all('a', target='_blank', class_='sina_weibo'\n ):\n url = link.get('href')\n post_link.append(url)\n for post in item.find_all('h3', class_='weibo_title'):\n for a in post.find_all('a'):\n url = a.get('href')\n 
user_link.append(url)\n for time in item.find_all('div', class_='weibo_time'):\n txt = time.get_text()\n post_time.append(txt)\n for post in item.find_all('p', class_='weibo_txt'):\n txt = post.get_text()\n post_txt.append(txt)\n data = {'post_text': post_txt, 'post_link': post_link, 'user':\n user_link, 'time': post_time}\n frame = DataFrame(data)\n frame.to_csv(outfile, encoding='utf-8')\n print(outfile_name, 'processed complete.')\n outfile.close()\n html_doc.close()\n return None\n\n def clean_temp(self):\n filelist = glob.glob('{}Temp/*'.format(WORKING_DIR))\n for f in filelist:\n os.remove(f)\n print('Temp files removed')\n return None\n\n def retrieve_posts(self, outfile_name):\n \"\"\"(str)->a file\n \"\"\"\n post_text = []\n for i in range(50):\n frame_2 = pandas.read_csv('{}Temp/zhongsou_results_page_{}.csv'\n .format(WORKING_DIR, str(i)))\n df2 = DataFrame(frame_2)\n for i in df2.post_text:\n post_text.append(i)\n data = {'post_text': post_text}\n frame = DataFrame(data)\n frame.to_csv('{}Text_data/{}.txt'.format(WORKING_DIR, outfile_name),\n encoding='utf-8')\n frame.to_excel('{}Text_data/{}.xlsx'.format(WORKING_DIR,\n outfile_name), encoding='utf-8')\n print('Done')\n return None\n\n\nclass ContinueScrape:\n\n def scrape_main(self):\n \"\"\"\n Top-level function.\n Use links from below, scrape a page, sleep for 5s, and restart on the next link.\n \"\"\"\n for i in self.gen_links():\n index = str(self.gen_links().index(i))\n link = i\n cmd = self.get_weibo(link, index)\n if cmd == 'STOP':\n break\n else:\n time.sleep(10)\n continue\n print('=' * 10)\n print('Scrape is now complete. Help me to organize them.')\n print(\n 'View your temp folder, what is the biggest number of the files? \\n'\n )\n fn = int(input())\n self.retrieve_posts(fn)\n print('=' * 10)\n print('Congratulations! 
Your data is stored')\n return\n\n def gen_links(self):\n links = []\n for i in range(1, 51):\n i = str(i)\n links.append('{}&b={}'.format(QUERY_LINK, i))\n return links\n\n def get_weibo(self, link, index):\n \"\"\"\n Scrape a certain weibio search result page on 'zhongsou' and store it in locally.\n \"\"\"\n html_doc = open('{}Temp/weibo.txt'.format(WORKING_DIR), 'w',\n encoding='utf8')\n r = requests.get(link)\n print('Accessing web data.')\n html_doc.write(r.text)\n html_doc.close()\n h_post_text = []\n h_frame = pandas.read_csv(OLD_MASTER_FILE)\n h_df = DataFrame(h_frame)\n for i in h_df.post_text:\n h_post_text.append(i)\n outfile_name = 'zhongsou_results_page_' + index + '.csv'\n outfile = open('{}Temp/'.format(WORKING_DIR) + outfile_name, 'w',\n encoding='utf8')\n html_doc = open('{}Temp/weibo.txt'.format(WORKING_DIR), 'r',\n encoding='utf8')\n soup = BeautifulSoup(html_doc)\n user_link = []\n post_txt = []\n post_link = []\n post_time = []\n cmd = None\n weibo_items = soup.find_all('div', class_='weibo_item')\n for item in weibo_items:\n for link in item.find_all('a', target='_blank', class_='sina_weibo'\n ):\n url = link.get('href')\n post_link.append(url)\n for post in item.find_all('h3', class_='weibo_title'):\n for a in post.find_all('a'):\n url = a.get('href')\n user_link.append(url)\n for time in item.find_all('div', class_='weibo_time'):\n txt = time.get_text()\n post_time.append(txt)\n for post in item.find_all('p', class_='weibo_txt'):\n txt = post.get_text()\n post_txt.append(txt)\n if txt == h_post_text[0]:\n print(txt)\n print(' ___ exists')\n print('End of new data.')\n del post_link[-1]\n del user_link[-1]\n del post_time[-1]\n del post_txt[-1]\n cmd = 'STOP'\n break\n data = {'post_text': post_txt, 'post_link': post_link, 'user':\n user_link, 'time': post_time}\n frame = DataFrame(data)\n frame.to_csv(outfile, encoding='utf-8')\n print(outfile_name, 'processed complete.')\n outfile.close()\n html_doc.close()\n return cmd\n\n def 
retrieve_posts(self, file_number_total):\n \"\"\"(int)->a file\n \"\"\"\n post_text = []\n for i in range(file_number_total + 1):\n frame_2 = pandas.read_csv('{}Temp/zhongsou_results_page_{}.csv'\n .format(WORKING_DIR, str(i)))\n df2 = DataFrame(frame_2)\n for i in df2.post_text:\n post_text.append(i)\n frame_1 = pandas.read_csv(OLD_MASTER_FILE)\n df1 = DataFrame(frame_1)\n for i in df1.post_text:\n post_text.append(i)\n data = {'post_text': post_text}\n frame = DataFrame(data)\n frame.to_csv('{}Text_data/{}_2.txt'.format(WORKING_DIR,\n OUTPUT_FILE_NAME), encoding='utf-8')\n frame.to_excel('{}Text_data/{}_2.xlsx'.format(WORKING_DIR,\n OUTPUT_FILE_NAME), encoding='utf-8')\n print('Data gathered.')\n print('Temp files removed')\n return None\n\n\nprint('=' * 10)\nprint(\n \"\"\"This program will help you collect Weibo language data as generated by the 中搜 search results.\n\"\"\"\n )\nprint(\n \"\"\"Use this page to generate a link for your query item:\n\nhttp://t.zhongsou.com/wb?form_id=1&org=1&sel=0&so=1&v=%D6%D0%CB%D1&w=%CD%F8%D3%EF\"\"\"\n )\nQUERY_LINK = input(\"\"\"\nPaste your query link \n> \"\"\")\nOUTPUT_FILE_NAME = input(\n \"\"\"\nWhat's your query term? (This will be used as file name)\n> \"\"\")\nresp = input(\"\"\"\nIs this your first time running this query? Y/N\n> \"\"\").upper()\nif resp == 'Y':\n print()\n print('=' * 10)\n print('Initialize scraping now.')\n print('=' * 10)\n NewScrape().scrape_main()\nelif resp == 'N':\n OLD_MASTER_FILE = input(\n \"\"\"\nWhere is the old txt file you want to merge later? Please paste full path. \n> \"\"\"\n )\n print()\n print('=' * 10)\n print('WARNING: FURTHER ACTIONS NEEDED AT THE END OF SCRAPING.')\n print('Initialize scraping now.')\n print('=' * 10)\n ContinueScrape().scrape_main()\nelse:\n print('Invalid command. Try again.')\n",
"step-5": "'''\nAuthor: Iris Peng. Date: Feb 21, 2016\nUsage: Scrape Weibo posts from Zhongsou for the first time for a query\n\nIn the terminal, type\n$ python3 scrape_weibo.py\n\nand follow the prompts\n\n'''\nimport requests\nfrom bs4 import BeautifulSoup\nfrom pandas import DataFrame\nimport time\nimport pandas\nimport glob, os\n\n\nglobal QUERY_LINK\nQUERY_LINK = 'http://t.zhongsou.com/wb?form_id=1&org=1&sel=0&so=1&v=%D6%D0%CB%D1&w=%B1%C6'#link\n\nglobal OUTPUT_FILE_NAME\nOUTPUT_FILE_NAME = 'scrape' # Name of your output file\n\nglobal WORKING_DIR\nWORKING_DIR = '~/Corpora/'\n\nglobal OLD_MASTER_FILE\nOLD_MASTER_FILE = '{}Text_data/'.format(WORKING_DIR) + 'yeshizuile.txt' #Feed the new output\n \n\nclass NewScrape():\n \n def scrape_main(self):\n '''\n Top-level function.\n Use links from below, scrape a page, sleep for 5s, and restart on the next link.\n '''\n for i in self.gen_links():\n index = str(self.gen_links().index(i))\n link = i\n self.get_weibo(link,index)\n time.sleep(5)\n \n self.retrieve_posts(OUTPUT_FILE_NAME)\n #self.clean_temp()\n print('='*10)\n print('Congratulations! 
Your data is stored')\n return None\n\n def gen_links(self):\n links = []\n for i in range(1,51):\n i = str(i) \n links.append('{}&b={}'.format(QUERY_LINK,i))\n return links\n\n def get_weibo(self,link,index):\n \n '''\n Scrape a certain weibio search result page on 'zhongsou' and store it in locally.\n '''\n\n html_doc = open('{}Temp/weibo.txt'.format(WORKING_DIR),'w', encoding = 'utf8')\n \n r = requests.get(link)\n print ('accessing web data.')\n html_doc.write(r.text)\n html_doc.close()\n \n # Write into a csv file\n outfile_name = 'zhongsou_results_page_' + index + '.csv'\n outfile = open('{}Temp/'.format(WORKING_DIR) + outfile_name,'w', encoding = 'utf8') #change path\n \n # Turn the text into a BeautifulSoup object and strip down the text.\n html_doc = open('{}Temp/weibo.txt'.format(WORKING_DIR),'r', encoding = 'utf8')#change path\n soup = BeautifulSoup(html_doc)\n\n user_link = []\n post_txt = []\n post_link = []\n post_time = []\n \n weibo_items = soup.find_all('div', class_='weibo_item')\n \n for item in weibo_items: \n \n for link in item.find_all('a', target='_blank', class_='sina_weibo'):\n url = link.get('href')\n post_link.append(url)\n\n for post in item.find_all('h3', class_='weibo_title'):\n for a in post.find_all('a'):\n url = a.get('href')\n user_link.append(url)\n\n for time in item.find_all('div', class_='weibo_time'):\n txt = time.get_text()\n post_time.append(txt)\n\n for post in item.find_all('p', class_='weibo_txt'):\n txt = post.get_text()\n post_txt.append(txt)\n \n data = {'post_text':post_txt,'post_link':post_link,'user':user_link, 'time':post_time}\n frame = DataFrame(data)\n frame.to_csv(outfile, encoding='utf-8')\n print (outfile_name,'processed complete.')\n \n outfile.close()\n html_doc.close()\n return None\n\n def clean_temp(self):\n filelist = glob.glob('{}Temp/*'.format(WORKING_DIR))\n for f in filelist:\n os.remove(f)\n print('Temp files removed')\n return None\n\n \n def retrieve_posts(self,outfile_name):\n '''(str)->a 
file\n ''' \n post_text = []\n \n for i in range(50):\n frame_2 = pandas.read_csv('{}Temp/zhongsou_results_page_{}.csv'.format(WORKING_DIR, str(i)))#change directory\n df2 = DataFrame(frame_2)\n for i in df2.post_text:#the column'post_text'\n post_text.append(i)\n\n data = {'post_text':post_text}\n frame = DataFrame(data)\n frame.to_csv('{}Text_data/{}.txt'.format(WORKING_DIR, outfile_name), encoding = 'utf-8')#change saved path\n frame.to_excel('{}Text_data/{}.xlsx'.format(WORKING_DIR, outfile_name), encoding = 'utf-8')#change saved path\n print(\"Done\")\n return None \n\nclass ContinueScrape():\n \n def scrape_main(self):\n '''\n Top-level function.\n Use links from below, scrape a page, sleep for 5s, and restart on the next link.\n '''\n for i in self.gen_links():\n index = str(self.gen_links().index(i))\n link = i\n cmd = self.get_weibo(link,index)\n if cmd == 'STOP':\n break\n else:\n time.sleep(10)\n continue\n \n print('='*10)\n print('Scrape is now complete. Help me to organize them.')\n print ('View your temp folder, what is the biggest number of the files? \\n')\n fn = int(input())\n self.retrieve_posts(fn)\n print('='*10)\n print('Congratulations! 
Your data is stored')\n return \n\n def gen_links(self):\n links = []\n for i in range(1,51):\n i = str(i) \n links.append('{}&b={}'.format(QUERY_LINK,i))\n return links\n\n def get_weibo(self,link,index):\n \n '''\n Scrape a certain weibio search result page on 'zhongsou' and store it in locally.\n '''\n\n html_doc = open('{}Temp/weibo.txt'.format(WORKING_DIR), 'w', encoding='utf8')\n\n r = requests.get(link)\n print ('Accessing web data.')\n html_doc.write(r.text)\n html_doc.close()\n\n # Retrieve scrape history\n h_post_text = [] \n h_frame = pandas.read_csv(OLD_MASTER_FILE) \n h_df = DataFrame(h_frame)\n for i in h_df.post_text:\n h_post_text.append(i)\n \n # Write into a csv file\n outfile_name = 'zhongsou_results_page_' + index + '.csv'\n outfile = open('{}Temp/'.format(WORKING_DIR)+ outfile_name,'w', encoding = 'utf8') #change path\n \n # Turn the text into a BeautifulSoup object and strip down the text.\n html_doc = open('{}Temp/weibo.txt'.format(WORKING_DIR), 'r', encoding='utf8')\n soup = BeautifulSoup(html_doc)\n\n user_link = []\n post_txt = []\n post_link = []\n post_time = []\n cmd = None\n \n weibo_items = soup.find_all('div', class_='weibo_item')\n \n for item in weibo_items: \n \n for link in item.find_all('a', target='_blank', class_='sina_weibo'):\n url = link.get('href')\n post_link.append(url)\n\n for post in item.find_all('h3', class_='weibo_title'):\n for a in post.find_all('a'):\n url = a.get('href')\n user_link.append(url)\n\n for time in item.find_all('div', class_='weibo_time'):\n txt = time.get_text()\n post_time.append(txt)\n\n for post in item.find_all('p', class_='weibo_txt'):\n txt = post.get_text()\n post_txt.append(txt)\n\n #has bugs!\n #if txt in h_post_text:\n if txt == h_post_text[0]: \n print (txt)\n print(' ___ exists')\n print ('End of new data.') #Doesn't affect main function, break should be in main function\n del post_link[-1]\n del user_link[-1]\n del post_time[-1]\n del post_txt[-1]\n cmd = 'STOP'\n break\n \n data = 
{'post_text':post_txt,'post_link':post_link,'user':user_link, 'time':post_time}\n frame = DataFrame(data)\n frame.to_csv(outfile, encoding='utf-8')\n print (outfile_name,'processed complete.')\n \n outfile.close()\n html_doc.close()\n return cmd\n\n def retrieve_posts(self,file_number_total):\n '''(int)->a file\n '''\n post_text = []\n \n \n for i in range(file_number_total+1):\n frame_2 = pandas.read_csv('{}Temp/zhongsou_results_page_{}.csv'.format(WORKING_DIR, str(i)))\n df2 = DataFrame(frame_2)\n for i in df2.post_text:#the column'post_text'\n post_text.append(i)\n\n frame_1 = pandas.read_csv(OLD_MASTER_FILE)\n df1 = DataFrame(frame_1)\n for i in df1.post_text:\n post_text.append(i)\n\n data = {'post_text':post_text}\n frame = DataFrame(data)\n frame.to_csv('{}Text_data/{}_2.txt'.format(WORKING_DIR, OUTPUT_FILE_NAME), encoding = 'utf-8')#saved path\n frame.to_excel('{}Text_data/{}_2.xlsx'.format(WORKING_DIR, OUTPUT_FILE_NAME), encoding = 'utf-8')#saved path\n\n\n print(\"Data gathered.\")\n\n## filelist = glob.glob('{}Temp/*'.format(WORKING_DIR))\n## for f in filelist:\n## os.remove(f)\n\n #os.remove(OLD_MASTER_FILE)\n\n print('Temp files removed')\n\n return None \n\nprint('='*10)\nprint('This program will help you collect Weibo language data as generated by the 中搜 search results.\\n')\nprint('Use this page to generate a link for your query item:\\n\\nhttp://t.zhongsou.com/wb?form_id=1&org=1&sel=0&so=1&v=%D6%D0%CB%D1&w=%CD%F8%D3%EF')\nQUERY_LINK = input('\\nPaste your query link \\n> ')\nOUTPUT_FILE_NAME = input('\\nWhat\\'s your query term? (This will be used as file name)\\n> ')\nresp = input('\\nIs this your first time running this query? Y/N\\n> ').upper()\nif resp == 'Y':\n print()\n print('='*10)\n print('Initialize scraping now.')\n print('='*10)\n NewScrape().scrape_main()\nelif resp == 'N':\n OLD_MASTER_FILE = input('\\nWhere is the old txt file you want to merge later? Please paste full path. 
\\n> ')\n print()\n print('='*10)\n print('WARNING: FURTHER ACTIONS NEEDED AT THE END OF SCRAPING.')\n print('Initialize scraping now.')\n print('='*10)\n ContinueScrape().scrape_main()\n \nelse:\n print('Invalid command. Try again.')\n",
"step-ids": [
9,
12,
13,
14,
15
]
}
|
[
9,
12,
13,
14,
15
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def index(request):
baseball = League.objects.filter(name__contains='Baseball')
women_league = League.objects.filter(name__contains='women')
hockey_league = League.objects.filter(sport__contains='hockey')
not_football = League.objects.exclude(sport='soccer')
conference = League.objects.filter(name__contains='conference')
atlanta = Team.objects.filter(location='Atlanta')
dallas = Team.objects.filter(location='Dallas')
raptors = Team.objects.filter(team_name__contains='Raptors')
city = Team.objects.filter(location__contains='city')
ti = Team.objects.filter(team_name__startswith='T')
order = Team.objects.order_by('location')
Iorder = Team.objects.order_by('-team_name')
cooper = Player.objects.filter(last_name='Cooper')
joshua = Player.objects.filter(first_name='Joshua')
josh = Player.objects.filter(last_name='Cooper') & Player.objects.exclude(
first_name='Joshua')
aw = Player.objects.filter(first_name='Alexander') | Player.objects.filter(
first_name='Wyatt')
atlantic = Team.objects.filter(league__name='Atlantic Soccer Conference')
player_boston = Player.objects.filter(curr_team__team_name='Penguin',
curr_team__location='Boston')
jugadores = Player.objects.filter(curr_team__league__name=
'International Collegiate Baseball Conference')
amateur_soccer = Player.objects.filter(curr_team__league__name=
'American Amateur Soccer Conference').filter(last_name='Lopez')
soccer = Player.objects.filter(all_teams__league__sport='Soccer')
sophia = Team.objects.filter(curr_players__first_name='Sophia')
sophia_leagues = League.objects.filter(teams__curr_players__first_name=
'Sophia')
flores = Player.objects.filter(last_name='FLores').exclude(
curr_team__team_name='Washington Roughriders')
evans = Team.objects.filter(all_players__first_name='Samuel',
all_players__last_name='Evans') & Team.objects.filter(
curr_players__first_name='Samuel', curr_players__last_name='Evans')
thunder_cat = Player.objects.filter(all_teams__team_name='Tigers'
) | Player.objects.filter(curr_team__team_name='Tigers')
try:
loswichitavikin = Team.objects.get(team_name='Vikings', location=
'Wichita')
wichita_players = loswichitavikin.all_players.all()
wichita_current_ids = [player.id for player in loswichitavikin.
curr_players.all()]
not_now_wichita = [player for player in wichita_players if player.
id not in wichita_current_ids]
except Team.DoesNotExist:
not_now_wichita = []
joshuas2 = Player.objects.filter(first_name='Joshua'
) & Player.objects.filter(all_teams__league__name=
'Atlantic Federation of Collegiate Baseball Athletics')
team12 = Team.objects.annotate(Count('curr_players')).annotate(Count(
'all_players')).filter(curr_players__count__gte=12).filter(
all_players__count__gte=12)
orderplayer = Player.objects.annotate(Count('all_teams')).order_by(
'all_teams__count')
"""
Detroit colt 4
try:
loswichitavikin = Team.objects.get(team_name = "Vikings", location = "Wichita")
wichita_players = loswichitavikin.all_players.all()
wichita_current_ids = [player.id for player in loswichitavikin.curr_players.all()]
not_now_wichita = [player for player in wichita_players if player.id not in wichita_current_ids]
except Team.DoesNotExist:
not_now_wichita = []
"""
context = {'leagues': League.objects.all(), 'teams': Team.objects.all(),
'players': Player.objects.all(), 'baseball': baseball,
'women_league': women_league, 'hockey_league': hockey_league,
'not_football': not_football, 'conference': conference, 'atlanta':
atlanta, 'dallas': dallas, 'raptor': raptors, 'city': city, 'ti':
ti, 'order': order, 'Iorder': Iorder, 'cooper': cooper, 'joshua':
joshua, 'josh': josh, 'aw': aw, 'atlantic': atlantic,
'player_boston': player_boston, 'jugadores': jugadores,
'amateur_soccer': amateur_soccer, 'soccer': soccer, 'sophia':
sophia, 'sophia_leagues': sophia_leagues, 'flores': flores, 'evans':
evans, 'thunder_cat': thunder_cat, 'not_now_wichita':
not_now_wichita, 'joshuas2': joshuas2, 'team12': team12,
'orderplayer': orderplayer}
return render(request, 'leagues/index.html', context)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def index(request):
baseball = League.objects.filter(name__contains='Baseball')
women_league = League.objects.filter(name__contains='women')
hockey_league = League.objects.filter(sport__contains='hockey')
not_football = League.objects.exclude(sport='soccer')
conference = League.objects.filter(name__contains='conference')
atlanta = Team.objects.filter(location='Atlanta')
dallas = Team.objects.filter(location='Dallas')
raptors = Team.objects.filter(team_name__contains='Raptors')
city = Team.objects.filter(location__contains='city')
ti = Team.objects.filter(team_name__startswith='T')
order = Team.objects.order_by('location')
Iorder = Team.objects.order_by('-team_name')
cooper = Player.objects.filter(last_name='Cooper')
joshua = Player.objects.filter(first_name='Joshua')
josh = Player.objects.filter(last_name='Cooper') & Player.objects.exclude(
first_name='Joshua')
aw = Player.objects.filter(first_name='Alexander') | Player.objects.filter(
first_name='Wyatt')
atlantic = Team.objects.filter(league__name='Atlantic Soccer Conference')
player_boston = Player.objects.filter(curr_team__team_name='Penguin',
curr_team__location='Boston')
jugadores = Player.objects.filter(curr_team__league__name=
'International Collegiate Baseball Conference')
amateur_soccer = Player.objects.filter(curr_team__league__name=
'American Amateur Soccer Conference').filter(last_name='Lopez')
soccer = Player.objects.filter(all_teams__league__sport='Soccer')
sophia = Team.objects.filter(curr_players__first_name='Sophia')
sophia_leagues = League.objects.filter(teams__curr_players__first_name=
'Sophia')
flores = Player.objects.filter(last_name='FLores').exclude(
curr_team__team_name='Washington Roughriders')
evans = Team.objects.filter(all_players__first_name='Samuel',
all_players__last_name='Evans') & Team.objects.filter(
curr_players__first_name='Samuel', curr_players__last_name='Evans')
thunder_cat = Player.objects.filter(all_teams__team_name='Tigers'
) | Player.objects.filter(curr_team__team_name='Tigers')
try:
loswichitavikin = Team.objects.get(team_name='Vikings', location=
'Wichita')
wichita_players = loswichitavikin.all_players.all()
wichita_current_ids = [player.id for player in loswichitavikin.
curr_players.all()]
not_now_wichita = [player for player in wichita_players if player.
id not in wichita_current_ids]
except Team.DoesNotExist:
not_now_wichita = []
joshuas2 = Player.objects.filter(first_name='Joshua'
) & Player.objects.filter(all_teams__league__name=
'Atlantic Federation of Collegiate Baseball Athletics')
team12 = Team.objects.annotate(Count('curr_players')).annotate(Count(
'all_players')).filter(curr_players__count__gte=12).filter(
all_players__count__gte=12)
orderplayer = Player.objects.annotate(Count('all_teams')).order_by(
'all_teams__count')
"""
Detroit colt 4
try:
loswichitavikin = Team.objects.get(team_name = "Vikings", location = "Wichita")
wichita_players = loswichitavikin.all_players.all()
wichita_current_ids = [player.id for player in loswichitavikin.curr_players.all()]
not_now_wichita = [player for player in wichita_players if player.id not in wichita_current_ids]
except Team.DoesNotExist:
not_now_wichita = []
"""
context = {'leagues': League.objects.all(), 'teams': Team.objects.all(),
'players': Player.objects.all(), 'baseball': baseball,
'women_league': women_league, 'hockey_league': hockey_league,
'not_football': not_football, 'conference': conference, 'atlanta':
atlanta, 'dallas': dallas, 'raptor': raptors, 'city': city, 'ti':
ti, 'order': order, 'Iorder': Iorder, 'cooper': cooper, 'joshua':
joshua, 'josh': josh, 'aw': aw, 'atlantic': atlantic,
'player_boston': player_boston, 'jugadores': jugadores,
'amateur_soccer': amateur_soccer, 'soccer': soccer, 'sophia':
sophia, 'sophia_leagues': sophia_leagues, 'flores': flores, 'evans':
evans, 'thunder_cat': thunder_cat, 'not_now_wichita':
not_now_wichita, 'joshuas2': joshuas2, 'team12': team12,
'orderplayer': orderplayer}
return render(request, 'leagues/index.html', context)
def make_data(request):
team_maker.gen_leagues(10 * 2)
team_maker.gen_teams(50 * 2)
team_maker.gen_players(200 * 2)
return redirect('index')
<|reserved_special_token_1|>
from django.shortcuts import render, redirect
from .models import League, Team, Player
from django.db.models import Count
from . import team_maker
def index(request):
baseball = League.objects.filter(name__contains='Baseball')
women_league = League.objects.filter(name__contains='women')
hockey_league = League.objects.filter(sport__contains='hockey')
not_football = League.objects.exclude(sport='soccer')
conference = League.objects.filter(name__contains='conference')
atlanta = Team.objects.filter(location='Atlanta')
dallas = Team.objects.filter(location='Dallas')
raptors = Team.objects.filter(team_name__contains='Raptors')
city = Team.objects.filter(location__contains='city')
ti = Team.objects.filter(team_name__startswith='T')
order = Team.objects.order_by('location')
Iorder = Team.objects.order_by('-team_name')
cooper = Player.objects.filter(last_name='Cooper')
joshua = Player.objects.filter(first_name='Joshua')
josh = Player.objects.filter(last_name='Cooper') & Player.objects.exclude(
first_name='Joshua')
aw = Player.objects.filter(first_name='Alexander') | Player.objects.filter(
first_name='Wyatt')
atlantic = Team.objects.filter(league__name='Atlantic Soccer Conference')
player_boston = Player.objects.filter(curr_team__team_name='Penguin',
curr_team__location='Boston')
jugadores = Player.objects.filter(curr_team__league__name=
'International Collegiate Baseball Conference')
amateur_soccer = Player.objects.filter(curr_team__league__name=
'American Amateur Soccer Conference').filter(last_name='Lopez')
soccer = Player.objects.filter(all_teams__league__sport='Soccer')
sophia = Team.objects.filter(curr_players__first_name='Sophia')
sophia_leagues = League.objects.filter(teams__curr_players__first_name=
'Sophia')
flores = Player.objects.filter(last_name='FLores').exclude(
curr_team__team_name='Washington Roughriders')
evans = Team.objects.filter(all_players__first_name='Samuel',
all_players__last_name='Evans') & Team.objects.filter(
curr_players__first_name='Samuel', curr_players__last_name='Evans')
thunder_cat = Player.objects.filter(all_teams__team_name='Tigers'
) | Player.objects.filter(curr_team__team_name='Tigers')
try:
loswichitavikin = Team.objects.get(team_name='Vikings', location=
'Wichita')
wichita_players = loswichitavikin.all_players.all()
wichita_current_ids = [player.id for player in loswichitavikin.
curr_players.all()]
not_now_wichita = [player for player in wichita_players if player.
id not in wichita_current_ids]
except Team.DoesNotExist:
not_now_wichita = []
joshuas2 = Player.objects.filter(first_name='Joshua'
) & Player.objects.filter(all_teams__league__name=
'Atlantic Federation of Collegiate Baseball Athletics')
team12 = Team.objects.annotate(Count('curr_players')).annotate(Count(
'all_players')).filter(curr_players__count__gte=12).filter(
all_players__count__gte=12)
orderplayer = Player.objects.annotate(Count('all_teams')).order_by(
'all_teams__count')
"""
Detroit colt 4
try:
loswichitavikin = Team.objects.get(team_name = "Vikings", location = "Wichita")
wichita_players = loswichitavikin.all_players.all()
wichita_current_ids = [player.id for player in loswichitavikin.curr_players.all()]
not_now_wichita = [player for player in wichita_players if player.id not in wichita_current_ids]
except Team.DoesNotExist:
not_now_wichita = []
"""
context = {'leagues': League.objects.all(), 'teams': Team.objects.all(),
'players': Player.objects.all(), 'baseball': baseball,
'women_league': women_league, 'hockey_league': hockey_league,
'not_football': not_football, 'conference': conference, 'atlanta':
atlanta, 'dallas': dallas, 'raptor': raptors, 'city': city, 'ti':
ti, 'order': order, 'Iorder': Iorder, 'cooper': cooper, 'joshua':
joshua, 'josh': josh, 'aw': aw, 'atlantic': atlantic,
'player_boston': player_boston, 'jugadores': jugadores,
'amateur_soccer': amateur_soccer, 'soccer': soccer, 'sophia':
sophia, 'sophia_leagues': sophia_leagues, 'flores': flores, 'evans':
evans, 'thunder_cat': thunder_cat, 'not_now_wichita':
not_now_wichita, 'joshuas2': joshuas2, 'team12': team12,
'orderplayer': orderplayer}
return render(request, 'leagues/index.html', context)
def make_data(request):
team_maker.gen_leagues(10 * 2)
team_maker.gen_teams(50 * 2)
team_maker.gen_players(200 * 2)
return redirect('index')
<|reserved_special_token_1|>
from django.shortcuts import render, redirect
from .models import League, Team, Player
from django.db.models import Count
from . import team_maker
def index(request):
baseball = League.objects.filter(name__contains='Baseball')
women_league = League.objects.filter(name__contains='women')
hockey_league = League.objects.filter(sport__contains='hockey')
not_football = League.objects.exclude(sport='soccer')
conference = League.objects.filter(name__contains='conference')
atlanta = Team.objects.filter(location='Atlanta')
dallas = Team.objects.filter(location='Dallas')
raptors = Team.objects.filter(team_name__contains='Raptors')
city = Team.objects.filter(location__contains='city')
ti = Team.objects.filter(team_name__startswith='T')
order = Team.objects.order_by('location')
Iorder = Team.objects.order_by('-team_name')
cooper = Player.objects.filter(last_name='Cooper')
joshua = Player.objects.filter(first_name='Joshua')
josh = Player.objects.filter(
last_name='Cooper') & Player.objects.exclude(first_name='Joshua')
aw = Player.objects.filter(
first_name='Alexander') | Player.objects.filter(first_name='Wyatt')
# second part
atlantic = Team.objects.filter(league__name='Atlantic Soccer Conference')
player_boston = Player.objects.filter(
curr_team__team_name='Penguin', curr_team__location='Boston')
jugadores = Player.objects.filter(
curr_team__league__name='International Collegiate Baseball Conference')
amateur_soccer = Player.objects.filter(
curr_team__league__name='American Amateur Soccer Conference').filter(last_name='Lopez')
soccer = Player.objects.filter(all_teams__league__sport='Soccer')
sophia = Team.objects.filter(curr_players__first_name='Sophia')
sophia_leagues = League.objects.filter(
teams__curr_players__first_name='Sophia')
flores = Player.objects.filter(last_name='FLores').exclude(
curr_team__team_name='Washington Roughriders')
evans = Team.objects.filter(all_players__first_name='Samuel', all_players__last_name='Evans') & Team.objects.filter(
curr_players__first_name='Samuel', curr_players__last_name='Evans')
thunder_cat = Player.objects.filter(
all_teams__team_name='Tigers') | Player.objects.filter(curr_team__team_name='Tigers')
# whichitas team
try:
loswichitavikin = Team.objects.get(
team_name="Vikings", location="Wichita")
wichita_players = loswichitavikin.all_players.all()
wichita_current_ids = [
player.id for player in loswichitavikin.curr_players.all()]
not_now_wichita = [
player for player in wichita_players if player.id not in wichita_current_ids]
except Team.DoesNotExist:
not_now_wichita = []
joshuas2 = Player.objects.filter(first_name='Joshua') & Player.objects.filter(
all_teams__league__name='Atlantic Federation of Collegiate Baseball Athletics')
team12 = Team.objects.annotate(Count('curr_players')).annotate(Count(
'all_players')).filter(curr_players__count__gte=12).filter(all_players__count__gte=12)
orderplayer = Player.objects.annotate(
Count('all_teams')).order_by('all_teams__count')
'''
Detroit colt 4
try:
loswichitavikin = Team.objects.get(team_name = "Vikings", location = "Wichita")
wichita_players = loswichitavikin.all_players.all()
wichita_current_ids = [player.id for player in loswichitavikin.curr_players.all()]
not_now_wichita = [player for player in wichita_players if player.id not in wichita_current_ids]
except Team.DoesNotExist:
not_now_wichita = []
'''
# jacob 12
context = {
"leagues": League.objects.all(),
"teams": Team.objects.all(),
"players": Player.objects.all(),
'baseball': baseball,
'women_league': women_league,
'hockey_league': hockey_league,
'not_football': not_football,
'conference': conference,
'atlanta': atlanta,
'dallas': dallas,
'raptor': raptors,
'city': city,
'ti': ti,
'order': order,
'Iorder': Iorder,
'cooper': cooper,
'joshua': joshua,
'josh': josh,
'aw': aw,
'atlantic': atlantic,
'player_boston': player_boston,
'jugadores': jugadores,
'amateur_soccer': amateur_soccer,
'soccer': soccer,
'sophia': sophia,
'sophia_leagues': sophia_leagues,
'flores': flores,
'evans': evans,
'thunder_cat': thunder_cat,
'not_now_wichita': not_now_wichita,
'joshuas2': joshuas2,
'team12': team12,
'orderplayer': orderplayer
}
return render(request, "leagues/index.html", context)
def make_data(request):
    """Seed the database with randomly generated leagues, teams and players,
    then bounce back to the index page."""
    team_maker.gen_leagues(20)
    team_maker.gen_teams(100)
    team_maker.gen_players(400)
    return redirect("index")
|
flexible
|
{
"blob_id": "49703775da87e8cbbe78a69c91a68128c3fd78e1",
"index": 3363,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef index(request):\n baseball = League.objects.filter(name__contains='Baseball')\n women_league = League.objects.filter(name__contains='women')\n hockey_league = League.objects.filter(sport__contains='hockey')\n not_football = League.objects.exclude(sport='soccer')\n conference = League.objects.filter(name__contains='conference')\n atlanta = Team.objects.filter(location='Atlanta')\n dallas = Team.objects.filter(location='Dallas')\n raptors = Team.objects.filter(team_name__contains='Raptors')\n city = Team.objects.filter(location__contains='city')\n ti = Team.objects.filter(team_name__startswith='T')\n order = Team.objects.order_by('location')\n Iorder = Team.objects.order_by('-team_name')\n cooper = Player.objects.filter(last_name='Cooper')\n joshua = Player.objects.filter(first_name='Joshua')\n josh = Player.objects.filter(last_name='Cooper') & Player.objects.exclude(\n first_name='Joshua')\n aw = Player.objects.filter(first_name='Alexander') | Player.objects.filter(\n first_name='Wyatt')\n atlantic = Team.objects.filter(league__name='Atlantic Soccer Conference')\n player_boston = Player.objects.filter(curr_team__team_name='Penguin',\n curr_team__location='Boston')\n jugadores = Player.objects.filter(curr_team__league__name=\n 'International Collegiate Baseball Conference')\n amateur_soccer = Player.objects.filter(curr_team__league__name=\n 'American Amateur Soccer Conference').filter(last_name='Lopez')\n soccer = Player.objects.filter(all_teams__league__sport='Soccer')\n sophia = Team.objects.filter(curr_players__first_name='Sophia')\n sophia_leagues = League.objects.filter(teams__curr_players__first_name=\n 'Sophia')\n flores = Player.objects.filter(last_name='FLores').exclude(\n curr_team__team_name='Washington Roughriders')\n evans = Team.objects.filter(all_players__first_name='Samuel',\n all_players__last_name='Evans') & Team.objects.filter(\n curr_players__first_name='Samuel', curr_players__last_name='Evans')\n thunder_cat = 
Player.objects.filter(all_teams__team_name='Tigers'\n ) | Player.objects.filter(curr_team__team_name='Tigers')\n try:\n loswichitavikin = Team.objects.get(team_name='Vikings', location=\n 'Wichita')\n wichita_players = loswichitavikin.all_players.all()\n wichita_current_ids = [player.id for player in loswichitavikin.\n curr_players.all()]\n not_now_wichita = [player for player in wichita_players if player.\n id not in wichita_current_ids]\n except Team.DoesNotExist:\n not_now_wichita = []\n joshuas2 = Player.objects.filter(first_name='Joshua'\n ) & Player.objects.filter(all_teams__league__name=\n 'Atlantic Federation of Collegiate Baseball Athletics')\n team12 = Team.objects.annotate(Count('curr_players')).annotate(Count(\n 'all_players')).filter(curr_players__count__gte=12).filter(\n all_players__count__gte=12)\n orderplayer = Player.objects.annotate(Count('all_teams')).order_by(\n 'all_teams__count')\n \"\"\"\n\tDetroit colt 4 \n\ttry:\n\t\tloswichitavikin = Team.objects.get(team_name = \"Vikings\", location = \"Wichita\")\n\t\twichita_players = loswichitavikin.all_players.all()\n\t\twichita_current_ids = [player.id for player in loswichitavikin.curr_players.all()]\n\t\tnot_now_wichita = [player for player in wichita_players if player.id not in wichita_current_ids]\n\n\texcept Team.DoesNotExist:\n\t\tnot_now_wichita = []\n\"\"\"\n context = {'leagues': League.objects.all(), 'teams': Team.objects.all(),\n 'players': Player.objects.all(), 'baseball': baseball,\n 'women_league': women_league, 'hockey_league': hockey_league,\n 'not_football': not_football, 'conference': conference, 'atlanta':\n atlanta, 'dallas': dallas, 'raptor': raptors, 'city': city, 'ti':\n ti, 'order': order, 'Iorder': Iorder, 'cooper': cooper, 'joshua':\n joshua, 'josh': josh, 'aw': aw, 'atlantic': atlantic,\n 'player_boston': player_boston, 'jugadores': jugadores,\n 'amateur_soccer': amateur_soccer, 'soccer': soccer, 'sophia':\n sophia, 'sophia_leagues': sophia_leagues, 'flores': flores, 
'evans':\n evans, 'thunder_cat': thunder_cat, 'not_now_wichita':\n not_now_wichita, 'joshuas2': joshuas2, 'team12': team12,\n 'orderplayer': orderplayer}\n return render(request, 'leagues/index.html', context)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef index(request):\n baseball = League.objects.filter(name__contains='Baseball')\n women_league = League.objects.filter(name__contains='women')\n hockey_league = League.objects.filter(sport__contains='hockey')\n not_football = League.objects.exclude(sport='soccer')\n conference = League.objects.filter(name__contains='conference')\n atlanta = Team.objects.filter(location='Atlanta')\n dallas = Team.objects.filter(location='Dallas')\n raptors = Team.objects.filter(team_name__contains='Raptors')\n city = Team.objects.filter(location__contains='city')\n ti = Team.objects.filter(team_name__startswith='T')\n order = Team.objects.order_by('location')\n Iorder = Team.objects.order_by('-team_name')\n cooper = Player.objects.filter(last_name='Cooper')\n joshua = Player.objects.filter(first_name='Joshua')\n josh = Player.objects.filter(last_name='Cooper') & Player.objects.exclude(\n first_name='Joshua')\n aw = Player.objects.filter(first_name='Alexander') | Player.objects.filter(\n first_name='Wyatt')\n atlantic = Team.objects.filter(league__name='Atlantic Soccer Conference')\n player_boston = Player.objects.filter(curr_team__team_name='Penguin',\n curr_team__location='Boston')\n jugadores = Player.objects.filter(curr_team__league__name=\n 'International Collegiate Baseball Conference')\n amateur_soccer = Player.objects.filter(curr_team__league__name=\n 'American Amateur Soccer Conference').filter(last_name='Lopez')\n soccer = Player.objects.filter(all_teams__league__sport='Soccer')\n sophia = Team.objects.filter(curr_players__first_name='Sophia')\n sophia_leagues = League.objects.filter(teams__curr_players__first_name=\n 'Sophia')\n flores = Player.objects.filter(last_name='FLores').exclude(\n curr_team__team_name='Washington Roughriders')\n evans = Team.objects.filter(all_players__first_name='Samuel',\n all_players__last_name='Evans') & Team.objects.filter(\n curr_players__first_name='Samuel', curr_players__last_name='Evans')\n thunder_cat = 
Player.objects.filter(all_teams__team_name='Tigers'\n ) | Player.objects.filter(curr_team__team_name='Tigers')\n try:\n loswichitavikin = Team.objects.get(team_name='Vikings', location=\n 'Wichita')\n wichita_players = loswichitavikin.all_players.all()\n wichita_current_ids = [player.id for player in loswichitavikin.\n curr_players.all()]\n not_now_wichita = [player for player in wichita_players if player.\n id not in wichita_current_ids]\n except Team.DoesNotExist:\n not_now_wichita = []\n joshuas2 = Player.objects.filter(first_name='Joshua'\n ) & Player.objects.filter(all_teams__league__name=\n 'Atlantic Federation of Collegiate Baseball Athletics')\n team12 = Team.objects.annotate(Count('curr_players')).annotate(Count(\n 'all_players')).filter(curr_players__count__gte=12).filter(\n all_players__count__gte=12)\n orderplayer = Player.objects.annotate(Count('all_teams')).order_by(\n 'all_teams__count')\n \"\"\"\n\tDetroit colt 4 \n\ttry:\n\t\tloswichitavikin = Team.objects.get(team_name = \"Vikings\", location = \"Wichita\")\n\t\twichita_players = loswichitavikin.all_players.all()\n\t\twichita_current_ids = [player.id for player in loswichitavikin.curr_players.all()]\n\t\tnot_now_wichita = [player for player in wichita_players if player.id not in wichita_current_ids]\n\n\texcept Team.DoesNotExist:\n\t\tnot_now_wichita = []\n\"\"\"\n context = {'leagues': League.objects.all(), 'teams': Team.objects.all(),\n 'players': Player.objects.all(), 'baseball': baseball,\n 'women_league': women_league, 'hockey_league': hockey_league,\n 'not_football': not_football, 'conference': conference, 'atlanta':\n atlanta, 'dallas': dallas, 'raptor': raptors, 'city': city, 'ti':\n ti, 'order': order, 'Iorder': Iorder, 'cooper': cooper, 'joshua':\n joshua, 'josh': josh, 'aw': aw, 'atlantic': atlantic,\n 'player_boston': player_boston, 'jugadores': jugadores,\n 'amateur_soccer': amateur_soccer, 'soccer': soccer, 'sophia':\n sophia, 'sophia_leagues': sophia_leagues, 'flores': flores, 
'evans':\n evans, 'thunder_cat': thunder_cat, 'not_now_wichita':\n not_now_wichita, 'joshuas2': joshuas2, 'team12': team12,\n 'orderplayer': orderplayer}\n return render(request, 'leagues/index.html', context)\n\n\ndef make_data(request):\n team_maker.gen_leagues(10 * 2)\n team_maker.gen_teams(50 * 2)\n team_maker.gen_players(200 * 2)\n return redirect('index')\n",
"step-4": "from django.shortcuts import render, redirect\nfrom .models import League, Team, Player\nfrom django.db.models import Count\nfrom . import team_maker\n\n\ndef index(request):\n baseball = League.objects.filter(name__contains='Baseball')\n women_league = League.objects.filter(name__contains='women')\n hockey_league = League.objects.filter(sport__contains='hockey')\n not_football = League.objects.exclude(sport='soccer')\n conference = League.objects.filter(name__contains='conference')\n atlanta = Team.objects.filter(location='Atlanta')\n dallas = Team.objects.filter(location='Dallas')\n raptors = Team.objects.filter(team_name__contains='Raptors')\n city = Team.objects.filter(location__contains='city')\n ti = Team.objects.filter(team_name__startswith='T')\n order = Team.objects.order_by('location')\n Iorder = Team.objects.order_by('-team_name')\n cooper = Player.objects.filter(last_name='Cooper')\n joshua = Player.objects.filter(first_name='Joshua')\n josh = Player.objects.filter(last_name='Cooper') & Player.objects.exclude(\n first_name='Joshua')\n aw = Player.objects.filter(first_name='Alexander') | Player.objects.filter(\n first_name='Wyatt')\n atlantic = Team.objects.filter(league__name='Atlantic Soccer Conference')\n player_boston = Player.objects.filter(curr_team__team_name='Penguin',\n curr_team__location='Boston')\n jugadores = Player.objects.filter(curr_team__league__name=\n 'International Collegiate Baseball Conference')\n amateur_soccer = Player.objects.filter(curr_team__league__name=\n 'American Amateur Soccer Conference').filter(last_name='Lopez')\n soccer = Player.objects.filter(all_teams__league__sport='Soccer')\n sophia = Team.objects.filter(curr_players__first_name='Sophia')\n sophia_leagues = League.objects.filter(teams__curr_players__first_name=\n 'Sophia')\n flores = Player.objects.filter(last_name='FLores').exclude(\n curr_team__team_name='Washington Roughriders')\n evans = Team.objects.filter(all_players__first_name='Samuel',\n 
all_players__last_name='Evans') & Team.objects.filter(\n curr_players__first_name='Samuel', curr_players__last_name='Evans')\n thunder_cat = Player.objects.filter(all_teams__team_name='Tigers'\n ) | Player.objects.filter(curr_team__team_name='Tigers')\n try:\n loswichitavikin = Team.objects.get(team_name='Vikings', location=\n 'Wichita')\n wichita_players = loswichitavikin.all_players.all()\n wichita_current_ids = [player.id for player in loswichitavikin.\n curr_players.all()]\n not_now_wichita = [player for player in wichita_players if player.\n id not in wichita_current_ids]\n except Team.DoesNotExist:\n not_now_wichita = []\n joshuas2 = Player.objects.filter(first_name='Joshua'\n ) & Player.objects.filter(all_teams__league__name=\n 'Atlantic Federation of Collegiate Baseball Athletics')\n team12 = Team.objects.annotate(Count('curr_players')).annotate(Count(\n 'all_players')).filter(curr_players__count__gte=12).filter(\n all_players__count__gte=12)\n orderplayer = Player.objects.annotate(Count('all_teams')).order_by(\n 'all_teams__count')\n \"\"\"\n\tDetroit colt 4 \n\ttry:\n\t\tloswichitavikin = Team.objects.get(team_name = \"Vikings\", location = \"Wichita\")\n\t\twichita_players = loswichitavikin.all_players.all()\n\t\twichita_current_ids = [player.id for player in loswichitavikin.curr_players.all()]\n\t\tnot_now_wichita = [player for player in wichita_players if player.id not in wichita_current_ids]\n\n\texcept Team.DoesNotExist:\n\t\tnot_now_wichita = []\n\"\"\"\n context = {'leagues': League.objects.all(), 'teams': Team.objects.all(),\n 'players': Player.objects.all(), 'baseball': baseball,\n 'women_league': women_league, 'hockey_league': hockey_league,\n 'not_football': not_football, 'conference': conference, 'atlanta':\n atlanta, 'dallas': dallas, 'raptor': raptors, 'city': city, 'ti':\n ti, 'order': order, 'Iorder': Iorder, 'cooper': cooper, 'joshua':\n joshua, 'josh': josh, 'aw': aw, 'atlantic': atlantic,\n 'player_boston': player_boston, 'jugadores': 
jugadores,\n 'amateur_soccer': amateur_soccer, 'soccer': soccer, 'sophia':\n sophia, 'sophia_leagues': sophia_leagues, 'flores': flores, 'evans':\n evans, 'thunder_cat': thunder_cat, 'not_now_wichita':\n not_now_wichita, 'joshuas2': joshuas2, 'team12': team12,\n 'orderplayer': orderplayer}\n return render(request, 'leagues/index.html', context)\n\n\ndef make_data(request):\n team_maker.gen_leagues(10 * 2)\n team_maker.gen_teams(50 * 2)\n team_maker.gen_players(200 * 2)\n return redirect('index')\n",
"step-5": "from django.shortcuts import render, redirect\nfrom .models import League, Team, Player\nfrom django.db.models import Count\n\nfrom . import team_maker\n\n\ndef index(request):\n\n baseball = League.objects.filter(name__contains='Baseball')\n women_league = League.objects.filter(name__contains='women')\n hockey_league = League.objects.filter(sport__contains='hockey')\n not_football = League.objects.exclude(sport='soccer')\n conference = League.objects.filter(name__contains='conference')\n atlanta = Team.objects.filter(location='Atlanta')\n dallas = Team.objects.filter(location='Dallas')\n raptors = Team.objects.filter(team_name__contains='Raptors')\n city = Team.objects.filter(location__contains='city')\n ti = Team.objects.filter(team_name__startswith='T')\n order = Team.objects.order_by('location')\n Iorder = Team.objects.order_by('-team_name')\n cooper = Player.objects.filter(last_name='Cooper')\n joshua = Player.objects.filter(first_name='Joshua')\n josh = Player.objects.filter(\n last_name='Cooper') & Player.objects.exclude(first_name='Joshua')\n aw = Player.objects.filter(\n first_name='Alexander') | Player.objects.filter(first_name='Wyatt')\n # second part\n atlantic = Team.objects.filter(league__name='Atlantic Soccer Conference')\n player_boston = Player.objects.filter(\n curr_team__team_name='Penguin', curr_team__location='Boston')\n jugadores = Player.objects.filter(\n curr_team__league__name='International Collegiate Baseball Conference')\n amateur_soccer = Player.objects.filter(\n curr_team__league__name='American Amateur Soccer Conference').filter(last_name='Lopez')\n soccer = Player.objects.filter(all_teams__league__sport='Soccer')\n sophia = Team.objects.filter(curr_players__first_name='Sophia')\n sophia_leagues = League.objects.filter(\n teams__curr_players__first_name='Sophia')\n flores = Player.objects.filter(last_name='FLores').exclude(\n curr_team__team_name='Washington Roughriders')\n evans = 
Team.objects.filter(all_players__first_name='Samuel', all_players__last_name='Evans') & Team.objects.filter(\n curr_players__first_name='Samuel', curr_players__last_name='Evans')\n thunder_cat = Player.objects.filter(\n all_teams__team_name='Tigers') | Player.objects.filter(curr_team__team_name='Tigers')\n # whichitas team\n\n try:\n loswichitavikin = Team.objects.get(\n team_name=\"Vikings\", location=\"Wichita\")\n wichita_players = loswichitavikin.all_players.all()\n wichita_current_ids = [\n player.id for player in loswichitavikin.curr_players.all()]\n not_now_wichita = [\n player for player in wichita_players if player.id not in wichita_current_ids]\n\n except Team.DoesNotExist:\n not_now_wichita = []\n\n joshuas2 = Player.objects.filter(first_name='Joshua') & Player.objects.filter(\n all_teams__league__name='Atlantic Federation of Collegiate Baseball Athletics')\n team12 = Team.objects.annotate(Count('curr_players')).annotate(Count(\n 'all_players')).filter(curr_players__count__gte=12).filter(all_players__count__gte=12)\n orderplayer = Player.objects.annotate(\n Count('all_teams')).order_by('all_teams__count')\n\n '''\n\tDetroit colt 4 \n\ttry:\n\t\tloswichitavikin = Team.objects.get(team_name = \"Vikings\", location = \"Wichita\")\n\t\twichita_players = loswichitavikin.all_players.all()\n\t\twichita_current_ids = [player.id for player in loswichitavikin.curr_players.all()]\n\t\tnot_now_wichita = [player for player in wichita_players if player.id not in wichita_current_ids]\n\n\texcept Team.DoesNotExist:\n\t\tnot_now_wichita = []\n'''\n # jacob 12\n\n context = {\n \"leagues\": League.objects.all(),\n \"teams\": Team.objects.all(),\n \"players\": Player.objects.all(),\n 'baseball': baseball,\n 'women_league': women_league,\n 'hockey_league': hockey_league,\n 'not_football': not_football,\n 'conference': conference,\n 'atlanta': atlanta,\n 'dallas': dallas,\n 'raptor': raptors,\n 'city': city,\n 'ti': ti,\n 'order': order,\n 'Iorder': Iorder,\n 'cooper': 
cooper,\n 'joshua': joshua,\n 'josh': josh,\n 'aw': aw,\n 'atlantic': atlantic,\n 'player_boston': player_boston,\n 'jugadores': jugadores,\n 'amateur_soccer': amateur_soccer,\n 'soccer': soccer,\n 'sophia': sophia,\n 'sophia_leagues': sophia_leagues,\n 'flores': flores,\n 'evans': evans,\n 'thunder_cat': thunder_cat,\n 'not_now_wichita': not_now_wichita,\n 'joshuas2': joshuas2,\n 'team12': team12,\n 'orderplayer': orderplayer\n\n }\n return render(request, \"leagues/index.html\", context)\n\n\ndef make_data(request):\n team_maker.gen_leagues(10*2)\n team_maker.gen_teams(50*2)\n team_maker.gen_players(200*2)\n\n return redirect(\"index\")\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from app_auth.recaptcha.services.recaptcha_service import validate_recaptcha
from django.shortcuts import render, redirect
from django.contrib import auth
from django.views import View
from rest_framework.permissions import IsAuthenticated
from rest_framework.views import APIView
from rest_framework.response import Response
from .common.bearer_authentication import CustomBearerAuthentication
from .models import User
from .forms import UserCreationForm
from .serializers import UserSerializer
from .user_backend import UserBackend
from .common.token_utils import get_or_set_token
from app.common.meta_config import get_meta
class Auth(View):
    """Base view providing credential checking against the custom user backend."""

    auth_class = UserBackend()

    def authenticate(self, request, username, password):
        """Check *username*/*password*; on success start a session.

        Returns True only when the backend finds a matching, active user.
        """
        user = self.auth_class.authenticate(username=username, password=password)
        # Guard clause: missing or deactivated account means no login.
        if user is None or not user.is_active:
            return False
        auth.login(request, user)
        return True
class Login(Auth):
    """Handle a plain (non-templated) login form submission."""

    def post(self, request):
        """Log the user in when the credentials check out; always land on '/'."""
        username = request.POST.get('username', '')
        password = request.POST.get('password', '')
        if self.authenticate(request, username, password):
            get_or_set_token(username)
        # Same destination on success and failure; no error feedback here.
        return redirect('/')
def logout(request):
    """End the current session and send the visitor back to the home page."""
    auth.logout(request)
    return redirect('/')
class Signup(Auth):
    """Handle a plain (non-templated) registration form submission."""

    form_class = UserCreationForm

    def post(self, request):
        """Create the account when the form validates, then sign the user in."""
        form = self.form_class(request.POST)
        if form.is_valid():
            # Defer the INSERT until the password has been hashed.
            new_user = form.save(commit=False)
            email = form.cleaned_data['email']
            raw_password = form.cleaned_data['password1']
            new_user.set_password(raw_password)
            new_user.save()
            if self.authenticate(request, email, raw_password):
                return redirect('/')
        # Invalid form or failed authentication: fall back to the home page.
        return redirect('/')
class UserViewSet(APIView):
    """Read-only user listing, newest first, behind bearer-token authentication."""

    authentication_classes = [CustomBearerAuthentication]
    permission_classes = [IsAuthenticated]

    def get(self, request, format=None):
        """Return every user serialized, plus the auth token that was presented."""
        users = User.objects.all().order_by('-created_at')
        data = UserSerializer(users, many=True).data
        payload = {
            'users': {
                'data': data,
                'page': 1,
                'count': len(data),
            },
            'auth': str(request.auth),
        }
        return Response(payload)
class LoginView(Auth):
    """Templated login page with reCAPTCHA verification on submit."""

    template_name = 'app/login.html'

    def get(self, request):
        """Render the login form."""
        return render(request, self.template_name, {'meta': get_meta('LoginView')})

    def post(self, request):
        """Verify reCAPTCHA, then attempt login; re-render with errors on failure."""
        username = request.POST.get('username', '')
        password = request.POST.get('password', '')
        if not validate_recaptcha(request.POST.get('g-recaptcha-response')):
            return redirect('/errors/unverified')
        if self.authenticate(request, username, password):
            get_or_set_token(username)
            return redirect('/')
        failure_context = {
            'errors': {
                'authentication': 'Username or password is incorrect.'
            },
            'meta': get_meta('LoginView'),
        }
        return render(request, self.template_name, failure_context)
class SignupView(Auth):
    """Templated signup page with reCAPTCHA verification on submit."""

    template_name = 'app/signup.html'
    form_class = UserCreationForm

    def get(self, request):
        """Render the signup form."""
        return render(request, self.template_name, {'meta': get_meta('SignupView')})

    def post(self, request):
        """Verify reCAPTCHA, create the account, then sign the new user in."""
        form = self.form_class(request.POST)
        if not validate_recaptcha(request.POST.get('g-recaptcha-response')):
            return redirect('/errors/unverified')
        if form.is_valid():
            # Defer the INSERT until the password has been hashed.
            new_user = form.save(commit=False)
            email = form.cleaned_data['email']
            raw_password = form.cleaned_data['password1']
            new_user.set_password(raw_password)
            new_user.save()
            if self.authenticate(request, email, raw_password):
                return redirect('/')
            auth_failure = {
                'errors': {
                    'authentication': 'Username or password is incorrect.'
                },
                'meta': get_meta('SignupView'),
            }
            return render(request, self.template_name, auth_failure)
        form_failure = {
            'errors': form.errors.get_json_data(),
            'meta': get_meta('SignupView'),
        }
        return render(request, self.template_name, form_failure)
|
normal
|
{
"blob_id": "b2eb2d006d6285947cc5392e290af50f25a9f566",
"index": 4724,
"step-1": "<mask token>\n\n\nclass Signup(Auth):\n <mask token>\n <mask token>\n\n\nclass UserViewSet(APIView):\n authentication_classes = [CustomBearerAuthentication]\n permission_classes = [IsAuthenticated]\n\n def get(self, request, format=None):\n queryset = User.objects.all().order_by('-created_at')\n serializer = UserSerializer(queryset, many=True)\n content = {'users': {'data': serializer.data, 'page': 1, 'count':\n len(serializer.data)}, 'auth': str(request.auth)}\n return Response(content)\n\n\nclass LoginView(Auth):\n template_name = 'app/login.html'\n\n def get(self, request):\n return render(request, self.template_name, {'meta': get_meta(\n 'LoginView')})\n\n def post(self, request):\n username = request.POST.get('username', '')\n password = request.POST.get('password', '')\n recaptcha = request.POST.get('g-recaptcha-response')\n valid = validate_recaptcha(recaptcha)\n if not valid:\n return redirect('/errors/unverified')\n if self.authenticate(request, username, password):\n get_or_set_token(username)\n return redirect('/')\n return render(request, self.template_name, {'errors': {\n 'authentication': 'Username or password is incorrect.'}, 'meta':\n get_meta('LoginView')})\n\n\nclass SignupView(Auth):\n template_name = 'app/signup.html'\n form_class = UserCreationForm\n\n def get(self, request):\n return render(request, self.template_name, {'meta': get_meta(\n 'SignupView')})\n\n def post(self, request):\n form = self.form_class(request.POST)\n recaptcha = request.POST.get('g-recaptcha-response')\n valid = validate_recaptcha(recaptcha)\n if not valid:\n return redirect('/errors/unverified')\n if form.is_valid():\n user = form.save(commit=False)\n email = form.cleaned_data['email']\n password = form.cleaned_data['password1']\n user.set_password(password)\n user.save()\n if self.authenticate(request, email, password):\n return redirect('/')\n else:\n return render(request, self.template_name, {'errors': {\n 'authentication': 'Username or password is 
incorrect.'},\n 'meta': get_meta('SignupView')})\n return render(request, self.template_name, {'errors': form.errors.\n get_json_data(), 'meta': get_meta('SignupView')})\n",
"step-2": "<mask token>\n\n\nclass Signup(Auth):\n form_class = UserCreationForm\n\n def post(self, request):\n form = self.form_class(request.POST)\n if form.is_valid():\n user = form.save(commit=False)\n email = form.cleaned_data['email']\n password = form.cleaned_data['password1']\n user.set_password(password)\n user.save()\n if self.authenticate(request, email, password):\n return redirect('/')\n return redirect('/')\n\n\nclass UserViewSet(APIView):\n authentication_classes = [CustomBearerAuthentication]\n permission_classes = [IsAuthenticated]\n\n def get(self, request, format=None):\n queryset = User.objects.all().order_by('-created_at')\n serializer = UserSerializer(queryset, many=True)\n content = {'users': {'data': serializer.data, 'page': 1, 'count':\n len(serializer.data)}, 'auth': str(request.auth)}\n return Response(content)\n\n\nclass LoginView(Auth):\n template_name = 'app/login.html'\n\n def get(self, request):\n return render(request, self.template_name, {'meta': get_meta(\n 'LoginView')})\n\n def post(self, request):\n username = request.POST.get('username', '')\n password = request.POST.get('password', '')\n recaptcha = request.POST.get('g-recaptcha-response')\n valid = validate_recaptcha(recaptcha)\n if not valid:\n return redirect('/errors/unverified')\n if self.authenticate(request, username, password):\n get_or_set_token(username)\n return redirect('/')\n return render(request, self.template_name, {'errors': {\n 'authentication': 'Username or password is incorrect.'}, 'meta':\n get_meta('LoginView')})\n\n\nclass SignupView(Auth):\n template_name = 'app/signup.html'\n form_class = UserCreationForm\n\n def get(self, request):\n return render(request, self.template_name, {'meta': get_meta(\n 'SignupView')})\n\n def post(self, request):\n form = self.form_class(request.POST)\n recaptcha = request.POST.get('g-recaptcha-response')\n valid = validate_recaptcha(recaptcha)\n if not valid:\n return redirect('/errors/unverified')\n if form.is_valid():\n 
user = form.save(commit=False)\n email = form.cleaned_data['email']\n password = form.cleaned_data['password1']\n user.set_password(password)\n user.save()\n if self.authenticate(request, email, password):\n return redirect('/')\n else:\n return render(request, self.template_name, {'errors': {\n 'authentication': 'Username or password is incorrect.'},\n 'meta': get_meta('SignupView')})\n return render(request, self.template_name, {'errors': form.errors.\n get_json_data(), 'meta': get_meta('SignupView')})\n",
"step-3": "<mask token>\n\n\nclass Login(Auth):\n\n def post(self, request):\n username = request.POST.get('username', '')\n password = request.POST.get('password', '')\n if self.authenticate(request, username, password):\n get_or_set_token(username)\n return redirect('/')\n return redirect('/')\n\n\n<mask token>\n\n\nclass Signup(Auth):\n form_class = UserCreationForm\n\n def post(self, request):\n form = self.form_class(request.POST)\n if form.is_valid():\n user = form.save(commit=False)\n email = form.cleaned_data['email']\n password = form.cleaned_data['password1']\n user.set_password(password)\n user.save()\n if self.authenticate(request, email, password):\n return redirect('/')\n return redirect('/')\n\n\nclass UserViewSet(APIView):\n authentication_classes = [CustomBearerAuthentication]\n permission_classes = [IsAuthenticated]\n\n def get(self, request, format=None):\n queryset = User.objects.all().order_by('-created_at')\n serializer = UserSerializer(queryset, many=True)\n content = {'users': {'data': serializer.data, 'page': 1, 'count':\n len(serializer.data)}, 'auth': str(request.auth)}\n return Response(content)\n\n\nclass LoginView(Auth):\n template_name = 'app/login.html'\n\n def get(self, request):\n return render(request, self.template_name, {'meta': get_meta(\n 'LoginView')})\n\n def post(self, request):\n username = request.POST.get('username', '')\n password = request.POST.get('password', '')\n recaptcha = request.POST.get('g-recaptcha-response')\n valid = validate_recaptcha(recaptcha)\n if not valid:\n return redirect('/errors/unverified')\n if self.authenticate(request, username, password):\n get_or_set_token(username)\n return redirect('/')\n return render(request, self.template_name, {'errors': {\n 'authentication': 'Username or password is incorrect.'}, 'meta':\n get_meta('LoginView')})\n\n\nclass SignupView(Auth):\n template_name = 'app/signup.html'\n form_class = UserCreationForm\n\n def get(self, request):\n return render(request, 
self.template_name, {'meta': get_meta(\n 'SignupView')})\n\n def post(self, request):\n form = self.form_class(request.POST)\n recaptcha = request.POST.get('g-recaptcha-response')\n valid = validate_recaptcha(recaptcha)\n if not valid:\n return redirect('/errors/unverified')\n if form.is_valid():\n user = form.save(commit=False)\n email = form.cleaned_data['email']\n password = form.cleaned_data['password1']\n user.set_password(password)\n user.save()\n if self.authenticate(request, email, password):\n return redirect('/')\n else:\n return render(request, self.template_name, {'errors': {\n 'authentication': 'Username or password is incorrect.'},\n 'meta': get_meta('SignupView')})\n return render(request, self.template_name, {'errors': form.errors.\n get_json_data(), 'meta': get_meta('SignupView')})\n",
"step-4": "<mask token>\n\n\nclass Auth(View):\n <mask token>\n\n def authenticate(self, request, username, password):\n user = self.auth_class.authenticate(username=username, password=\n password)\n if user is not None:\n if user.is_active:\n auth.login(request, user)\n return True\n return False\n\n\nclass Login(Auth):\n\n def post(self, request):\n username = request.POST.get('username', '')\n password = request.POST.get('password', '')\n if self.authenticate(request, username, password):\n get_or_set_token(username)\n return redirect('/')\n return redirect('/')\n\n\n<mask token>\n\n\nclass Signup(Auth):\n form_class = UserCreationForm\n\n def post(self, request):\n form = self.form_class(request.POST)\n if form.is_valid():\n user = form.save(commit=False)\n email = form.cleaned_data['email']\n password = form.cleaned_data['password1']\n user.set_password(password)\n user.save()\n if self.authenticate(request, email, password):\n return redirect('/')\n return redirect('/')\n\n\nclass UserViewSet(APIView):\n authentication_classes = [CustomBearerAuthentication]\n permission_classes = [IsAuthenticated]\n\n def get(self, request, format=None):\n queryset = User.objects.all().order_by('-created_at')\n serializer = UserSerializer(queryset, many=True)\n content = {'users': {'data': serializer.data, 'page': 1, 'count':\n len(serializer.data)}, 'auth': str(request.auth)}\n return Response(content)\n\n\nclass LoginView(Auth):\n template_name = 'app/login.html'\n\n def get(self, request):\n return render(request, self.template_name, {'meta': get_meta(\n 'LoginView')})\n\n def post(self, request):\n username = request.POST.get('username', '')\n password = request.POST.get('password', '')\n recaptcha = request.POST.get('g-recaptcha-response')\n valid = validate_recaptcha(recaptcha)\n if not valid:\n return redirect('/errors/unverified')\n if self.authenticate(request, username, password):\n get_or_set_token(username)\n return redirect('/')\n return render(request, 
self.template_name, {'errors': {\n 'authentication': 'Username or password is incorrect.'}, 'meta':\n get_meta('LoginView')})\n\n\nclass SignupView(Auth):\n template_name = 'app/signup.html'\n form_class = UserCreationForm\n\n def get(self, request):\n return render(request, self.template_name, {'meta': get_meta(\n 'SignupView')})\n\n def post(self, request):\n form = self.form_class(request.POST)\n recaptcha = request.POST.get('g-recaptcha-response')\n valid = validate_recaptcha(recaptcha)\n if not valid:\n return redirect('/errors/unverified')\n if form.is_valid():\n user = form.save(commit=False)\n email = form.cleaned_data['email']\n password = form.cleaned_data['password1']\n user.set_password(password)\n user.save()\n if self.authenticate(request, email, password):\n return redirect('/')\n else:\n return render(request, self.template_name, {'errors': {\n 'authentication': 'Username or password is incorrect.'},\n 'meta': get_meta('SignupView')})\n return render(request, self.template_name, {'errors': form.errors.\n get_json_data(), 'meta': get_meta('SignupView')})\n",
"step-5": "from app_auth.recaptcha.services.recaptcha_service import validate_recaptcha\nfrom django.shortcuts import render, redirect\nfrom django.contrib import auth\nfrom django.views import View\nfrom rest_framework.permissions import IsAuthenticated\nfrom rest_framework.views import APIView\nfrom rest_framework.response import Response\n\nfrom .common.bearer_authentication import CustomBearerAuthentication\nfrom .models import User\nfrom .forms import UserCreationForm\nfrom .serializers import UserSerializer\nfrom .user_backend import UserBackend\nfrom .common.token_utils import get_or_set_token\nfrom app.common.meta_config import get_meta\n\n\nclass Auth(View):\n auth_class = UserBackend()\n\n # Create your views here.\n def authenticate(self, request, username, password):\n user = self.auth_class.authenticate(username=username, password=password)\n if user is not None:\n if user.is_active:\n auth.login(request, user)\n return True\n return False\n\n\nclass Login(Auth):\n def post(self, request):\n username = request.POST.get('username', '')\n password = request.POST.get('password', '')\n\n # just so we can send back errors\n if self.authenticate(request, username, password):\n get_or_set_token(username)\n return redirect('/')\n\n return redirect('/')\n\n\ndef logout(request):\n auth.logout(request)\n return redirect('/')\n\n\nclass Signup(Auth):\n form_class = UserCreationForm\n\n def post(self, request):\n form = self.form_class(request.POST)\n if form.is_valid():\n user = form.save(commit=False) # not saved permanently in db yet\n\n # clean normalised data.\n email = form.cleaned_data['email']\n password = form.cleaned_data['password1']\n\n # password setting.\n user.set_password(password)\n\n # register user.\n user.save()\n\n if self.authenticate(request, email, password):\n return redirect('/')\n\n return redirect('/')\n\n\nclass UserViewSet(APIView):\n authentication_classes = [CustomBearerAuthentication]\n permission_classes = [IsAuthenticated]\n\n 
def get(self, request, format=None):\n queryset = User.objects.all().order_by('-created_at')\n serializer = UserSerializer(queryset, many=True)\n content = {\n 'users': {\n 'data': serializer.data,\n 'page': 1,\n 'count': len(serializer.data)\n },\n 'auth': str(request.auth),\n }\n return Response(content)\n\n\nclass LoginView(Auth):\n template_name = 'app/login.html'\n\n def get(self, request):\n return render(\n request, \n self.template_name, \n { 'meta': get_meta('LoginView') }\n )\n\n def post(self, request):\n username = request.POST.get('username', '')\n password = request.POST.get('password', '')\n recaptcha = request.POST.get('g-recaptcha-response')\n valid = validate_recaptcha(recaptcha)\n if (not valid): return redirect('/errors/unverified')\n # just so we can send back errors\n if self.authenticate(request, username, password):\n get_or_set_token(username)\n return redirect('/')\n \n return render(\n request,\n self.template_name,\n {\n 'errors': {\n 'authentication': 'Username or password is incorrect.'\n },\n 'meta': get_meta('LoginView')\n }\n )\n\n\nclass SignupView(Auth):\n template_name = 'app/signup.html'\n form_class = UserCreationForm\n\n def get(self, request):\n return render(request, self.template_name, { 'meta': get_meta('SignupView') })\n\n def post(self, request):\n form = self.form_class(request.POST)\n recaptcha = request.POST.get('g-recaptcha-response')\n valid = validate_recaptcha(recaptcha)\n if (not valid): return redirect('/errors/unverified')\n if form.is_valid():\n user = form.save(commit=False) # not saved permanently in db yet\n\n # clean normalised data.\n email = form.cleaned_data['email']\n password = form.cleaned_data['password1']\n\n # password setting.\n user.set_password(password)\n\n # register user.\n user.save()\n\n if self.authenticate(request, email, password):\n return redirect('/')\n else:\n return render(\n request, \n self.template_name,\n {\n 'errors': {\n 'authentication': 'Username or password is 
incorrect.'\n },\n 'meta': get_meta('SignupView')\n }\n )\n\n return render(\n request, \n self.template_name,\n {\n 'errors': form.errors.get_json_data(),\n 'meta': get_meta('SignupView')\n }\n )",
"step-ids": [
12,
14,
16,
18,
22
]
}
|
[
12,
14,
16,
18,
22
] |
#!/usr/bin/python3
# encoding: utf-8
import sys
import argparse
import logging
from pathlib import Path
module = sys.modules['__main__'].__file__
__author__ = 'FFX'
__version__ = '1.0'
log = logging.getLogger(module)
def parse_command_line(argv):
    """Parse command line argument. See -h option
    :param argv: arguments on the command line must include caller file name.
    """
    parser = argparse.ArgumentParser(
        description=module,
        formatter_class=argparse.RawDescriptionHelpFormatter,
    )
    parser.add_argument(
        '-n', '--name',
        dest="name", metavar="hostname", nargs='?', type=str, required=True,
        help="Define a virtual hostname",
    )
    parser.add_argument(
        '-o', '--os',
        dest="os", metavar="os", nargs='?', required=True, type=str,
        choices=['debian', 'mint', 'centos', 'mac', 'w7'],
        help="Define a operating system",
    )
    parser.add_argument(
        "--dry-run", action="store_true",
        help="Only print throughput, do not execute command.",
    )
    parser.add_argument(
        "--legacy", action="store_true",
        help="Use legacy install (from official repository).",
    )
    parser.add_argument(
        "--version", action="version",
        version="%(prog)s {}".format(__version__),
    )
    parser.add_argument(
        "-v", "--verbose",
        dest="verbose_count", action="count", default=0,
        help="increases log verbosity for each occurence.",
    )
    args = parser.parse_args(argv[1:])
    # Default is WARN (level 30); each -v lowers the level one notch (10 units).
    log.setLevel(max(3 - args.verbose_count, 0) * 10)
    return args
def main():
    """Main program. Sets up logging and do some work."""
    logging.basicConfig(stream=sys.stderr, level=logging.DEBUG,
                        format='%(name)s (%(levelname)s): %(message)s')

    # BUGFIX: the file imports `Path`, not `pathlib`, so `pathlib.Path`
    # raised NameError.
    kvm = Path("/disks/d/VMWare/KVM")
    # ISO image locations keyed by OS identifier.  Currently unused by the
    # body below — it only builds the virt-install command string.
    # NOTE(review): keys ('win7', ...) do not match the parser's --os
    # choices ('w7', 'debian', ...) — reconcile before wiring this in.
    iso_paths = {
        'win7': '/disks/d/OS/Windows/Windows_7/Windows_7_LITE_X64.iso',
        'win7_full': '/disks/d/OS/Windows/Windows_7/fr_windows_7_ultimate_x64_dvd_x15-65928.iso',
        'mint': '/disks/d/OS/Unix/Mint/linuxmint-18.3-cinnamon-64bit.iso',
        'solaris': '/disks/d/OS/Unix/Solaris/11/sol-11_3-text-x86.iso'
    }

    try:
        arguments = parse_command_line(sys.argv)
        # Assign args to variables.
        server = arguments.name
        # BUGFIX: the original rebound the name `os`, clobbering the ISO
        # mapping defined above; use a distinct local instead.
        os_name = arguments.os
        legacy = arguments.legacy
        dry = arguments.dry_run
        # BUGFIX: Path objects have .exists(), not .path.exists(), and the
        # disk path is the KVM directory joined with the host name (the
        # original referenced an undefined `kvm_path`).
        if kvm.exists():
            kvm_disk = kvm / server
            command = "virt-install --ram 2048 --disk path=${DIR_HOST}/${HOST}.qcow2,size=8 --vcpus 2 --os-type linux --os-variant ubuntuquantal --network bridge=virbr0"
            if dry:
                print(command)
                print(kvm_disk)

    except KeyboardInterrupt:
        log.error('Program interrupted!')
    finally:
        logging.shutdown()
if __name__ == "__main__":
sys.exit(main())
|
normal
|
{
"blob_id": "46adb1834f6013ca0f13a64f280182a805d76278",
"index": 215,
"step-1": "<mask token>\n\n\ndef parse_command_line(argv):\n \"\"\"Parse command line argument. See -h option\n :param argv: arguments on the command line must include caller file name.\n \"\"\"\n formatter_class = argparse.RawDescriptionHelpFormatter\n parser = argparse.ArgumentParser(description=module, formatter_class=\n formatter_class)\n parser.add_argument('-n', '--name', dest='name', metavar='hostname',\n nargs='?', type=str, required=True, help='Define a virtual hostname')\n parser.add_argument('-o', '--os', dest='os', metavar='os', nargs='?',\n required=True, type=str, choices=['debian', 'mint', 'centos', 'mac',\n 'w7'], help='Define a operating system')\n parser.add_argument('--dry-run', help=\n 'Only print throughput, do not execute command.', action='store_true')\n parser.add_argument('--legacy', help=\n 'Use legacy install (from official repository).', action='store_true')\n parser.add_argument('--version', action='version', version=\n '%(prog)s {}'.format(__version__))\n parser.add_argument('-v', '--verbose', dest='verbose_count', action=\n 'count', default=0, help='increases log verbosity for each occurence.')\n arguments = parser.parse_args(argv[1:])\n log.setLevel(max(3 - arguments.verbose_count, 0) * 10)\n return arguments\n\n\ndef main():\n \"\"\"Main program. 
Sets up logging and do some work.\"\"\"\n logging.basicConfig(stream=sys.stderr, level=logging.DEBUG, format=\n '%(name)s (%(levelname)s): %(message)s')\n kvm = pathlib.Path('/disks/d/VMWare/KVM')\n os = {'win7': '/disks/d/OS/Windows/Windows_7/Windows_7_LITE_X64.iso',\n 'win7_full':\n '/disks/d/OS/Windows/Windows_7/fr_windows_7_ultimate_x64_dvd_x15-65928.iso'\n , 'mint': '/disks/d/OS/Unix/Mint/linuxmint-18.3-cinnamon-64bit.iso',\n 'solaris': '/disks/d/OS/Unix/Solaris/11/sol-11_3-text-x86.iso'}\n try:\n arguments = parse_command_line(sys.argv)\n server = arguments.name\n os = arguments.os\n legacy = arguments.legacy\n dry = arguments.dry_run\n if kvm.path.exists():\n kvm_disk = kvm_path + server\n command = (\n 'virt-install --ram 2048 --disk path=${DIR_HOST}/${HOST}.qcow2,size=8 --vcpus 2 --os-type linux --os-variant ubuntuquantal --network bridge=virbr0'\n )\n if dry:\n print(command)\n print(kvm_disk)\n except KeyboardInterrupt:\n log.error('Program interrupted!')\n finally:\n logging.shutdown()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef parse_command_line(argv):\n \"\"\"Parse command line argument. See -h option\n :param argv: arguments on the command line must include caller file name.\n \"\"\"\n formatter_class = argparse.RawDescriptionHelpFormatter\n parser = argparse.ArgumentParser(description=module, formatter_class=\n formatter_class)\n parser.add_argument('-n', '--name', dest='name', metavar='hostname',\n nargs='?', type=str, required=True, help='Define a virtual hostname')\n parser.add_argument('-o', '--os', dest='os', metavar='os', nargs='?',\n required=True, type=str, choices=['debian', 'mint', 'centos', 'mac',\n 'w7'], help='Define a operating system')\n parser.add_argument('--dry-run', help=\n 'Only print throughput, do not execute command.', action='store_true')\n parser.add_argument('--legacy', help=\n 'Use legacy install (from official repository).', action='store_true')\n parser.add_argument('--version', action='version', version=\n '%(prog)s {}'.format(__version__))\n parser.add_argument('-v', '--verbose', dest='verbose_count', action=\n 'count', default=0, help='increases log verbosity for each occurence.')\n arguments = parser.parse_args(argv[1:])\n log.setLevel(max(3 - arguments.verbose_count, 0) * 10)\n return arguments\n\n\ndef main():\n \"\"\"Main program. 
Sets up logging and do some work.\"\"\"\n logging.basicConfig(stream=sys.stderr, level=logging.DEBUG, format=\n '%(name)s (%(levelname)s): %(message)s')\n kvm = pathlib.Path('/disks/d/VMWare/KVM')\n os = {'win7': '/disks/d/OS/Windows/Windows_7/Windows_7_LITE_X64.iso',\n 'win7_full':\n '/disks/d/OS/Windows/Windows_7/fr_windows_7_ultimate_x64_dvd_x15-65928.iso'\n , 'mint': '/disks/d/OS/Unix/Mint/linuxmint-18.3-cinnamon-64bit.iso',\n 'solaris': '/disks/d/OS/Unix/Solaris/11/sol-11_3-text-x86.iso'}\n try:\n arguments = parse_command_line(sys.argv)\n server = arguments.name\n os = arguments.os\n legacy = arguments.legacy\n dry = arguments.dry_run\n if kvm.path.exists():\n kvm_disk = kvm_path + server\n command = (\n 'virt-install --ram 2048 --disk path=${DIR_HOST}/${HOST}.qcow2,size=8 --vcpus 2 --os-type linux --os-variant ubuntuquantal --network bridge=virbr0'\n )\n if dry:\n print(command)\n print(kvm_disk)\n except KeyboardInterrupt:\n log.error('Program interrupted!')\n finally:\n logging.shutdown()\n\n\nif __name__ == '__main__':\n sys.exit(main())\n",
"step-3": "<mask token>\nmodule = sys.modules['__main__'].__file__\n__author__ = 'FFX'\n__version__ = '1.0'\nlog = logging.getLogger(module)\n\n\ndef parse_command_line(argv):\n \"\"\"Parse command line argument. See -h option\n :param argv: arguments on the command line must include caller file name.\n \"\"\"\n formatter_class = argparse.RawDescriptionHelpFormatter\n parser = argparse.ArgumentParser(description=module, formatter_class=\n formatter_class)\n parser.add_argument('-n', '--name', dest='name', metavar='hostname',\n nargs='?', type=str, required=True, help='Define a virtual hostname')\n parser.add_argument('-o', '--os', dest='os', metavar='os', nargs='?',\n required=True, type=str, choices=['debian', 'mint', 'centos', 'mac',\n 'w7'], help='Define a operating system')\n parser.add_argument('--dry-run', help=\n 'Only print throughput, do not execute command.', action='store_true')\n parser.add_argument('--legacy', help=\n 'Use legacy install (from official repository).', action='store_true')\n parser.add_argument('--version', action='version', version=\n '%(prog)s {}'.format(__version__))\n parser.add_argument('-v', '--verbose', dest='verbose_count', action=\n 'count', default=0, help='increases log verbosity for each occurence.')\n arguments = parser.parse_args(argv[1:])\n log.setLevel(max(3 - arguments.verbose_count, 0) * 10)\n return arguments\n\n\ndef main():\n \"\"\"Main program. 
Sets up logging and do some work.\"\"\"\n logging.basicConfig(stream=sys.stderr, level=logging.DEBUG, format=\n '%(name)s (%(levelname)s): %(message)s')\n kvm = pathlib.Path('/disks/d/VMWare/KVM')\n os = {'win7': '/disks/d/OS/Windows/Windows_7/Windows_7_LITE_X64.iso',\n 'win7_full':\n '/disks/d/OS/Windows/Windows_7/fr_windows_7_ultimate_x64_dvd_x15-65928.iso'\n , 'mint': '/disks/d/OS/Unix/Mint/linuxmint-18.3-cinnamon-64bit.iso',\n 'solaris': '/disks/d/OS/Unix/Solaris/11/sol-11_3-text-x86.iso'}\n try:\n arguments = parse_command_line(sys.argv)\n server = arguments.name\n os = arguments.os\n legacy = arguments.legacy\n dry = arguments.dry_run\n if kvm.path.exists():\n kvm_disk = kvm_path + server\n command = (\n 'virt-install --ram 2048 --disk path=${DIR_HOST}/${HOST}.qcow2,size=8 --vcpus 2 --os-type linux --os-variant ubuntuquantal --network bridge=virbr0'\n )\n if dry:\n print(command)\n print(kvm_disk)\n except KeyboardInterrupt:\n log.error('Program interrupted!')\n finally:\n logging.shutdown()\n\n\nif __name__ == '__main__':\n sys.exit(main())\n",
"step-4": "import sys\nimport argparse\nimport logging\nfrom pathlib import Path\nmodule = sys.modules['__main__'].__file__\n__author__ = 'FFX'\n__version__ = '1.0'\nlog = logging.getLogger(module)\n\n\ndef parse_command_line(argv):\n \"\"\"Parse command line argument. See -h option\n :param argv: arguments on the command line must include caller file name.\n \"\"\"\n formatter_class = argparse.RawDescriptionHelpFormatter\n parser = argparse.ArgumentParser(description=module, formatter_class=\n formatter_class)\n parser.add_argument('-n', '--name', dest='name', metavar='hostname',\n nargs='?', type=str, required=True, help='Define a virtual hostname')\n parser.add_argument('-o', '--os', dest='os', metavar='os', nargs='?',\n required=True, type=str, choices=['debian', 'mint', 'centos', 'mac',\n 'w7'], help='Define a operating system')\n parser.add_argument('--dry-run', help=\n 'Only print throughput, do not execute command.', action='store_true')\n parser.add_argument('--legacy', help=\n 'Use legacy install (from official repository).', action='store_true')\n parser.add_argument('--version', action='version', version=\n '%(prog)s {}'.format(__version__))\n parser.add_argument('-v', '--verbose', dest='verbose_count', action=\n 'count', default=0, help='increases log verbosity for each occurence.')\n arguments = parser.parse_args(argv[1:])\n log.setLevel(max(3 - arguments.verbose_count, 0) * 10)\n return arguments\n\n\ndef main():\n \"\"\"Main program. 
Sets up logging and do some work.\"\"\"\n logging.basicConfig(stream=sys.stderr, level=logging.DEBUG, format=\n '%(name)s (%(levelname)s): %(message)s')\n kvm = pathlib.Path('/disks/d/VMWare/KVM')\n os = {'win7': '/disks/d/OS/Windows/Windows_7/Windows_7_LITE_X64.iso',\n 'win7_full':\n '/disks/d/OS/Windows/Windows_7/fr_windows_7_ultimate_x64_dvd_x15-65928.iso'\n , 'mint': '/disks/d/OS/Unix/Mint/linuxmint-18.3-cinnamon-64bit.iso',\n 'solaris': '/disks/d/OS/Unix/Solaris/11/sol-11_3-text-x86.iso'}\n try:\n arguments = parse_command_line(sys.argv)\n server = arguments.name\n os = arguments.os\n legacy = arguments.legacy\n dry = arguments.dry_run\n if kvm.path.exists():\n kvm_disk = kvm_path + server\n command = (\n 'virt-install --ram 2048 --disk path=${DIR_HOST}/${HOST}.qcow2,size=8 --vcpus 2 --os-type linux --os-variant ubuntuquantal --network bridge=virbr0'\n )\n if dry:\n print(command)\n print(kvm_disk)\n except KeyboardInterrupt:\n log.error('Program interrupted!')\n finally:\n logging.shutdown()\n\n\nif __name__ == '__main__':\n sys.exit(main())\n",
"step-5": "#!/usr/bin/python3\n# encoding: utf-8\n\n\nimport sys\nimport argparse\nimport logging\nfrom pathlib import Path\n\n\nmodule = sys.modules['__main__'].__file__\n__author__ = 'FFX'\n__version__ = '1.0'\nlog = logging.getLogger(module)\n\n\ndef parse_command_line(argv):\n \"\"\"Parse command line argument. See -h option\n :param argv: arguments on the command line must include caller file name.\n \"\"\"\n formatter_class = argparse.RawDescriptionHelpFormatter\n parser = argparse.ArgumentParser(description=module,formatter_class=formatter_class)\n parser.add_argument('-n', '--name', dest=\"name\", metavar=\"hostname\",\n nargs='?', \n type=str,\n required=True,\n help=\"Define a virtual hostname\")\n parser.add_argument('-o', '--os', dest=\"os\", metavar=\"os\",\n nargs='?', \n required=True,\n type=str,\n choices=['debian','mint','centos','mac','w7'],\n help=\"Define a operating system\")\n parser.add_argument(\"--dry-run\", help=\"Only print throughput, do not execute command.\",\n action=\"store_true\")\n parser.add_argument(\"--legacy\", help=\"Use legacy install (from official repository).\",\n action=\"store_true\")\n parser.add_argument(\"--version\", action=\"version\",\n version=\"%(prog)s {}\".format(__version__))\n parser.add_argument(\"-v\", \"--verbose\", dest=\"verbose_count\",\n action=\"count\", default=0,\n help=\"increases log verbosity for each occurence.\")\n arguments = parser.parse_args(argv[1:])\n # Sets log level to WARN going more verbose for each new -v.\n log.setLevel(max(3 - arguments.verbose_count, 0) * 10)\n return arguments\n\n\ndef main():\n \"\"\"Main program. 
Sets up logging and do some work.\"\"\"\n logging.basicConfig(stream=sys.stderr, level=logging.DEBUG,\n format='%(name)s (%(levelname)s): %(message)s')\n\n kvm = pathlib.Path(\"/disks/d/VMWare/KVM\")\n os = { \n 'win7':'/disks/d/OS/Windows/Windows_7/Windows_7_LITE_X64.iso',\n 'win7_full':'/disks/d/OS/Windows/Windows_7/fr_windows_7_ultimate_x64_dvd_x15-65928.iso',\n 'mint':'/disks/d/OS/Unix/Mint/linuxmint-18.3-cinnamon-64bit.iso',\n 'solaris':'/disks/d/OS/Unix/Solaris/11/sol-11_3-text-x86.iso'\n }\n\n try:\n arguments = parse_command_line(sys.argv)\n # Assign args to variables\n server = arguments.name\n os = arguments.os\n legacy = arguments.legacy\n dry = arguments.dry_run\n if kvm.path.exists():\n kvm_disk = kvm_path + server\n command = \"virt-install --ram 2048 --disk path=${DIR_HOST}/${HOST}.qcow2,size=8 --vcpus 2 --os-type linux --os-variant ubuntuquantal --network bridge=virbr0\"\n if dry:\n print(command)\n print(kvm_disk)\n\n except KeyboardInterrupt:\n log.error('Program interrupted!')\n finally:\n logging.shutdown()\n\nif __name__ == \"__main__\":\n sys.exit(main())\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
<|reserved_special_token_0|>
class Estoque(object):
<|reserved_special_token_0|>
def save_categoria(self, categoria):
pass
<|reserved_special_token_0|>
def save_produtos(self, produto):
pass
<|reserved_special_token_0|>
def create_subcategoria(self):
""""
Cria uma categoria através dos dados recolhidos pelo formulário.
Os dados são: Codigo, nome e descrição e a passagem de um objeto categoria
"""
if len(self.categorias) == 0:
print('Você deve criar pelo menos uma CATEGORIA!\n')
self.create_categoria()
print('- Criar SUBCATEGORIA -')
codigo = input('CÓDIGO: ').strip()
nome = input('NOME: ').strip()
descrição = input('DESCRIÇÃO: ').strip()
escolhe = input('CATEGORIA (Nome ou Código): ')
categoria = 0
for cat in self.categorias:
if cat.nome == escolhe or cat.codigo == escolhe:
categoria = cat
break
else:
print(
'Categoria não Encontrada!\nVocê deve criar uma CATEGORIA!'
)
self.create_categoria()
subcategoria = Subcategoria(categoria, codigo, nome, descrição)
if subcategoria not in self.subcategorias:
self.subcategorias.append(subcategoria)
<|reserved_special_token_0|>
def low_stock_alarm(self):
pass
<|reserved_special_token_0|>
def altera_item(self):
print('alterando item do estoque')
self.menu_estoque()
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def valida_opcao(self, opcao):
if opcao.isdigit():
return True
else:
return False
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Estoque(object):
<|reserved_special_token_0|>
def save_categoria(self, categoria):
pass
<|reserved_special_token_0|>
def save_produtos(self, produto):
pass
def create_categoria(self):
""""
Cria uma categoria através dos dados recolhidos pelo formulário.
Os dados são: Codigo, nome e descrição
"""
print('- Criar CATEGORIA -')
codigo = input('CÓDIGO: ').strip()
nome = input('NOME: ').strip()
descrição = input('DESCRIÇÃO: ').strip()
categoria = Categoria(codigo, nome, descrição)
if categoria not in self.categorias:
self.categorias.append(categoria)
def create_subcategoria(self):
""""
Cria uma categoria através dos dados recolhidos pelo formulário.
Os dados são: Codigo, nome e descrição e a passagem de um objeto categoria
"""
if len(self.categorias) == 0:
print('Você deve criar pelo menos uma CATEGORIA!\n')
self.create_categoria()
print('- Criar SUBCATEGORIA -')
codigo = input('CÓDIGO: ').strip()
nome = input('NOME: ').strip()
descrição = input('DESCRIÇÃO: ').strip()
escolhe = input('CATEGORIA (Nome ou Código): ')
categoria = 0
for cat in self.categorias:
if cat.nome == escolhe or cat.codigo == escolhe:
categoria = cat
break
else:
print(
'Categoria não Encontrada!\nVocê deve criar uma CATEGORIA!'
)
self.create_categoria()
subcategoria = Subcategoria(categoria, codigo, nome, descrição)
if subcategoria not in self.subcategorias:
self.subcategorias.append(subcategoria)
def create_produto(self):
""""
Cria produto a ser controlado pelo estoque. Um produto deve pertencer a uma subcategoria.
Produtos são itens que podem ser vendidos.
Possuem subcategoria, codigo, nome, descricao, estoquemax, estoquemin, valorvenda, valorcompra, foto
TODELETE: Por enquanto foto recebe uma string qualquer
"""
if not len(self.subcategorias):
print('Produto deve ter CATEGORIA ou uma SUBCATEGORIA!\n')
self.create_subcategoria()
else:
print('- Cadastrar PRODUTO -')
escolhe = input('SUBCATEGORIA (Nome ou Código): ').lower()
codigo = input('CÓDIGO: ').strip()
nome = input('NOME: ').strip()
descrição = input('DESCRIÇÃO: ').strip()
estoquemax = input('Quantidade Maxima em Estoque: ')
while not produtos.valida_estoque(estoquemax):
print('Valor Inválido!')
estoquemax = input('Valor deve ser Numérico: ')
estoquemin = input('Quantidade Minima em Estoque: ')
while not produtos.valida_estoque(estoquemin):
print('Valor Inválido!')
estoquemin = input('Valor deve ser Numérico: ')
valorvenda = input('Preço Unitário: ')
while not produtos.valida_valorvenda(valorvenda):
print('Valor Inválido!')
estoquemax = input('Valor deve ser Numérico: ')
valorcompra = input('Valor de Compra: ')
while not produtos.valida_valorvenda(valorcompra):
print('Valor Inválido!')
estoquemax = input('Valor deve ser Numérico: ')
foto = input('Arquivo de foto: ')
subcategoria = 0
for scat in self.subcategorias:
if scat.nome.lower() == escolhe or scat.codigo == escolhe:
subcategoria = scat
break
else:
print(
"""Subcategoria não Encontrada!
Deseja criar uma SUBCATEGORIA?
1- Sim
2 - Não"""
)
choice = input()
if choice.lower() == 's' or choice == '1':
self.create_subcategoria()
else:
self.create_produto()
produto = Produtos(subcategoria, codigo, nome, descricao,
estoquemax, estoquemin, valorvenda, valorcompra, foto)
if produto not in self.produtos:
self.produtos.append(produto)
def low_stock_alarm(self):
pass
<|reserved_special_token_0|>
def altera_item(self):
print('alterando item do estoque')
self.menu_estoque()
<|reserved_special_token_0|>
def adiciona_item(self):
print('Adicionando item ao estoque')
while 1:
print('************* Menu Adicionar: ******************')
print(
"""Digite Ação!
1 - Adicionar Categoria
2 - Adicionar Subcategoria
3 - Adicionar Produtos
4 - Sair"""
)
opcao = input()
while not self.valida_opcao(opcao):
print('Opção Inválida!')
opcao = input()
if opcao == '1':
self.create_categoria()
elif opcao == '2':
self.create_subcategoria()
elif opcao == '3':
pass
elif opcao == '4':
break
self.menu_estoque()
def menu_estoque(self):
print('Sistema de Vendas ao Consumidor')
print('****** MENU DE ESTOQUE *****')
print(
'Digite Ação!\n1 - Consultar Estoque\n2 - Adicionar\n3 - Remover\n4 - Alterar'
)
opcao = input()
while not self.valida_opcao(opcao):
print('Opção Inválida!')
opcao = input()
if opcao == '1':
self.consulta_estoque()
elif opcao == '2':
self.adiciona_item()
elif opcao == '3':
self.remove_item()
elif opcao == '4':
self.altera_item()
def valida_opcao(self, opcao):
if opcao.isdigit():
return True
else:
return False
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Estoque(object):
<|reserved_special_token_0|>
def save_categoria(self, categoria):
pass
<|reserved_special_token_0|>
def save_produtos(self, produto):
pass
def create_categoria(self):
""""
Cria uma categoria através dos dados recolhidos pelo formulário.
Os dados são: Codigo, nome e descrição
"""
print('- Criar CATEGORIA -')
codigo = input('CÓDIGO: ').strip()
nome = input('NOME: ').strip()
descrição = input('DESCRIÇÃO: ').strip()
categoria = Categoria(codigo, nome, descrição)
if categoria not in self.categorias:
self.categorias.append(categoria)
def create_subcategoria(self):
""""
Cria uma categoria através dos dados recolhidos pelo formulário.
Os dados são: Codigo, nome e descrição e a passagem de um objeto categoria
"""
if len(self.categorias) == 0:
print('Você deve criar pelo menos uma CATEGORIA!\n')
self.create_categoria()
print('- Criar SUBCATEGORIA -')
codigo = input('CÓDIGO: ').strip()
nome = input('NOME: ').strip()
descrição = input('DESCRIÇÃO: ').strip()
escolhe = input('CATEGORIA (Nome ou Código): ')
categoria = 0
for cat in self.categorias:
if cat.nome == escolhe or cat.codigo == escolhe:
categoria = cat
break
else:
print(
'Categoria não Encontrada!\nVocê deve criar uma CATEGORIA!'
)
self.create_categoria()
subcategoria = Subcategoria(categoria, codigo, nome, descrição)
if subcategoria not in self.subcategorias:
self.subcategorias.append(subcategoria)
def create_produto(self):
""""
Cria produto a ser controlado pelo estoque. Um produto deve pertencer a uma subcategoria.
Produtos são itens que podem ser vendidos.
Possuem subcategoria, codigo, nome, descricao, estoquemax, estoquemin, valorvenda, valorcompra, foto
TODELETE: Por enquanto foto recebe uma string qualquer
"""
if not len(self.subcategorias):
print('Produto deve ter CATEGORIA ou uma SUBCATEGORIA!\n')
self.create_subcategoria()
else:
print('- Cadastrar PRODUTO -')
escolhe = input('SUBCATEGORIA (Nome ou Código): ').lower()
codigo = input('CÓDIGO: ').strip()
nome = input('NOME: ').strip()
descrição = input('DESCRIÇÃO: ').strip()
estoquemax = input('Quantidade Maxima em Estoque: ')
while not produtos.valida_estoque(estoquemax):
print('Valor Inválido!')
estoquemax = input('Valor deve ser Numérico: ')
estoquemin = input('Quantidade Minima em Estoque: ')
while not produtos.valida_estoque(estoquemin):
print('Valor Inválido!')
estoquemin = input('Valor deve ser Numérico: ')
valorvenda = input('Preço Unitário: ')
while not produtos.valida_valorvenda(valorvenda):
print('Valor Inválido!')
estoquemax = input('Valor deve ser Numérico: ')
valorcompra = input('Valor de Compra: ')
while not produtos.valida_valorvenda(valorcompra):
print('Valor Inválido!')
estoquemax = input('Valor deve ser Numérico: ')
foto = input('Arquivo de foto: ')
subcategoria = 0
for scat in self.subcategorias:
if scat.nome.lower() == escolhe or scat.codigo == escolhe:
subcategoria = scat
break
else:
print(
"""Subcategoria não Encontrada!
Deseja criar uma SUBCATEGORIA?
1- Sim
2 - Não"""
)
choice = input()
if choice.lower() == 's' or choice == '1':
self.create_subcategoria()
else:
self.create_produto()
produto = Produtos(subcategoria, codigo, nome, descricao,
estoquemax, estoquemin, valorvenda, valorcompra, foto)
if produto not in self.produtos:
self.produtos.append(produto)
def low_stock_alarm(self):
pass
def consulta_estoque(self):
print('Exibindo estoque')
if not len(self.categorias):
print('Não há Categorias Registrados!')
else:
for categoria in self.categorias:
print(categoria, end=' ')
print()
if not len(self.subcategorias):
print('Não há Subcategorias Registradas!')
else:
for subcategoria in self.subcategorias:
print(subcategoria, end=' ')
print()
if not len(self.produtos):
print('Não há Produtos Registrados!')
else:
for produto in self.produtos:
print(produto, end=' ')
self.menu_estoque()
def altera_item(self):
print('alterando item do estoque')
self.menu_estoque()
<|reserved_special_token_0|>
def adiciona_item(self):
print('Adicionando item ao estoque')
while 1:
print('************* Menu Adicionar: ******************')
print(
"""Digite Ação!
1 - Adicionar Categoria
2 - Adicionar Subcategoria
3 - Adicionar Produtos
4 - Sair"""
)
opcao = input()
while not self.valida_opcao(opcao):
print('Opção Inválida!')
opcao = input()
if opcao == '1':
self.create_categoria()
elif opcao == '2':
self.create_subcategoria()
elif opcao == '3':
pass
elif opcao == '4':
break
self.menu_estoque()
def menu_estoque(self):
print('Sistema de Vendas ao Consumidor')
print('****** MENU DE ESTOQUE *****')
print(
'Digite Ação!\n1 - Consultar Estoque\n2 - Adicionar\n3 - Remover\n4 - Alterar'
)
opcao = input()
while not self.valida_opcao(opcao):
print('Opção Inválida!')
opcao = input()
if opcao == '1':
self.consulta_estoque()
elif opcao == '2':
self.adiciona_item()
elif opcao == '3':
self.remove_item()
elif opcao == '4':
self.altera_item()
def valida_opcao(self, opcao):
if opcao.isdigit():
return True
else:
return False
<|reserved_special_token_0|>
<|reserved_special_token_1|>
from src.produtos import *
class Estoque(object):
def __init__(self):
self.categorias = []
self.subcategorias = []
self.produtos = []
self.menu_estoque()
def save_categoria(self, categoria):
pass
def save_subcategorias(self, subcategoria):
pass
def save_produtos(self, produto):
pass
def create_categoria(self):
""""
Cria uma categoria através dos dados recolhidos pelo formulário.
Os dados são: Codigo, nome e descrição
"""
print('- Criar CATEGORIA -')
codigo = input('CÓDIGO: ').strip()
nome = input('NOME: ').strip()
descrição = input('DESCRIÇÃO: ').strip()
categoria = Categoria(codigo, nome, descrição)
if categoria not in self.categorias:
self.categorias.append(categoria)
def create_subcategoria(self):
""""
Cria uma categoria através dos dados recolhidos pelo formulário.
Os dados são: Codigo, nome e descrição e a passagem de um objeto categoria
"""
if len(self.categorias) == 0:
print('Você deve criar pelo menos uma CATEGORIA!\n')
self.create_categoria()
print('- Criar SUBCATEGORIA -')
codigo = input('CÓDIGO: ').strip()
nome = input('NOME: ').strip()
descrição = input('DESCRIÇÃO: ').strip()
escolhe = input('CATEGORIA (Nome ou Código): ')
categoria = 0
for cat in self.categorias:
if cat.nome == escolhe or cat.codigo == escolhe:
categoria = cat
break
else:
print(
'Categoria não Encontrada!\nVocê deve criar uma CATEGORIA!'
)
self.create_categoria()
subcategoria = Subcategoria(categoria, codigo, nome, descrição)
if subcategoria not in self.subcategorias:
self.subcategorias.append(subcategoria)
def create_produto(self):
""""
Cria produto a ser controlado pelo estoque. Um produto deve pertencer a uma subcategoria.
Produtos são itens que podem ser vendidos.
Possuem subcategoria, codigo, nome, descricao, estoquemax, estoquemin, valorvenda, valorcompra, foto
TODELETE: Por enquanto foto recebe uma string qualquer
"""
if not len(self.subcategorias):
print('Produto deve ter CATEGORIA ou uma SUBCATEGORIA!\n')
self.create_subcategoria()
else:
print('- Cadastrar PRODUTO -')
escolhe = input('SUBCATEGORIA (Nome ou Código): ').lower()
codigo = input('CÓDIGO: ').strip()
nome = input('NOME: ').strip()
descrição = input('DESCRIÇÃO: ').strip()
estoquemax = input('Quantidade Maxima em Estoque: ')
while not produtos.valida_estoque(estoquemax):
print('Valor Inválido!')
estoquemax = input('Valor deve ser Numérico: ')
estoquemin = input('Quantidade Minima em Estoque: ')
while not produtos.valida_estoque(estoquemin):
print('Valor Inválido!')
estoquemin = input('Valor deve ser Numérico: ')
valorvenda = input('Preço Unitário: ')
while not produtos.valida_valorvenda(valorvenda):
print('Valor Inválido!')
estoquemax = input('Valor deve ser Numérico: ')
valorcompra = input('Valor de Compra: ')
while not produtos.valida_valorvenda(valorcompra):
print('Valor Inválido!')
estoquemax = input('Valor deve ser Numérico: ')
foto = input('Arquivo de foto: ')
subcategoria = 0
for scat in self.subcategorias:
if scat.nome.lower() == escolhe or scat.codigo == escolhe:
subcategoria = scat
break
else:
print(
"""Subcategoria não Encontrada!
Deseja criar uma SUBCATEGORIA?
1- Sim
2 - Não"""
)
choice = input()
if choice.lower() == 's' or choice == '1':
self.create_subcategoria()
else:
self.create_produto()
produto = Produtos(subcategoria, codigo, nome, descricao,
estoquemax, estoquemin, valorvenda, valorcompra, foto)
if produto not in self.produtos:
self.produtos.append(produto)
def low_stock_alarm(self):
pass
def consulta_estoque(self):
    """Display every registered category, subcategory and product,
    then return control to the main stock menu."""
    print('Exibindo estoque')
    if self.categorias:
        for cat in self.categorias:
            print(cat, end=' ')
        print()
    else:
        print('Não há Categorias Registrados!')
    if self.subcategorias:
        for sub in self.subcategorias:
            print(sub, end=' ')
        print()
    else:
        print('Não há Subcategorias Registradas!')
    if self.produtos:
        for prod in self.produtos:
            print(prod, end=' ')
    else:
        print('Não há Produtos Registrados!')
    # Hand control back to the interactive menu (mutual recursion with
    # menu_estoque, as in the original design).
    self.menu_estoque()
def altera_item(self):
    """Alter an item in stock (placeholder: only announces and returns to menu)."""
    print('alterando item do estoque')
    self.menu_estoque()
def remove_item(self):
    """Remove an item from stock (placeholder: only announces and returns to menu)."""
    print('Removendo item do estoque')
    self.menu_estoque()
def adiciona_item(self):
    """Sub-menu loop for adding categories, subcategories and products.

    Repeats until option 4 (Sair) is chosen, then returns to the main menu.
    """
    print('Adicionando item ao estoque')
    while 1:
        print('************* Menu Adicionar: ******************')
        print(
            """Digite Ação!
1 - Adicionar Categoria
2 - Adicionar Subcategoria
3 - Adicionar Produtos
4 - Sair"""
        )
        opcao = input()
        while not self.valida_opcao(opcao):
            print('Opção Inválida!')
            opcao = input()
        if opcao == '1':
            self.create_categoria()
        elif opcao == '2':
            self.create_subcategoria()
        elif opcao == '3':
            # BUG FIX: option 3 advertised "Adicionar Produtos" but was a
            # bare ``pass``; now wired to product creation.
            self.create_produto()
        elif opcao == '4':
            break
    self.menu_estoque()
def menu_estoque(self):
    """Top-level stock menu: read a numeric option and dispatch to the
    matching handler."""
    print('Sistema de Vendas ao Consumidor')
    print('****** MENU DE ESTOQUE *****')
    print(
        'Digite Ação!\n1 - Consultar Estoque\n2 - Adicionar\n3 - Remover\n4 - Alterar'
    )
    opcao = input()
    while not self.valida_opcao(opcao):
        print('Opção Inválida!')
        opcao = input()
    # Dispatch table instead of an if/elif chain; digits outside 1-4
    # fall through silently, exactly as before.
    handlers = {
        '1': self.consulta_estoque,
        '2': self.adiciona_item,
        '3': self.remove_item,
        '4': self.altera_item,
    }
    handler = handlers.get(opcao)
    if handler is not None:
        handler()
def valida_opcao(self, opcao):
    """Return True when *opcao* consists solely of digits."""
    return opcao.isdigit()
# Module entry point: constructing Estoque immediately starts the
# interactive menu loop (blocks on input()), so importing this module
# has side effects.
estoque = Estoque()
<|reserved_special_token_1|>
from src.produtos import *
class Estoque(object):
    """Interactive, in-memory inventory (estoque) manager.

    Holds categories, subcategories and products in plain lists and drives
    a text-menu loop that starts as soon as the object is constructed.
    The ``save_*`` methods are persistence stubs.
    """

    def __init__(self):
        # In-memory storage; nothing is persisted yet (see save_* stubs).
        self.categorias = []
        self.subcategorias = []
        self.produtos = []
        # Launches the interactive menu immediately on construction.
        self.menu_estoque()

    def save_categoria(self, categoria):
        # TODO: persist the category (stub).
        pass

    def save_subcategorias(self, subcategoria):
        # TODO: persist the subcategory (stub).
        pass

    def save_produtos(self, produto):
        # TODO: persist the product (stub).
        pass

    def create_categoria(self):
        """Prompt for código/nome/descrição and register a new Categoria."""
        print("- Criar CATEGORIA -")
        codigo = input("CÓDIGO: ").strip()
        nome = input("NOME: ").strip()
        descrição = input("DESCRIÇÃO: ").strip()
        categoria = Categoria(codigo, nome, descrição)
        if categoria not in self.categorias:
            self.categorias.append(categoria)

    def create_subcategoria(self):
        """Prompt for data and register a Subcategoria linked to a Categoria."""
        if len(self.categorias) == 0:
            print("Você deve criar pelo menos uma CATEGORIA!\n")
            self.create_categoria()
        print("- Criar SUBCATEGORIA -")
        codigo = input("CÓDIGO: ").strip()
        nome = input("NOME: ").strip()
        descrição = input("DESCRIÇÃO: ").strip()
        escolhe = input("CATEGORIA (Nome ou Código): ")
        categoria = 0
        for cat in self.categorias:
            if cat.nome == escolhe or cat.codigo == escolhe:
                categoria = cat
                break
        else:
            # for/else: runs only when no category matched the input.
            # NOTE(review): after create_categoria() returns, execution
            # still falls through with categoria == 0 — confirm intended.
            print("Categoria não Encontrada!\nVocê deve criar uma CATEGORIA!")
            self.create_categoria()
        subcategoria = Subcategoria(categoria, codigo, nome, descrição)
        if subcategoria not in self.subcategorias:
            self.subcategorias.append(subcategoria)

    def create_produto(self):
        """Prompt for product data and register a ``Produtos`` entry.

        A product must belong to an existing subcategory; when none exist
        the user is redirected to subcategory creation instead.
        """
        # TODO: Implementar a foto no sistema (currently any string).
        if not len(self.subcategorias):
            print("Produto deve ter CATEGORIA ou uma SUBCATEGORIA!\n")
            self.create_subcategoria()
        else:
            print("- Cadastrar PRODUTO -")
            escolhe = input("SUBCATEGORIA (Nome ou Código): ").lower()
            codigo = input("CÓDIGO: ").strip()
            nome = input("NOME: ").strip()
            descrição = input("DESCRIÇÃO: ").strip()
            estoquemax = input("Quantidade Maxima em Estoque: ")
            while not produtos.valida_estoque(estoquemax):
                print("Valor Inválido!")
                estoquemax = input("Valor deve ser Numérico: ")
            estoquemin = input("Quantidade Minima em Estoque: ")
            while not produtos.valida_estoque(estoquemin):
                print("Valor Inválido!")
                estoquemin = input("Valor deve ser Numérico: ")
            valorvenda = input("Preço Unitário: ")
            while not produtos.valida_valorvenda(valorvenda):
                print("Valor Inválido!")
                # BUG FIX: original re-prompted ``estoquemax`` here, so an
                # invalid price looped forever.
                valorvenda = input("Valor deve ser Numérico: ")
            valorcompra = input("Valor de Compra: ")
            while not produtos.valida_valorvenda(valorcompra):
                print("Valor Inválido!")
                # BUG FIX: same as above, re-prompt ``valorcompra``.
                valorcompra = input("Valor deve ser Numérico: ")
            foto = input("Arquivo de foto: ")
            subcategoria = 0
            for scat in self.subcategorias:
                if scat.nome.lower() == escolhe or scat.codigo == escolhe:
                    subcategoria = scat
                    break
            else:
                print("Subcategoria não Encontrada!\nDeseja criar uma SUBCATEGORIA?\n1- Sim\n2 - Não")
                choice = input()
                if choice.lower() == 's' or choice == '1':
                    self.create_subcategoria()
                else:
                    self.create_produto()
            # BUG FIX: original passed the undefined name ``descricao`` (the
            # prompted variable is ``descrição``), raising NameError.
            produto = Produtos(subcategoria, codigo, nome, descrição,
                               estoquemax, estoquemin, valorvenda,
                               valorcompra, foto)
            if produto not in self.produtos:
                self.produtos.append(produto)

    # funcionalidade pedida na especificação
    def low_stock_alarm(self):
        """Warn about low stock levels (required by the spec; stub)."""
        pass

    def consulta_estoque(self):
        """Display every registered category, subcategory and product."""
        print("Exibindo estoque")
        if not len(self.categorias):
            print("Não há Categorias Registrados!")
        else:
            for categoria in self.categorias:
                print(categoria, end=" ")
            print()
        if not len(self.subcategorias):
            print("Não há Subcategorias Registradas!")
        else:
            for subcategoria in self.subcategorias:
                print(subcategoria, end=" ")
            print()
        if not len(self.produtos):
            print("Não há Produtos Registrados!")
        else:
            for produto in self.produtos:
                print(produto, end=" ")
        # Return control to the menu (mutual recursion, by design).
        self.menu_estoque()

    def altera_item(self):
        """Alter an item in stock (placeholder)."""
        print("alterando item do estoque")
        self.menu_estoque()

    def remove_item(self):
        """Remove an item (placeholder; must not remove items still in stock)."""
        print("Removendo item do estoque")
        self.menu_estoque()

    def adiciona_item(self):
        """Sub-menu loop for adding categories, subcategorias and products."""
        print("Adicionando item ao estoque")
        while 1:
            print("************* Menu Adicionar: ******************")
            print("Digite Ação!\n1 - Adicionar Categoria\n2 - Adicionar Subcategoria\n3 - Adicionar Produtos\n4 - Sair")
            opcao = input()
            while not self.valida_opcao(opcao):
                print("Opção Inválida!")
                opcao = input()
            if opcao == '1':
                self.create_categoria()
            elif opcao == '2':
                self.create_subcategoria()
            elif opcao == '3':
                # BUG FIX: option 3 advertised "Adicionar Produtos" but was
                # a bare ``pass``; now wired to product creation.
                self.create_produto()
            elif opcao == '4':
                break
        self.menu_estoque()

    def menu_estoque(self):
        """Top-level stock menu: read a numeric option and dispatch it."""
        print("Sistema de Vendas ao Consumidor")
        print("****** MENU DE ESTOQUE *****")
        print("Digite Ação!\n1 - Consultar Estoque\n2 - Adicionar\n3 - Remover\n4 - Alterar")
        opcao = input()
        while not self.valida_opcao(opcao):
            print("Opção Inválida!")
            opcao = input()
        if opcao == '1':
            self.consulta_estoque()
        elif opcao == '2':
            self.adiciona_item()
        elif opcao == '3':
            self.remove_item()
        elif opcao == '4':
            self.altera_item()

    def valida_opcao(self, opcao):
        """Return True when *opcao* consists solely of digits."""
        if opcao.isdigit():
            return True
        else:
            return False
# Module entry point: constructing Estoque immediately starts the
# interactive menu loop (blocks on input()), so importing this module
# has side effects.
estoque = Estoque()
|
flexible
|
{
"blob_id": "9f3ca0d5a10a27d926a0f306665889418f8d6a0c",
"index": 5884,
"step-1": "<mask token>\n\n\nclass Estoque(object):\n <mask token>\n\n def save_categoria(self, categoria):\n pass\n <mask token>\n\n def save_produtos(self, produto):\n pass\n <mask token>\n\n def create_subcategoria(self):\n \"\"\"\"\n Cria uma categoria através dos dados recolhidos pelo formulário.\n Os dados são: Codigo, nome e descrição e a passagem de um objeto categoria\n \"\"\"\n if len(self.categorias) == 0:\n print('Você deve criar pelo menos uma CATEGORIA!\\n')\n self.create_categoria()\n print('- Criar SUBCATEGORIA -')\n codigo = input('CÓDIGO: ').strip()\n nome = input('NOME: ').strip()\n descrição = input('DESCRIÇÃO: ').strip()\n escolhe = input('CATEGORIA (Nome ou Código): ')\n categoria = 0\n for cat in self.categorias:\n if cat.nome == escolhe or cat.codigo == escolhe:\n categoria = cat\n break\n else:\n print(\n 'Categoria não Encontrada!\\nVocê deve criar uma CATEGORIA!'\n )\n self.create_categoria()\n subcategoria = Subcategoria(categoria, codigo, nome, descrição)\n if subcategoria not in self.subcategorias:\n self.subcategorias.append(subcategoria)\n <mask token>\n\n def low_stock_alarm(self):\n pass\n <mask token>\n\n def altera_item(self):\n print('alterando item do estoque')\n self.menu_estoque()\n <mask token>\n <mask token>\n <mask token>\n\n def valida_opcao(self, opcao):\n if opcao.isdigit():\n return True\n else:\n return False\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Estoque(object):\n <mask token>\n\n def save_categoria(self, categoria):\n pass\n <mask token>\n\n def save_produtos(self, produto):\n pass\n\n def create_categoria(self):\n \"\"\"\"\n Cria uma categoria através dos dados recolhidos pelo formulário.\n Os dados são: Codigo, nome e descrição\n \"\"\"\n print('- Criar CATEGORIA -')\n codigo = input('CÓDIGO: ').strip()\n nome = input('NOME: ').strip()\n descrição = input('DESCRIÇÃO: ').strip()\n categoria = Categoria(codigo, nome, descrição)\n if categoria not in self.categorias:\n self.categorias.append(categoria)\n\n def create_subcategoria(self):\n \"\"\"\"\n Cria uma categoria através dos dados recolhidos pelo formulário.\n Os dados são: Codigo, nome e descrição e a passagem de um objeto categoria\n \"\"\"\n if len(self.categorias) == 0:\n print('Você deve criar pelo menos uma CATEGORIA!\\n')\n self.create_categoria()\n print('- Criar SUBCATEGORIA -')\n codigo = input('CÓDIGO: ').strip()\n nome = input('NOME: ').strip()\n descrição = input('DESCRIÇÃO: ').strip()\n escolhe = input('CATEGORIA (Nome ou Código): ')\n categoria = 0\n for cat in self.categorias:\n if cat.nome == escolhe or cat.codigo == escolhe:\n categoria = cat\n break\n else:\n print(\n 'Categoria não Encontrada!\\nVocê deve criar uma CATEGORIA!'\n )\n self.create_categoria()\n subcategoria = Subcategoria(categoria, codigo, nome, descrição)\n if subcategoria not in self.subcategorias:\n self.subcategorias.append(subcategoria)\n\n def create_produto(self):\n \"\"\"\"\n Cria produto a ser controlado pelo estoque. 
Um produto deve pertencer a uma subcategoria.\n Produtos são itens que podem ser vendidos.\n Possuem subcategoria, codigo, nome, descricao, estoquemax, estoquemin, valorvenda, valorcompra, foto\n\n TODELETE: Por enquanto foto recebe uma string qualquer\n\n \"\"\"\n if not len(self.subcategorias):\n print('Produto deve ter CATEGORIA ou uma SUBCATEGORIA!\\n')\n self.create_subcategoria()\n else:\n print('- Cadastrar PRODUTO -')\n escolhe = input('SUBCATEGORIA (Nome ou Código): ').lower()\n codigo = input('CÓDIGO: ').strip()\n nome = input('NOME: ').strip()\n descrição = input('DESCRIÇÃO: ').strip()\n estoquemax = input('Quantidade Maxima em Estoque: ')\n while not produtos.valida_estoque(estoquemax):\n print('Valor Inválido!')\n estoquemax = input('Valor deve ser Numérico: ')\n estoquemin = input('Quantidade Minima em Estoque: ')\n while not produtos.valida_estoque(estoquemin):\n print('Valor Inválido!')\n estoquemin = input('Valor deve ser Numérico: ')\n valorvenda = input('Preço Unitário: ')\n while not produtos.valida_valorvenda(valorvenda):\n print('Valor Inválido!')\n estoquemax = input('Valor deve ser Numérico: ')\n valorcompra = input('Valor de Compra: ')\n while not produtos.valida_valorvenda(valorcompra):\n print('Valor Inválido!')\n estoquemax = input('Valor deve ser Numérico: ')\n foto = input('Arquivo de foto: ')\n subcategoria = 0\n for scat in self.subcategorias:\n if scat.nome.lower() == escolhe or scat.codigo == escolhe:\n subcategoria = scat\n break\n else:\n print(\n \"\"\"Subcategoria não Encontrada!\nDeseja criar uma SUBCATEGORIA?\n1- Sim\n2 - Não\"\"\"\n )\n choice = input()\n if choice.lower() == 's' or choice == '1':\n self.create_subcategoria()\n else:\n self.create_produto()\n produto = Produtos(subcategoria, codigo, nome, descricao,\n estoquemax, estoquemin, valorvenda, valorcompra, foto)\n if produto not in self.produtos:\n self.produtos.append(produto)\n\n def low_stock_alarm(self):\n pass\n <mask token>\n\n def altera_item(self):\n 
print('alterando item do estoque')\n self.menu_estoque()\n <mask token>\n\n def adiciona_item(self):\n print('Adicionando item ao estoque')\n while 1:\n print('************* Menu Adicionar: ******************')\n print(\n \"\"\"Digite Ação!\n1 - Adicionar Categoria\n2 - Adicionar Subcategoria\n3 - Adicionar Produtos\n4 - Sair\"\"\"\n )\n opcao = input()\n while not self.valida_opcao(opcao):\n print('Opção Inválida!')\n opcao = input()\n if opcao == '1':\n self.create_categoria()\n elif opcao == '2':\n self.create_subcategoria()\n elif opcao == '3':\n pass\n elif opcao == '4':\n break\n self.menu_estoque()\n\n def menu_estoque(self):\n print('Sistema de Vendas ao Consumidor')\n print('****** MENU DE ESTOQUE *****')\n print(\n 'Digite Ação!\\n1 - Consultar Estoque\\n2 - Adicionar\\n3 - Remover\\n4 - Alterar'\n )\n opcao = input()\n while not self.valida_opcao(opcao):\n print('Opção Inválida!')\n opcao = input()\n if opcao == '1':\n self.consulta_estoque()\n elif opcao == '2':\n self.adiciona_item()\n elif opcao == '3':\n self.remove_item()\n elif opcao == '4':\n self.altera_item()\n\n def valida_opcao(self, opcao):\n if opcao.isdigit():\n return True\n else:\n return False\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass Estoque(object):\n <mask token>\n\n def save_categoria(self, categoria):\n pass\n <mask token>\n\n def save_produtos(self, produto):\n pass\n\n def create_categoria(self):\n \"\"\"\"\n Cria uma categoria através dos dados recolhidos pelo formulário.\n Os dados são: Codigo, nome e descrição\n \"\"\"\n print('- Criar CATEGORIA -')\n codigo = input('CÓDIGO: ').strip()\n nome = input('NOME: ').strip()\n descrição = input('DESCRIÇÃO: ').strip()\n categoria = Categoria(codigo, nome, descrição)\n if categoria not in self.categorias:\n self.categorias.append(categoria)\n\n def create_subcategoria(self):\n \"\"\"\"\n Cria uma categoria através dos dados recolhidos pelo formulário.\n Os dados são: Codigo, nome e descrição e a passagem de um objeto categoria\n \"\"\"\n if len(self.categorias) == 0:\n print('Você deve criar pelo menos uma CATEGORIA!\\n')\n self.create_categoria()\n print('- Criar SUBCATEGORIA -')\n codigo = input('CÓDIGO: ').strip()\n nome = input('NOME: ').strip()\n descrição = input('DESCRIÇÃO: ').strip()\n escolhe = input('CATEGORIA (Nome ou Código): ')\n categoria = 0\n for cat in self.categorias:\n if cat.nome == escolhe or cat.codigo == escolhe:\n categoria = cat\n break\n else:\n print(\n 'Categoria não Encontrada!\\nVocê deve criar uma CATEGORIA!'\n )\n self.create_categoria()\n subcategoria = Subcategoria(categoria, codigo, nome, descrição)\n if subcategoria not in self.subcategorias:\n self.subcategorias.append(subcategoria)\n\n def create_produto(self):\n \"\"\"\"\n Cria produto a ser controlado pelo estoque. 
Um produto deve pertencer a uma subcategoria.\n Produtos são itens que podem ser vendidos.\n Possuem subcategoria, codigo, nome, descricao, estoquemax, estoquemin, valorvenda, valorcompra, foto\n\n TODELETE: Por enquanto foto recebe uma string qualquer\n\n \"\"\"\n if not len(self.subcategorias):\n print('Produto deve ter CATEGORIA ou uma SUBCATEGORIA!\\n')\n self.create_subcategoria()\n else:\n print('- Cadastrar PRODUTO -')\n escolhe = input('SUBCATEGORIA (Nome ou Código): ').lower()\n codigo = input('CÓDIGO: ').strip()\n nome = input('NOME: ').strip()\n descrição = input('DESCRIÇÃO: ').strip()\n estoquemax = input('Quantidade Maxima em Estoque: ')\n while not produtos.valida_estoque(estoquemax):\n print('Valor Inválido!')\n estoquemax = input('Valor deve ser Numérico: ')\n estoquemin = input('Quantidade Minima em Estoque: ')\n while not produtos.valida_estoque(estoquemin):\n print('Valor Inválido!')\n estoquemin = input('Valor deve ser Numérico: ')\n valorvenda = input('Preço Unitário: ')\n while not produtos.valida_valorvenda(valorvenda):\n print('Valor Inválido!')\n estoquemax = input('Valor deve ser Numérico: ')\n valorcompra = input('Valor de Compra: ')\n while not produtos.valida_valorvenda(valorcompra):\n print('Valor Inválido!')\n estoquemax = input('Valor deve ser Numérico: ')\n foto = input('Arquivo de foto: ')\n subcategoria = 0\n for scat in self.subcategorias:\n if scat.nome.lower() == escolhe or scat.codigo == escolhe:\n subcategoria = scat\n break\n else:\n print(\n \"\"\"Subcategoria não Encontrada!\nDeseja criar uma SUBCATEGORIA?\n1- Sim\n2 - Não\"\"\"\n )\n choice = input()\n if choice.lower() == 's' or choice == '1':\n self.create_subcategoria()\n else:\n self.create_produto()\n produto = Produtos(subcategoria, codigo, nome, descricao,\n estoquemax, estoquemin, valorvenda, valorcompra, foto)\n if produto not in self.produtos:\n self.produtos.append(produto)\n\n def low_stock_alarm(self):\n pass\n\n def consulta_estoque(self):\n print('Exibindo 
estoque')\n if not len(self.categorias):\n print('Não há Categorias Registrados!')\n else:\n for categoria in self.categorias:\n print(categoria, end=' ')\n print()\n if not len(self.subcategorias):\n print('Não há Subcategorias Registradas!')\n else:\n for subcategoria in self.subcategorias:\n print(subcategoria, end=' ')\n print()\n if not len(self.produtos):\n print('Não há Produtos Registrados!')\n else:\n for produto in self.produtos:\n print(produto, end=' ')\n self.menu_estoque()\n\n def altera_item(self):\n print('alterando item do estoque')\n self.menu_estoque()\n <mask token>\n\n def adiciona_item(self):\n print('Adicionando item ao estoque')\n while 1:\n print('************* Menu Adicionar: ******************')\n print(\n \"\"\"Digite Ação!\n1 - Adicionar Categoria\n2 - Adicionar Subcategoria\n3 - Adicionar Produtos\n4 - Sair\"\"\"\n )\n opcao = input()\n while not self.valida_opcao(opcao):\n print('Opção Inválida!')\n opcao = input()\n if opcao == '1':\n self.create_categoria()\n elif opcao == '2':\n self.create_subcategoria()\n elif opcao == '3':\n pass\n elif opcao == '4':\n break\n self.menu_estoque()\n\n def menu_estoque(self):\n print('Sistema de Vendas ao Consumidor')\n print('****** MENU DE ESTOQUE *****')\n print(\n 'Digite Ação!\\n1 - Consultar Estoque\\n2 - Adicionar\\n3 - Remover\\n4 - Alterar'\n )\n opcao = input()\n while not self.valida_opcao(opcao):\n print('Opção Inválida!')\n opcao = input()\n if opcao == '1':\n self.consulta_estoque()\n elif opcao == '2':\n self.adiciona_item()\n elif opcao == '3':\n self.remove_item()\n elif opcao == '4':\n self.altera_item()\n\n def valida_opcao(self, opcao):\n if opcao.isdigit():\n return True\n else:\n return False\n\n\n<mask token>\n",
"step-4": "from src.produtos import *\n\n\nclass Estoque(object):\n\n def __init__(self):\n self.categorias = []\n self.subcategorias = []\n self.produtos = []\n self.menu_estoque()\n\n def save_categoria(self, categoria):\n pass\n\n def save_subcategorias(self, subcategoria):\n pass\n\n def save_produtos(self, produto):\n pass\n\n def create_categoria(self):\n \"\"\"\"\n Cria uma categoria através dos dados recolhidos pelo formulário.\n Os dados são: Codigo, nome e descrição\n \"\"\"\n print('- Criar CATEGORIA -')\n codigo = input('CÓDIGO: ').strip()\n nome = input('NOME: ').strip()\n descrição = input('DESCRIÇÃO: ').strip()\n categoria = Categoria(codigo, nome, descrição)\n if categoria not in self.categorias:\n self.categorias.append(categoria)\n\n def create_subcategoria(self):\n \"\"\"\"\n Cria uma categoria através dos dados recolhidos pelo formulário.\n Os dados são: Codigo, nome e descrição e a passagem de um objeto categoria\n \"\"\"\n if len(self.categorias) == 0:\n print('Você deve criar pelo menos uma CATEGORIA!\\n')\n self.create_categoria()\n print('- Criar SUBCATEGORIA -')\n codigo = input('CÓDIGO: ').strip()\n nome = input('NOME: ').strip()\n descrição = input('DESCRIÇÃO: ').strip()\n escolhe = input('CATEGORIA (Nome ou Código): ')\n categoria = 0\n for cat in self.categorias:\n if cat.nome == escolhe or cat.codigo == escolhe:\n categoria = cat\n break\n else:\n print(\n 'Categoria não Encontrada!\\nVocê deve criar uma CATEGORIA!'\n )\n self.create_categoria()\n subcategoria = Subcategoria(categoria, codigo, nome, descrição)\n if subcategoria not in self.subcategorias:\n self.subcategorias.append(subcategoria)\n\n def create_produto(self):\n \"\"\"\"\n Cria produto a ser controlado pelo estoque. 
Um produto deve pertencer a uma subcategoria.\n Produtos são itens que podem ser vendidos.\n Possuem subcategoria, codigo, nome, descricao, estoquemax, estoquemin, valorvenda, valorcompra, foto\n\n TODELETE: Por enquanto foto recebe uma string qualquer\n\n \"\"\"\n if not len(self.subcategorias):\n print('Produto deve ter CATEGORIA ou uma SUBCATEGORIA!\\n')\n self.create_subcategoria()\n else:\n print('- Cadastrar PRODUTO -')\n escolhe = input('SUBCATEGORIA (Nome ou Código): ').lower()\n codigo = input('CÓDIGO: ').strip()\n nome = input('NOME: ').strip()\n descrição = input('DESCRIÇÃO: ').strip()\n estoquemax = input('Quantidade Maxima em Estoque: ')\n while not produtos.valida_estoque(estoquemax):\n print('Valor Inválido!')\n estoquemax = input('Valor deve ser Numérico: ')\n estoquemin = input('Quantidade Minima em Estoque: ')\n while not produtos.valida_estoque(estoquemin):\n print('Valor Inválido!')\n estoquemin = input('Valor deve ser Numérico: ')\n valorvenda = input('Preço Unitário: ')\n while not produtos.valida_valorvenda(valorvenda):\n print('Valor Inválido!')\n estoquemax = input('Valor deve ser Numérico: ')\n valorcompra = input('Valor de Compra: ')\n while not produtos.valida_valorvenda(valorcompra):\n print('Valor Inválido!')\n estoquemax = input('Valor deve ser Numérico: ')\n foto = input('Arquivo de foto: ')\n subcategoria = 0\n for scat in self.subcategorias:\n if scat.nome.lower() == escolhe or scat.codigo == escolhe:\n subcategoria = scat\n break\n else:\n print(\n \"\"\"Subcategoria não Encontrada!\nDeseja criar uma SUBCATEGORIA?\n1- Sim\n2 - Não\"\"\"\n )\n choice = input()\n if choice.lower() == 's' or choice == '1':\n self.create_subcategoria()\n else:\n self.create_produto()\n produto = Produtos(subcategoria, codigo, nome, descricao,\n estoquemax, estoquemin, valorvenda, valorcompra, foto)\n if produto not in self.produtos:\n self.produtos.append(produto)\n\n def low_stock_alarm(self):\n pass\n\n def consulta_estoque(self):\n print('Exibindo 
estoque')\n if not len(self.categorias):\n print('Não há Categorias Registrados!')\n else:\n for categoria in self.categorias:\n print(categoria, end=' ')\n print()\n if not len(self.subcategorias):\n print('Não há Subcategorias Registradas!')\n else:\n for subcategoria in self.subcategorias:\n print(subcategoria, end=' ')\n print()\n if not len(self.produtos):\n print('Não há Produtos Registrados!')\n else:\n for produto in self.produtos:\n print(produto, end=' ')\n self.menu_estoque()\n\n def altera_item(self):\n print('alterando item do estoque')\n self.menu_estoque()\n\n def remove_item(self):\n print('Removendo item do estoque')\n self.menu_estoque()\n\n def adiciona_item(self):\n print('Adicionando item ao estoque')\n while 1:\n print('************* Menu Adicionar: ******************')\n print(\n \"\"\"Digite Ação!\n1 - Adicionar Categoria\n2 - Adicionar Subcategoria\n3 - Adicionar Produtos\n4 - Sair\"\"\"\n )\n opcao = input()\n while not self.valida_opcao(opcao):\n print('Opção Inválida!')\n opcao = input()\n if opcao == '1':\n self.create_categoria()\n elif opcao == '2':\n self.create_subcategoria()\n elif opcao == '3':\n pass\n elif opcao == '4':\n break\n self.menu_estoque()\n\n def menu_estoque(self):\n print('Sistema de Vendas ao Consumidor')\n print('****** MENU DE ESTOQUE *****')\n print(\n 'Digite Ação!\\n1 - Consultar Estoque\\n2 - Adicionar\\n3 - Remover\\n4 - Alterar'\n )\n opcao = input()\n while not self.valida_opcao(opcao):\n print('Opção Inválida!')\n opcao = input()\n if opcao == '1':\n self.consulta_estoque()\n elif opcao == '2':\n self.adiciona_item()\n elif opcao == '3':\n self.remove_item()\n elif opcao == '4':\n self.altera_item()\n\n def valida_opcao(self, opcao):\n if opcao.isdigit():\n return True\n else:\n return False\n\n\nestoque = Estoque()\n",
"step-5": "from src.produtos import *\r\n\r\n\r\nclass Estoque(object):\r\n def __init__(self):\r\n self.categorias = []\r\n self.subcategorias = []\r\n self.produtos = []\r\n self.menu_estoque()\r\n\r\n def save_categoria(self, categoria):\r\n pass\r\n\r\n def save_subcategorias(self, subcategoria):\r\n pass\r\n\r\n def save_produtos(self, produto):\r\n pass\r\n\r\n def create_categoria(self):\r\n \"\"\"\"\r\n Cria uma categoria através dos dados recolhidos pelo formulário.\r\n Os dados são: Codigo, nome e descrição\r\n \"\"\"\r\n print(\"- Criar CATEGORIA -\")\r\n codigo = input(\"CÓDIGO: \").strip()\r\n nome = input(\"NOME: \").strip()\r\n descrição = input(\"DESCRIÇÃO: \").strip()\r\n categoria = Categoria(codigo, nome, descrição)\r\n if categoria not in self.categorias:\r\n self.categorias.append(categoria)\r\n\r\n def create_subcategoria(self):\r\n \"\"\"\"\r\n Cria uma categoria através dos dados recolhidos pelo formulário.\r\n Os dados são: Codigo, nome e descrição e a passagem de um objeto categoria\r\n \"\"\"\r\n if len(self.categorias) == 0:\r\n print(\"Você deve criar pelo menos uma CATEGORIA!\\n\")\r\n self.create_categoria()\r\n print(\"- Criar SUBCATEGORIA -\")\r\n codigo = input(\"CÓDIGO: \").strip()\r\n nome = input(\"NOME: \").strip()\r\n descrição = input(\"DESCRIÇÃO: \").strip()\r\n escolhe = input(\"CATEGORIA (Nome ou Código): \")\r\n categoria = 0\r\n\r\n for cat in self.categorias:\r\n if cat.nome == escolhe or cat.codigo == escolhe:\r\n categoria = cat\r\n break\r\n else:\r\n print(\"Categoria não Encontrada!\\nVocê deve criar uma CATEGORIA!\")\r\n self.create_categoria()\r\n\r\n subcategoria = Subcategoria(categoria, codigo, nome, descrição)\r\n\r\n if subcategoria not in self.subcategorias:\r\n self.subcategorias.append(subcategoria)\r\n\r\n def create_produto(self):\r\n \"\"\"\"\r\n Cria produto a ser controlado pelo estoque. 
Um produto deve pertencer a uma subcategoria.\r\n Produtos são itens que podem ser vendidos.\r\n Possuem subcategoria, codigo, nome, descricao, estoquemax, estoquemin, valorvenda, valorcompra, foto\r\n\r\n TODELETE: Por enquanto foto recebe uma string qualquer\r\n\r\n \"\"\"\r\n # TODO: Implementar a foto no sistemas\r\n if not len(self.subcategorias):\r\n print(\"Produto deve ter CATEGORIA ou uma SUBCATEGORIA!\\n\")\r\n self.create_subcategoria()\r\n else:\r\n print(\"- Cadastrar PRODUTO -\")\r\n escolhe = input(\"SUBCATEGORIA (Nome ou Código): \").lower()\r\n codigo = input(\"CÓDIGO: \").strip()\r\n nome = input(\"NOME: \").strip()\r\n descrição = input(\"DESCRIÇÃO: \").strip()\r\n\r\n estoquemax = input(\"Quantidade Maxima em Estoque: \")\r\n while not produtos.valida_estoque(estoquemax):\r\n print(\"Valor Inválido!\")\r\n estoquemax = input(\"Valor deve ser Numérico: \")\r\n\r\n estoquemin = input(\"Quantidade Minima em Estoque: \")\r\n while not produtos.valida_estoque(estoquemin):\r\n print(\"Valor Inválido!\")\r\n estoquemin = input(\"Valor deve ser Numérico: \")\r\n\r\n valorvenda = input(\"Preço Unitário: \")\r\n while not produtos.valida_valorvenda(valorvenda):\r\n print(\"Valor Inválido!\")\r\n estoquemax = input(\"Valor deve ser Numérico: \")\r\n\r\n valorcompra = input(\"Valor de Compra: \")\r\n while not produtos.valida_valorvenda(valorcompra):\r\n print(\"Valor Inválido!\")\r\n estoquemax = input(\"Valor deve ser Numérico: \")\r\n\r\n foto = input(\"Arquivo de foto: \")\r\n\r\n subcategoria = 0\r\n\r\n for scat in self.subcategorias:\r\n if scat.nome.lower() == escolhe or scat.codigo == escolhe:\r\n subcategoria = scat\r\n break\r\n else:\r\n print(\"Subcategoria não Encontrada!\\nDeseja criar uma SUBCATEGORIA?\\n1- Sim\\n2 - Não\")\r\n choice = input()\r\n if choice.lower() == 's' or choice == '1':\r\n self.create_subcategoria()\r\n else:\r\n self.create_produto()\r\n\r\n produto = Produtos( subcategoria, codigo, nome, descricao, estoquemax, 
estoquemin, valorvenda, valorcompra, foto)\r\n\r\n if produto not in self.produtos:\r\n self.produtos.append(produto)\r\n\r\n # funcionalidade pedida na especificação\r\n\r\n def low_stock_alarm(self): # aviso de estoque baixo\r\n pass\r\n\r\n def consulta_estoque(self): # exibe itens disponiveis no estoque\r\n print(\"Exibindo estoque\")\r\n if not len(self.categorias):\r\n print(\"Não há Categorias Registrados!\")\r\n else:\r\n for categoria in self.categorias:\r\n print(categoria, end=\" \")\r\n print()\r\n if not len(self.subcategorias):\r\n print(\"Não há Subcategorias Registradas!\")\r\n else:\r\n for subcategoria in self.subcategorias:\r\n print(subcategoria, end=\" \")\r\n print()\r\n if not len(self.produtos):\r\n print(\"Não há Produtos Registrados!\")\r\n else:\r\n for produto in self.produtos:\r\n print(produto, end=\" \")\r\n\r\n self.menu_estoque()\r\n\r\n def altera_item(self): # altera um item disponivel no estoque\r\n print(\"alterando item do estoque\")\r\n self.menu_estoque()\r\n\r\n def remove_item(self): # remove um item disponivel no estoque - n remover se o item ainda tem produtos no estoque\r\n print(\"Removendo item do estoque\")\r\n self.menu_estoque()\r\n\r\n def adiciona_item(self): # adiciona novo item ao estoque\r\n print(\"Adicionando item ao estoque\")\r\n while 1:\r\n print(\"************* Menu Adicionar: ******************\")\r\n print(\"Digite Ação!\\n1 - Adicionar Categoria\\n2 - Adicionar Subcategoria\\n3 - Adicionar Produtos\\n4 - Sair\")\r\n opcao = input()\r\n while not self.valida_opcao(opcao):\r\n print(\"Opção Inválida!\")\r\n opcao = input()\r\n if opcao == '1':\r\n self.create_categoria()\r\n elif opcao == '2':\r\n self.create_subcategoria()\r\n elif opcao == '3':\r\n pass\r\n elif opcao == '4':\r\n break\r\n self.menu_estoque()\r\n\r\n def menu_estoque(self):\r\n print(\"Sistema de Vendas ao Consumidor\")\r\n print(\"****** MENU DE ESTOQUE *****\")\r\n print(\"Digite Ação!\\n1 - Consultar Estoque\\n2 - Adicionar\\n3 - 
Remover\\n4 - Alterar\")\r\n opcao = input()\r\n\r\n while not self.valida_opcao(opcao):\r\n print(\"Opção Inválida!\")\r\n opcao = input()\r\n\r\n if opcao == '1':\r\n self.consulta_estoque()\r\n elif opcao == '2':\r\n self.adiciona_item()\r\n elif opcao == '3':\r\n self.remove_item()\r\n elif opcao == '4':\r\n self.altera_item()\r\n\r\n def valida_opcao(self, opcao):\r\n if opcao.isdigit():\r\n return True\r\n else:\r\n return False\r\n\r\nestoque = Estoque()\r\n",
"step-ids": [
7,
11,
12,
17,
18
]
}
|
[
7,
11,
12,
17,
18
] |
#
# linter.py
# Linter for SublimeLinter version 4.
#
# Written by Brian Schott (Hackerpilot)
# Copyright © 2014-2019 Economic Modeling Specialists, Intl.
#
# License: MIT
#
"""This module exports the D-Scanner plugin class."""
from SublimeLinter.lint import Linter, STREAM_STDOUT
class Dscanner(Linter):
    """SublimeLinter interface to the D-Scanner static-analysis tool."""

    name = "D-Scanner"
    # Invoke dscanner's style-check mode (-S) on the file being linted.
    cmd = 'dscanner', '-S', '${file}'
    # Parses lines of the form:  path/to/file.d(12:5)[warn]: message
    regex = ('^.+?\\((?P<line>\\d+):(?P<col>\\d+)\\)'
             '\\[((?P<warning>warn)|(?P<error>error))\\]: (?P<message>.+)$')
    multiline = False
    # A suffix of "-" tells SublimeLinter to lint the file on disk
    # instead of writing the buffer to a temp file first.
    tempfile_suffix = '-'
    word_re = None
    # Only lint buffers whose scope is D source code.
    defaults = {'selector': 'source.d'}
|
normal
|
{
"blob_id": "fda73b5dac038f077da460d6ebfb432b756909d9",
"index": 3125,
"step-1": "<mask token>\n\n\nclass Dscanner(Linter):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass Dscanner(Linter):\n <mask token>\n cmd = 'dscanner', '-S', '${file}'\n regex = (\n '^.+?\\\\((?P<line>\\\\d+):(?P<col>\\\\d+)\\\\)\\\\[((?P<warning>warn)|(?P<error>error))\\\\]: (?P<message>.+)$'\n )\n multiline = False\n tempfile_suffix = '-'\n word_re = None\n defaults = {'selector': 'source.d'}\n name = 'D-Scanner'\n",
"step-3": "<mask token>\n\n\nclass Dscanner(Linter):\n \"\"\"Provides an interface to dscanner.\"\"\"\n cmd = 'dscanner', '-S', '${file}'\n regex = (\n '^.+?\\\\((?P<line>\\\\d+):(?P<col>\\\\d+)\\\\)\\\\[((?P<warning>warn)|(?P<error>error))\\\\]: (?P<message>.+)$'\n )\n multiline = False\n tempfile_suffix = '-'\n word_re = None\n defaults = {'selector': 'source.d'}\n name = 'D-Scanner'\n",
"step-4": "<mask token>\nfrom SublimeLinter.lint import Linter, STREAM_STDOUT\n\n\nclass Dscanner(Linter):\n \"\"\"Provides an interface to dscanner.\"\"\"\n cmd = 'dscanner', '-S', '${file}'\n regex = (\n '^.+?\\\\((?P<line>\\\\d+):(?P<col>\\\\d+)\\\\)\\\\[((?P<warning>warn)|(?P<error>error))\\\\]: (?P<message>.+)$'\n )\n multiline = False\n tempfile_suffix = '-'\n word_re = None\n defaults = {'selector': 'source.d'}\n name = 'D-Scanner'\n",
"step-5": "#\n# linter.py\n# Linter for SublimeLinter version 4.\n#\n# Written by Brian Schott (Hackerpilot)\n# Copyright © 2014-2019 Economic Modeling Specialists, Intl.\n#\n# License: MIT\n#\n\n\"\"\"This module exports the D-Scanner plugin class.\"\"\"\n\nfrom SublimeLinter.lint import Linter, STREAM_STDOUT\n\n\nclass Dscanner(Linter):\n\n \"\"\"Provides an interface to dscanner.\"\"\"\n\n cmd = (\"dscanner\", \"-S\", \"${file}\")\n regex = r'^.+?\\((?P<line>\\d+):(?P<col>\\d+)\\)\\[((?P<warning>warn)|(?P<error>error))\\]: (?P<message>.+)$'\n multiline = False\n tempfile_suffix = \"-\"\n word_re = None\n defaults = {\n \"selector\": \"source.d\"\n }\n name = \"D-Scanner\"\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
# Copyright (c) 2011-2020 Eric Froemling
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# -----------------------------------------------------------------------------
"""Functionality related to scores and statistics."""
from __future__ import annotations
import random
import weakref
from typing import TYPE_CHECKING
from dataclasses import dataclass
import _ba
from ba._error import (print_exception, print_error, SessionTeamNotFoundError,
SessionPlayerNotFoundError, NotFoundError)
if TYPE_CHECKING:
import ba
from weakref import ReferenceType
from typing import Any, Dict, Optional, Sequence, Union, Tuple
@dataclass
class PlayerScoredMessage:
    """Informs something that a ba.Player scored.

    Category: Message Classes

    Attrs:

       score
          The score value.
    """

    # Points awarded for the scoring event this message describes.
    score: int
class PlayerRecord:
    """Stats for an individual player in a ba.Stats object.

    Category: Gameplay Classes

    This does not necessarily correspond to a ba.Player that is
    still present (stats may be retained for players that leave
    mid-game)
    """

    # Character the player last used; refreshed each time the record is
    # (re)associated with a session-player.
    character: str

    def __init__(self, name: str, name_full: str,
                 sessionplayer: ba.SessionPlayer, stats: ba.Stats):
        # Short and full display names captured at registration time.
        self.name = name
        self.name_full = name_full
        # score/kill_count/killed_count accumulate for the record's
        # lifetime; the 'accum*' variants are sub-totals that get zeroed
        # by ba.Stats.reset_accum().
        self.score = 0
        self.accumscore = 0
        self.kill_count = 0
        self.accum_kill_count = 0
        self.killed_count = 0
        self.accum_killed_count = 0
        # Timer that closes the window for chaining rapid kills into
        # multi-kill bonuses (see submit_kill()).
        self._multi_kill_timer: Optional[ba.Timer] = None
        self._multi_kill_count = 0
        # Weak ref so a lingering record doesn't keep the Stats alive.
        self._stats = weakref.ref(stats)
        self._last_sessionplayer: Optional[ba.SessionPlayer] = None
        self._sessionplayer: Optional[ba.SessionPlayer] = None
        self._sessionteam: Optional[ReferenceType[ba.SessionTeam]] = None
        self.streak = 0
        self.associate_with_sessionplayer(sessionplayer)

    @property
    def team(self) -> ba.SessionTeam:
        """The ba.SessionTeam the last associated player was last on.

        This can still return a valid result even if the player is gone.
        Raises a ba.SessionTeamNotFoundError if the team no longer exists.
        """
        assert self._sessionteam is not None
        team = self._sessionteam()
        if team is None:
            raise SessionTeamNotFoundError()
        return team

    @property
    def player(self) -> ba.SessionPlayer:
        """Return the instance's associated ba.SessionPlayer.

        Raises a ba.SessionPlayerNotFoundError if the player
        no longer exists.
        """
        if not self._sessionplayer:
            raise SessionPlayerNotFoundError()
        return self._sessionplayer

    def getname(self, full: bool = False) -> str:
        """Return the player entry's name."""
        return self.name_full if full else self.name

    def get_icon(self) -> Dict[str, Any]:
        """Get the icon for this instance's player."""
        # Icons come from the last-known session-player; that ref is kept
        # (strongly) even after the player leaves.
        player = self._last_sessionplayer
        assert player is not None
        return player.get_icon()

    def cancel_multi_kill_timer(self) -> None:
        """Cancel any multi-kill timer for this player entry."""
        self._multi_kill_timer = None

    def getactivity(self) -> Optional[ba.Activity]:
        """Return the ba.Activity this instance is currently associated with.

        Returns None if the activity no longer exists."""
        stats = self._stats()
        if stats is not None:
            return stats.getactivity()
        return None

    def associate_with_sessionplayer(self,
                                     sessionplayer: ba.SessionPlayer) -> None:
        """Associate this entry with a ba.SessionPlayer."""
        # Team is held weakly; character/name state is snapshotted since it
        # may change if the same player re-joins later.
        self._sessionteam = weakref.ref(sessionplayer.sessionteam)
        self.character = sessionplayer.character
        self._last_sessionplayer = sessionplayer
        self._sessionplayer = sessionplayer
        self.streak = 0

    def _end_multi_kill(self) -> None:
        # Multi-kill window expired; reset the chain.
        self._multi_kill_timer = None
        self._multi_kill_count = 0

    def get_last_sessionplayer(self) -> ba.SessionPlayer:
        """Return the last ba.Player we were associated with."""
        assert self._last_sessionplayer is not None
        return self._last_sessionplayer

    def submit_kill(self, showpoints: bool = True) -> None:
        """Submit a kill for this player entry."""
        # FIXME Clean this up.
        # pylint: disable=too-many-statements
        from ba._lang import Lstr
        from ba._general import Call
        self._multi_kill_count += 1
        stats = self._stats()
        assert stats
        # Escalating tiers: each additional kill within the multi-kill
        # window earns more bonus points and a flashier announcement.
        if self._multi_kill_count == 1:
            score = 0
            name = None
            delay = 0.0
            color = (0.0, 0.0, 0.0, 1.0)
            scale = 1.0
            sound = None
        elif self._multi_kill_count == 2:
            score = 20
            name = Lstr(resource='twoKillText')
            color = (0.1, 1.0, 0.0, 1)
            scale = 1.0
            delay = 0.0
            sound = stats.orchestrahitsound1
        elif self._multi_kill_count == 3:
            score = 40
            name = Lstr(resource='threeKillText')
            color = (1.0, 0.7, 0.0, 1)
            scale = 1.1
            delay = 0.3
            sound = stats.orchestrahitsound2
        elif self._multi_kill_count == 4:
            score = 60
            name = Lstr(resource='fourKillText')
            color = (1.0, 1.0, 0.0, 1)
            scale = 1.2
            delay = 0.6
            sound = stats.orchestrahitsound3
        elif self._multi_kill_count == 5:
            score = 80
            name = Lstr(resource='fiveKillText')
            color = (1.0, 0.5, 0.0, 1)
            scale = 1.3
            delay = 0.9
            sound = stats.orchestrahitsound4
        else:
            score = 100
            name = Lstr(resource='multiKillText',
                        subs=[('${COUNT}', str(self._multi_kill_count))])
            color = (1.0, 0.5, 0.0, 1)
            scale = 1.3
            delay = 1.0
            sound = stats.orchestrahitsound4

        # Deferred so announcements for the chained kills stagger nicely;
        # captures this tier's values via Call below.
        def _apply(name2: Lstr, score2: int, showpoints2: bool,
                   color2: Tuple[float, float, float, float], scale2: float,
                   sound2: Optional[ba.Sound]) -> None:
            from bastd.actor.popuptext import PopupText

            # Only award this if they're still alive and we can get
            # a current position for them.
            our_pos: Optional[ba.Vec3] = None
            if self._sessionplayer:
                if self._sessionplayer.activityplayer is not None:
                    try:
                        our_pos = self._sessionplayer.activityplayer.position
                    except NotFoundError:
                        pass
            if our_pos is None:
                return

            # Jitter position a bit since these often come in clusters.
            our_pos = _ba.Vec3(our_pos[0] + (random.random() - 0.5) * 2.0,
                               our_pos[1] + (random.random() - 0.5) * 2.0,
                               our_pos[2] + (random.random() - 0.5) * 2.0)
            activity = self.getactivity()
            if activity is not None:
                PopupText(Lstr(
                    value=(('+' + str(score2) + ' ') if showpoints2 else '') +
                    '${N}',
                    subs=[('${N}', name2)]),
                          color=color2,
                          scale=scale2,
                          position=our_pos).autoretain()
            if sound2:
                _ba.playsound(sound2)

            self.score += score2
            self.accumscore += score2

            # Inform a running game of the score.
            if score2 != 0 and activity is not None:
                activity.handlemessage(PlayerScoredMessage(score=score2))

        # First kill of a chain gets no announcement (name is None).
        if name is not None:
            _ba.timer(
                0.3 + delay,
                Call(_apply, name, score, showpoints, color, scale, sound))

        # Keep the tally rollin'...
        # set a timer for a bit in the future.
        self._multi_kill_timer = _ba.Timer(1.0, self._end_multi_kill)
class Stats:
    """Manages scores and statistics for a ba.Session.

    category: Gameplay Classes
    """

    def __init__(self) -> None:
        # Weak ref to the activity we're currently feeding scores to.
        self._activity: Optional[ReferenceType[ba.Activity]] = None
        # Per-player records keyed by player name; entries may outlive the
        # players themselves (see PlayerRecord).
        self._player_records: Dict[str, PlayerRecord] = {}
        # Multi-kill fanfare sounds; loaded lazily per-activity in
        # _load_activity_media().
        self.orchestrahitsound1: Optional[ba.Sound] = None
        self.orchestrahitsound2: Optional[ba.Sound] = None
        self.orchestrahitsound3: Optional[ba.Sound] = None
        self.orchestrahitsound4: Optional[ba.Sound] = None

    def setactivity(self, activity: Optional[ba.Activity]) -> None:
        """Set the current activity for this instance."""
        self._activity = None if activity is None else weakref.ref(activity)

        # Load our media into this activity's context.
        if activity is not None:
            if activity.expired:
                print_error('unexpected finalized activity')
            else:
                with _ba.Context(activity):
                    self._load_activity_media()

    def getactivity(self) -> Optional[ba.Activity]:
        """Get the activity associated with this instance.

        May return None.
        """
        if self._activity is None:
            return None
        return self._activity()

    def _load_activity_media(self) -> None:
        # Must run inside the target activity's context (see setactivity).
        self.orchestrahitsound1 = _ba.getsound('orchestraHit')
        self.orchestrahitsound2 = _ba.getsound('orchestraHit2')
        self.orchestrahitsound3 = _ba.getsound('orchestraHit3')
        self.orchestrahitsound4 = _ba.getsound('orchestraHit4')

    def reset(self) -> None:
        """Reset the stats instance completely."""

        # Just to be safe, lets make sure no multi-kill timers are gonna go off
        # for no-longer-on-the-list players.
        for p_entry in list(self._player_records.values()):
            p_entry.cancel_multi_kill_timer()
        self._player_records = {}

    def reset_accum(self) -> None:
        """Reset per-round ('accum') sub-scores."""
        for s_player in list(self._player_records.values()):
            s_player.cancel_multi_kill_timer()
            s_player.accumscore = 0
            s_player.accum_kill_count = 0
            s_player.accum_killed_count = 0
            s_player.streak = 0

    def register_sessionplayer(self, player: ba.SessionPlayer) -> None:
        """Register a ba.SessionPlayer with this score-set."""
        assert player.exists()  # Invalid refs should never be passed to funcs.
        name = player.getname()
        if name in self._player_records:
            # If the player already exists, update his character and such as
            # it may have changed.
            self._player_records[name].associate_with_sessionplayer(player)
        else:
            name_full = player.getname(full=True)
            self._player_records[name] = PlayerRecord(name, name_full, player,
                                                      self)

    def get_records(self) -> Dict[str, ba.PlayerRecord]:
        """Get PlayerRecord corresponding to still-existing players."""
        records = {}

        # Go through our player records and return ones whose player id still
        # corresponds to a player with that name.
        for record_id, record in self._player_records.items():
            lastplayer = record.get_last_sessionplayer()
            if lastplayer and lastplayer.getname() == record_id:
                records[record_id] = record
        return records

    def player_scored(self,
                      player: ba.Player,
                      base_points: int = 1,
                      target: Optional[Sequence[float]] = None,
                      kill: bool = False,
                      victim_player: Optional[ba.Player] = None,
                      scale: float = 1.0,
                      color: Optional[Sequence[float]] = None,
                      title: Optional[Union[str, ba.Lstr]] = None,
                      screenmessage: bool = True,
                      display: bool = True,
                      importance: int = 1,
                      showpoints: bool = True,
                      big_message: bool = False) -> int:
        """Register a score for the player.

        Return value is actual score with multipliers and such factored in.
        """
        # FIXME: Tidy this up.
        # pylint: disable=cyclic-import
        # pylint: disable=too-many-branches
        # pylint: disable=too-many-locals
        # pylint: disable=too-many-statements
        from bastd.actor.popuptext import PopupText
        from ba import _math
        from ba._gameactivity import GameActivity
        from ba._lang import Lstr
        del victim_player  # Currently unused.
        name = player.getname()
        s_player = self._player_records[name]

        if kill:
            s_player.submit_kill(showpoints=showpoints)

        # Non-default importance gets a highlight color unless an explicit
        # color was requested.
        display_color: Sequence[float] = (1.0, 1.0, 1.0, 1.0)
        if color is not None:
            display_color = color
        elif importance != 1:
            display_color = (1.0, 1.0, 0.4, 1.0)
        points = base_points

        # If they want a big announcement, throw a zoom-text up there.
        if display and big_message:
            try:
                assert self._activity is not None
                activity = self._activity()
                if isinstance(activity, GameActivity):
                    name_full = player.getname(full=True, icon=False)
                    activity.show_zoom_message(
                        Lstr(resource='nameScoresText',
                             subs=[('${NAME}', name_full)]),
                        color=_math.normalized_color(player.team.color))
            except Exception:
                print_exception('error showing big_message')

        # If we currently have an actor, pop up a score over it.
        if display and showpoints:
            our_pos = player.node.position if player.node else None
            if our_pos is not None:
                if target is None:
                    target = our_pos

                # If display-pos is *way* lower than us, raise it up
                # (so we can still see scores from dudes that fell off cliffs).
                display_pos = (target[0], max(target[1], our_pos[1] - 2.0),
                               min(target[2], our_pos[2] + 2.0))
                activity = self.getactivity()
                if activity is not None:
                    if title is not None:
                        sval = Lstr(value='+${A} ${B}',
                                    subs=[('${A}', str(points)),
                                          ('${B}', title)])
                    else:
                        sval = Lstr(value='+${A}',
                                    subs=[('${A}', str(points))])
                    PopupText(sval,
                              color=display_color,
                              scale=1.2 * scale,
                              position=display_pos).autoretain()

        # Tally kills.
        if kill:
            s_player.accum_kill_count += 1
            s_player.kill_count += 1

        # Report non-kill scorings.
        try:
            if screenmessage and not kill:
                _ba.screenmessage(Lstr(resource='nameScoresText',
                                       subs=[('${NAME}', name)]),
                                  top=True,
                                  color=player.color,
                                  image=player.get_icon())
        except Exception:
            print_exception('error announcing score')
        s_player.score += points
        s_player.accumscore += points

        # Inform a running game of the score.
        if points != 0:
            activity = self._activity() if self._activity is not None else None
            if activity is not None:
                activity.handlemessage(PlayerScoredMessage(score=points))
        return points

    def player_was_killed(self,
                          player: ba.Player,
                          killed: bool = False,
                          killer: Optional[ba.Player] = None) -> None:
        """Should be called when a player is killed."""
        from ba._lang import Lstr
        name = player.getname()
        prec = self._player_records[name]
        prec.streak = 0
        if killed:
            prec.accum_killed_count += 1
            prec.killed_count += 1
        try:
            # Announce the death (suicide / betrayal / kill / plain death)
            # only if the current activity opts in.
            if killed and _ba.getactivity().announce_player_deaths:
                if killer is player:
                    _ba.screenmessage(Lstr(resource='nameSuicideText',
                                           subs=[('${NAME}', name)]),
                                      top=True,
                                      color=player.color,
                                      image=player.get_icon())
                elif killer is not None:
                    if killer.team is player.team:
                        _ba.screenmessage(Lstr(resource='nameBetrayedText',
                                               subs=[('${NAME}',
                                                      killer.getname()),
                                                     ('${VICTIM}', name)]),
                                          top=True,
                                          color=killer.color,
                                          image=killer.get_icon())
                    else:
                        _ba.screenmessage(Lstr(resource='nameKilledText',
                                               subs=[('${NAME}',
                                                      killer.getname()),
                                                     ('${VICTIM}', name)]),
                                          top=True,
                                          color=killer.color,
                                          image=killer.get_icon())
                else:
                    _ba.screenmessage(Lstr(resource='nameDiedText',
                                           subs=[('${NAME}', name)]),
                                      top=True,
                                      color=player.color,
                                      image=player.get_icon())
        except Exception:
            print_exception('error announcing kill')
|
normal
|
{
"blob_id": "7c63abacce07ee9d4c2b3941d05f951b75c8d0ff",
"index": 1157,
"step-1": "<mask token>\n\n\nclass PlayerRecord:\n <mask token>\n character: str\n <mask token>\n\n @property\n def team(self) ->ba.SessionTeam:\n \"\"\"The ba.SessionTeam the last associated player was last on.\n\n This can still return a valid result even if the player is gone.\n Raises a ba.SessionTeamNotFoundError if the team no longer exists.\n \"\"\"\n assert self._sessionteam is not None\n team = self._sessionteam()\n if team is None:\n raise SessionTeamNotFoundError()\n return team\n <mask token>\n\n def getname(self, full: bool=False) ->str:\n \"\"\"Return the player entry's name.\"\"\"\n return self.name_full if full else self.name\n\n def get_icon(self) ->Dict[str, Any]:\n \"\"\"Get the icon for this instance's player.\"\"\"\n player = self._last_sessionplayer\n assert player is not None\n return player.get_icon()\n <mask token>\n <mask token>\n\n def associate_with_sessionplayer(self, sessionplayer: ba.SessionPlayer\n ) ->None:\n \"\"\"Associate this entry with a ba.SessionPlayer.\"\"\"\n self._sessionteam = weakref.ref(sessionplayer.sessionteam)\n self.character = sessionplayer.character\n self._last_sessionplayer = sessionplayer\n self._sessionplayer = sessionplayer\n self.streak = 0\n <mask token>\n <mask token>\n <mask token>\n\n\nclass Stats:\n \"\"\"Manages scores and statistics for a ba.Session.\n\n category: Gameplay Classes\n \"\"\"\n\n def __init__(self) ->None:\n self._activity: Optional[ReferenceType[ba.Activity]] = None\n self._player_records: Dict[str, PlayerRecord] = {}\n self.orchestrahitsound1: Optional[ba.Sound] = None\n self.orchestrahitsound2: Optional[ba.Sound] = None\n self.orchestrahitsound3: Optional[ba.Sound] = None\n self.orchestrahitsound4: Optional[ba.Sound] = None\n\n def setactivity(self, activity: Optional[ba.Activity]) ->None:\n \"\"\"Set the current activity for this instance.\"\"\"\n self._activity = None if activity is None else weakref.ref(activity)\n if activity is not None:\n if activity.expired:\n 
print_error('unexpected finalized activity')\n else:\n with _ba.Context(activity):\n self._load_activity_media()\n\n def getactivity(self) ->Optional[ba.Activity]:\n \"\"\"Get the activity associated with this instance.\n\n May return None.\n \"\"\"\n if self._activity is None:\n return None\n return self._activity()\n\n def _load_activity_media(self) ->None:\n self.orchestrahitsound1 = _ba.getsound('orchestraHit')\n self.orchestrahitsound2 = _ba.getsound('orchestraHit2')\n self.orchestrahitsound3 = _ba.getsound('orchestraHit3')\n self.orchestrahitsound4 = _ba.getsound('orchestraHit4')\n\n def reset(self) ->None:\n \"\"\"Reset the stats instance completely.\"\"\"\n for p_entry in list(self._player_records.values()):\n p_entry.cancel_multi_kill_timer()\n self._player_records = {}\n\n def reset_accum(self) ->None:\n \"\"\"Reset per-sound sub-scores.\"\"\"\n for s_player in list(self._player_records.values()):\n s_player.cancel_multi_kill_timer()\n s_player.accumscore = 0\n s_player.accum_kill_count = 0\n s_player.accum_killed_count = 0\n s_player.streak = 0\n\n def register_sessionplayer(self, player: ba.SessionPlayer) ->None:\n \"\"\"Register a ba.SessionPlayer with this score-set.\"\"\"\n assert player.exists()\n name = player.getname()\n if name in self._player_records:\n self._player_records[name].associate_with_sessionplayer(player)\n else:\n name_full = player.getname(full=True)\n self._player_records[name] = PlayerRecord(name, name_full,\n player, self)\n\n def get_records(self) ->Dict[str, ba.PlayerRecord]:\n \"\"\"Get PlayerRecord corresponding to still-existing players.\"\"\"\n records = {}\n for record_id, record in self._player_records.items():\n lastplayer = record.get_last_sessionplayer()\n if lastplayer and lastplayer.getname() == record_id:\n records[record_id] = record\n return records\n\n def player_scored(self, player: ba.Player, base_points: int=1, target:\n Sequence[float]=None, kill: bool=False, victim_player: ba.Player=\n None, scale: 
float=1.0, color: Sequence[float]=None, title: Union[\n str, ba.Lstr]=None, screenmessage: bool=True, display: bool=True,\n importance: int=1, showpoints: bool=True, big_message: bool=False\n ) ->int:\n \"\"\"Register a score for the player.\n\n Return value is actual score with multipliers and such factored in.\n \"\"\"\n from bastd.actor.popuptext import PopupText\n from ba import _math\n from ba._gameactivity import GameActivity\n from ba._lang import Lstr\n del victim_player\n name = player.getname()\n s_player = self._player_records[name]\n if kill:\n s_player.submit_kill(showpoints=showpoints)\n display_color: Sequence[float] = (1.0, 1.0, 1.0, 1.0)\n if color is not None:\n display_color = color\n elif importance != 1:\n display_color = 1.0, 1.0, 0.4, 1.0\n points = base_points\n if display and big_message:\n try:\n assert self._activity is not None\n activity = self._activity()\n if isinstance(activity, GameActivity):\n name_full = player.getname(full=True, icon=False)\n activity.show_zoom_message(Lstr(resource=\n 'nameScoresText', subs=[('${NAME}', name_full)]),\n color=_math.normalized_color(player.team.color))\n except Exception:\n print_exception('error showing big_message')\n if display and showpoints:\n our_pos = player.node.position if player.node else None\n if our_pos is not None:\n if target is None:\n target = our_pos\n display_pos = target[0], max(target[1], our_pos[1] - 2.0), min(\n target[2], our_pos[2] + 2.0)\n activity = self.getactivity()\n if activity is not None:\n if title is not None:\n sval = Lstr(value='+${A} ${B}', subs=[('${A}', str(\n points)), ('${B}', title)])\n else:\n sval = Lstr(value='+${A}', subs=[('${A}', str(points))]\n )\n PopupText(sval, color=display_color, scale=1.2 * scale,\n position=display_pos).autoretain()\n if kill:\n s_player.accum_kill_count += 1\n s_player.kill_count += 1\n try:\n if screenmessage and not kill:\n _ba.screenmessage(Lstr(resource='nameScoresText', subs=[(\n '${NAME}', name)]), top=True, 
color=player.color, image\n =player.get_icon())\n except Exception:\n print_exception('error announcing score')\n s_player.score += points\n s_player.accumscore += points\n if points != 0:\n activity = self._activity() if self._activity is not None else None\n if activity is not None:\n activity.handlemessage(PlayerScoredMessage(score=points))\n return points\n\n def player_was_killed(self, player: ba.Player, killed: bool=False,\n killer: ba.Player=None) ->None:\n \"\"\"Should be called when a player is killed.\"\"\"\n from ba._lang import Lstr\n name = player.getname()\n prec = self._player_records[name]\n prec.streak = 0\n if killed:\n prec.accum_killed_count += 1\n prec.killed_count += 1\n try:\n if killed and _ba.getactivity().announce_player_deaths:\n if killer is player:\n _ba.screenmessage(Lstr(resource='nameSuicideText', subs\n =[('${NAME}', name)]), top=True, color=player.color,\n image=player.get_icon())\n elif killer is not None:\n if killer.team is player.team:\n _ba.screenmessage(Lstr(resource='nameBetrayedText',\n subs=[('${NAME}', killer.getname()), (\n '${VICTIM}', name)]), top=True, color=killer.\n color, image=killer.get_icon())\n else:\n _ba.screenmessage(Lstr(resource='nameKilledText',\n subs=[('${NAME}', killer.getname()), (\n '${VICTIM}', name)]), top=True, color=killer.\n color, image=killer.get_icon())\n else:\n _ba.screenmessage(Lstr(resource='nameDiedText', subs=[(\n '${NAME}', name)]), top=True, color=player.color,\n image=player.get_icon())\n except Exception:\n print_exception('error announcing kill')\n",
"step-2": "<mask token>\n\n\nclass PlayerRecord:\n <mask token>\n character: str\n\n def __init__(self, name: str, name_full: str, sessionplayer: ba.\n SessionPlayer, stats: ba.Stats):\n self.name = name\n self.name_full = name_full\n self.score = 0\n self.accumscore = 0\n self.kill_count = 0\n self.accum_kill_count = 0\n self.killed_count = 0\n self.accum_killed_count = 0\n self._multi_kill_timer: Optional[ba.Timer] = None\n self._multi_kill_count = 0\n self._stats = weakref.ref(stats)\n self._last_sessionplayer: Optional[ba.SessionPlayer] = None\n self._sessionplayer: Optional[ba.SessionPlayer] = None\n self._sessionteam: Optional[ReferenceType[ba.SessionTeam]] = None\n self.streak = 0\n self.associate_with_sessionplayer(sessionplayer)\n\n @property\n def team(self) ->ba.SessionTeam:\n \"\"\"The ba.SessionTeam the last associated player was last on.\n\n This can still return a valid result even if the player is gone.\n Raises a ba.SessionTeamNotFoundError if the team no longer exists.\n \"\"\"\n assert self._sessionteam is not None\n team = self._sessionteam()\n if team is None:\n raise SessionTeamNotFoundError()\n return team\n\n @property\n def player(self) ->ba.SessionPlayer:\n \"\"\"Return the instance's associated ba.SessionPlayer.\n\n Raises a ba.SessionPlayerNotFoundError if the player\n no longer exists.\n \"\"\"\n if not self._sessionplayer:\n raise SessionPlayerNotFoundError()\n return self._sessionplayer\n\n def getname(self, full: bool=False) ->str:\n \"\"\"Return the player entry's name.\"\"\"\n return self.name_full if full else self.name\n\n def get_icon(self) ->Dict[str, Any]:\n \"\"\"Get the icon for this instance's player.\"\"\"\n player = self._last_sessionplayer\n assert player is not None\n return player.get_icon()\n\n def cancel_multi_kill_timer(self) ->None:\n \"\"\"Cancel any multi-kill timer for this player entry.\"\"\"\n self._multi_kill_timer = None\n\n def getactivity(self) ->Optional[ba.Activity]:\n \"\"\"Return the ba.Activity this 
instance is currently associated with.\n\n Returns None if the activity no longer exists.\"\"\"\n stats = self._stats()\n if stats is not None:\n return stats.getactivity()\n return None\n\n def associate_with_sessionplayer(self, sessionplayer: ba.SessionPlayer\n ) ->None:\n \"\"\"Associate this entry with a ba.SessionPlayer.\"\"\"\n self._sessionteam = weakref.ref(sessionplayer.sessionteam)\n self.character = sessionplayer.character\n self._last_sessionplayer = sessionplayer\n self._sessionplayer = sessionplayer\n self.streak = 0\n\n def _end_multi_kill(self) ->None:\n self._multi_kill_timer = None\n self._multi_kill_count = 0\n\n def get_last_sessionplayer(self) ->ba.SessionPlayer:\n \"\"\"Return the last ba.Player we were associated with.\"\"\"\n assert self._last_sessionplayer is not None\n return self._last_sessionplayer\n <mask token>\n\n\nclass Stats:\n \"\"\"Manages scores and statistics for a ba.Session.\n\n category: Gameplay Classes\n \"\"\"\n\n def __init__(self) ->None:\n self._activity: Optional[ReferenceType[ba.Activity]] = None\n self._player_records: Dict[str, PlayerRecord] = {}\n self.orchestrahitsound1: Optional[ba.Sound] = None\n self.orchestrahitsound2: Optional[ba.Sound] = None\n self.orchestrahitsound3: Optional[ba.Sound] = None\n self.orchestrahitsound4: Optional[ba.Sound] = None\n\n def setactivity(self, activity: Optional[ba.Activity]) ->None:\n \"\"\"Set the current activity for this instance.\"\"\"\n self._activity = None if activity is None else weakref.ref(activity)\n if activity is not None:\n if activity.expired:\n print_error('unexpected finalized activity')\n else:\n with _ba.Context(activity):\n self._load_activity_media()\n\n def getactivity(self) ->Optional[ba.Activity]:\n \"\"\"Get the activity associated with this instance.\n\n May return None.\n \"\"\"\n if self._activity is None:\n return None\n return self._activity()\n\n def _load_activity_media(self) ->None:\n self.orchestrahitsound1 = _ba.getsound('orchestraHit')\n 
self.orchestrahitsound2 = _ba.getsound('orchestraHit2')\n self.orchestrahitsound3 = _ba.getsound('orchestraHit3')\n self.orchestrahitsound4 = _ba.getsound('orchestraHit4')\n\n def reset(self) ->None:\n \"\"\"Reset the stats instance completely.\"\"\"\n for p_entry in list(self._player_records.values()):\n p_entry.cancel_multi_kill_timer()\n self._player_records = {}\n\n def reset_accum(self) ->None:\n \"\"\"Reset per-sound sub-scores.\"\"\"\n for s_player in list(self._player_records.values()):\n s_player.cancel_multi_kill_timer()\n s_player.accumscore = 0\n s_player.accum_kill_count = 0\n s_player.accum_killed_count = 0\n s_player.streak = 0\n\n def register_sessionplayer(self, player: ba.SessionPlayer) ->None:\n \"\"\"Register a ba.SessionPlayer with this score-set.\"\"\"\n assert player.exists()\n name = player.getname()\n if name in self._player_records:\n self._player_records[name].associate_with_sessionplayer(player)\n else:\n name_full = player.getname(full=True)\n self._player_records[name] = PlayerRecord(name, name_full,\n player, self)\n\n def get_records(self) ->Dict[str, ba.PlayerRecord]:\n \"\"\"Get PlayerRecord corresponding to still-existing players.\"\"\"\n records = {}\n for record_id, record in self._player_records.items():\n lastplayer = record.get_last_sessionplayer()\n if lastplayer and lastplayer.getname() == record_id:\n records[record_id] = record\n return records\n\n def player_scored(self, player: ba.Player, base_points: int=1, target:\n Sequence[float]=None, kill: bool=False, victim_player: ba.Player=\n None, scale: float=1.0, color: Sequence[float]=None, title: Union[\n str, ba.Lstr]=None, screenmessage: bool=True, display: bool=True,\n importance: int=1, showpoints: bool=True, big_message: bool=False\n ) ->int:\n \"\"\"Register a score for the player.\n\n Return value is actual score with multipliers and such factored in.\n \"\"\"\n from bastd.actor.popuptext import PopupText\n from ba import _math\n from ba._gameactivity import 
GameActivity\n from ba._lang import Lstr\n del victim_player\n name = player.getname()\n s_player = self._player_records[name]\n if kill:\n s_player.submit_kill(showpoints=showpoints)\n display_color: Sequence[float] = (1.0, 1.0, 1.0, 1.0)\n if color is not None:\n display_color = color\n elif importance != 1:\n display_color = 1.0, 1.0, 0.4, 1.0\n points = base_points\n if display and big_message:\n try:\n assert self._activity is not None\n activity = self._activity()\n if isinstance(activity, GameActivity):\n name_full = player.getname(full=True, icon=False)\n activity.show_zoom_message(Lstr(resource=\n 'nameScoresText', subs=[('${NAME}', name_full)]),\n color=_math.normalized_color(player.team.color))\n except Exception:\n print_exception('error showing big_message')\n if display and showpoints:\n our_pos = player.node.position if player.node else None\n if our_pos is not None:\n if target is None:\n target = our_pos\n display_pos = target[0], max(target[1], our_pos[1] - 2.0), min(\n target[2], our_pos[2] + 2.0)\n activity = self.getactivity()\n if activity is not None:\n if title is not None:\n sval = Lstr(value='+${A} ${B}', subs=[('${A}', str(\n points)), ('${B}', title)])\n else:\n sval = Lstr(value='+${A}', subs=[('${A}', str(points))]\n )\n PopupText(sval, color=display_color, scale=1.2 * scale,\n position=display_pos).autoretain()\n if kill:\n s_player.accum_kill_count += 1\n s_player.kill_count += 1\n try:\n if screenmessage and not kill:\n _ba.screenmessage(Lstr(resource='nameScoresText', subs=[(\n '${NAME}', name)]), top=True, color=player.color, image\n =player.get_icon())\n except Exception:\n print_exception('error announcing score')\n s_player.score += points\n s_player.accumscore += points\n if points != 0:\n activity = self._activity() if self._activity is not None else None\n if activity is not None:\n activity.handlemessage(PlayerScoredMessage(score=points))\n return points\n\n def player_was_killed(self, player: ba.Player, killed: 
bool=False,\n killer: ba.Player=None) ->None:\n \"\"\"Should be called when a player is killed.\"\"\"\n from ba._lang import Lstr\n name = player.getname()\n prec = self._player_records[name]\n prec.streak = 0\n if killed:\n prec.accum_killed_count += 1\n prec.killed_count += 1\n try:\n if killed and _ba.getactivity().announce_player_deaths:\n if killer is player:\n _ba.screenmessage(Lstr(resource='nameSuicideText', subs\n =[('${NAME}', name)]), top=True, color=player.color,\n image=player.get_icon())\n elif killer is not None:\n if killer.team is player.team:\n _ba.screenmessage(Lstr(resource='nameBetrayedText',\n subs=[('${NAME}', killer.getname()), (\n '${VICTIM}', name)]), top=True, color=killer.\n color, image=killer.get_icon())\n else:\n _ba.screenmessage(Lstr(resource='nameKilledText',\n subs=[('${NAME}', killer.getname()), (\n '${VICTIM}', name)]), top=True, color=killer.\n color, image=killer.get_icon())\n else:\n _ba.screenmessage(Lstr(resource='nameDiedText', subs=[(\n '${NAME}', name)]), top=True, color=player.color,\n image=player.get_icon())\n except Exception:\n print_exception('error announcing kill')\n",
"step-3": "<mask token>\nif TYPE_CHECKING:\n import ba\n from weakref import ReferenceType\n from typing import Any, Dict, Optional, Sequence, Union, Tuple\n\n\n@dataclass\nclass PlayerScoredMessage:\n \"\"\"Informs something that a ba.Player scored.\n\n Category: Message Classes\n\n Attrs:\n\n score\n The score value.\n \"\"\"\n score: int\n\n\nclass PlayerRecord:\n \"\"\"Stats for an individual player in a ba.Stats object.\n\n Category: Gameplay Classes\n\n This does not necessarily correspond to a ba.Player that is\n still present (stats may be retained for players that leave\n mid-game)\n \"\"\"\n character: str\n\n def __init__(self, name: str, name_full: str, sessionplayer: ba.\n SessionPlayer, stats: ba.Stats):\n self.name = name\n self.name_full = name_full\n self.score = 0\n self.accumscore = 0\n self.kill_count = 0\n self.accum_kill_count = 0\n self.killed_count = 0\n self.accum_killed_count = 0\n self._multi_kill_timer: Optional[ba.Timer] = None\n self._multi_kill_count = 0\n self._stats = weakref.ref(stats)\n self._last_sessionplayer: Optional[ba.SessionPlayer] = None\n self._sessionplayer: Optional[ba.SessionPlayer] = None\n self._sessionteam: Optional[ReferenceType[ba.SessionTeam]] = None\n self.streak = 0\n self.associate_with_sessionplayer(sessionplayer)\n\n @property\n def team(self) ->ba.SessionTeam:\n \"\"\"The ba.SessionTeam the last associated player was last on.\n\n This can still return a valid result even if the player is gone.\n Raises a ba.SessionTeamNotFoundError if the team no longer exists.\n \"\"\"\n assert self._sessionteam is not None\n team = self._sessionteam()\n if team is None:\n raise SessionTeamNotFoundError()\n return team\n\n @property\n def player(self) ->ba.SessionPlayer:\n \"\"\"Return the instance's associated ba.SessionPlayer.\n\n Raises a ba.SessionPlayerNotFoundError if the player\n no longer exists.\n \"\"\"\n if not self._sessionplayer:\n raise SessionPlayerNotFoundError()\n return self._sessionplayer\n\n def 
getname(self, full: bool=False) ->str:\n \"\"\"Return the player entry's name.\"\"\"\n return self.name_full if full else self.name\n\n def get_icon(self) ->Dict[str, Any]:\n \"\"\"Get the icon for this instance's player.\"\"\"\n player = self._last_sessionplayer\n assert player is not None\n return player.get_icon()\n\n def cancel_multi_kill_timer(self) ->None:\n \"\"\"Cancel any multi-kill timer for this player entry.\"\"\"\n self._multi_kill_timer = None\n\n def getactivity(self) ->Optional[ba.Activity]:\n \"\"\"Return the ba.Activity this instance is currently associated with.\n\n Returns None if the activity no longer exists.\"\"\"\n stats = self._stats()\n if stats is not None:\n return stats.getactivity()\n return None\n\n def associate_with_sessionplayer(self, sessionplayer: ba.SessionPlayer\n ) ->None:\n \"\"\"Associate this entry with a ba.SessionPlayer.\"\"\"\n self._sessionteam = weakref.ref(sessionplayer.sessionteam)\n self.character = sessionplayer.character\n self._last_sessionplayer = sessionplayer\n self._sessionplayer = sessionplayer\n self.streak = 0\n\n def _end_multi_kill(self) ->None:\n self._multi_kill_timer = None\n self._multi_kill_count = 0\n\n def get_last_sessionplayer(self) ->ba.SessionPlayer:\n \"\"\"Return the last ba.Player we were associated with.\"\"\"\n assert self._last_sessionplayer is not None\n return self._last_sessionplayer\n\n def submit_kill(self, showpoints: bool=True) ->None:\n \"\"\"Submit a kill for this player entry.\"\"\"\n from ba._lang import Lstr\n from ba._general import Call\n self._multi_kill_count += 1\n stats = self._stats()\n assert stats\n if self._multi_kill_count == 1:\n score = 0\n name = None\n delay = 0.0\n color = 0.0, 0.0, 0.0, 1.0\n scale = 1.0\n sound = None\n elif self._multi_kill_count == 2:\n score = 20\n name = Lstr(resource='twoKillText')\n color = 0.1, 1.0, 0.0, 1\n scale = 1.0\n delay = 0.0\n sound = stats.orchestrahitsound1\n elif self._multi_kill_count == 3:\n score = 40\n name = 
Lstr(resource='threeKillText')\n color = 1.0, 0.7, 0.0, 1\n scale = 1.1\n delay = 0.3\n sound = stats.orchestrahitsound2\n elif self._multi_kill_count == 4:\n score = 60\n name = Lstr(resource='fourKillText')\n color = 1.0, 1.0, 0.0, 1\n scale = 1.2\n delay = 0.6\n sound = stats.orchestrahitsound3\n elif self._multi_kill_count == 5:\n score = 80\n name = Lstr(resource='fiveKillText')\n color = 1.0, 0.5, 0.0, 1\n scale = 1.3\n delay = 0.9\n sound = stats.orchestrahitsound4\n else:\n score = 100\n name = Lstr(resource='multiKillText', subs=[('${COUNT}', str(\n self._multi_kill_count))])\n color = 1.0, 0.5, 0.0, 1\n scale = 1.3\n delay = 1.0\n sound = stats.orchestrahitsound4\n\n def _apply(name2: Lstr, score2: int, showpoints2: bool, color2:\n Tuple[float, float, float, float], scale2: float, sound2:\n Optional[ba.Sound]) ->None:\n from bastd.actor.popuptext import PopupText\n our_pos: Optional[ba.Vec3] = None\n if self._sessionplayer:\n if self._sessionplayer.activityplayer is not None:\n try:\n our_pos = self._sessionplayer.activityplayer.position\n except NotFoundError:\n pass\n if our_pos is None:\n return\n our_pos = _ba.Vec3(our_pos[0] + (random.random() - 0.5) * 2.0, \n our_pos[1] + (random.random() - 0.5) * 2.0, our_pos[2] + (\n random.random() - 0.5) * 2.0)\n activity = self.getactivity()\n if activity is not None:\n PopupText(Lstr(value=('+' + str(score2) + ' ' if\n showpoints2 else '') + '${N}', subs=[('${N}', name2)]),\n color=color2, scale=scale2, position=our_pos).autoretain()\n if sound2:\n _ba.playsound(sound2)\n self.score += score2\n self.accumscore += score2\n if score2 != 0 and activity is not None:\n activity.handlemessage(PlayerScoredMessage(score=score2))\n if name is not None:\n _ba.timer(0.3 + delay, Call(_apply, name, score, showpoints,\n color, scale, sound))\n self._multi_kill_timer = _ba.Timer(1.0, self._end_multi_kill)\n\n\nclass Stats:\n \"\"\"Manages scores and statistics for a ba.Session.\n\n category: Gameplay Classes\n \"\"\"\n\n 
def __init__(self) ->None:\n self._activity: Optional[ReferenceType[ba.Activity]] = None\n self._player_records: Dict[str, PlayerRecord] = {}\n self.orchestrahitsound1: Optional[ba.Sound] = None\n self.orchestrahitsound2: Optional[ba.Sound] = None\n self.orchestrahitsound3: Optional[ba.Sound] = None\n self.orchestrahitsound4: Optional[ba.Sound] = None\n\n def setactivity(self, activity: Optional[ba.Activity]) ->None:\n \"\"\"Set the current activity for this instance.\"\"\"\n self._activity = None if activity is None else weakref.ref(activity)\n if activity is not None:\n if activity.expired:\n print_error('unexpected finalized activity')\n else:\n with _ba.Context(activity):\n self._load_activity_media()\n\n def getactivity(self) ->Optional[ba.Activity]:\n \"\"\"Get the activity associated with this instance.\n\n May return None.\n \"\"\"\n if self._activity is None:\n return None\n return self._activity()\n\n def _load_activity_media(self) ->None:\n self.orchestrahitsound1 = _ba.getsound('orchestraHit')\n self.orchestrahitsound2 = _ba.getsound('orchestraHit2')\n self.orchestrahitsound3 = _ba.getsound('orchestraHit3')\n self.orchestrahitsound4 = _ba.getsound('orchestraHit4')\n\n def reset(self) ->None:\n \"\"\"Reset the stats instance completely.\"\"\"\n for p_entry in list(self._player_records.values()):\n p_entry.cancel_multi_kill_timer()\n self._player_records = {}\n\n def reset_accum(self) ->None:\n \"\"\"Reset per-sound sub-scores.\"\"\"\n for s_player in list(self._player_records.values()):\n s_player.cancel_multi_kill_timer()\n s_player.accumscore = 0\n s_player.accum_kill_count = 0\n s_player.accum_killed_count = 0\n s_player.streak = 0\n\n def register_sessionplayer(self, player: ba.SessionPlayer) ->None:\n \"\"\"Register a ba.SessionPlayer with this score-set.\"\"\"\n assert player.exists()\n name = player.getname()\n if name in self._player_records:\n self._player_records[name].associate_with_sessionplayer(player)\n else:\n name_full = 
player.getname(full=True)\n self._player_records[name] = PlayerRecord(name, name_full,\n player, self)\n\n def get_records(self) ->Dict[str, ba.PlayerRecord]:\n \"\"\"Get PlayerRecord corresponding to still-existing players.\"\"\"\n records = {}\n for record_id, record in self._player_records.items():\n lastplayer = record.get_last_sessionplayer()\n if lastplayer and lastplayer.getname() == record_id:\n records[record_id] = record\n return records\n\n def player_scored(self, player: ba.Player, base_points: int=1, target:\n Sequence[float]=None, kill: bool=False, victim_player: ba.Player=\n None, scale: float=1.0, color: Sequence[float]=None, title: Union[\n str, ba.Lstr]=None, screenmessage: bool=True, display: bool=True,\n importance: int=1, showpoints: bool=True, big_message: bool=False\n ) ->int:\n \"\"\"Register a score for the player.\n\n Return value is actual score with multipliers and such factored in.\n \"\"\"\n from bastd.actor.popuptext import PopupText\n from ba import _math\n from ba._gameactivity import GameActivity\n from ba._lang import Lstr\n del victim_player\n name = player.getname()\n s_player = self._player_records[name]\n if kill:\n s_player.submit_kill(showpoints=showpoints)\n display_color: Sequence[float] = (1.0, 1.0, 1.0, 1.0)\n if color is not None:\n display_color = color\n elif importance != 1:\n display_color = 1.0, 1.0, 0.4, 1.0\n points = base_points\n if display and big_message:\n try:\n assert self._activity is not None\n activity = self._activity()\n if isinstance(activity, GameActivity):\n name_full = player.getname(full=True, icon=False)\n activity.show_zoom_message(Lstr(resource=\n 'nameScoresText', subs=[('${NAME}', name_full)]),\n color=_math.normalized_color(player.team.color))\n except Exception:\n print_exception('error showing big_message')\n if display and showpoints:\n our_pos = player.node.position if player.node else None\n if our_pos is not None:\n if target is None:\n target = our_pos\n display_pos = target[0], 
max(target[1], our_pos[1] - 2.0), min(\n target[2], our_pos[2] + 2.0)\n activity = self.getactivity()\n if activity is not None:\n if title is not None:\n sval = Lstr(value='+${A} ${B}', subs=[('${A}', str(\n points)), ('${B}', title)])\n else:\n sval = Lstr(value='+${A}', subs=[('${A}', str(points))]\n )\n PopupText(sval, color=display_color, scale=1.2 * scale,\n position=display_pos).autoretain()\n if kill:\n s_player.accum_kill_count += 1\n s_player.kill_count += 1\n try:\n if screenmessage and not kill:\n _ba.screenmessage(Lstr(resource='nameScoresText', subs=[(\n '${NAME}', name)]), top=True, color=player.color, image\n =player.get_icon())\n except Exception:\n print_exception('error announcing score')\n s_player.score += points\n s_player.accumscore += points\n if points != 0:\n activity = self._activity() if self._activity is not None else None\n if activity is not None:\n activity.handlemessage(PlayerScoredMessage(score=points))\n return points\n\n def player_was_killed(self, player: ba.Player, killed: bool=False,\n killer: ba.Player=None) ->None:\n \"\"\"Should be called when a player is killed.\"\"\"\n from ba._lang import Lstr\n name = player.getname()\n prec = self._player_records[name]\n prec.streak = 0\n if killed:\n prec.accum_killed_count += 1\n prec.killed_count += 1\n try:\n if killed and _ba.getactivity().announce_player_deaths:\n if killer is player:\n _ba.screenmessage(Lstr(resource='nameSuicideText', subs\n =[('${NAME}', name)]), top=True, color=player.color,\n image=player.get_icon())\n elif killer is not None:\n if killer.team is player.team:\n _ba.screenmessage(Lstr(resource='nameBetrayedText',\n subs=[('${NAME}', killer.getname()), (\n '${VICTIM}', name)]), top=True, color=killer.\n color, image=killer.get_icon())\n else:\n _ba.screenmessage(Lstr(resource='nameKilledText',\n subs=[('${NAME}', killer.getname()), (\n '${VICTIM}', name)]), top=True, color=killer.\n color, image=killer.get_icon())\n else:\n 
_ba.screenmessage(Lstr(resource='nameDiedText', subs=[(\n '${NAME}', name)]), top=True, color=player.color,\n image=player.get_icon())\n except Exception:\n print_exception('error announcing kill')\n",
"step-4": "<mask token>\nfrom __future__ import annotations\nimport random\nimport weakref\nfrom typing import TYPE_CHECKING\nfrom dataclasses import dataclass\nimport _ba\nfrom ba._error import print_exception, print_error, SessionTeamNotFoundError, SessionPlayerNotFoundError, NotFoundError\nif TYPE_CHECKING:\n import ba\n from weakref import ReferenceType\n from typing import Any, Dict, Optional, Sequence, Union, Tuple\n\n\n@dataclass\nclass PlayerScoredMessage:\n \"\"\"Informs something that a ba.Player scored.\n\n Category: Message Classes\n\n Attrs:\n\n score\n The score value.\n \"\"\"\n score: int\n\n\nclass PlayerRecord:\n \"\"\"Stats for an individual player in a ba.Stats object.\n\n Category: Gameplay Classes\n\n This does not necessarily correspond to a ba.Player that is\n still present (stats may be retained for players that leave\n mid-game)\n \"\"\"\n character: str\n\n def __init__(self, name: str, name_full: str, sessionplayer: ba.\n SessionPlayer, stats: ba.Stats):\n self.name = name\n self.name_full = name_full\n self.score = 0\n self.accumscore = 0\n self.kill_count = 0\n self.accum_kill_count = 0\n self.killed_count = 0\n self.accum_killed_count = 0\n self._multi_kill_timer: Optional[ba.Timer] = None\n self._multi_kill_count = 0\n self._stats = weakref.ref(stats)\n self._last_sessionplayer: Optional[ba.SessionPlayer] = None\n self._sessionplayer: Optional[ba.SessionPlayer] = None\n self._sessionteam: Optional[ReferenceType[ba.SessionTeam]] = None\n self.streak = 0\n self.associate_with_sessionplayer(sessionplayer)\n\n @property\n def team(self) ->ba.SessionTeam:\n \"\"\"The ba.SessionTeam the last associated player was last on.\n\n This can still return a valid result even if the player is gone.\n Raises a ba.SessionTeamNotFoundError if the team no longer exists.\n \"\"\"\n assert self._sessionteam is not None\n team = self._sessionteam()\n if team is None:\n raise SessionTeamNotFoundError()\n return team\n\n @property\n def player(self) 
->ba.SessionPlayer:\n \"\"\"Return the instance's associated ba.SessionPlayer.\n\n Raises a ba.SessionPlayerNotFoundError if the player\n no longer exists.\n \"\"\"\n if not self._sessionplayer:\n raise SessionPlayerNotFoundError()\n return self._sessionplayer\n\n def getname(self, full: bool=False) ->str:\n \"\"\"Return the player entry's name.\"\"\"\n return self.name_full if full else self.name\n\n def get_icon(self) ->Dict[str, Any]:\n \"\"\"Get the icon for this instance's player.\"\"\"\n player = self._last_sessionplayer\n assert player is not None\n return player.get_icon()\n\n def cancel_multi_kill_timer(self) ->None:\n \"\"\"Cancel any multi-kill timer for this player entry.\"\"\"\n self._multi_kill_timer = None\n\n def getactivity(self) ->Optional[ba.Activity]:\n \"\"\"Return the ba.Activity this instance is currently associated with.\n\n Returns None if the activity no longer exists.\"\"\"\n stats = self._stats()\n if stats is not None:\n return stats.getactivity()\n return None\n\n def associate_with_sessionplayer(self, sessionplayer: ba.SessionPlayer\n ) ->None:\n \"\"\"Associate this entry with a ba.SessionPlayer.\"\"\"\n self._sessionteam = weakref.ref(sessionplayer.sessionteam)\n self.character = sessionplayer.character\n self._last_sessionplayer = sessionplayer\n self._sessionplayer = sessionplayer\n self.streak = 0\n\n def _end_multi_kill(self) ->None:\n self._multi_kill_timer = None\n self._multi_kill_count = 0\n\n def get_last_sessionplayer(self) ->ba.SessionPlayer:\n \"\"\"Return the last ba.Player we were associated with.\"\"\"\n assert self._last_sessionplayer is not None\n return self._last_sessionplayer\n\n def submit_kill(self, showpoints: bool=True) ->None:\n \"\"\"Submit a kill for this player entry.\"\"\"\n from ba._lang import Lstr\n from ba._general import Call\n self._multi_kill_count += 1\n stats = self._stats()\n assert stats\n if self._multi_kill_count == 1:\n score = 0\n name = None\n delay = 0.0\n color = 0.0, 0.0, 0.0, 1.0\n 
scale = 1.0\n sound = None\n elif self._multi_kill_count == 2:\n score = 20\n name = Lstr(resource='twoKillText')\n color = 0.1, 1.0, 0.0, 1\n scale = 1.0\n delay = 0.0\n sound = stats.orchestrahitsound1\n elif self._multi_kill_count == 3:\n score = 40\n name = Lstr(resource='threeKillText')\n color = 1.0, 0.7, 0.0, 1\n scale = 1.1\n delay = 0.3\n sound = stats.orchestrahitsound2\n elif self._multi_kill_count == 4:\n score = 60\n name = Lstr(resource='fourKillText')\n color = 1.0, 1.0, 0.0, 1\n scale = 1.2\n delay = 0.6\n sound = stats.orchestrahitsound3\n elif self._multi_kill_count == 5:\n score = 80\n name = Lstr(resource='fiveKillText')\n color = 1.0, 0.5, 0.0, 1\n scale = 1.3\n delay = 0.9\n sound = stats.orchestrahitsound4\n else:\n score = 100\n name = Lstr(resource='multiKillText', subs=[('${COUNT}', str(\n self._multi_kill_count))])\n color = 1.0, 0.5, 0.0, 1\n scale = 1.3\n delay = 1.0\n sound = stats.orchestrahitsound4\n\n def _apply(name2: Lstr, score2: int, showpoints2: bool, color2:\n Tuple[float, float, float, float], scale2: float, sound2:\n Optional[ba.Sound]) ->None:\n from bastd.actor.popuptext import PopupText\n our_pos: Optional[ba.Vec3] = None\n if self._sessionplayer:\n if self._sessionplayer.activityplayer is not None:\n try:\n our_pos = self._sessionplayer.activityplayer.position\n except NotFoundError:\n pass\n if our_pos is None:\n return\n our_pos = _ba.Vec3(our_pos[0] + (random.random() - 0.5) * 2.0, \n our_pos[1] + (random.random() - 0.5) * 2.0, our_pos[2] + (\n random.random() - 0.5) * 2.0)\n activity = self.getactivity()\n if activity is not None:\n PopupText(Lstr(value=('+' + str(score2) + ' ' if\n showpoints2 else '') + '${N}', subs=[('${N}', name2)]),\n color=color2, scale=scale2, position=our_pos).autoretain()\n if sound2:\n _ba.playsound(sound2)\n self.score += score2\n self.accumscore += score2\n if score2 != 0 and activity is not None:\n activity.handlemessage(PlayerScoredMessage(score=score2))\n if name is not None:\n 
_ba.timer(0.3 + delay, Call(_apply, name, score, showpoints,\n color, scale, sound))\n self._multi_kill_timer = _ba.Timer(1.0, self._end_multi_kill)\n\n\nclass Stats:\n \"\"\"Manages scores and statistics for a ba.Session.\n\n category: Gameplay Classes\n \"\"\"\n\n def __init__(self) ->None:\n self._activity: Optional[ReferenceType[ba.Activity]] = None\n self._player_records: Dict[str, PlayerRecord] = {}\n self.orchestrahitsound1: Optional[ba.Sound] = None\n self.orchestrahitsound2: Optional[ba.Sound] = None\n self.orchestrahitsound3: Optional[ba.Sound] = None\n self.orchestrahitsound4: Optional[ba.Sound] = None\n\n def setactivity(self, activity: Optional[ba.Activity]) ->None:\n \"\"\"Set the current activity for this instance.\"\"\"\n self._activity = None if activity is None else weakref.ref(activity)\n if activity is not None:\n if activity.expired:\n print_error('unexpected finalized activity')\n else:\n with _ba.Context(activity):\n self._load_activity_media()\n\n def getactivity(self) ->Optional[ba.Activity]:\n \"\"\"Get the activity associated with this instance.\n\n May return None.\n \"\"\"\n if self._activity is None:\n return None\n return self._activity()\n\n def _load_activity_media(self) ->None:\n self.orchestrahitsound1 = _ba.getsound('orchestraHit')\n self.orchestrahitsound2 = _ba.getsound('orchestraHit2')\n self.orchestrahitsound3 = _ba.getsound('orchestraHit3')\n self.orchestrahitsound4 = _ba.getsound('orchestraHit4')\n\n def reset(self) ->None:\n \"\"\"Reset the stats instance completely.\"\"\"\n for p_entry in list(self._player_records.values()):\n p_entry.cancel_multi_kill_timer()\n self._player_records = {}\n\n def reset_accum(self) ->None:\n \"\"\"Reset per-sound sub-scores.\"\"\"\n for s_player in list(self._player_records.values()):\n s_player.cancel_multi_kill_timer()\n s_player.accumscore = 0\n s_player.accum_kill_count = 0\n s_player.accum_killed_count = 0\n s_player.streak = 0\n\n def register_sessionplayer(self, player: 
ba.SessionPlayer) ->None:\n \"\"\"Register a ba.SessionPlayer with this score-set.\"\"\"\n assert player.exists()\n name = player.getname()\n if name in self._player_records:\n self._player_records[name].associate_with_sessionplayer(player)\n else:\n name_full = player.getname(full=True)\n self._player_records[name] = PlayerRecord(name, name_full,\n player, self)\n\n def get_records(self) ->Dict[str, ba.PlayerRecord]:\n \"\"\"Get PlayerRecord corresponding to still-existing players.\"\"\"\n records = {}\n for record_id, record in self._player_records.items():\n lastplayer = record.get_last_sessionplayer()\n if lastplayer and lastplayer.getname() == record_id:\n records[record_id] = record\n return records\n\n def player_scored(self, player: ba.Player, base_points: int=1, target:\n Sequence[float]=None, kill: bool=False, victim_player: ba.Player=\n None, scale: float=1.0, color: Sequence[float]=None, title: Union[\n str, ba.Lstr]=None, screenmessage: bool=True, display: bool=True,\n importance: int=1, showpoints: bool=True, big_message: bool=False\n ) ->int:\n \"\"\"Register a score for the player.\n\n Return value is actual score with multipliers and such factored in.\n \"\"\"\n from bastd.actor.popuptext import PopupText\n from ba import _math\n from ba._gameactivity import GameActivity\n from ba._lang import Lstr\n del victim_player\n name = player.getname()\n s_player = self._player_records[name]\n if kill:\n s_player.submit_kill(showpoints=showpoints)\n display_color: Sequence[float] = (1.0, 1.0, 1.0, 1.0)\n if color is not None:\n display_color = color\n elif importance != 1:\n display_color = 1.0, 1.0, 0.4, 1.0\n points = base_points\n if display and big_message:\n try:\n assert self._activity is not None\n activity = self._activity()\n if isinstance(activity, GameActivity):\n name_full = player.getname(full=True, icon=False)\n activity.show_zoom_message(Lstr(resource=\n 'nameScoresText', subs=[('${NAME}', name_full)]),\n 
color=_math.normalized_color(player.team.color))\n except Exception:\n print_exception('error showing big_message')\n if display and showpoints:\n our_pos = player.node.position if player.node else None\n if our_pos is not None:\n if target is None:\n target = our_pos\n display_pos = target[0], max(target[1], our_pos[1] - 2.0), min(\n target[2], our_pos[2] + 2.0)\n activity = self.getactivity()\n if activity is not None:\n if title is not None:\n sval = Lstr(value='+${A} ${B}', subs=[('${A}', str(\n points)), ('${B}', title)])\n else:\n sval = Lstr(value='+${A}', subs=[('${A}', str(points))]\n )\n PopupText(sval, color=display_color, scale=1.2 * scale,\n position=display_pos).autoretain()\n if kill:\n s_player.accum_kill_count += 1\n s_player.kill_count += 1\n try:\n if screenmessage and not kill:\n _ba.screenmessage(Lstr(resource='nameScoresText', subs=[(\n '${NAME}', name)]), top=True, color=player.color, image\n =player.get_icon())\n except Exception:\n print_exception('error announcing score')\n s_player.score += points\n s_player.accumscore += points\n if points != 0:\n activity = self._activity() if self._activity is not None else None\n if activity is not None:\n activity.handlemessage(PlayerScoredMessage(score=points))\n return points\n\n def player_was_killed(self, player: ba.Player, killed: bool=False,\n killer: ba.Player=None) ->None:\n \"\"\"Should be called when a player is killed.\"\"\"\n from ba._lang import Lstr\n name = player.getname()\n prec = self._player_records[name]\n prec.streak = 0\n if killed:\n prec.accum_killed_count += 1\n prec.killed_count += 1\n try:\n if killed and _ba.getactivity().announce_player_deaths:\n if killer is player:\n _ba.screenmessage(Lstr(resource='nameSuicideText', subs\n =[('${NAME}', name)]), top=True, color=player.color,\n image=player.get_icon())\n elif killer is not None:\n if killer.team is player.team:\n _ba.screenmessage(Lstr(resource='nameBetrayedText',\n subs=[('${NAME}', killer.getname()), (\n '${VICTIM}', 
name)]), top=True, color=killer.\n color, image=killer.get_icon())\n else:\n _ba.screenmessage(Lstr(resource='nameKilledText',\n subs=[('${NAME}', killer.getname()), (\n '${VICTIM}', name)]), top=True, color=killer.\n color, image=killer.get_icon())\n else:\n _ba.screenmessage(Lstr(resource='nameDiedText', subs=[(\n '${NAME}', name)]), top=True, color=player.color,\n image=player.get_icon())\n except Exception:\n print_exception('error announcing kill')\n",
"step-5": "# Copyright (c) 2011-2020 Eric Froemling\n#\n# Permission is hereby granted, free of charge, to any person obtaining a copy\n# of this software and associated documentation files (the \"Software\"), to deal\n# in the Software without restriction, including without limitation the rights\n# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n# copies of the Software, and to permit persons to whom the Software is\n# furnished to do so, subject to the following conditions:\n#\n# The above copyright notice and this permission notice shall be included in\n# all copies or substantial portions of the Software.\n#\n# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n# SOFTWARE.\n# -----------------------------------------------------------------------------\n\"\"\"Functionality related to scores and statistics.\"\"\"\nfrom __future__ import annotations\n\nimport random\nimport weakref\nfrom typing import TYPE_CHECKING\nfrom dataclasses import dataclass\n\nimport _ba\nfrom ba._error import (print_exception, print_error, SessionTeamNotFoundError,\n SessionPlayerNotFoundError, NotFoundError)\n\nif TYPE_CHECKING:\n import ba\n from weakref import ReferenceType\n from typing import Any, Dict, Optional, Sequence, Union, Tuple\n\n\n@dataclass\nclass PlayerScoredMessage:\n \"\"\"Informs something that a ba.Player scored.\n\n Category: Message Classes\n\n Attrs:\n\n score\n The score value.\n \"\"\"\n score: int\n\n\nclass PlayerRecord:\n \"\"\"Stats for an individual player in a ba.Stats object.\n\n Category: Gameplay Classes\n\n This 
does not necessarily correspond to a ba.Player that is\n still present (stats may be retained for players that leave\n mid-game)\n \"\"\"\n character: str\n\n def __init__(self, name: str, name_full: str,\n sessionplayer: ba.SessionPlayer, stats: ba.Stats):\n self.name = name\n self.name_full = name_full\n self.score = 0\n self.accumscore = 0\n self.kill_count = 0\n self.accum_kill_count = 0\n self.killed_count = 0\n self.accum_killed_count = 0\n self._multi_kill_timer: Optional[ba.Timer] = None\n self._multi_kill_count = 0\n self._stats = weakref.ref(stats)\n self._last_sessionplayer: Optional[ba.SessionPlayer] = None\n self._sessionplayer: Optional[ba.SessionPlayer] = None\n self._sessionteam: Optional[ReferenceType[ba.SessionTeam]] = None\n self.streak = 0\n self.associate_with_sessionplayer(sessionplayer)\n\n @property\n def team(self) -> ba.SessionTeam:\n \"\"\"The ba.SessionTeam the last associated player was last on.\n\n This can still return a valid result even if the player is gone.\n Raises a ba.SessionTeamNotFoundError if the team no longer exists.\n \"\"\"\n assert self._sessionteam is not None\n team = self._sessionteam()\n if team is None:\n raise SessionTeamNotFoundError()\n return team\n\n @property\n def player(self) -> ba.SessionPlayer:\n \"\"\"Return the instance's associated ba.SessionPlayer.\n\n Raises a ba.SessionPlayerNotFoundError if the player\n no longer exists.\n \"\"\"\n if not self._sessionplayer:\n raise SessionPlayerNotFoundError()\n return self._sessionplayer\n\n def getname(self, full: bool = False) -> str:\n \"\"\"Return the player entry's name.\"\"\"\n return self.name_full if full else self.name\n\n def get_icon(self) -> Dict[str, Any]:\n \"\"\"Get the icon for this instance's player.\"\"\"\n player = self._last_sessionplayer\n assert player is not None\n return player.get_icon()\n\n def cancel_multi_kill_timer(self) -> None:\n \"\"\"Cancel any multi-kill timer for this player entry.\"\"\"\n self._multi_kill_timer = None\n\n def 
getactivity(self) -> Optional[ba.Activity]:\n \"\"\"Return the ba.Activity this instance is currently associated with.\n\n Returns None if the activity no longer exists.\"\"\"\n stats = self._stats()\n if stats is not None:\n return stats.getactivity()\n return None\n\n def associate_with_sessionplayer(self,\n sessionplayer: ba.SessionPlayer) -> None:\n \"\"\"Associate this entry with a ba.SessionPlayer.\"\"\"\n self._sessionteam = weakref.ref(sessionplayer.sessionteam)\n self.character = sessionplayer.character\n self._last_sessionplayer = sessionplayer\n self._sessionplayer = sessionplayer\n self.streak = 0\n\n def _end_multi_kill(self) -> None:\n self._multi_kill_timer = None\n self._multi_kill_count = 0\n\n def get_last_sessionplayer(self) -> ba.SessionPlayer:\n \"\"\"Return the last ba.Player we were associated with.\"\"\"\n assert self._last_sessionplayer is not None\n return self._last_sessionplayer\n\n def submit_kill(self, showpoints: bool = True) -> None:\n \"\"\"Submit a kill for this player entry.\"\"\"\n # FIXME Clean this up.\n # pylint: disable=too-many-statements\n from ba._lang import Lstr\n from ba._general import Call\n self._multi_kill_count += 1\n stats = self._stats()\n assert stats\n if self._multi_kill_count == 1:\n score = 0\n name = None\n delay = 0.0\n color = (0.0, 0.0, 0.0, 1.0)\n scale = 1.0\n sound = None\n elif self._multi_kill_count == 2:\n score = 20\n name = Lstr(resource='twoKillText')\n color = (0.1, 1.0, 0.0, 1)\n scale = 1.0\n delay = 0.0\n sound = stats.orchestrahitsound1\n elif self._multi_kill_count == 3:\n score = 40\n name = Lstr(resource='threeKillText')\n color = (1.0, 0.7, 0.0, 1)\n scale = 1.1\n delay = 0.3\n sound = stats.orchestrahitsound2\n elif self._multi_kill_count == 4:\n score = 60\n name = Lstr(resource='fourKillText')\n color = (1.0, 1.0, 0.0, 1)\n scale = 1.2\n delay = 0.6\n sound = stats.orchestrahitsound3\n elif self._multi_kill_count == 5:\n score = 80\n name = Lstr(resource='fiveKillText')\n color = 
(1.0, 0.5, 0.0, 1)\n scale = 1.3\n delay = 0.9\n sound = stats.orchestrahitsound4\n else:\n score = 100\n name = Lstr(resource='multiKillText',\n subs=[('${COUNT}', str(self._multi_kill_count))])\n color = (1.0, 0.5, 0.0, 1)\n scale = 1.3\n delay = 1.0\n sound = stats.orchestrahitsound4\n\n def _apply(name2: Lstr, score2: int, showpoints2: bool,\n color2: Tuple[float, float, float, float], scale2: float,\n sound2: Optional[ba.Sound]) -> None:\n from bastd.actor.popuptext import PopupText\n\n # Only award this if they're still alive and we can get\n # a current position for them.\n our_pos: Optional[ba.Vec3] = None\n if self._sessionplayer:\n if self._sessionplayer.activityplayer is not None:\n try:\n our_pos = self._sessionplayer.activityplayer.position\n except NotFoundError:\n pass\n if our_pos is None:\n return\n\n # Jitter position a bit since these often come in clusters.\n our_pos = _ba.Vec3(our_pos[0] + (random.random() - 0.5) * 2.0,\n our_pos[1] + (random.random() - 0.5) * 2.0,\n our_pos[2] + (random.random() - 0.5) * 2.0)\n activity = self.getactivity()\n if activity is not None:\n PopupText(Lstr(\n value=(('+' + str(score2) + ' ') if showpoints2 else '') +\n '${N}',\n subs=[('${N}', name2)]),\n color=color2,\n scale=scale2,\n position=our_pos).autoretain()\n if sound2:\n _ba.playsound(sound2)\n\n self.score += score2\n self.accumscore += score2\n\n # Inform a running game of the score.\n if score2 != 0 and activity is not None:\n activity.handlemessage(PlayerScoredMessage(score=score2))\n\n if name is not None:\n _ba.timer(\n 0.3 + delay,\n Call(_apply, name, score, showpoints, color, scale, sound))\n\n # Keep the tally rollin'...\n # set a timer for a bit in the future.\n self._multi_kill_timer = _ba.Timer(1.0, self._end_multi_kill)\n\n\nclass Stats:\n \"\"\"Manages scores and statistics for a ba.Session.\n\n category: Gameplay Classes\n \"\"\"\n\n def __init__(self) -> None:\n self._activity: Optional[ReferenceType[ba.Activity]] = None\n 
self._player_records: Dict[str, PlayerRecord] = {}\n self.orchestrahitsound1: Optional[ba.Sound] = None\n self.orchestrahitsound2: Optional[ba.Sound] = None\n self.orchestrahitsound3: Optional[ba.Sound] = None\n self.orchestrahitsound4: Optional[ba.Sound] = None\n\n def setactivity(self, activity: Optional[ba.Activity]) -> None:\n \"\"\"Set the current activity for this instance.\"\"\"\n\n self._activity = None if activity is None else weakref.ref(activity)\n\n # Load our media into this activity's context.\n if activity is not None:\n if activity.expired:\n print_error('unexpected finalized activity')\n else:\n with _ba.Context(activity):\n self._load_activity_media()\n\n def getactivity(self) -> Optional[ba.Activity]:\n \"\"\"Get the activity associated with this instance.\n\n May return None.\n \"\"\"\n if self._activity is None:\n return None\n return self._activity()\n\n def _load_activity_media(self) -> None:\n self.orchestrahitsound1 = _ba.getsound('orchestraHit')\n self.orchestrahitsound2 = _ba.getsound('orchestraHit2')\n self.orchestrahitsound3 = _ba.getsound('orchestraHit3')\n self.orchestrahitsound4 = _ba.getsound('orchestraHit4')\n\n def reset(self) -> None:\n \"\"\"Reset the stats instance completely.\"\"\"\n\n # Just to be safe, lets make sure no multi-kill timers are gonna go off\n # for no-longer-on-the-list players.\n for p_entry in list(self._player_records.values()):\n p_entry.cancel_multi_kill_timer()\n self._player_records = {}\n\n def reset_accum(self) -> None:\n \"\"\"Reset per-sound sub-scores.\"\"\"\n for s_player in list(self._player_records.values()):\n s_player.cancel_multi_kill_timer()\n s_player.accumscore = 0\n s_player.accum_kill_count = 0\n s_player.accum_killed_count = 0\n s_player.streak = 0\n\n def register_sessionplayer(self, player: ba.SessionPlayer) -> None:\n \"\"\"Register a ba.SessionPlayer with this score-set.\"\"\"\n assert player.exists() # Invalid refs should never be passed to funcs.\n name = player.getname()\n if name 
in self._player_records:\n # If the player already exists, update his character and such as\n # it may have changed.\n self._player_records[name].associate_with_sessionplayer(player)\n else:\n name_full = player.getname(full=True)\n self._player_records[name] = PlayerRecord(name, name_full, player,\n self)\n\n def get_records(self) -> Dict[str, ba.PlayerRecord]:\n \"\"\"Get PlayerRecord corresponding to still-existing players.\"\"\"\n records = {}\n\n # Go through our player records and return ones whose player id still\n # corresponds to a player with that name.\n for record_id, record in self._player_records.items():\n lastplayer = record.get_last_sessionplayer()\n if lastplayer and lastplayer.getname() == record_id:\n records[record_id] = record\n return records\n\n def player_scored(self,\n player: ba.Player,\n base_points: int = 1,\n target: Sequence[float] = None,\n kill: bool = False,\n victim_player: ba.Player = None,\n scale: float = 1.0,\n color: Sequence[float] = None,\n title: Union[str, ba.Lstr] = None,\n screenmessage: bool = True,\n display: bool = True,\n importance: int = 1,\n showpoints: bool = True,\n big_message: bool = False) -> int:\n \"\"\"Register a score for the player.\n\n Return value is actual score with multipliers and such factored in.\n \"\"\"\n # FIXME: Tidy this up.\n # pylint: disable=cyclic-import\n # pylint: disable=too-many-branches\n # pylint: disable=too-many-locals\n # pylint: disable=too-many-statements\n from bastd.actor.popuptext import PopupText\n from ba import _math\n from ba._gameactivity import GameActivity\n from ba._lang import Lstr\n del victim_player # Currently unused.\n name = player.getname()\n s_player = self._player_records[name]\n\n if kill:\n s_player.submit_kill(showpoints=showpoints)\n\n display_color: Sequence[float] = (1.0, 1.0, 1.0, 1.0)\n\n if color is not None:\n display_color = color\n elif importance != 1:\n display_color = (1.0, 1.0, 0.4, 1.0)\n points = base_points\n\n # If they want a big 
announcement, throw a zoom-text up there.\n if display and big_message:\n try:\n assert self._activity is not None\n activity = self._activity()\n if isinstance(activity, GameActivity):\n name_full = player.getname(full=True, icon=False)\n activity.show_zoom_message(\n Lstr(resource='nameScoresText',\n subs=[('${NAME}', name_full)]),\n color=_math.normalized_color(player.team.color))\n except Exception:\n print_exception('error showing big_message')\n\n # If we currently have a actor, pop up a score over it.\n if display and showpoints:\n our_pos = player.node.position if player.node else None\n if our_pos is not None:\n if target is None:\n target = our_pos\n\n # If display-pos is *way* lower than us, raise it up\n # (so we can still see scores from dudes that fell off cliffs).\n display_pos = (target[0], max(target[1], our_pos[1] - 2.0),\n min(target[2], our_pos[2] + 2.0))\n activity = self.getactivity()\n if activity is not None:\n if title is not None:\n sval = Lstr(value='+${A} ${B}',\n subs=[('${A}', str(points)),\n ('${B}', title)])\n else:\n sval = Lstr(value='+${A}',\n subs=[('${A}', str(points))])\n PopupText(sval,\n color=display_color,\n scale=1.2 * scale,\n position=display_pos).autoretain()\n\n # Tally kills.\n if kill:\n s_player.accum_kill_count += 1\n s_player.kill_count += 1\n\n # Report non-kill scorings.\n try:\n if screenmessage and not kill:\n _ba.screenmessage(Lstr(resource='nameScoresText',\n subs=[('${NAME}', name)]),\n top=True,\n color=player.color,\n image=player.get_icon())\n except Exception:\n print_exception('error announcing score')\n\n s_player.score += points\n s_player.accumscore += points\n\n # Inform a running game of the score.\n if points != 0:\n activity = self._activity() if self._activity is not None else None\n if activity is not None:\n activity.handlemessage(PlayerScoredMessage(score=points))\n\n return points\n\n def player_was_killed(self,\n player: ba.Player,\n killed: bool = False,\n killer: ba.Player = None) -> 
None:\n \"\"\"Should be called when a player is killed.\"\"\"\n from ba._lang import Lstr\n name = player.getname()\n prec = self._player_records[name]\n prec.streak = 0\n if killed:\n prec.accum_killed_count += 1\n prec.killed_count += 1\n try:\n if killed and _ba.getactivity().announce_player_deaths:\n if killer is player:\n _ba.screenmessage(Lstr(resource='nameSuicideText',\n subs=[('${NAME}', name)]),\n top=True,\n color=player.color,\n image=player.get_icon())\n elif killer is not None:\n if killer.team is player.team:\n _ba.screenmessage(Lstr(resource='nameBetrayedText',\n subs=[('${NAME}',\n killer.getname()),\n ('${VICTIM}', name)]),\n top=True,\n color=killer.color,\n image=killer.get_icon())\n else:\n _ba.screenmessage(Lstr(resource='nameKilledText',\n subs=[('${NAME}',\n killer.getname()),\n ('${VICTIM}', name)]),\n top=True,\n color=killer.color,\n image=killer.get_icon())\n else:\n _ba.screenmessage(Lstr(resource='nameDiedText',\n subs=[('${NAME}', name)]),\n top=True,\n color=player.color,\n image=player.get_icon())\n except Exception:\n print_exception('error announcing kill')\n",
"step-ids": [
17,
23,
28,
29,
30
]
}
|
[
17,
23,
28,
29,
30
] |
'''
IplNorm.py
Description:
Normalizing 0 - 255 initial fingerprint to a normalized image.
Using energy normalization.
Input:
-image
Output:
-norm_im
@author: Edoardo Foco
'''
import cv2
import numpy as np
def normalise(image):
dbl_image = image.astype(float)
# calculate the mean of the image.
mean = np.mean(dbl_image)
# converting numpy 8-bit image to 8- bit cv2.iplimage
iplImage = cv2.cv.CreateImageHeader((image.shape[1], image.shape[0]), cv2.cv.IPL_DEPTH_8U, 1)
cv2.cv.SetData(iplImage, image.tostring(), image.dtype.itemsize * 1 * image.shape[1])
# initializing 32-bit floating point iplimage
image_32F = cv2.cv.CreateImage(cv2.cv.GetSize(iplImage), cv2.cv.IPL_DEPTH_32F,1)
# converting 8-bit unsigned integer image to 32-bit floating point image
cv2.cv.CvtScale(iplImage,image_32F)
# energy Normalization. Formula: image = image/mean(image)
cv2.cv.ConvertScale(image_32F, image_32F, (1/mean), 0);
# re-converting to numpy image
norm_im = np.asarray(image_32F[:,:])
return norm_im
|
normal
|
{
"blob_id": "f51d85ff352d9c84a8ded29ad94b24ca6dda46ad",
"index": 7593,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef normalise(image):\n dbl_image = image.astype(float)\n mean = np.mean(dbl_image)\n iplImage = cv2.cv.CreateImageHeader((image.shape[1], image.shape[0]),\n cv2.cv.IPL_DEPTH_8U, 1)\n cv2.cv.SetData(iplImage, image.tostring(), image.dtype.itemsize * 1 *\n image.shape[1])\n image_32F = cv2.cv.CreateImage(cv2.cv.GetSize(iplImage), cv2.cv.\n IPL_DEPTH_32F, 1)\n cv2.cv.CvtScale(iplImage, image_32F)\n cv2.cv.ConvertScale(image_32F, image_32F, 1 / mean, 0)\n norm_im = np.asarray(image_32F[:, :])\n return norm_im\n",
"step-3": "<mask token>\nimport cv2\nimport numpy as np\n\n\ndef normalise(image):\n dbl_image = image.astype(float)\n mean = np.mean(dbl_image)\n iplImage = cv2.cv.CreateImageHeader((image.shape[1], image.shape[0]),\n cv2.cv.IPL_DEPTH_8U, 1)\n cv2.cv.SetData(iplImage, image.tostring(), image.dtype.itemsize * 1 *\n image.shape[1])\n image_32F = cv2.cv.CreateImage(cv2.cv.GetSize(iplImage), cv2.cv.\n IPL_DEPTH_32F, 1)\n cv2.cv.CvtScale(iplImage, image_32F)\n cv2.cv.ConvertScale(image_32F, image_32F, 1 / mean, 0)\n norm_im = np.asarray(image_32F[:, :])\n return norm_im\n",
"step-4": "\n'''\nIplNorm.py\nDescription: \n Normalizing 0 - 255 initial fingerprint to a normalized image.\n Using energy normalization.\n \n Input:\n -image\n \n Output:\n -norm_im\n@author: Edoardo Foco\n'''\n\nimport cv2\nimport numpy as np\n\ndef normalise(image):\n \n dbl_image = image.astype(float)\n # calculate the mean of the image.\n mean = np.mean(dbl_image)\n \n # converting numpy 8-bit image to 8- bit cv2.iplimage\n iplImage = cv2.cv.CreateImageHeader((image.shape[1], image.shape[0]), cv2.cv.IPL_DEPTH_8U, 1)\n cv2.cv.SetData(iplImage, image.tostring(), image.dtype.itemsize * 1 * image.shape[1])\n \n # initializing 32-bit floating point iplimage\n image_32F = cv2.cv.CreateImage(cv2.cv.GetSize(iplImage), cv2.cv.IPL_DEPTH_32F,1)\n \n # converting 8-bit unsigned integer image to 32-bit floating point image\n cv2.cv.CvtScale(iplImage,image_32F)\n \n # energy Normalization. Formula: image = image/mean(image)\n cv2.cv.ConvertScale(image_32F, image_32F, (1/mean), 0);\n \n # re-converting to numpy image\n norm_im = np.asarray(image_32F[:,:])\n \n return norm_im",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import sys
import psyco
sys.stdin = open("/home/shiva/Learning/1.txt", "r")
sys.stdout = open("/home/shiva/Learning/2.txt", "w")
def compute(plus,minus,total,inp):
if plus == 1 and minus == 0:
print(total); return
elif (plus == 1 and minus == 1):
print("Impossible"); return
elif (abs(plus-minus) > total):
plus
temp = total
total += minus
res = []
if int(total/plus) > temp:
print("Impossible"); return
elif int(total%plus) == 0:
res = [int(total/plus) for i in range(0,plus)]
else:
res = [int(total/(plus-1)) for i in range(0,plus-1)]
res.append(total%(plus-1))
j = 0
prev = 0
for i in inp.split():
if j == 0:
print(res[j],end=' ')
j+=1
elif i == '+' or i=='-':
print(i,end=' ')
prev = i
elif i == '?':
if prev == '+':
print(res[j],end=' ')
j+=1
else:
print('1',end=' ')
else:
print(i,end=' ')
inp = input()
plus =1
minus = 0
total = 0
for i in inp.split():
if i=='?' or i=='=':
continue
elif i == '+':
plus+=1
elif i == '-':
minus +=1
else:
total = int(i)
compute(plus,minus,total,inp)
|
normal
|
{
"blob_id": "d29c8ec737b8e962d381c8fdd0999e7e01847836",
"index": 5274,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef compute(plus, minus, total, inp):\n if plus == 1 and minus == 0:\n print(total)\n return\n elif plus == 1 and minus == 1:\n print('Impossible')\n return\n elif abs(plus - minus) > total:\n plus\n temp = total\n total += minus\n res = []\n if int(total / plus) > temp:\n print('Impossible')\n return\n elif int(total % plus) == 0:\n res = [int(total / plus) for i in range(0, plus)]\n else:\n res = [int(total / (plus - 1)) for i in range(0, plus - 1)]\n res.append(total % (plus - 1))\n j = 0\n prev = 0\n for i in inp.split():\n if j == 0:\n print(res[j], end=' ')\n j += 1\n elif i == '+' or i == '-':\n print(i, end=' ')\n prev = i\n elif i == '?':\n if prev == '+':\n print(res[j], end=' ')\n j += 1\n else:\n print('1', end=' ')\n else:\n print(i, end=' ')\n\n\n<mask token>\nfor i in inp.split():\n if i == '?' or i == '=':\n continue\n elif i == '+':\n plus += 1\n elif i == '-':\n minus += 1\n else:\n total = int(i)\ncompute(plus, minus, total, inp)\n",
"step-3": "<mask token>\nsys.stdin = open('/home/shiva/Learning/1.txt', 'r')\nsys.stdout = open('/home/shiva/Learning/2.txt', 'w')\n\n\ndef compute(plus, minus, total, inp):\n if plus == 1 and minus == 0:\n print(total)\n return\n elif plus == 1 and minus == 1:\n print('Impossible')\n return\n elif abs(plus - minus) > total:\n plus\n temp = total\n total += minus\n res = []\n if int(total / plus) > temp:\n print('Impossible')\n return\n elif int(total % plus) == 0:\n res = [int(total / plus) for i in range(0, plus)]\n else:\n res = [int(total / (plus - 1)) for i in range(0, plus - 1)]\n res.append(total % (plus - 1))\n j = 0\n prev = 0\n for i in inp.split():\n if j == 0:\n print(res[j], end=' ')\n j += 1\n elif i == '+' or i == '-':\n print(i, end=' ')\n prev = i\n elif i == '?':\n if prev == '+':\n print(res[j], end=' ')\n j += 1\n else:\n print('1', end=' ')\n else:\n print(i, end=' ')\n\n\ninp = input()\nplus = 1\nminus = 0\ntotal = 0\nfor i in inp.split():\n if i == '?' or i == '=':\n continue\n elif i == '+':\n plus += 1\n elif i == '-':\n minus += 1\n else:\n total = int(i)\ncompute(plus, minus, total, inp)\n",
"step-4": "import sys\nimport psyco\nsys.stdin = open('/home/shiva/Learning/1.txt', 'r')\nsys.stdout = open('/home/shiva/Learning/2.txt', 'w')\n\n\ndef compute(plus, minus, total, inp):\n if plus == 1 and minus == 0:\n print(total)\n return\n elif plus == 1 and minus == 1:\n print('Impossible')\n return\n elif abs(plus - minus) > total:\n plus\n temp = total\n total += minus\n res = []\n if int(total / plus) > temp:\n print('Impossible')\n return\n elif int(total % plus) == 0:\n res = [int(total / plus) for i in range(0, plus)]\n else:\n res = [int(total / (plus - 1)) for i in range(0, plus - 1)]\n res.append(total % (plus - 1))\n j = 0\n prev = 0\n for i in inp.split():\n if j == 0:\n print(res[j], end=' ')\n j += 1\n elif i == '+' or i == '-':\n print(i, end=' ')\n prev = i\n elif i == '?':\n if prev == '+':\n print(res[j], end=' ')\n j += 1\n else:\n print('1', end=' ')\n else:\n print(i, end=' ')\n\n\ninp = input()\nplus = 1\nminus = 0\ntotal = 0\nfor i in inp.split():\n if i == '?' or i == '=':\n continue\n elif i == '+':\n plus += 1\n elif i == '-':\n minus += 1\n else:\n total = int(i)\ncompute(plus, minus, total, inp)\n",
"step-5": "import sys\nimport psyco\nsys.stdin = open(\"/home/shiva/Learning/1.txt\", \"r\")\nsys.stdout = open(\"/home/shiva/Learning/2.txt\", \"w\")\n\ndef compute(plus,minus,total,inp):\n\tif plus == 1 and minus == 0:\n\t\tprint(total); return\n\telif (plus == 1 and minus == 1): \n\t\tprint(\"Impossible\"); return\n\telif (abs(plus-minus) > total):\n\t\tplus\n\n\ttemp = total\n\ttotal += minus\n\tres = []\n\tif int(total/plus) > temp:\n\t\tprint(\"Impossible\"); return\n\telif int(total%plus) == 0:\n\t\tres = [int(total/plus) for i in range(0,plus)]\n\telse:\n\t\tres = [int(total/(plus-1)) for i in range(0,plus-1)]\n\t\tres.append(total%(plus-1))\n\t\n\tj = 0\n\tprev = 0\n\tfor i in inp.split():\n\t\tif j == 0:\n\t\t\tprint(res[j],end=' ')\n\t\t\tj+=1\n\t\telif i == '+' or i=='-':\n\t\t\tprint(i,end=' ')\n\t\t\tprev = i\n\t\telif i == '?':\n\t\t\tif prev == '+':\n\n\t\t\t\tprint(res[j],end=' ')\n\t\t\t\tj+=1\n\t\t\telse:\n\t\t\t\tprint('1',end=' ')\n\t\telse:\n\t\t\tprint(i,end=' ')\n\ninp = input()\nplus =1\nminus = 0\ntotal = 0\nfor i in inp.split():\n\tif i=='?' or i=='=':\n\t\tcontinue\n\telif i == '+':\n\t\tplus+=1\n\telif i == '-':\n\t\tminus +=1\n\telse:\n\t\ttotal = int(i)\n\ncompute(plus,minus,total,inp)\n\n\n\n\n\n",
"step-ids": [
0,
2,
3,
4,
5
]
}
|
[
0,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
def compute_integrated_acquisition(acquisition, x):
"""
Used to compute the acquisition function when samples of the hyper-parameters have been generated (used in GP_MCMC model).
:param acquisition: acquisition function with GpyOpt model type GP_MCMC.
:param x: location where the acquisition is evaluated.
"""
acqu_x = 0
for i in range(acquisition.model.num_hmc_samples):
acquisition.model.model.kern[:] = acquisition.model.hmc_samples[i, :]
acqu_x += acquisition.acquisition_function(x)
acqu_x = acqu_x / acquisition.model.num_hmc_samples
return acqu_x
def compute_integrated_acquisition_withGradients(acquisition, x):
"""
Used to compute the acquisition function with gradients when samples of the hyper-parameters have been generated (used in GP_MCMC model).
:param acquisition: acquisition function with GpyOpt model type GP_MCMC.
:param x: location where the acquisition is evaluated.
"""
acqu_x = 0
d_acqu_x = 0
for i in range(acquisition.model.num_hmc_samples):
acquisition.model.model.kern[:] = acquisition.model.hmc_samples[i, :]
acqu_x_sample, d_acqu_x_sample = (acquisition.
acquisition_function_withGradients(x))
acqu_x += acqu_x_sample
d_acqu_x += d_acqu_x_sample
acqu_x = acqu_x / acquisition.model.num_hmc_samples
d_acqu_x = d_acqu_x / acquisition.model.num_hmc_samples
return acqu_x, d_acqu_x
def best_guess(f, X):
"""
Gets the best current guess from a vector.
:param f: function to evaluate.
:param X: locations.
"""
n = X.shape[0]
xbest = np.zeros(n)
for i in range(n):
ff = f(X[0:i + 1])
xbest[i] = ff[np.argmin(ff)]
return xbest
<|reserved_special_token_0|>
def best_value(Y, sign=1):
"""
Returns a vector whose components i are the minimum (default) or maximum of Y[:i]
"""
n = Y.shape[0]
Y_best = np.ones(n)
for i in range(n):
if sign == 1:
Y_best[i] = Y[:i + 1].min()
else:
Y_best[i] = Y[:i + 1].max()
return Y_best
<|reserved_special_token_0|>
def evaluate_function(f, X):
"""
Returns the evaluation of a function *f* and the time per evaluation
"""
num_data, dim_data = X.shape
Y_eval = np.zeros((num_data, dim_data))
Y_time = np.zeros((num_data, 1))
for i in range(num_data):
time_zero = time.time()
Y_eval[i, :] = f(X[i, :])
Y_time[i, :] = time.time() - time_zero
return Y_eval, Y_time
<|reserved_special_token_0|>
def merge_values(values1, values2):
"""
Merges two numpy arrays by calculating all possible combinations of rows
"""
array1 = values_to_array(values1)
array2 = values_to_array(values2)
if array1.size == 0:
return array2
if array2.size == 0:
return array1
merged_array = []
for row_array1 in array1:
for row_array2 in array2:
merged_row = np.hstack((row_array1, row_array2))
merged_array.append(merged_row)
return np.atleast_2d(merged_array)
def normalize(Y, normalization_type='stats'):
"""Normalize the vector Y using statistics or its range.
:param Y: Row or column vector that you want to normalize.
:param normalization_type: String specifying the kind of normalization
to use. Options are 'stats' to use mean and standard deviation,
or 'maxmin' to use the range of function values.
:return Y_normalized: The normalized vector.
"""
Y = np.asarray(Y, dtype=float)
if np.max(Y.shape) != Y.size:
raise NotImplementedError('Only 1-dimensional arrays are supported.')
if normalization_type == 'stats':
Y_norm = Y - Y.mean()
std = Y.std()
if std > 0:
Y_norm /= std
elif normalization_type == 'maxmin':
Y_norm = Y - Y.min()
y_range = np.ptp(Y)
if y_range > 0:
Y_norm /= y_range
Y_norm = 2 * (Y_norm - 0.5)
else:
raise ValueError('Unknown normalization type: {}'.format(
normalization_type))
return Y_norm
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def compute_integrated_acquisition(acquisition, x):
"""
Used to compute the acquisition function when samples of the hyper-parameters have been generated (used in GP_MCMC model).
:param acquisition: acquisition function with GpyOpt model type GP_MCMC.
:param x: location where the acquisition is evaluated.
"""
acqu_x = 0
for i in range(acquisition.model.num_hmc_samples):
acquisition.model.model.kern[:] = acquisition.model.hmc_samples[i, :]
acqu_x += acquisition.acquisition_function(x)
acqu_x = acqu_x / acquisition.model.num_hmc_samples
return acqu_x
def compute_integrated_acquisition_withGradients(acquisition, x):
"""
Used to compute the acquisition function with gradients when samples of the hyper-parameters have been generated (used in GP_MCMC model).
:param acquisition: acquisition function with GpyOpt model type GP_MCMC.
:param x: location where the acquisition is evaluated.
"""
acqu_x = 0
d_acqu_x = 0
for i in range(acquisition.model.num_hmc_samples):
acquisition.model.model.kern[:] = acquisition.model.hmc_samples[i, :]
acqu_x_sample, d_acqu_x_sample = (acquisition.
acquisition_function_withGradients(x))
acqu_x += acqu_x_sample
d_acqu_x += d_acqu_x_sample
acqu_x = acqu_x / acquisition.model.num_hmc_samples
d_acqu_x = d_acqu_x / acquisition.model.num_hmc_samples
return acqu_x, d_acqu_x
def best_guess(f, X):
"""
Gets the best current guess from a vector.
:param f: function to evaluate.
:param X: locations.
"""
n = X.shape[0]
xbest = np.zeros(n)
for i in range(n):
ff = f(X[0:i + 1])
xbest[i] = ff[np.argmin(ff)]
return xbest
def samples_multidimensional_uniform(bounds, num_data):
"""
Generates a multidimensional grid uniformly distributed.
:param bounds: tuple defining the box constraints.
:num_data: number of data points to generate.
"""
dim = len(bounds)
Z_rand = np.zeros(shape=(num_data, dim))
for k in range(0, dim):
Z_rand[:, k] = np.random.uniform(low=bounds[k][0], high=bounds[k][1
], size=num_data)
return Z_rand
def reshape(x, input_dim):
"""
Reshapes x into a matrix with input_dim columns
"""
x = np.array(x)
if x.size == input_dim:
x = x.reshape((1, input_dim))
return x
<|reserved_special_token_0|>
def best_value(Y, sign=1):
"""
Returns a vector whose components i are the minimum (default) or maximum of Y[:i]
"""
n = Y.shape[0]
Y_best = np.ones(n)
for i in range(n):
if sign == 1:
Y_best[i] = Y[:i + 1].min()
else:
Y_best[i] = Y[:i + 1].max()
return Y_best
<|reserved_special_token_0|>
def evaluate_function(f, X):
"""
Returns the evaluation of a function *f* and the time per evaluation
"""
num_data, dim_data = X.shape
Y_eval = np.zeros((num_data, dim_data))
Y_time = np.zeros((num_data, 1))
for i in range(num_data):
time_zero = time.time()
Y_eval[i, :] = f(X[i, :])
Y_time[i, :] = time.time() - time_zero
return Y_eval, Y_time
<|reserved_special_token_0|>
def merge_values(values1, values2):
"""
Merges two numpy arrays by calculating all possible combinations of rows
"""
array1 = values_to_array(values1)
array2 = values_to_array(values2)
if array1.size == 0:
return array2
if array2.size == 0:
return array1
merged_array = []
for row_array1 in array1:
for row_array2 in array2:
merged_row = np.hstack((row_array1, row_array2))
merged_array.append(merged_row)
return np.atleast_2d(merged_array)
def normalize(Y, normalization_type='stats'):
"""Normalize the vector Y using statistics or its range.
:param Y: Row or column vector that you want to normalize.
:param normalization_type: String specifying the kind of normalization
to use. Options are 'stats' to use mean and standard deviation,
or 'maxmin' to use the range of function values.
:return Y_normalized: The normalized vector.
"""
Y = np.asarray(Y, dtype=float)
if np.max(Y.shape) != Y.size:
raise NotImplementedError('Only 1-dimensional arrays are supported.')
if normalization_type == 'stats':
Y_norm = Y - Y.mean()
std = Y.std()
if std > 0:
Y_norm /= std
elif normalization_type == 'maxmin':
Y_norm = Y - Y.min()
y_range = np.ptp(Y)
if y_range > 0:
Y_norm /= y_range
Y_norm = 2 * (Y_norm - 0.5)
else:
raise ValueError('Unknown normalization type: {}'.format(
normalization_type))
return Y_norm
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def compute_integrated_acquisition(acquisition, x):
"""
Used to compute the acquisition function when samples of the hyper-parameters have been generated (used in GP_MCMC model).
:param acquisition: acquisition function with GpyOpt model type GP_MCMC.
:param x: location where the acquisition is evaluated.
"""
acqu_x = 0
for i in range(acquisition.model.num_hmc_samples):
acquisition.model.model.kern[:] = acquisition.model.hmc_samples[i, :]
acqu_x += acquisition.acquisition_function(x)
acqu_x = acqu_x / acquisition.model.num_hmc_samples
return acqu_x
def compute_integrated_acquisition_withGradients(acquisition, x):
"""
Used to compute the acquisition function with gradients when samples of the hyper-parameters have been generated (used in GP_MCMC model).
:param acquisition: acquisition function with GpyOpt model type GP_MCMC.
:param x: location where the acquisition is evaluated.
"""
acqu_x = 0
d_acqu_x = 0
for i in range(acquisition.model.num_hmc_samples):
acquisition.model.model.kern[:] = acquisition.model.hmc_samples[i, :]
acqu_x_sample, d_acqu_x_sample = (acquisition.
acquisition_function_withGradients(x))
acqu_x += acqu_x_sample
d_acqu_x += d_acqu_x_sample
acqu_x = acqu_x / acquisition.model.num_hmc_samples
d_acqu_x = d_acqu_x / acquisition.model.num_hmc_samples
return acqu_x, d_acqu_x
def best_guess(f, X):
"""
Gets the best current guess from a vector.
:param f: function to evaluate.
:param X: locations.
"""
n = X.shape[0]
xbest = np.zeros(n)
for i in range(n):
ff = f(X[0:i + 1])
xbest[i] = ff[np.argmin(ff)]
return xbest
def samples_multidimensional_uniform(bounds, num_data):
"""
Generates a multidimensional grid uniformly distributed.
:param bounds: tuple defining the box constraints.
:num_data: number of data points to generate.
"""
dim = len(bounds)
Z_rand = np.zeros(shape=(num_data, dim))
for k in range(0, dim):
Z_rand[:, k] = np.random.uniform(low=bounds[k][0], high=bounds[k][1
], size=num_data)
return Z_rand
def reshape(x, input_dim):
"""
Reshapes x into a matrix with input_dim columns
"""
x = np.array(x)
if x.size == input_dim:
x = x.reshape((1, input_dim))
return x
<|reserved_special_token_0|>
def get_d_moments(model, x):
"""
Gradients with respect to x of the moments (mean and sdev.) of the GP
:param model: GPy model.
:param x: location where the gradients are evaluated.
"""
input_dim = model.input_dim
x = reshape(x, input_dim)
_, v = model.predict(x)
dmdx, dvdx = model.predictive_gradients(x)
dmdx = dmdx[:, :, 0]
dsdx = dvdx / (2 * np.sqrt(v))
return dmdx, dsdx
def get_quantiles(acquisition_par, fmin, m, s):
"""
Quantiles of the Gaussian distribution useful to determine the acquisition function values
:param acquisition_par: parameter of the acquisition function
:param fmin: current minimum.
:param m: vector of means.
:param s: vector of standard deviations.
"""
if isinstance(s, np.ndarray):
s[s < 1e-10] = 1e-10
elif s < 1e-10:
s = 1e-10
u = (fmin - m - acquisition_par) / s
phi = np.exp(-0.5 * u ** 2) / np.sqrt(2 * np.pi)
Phi = 0.5 * erfc(-u / np.sqrt(2))
return phi, Phi, u
def best_value(Y, sign=1):
"""
Returns a vector whose components i are the minimum (default) or maximum of Y[:i]
"""
n = Y.shape[0]
Y_best = np.ones(n)
for i in range(n):
if sign == 1:
Y_best[i] = Y[:i + 1].min()
else:
Y_best[i] = Y[:i + 1].max()
return Y_best
<|reserved_special_token_0|>
def evaluate_function(f, X):
"""
Returns the evaluation of a function *f* and the time per evaluation
"""
num_data, dim_data = X.shape
Y_eval = np.zeros((num_data, dim_data))
Y_time = np.zeros((num_data, 1))
for i in range(num_data):
time_zero = time.time()
Y_eval[i, :] = f(X[i, :])
Y_time[i, :] = time.time() - time_zero
return Y_eval, Y_time
def values_to_array(input_values):
"""
Transforms a values of int, float and tuples to a column vector numpy array
"""
if type(input_values) == tuple:
values = np.array(input_values).reshape(-1, 1)
elif type(input_values) == np.ndarray:
values = np.atleast_2d(input_values)
elif type(input_values) == int or type(input_values) == float or type(np
.int64):
values = np.atleast_2d(np.array(input_values))
else:
print('Type to transform not recognized')
return values
def merge_values(values1, values2):
"""
Merges two numpy arrays by calculating all possible combinations of rows
"""
array1 = values_to_array(values1)
array2 = values_to_array(values2)
if array1.size == 0:
return array2
if array2.size == 0:
return array1
merged_array = []
for row_array1 in array1:
for row_array2 in array2:
merged_row = np.hstack((row_array1, row_array2))
merged_array.append(merged_row)
return np.atleast_2d(merged_array)
def normalize(Y, normalization_type='stats'):
"""Normalize the vector Y using statistics or its range.
:param Y: Row or column vector that you want to normalize.
:param normalization_type: String specifying the kind of normalization
to use. Options are 'stats' to use mean and standard deviation,
or 'maxmin' to use the range of function values.
:return Y_normalized: The normalized vector.
"""
Y = np.asarray(Y, dtype=float)
if np.max(Y.shape) != Y.size:
raise NotImplementedError('Only 1-dimensional arrays are supported.')
if normalization_type == 'stats':
Y_norm = Y - Y.mean()
std = Y.std()
if std > 0:
Y_norm /= std
elif normalization_type == 'maxmin':
Y_norm = Y - Y.min()
y_range = np.ptp(Y)
if y_range > 0:
Y_norm /= y_range
Y_norm = 2 * (Y_norm - 0.5)
else:
raise ValueError('Unknown normalization type: {}'.format(
normalization_type))
return Y_norm
<|reserved_special_token_1|>
import numpy as np
from scipy.special import erfc
import time
from ..core.errors import InvalidConfigError
def compute_integrated_acquisition(acquisition, x):
"""
Used to compute the acquisition function when samples of the hyper-parameters have been generated (used in GP_MCMC model).
:param acquisition: acquisition function with GpyOpt model type GP_MCMC.
:param x: location where the acquisition is evaluated.
"""
acqu_x = 0
for i in range(acquisition.model.num_hmc_samples):
acquisition.model.model.kern[:] = acquisition.model.hmc_samples[i, :]
acqu_x += acquisition.acquisition_function(x)
acqu_x = acqu_x / acquisition.model.num_hmc_samples
return acqu_x
def compute_integrated_acquisition_withGradients(acquisition, x):
"""
Used to compute the acquisition function with gradients when samples of the hyper-parameters have been generated (used in GP_MCMC model).
:param acquisition: acquisition function with GpyOpt model type GP_MCMC.
:param x: location where the acquisition is evaluated.
"""
acqu_x = 0
d_acqu_x = 0
for i in range(acquisition.model.num_hmc_samples):
acquisition.model.model.kern[:] = acquisition.model.hmc_samples[i, :]
acqu_x_sample, d_acqu_x_sample = (acquisition.
acquisition_function_withGradients(x))
acqu_x += acqu_x_sample
d_acqu_x += d_acqu_x_sample
acqu_x = acqu_x / acquisition.model.num_hmc_samples
d_acqu_x = d_acqu_x / acquisition.model.num_hmc_samples
return acqu_x, d_acqu_x
def best_guess(f, X):
"""
Gets the best current guess from a vector.
:param f: function to evaluate.
:param X: locations.
"""
n = X.shape[0]
xbest = np.zeros(n)
for i in range(n):
ff = f(X[0:i + 1])
xbest[i] = ff[np.argmin(ff)]
return xbest
def samples_multidimensional_uniform(bounds, num_data):
"""
Generates a multidimensional grid uniformly distributed.
:param bounds: tuple defining the box constraints.
:num_data: number of data points to generate.
"""
dim = len(bounds)
Z_rand = np.zeros(shape=(num_data, dim))
for k in range(0, dim):
Z_rand[:, k] = np.random.uniform(low=bounds[k][0], high=bounds[k][1
], size=num_data)
return Z_rand
def reshape(x, input_dim):
"""
Reshapes x into a matrix with input_dim columns
"""
x = np.array(x)
if x.size == input_dim:
x = x.reshape((1, input_dim))
return x
def get_moments(model, x):
"""
Moments (mean and sdev.) of a GP model at x
"""
input_dim = model.X.shape[1]
x = reshape(x, input_dim)
fmin = min(model.predict(model.X)[0])
m, v = model.predict(x)
s = np.sqrt(np.clip(v, 0, np.inf))
return m, s, fmin
def get_d_moments(model, x):
"""
Gradients with respect to x of the moments (mean and sdev.) of the GP
:param model: GPy model.
:param x: location where the gradients are evaluated.
"""
input_dim = model.input_dim
x = reshape(x, input_dim)
_, v = model.predict(x)
dmdx, dvdx = model.predictive_gradients(x)
dmdx = dmdx[:, :, 0]
dsdx = dvdx / (2 * np.sqrt(v))
return dmdx, dsdx
def get_quantiles(acquisition_par, fmin, m, s):
"""
Quantiles of the Gaussian distribution useful to determine the acquisition function values
:param acquisition_par: parameter of the acquisition function
:param fmin: current minimum.
:param m: vector of means.
:param s: vector of standard deviations.
"""
if isinstance(s, np.ndarray):
s[s < 1e-10] = 1e-10
elif s < 1e-10:
s = 1e-10
u = (fmin - m - acquisition_par) / s
phi = np.exp(-0.5 * u ** 2) / np.sqrt(2 * np.pi)
Phi = 0.5 * erfc(-u / np.sqrt(2))
return phi, Phi, u
def best_value(Y, sign=1):
"""
Returns a vector whose components i are the minimum (default) or maximum of Y[:i]
"""
n = Y.shape[0]
Y_best = np.ones(n)
for i in range(n):
if sign == 1:
Y_best[i] = Y[:i + 1].min()
else:
Y_best[i] = Y[:i + 1].max()
return Y_best
def spawn(f):
"""
Function for parallel evaluation of the acquisition function
"""
def fun(pipe, x):
pipe.send(f(x))
pipe.close()
return fun
def evaluate_function(f, X):
"""
Returns the evaluation of a function *f* and the time per evaluation
"""
num_data, dim_data = X.shape
Y_eval = np.zeros((num_data, dim_data))
Y_time = np.zeros((num_data, 1))
for i in range(num_data):
time_zero = time.time()
Y_eval[i, :] = f(X[i, :])
Y_time[i, :] = time.time() - time_zero
return Y_eval, Y_time
def values_to_array(input_values):
"""
Transforms a values of int, float and tuples to a column vector numpy array
"""
if type(input_values) == tuple:
values = np.array(input_values).reshape(-1, 1)
elif type(input_values) == np.ndarray:
values = np.atleast_2d(input_values)
elif type(input_values) == int or type(input_values) == float or type(np
.int64):
values = np.atleast_2d(np.array(input_values))
else:
print('Type to transform not recognized')
return values
def merge_values(values1, values2):
"""
Merges two numpy arrays by calculating all possible combinations of rows
"""
array1 = values_to_array(values1)
array2 = values_to_array(values2)
if array1.size == 0:
return array2
if array2.size == 0:
return array1
merged_array = []
for row_array1 in array1:
for row_array2 in array2:
merged_row = np.hstack((row_array1, row_array2))
merged_array.append(merged_row)
return np.atleast_2d(merged_array)
def normalize(Y, normalization_type='stats'):
    """Normalize the vector Y using statistics or its range.

    :param Y: Row or column vector that you want to normalize.
    :param normalization_type: String specifying the kind of normalization
    to use. Options are 'stats' to use mean and standard deviation,
    or 'maxmin' to use the range of function values.
    :return Y_normalized: The normalized vector.
    """
    Y = np.asarray(Y, dtype=float)
    # Accept only vectors (row, column or flat): the largest axis must
    # account for every element.
    if np.max(Y.shape) != Y.size:
        raise NotImplementedError('Only 1-dimensional arrays are supported.')
    if normalization_type == 'stats':
        # Center on the mean and scale by the standard deviation,
        # skipping the division when the spread is zero (single point
        # or constant data).
        Y_norm = Y - Y.mean()
        spread = Y.std()
        if spread > 0:
            Y_norm = Y_norm / spread
    elif normalization_type == 'maxmin':
        # Map the observed range onto [-1, 1]; a degenerate range is
        # left unscaled before the affine shift.
        Y_norm = Y - Y.min()
        span = np.ptp(Y)
        if span > 0:
            Y_norm = Y_norm / span
        Y_norm = 2 * (Y_norm - 0.5)
    else:
        raise ValueError('Unknown normalization type: {}'.format(
            normalization_type))
    return Y_norm
<|reserved_special_token_1|>
# Copyright (c) 2016, the GPyOpt Authors
# Licensed under the BSD 3-clause license (see LICENSE.txt)
import numpy as np
from scipy.special import erfc
import time
from ..core.errors import InvalidConfigError
def compute_integrated_acquisition(acquisition, x):
    '''
    Used to compute the acquisition function when samples of the hyper-parameters have been generated (used in GP_MCMC model).

    :param acquisition: acquisition function with GpyOpt model type GP_MCMC.
    :param x: location where the acquisition is evaluated.
    '''
    num_samples = acquisition.model.num_hmc_samples
    total = 0
    for sample in acquisition.model.hmc_samples[:num_samples]:
        # Load one posterior sample of the kernel hyper-parameters and
        # evaluate the acquisition under it.
        acquisition.model.model.kern[:] = sample
        total += acquisition.acquisition_function(x)
    # Monte Carlo average over the hyper-parameter samples.
    return total / num_samples
def compute_integrated_acquisition_withGradients(acquisition, x):
    '''
    Used to compute the acquisition function with gradients when samples of the hyper-parameters have been generated (used in GP_MCMC model).

    :param acquisition: acquisition function with GpyOpt model type GP_MCMC.
    :param x: location where the acquisition is evaluated.
    '''
    num_samples = acquisition.model.num_hmc_samples
    acqu_total = 0
    grad_total = 0
    for sample in acquisition.model.hmc_samples[:num_samples]:
        # Load one posterior hyper-parameter sample, then accumulate the
        # acquisition value and its gradient under that sample.
        acquisition.model.model.kern[:] = sample
        value, gradient = acquisition.acquisition_function_withGradients(x)
        acqu_total += value
        grad_total += gradient
    # Monte Carlo average of both the value and the gradient.
    return acqu_total / num_samples, grad_total / num_samples
def best_guess(f, X):
    '''
    Gets the best current guess from a vector.

    :param f: function to evaluate. It must accept a 2-d array of
        locations and return one value per row.
    :param X: locations (2-d array, one location per row).
    :return: 1-d array whose i-th entry is the minimum of f over the
        first i+1 rows of X.
    '''
    n = X.shape[0]
    if n == 0:
        # Match the historical behavior for empty input.
        return np.zeros(0)
    # Evaluate f once over all locations; the previous implementation
    # re-evaluated f on every prefix, costing O(n^2) evaluations total.
    ff = np.asarray(f(X)).flatten().astype(float)
    # Running minimum gives the best value seen up to each index.
    return np.minimum.accumulate(ff)
def samples_multidimensional_uniform(bounds, num_data):
    '''
    Generates a multidimensional grid uniformly distributed.

    :param bounds: tuple defining the box constraints.
    :num_data: number of data points to generate.
    '''
    dim = len(bounds)
    samples = np.zeros((num_data, dim))
    # Draw each coordinate independently within its own box constraint,
    # one column at a time.
    for idx, (lower, upper) in enumerate(bounds):
        samples[:, idx] = np.random.uniform(low=lower, high=upper,
                                            size=num_data)
    return samples
def reshape(x, input_dim):
    '''
    Reshapes x into a matrix with input_dim columns
    '''
    arr = np.array(x)
    # A flat vector of exactly input_dim elements becomes a single-row
    # matrix; anything else is returned as converted.
    return arr.reshape((1, input_dim)) if arr.size == input_dim else arr
def get_moments(model, x):
    '''
    Moments (mean and sdev.) of a GP model at x
    '''
    n_cols = model.X.shape[1]
    x = reshape(x, n_cols)
    # Best (minimum) predicted value over the locations already observed
    # by the model.
    fmin = min(model.predict(model.X)[0])
    mean, variance = model.predict(x)
    # Clip negative variances (numerical noise) before the square root.
    stdev = np.sqrt(np.clip(variance, 0, np.inf))
    return mean, stdev, fmin
def get_d_moments(model, x):
    '''
    Gradients with respect to x of the moments (mean and sdev.) of the GP

    :param model: GPy model.
    :param x: location where the gradients are evaluated.
    '''
    x = reshape(x, model.input_dim)
    _, variance = model.predict(x)
    mean_grad, var_grad = model.predictive_gradients(x)
    # Drop the trailing output axis of the mean gradient.
    mean_grad = mean_grad[:, :, 0]
    # Chain rule: d(sqrt(v))/dx = (dv/dx) / (2 * sqrt(v)).
    stdev_grad = var_grad / (2 * np.sqrt(variance))
    return mean_grad, stdev_grad
def get_quantiles(acquisition_par, fmin, m, s):
    '''
    Quantiles of the Gaussian distribution useful to determine the acquisition function values

    :param acquisition_par: parameter of the acquisition function
    :param fmin: current minimum.
    :param m: vector of means.
    :param s: vector of standard deviations.
    :return: (phi, Phi, u) where u is the standardized improvement,
        phi the standard normal pdf at u and Phi its cdf.
    '''
    # Clamp the standard deviation away from zero to avoid division by
    # zero. np.maximum returns a new array (works for scalars too), so
    # the caller's *s* is no longer mutated in place -- the previous
    # `s[s < 1e-10] = 1e-10` clobbered the caller's data.
    s = np.maximum(s, 1e-10)
    u = (fmin - m - acquisition_par) / s
    phi = np.exp(-0.5 * u ** 2) / np.sqrt(2 * np.pi)
    Phi = 0.5 * erfc(-u / np.sqrt(2))
    return (phi, Phi, u)
def best_value(Y, sign=1):
    '''
    Returns a vector whose components i are the minimum (default) or maximum of Y[:i]

    :param Y: array of observed values (one evaluation per row).
    :param sign: 1 for a running minimum (default), anything else for a
        running maximum.
    '''
    n = Y.shape[0]
    if n == 0:
        # Match the historical behavior for empty input.
        return np.ones(0)
    # Collapse each row to its extreme value, then take the running
    # extreme: O(n) instead of the previous O(n^2) per-prefix rescan.
    row_vals = np.asarray(Y, dtype=float).reshape(n, -1)
    if sign == 1:
        return np.minimum.accumulate(row_vals.min(axis=1))
    return np.maximum.accumulate(row_vals.max(axis=1))
def spawn(f):
    '''
    Function for parallel evaluation of the acquisition function
    '''
    def fun(pipe, x):
        # Evaluate in the worker and ship the result back through the
        # pipe, closing our end once sent.
        result = f(x)
        pipe.send(result)
        pipe.close()
    return fun
def evaluate_function(f, X):
    '''
    Returns the evaluation of a function *f* and the time per evaluation.

    :param f: callable evaluated row by row on X.
    :param X: 2-D array of evaluation points (one per row).
    :return: (Y_eval, Y_time) where Y_time is a column of wall-clock
        seconds spent on each evaluation.
    '''
    num_data, dim_data = X.shape
    Y_eval = np.zeros((num_data, dim_data))
    Y_time = np.zeros((num_data, 1))
    for row, point in enumerate(X):
        started = time.time()
        Y_eval[row, :] = f(point)
        Y_time[row, :] = time.time() - started
    return Y_eval, Y_time
def values_to_array(input_values):
    '''
    Transforms a values of int, float and tuples to a column vector numpy array.

    Tuples become a column vector; arrays are promoted to at least 2-D;
    scalars become a 1x1 matrix.
    '''
    # The original condition read ``... or type(np.int64)`` which is always
    # truthy, so every non-tuple/non-ndarray input took the scalar branch and
    # the final else (leaving ``values`` unbound) was unreachable. Use proper
    # isinstance checks instead.
    if isinstance(input_values, tuple):
        values = np.array(input_values).reshape(-1, 1)
    elif isinstance(input_values, np.ndarray):
        values = np.atleast_2d(input_values)
    elif isinstance(input_values, (int, float, np.number)):
        values = np.atleast_2d(np.array(input_values))
    else:
        # Preserve historical behavior: any other input (e.g. a list) was
        # coerced through the scalar branch, so coerce it the same way.
        values = np.atleast_2d(np.array(input_values))
    return values
def merge_values(values1, values2):
    '''
    Merges two numpy arrays by calculating all possible combinations of rows.

    Each input is first coerced through values_to_array; an empty input
    yields the other array unchanged.
    '''
    array1 = values_to_array(values1)
    array2 = values_to_array(values2)
    if array1.size == 0:
        return array2
    if array2.size == 0:
        return array1
    combos = [np.hstack((left, right)) for left in array1 for right in array2]
    return np.atleast_2d(combos)
def normalize(Y, normalization_type='stats'):
    """Normalize the vector Y using statistics or its range.

    :param Y: Row or column vector that you want to normalize.
    :param normalization_type: String specifying the kind of normalization
        to use. Options are 'stats' to use mean and standard deviation,
        or 'maxmin' to use the range of function values.
    :return Y_normalized: The normalized vector.
    """
    Y = np.asarray(Y, dtype=float)
    if np.max(Y.shape) != Y.size:
        raise NotImplementedError('Only 1-dimensional arrays are supported.')
    # Division is skipped when the spread is zero (e.g. a single data
    # point), leaving a zero-centered vector instead of dividing by zero.
    if normalization_type == 'stats':
        centered = Y - Y.mean()
        spread = Y.std()
        return centered / spread if spread > 0 else centered
    if normalization_type == 'maxmin':
        shifted = Y - Y.min()
        span = np.ptp(Y)
        if span > 0:
            shifted = shifted / span
            # A range of [-1, 1] is more natural for a zero-mean GP.
            shifted = 2 * (shifted - 0.5)
        return shifted
    raise ValueError('Unknown normalization type: {}'.format(normalization_type))
|
flexible
|
{
"blob_id": "4e7cfbf51ec9bad691d8dd9f103f22728cf5e952",
"index": 1229,
"step-1": "<mask token>\n\n\ndef compute_integrated_acquisition(acquisition, x):\n \"\"\"\n Used to compute the acquisition function when samples of the hyper-parameters have been generated (used in GP_MCMC model).\n\n :param acquisition: acquisition function with GpyOpt model type GP_MCMC.\n :param x: location where the acquisition is evaluated.\n \"\"\"\n acqu_x = 0\n for i in range(acquisition.model.num_hmc_samples):\n acquisition.model.model.kern[:] = acquisition.model.hmc_samples[i, :]\n acqu_x += acquisition.acquisition_function(x)\n acqu_x = acqu_x / acquisition.model.num_hmc_samples\n return acqu_x\n\n\ndef compute_integrated_acquisition_withGradients(acquisition, x):\n \"\"\"\n Used to compute the acquisition function with gradients when samples of the hyper-parameters have been generated (used in GP_MCMC model).\n\n :param acquisition: acquisition function with GpyOpt model type GP_MCMC.\n :param x: location where the acquisition is evaluated.\n \"\"\"\n acqu_x = 0\n d_acqu_x = 0\n for i in range(acquisition.model.num_hmc_samples):\n acquisition.model.model.kern[:] = acquisition.model.hmc_samples[i, :]\n acqu_x_sample, d_acqu_x_sample = (acquisition.\n acquisition_function_withGradients(x))\n acqu_x += acqu_x_sample\n d_acqu_x += d_acqu_x_sample\n acqu_x = acqu_x / acquisition.model.num_hmc_samples\n d_acqu_x = d_acqu_x / acquisition.model.num_hmc_samples\n return acqu_x, d_acqu_x\n\n\ndef best_guess(f, X):\n \"\"\"\n Gets the best current guess from a vector.\n :param f: function to evaluate.\n :param X: locations.\n \"\"\"\n n = X.shape[0]\n xbest = np.zeros(n)\n for i in range(n):\n ff = f(X[0:i + 1])\n xbest[i] = ff[np.argmin(ff)]\n return xbest\n\n\n<mask token>\n\n\ndef best_value(Y, sign=1):\n \"\"\"\n Returns a vector whose components i are the minimum (default) or maximum of Y[:i]\n \"\"\"\n n = Y.shape[0]\n Y_best = np.ones(n)\n for i in range(n):\n if sign == 1:\n Y_best[i] = Y[:i + 1].min()\n else:\n Y_best[i] = Y[:i + 1].max()\n return 
Y_best\n\n\n<mask token>\n\n\ndef evaluate_function(f, X):\n \"\"\"\n Returns the evaluation of a function *f* and the time per evaluation\n \"\"\"\n num_data, dim_data = X.shape\n Y_eval = np.zeros((num_data, dim_data))\n Y_time = np.zeros((num_data, 1))\n for i in range(num_data):\n time_zero = time.time()\n Y_eval[i, :] = f(X[i, :])\n Y_time[i, :] = time.time() - time_zero\n return Y_eval, Y_time\n\n\n<mask token>\n\n\ndef merge_values(values1, values2):\n \"\"\"\n Merges two numpy arrays by calculating all possible combinations of rows\n \"\"\"\n array1 = values_to_array(values1)\n array2 = values_to_array(values2)\n if array1.size == 0:\n return array2\n if array2.size == 0:\n return array1\n merged_array = []\n for row_array1 in array1:\n for row_array2 in array2:\n merged_row = np.hstack((row_array1, row_array2))\n merged_array.append(merged_row)\n return np.atleast_2d(merged_array)\n\n\ndef normalize(Y, normalization_type='stats'):\n \"\"\"Normalize the vector Y using statistics or its range.\n\n :param Y: Row or column vector that you want to normalize.\n :param normalization_type: String specifying the kind of normalization\n to use. Options are 'stats' to use mean and standard deviation,\n or 'maxmin' to use the range of function values.\n :return Y_normalized: The normalized vector.\n \"\"\"\n Y = np.asarray(Y, dtype=float)\n if np.max(Y.shape) != Y.size:\n raise NotImplementedError('Only 1-dimensional arrays are supported.')\n if normalization_type == 'stats':\n Y_norm = Y - Y.mean()\n std = Y.std()\n if std > 0:\n Y_norm /= std\n elif normalization_type == 'maxmin':\n Y_norm = Y - Y.min()\n y_range = np.ptp(Y)\n if y_range > 0:\n Y_norm /= y_range\n Y_norm = 2 * (Y_norm - 0.5)\n else:\n raise ValueError('Unknown normalization type: {}'.format(\n normalization_type))\n return Y_norm\n",
"step-2": "<mask token>\n\n\ndef compute_integrated_acquisition(acquisition, x):\n \"\"\"\n Used to compute the acquisition function when samples of the hyper-parameters have been generated (used in GP_MCMC model).\n\n :param acquisition: acquisition function with GpyOpt model type GP_MCMC.\n :param x: location where the acquisition is evaluated.\n \"\"\"\n acqu_x = 0\n for i in range(acquisition.model.num_hmc_samples):\n acquisition.model.model.kern[:] = acquisition.model.hmc_samples[i, :]\n acqu_x += acquisition.acquisition_function(x)\n acqu_x = acqu_x / acquisition.model.num_hmc_samples\n return acqu_x\n\n\ndef compute_integrated_acquisition_withGradients(acquisition, x):\n \"\"\"\n Used to compute the acquisition function with gradients when samples of the hyper-parameters have been generated (used in GP_MCMC model).\n\n :param acquisition: acquisition function with GpyOpt model type GP_MCMC.\n :param x: location where the acquisition is evaluated.\n \"\"\"\n acqu_x = 0\n d_acqu_x = 0\n for i in range(acquisition.model.num_hmc_samples):\n acquisition.model.model.kern[:] = acquisition.model.hmc_samples[i, :]\n acqu_x_sample, d_acqu_x_sample = (acquisition.\n acquisition_function_withGradients(x))\n acqu_x += acqu_x_sample\n d_acqu_x += d_acqu_x_sample\n acqu_x = acqu_x / acquisition.model.num_hmc_samples\n d_acqu_x = d_acqu_x / acquisition.model.num_hmc_samples\n return acqu_x, d_acqu_x\n\n\ndef best_guess(f, X):\n \"\"\"\n Gets the best current guess from a vector.\n :param f: function to evaluate.\n :param X: locations.\n \"\"\"\n n = X.shape[0]\n xbest = np.zeros(n)\n for i in range(n):\n ff = f(X[0:i + 1])\n xbest[i] = ff[np.argmin(ff)]\n return xbest\n\n\ndef samples_multidimensional_uniform(bounds, num_data):\n \"\"\"\n Generates a multidimensional grid uniformly distributed.\n :param bounds: tuple defining the box constraints.\n :num_data: number of data points to generate.\n\n \"\"\"\n dim = len(bounds)\n Z_rand = np.zeros(shape=(num_data, dim))\n for k 
in range(0, dim):\n Z_rand[:, k] = np.random.uniform(low=bounds[k][0], high=bounds[k][1\n ], size=num_data)\n return Z_rand\n\n\ndef reshape(x, input_dim):\n \"\"\"\n Reshapes x into a matrix with input_dim columns\n\n \"\"\"\n x = np.array(x)\n if x.size == input_dim:\n x = x.reshape((1, input_dim))\n return x\n\n\n<mask token>\n\n\ndef best_value(Y, sign=1):\n \"\"\"\n Returns a vector whose components i are the minimum (default) or maximum of Y[:i]\n \"\"\"\n n = Y.shape[0]\n Y_best = np.ones(n)\n for i in range(n):\n if sign == 1:\n Y_best[i] = Y[:i + 1].min()\n else:\n Y_best[i] = Y[:i + 1].max()\n return Y_best\n\n\n<mask token>\n\n\ndef evaluate_function(f, X):\n \"\"\"\n Returns the evaluation of a function *f* and the time per evaluation\n \"\"\"\n num_data, dim_data = X.shape\n Y_eval = np.zeros((num_data, dim_data))\n Y_time = np.zeros((num_data, 1))\n for i in range(num_data):\n time_zero = time.time()\n Y_eval[i, :] = f(X[i, :])\n Y_time[i, :] = time.time() - time_zero\n return Y_eval, Y_time\n\n\n<mask token>\n\n\ndef merge_values(values1, values2):\n \"\"\"\n Merges two numpy arrays by calculating all possible combinations of rows\n \"\"\"\n array1 = values_to_array(values1)\n array2 = values_to_array(values2)\n if array1.size == 0:\n return array2\n if array2.size == 0:\n return array1\n merged_array = []\n for row_array1 in array1:\n for row_array2 in array2:\n merged_row = np.hstack((row_array1, row_array2))\n merged_array.append(merged_row)\n return np.atleast_2d(merged_array)\n\n\ndef normalize(Y, normalization_type='stats'):\n \"\"\"Normalize the vector Y using statistics or its range.\n\n :param Y: Row or column vector that you want to normalize.\n :param normalization_type: String specifying the kind of normalization\n to use. 
Options are 'stats' to use mean and standard deviation,\n or 'maxmin' to use the range of function values.\n :return Y_normalized: The normalized vector.\n \"\"\"\n Y = np.asarray(Y, dtype=float)\n if np.max(Y.shape) != Y.size:\n raise NotImplementedError('Only 1-dimensional arrays are supported.')\n if normalization_type == 'stats':\n Y_norm = Y - Y.mean()\n std = Y.std()\n if std > 0:\n Y_norm /= std\n elif normalization_type == 'maxmin':\n Y_norm = Y - Y.min()\n y_range = np.ptp(Y)\n if y_range > 0:\n Y_norm /= y_range\n Y_norm = 2 * (Y_norm - 0.5)\n else:\n raise ValueError('Unknown normalization type: {}'.format(\n normalization_type))\n return Y_norm\n",
"step-3": "<mask token>\n\n\ndef compute_integrated_acquisition(acquisition, x):\n \"\"\"\n Used to compute the acquisition function when samples of the hyper-parameters have been generated (used in GP_MCMC model).\n\n :param acquisition: acquisition function with GpyOpt model type GP_MCMC.\n :param x: location where the acquisition is evaluated.\n \"\"\"\n acqu_x = 0\n for i in range(acquisition.model.num_hmc_samples):\n acquisition.model.model.kern[:] = acquisition.model.hmc_samples[i, :]\n acqu_x += acquisition.acquisition_function(x)\n acqu_x = acqu_x / acquisition.model.num_hmc_samples\n return acqu_x\n\n\ndef compute_integrated_acquisition_withGradients(acquisition, x):\n \"\"\"\n Used to compute the acquisition function with gradients when samples of the hyper-parameters have been generated (used in GP_MCMC model).\n\n :param acquisition: acquisition function with GpyOpt model type GP_MCMC.\n :param x: location where the acquisition is evaluated.\n \"\"\"\n acqu_x = 0\n d_acqu_x = 0\n for i in range(acquisition.model.num_hmc_samples):\n acquisition.model.model.kern[:] = acquisition.model.hmc_samples[i, :]\n acqu_x_sample, d_acqu_x_sample = (acquisition.\n acquisition_function_withGradients(x))\n acqu_x += acqu_x_sample\n d_acqu_x += d_acqu_x_sample\n acqu_x = acqu_x / acquisition.model.num_hmc_samples\n d_acqu_x = d_acqu_x / acquisition.model.num_hmc_samples\n return acqu_x, d_acqu_x\n\n\ndef best_guess(f, X):\n \"\"\"\n Gets the best current guess from a vector.\n :param f: function to evaluate.\n :param X: locations.\n \"\"\"\n n = X.shape[0]\n xbest = np.zeros(n)\n for i in range(n):\n ff = f(X[0:i + 1])\n xbest[i] = ff[np.argmin(ff)]\n return xbest\n\n\ndef samples_multidimensional_uniform(bounds, num_data):\n \"\"\"\n Generates a multidimensional grid uniformly distributed.\n :param bounds: tuple defining the box constraints.\n :num_data: number of data points to generate.\n\n \"\"\"\n dim = len(bounds)\n Z_rand = np.zeros(shape=(num_data, dim))\n for k 
in range(0, dim):\n Z_rand[:, k] = np.random.uniform(low=bounds[k][0], high=bounds[k][1\n ], size=num_data)\n return Z_rand\n\n\ndef reshape(x, input_dim):\n \"\"\"\n Reshapes x into a matrix with input_dim columns\n\n \"\"\"\n x = np.array(x)\n if x.size == input_dim:\n x = x.reshape((1, input_dim))\n return x\n\n\n<mask token>\n\n\ndef get_d_moments(model, x):\n \"\"\"\n Gradients with respect to x of the moments (mean and sdev.) of the GP\n :param model: GPy model.\n :param x: location where the gradients are evaluated.\n \"\"\"\n input_dim = model.input_dim\n x = reshape(x, input_dim)\n _, v = model.predict(x)\n dmdx, dvdx = model.predictive_gradients(x)\n dmdx = dmdx[:, :, 0]\n dsdx = dvdx / (2 * np.sqrt(v))\n return dmdx, dsdx\n\n\ndef get_quantiles(acquisition_par, fmin, m, s):\n \"\"\"\n Quantiles of the Gaussian distribution useful to determine the acquisition function values\n :param acquisition_par: parameter of the acquisition function\n :param fmin: current minimum.\n :param m: vector of means.\n :param s: vector of standard deviations.\n \"\"\"\n if isinstance(s, np.ndarray):\n s[s < 1e-10] = 1e-10\n elif s < 1e-10:\n s = 1e-10\n u = (fmin - m - acquisition_par) / s\n phi = np.exp(-0.5 * u ** 2) / np.sqrt(2 * np.pi)\n Phi = 0.5 * erfc(-u / np.sqrt(2))\n return phi, Phi, u\n\n\ndef best_value(Y, sign=1):\n \"\"\"\n Returns a vector whose components i are the minimum (default) or maximum of Y[:i]\n \"\"\"\n n = Y.shape[0]\n Y_best = np.ones(n)\n for i in range(n):\n if sign == 1:\n Y_best[i] = Y[:i + 1].min()\n else:\n Y_best[i] = Y[:i + 1].max()\n return Y_best\n\n\n<mask token>\n\n\ndef evaluate_function(f, X):\n \"\"\"\n Returns the evaluation of a function *f* and the time per evaluation\n \"\"\"\n num_data, dim_data = X.shape\n Y_eval = np.zeros((num_data, dim_data))\n Y_time = np.zeros((num_data, 1))\n for i in range(num_data):\n time_zero = time.time()\n Y_eval[i, :] = f(X[i, :])\n Y_time[i, :] = time.time() - time_zero\n return Y_eval, 
Y_time\n\n\ndef values_to_array(input_values):\n \"\"\"\n Transforms a values of int, float and tuples to a column vector numpy array\n \"\"\"\n if type(input_values) == tuple:\n values = np.array(input_values).reshape(-1, 1)\n elif type(input_values) == np.ndarray:\n values = np.atleast_2d(input_values)\n elif type(input_values) == int or type(input_values) == float or type(np\n .int64):\n values = np.atleast_2d(np.array(input_values))\n else:\n print('Type to transform not recognized')\n return values\n\n\ndef merge_values(values1, values2):\n \"\"\"\n Merges two numpy arrays by calculating all possible combinations of rows\n \"\"\"\n array1 = values_to_array(values1)\n array2 = values_to_array(values2)\n if array1.size == 0:\n return array2\n if array2.size == 0:\n return array1\n merged_array = []\n for row_array1 in array1:\n for row_array2 in array2:\n merged_row = np.hstack((row_array1, row_array2))\n merged_array.append(merged_row)\n return np.atleast_2d(merged_array)\n\n\ndef normalize(Y, normalization_type='stats'):\n \"\"\"Normalize the vector Y using statistics or its range.\n\n :param Y: Row or column vector that you want to normalize.\n :param normalization_type: String specifying the kind of normalization\n to use. Options are 'stats' to use mean and standard deviation,\n or 'maxmin' to use the range of function values.\n :return Y_normalized: The normalized vector.\n \"\"\"\n Y = np.asarray(Y, dtype=float)\n if np.max(Y.shape) != Y.size:\n raise NotImplementedError('Only 1-dimensional arrays are supported.')\n if normalization_type == 'stats':\n Y_norm = Y - Y.mean()\n std = Y.std()\n if std > 0:\n Y_norm /= std\n elif normalization_type == 'maxmin':\n Y_norm = Y - Y.min()\n y_range = np.ptp(Y)\n if y_range > 0:\n Y_norm /= y_range\n Y_norm = 2 * (Y_norm - 0.5)\n else:\n raise ValueError('Unknown normalization type: {}'.format(\n normalization_type))\n return Y_norm\n",
"step-4": "import numpy as np\nfrom scipy.special import erfc\nimport time\nfrom ..core.errors import InvalidConfigError\n\n\ndef compute_integrated_acquisition(acquisition, x):\n \"\"\"\n Used to compute the acquisition function when samples of the hyper-parameters have been generated (used in GP_MCMC model).\n\n :param acquisition: acquisition function with GpyOpt model type GP_MCMC.\n :param x: location where the acquisition is evaluated.\n \"\"\"\n acqu_x = 0\n for i in range(acquisition.model.num_hmc_samples):\n acquisition.model.model.kern[:] = acquisition.model.hmc_samples[i, :]\n acqu_x += acquisition.acquisition_function(x)\n acqu_x = acqu_x / acquisition.model.num_hmc_samples\n return acqu_x\n\n\ndef compute_integrated_acquisition_withGradients(acquisition, x):\n \"\"\"\n Used to compute the acquisition function with gradients when samples of the hyper-parameters have been generated (used in GP_MCMC model).\n\n :param acquisition: acquisition function with GpyOpt model type GP_MCMC.\n :param x: location where the acquisition is evaluated.\n \"\"\"\n acqu_x = 0\n d_acqu_x = 0\n for i in range(acquisition.model.num_hmc_samples):\n acquisition.model.model.kern[:] = acquisition.model.hmc_samples[i, :]\n acqu_x_sample, d_acqu_x_sample = (acquisition.\n acquisition_function_withGradients(x))\n acqu_x += acqu_x_sample\n d_acqu_x += d_acqu_x_sample\n acqu_x = acqu_x / acquisition.model.num_hmc_samples\n d_acqu_x = d_acqu_x / acquisition.model.num_hmc_samples\n return acqu_x, d_acqu_x\n\n\ndef best_guess(f, X):\n \"\"\"\n Gets the best current guess from a vector.\n :param f: function to evaluate.\n :param X: locations.\n \"\"\"\n n = X.shape[0]\n xbest = np.zeros(n)\n for i in range(n):\n ff = f(X[0:i + 1])\n xbest[i] = ff[np.argmin(ff)]\n return xbest\n\n\ndef samples_multidimensional_uniform(bounds, num_data):\n \"\"\"\n Generates a multidimensional grid uniformly distributed.\n :param bounds: tuple defining the box constraints.\n :num_data: number of data 
points to generate.\n\n \"\"\"\n dim = len(bounds)\n Z_rand = np.zeros(shape=(num_data, dim))\n for k in range(0, dim):\n Z_rand[:, k] = np.random.uniform(low=bounds[k][0], high=bounds[k][1\n ], size=num_data)\n return Z_rand\n\n\ndef reshape(x, input_dim):\n \"\"\"\n Reshapes x into a matrix with input_dim columns\n\n \"\"\"\n x = np.array(x)\n if x.size == input_dim:\n x = x.reshape((1, input_dim))\n return x\n\n\ndef get_moments(model, x):\n \"\"\"\n Moments (mean and sdev.) of a GP model at x\n\n \"\"\"\n input_dim = model.X.shape[1]\n x = reshape(x, input_dim)\n fmin = min(model.predict(model.X)[0])\n m, v = model.predict(x)\n s = np.sqrt(np.clip(v, 0, np.inf))\n return m, s, fmin\n\n\ndef get_d_moments(model, x):\n \"\"\"\n Gradients with respect to x of the moments (mean and sdev.) of the GP\n :param model: GPy model.\n :param x: location where the gradients are evaluated.\n \"\"\"\n input_dim = model.input_dim\n x = reshape(x, input_dim)\n _, v = model.predict(x)\n dmdx, dvdx = model.predictive_gradients(x)\n dmdx = dmdx[:, :, 0]\n dsdx = dvdx / (2 * np.sqrt(v))\n return dmdx, dsdx\n\n\ndef get_quantiles(acquisition_par, fmin, m, s):\n \"\"\"\n Quantiles of the Gaussian distribution useful to determine the acquisition function values\n :param acquisition_par: parameter of the acquisition function\n :param fmin: current minimum.\n :param m: vector of means.\n :param s: vector of standard deviations.\n \"\"\"\n if isinstance(s, np.ndarray):\n s[s < 1e-10] = 1e-10\n elif s < 1e-10:\n s = 1e-10\n u = (fmin - m - acquisition_par) / s\n phi = np.exp(-0.5 * u ** 2) / np.sqrt(2 * np.pi)\n Phi = 0.5 * erfc(-u / np.sqrt(2))\n return phi, Phi, u\n\n\ndef best_value(Y, sign=1):\n \"\"\"\n Returns a vector whose components i are the minimum (default) or maximum of Y[:i]\n \"\"\"\n n = Y.shape[0]\n Y_best = np.ones(n)\n for i in range(n):\n if sign == 1:\n Y_best[i] = Y[:i + 1].min()\n else:\n Y_best[i] = Y[:i + 1].max()\n return Y_best\n\n\ndef spawn(f):\n \"\"\"\n 
Function for parallel evaluation of the acquisition function\n \"\"\"\n\n def fun(pipe, x):\n pipe.send(f(x))\n pipe.close()\n return fun\n\n\ndef evaluate_function(f, X):\n \"\"\"\n Returns the evaluation of a function *f* and the time per evaluation\n \"\"\"\n num_data, dim_data = X.shape\n Y_eval = np.zeros((num_data, dim_data))\n Y_time = np.zeros((num_data, 1))\n for i in range(num_data):\n time_zero = time.time()\n Y_eval[i, :] = f(X[i, :])\n Y_time[i, :] = time.time() - time_zero\n return Y_eval, Y_time\n\n\ndef values_to_array(input_values):\n \"\"\"\n Transforms a values of int, float and tuples to a column vector numpy array\n \"\"\"\n if type(input_values) == tuple:\n values = np.array(input_values).reshape(-1, 1)\n elif type(input_values) == np.ndarray:\n values = np.atleast_2d(input_values)\n elif type(input_values) == int or type(input_values) == float or type(np\n .int64):\n values = np.atleast_2d(np.array(input_values))\n else:\n print('Type to transform not recognized')\n return values\n\n\ndef merge_values(values1, values2):\n \"\"\"\n Merges two numpy arrays by calculating all possible combinations of rows\n \"\"\"\n array1 = values_to_array(values1)\n array2 = values_to_array(values2)\n if array1.size == 0:\n return array2\n if array2.size == 0:\n return array1\n merged_array = []\n for row_array1 in array1:\n for row_array2 in array2:\n merged_row = np.hstack((row_array1, row_array2))\n merged_array.append(merged_row)\n return np.atleast_2d(merged_array)\n\n\ndef normalize(Y, normalization_type='stats'):\n \"\"\"Normalize the vector Y using statistics or its range.\n\n :param Y: Row or column vector that you want to normalize.\n :param normalization_type: String specifying the kind of normalization\n to use. 
Options are 'stats' to use mean and standard deviation,\n or 'maxmin' to use the range of function values.\n :return Y_normalized: The normalized vector.\n \"\"\"\n Y = np.asarray(Y, dtype=float)\n if np.max(Y.shape) != Y.size:\n raise NotImplementedError('Only 1-dimensional arrays are supported.')\n if normalization_type == 'stats':\n Y_norm = Y - Y.mean()\n std = Y.std()\n if std > 0:\n Y_norm /= std\n elif normalization_type == 'maxmin':\n Y_norm = Y - Y.min()\n y_range = np.ptp(Y)\n if y_range > 0:\n Y_norm /= y_range\n Y_norm = 2 * (Y_norm - 0.5)\n else:\n raise ValueError('Unknown normalization type: {}'.format(\n normalization_type))\n return Y_norm\n",
"step-5": "# Copyright (c) 2016, the GPyOpt Authors\n# Licensed under the BSD 3-clause license (see LICENSE.txt)\n\nimport numpy as np\nfrom scipy.special import erfc\nimport time\nfrom ..core.errors import InvalidConfigError\n\ndef compute_integrated_acquisition(acquisition,x):\n '''\n Used to compute the acquisition function when samples of the hyper-parameters have been generated (used in GP_MCMC model).\n\n :param acquisition: acquisition function with GpyOpt model type GP_MCMC.\n :param x: location where the acquisition is evaluated.\n '''\n\n acqu_x = 0\n\n for i in range(acquisition.model.num_hmc_samples):\n acquisition.model.model.kern[:] = acquisition.model.hmc_samples[i,:]\n acqu_x += acquisition.acquisition_function(x)\n\n acqu_x = acqu_x/acquisition.model.num_hmc_samples\n return acqu_x\n\ndef compute_integrated_acquisition_withGradients(acquisition,x):\n '''\n Used to compute the acquisition function with gradients when samples of the hyper-parameters have been generated (used in GP_MCMC model).\n\n :param acquisition: acquisition function with GpyOpt model type GP_MCMC.\n :param x: location where the acquisition is evaluated.\n '''\n\n acqu_x = 0\n d_acqu_x = 0\n\n for i in range(acquisition.model.num_hmc_samples):\n acquisition.model.model.kern[:] = acquisition.model.hmc_samples[i,:]\n acqu_x_sample, d_acqu_x_sample = acquisition.acquisition_function_withGradients(x)\n acqu_x += acqu_x_sample\n d_acqu_x += d_acqu_x_sample\n\n acqu_x = acqu_x/acquisition.model.num_hmc_samples\n d_acqu_x = d_acqu_x/acquisition.model.num_hmc_samples\n\n return acqu_x, d_acqu_x\n\n\ndef best_guess(f,X):\n '''\n Gets the best current guess from a vector.\n :param f: function to evaluate.\n :param X: locations.\n '''\n n = X.shape[0]\n xbest = np.zeros(n)\n for i in range(n):\n ff = f(X[0:(i+1)])\n xbest[i] = ff[np.argmin(ff)]\n return xbest\n\n\ndef samples_multidimensional_uniform(bounds,num_data):\n '''\n Generates a multidimensional grid uniformly distributed.\n :param 
bounds: tuple defining the box constraints.\n :num_data: number of data points to generate.\n\n '''\n dim = len(bounds)\n Z_rand = np.zeros(shape=(num_data,dim))\n for k in range(0,dim): Z_rand[:,k] = np.random.uniform(low=bounds[k][0],high=bounds[k][1],size=num_data)\n return Z_rand\n\n\ndef reshape(x,input_dim):\n '''\n Reshapes x into a matrix with input_dim columns\n\n '''\n x = np.array(x)\n if x.size ==input_dim:\n x = x.reshape((1,input_dim))\n return x\n\ndef get_moments(model,x):\n '''\n Moments (mean and sdev.) of a GP model at x\n\n '''\n input_dim = model.X.shape[1]\n x = reshape(x,input_dim)\n fmin = min(model.predict(model.X)[0])\n m, v = model.predict(x)\n s = np.sqrt(np.clip(v, 0, np.inf))\n return (m,s, fmin)\n\ndef get_d_moments(model,x):\n '''\n Gradients with respect to x of the moments (mean and sdev.) of the GP\n :param model: GPy model.\n :param x: location where the gradients are evaluated.\n '''\n input_dim = model.input_dim\n x = reshape(x,input_dim)\n _, v = model.predict(x)\n dmdx, dvdx = model.predictive_gradients(x)\n dmdx = dmdx[:,:,0]\n dsdx = dvdx / (2*np.sqrt(v))\n return (dmdx, dsdx)\n\n\ndef get_quantiles(acquisition_par, fmin, m, s):\n '''\n Quantiles of the Gaussian distribution useful to determine the acquisition function values\n :param acquisition_par: parameter of the acquisition function\n :param fmin: current minimum.\n :param m: vector of means.\n :param s: vector of standard deviations.\n '''\n if isinstance(s, np.ndarray):\n s[s<1e-10] = 1e-10\n elif s< 1e-10:\n s = 1e-10\n u = (fmin - m - acquisition_par)/s\n phi = np.exp(-0.5 * u**2) / np.sqrt(2*np.pi)\n Phi = 0.5 * erfc(-u / np.sqrt(2))\n return (phi, Phi, u)\n\n\ndef best_value(Y,sign=1):\n '''\n Returns a vector whose components i are the minimum (default) or maximum of Y[:i]\n '''\n n = Y.shape[0]\n Y_best = np.ones(n)\n for i in range(n):\n if sign == 1:\n Y_best[i]=Y[:(i+1)].min()\n else:\n Y_best[i]=Y[:(i+1)].max()\n return Y_best\n\ndef spawn(f):\n '''\n 
Function for parallel evaluation of the acquisition function\n '''\n def fun(pipe,x):\n pipe.send(f(x))\n pipe.close()\n return fun\n\n\ndef evaluate_function(f,X):\n '''\n Returns the evaluation of a function *f* and the time per evaluation\n '''\n num_data, dim_data = X.shape\n Y_eval = np.zeros((num_data, dim_data))\n Y_time = np.zeros((num_data, 1))\n for i in range(num_data):\n time_zero = time.time()\n Y_eval[i,:] = f(X[i,:])\n Y_time[i,:] = time.time() - time_zero\n return Y_eval, Y_time\n\n\ndef values_to_array(input_values):\n '''\n Transforms a values of int, float and tuples to a column vector numpy array\n '''\n if type(input_values)==tuple:\n values = np.array(input_values).reshape(-1,1)\n elif type(input_values) == np.ndarray:\n values = np.atleast_2d(input_values)\n elif type(input_values)==int or type(input_values)==float or type(np.int64):\n values = np.atleast_2d(np.array(input_values))\n else:\n print('Type to transform not recognized')\n return values\n\n\ndef merge_values(values1,values2):\n '''\n Merges two numpy arrays by calculating all possible combinations of rows\n '''\n array1 = values_to_array(values1)\n array2 = values_to_array(values2)\n\n if array1.size == 0:\n return array2\n if array2.size == 0:\n return array1\n\n merged_array = []\n for row_array1 in array1:\n for row_array2 in array2:\n merged_row = np.hstack((row_array1,row_array2))\n merged_array.append(merged_row)\n return np.atleast_2d(merged_array)\n\n\ndef normalize(Y, normalization_type='stats'):\n \"\"\"Normalize the vector Y using statistics or its range.\n\n :param Y: Row or column vector that you want to normalize.\n :param normalization_type: String specifying the kind of normalization\n to use. 
Options are 'stats' to use mean and standard deviation,\n or 'maxmin' to use the range of function values.\n :return Y_normalized: The normalized vector.\n \"\"\"\n Y = np.asarray(Y, dtype=float)\n\n if np.max(Y.shape) != Y.size:\n raise NotImplementedError('Only 1-dimensional arrays are supported.')\n\n # Only normalize with non null sdev (divide by zero). For only one\n # data point both std and ptp return 0.\n if normalization_type == 'stats':\n Y_norm = Y - Y.mean()\n std = Y.std()\n if std > 0:\n Y_norm /= std\n elif normalization_type == 'maxmin':\n Y_norm = Y - Y.min()\n y_range = np.ptp(Y)\n if y_range > 0:\n Y_norm /= y_range\n # A range of [-1, 1] is more natural for a zero-mean GP\n Y_norm = 2 * (Y_norm - 0.5)\n else:\n raise ValueError('Unknown normalization type: {}'.format(normalization_type))\n\n return Y_norm\n",
"step-ids": [
7,
9,
12,
15,
16
]
}
|
[
7,
9,
12,
15,
16
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
print(c)
<|reserved_special_token_1|>
i = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
f = [10.5, 12.2, 13.7, 14.9, 14.9, 18.8, 19.7, 23.6, 90.9, 25.7]
s = ['Arpi', 'world', 'Hello', 'Python', 'Consultadd', 'job', 'c++',
'Concepts', 'interesting']
c = [1 + 2.0j, 2 + 3.0j, 4 + 5.0j, 5 + 6.0j, 56 + 7.0j, 8 + 9.0j, 7 + 8.0j,
3 + 6.0j, 7 + 9.0j]
print(c)
<|reserved_special_token_1|>
# 1.Create a list of 10 elements of four different data types like int, string, complex and float.
i=[1,2,3,4,5,6,7,8,9,10]
f=[10.5,12.2,13.7,14.9,14.9,18.8,19.7,23.6,90.9,25.7]
s=['Arpi','world','Hello','Python','Consultadd','job','c++','Concepts','interesting']
c=[1+2j,2+3j,4+5j,5+6j,56+7j,8+9j,7+8j,3+6j,7+9j]
print(c)
|
flexible
|
{
"blob_id": "87d1c28819d187944a3cf99b35b1d41eab11b139",
"index": 6652,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(c)\n",
"step-3": "i = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]\nf = [10.5, 12.2, 13.7, 14.9, 14.9, 18.8, 19.7, 23.6, 90.9, 25.7]\ns = ['Arpi', 'world', 'Hello', 'Python', 'Consultadd', 'job', 'c++',\n 'Concepts', 'interesting']\nc = [1 + 2.0j, 2 + 3.0j, 4 + 5.0j, 5 + 6.0j, 56 + 7.0j, 8 + 9.0j, 7 + 8.0j,\n 3 + 6.0j, 7 + 9.0j]\nprint(c)\n",
"step-4": "# 1.Create a list of 10 elements of four different data types like int, string, complex and float.\n\ni=[1,2,3,4,5,6,7,8,9,10]\nf=[10.5,12.2,13.7,14.9,14.9,18.8,19.7,23.6,90.9,25.7]\ns=['Arpi','world','Hello','Python','Consultadd','job','c++','Concepts','interesting']\nc=[1+2j,2+3j,4+5j,5+6j,56+7j,8+9j,7+8j,3+6j,7+9j]\nprint(c)",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
"""
Tests based on: https://github.com/pydata/xarray/blob/071da2a900702d65c47d265192bc7e424bb57932/xarray/tests/test_backends_file_manager.py
"""
import concurrent.futures
import gc
import pickle
from unittest import mock
import pytest
from rioxarray._io import URIManager
def test_uri_manager_mock_write():
    # A mocked opener lets us verify open/write/close calls without a real file.
    handle = mock.Mock()
    opener = mock.Mock(spec=open, return_value=handle)
    manager = URIManager(opener, "filename")
    manager.acquire().write("contents")
    manager.close()

    opener.assert_called_once_with("filename", mode="r")
    handle.write.assert_called_once_with("contents")
    handle.close.assert_called_once_with()
def test_uri_manager_mock_write__threaded():
    handle = mock.Mock()
    opener = mock.Mock(spec=open, return_value=handle)
    manager = URIManager(opener, "filename")

    def write(_):
        nonlocal manager
        fh = manager.acquire()
        fh.write("contents")
        # Reset the per-thread handle; the assertions below expect one
        # fresh open per task (5 total).
        manager._local.thread_manager = None

    with concurrent.futures.ThreadPoolExecutor(max_workers=2) as executor:
        list(executor.map(write, range(5)))

    gc.collect()

    opener.assert_has_calls([mock.call("filename", mode="r") for _ in range(5)])
    handle.write.assert_has_calls([mock.call("contents") for _ in range(5)])
    handle.close.assert_has_calls([mock.call() for _ in range(5)])
@pytest.mark.parametrize("expected_warning", [None, RuntimeWarning])
def test_uri_manager_autoclose(expected_warning):
mock_file = mock.Mock()
opener = mock.Mock(return_value=mock_file)
manager = URIManager(opener, "filename")
manager.acquire()
del manager
gc.collect()
mock_file.close.assert_called_once_with()
def test_uri_manager_write_concurrent(tmpdir):
path = str(tmpdir.join("testing.txt"))
manager = URIManager(open, path, mode="w")
f1 = manager.acquire()
f2 = manager.acquire()
f3 = manager.acquire()
assert f1 is f2
assert f2 is f3
f1.write("foo")
f1.flush()
f2.write("bar")
f2.flush()
f3.write("baz")
f3.flush()
del manager
gc.collect()
with open(path) as f:
assert f.read() == "foobarbaz"
def test_uri_manager_write_pickle(tmpdir):
path = str(tmpdir.join("testing.txt"))
manager = URIManager(open, path, mode="a")
f = manager.acquire()
f.write("foo")
f.flush()
manager2 = pickle.loads(pickle.dumps(manager))
f2 = manager2.acquire()
f2.write("bar")
del manager
del manager2
gc.collect()
with open(path) as f:
assert f.read() == "foobar"
def test_uri_manager_read(tmpdir):
path = str(tmpdir.join("testing.txt"))
with open(path, "w") as f:
f.write("foobar")
manager = URIManager(open, path)
f = manager.acquire()
assert f.read() == "foobar"
manager.close()
def test_uri_manager_acquire_context(tmpdir):
path = str(tmpdir.join("testing.txt"))
with open(path, "w") as f:
f.write("foobar")
class AcquisitionError(Exception):
pass
manager = URIManager(open, path)
with pytest.raises(AcquisitionError):
with manager.acquire_context() as f:
assert f.read() == "foobar"
raise AcquisitionError
with manager.acquire_context() as f:
assert f.read() == "foobar"
with pytest.raises(AcquisitionError):
with manager.acquire_context() as f:
f.seek(0)
assert f.read() == "foobar"
raise AcquisitionError
manager.close()
|
normal
|
{
"blob_id": "8fe71e87512dfd2ccfcd21c9c175cb50274d9661",
"index": 1867,
"step-1": "<mask token>\n\n\ndef test_uri_manager_mock_write():\n mock_file = mock.Mock()\n opener = mock.Mock(spec=open, return_value=mock_file)\n manager = URIManager(opener, 'filename')\n f = manager.acquire()\n f.write('contents')\n manager.close()\n opener.assert_called_once_with('filename', mode='r')\n mock_file.write.assert_called_once_with('contents')\n mock_file.close.assert_called_once_with()\n\n\ndef test_uri_manager_mock_write__threaded():\n mock_file = mock.Mock()\n opener = mock.Mock(spec=open, return_value=mock_file)\n manager = URIManager(opener, 'filename')\n\n def write(iter):\n nonlocal manager\n fh = manager.acquire()\n fh.write('contents')\n manager._local.thread_manager = None\n with concurrent.futures.ThreadPoolExecutor(max_workers=2) as executor:\n for result in executor.map(write, range(5)):\n pass\n gc.collect()\n opener.assert_has_calls([mock.call('filename', mode='r') for _ in range(5)]\n )\n mock_file.write.assert_has_calls([mock.call('contents') for _ in range(5)])\n mock_file.close.assert_has_calls([mock.call() for _ in range(5)])\n\n\n@pytest.mark.parametrize('expected_warning', [None, RuntimeWarning])\ndef test_uri_manager_autoclose(expected_warning):\n mock_file = mock.Mock()\n opener = mock.Mock(return_value=mock_file)\n manager = URIManager(opener, 'filename')\n manager.acquire()\n del manager\n gc.collect()\n mock_file.close.assert_called_once_with()\n\n\ndef test_uri_manager_write_concurrent(tmpdir):\n path = str(tmpdir.join('testing.txt'))\n manager = URIManager(open, path, mode='w')\n f1 = manager.acquire()\n f2 = manager.acquire()\n f3 = manager.acquire()\n assert f1 is f2\n assert f2 is f3\n f1.write('foo')\n f1.flush()\n f2.write('bar')\n f2.flush()\n f3.write('baz')\n f3.flush()\n del manager\n gc.collect()\n with open(path) as f:\n assert f.read() == 'foobarbaz'\n\n\ndef test_uri_manager_write_pickle(tmpdir):\n path = str(tmpdir.join('testing.txt'))\n manager = URIManager(open, path, mode='a')\n f = manager.acquire()\n 
f.write('foo')\n f.flush()\n manager2 = pickle.loads(pickle.dumps(manager))\n f2 = manager2.acquire()\n f2.write('bar')\n del manager\n del manager2\n gc.collect()\n with open(path) as f:\n assert f.read() == 'foobar'\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef test_uri_manager_mock_write():\n mock_file = mock.Mock()\n opener = mock.Mock(spec=open, return_value=mock_file)\n manager = URIManager(opener, 'filename')\n f = manager.acquire()\n f.write('contents')\n manager.close()\n opener.assert_called_once_with('filename', mode='r')\n mock_file.write.assert_called_once_with('contents')\n mock_file.close.assert_called_once_with()\n\n\ndef test_uri_manager_mock_write__threaded():\n mock_file = mock.Mock()\n opener = mock.Mock(spec=open, return_value=mock_file)\n manager = URIManager(opener, 'filename')\n\n def write(iter):\n nonlocal manager\n fh = manager.acquire()\n fh.write('contents')\n manager._local.thread_manager = None\n with concurrent.futures.ThreadPoolExecutor(max_workers=2) as executor:\n for result in executor.map(write, range(5)):\n pass\n gc.collect()\n opener.assert_has_calls([mock.call('filename', mode='r') for _ in range(5)]\n )\n mock_file.write.assert_has_calls([mock.call('contents') for _ in range(5)])\n mock_file.close.assert_has_calls([mock.call() for _ in range(5)])\n\n\n@pytest.mark.parametrize('expected_warning', [None, RuntimeWarning])\ndef test_uri_manager_autoclose(expected_warning):\n mock_file = mock.Mock()\n opener = mock.Mock(return_value=mock_file)\n manager = URIManager(opener, 'filename')\n manager.acquire()\n del manager\n gc.collect()\n mock_file.close.assert_called_once_with()\n\n\ndef test_uri_manager_write_concurrent(tmpdir):\n path = str(tmpdir.join('testing.txt'))\n manager = URIManager(open, path, mode='w')\n f1 = manager.acquire()\n f2 = manager.acquire()\n f3 = manager.acquire()\n assert f1 is f2\n assert f2 is f3\n f1.write('foo')\n f1.flush()\n f2.write('bar')\n f2.flush()\n f3.write('baz')\n f3.flush()\n del manager\n gc.collect()\n with open(path) as f:\n assert f.read() == 'foobarbaz'\n\n\ndef test_uri_manager_write_pickle(tmpdir):\n path = str(tmpdir.join('testing.txt'))\n manager = URIManager(open, path, mode='a')\n f = manager.acquire()\n 
f.write('foo')\n f.flush()\n manager2 = pickle.loads(pickle.dumps(manager))\n f2 = manager2.acquire()\n f2.write('bar')\n del manager\n del manager2\n gc.collect()\n with open(path) as f:\n assert f.read() == 'foobar'\n\n\ndef test_uri_manager_read(tmpdir):\n path = str(tmpdir.join('testing.txt'))\n with open(path, 'w') as f:\n f.write('foobar')\n manager = URIManager(open, path)\n f = manager.acquire()\n assert f.read() == 'foobar'\n manager.close()\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef test_uri_manager_mock_write():\n mock_file = mock.Mock()\n opener = mock.Mock(spec=open, return_value=mock_file)\n manager = URIManager(opener, 'filename')\n f = manager.acquire()\n f.write('contents')\n manager.close()\n opener.assert_called_once_with('filename', mode='r')\n mock_file.write.assert_called_once_with('contents')\n mock_file.close.assert_called_once_with()\n\n\ndef test_uri_manager_mock_write__threaded():\n mock_file = mock.Mock()\n opener = mock.Mock(spec=open, return_value=mock_file)\n manager = URIManager(opener, 'filename')\n\n def write(iter):\n nonlocal manager\n fh = manager.acquire()\n fh.write('contents')\n manager._local.thread_manager = None\n with concurrent.futures.ThreadPoolExecutor(max_workers=2) as executor:\n for result in executor.map(write, range(5)):\n pass\n gc.collect()\n opener.assert_has_calls([mock.call('filename', mode='r') for _ in range(5)]\n )\n mock_file.write.assert_has_calls([mock.call('contents') for _ in range(5)])\n mock_file.close.assert_has_calls([mock.call() for _ in range(5)])\n\n\n@pytest.mark.parametrize('expected_warning', [None, RuntimeWarning])\ndef test_uri_manager_autoclose(expected_warning):\n mock_file = mock.Mock()\n opener = mock.Mock(return_value=mock_file)\n manager = URIManager(opener, 'filename')\n manager.acquire()\n del manager\n gc.collect()\n mock_file.close.assert_called_once_with()\n\n\ndef test_uri_manager_write_concurrent(tmpdir):\n path = str(tmpdir.join('testing.txt'))\n manager = URIManager(open, path, mode='w')\n f1 = manager.acquire()\n f2 = manager.acquire()\n f3 = manager.acquire()\n assert f1 is f2\n assert f2 is f3\n f1.write('foo')\n f1.flush()\n f2.write('bar')\n f2.flush()\n f3.write('baz')\n f3.flush()\n del manager\n gc.collect()\n with open(path) as f:\n assert f.read() == 'foobarbaz'\n\n\ndef test_uri_manager_write_pickle(tmpdir):\n path = str(tmpdir.join('testing.txt'))\n manager = URIManager(open, path, mode='a')\n f = manager.acquire()\n 
f.write('foo')\n f.flush()\n manager2 = pickle.loads(pickle.dumps(manager))\n f2 = manager2.acquire()\n f2.write('bar')\n del manager\n del manager2\n gc.collect()\n with open(path) as f:\n assert f.read() == 'foobar'\n\n\ndef test_uri_manager_read(tmpdir):\n path = str(tmpdir.join('testing.txt'))\n with open(path, 'w') as f:\n f.write('foobar')\n manager = URIManager(open, path)\n f = manager.acquire()\n assert f.read() == 'foobar'\n manager.close()\n\n\ndef test_uri_manager_acquire_context(tmpdir):\n path = str(tmpdir.join('testing.txt'))\n with open(path, 'w') as f:\n f.write('foobar')\n\n\n class AcquisitionError(Exception):\n pass\n manager = URIManager(open, path)\n with pytest.raises(AcquisitionError):\n with manager.acquire_context() as f:\n assert f.read() == 'foobar'\n raise AcquisitionError\n with manager.acquire_context() as f:\n assert f.read() == 'foobar'\n with pytest.raises(AcquisitionError):\n with manager.acquire_context() as f:\n f.seek(0)\n assert f.read() == 'foobar'\n raise AcquisitionError\n manager.close()\n",
"step-4": "<mask token>\nimport concurrent.futures\nimport gc\nimport pickle\nfrom unittest import mock\nimport pytest\nfrom rioxarray._io import URIManager\n\n\ndef test_uri_manager_mock_write():\n mock_file = mock.Mock()\n opener = mock.Mock(spec=open, return_value=mock_file)\n manager = URIManager(opener, 'filename')\n f = manager.acquire()\n f.write('contents')\n manager.close()\n opener.assert_called_once_with('filename', mode='r')\n mock_file.write.assert_called_once_with('contents')\n mock_file.close.assert_called_once_with()\n\n\ndef test_uri_manager_mock_write__threaded():\n mock_file = mock.Mock()\n opener = mock.Mock(spec=open, return_value=mock_file)\n manager = URIManager(opener, 'filename')\n\n def write(iter):\n nonlocal manager\n fh = manager.acquire()\n fh.write('contents')\n manager._local.thread_manager = None\n with concurrent.futures.ThreadPoolExecutor(max_workers=2) as executor:\n for result in executor.map(write, range(5)):\n pass\n gc.collect()\n opener.assert_has_calls([mock.call('filename', mode='r') for _ in range(5)]\n )\n mock_file.write.assert_has_calls([mock.call('contents') for _ in range(5)])\n mock_file.close.assert_has_calls([mock.call() for _ in range(5)])\n\n\n@pytest.mark.parametrize('expected_warning', [None, RuntimeWarning])\ndef test_uri_manager_autoclose(expected_warning):\n mock_file = mock.Mock()\n opener = mock.Mock(return_value=mock_file)\n manager = URIManager(opener, 'filename')\n manager.acquire()\n del manager\n gc.collect()\n mock_file.close.assert_called_once_with()\n\n\ndef test_uri_manager_write_concurrent(tmpdir):\n path = str(tmpdir.join('testing.txt'))\n manager = URIManager(open, path, mode='w')\n f1 = manager.acquire()\n f2 = manager.acquire()\n f3 = manager.acquire()\n assert f1 is f2\n assert f2 is f3\n f1.write('foo')\n f1.flush()\n f2.write('bar')\n f2.flush()\n f3.write('baz')\n f3.flush()\n del manager\n gc.collect()\n with open(path) as f:\n assert f.read() == 'foobarbaz'\n\n\ndef 
test_uri_manager_write_pickle(tmpdir):\n path = str(tmpdir.join('testing.txt'))\n manager = URIManager(open, path, mode='a')\n f = manager.acquire()\n f.write('foo')\n f.flush()\n manager2 = pickle.loads(pickle.dumps(manager))\n f2 = manager2.acquire()\n f2.write('bar')\n del manager\n del manager2\n gc.collect()\n with open(path) as f:\n assert f.read() == 'foobar'\n\n\ndef test_uri_manager_read(tmpdir):\n path = str(tmpdir.join('testing.txt'))\n with open(path, 'w') as f:\n f.write('foobar')\n manager = URIManager(open, path)\n f = manager.acquire()\n assert f.read() == 'foobar'\n manager.close()\n\n\ndef test_uri_manager_acquire_context(tmpdir):\n path = str(tmpdir.join('testing.txt'))\n with open(path, 'w') as f:\n f.write('foobar')\n\n\n class AcquisitionError(Exception):\n pass\n manager = URIManager(open, path)\n with pytest.raises(AcquisitionError):\n with manager.acquire_context() as f:\n assert f.read() == 'foobar'\n raise AcquisitionError\n with manager.acquire_context() as f:\n assert f.read() == 'foobar'\n with pytest.raises(AcquisitionError):\n with manager.acquire_context() as f:\n f.seek(0)\n assert f.read() == 'foobar'\n raise AcquisitionError\n manager.close()\n",
"step-5": "\"\"\"\nTests based on: https://github.com/pydata/xarray/blob/071da2a900702d65c47d265192bc7e424bb57932/xarray/tests/test_backends_file_manager.py\n\"\"\"\nimport concurrent.futures\nimport gc\nimport pickle\nfrom unittest import mock\n\nimport pytest\n\nfrom rioxarray._io import URIManager\n\n\ndef test_uri_manager_mock_write():\n mock_file = mock.Mock()\n opener = mock.Mock(spec=open, return_value=mock_file)\n\n manager = URIManager(opener, \"filename\")\n f = manager.acquire()\n f.write(\"contents\")\n manager.close()\n\n opener.assert_called_once_with(\"filename\", mode=\"r\")\n mock_file.write.assert_called_once_with(\"contents\")\n mock_file.close.assert_called_once_with()\n\n\ndef test_uri_manager_mock_write__threaded():\n mock_file = mock.Mock()\n opener = mock.Mock(spec=open, return_value=mock_file)\n\n manager = URIManager(opener, \"filename\")\n\n def write(iter):\n nonlocal manager\n fh = manager.acquire()\n fh.write(\"contents\")\n manager._local.thread_manager = None\n\n with concurrent.futures.ThreadPoolExecutor(max_workers=2) as executor:\n for result in executor.map(write, range(5)):\n pass\n\n gc.collect()\n\n opener.assert_has_calls([mock.call(\"filename\", mode=\"r\") for _ in range(5)])\n mock_file.write.assert_has_calls([mock.call(\"contents\") for _ in range(5)])\n mock_file.close.assert_has_calls([mock.call() for _ in range(5)])\n\n\n@pytest.mark.parametrize(\"expected_warning\", [None, RuntimeWarning])\ndef test_uri_manager_autoclose(expected_warning):\n mock_file = mock.Mock()\n opener = mock.Mock(return_value=mock_file)\n\n manager = URIManager(opener, \"filename\")\n manager.acquire()\n\n del manager\n gc.collect()\n\n mock_file.close.assert_called_once_with()\n\n\ndef test_uri_manager_write_concurrent(tmpdir):\n path = str(tmpdir.join(\"testing.txt\"))\n manager = URIManager(open, path, mode=\"w\")\n f1 = manager.acquire()\n f2 = manager.acquire()\n f3 = manager.acquire()\n assert f1 is f2\n assert f2 is f3\n 
f1.write(\"foo\")\n f1.flush()\n f2.write(\"bar\")\n f2.flush()\n f3.write(\"baz\")\n f3.flush()\n\n del manager\n gc.collect()\n\n with open(path) as f:\n assert f.read() == \"foobarbaz\"\n\n\ndef test_uri_manager_write_pickle(tmpdir):\n path = str(tmpdir.join(\"testing.txt\"))\n manager = URIManager(open, path, mode=\"a\")\n f = manager.acquire()\n f.write(\"foo\")\n f.flush()\n manager2 = pickle.loads(pickle.dumps(manager))\n f2 = manager2.acquire()\n f2.write(\"bar\")\n del manager\n del manager2\n gc.collect()\n\n with open(path) as f:\n assert f.read() == \"foobar\"\n\n\ndef test_uri_manager_read(tmpdir):\n path = str(tmpdir.join(\"testing.txt\"))\n\n with open(path, \"w\") as f:\n f.write(\"foobar\")\n\n manager = URIManager(open, path)\n f = manager.acquire()\n assert f.read() == \"foobar\"\n manager.close()\n\n\ndef test_uri_manager_acquire_context(tmpdir):\n path = str(tmpdir.join(\"testing.txt\"))\n\n with open(path, \"w\") as f:\n f.write(\"foobar\")\n\n class AcquisitionError(Exception):\n pass\n\n manager = URIManager(open, path)\n with pytest.raises(AcquisitionError):\n with manager.acquire_context() as f:\n assert f.read() == \"foobar\"\n raise AcquisitionError\n\n with manager.acquire_context() as f:\n assert f.read() == \"foobar\"\n\n with pytest.raises(AcquisitionError):\n with manager.acquire_context() as f:\n f.seek(0)\n assert f.read() == \"foobar\"\n raise AcquisitionError\n manager.close()\n",
"step-ids": [
5,
6,
7,
8,
9
]
}
|
[
5,
6,
7,
8,
9
] |
<|reserved_special_token_0|>
@app.route('/')
def home_page():
"""Offer user choice of Madlib Games"""
return render_template('index.html', stories=stories.values())
<|reserved_special_token_0|>
@app.route('/story')
def show_story():
"""Display Madlib Story"""
answers = request.args
story_title = request.args['story_title']
for story in stories.values():
if story.title == story_title:
story_to_gen = story
return render_template('story.html', story_to_gen=story_to_gen,
user_answers=answers)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
@app.route('/')
def home_page():
"""Offer user choice of Madlib Games"""
return render_template('index.html', stories=stories.values())
@app.route('/form')
def show_form():
"""Show Form for User Input"""
story_title = request.args['madlib']
for story in stories.values():
if story.title == story_title:
story_for_form = story
return render_template('form.html', s=story_for_form, story_title=
story_title)
@app.route('/story')
def show_story():
"""Display Madlib Story"""
answers = request.args
story_title = request.args['story_title']
for story in stories.values():
if story.title == story_title:
story_to_gen = story
return render_template('story.html', story_to_gen=story_to_gen,
user_answers=answers)
@app.route('/play-again')
def play_again():
"""Redirect Home"""
return redirect('/')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
app = Flask(__name__)
@app.route('/')
def home_page():
"""Offer user choice of Madlib Games"""
return render_template('index.html', stories=stories.values())
@app.route('/form')
def show_form():
"""Show Form for User Input"""
story_title = request.args['madlib']
for story in stories.values():
if story.title == story_title:
story_for_form = story
return render_template('form.html', s=story_for_form, story_title=
story_title)
@app.route('/story')
def show_story():
"""Display Madlib Story"""
answers = request.args
story_title = request.args['story_title']
for story in stories.values():
if story.title == story_title:
story_to_gen = story
return render_template('story.html', story_to_gen=story_to_gen,
user_answers=answers)
@app.route('/play-again')
def play_again():
"""Redirect Home"""
return redirect('/')
<|reserved_special_token_1|>
from flask import Flask, request, render_template, redirect
from stories import Story, stories
app = Flask(__name__)
@app.route('/')
def home_page():
"""Offer user choice of Madlib Games"""
return render_template('index.html', stories=stories.values())
@app.route('/form')
def show_form():
"""Show Form for User Input"""
story_title = request.args['madlib']
for story in stories.values():
if story.title == story_title:
story_for_form = story
return render_template('form.html', s=story_for_form, story_title=
story_title)
@app.route('/story')
def show_story():
"""Display Madlib Story"""
answers = request.args
story_title = request.args['story_title']
for story in stories.values():
if story.title == story_title:
story_to_gen = story
return render_template('story.html', story_to_gen=story_to_gen,
user_answers=answers)
@app.route('/play-again')
def play_again():
"""Redirect Home"""
return redirect('/')
<|reserved_special_token_1|>
from flask import Flask, request, render_template, redirect
from stories import Story, stories
# from flask_debugtoolbar import DebugToolbarExtension
app = Flask(__name__)
# app.config['SECRET_KEY'] = "secret"
# debug = DebugToolbarExtension(app)
# my original approach involved using a global story variable to store the instances which were in this file
# After looking at the answer code, storing this data in the instance maskes more sense
# story_global = None
@app.route('/')
def home_page():
"""Offer user choice of Madlib Games"""
return render_template('index.html', stories=stories.values())
@app.route('/form')
def show_form():
"""Show Form for User Input"""
story_title = request.args["madlib"]
for story in stories.values():
if story.title == story_title:
story_for_form = story
return render_template('form.html', s=story_for_form, story_title=story_title)
@app.route("/story")
def show_story():
"""Display Madlib Story"""
answers = request.args
story_title = request.args["story_title"]
for story in stories.values():
if story.title == story_title:
story_to_gen = story
return render_template("story.html", story_to_gen=story_to_gen, user_answers=answers)
@app.route('/play-again')
def play_again():
"""Redirect Home"""
return redirect('/')
|
flexible
|
{
"blob_id": "08ed57ffb7a83973059d62f686f77b1bea136fbd",
"index": 3828,
"step-1": "<mask token>\n\n\n@app.route('/')\ndef home_page():\n \"\"\"Offer user choice of Madlib Games\"\"\"\n return render_template('index.html', stories=stories.values())\n\n\n<mask token>\n\n\n@app.route('/story')\ndef show_story():\n \"\"\"Display Madlib Story\"\"\"\n answers = request.args\n story_title = request.args['story_title']\n for story in stories.values():\n if story.title == story_title:\n story_to_gen = story\n return render_template('story.html', story_to_gen=story_to_gen,\n user_answers=answers)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\n@app.route('/')\ndef home_page():\n \"\"\"Offer user choice of Madlib Games\"\"\"\n return render_template('index.html', stories=stories.values())\n\n\n@app.route('/form')\ndef show_form():\n \"\"\"Show Form for User Input\"\"\"\n story_title = request.args['madlib']\n for story in stories.values():\n if story.title == story_title:\n story_for_form = story\n return render_template('form.html', s=story_for_form, story_title=\n story_title)\n\n\n@app.route('/story')\ndef show_story():\n \"\"\"Display Madlib Story\"\"\"\n answers = request.args\n story_title = request.args['story_title']\n for story in stories.values():\n if story.title == story_title:\n story_to_gen = story\n return render_template('story.html', story_to_gen=story_to_gen,\n user_answers=answers)\n\n\n@app.route('/play-again')\ndef play_again():\n \"\"\"Redirect Home\"\"\"\n return redirect('/')\n",
"step-3": "<mask token>\napp = Flask(__name__)\n\n\n@app.route('/')\ndef home_page():\n \"\"\"Offer user choice of Madlib Games\"\"\"\n return render_template('index.html', stories=stories.values())\n\n\n@app.route('/form')\ndef show_form():\n \"\"\"Show Form for User Input\"\"\"\n story_title = request.args['madlib']\n for story in stories.values():\n if story.title == story_title:\n story_for_form = story\n return render_template('form.html', s=story_for_form, story_title=\n story_title)\n\n\n@app.route('/story')\ndef show_story():\n \"\"\"Display Madlib Story\"\"\"\n answers = request.args\n story_title = request.args['story_title']\n for story in stories.values():\n if story.title == story_title:\n story_to_gen = story\n return render_template('story.html', story_to_gen=story_to_gen,\n user_answers=answers)\n\n\n@app.route('/play-again')\ndef play_again():\n \"\"\"Redirect Home\"\"\"\n return redirect('/')\n",
"step-4": "from flask import Flask, request, render_template, redirect\nfrom stories import Story, stories\napp = Flask(__name__)\n\n\n@app.route('/')\ndef home_page():\n \"\"\"Offer user choice of Madlib Games\"\"\"\n return render_template('index.html', stories=stories.values())\n\n\n@app.route('/form')\ndef show_form():\n \"\"\"Show Form for User Input\"\"\"\n story_title = request.args['madlib']\n for story in stories.values():\n if story.title == story_title:\n story_for_form = story\n return render_template('form.html', s=story_for_form, story_title=\n story_title)\n\n\n@app.route('/story')\ndef show_story():\n \"\"\"Display Madlib Story\"\"\"\n answers = request.args\n story_title = request.args['story_title']\n for story in stories.values():\n if story.title == story_title:\n story_to_gen = story\n return render_template('story.html', story_to_gen=story_to_gen,\n user_answers=answers)\n\n\n@app.route('/play-again')\ndef play_again():\n \"\"\"Redirect Home\"\"\"\n return redirect('/')\n",
"step-5": "from flask import Flask, request, render_template, redirect\nfrom stories import Story, stories\n# from flask_debugtoolbar import DebugToolbarExtension\n\napp = Flask(__name__)\t\n# app.config['SECRET_KEY'] = \"secret\"\n\n# debug = DebugToolbarExtension(app)\n\n\n# my original approach involved using a global story variable to store the instances which were in this file\n# After looking at the answer code, storing this data in the instance maskes more sense\n# story_global = None\n\n@app.route('/')\ndef home_page():\n \"\"\"Offer user choice of Madlib Games\"\"\"\n\n return render_template('index.html', stories=stories.values())\n\n\n@app.route('/form')\ndef show_form():\n \"\"\"Show Form for User Input\"\"\"\n\n story_title = request.args[\"madlib\"]\n for story in stories.values():\n if story.title == story_title:\n story_for_form = story\n \n return render_template('form.html', s=story_for_form, story_title=story_title)\n\n\n@app.route(\"/story\")\ndef show_story():\n \"\"\"Display Madlib Story\"\"\"\n\n answers = request.args\n story_title = request.args[\"story_title\"]\n for story in stories.values():\n if story.title == story_title:\n story_to_gen = story\n \n return render_template(\"story.html\", story_to_gen=story_to_gen, user_answers=answers)\n\n\n@app.route('/play-again')\ndef play_again():\n \"\"\"Redirect Home\"\"\"\n\n return redirect('/')\n",
"step-ids": [
2,
4,
5,
6,
7
]
}
|
[
2,
4,
5,
6,
7
] |
import xlsxwriter
workbook = xlsxwriter.Workbook('商品编码.xlsx')
worksheet = workbook.add_worksheet()
with open('商品编码.txt', 'rt') as f:
data = f.read()
data = data.splitlines(True)
count = 1
row = 0
for x in data:
if count < 3:
count += 1
continue
x = x.split(',')
column = 0
for e in x:
if row == 0 and column == 0:
e = e[3:]
worksheet.write(row, column, e)
column += 1
row += 1
workbook.close()
|
normal
|
{
"blob_id": "59a8a4cf4b04a191bfb70fd07668141dbfeda790",
"index": 6822,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nwith open('商品编码.txt', 'rt') as f:\n data = f.read()\n<mask token>\nfor x in data:\n if count < 3:\n count += 1\n continue\n x = x.split(',')\n column = 0\n for e in x:\n if row == 0 and column == 0:\n e = e[3:]\n worksheet.write(row, column, e)\n column += 1\n row += 1\nworkbook.close()\n",
"step-3": "<mask token>\nworkbook = xlsxwriter.Workbook('商品编码.xlsx')\nworksheet = workbook.add_worksheet()\nwith open('商品编码.txt', 'rt') as f:\n data = f.read()\ndata = data.splitlines(True)\ncount = 1\nrow = 0\nfor x in data:\n if count < 3:\n count += 1\n continue\n x = x.split(',')\n column = 0\n for e in x:\n if row == 0 and column == 0:\n e = e[3:]\n worksheet.write(row, column, e)\n column += 1\n row += 1\nworkbook.close()\n",
"step-4": "import xlsxwriter\nworkbook = xlsxwriter.Workbook('商品编码.xlsx')\nworksheet = workbook.add_worksheet()\nwith open('商品编码.txt', 'rt') as f:\n data = f.read()\ndata = data.splitlines(True)\ncount = 1\nrow = 0\nfor x in data:\n if count < 3:\n count += 1\n continue\n x = x.split(',')\n column = 0\n for e in x:\n if row == 0 and column == 0:\n e = e[3:]\n worksheet.write(row, column, e)\n column += 1\n row += 1\nworkbook.close()\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
class IndividualStack:
def __init__(self):
self.stack=[None]*5
class StackwithStacks:
def __init__(self):
self.stacks = []
self.stackcount=-1
self.count=0
self.st = None
def push(self, element):
if self.count%5==0:
self.stackcount = self.stackcount+1
self.count=0
self.st=IndividualStack()
self.stacks.append(self.st)
self.st.stack[self.count]=element
self.count = self.count+1
else:
self.st.stack[self.count] = element
self.count = self.count + 1
def pop(self):
if self.count == 1:
self.count=self.count-1
returnval= self.stacks[self.stackcount].stack[self.count]
self.stacks.pop()
self.stackcount=self.stackcount-1
self.count=5
return returnval
else:
self.count = self.count - 1
return self.stacks[self.stackcount].stack[self.count]
st = StackwithStacks()
st.push(1)
st.push(1)
st.push(1)
st.push(1)
st.push(1)
st.push(12)
st.push(13)
st.push(1)
st.push(4)
st.push(7)
st.push(1)
st.push(8)
st.push(1)
st.push(6)
print st.pop()
print st.pop()
print st.pop()
print st.pop()
print st.pop()
print st.pop()
print st.pop()
print st.pop()
|
normal
|
{
"blob_id": "a8f52772522d1efc097c3d17d9c08199816f1168",
"index": 3785,
"step-1": "class IndividualStack:\n def __init__(self):\n self.stack=[None]*5\n\n\nclass StackwithStacks:\n def __init__(self):\n self.stacks = []\n self.stackcount=-1\n self.count=0\n self.st = None\n\n def push(self, element):\n if self.count%5==0:\n self.stackcount = self.stackcount+1\n self.count=0\n self.st=IndividualStack()\n self.stacks.append(self.st)\n self.st.stack[self.count]=element\n self.count = self.count+1\n\n else:\n self.st.stack[self.count] = element\n self.count = self.count + 1\n\n def pop(self):\n if self.count == 1:\n self.count=self.count-1\n returnval= self.stacks[self.stackcount].stack[self.count]\n self.stacks.pop()\n self.stackcount=self.stackcount-1\n self.count=5\n return returnval\n\n else:\n self.count = self.count - 1\n return self.stacks[self.stackcount].stack[self.count]\n\n\nst = StackwithStacks()\n\nst.push(1)\nst.push(1)\nst.push(1)\nst.push(1)\nst.push(1)\nst.push(12)\nst.push(13)\nst.push(1)\nst.push(4)\nst.push(7)\nst.push(1)\nst.push(8)\nst.push(1)\nst.push(6)\n\n\nprint st.pop()\nprint st.pop()\nprint st.pop()\nprint st.pop()\nprint st.pop()\nprint st.pop()\nprint st.pop()\nprint st.pop()",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
import cv2,os
import sqlite3
cam = cv2.VideoCapture(0)
detector = cv2.CascadeClassifier('Classifiers/face.xml')
i = 0
offset = 50
def create_or_open_db(db_file):
db_is_new = not os.path.exists(db_file)
conn = sqlite3.connect(db_file)
if db_is_new:
print 'Creating schema'
sql = '''create table if not exists PEOPLES(
ID INTEGER PRIMARY KEY,
Name TEXT);'''
sql_image = '''create table if not exists PICTURES(
ID INTEGER PRIMARY KEY AUTOINCREMENT,
Picture BLOB,
Type TEXT,
File_name TEXT);'''
sql_trainer = '''create table if not exists TRAINER(
ID INTEGER PRIMARY KEY,
File BLOB,
Type TEXT,
File_name TEXT);'''
conn.execute(sql) # shortcut for conn.cursor().execute(sql)
conn.execute(sql_image) # create image table
conn.execute(sql_trainer) # create trainer table
else:
print 'Schema exists\n'
return conn
def insertOrUpdate(Id,Name):
conn=sqlite3.connect("FaceBase.db")
cmd="SELECT * FROM PEOPLES WHERE ID="+str(Id)
cursor=conn.execute(cmd)
isRecordExist=0
for row in cursor:
isRecordExist=1
if(isRecordExist==1):
cmd="UPDATE PEOPLES SET NAME='"+str(Name)+"' WHERE ID="+str(Id)
else:
cmd="INSERT INTO PEOPLES(ID,NAME)Values("+str(Id)+",'"+str(Name)+"')"
conn.execute(cmd)
conn.commit()
conn.close()
def insert_picture(picture_file):
conn = create_or_open_db('FaceBase.db')
with open(picture_file, 'rb') as input_file:
ablob = input_file.read()
base=os.path.basename(picture_file)
afile, ext = os.path.splitext(base)
sql = '''INSERT INTO PICTURES
(PICTURE, TYPE, FILE_NAME)
VALUES(?, ?, ?);'''
conn.execute(sql,[sqlite3.Binary(ablob), ext, afile])
conn.commit()
# picture_file = "./dataSet/face- 2.1.jpg"
# insert_picture(conn, picture_file)
# conn.close()
id=raw_input('Digite o id ')
name=raw_input('Digite o Nome ')
create_or_open_db('FaceBase.db')
insertOrUpdate(id,name)
while True:
ret, im =cam.read()
gray=cv2.cvtColor(im,cv2.COLOR_BGR2GRAY)
faces=detector.detectMultiScale(gray, scaleFactor=1.2, minNeighbors=5, minSize=(100, 100), flags=cv2.CASCADE_SCALE_IMAGE)
for(x,y,w,h) in faces:
i=i+1
cv2.imwrite("dataSet/face-"+id +'.'+ str(i) + ".jpg", gray[y-offset:y+h+offset,x-offset:x+w+offset])
#picture_file = "./dataSet/face-"+id +'.'+ str(i) + ".jpg"
#insert_picture(picture_file)
cv2.rectangle(im,(x-50,y-50),(x+w+50,y+h+50),(225,0,0),2)
cv2.imshow('im',im[y-offset:y+h+offset,x-offset:x+w+offset])
cv2.waitKey(100)
if i>70:
cam.release()
cv2.destroyAllWindows()
break
|
normal
|
{
"blob_id": "3beaea1f2b1b085a60bdc5e53f4e6d9aff7e8b6f",
"index": 5538,
"step-1": "import cv2,os\nimport sqlite3\ncam = cv2.VideoCapture(0)\ndetector = cv2.CascadeClassifier('Classifiers/face.xml')\ni = 0\noffset = 50\n\n\ndef create_or_open_db(db_file):\n db_is_new = not os.path.exists(db_file)\n conn = sqlite3.connect(db_file)\n if db_is_new:\n print 'Creating schema'\n sql = '''create table if not exists PEOPLES(\n ID INTEGER PRIMARY KEY,\n Name TEXT);'''\n sql_image = '''create table if not exists PICTURES(\n ID INTEGER PRIMARY KEY AUTOINCREMENT,\n Picture BLOB,\n Type TEXT,\n File_name TEXT);'''\n sql_trainer = '''create table if not exists TRAINER(\n ID INTEGER PRIMARY KEY,\n File BLOB,\n Type TEXT,\n File_name TEXT);'''\n conn.execute(sql) # shortcut for conn.cursor().execute(sql)\n conn.execute(sql_image) # create image table\n conn.execute(sql_trainer) # create trainer table\n else:\n print 'Schema exists\\n'\n return conn\n\ndef insertOrUpdate(Id,Name):\n conn=sqlite3.connect(\"FaceBase.db\")\n cmd=\"SELECT * FROM PEOPLES WHERE ID=\"+str(Id)\n cursor=conn.execute(cmd)\n isRecordExist=0\n for row in cursor:\n isRecordExist=1\n if(isRecordExist==1):\n cmd=\"UPDATE PEOPLES SET NAME='\"+str(Name)+\"' WHERE ID=\"+str(Id)\n else:\n cmd=\"INSERT INTO PEOPLES(ID,NAME)Values(\"+str(Id)+\",'\"+str(Name)+\"')\"\n conn.execute(cmd)\n conn.commit()\n conn.close()\n\ndef insert_picture(picture_file):\n conn = create_or_open_db('FaceBase.db')\n with open(picture_file, 'rb') as input_file:\n ablob = input_file.read()\n base=os.path.basename(picture_file)\n afile, ext = os.path.splitext(base)\n sql = '''INSERT INTO PICTURES\n (PICTURE, TYPE, FILE_NAME)\n VALUES(?, ?, ?);'''\n conn.execute(sql,[sqlite3.Binary(ablob), ext, afile]) \n conn.commit()\n\t\n# picture_file = \"./dataSet/face- 2.1.jpg\"\n# insert_picture(conn, picture_file)\n# conn.close()\n\nid=raw_input('Digite o id ')\nname=raw_input('Digite o Nome ')\ncreate_or_open_db('FaceBase.db')\ninsertOrUpdate(id,name)\n\nwhile True:\n ret, im =cam.read()\n 
gray=cv2.cvtColor(im,cv2.COLOR_BGR2GRAY)\n faces=detector.detectMultiScale(gray, scaleFactor=1.2, minNeighbors=5, minSize=(100, 100), flags=cv2.CASCADE_SCALE_IMAGE)\n for(x,y,w,h) in faces:\n i=i+1\n cv2.imwrite(\"dataSet/face-\"+id +'.'+ str(i) + \".jpg\", gray[y-offset:y+h+offset,x-offset:x+w+offset])\n #picture_file = \"./dataSet/face-\"+id +'.'+ str(i) + \".jpg\"\n #insert_picture(picture_file)\n cv2.rectangle(im,(x-50,y-50),(x+w+50,y+h+50),(225,0,0),2)\n cv2.imshow('im',im[y-offset:y+h+offset,x-offset:x+w+offset])\n cv2.waitKey(100)\n if i>70:\n cam.release()\n cv2.destroyAllWindows()\n break\n\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
def lcs(X, Y, m, n):
dp = [[0]*(n+1) for i in range(m+1)]
for i in range(1,m+1):
for j in range(1,n+1):
if X[i-1] == Y[j-1]:
dp[i][j] = 1 + dp[i-1][j-1]
else:
dp[i][j] = max(dp[i-1][j], dp[i][j-1])
index = dp[m][n]
s = ""
i = m
j = n
while i > 0 and j > 0:
if X[i-1] == Y[j-1]:
s += X[i-1]
i -= 1
j -= 1
elif dp[i-1][j] > dp[i][j-1]:
i -= 1
else:
j -= 1
return s
X = "AGGTAB"
Y = "GXTXAYB"
print("Length of LCS is ", lcs(X , Y, len(X), len(Y)))
|
normal
|
{
"blob_id": "247e352b7772a1da74a26f007228355f5af8d3b3",
"index": 191,
"step-1": "<mask token>\n",
"step-2": "def lcs(X, Y, m, n):\n dp = [([0] * (n + 1)) for i in range(m + 1)]\n for i in range(1, m + 1):\n for j in range(1, n + 1):\n if X[i - 1] == Y[j - 1]:\n dp[i][j] = 1 + dp[i - 1][j - 1]\n else:\n dp[i][j] = max(dp[i - 1][j], dp[i][j - 1])\n index = dp[m][n]\n s = ''\n i = m\n j = n\n while i > 0 and j > 0:\n if X[i - 1] == Y[j - 1]:\n s += X[i - 1]\n i -= 1\n j -= 1\n elif dp[i - 1][j] > dp[i][j - 1]:\n i -= 1\n else:\n j -= 1\n return s\n\n\n<mask token>\n",
"step-3": "def lcs(X, Y, m, n):\n dp = [([0] * (n + 1)) for i in range(m + 1)]\n for i in range(1, m + 1):\n for j in range(1, n + 1):\n if X[i - 1] == Y[j - 1]:\n dp[i][j] = 1 + dp[i - 1][j - 1]\n else:\n dp[i][j] = max(dp[i - 1][j], dp[i][j - 1])\n index = dp[m][n]\n s = ''\n i = m\n j = n\n while i > 0 and j > 0:\n if X[i - 1] == Y[j - 1]:\n s += X[i - 1]\n i -= 1\n j -= 1\n elif dp[i - 1][j] > dp[i][j - 1]:\n i -= 1\n else:\n j -= 1\n return s\n\n\n<mask token>\nprint('Length of LCS is ', lcs(X, Y, len(X), len(Y)))\n",
"step-4": "def lcs(X, Y, m, n):\n dp = [([0] * (n + 1)) for i in range(m + 1)]\n for i in range(1, m + 1):\n for j in range(1, n + 1):\n if X[i - 1] == Y[j - 1]:\n dp[i][j] = 1 + dp[i - 1][j - 1]\n else:\n dp[i][j] = max(dp[i - 1][j], dp[i][j - 1])\n index = dp[m][n]\n s = ''\n i = m\n j = n\n while i > 0 and j > 0:\n if X[i - 1] == Y[j - 1]:\n s += X[i - 1]\n i -= 1\n j -= 1\n elif dp[i - 1][j] > dp[i][j - 1]:\n i -= 1\n else:\n j -= 1\n return s\n\n\nX = 'AGGTAB'\nY = 'GXTXAYB'\nprint('Length of LCS is ', lcs(X, Y, len(X), len(Y)))\n",
"step-5": "def lcs(X, Y, m, n):\r\n dp = [[0]*(n+1) for i in range(m+1)]\r\n\r\n for i in range(1,m+1):\r\n for j in range(1,n+1):\r\n\r\n if X[i-1] == Y[j-1]:\r\n dp[i][j] = 1 + dp[i-1][j-1]\r\n else:\r\n dp[i][j] = max(dp[i-1][j], dp[i][j-1])\r\n\r\n index = dp[m][n]\r\n s = \"\"\r\n\r\n i = m\r\n j = n\r\n while i > 0 and j > 0:\r\n if X[i-1] == Y[j-1]:\r\n s += X[i-1]\r\n i -= 1\r\n j -= 1\r\n elif dp[i-1][j] > dp[i][j-1]:\r\n i -= 1\r\n else:\r\n j -= 1\r\n\r\n return s\r\n\r\nX = \"AGGTAB\"\r\nY = \"GXTXAYB\"\r\nprint(\"Length of LCS is \", lcs(X , Y, len(X), len(Y)))\r\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
def update(a, b):
global counter
if b == 'x':
b = 0
a = week
counter = 0
else:
counter += b
a += counter
train_set = get_train_set(a)
txtLbl1.configure(text=train_set[0])
txtLbl2.configure(text=train_set[2])
txtLbl3.configure(text=train_set[4])
img1.configure(file=train_set[1])
img2.configure(file=train_set[3])
img3.configure(file=train_set[5])
curr_week.configure(text=str(a) + 'th week')
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def update(a, b):
global counter
if b == 'x':
b = 0
a = week
counter = 0
else:
counter += b
a += counter
train_set = get_train_set(a)
txtLbl1.configure(text=train_set[0])
txtLbl2.configure(text=train_set[2])
txtLbl3.configure(text=train_set[4])
img1.configure(file=train_set[1])
img2.configure(file=train_set[3])
img3.configure(file=train_set[5])
curr_week.configure(text=str(a) + 'th week')
<|reserved_special_token_0|>
window.geometry('435x250')
window.title('Training schedule')
window.configure(background='White')
window.resizable(0, 0)
<|reserved_special_token_0|>
day1.place(x=30, y=10)
day2.place(x=180, y=10)
day3.place(x=345, y=10)
imgLbl1.place(x=10, y=50)
imgLbl2.place(x=160, y=50)
imgLbl3.place(x=320, y=50)
txtLbl1.place(x=35, y=160)
txtLbl2.place(x=200, y=160)
txtLbl3.place(x=345, y=160)
<|reserved_special_token_0|>
can.create_line(0, 0, 435, 0, width=2, fill='#4286f4')
can.place(x=0, y=185)
btn_home.place(x=15, y=200)
btn_prev.place(x=335, y=200)
btn_next.place(x=381, y=200)
curr_week.place(x=160, y=190)
update(week, 0)
window.mainloop()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
counter = 0
week = int(datetime.today().isocalendar()[1])
def update(a, b):
global counter
if b == 'x':
b = 0
a = week
counter = 0
else:
counter += b
a += counter
train_set = get_train_set(a)
txtLbl1.configure(text=train_set[0])
txtLbl2.configure(text=train_set[2])
txtLbl3.configure(text=train_set[4])
img1.configure(file=train_set[1])
img2.configure(file=train_set[3])
img3.configure(file=train_set[5])
curr_week.configure(text=str(a) + 'th week')
window = Tk()
window.geometry('435x250')
window.title('Training schedule')
window.configure(background='White')
window.resizable(0, 0)
day1 = Label(window, text='MON', font=('Arial', 20), bg='White')
day2 = Label(window, text='WED', font=('Arial', 20), bg='White')
day3 = Label(window, text='FRI', font=('Arial', 20), bg='White')
img1 = PhotoImage(file='default.png', width=100, height=100)
img2 = PhotoImage(file='default.png', width=100, height=100)
img3 = PhotoImage(file='default.png', width=100, height=100)
imgLbl1 = Label(window, image=img1)
imgLbl2 = Label(window, image=img2)
imgLbl3 = Label(window, image=img3)
txtLbl1 = Label(window, font=('Arial', 12), bg='White')
txtLbl2 = Label(window, font=('Arial', 12), bg='White')
txtLbl3 = Label(window, font=('Arial', 12), bg='White')
img_home = PhotoImage(file='home (3).gif')
btn_home = Button(window, image=img_home, relief='ridge', command=lambda :
update(week, 'x'))
img_next = PhotoImage(file='next.gif')
btn_next = Button(window, image=img_next, relief='ridge', command=lambda :
update(week, 1))
img_prev = PhotoImage(file='prev.gif')
btn_prev = Button(window, image=img_prev, relief='ridge', command=lambda :
update(week, -1))
curr_week = Label(window, text=str(week) + 'th week', font=('Arial', 15),
bg='White')
day1.place(x=30, y=10)
day2.place(x=180, y=10)
day3.place(x=345, y=10)
imgLbl1.place(x=10, y=50)
imgLbl2.place(x=160, y=50)
imgLbl3.place(x=320, y=50)
txtLbl1.place(x=35, y=160)
txtLbl2.place(x=200, y=160)
txtLbl3.place(x=345, y=160)
can = Canvas(window, width=435, height=2, bg='#4286f4')
can.create_line(0, 0, 435, 0, width=2, fill='#4286f4')
can.place(x=0, y=185)
btn_home.place(x=15, y=200)
btn_prev.place(x=335, y=200)
btn_next.place(x=381, y=200)
curr_week.place(x=160, y=190)
update(week, 0)
window.mainloop()
<|reserved_special_token_1|>
from tkinter import *
from get_train_set import *
from datetime import *
counter = 0
week = int(datetime.today().isocalendar()[1])
def update(a, b):
global counter
if b == 'x':
b = 0
a = week
counter = 0
else:
counter += b
a += counter
train_set = get_train_set(a)
txtLbl1.configure(text=train_set[0])
txtLbl2.configure(text=train_set[2])
txtLbl3.configure(text=train_set[4])
img1.configure(file=train_set[1])
img2.configure(file=train_set[3])
img3.configure(file=train_set[5])
curr_week.configure(text=str(a) + 'th week')
window = Tk()
window.geometry('435x250')
window.title('Training schedule')
window.configure(background='White')
window.resizable(0, 0)
day1 = Label(window, text='MON', font=('Arial', 20), bg='White')
day2 = Label(window, text='WED', font=('Arial', 20), bg='White')
day3 = Label(window, text='FRI', font=('Arial', 20), bg='White')
img1 = PhotoImage(file='default.png', width=100, height=100)
img2 = PhotoImage(file='default.png', width=100, height=100)
img3 = PhotoImage(file='default.png', width=100, height=100)
imgLbl1 = Label(window, image=img1)
imgLbl2 = Label(window, image=img2)
imgLbl3 = Label(window, image=img3)
txtLbl1 = Label(window, font=('Arial', 12), bg='White')
txtLbl2 = Label(window, font=('Arial', 12), bg='White')
txtLbl3 = Label(window, font=('Arial', 12), bg='White')
img_home = PhotoImage(file='home (3).gif')
btn_home = Button(window, image=img_home, relief='ridge', command=lambda :
update(week, 'x'))
img_next = PhotoImage(file='next.gif')
btn_next = Button(window, image=img_next, relief='ridge', command=lambda :
update(week, 1))
img_prev = PhotoImage(file='prev.gif')
btn_prev = Button(window, image=img_prev, relief='ridge', command=lambda :
update(week, -1))
curr_week = Label(window, text=str(week) + 'th week', font=('Arial', 15),
bg='White')
day1.place(x=30, y=10)
day2.place(x=180, y=10)
day3.place(x=345, y=10)
imgLbl1.place(x=10, y=50)
imgLbl2.place(x=160, y=50)
imgLbl3.place(x=320, y=50)
txtLbl1.place(x=35, y=160)
txtLbl2.place(x=200, y=160)
txtLbl3.place(x=345, y=160)
can = Canvas(window, width=435, height=2, bg='#4286f4')
can.create_line(0, 0, 435, 0, width=2, fill='#4286f4')
can.place(x=0, y=185)
btn_home.place(x=15, y=200)
btn_prev.place(x=335, y=200)
btn_next.place(x=381, y=200)
curr_week.place(x=160, y=190)
update(week, 0)
window.mainloop()
|
flexible
|
{
"blob_id": "62fe29b0ac4dee8fec4908cf803dba9bd7e92fa5",
"index": 4602,
"step-1": "<mask token>\n\n\ndef update(a, b):\n global counter\n if b == 'x':\n b = 0\n a = week\n counter = 0\n else:\n counter += b\n a += counter\n train_set = get_train_set(a)\n txtLbl1.configure(text=train_set[0])\n txtLbl2.configure(text=train_set[2])\n txtLbl3.configure(text=train_set[4])\n img1.configure(file=train_set[1])\n img2.configure(file=train_set[3])\n img3.configure(file=train_set[5])\n curr_week.configure(text=str(a) + 'th week')\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef update(a, b):\n global counter\n if b == 'x':\n b = 0\n a = week\n counter = 0\n else:\n counter += b\n a += counter\n train_set = get_train_set(a)\n txtLbl1.configure(text=train_set[0])\n txtLbl2.configure(text=train_set[2])\n txtLbl3.configure(text=train_set[4])\n img1.configure(file=train_set[1])\n img2.configure(file=train_set[3])\n img3.configure(file=train_set[5])\n curr_week.configure(text=str(a) + 'th week')\n\n\n<mask token>\nwindow.geometry('435x250')\nwindow.title('Training schedule')\nwindow.configure(background='White')\nwindow.resizable(0, 0)\n<mask token>\nday1.place(x=30, y=10)\nday2.place(x=180, y=10)\nday3.place(x=345, y=10)\nimgLbl1.place(x=10, y=50)\nimgLbl2.place(x=160, y=50)\nimgLbl3.place(x=320, y=50)\ntxtLbl1.place(x=35, y=160)\ntxtLbl2.place(x=200, y=160)\ntxtLbl3.place(x=345, y=160)\n<mask token>\ncan.create_line(0, 0, 435, 0, width=2, fill='#4286f4')\ncan.place(x=0, y=185)\nbtn_home.place(x=15, y=200)\nbtn_prev.place(x=335, y=200)\nbtn_next.place(x=381, y=200)\ncurr_week.place(x=160, y=190)\nupdate(week, 0)\nwindow.mainloop()\n",
"step-3": "<mask token>\ncounter = 0\nweek = int(datetime.today().isocalendar()[1])\n\n\ndef update(a, b):\n global counter\n if b == 'x':\n b = 0\n a = week\n counter = 0\n else:\n counter += b\n a += counter\n train_set = get_train_set(a)\n txtLbl1.configure(text=train_set[0])\n txtLbl2.configure(text=train_set[2])\n txtLbl3.configure(text=train_set[4])\n img1.configure(file=train_set[1])\n img2.configure(file=train_set[3])\n img3.configure(file=train_set[5])\n curr_week.configure(text=str(a) + 'th week')\n\n\nwindow = Tk()\nwindow.geometry('435x250')\nwindow.title('Training schedule')\nwindow.configure(background='White')\nwindow.resizable(0, 0)\nday1 = Label(window, text='MON', font=('Arial', 20), bg='White')\nday2 = Label(window, text='WED', font=('Arial', 20), bg='White')\nday3 = Label(window, text='FRI', font=('Arial', 20), bg='White')\nimg1 = PhotoImage(file='default.png', width=100, height=100)\nimg2 = PhotoImage(file='default.png', width=100, height=100)\nimg3 = PhotoImage(file='default.png', width=100, height=100)\nimgLbl1 = Label(window, image=img1)\nimgLbl2 = Label(window, image=img2)\nimgLbl3 = Label(window, image=img3)\ntxtLbl1 = Label(window, font=('Arial', 12), bg='White')\ntxtLbl2 = Label(window, font=('Arial', 12), bg='White')\ntxtLbl3 = Label(window, font=('Arial', 12), bg='White')\nimg_home = PhotoImage(file='home (3).gif')\nbtn_home = Button(window, image=img_home, relief='ridge', command=lambda :\n update(week, 'x'))\nimg_next = PhotoImage(file='next.gif')\nbtn_next = Button(window, image=img_next, relief='ridge', command=lambda :\n update(week, 1))\nimg_prev = PhotoImage(file='prev.gif')\nbtn_prev = Button(window, image=img_prev, relief='ridge', command=lambda :\n update(week, -1))\ncurr_week = Label(window, text=str(week) + 'th week', font=('Arial', 15),\n bg='White')\nday1.place(x=30, y=10)\nday2.place(x=180, y=10)\nday3.place(x=345, y=10)\nimgLbl1.place(x=10, y=50)\nimgLbl2.place(x=160, y=50)\nimgLbl3.place(x=320, 
y=50)\ntxtLbl1.place(x=35, y=160)\ntxtLbl2.place(x=200, y=160)\ntxtLbl3.place(x=345, y=160)\ncan = Canvas(window, width=435, height=2, bg='#4286f4')\ncan.create_line(0, 0, 435, 0, width=2, fill='#4286f4')\ncan.place(x=0, y=185)\nbtn_home.place(x=15, y=200)\nbtn_prev.place(x=335, y=200)\nbtn_next.place(x=381, y=200)\ncurr_week.place(x=160, y=190)\nupdate(week, 0)\nwindow.mainloop()\n",
"step-4": "from tkinter import *\nfrom get_train_set import *\nfrom datetime import *\ncounter = 0\nweek = int(datetime.today().isocalendar()[1])\n\n\ndef update(a, b):\n global counter\n if b == 'x':\n b = 0\n a = week\n counter = 0\n else:\n counter += b\n a += counter\n train_set = get_train_set(a)\n txtLbl1.configure(text=train_set[0])\n txtLbl2.configure(text=train_set[2])\n txtLbl3.configure(text=train_set[4])\n img1.configure(file=train_set[1])\n img2.configure(file=train_set[3])\n img3.configure(file=train_set[5])\n curr_week.configure(text=str(a) + 'th week')\n\n\nwindow = Tk()\nwindow.geometry('435x250')\nwindow.title('Training schedule')\nwindow.configure(background='White')\nwindow.resizable(0, 0)\nday1 = Label(window, text='MON', font=('Arial', 20), bg='White')\nday2 = Label(window, text='WED', font=('Arial', 20), bg='White')\nday3 = Label(window, text='FRI', font=('Arial', 20), bg='White')\nimg1 = PhotoImage(file='default.png', width=100, height=100)\nimg2 = PhotoImage(file='default.png', width=100, height=100)\nimg3 = PhotoImage(file='default.png', width=100, height=100)\nimgLbl1 = Label(window, image=img1)\nimgLbl2 = Label(window, image=img2)\nimgLbl3 = Label(window, image=img3)\ntxtLbl1 = Label(window, font=('Arial', 12), bg='White')\ntxtLbl2 = Label(window, font=('Arial', 12), bg='White')\ntxtLbl3 = Label(window, font=('Arial', 12), bg='White')\nimg_home = PhotoImage(file='home (3).gif')\nbtn_home = Button(window, image=img_home, relief='ridge', command=lambda :\n update(week, 'x'))\nimg_next = PhotoImage(file='next.gif')\nbtn_next = Button(window, image=img_next, relief='ridge', command=lambda :\n update(week, 1))\nimg_prev = PhotoImage(file='prev.gif')\nbtn_prev = Button(window, image=img_prev, relief='ridge', command=lambda :\n update(week, -1))\ncurr_week = Label(window, text=str(week) + 'th week', font=('Arial', 15),\n bg='White')\nday1.place(x=30, y=10)\nday2.place(x=180, y=10)\nday3.place(x=345, y=10)\nimgLbl1.place(x=10, 
y=50)\nimgLbl2.place(x=160, y=50)\nimgLbl3.place(x=320, y=50)\ntxtLbl1.place(x=35, y=160)\ntxtLbl2.place(x=200, y=160)\ntxtLbl3.place(x=345, y=160)\ncan = Canvas(window, width=435, height=2, bg='#4286f4')\ncan.create_line(0, 0, 435, 0, width=2, fill='#4286f4')\ncan.place(x=0, y=185)\nbtn_home.place(x=15, y=200)\nbtn_prev.place(x=335, y=200)\nbtn_next.place(x=381, y=200)\ncurr_week.place(x=160, y=190)\nupdate(week, 0)\nwindow.mainloop()\n",
"step-5": null,
"step-ids": [
1,
2,
3,
4
]
}
|
[
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
print(name * 1000)
<|reserved_special_token_1|>
name = 'valentina '
print(name * 1000)
|
flexible
|
{
"blob_id": "aff1a9263e183610f403a4d6a7f27b45eacb7ff2",
"index": 0,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(name * 1000)\n",
"step-3": "name = 'valentina '\nprint(name * 1000)\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
A = []
ans = 0
def merge(left, mid, right):
global A
global ans
n1 = mid - left
n2 = right - mid
l = []
r = []
for i in range(n1):
l += [A[left + i]]
for i in range(n2):
r += [A[mid + i]]
l += [10**18]
r += [10**18]
i = 0
j = 0
ans += right - left
for k in range(left, right):
if l[i] <= r[j]:
A[k] = l[i]
i += 1
else:
A[k] = r[j]
j += 1
def Msort(left, right):
if left + 1 < right:
mid = int((left + right)/2)
Msort(left, mid)
Msort(mid,right)
merge(left,mid,right)
def main():
global ans
global A
n = int(input())
A = list(map(int,input().split()))
Msort(0,n)
print(" ".join(list(map(str,A))))
print(ans)
main()
|
normal
|
{
"blob_id": "dc81ab808720c3a2c76174264c9be9bcdd99c292",
"index": 1265,
"step-1": "<mask token>\n\n\ndef Msort(left, right):\n if left + 1 < right:\n mid = int((left + right) / 2)\n Msort(left, mid)\n Msort(mid, right)\n merge(left, mid, right)\n\n\ndef main():\n global ans\n global A\n n = int(input())\n A = list(map(int, input().split()))\n Msort(0, n)\n print(' '.join(list(map(str, A))))\n print(ans)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef merge(left, mid, right):\n global A\n global ans\n n1 = mid - left\n n2 = right - mid\n l = []\n r = []\n for i in range(n1):\n l += [A[left + i]]\n for i in range(n2):\n r += [A[mid + i]]\n l += [10 ** 18]\n r += [10 ** 18]\n i = 0\n j = 0\n ans += right - left\n for k in range(left, right):\n if l[i] <= r[j]:\n A[k] = l[i]\n i += 1\n else:\n A[k] = r[j]\n j += 1\n\n\ndef Msort(left, right):\n if left + 1 < right:\n mid = int((left + right) / 2)\n Msort(left, mid)\n Msort(mid, right)\n merge(left, mid, right)\n\n\ndef main():\n global ans\n global A\n n = int(input())\n A = list(map(int, input().split()))\n Msort(0, n)\n print(' '.join(list(map(str, A))))\n print(ans)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef merge(left, mid, right):\n global A\n global ans\n n1 = mid - left\n n2 = right - mid\n l = []\n r = []\n for i in range(n1):\n l += [A[left + i]]\n for i in range(n2):\n r += [A[mid + i]]\n l += [10 ** 18]\n r += [10 ** 18]\n i = 0\n j = 0\n ans += right - left\n for k in range(left, right):\n if l[i] <= r[j]:\n A[k] = l[i]\n i += 1\n else:\n A[k] = r[j]\n j += 1\n\n\ndef Msort(left, right):\n if left + 1 < right:\n mid = int((left + right) / 2)\n Msort(left, mid)\n Msort(mid, right)\n merge(left, mid, right)\n\n\ndef main():\n global ans\n global A\n n = int(input())\n A = list(map(int, input().split()))\n Msort(0, n)\n print(' '.join(list(map(str, A))))\n print(ans)\n\n\nmain()\n",
"step-4": "A = []\nans = 0\n\n\ndef merge(left, mid, right):\n global A\n global ans\n n1 = mid - left\n n2 = right - mid\n l = []\n r = []\n for i in range(n1):\n l += [A[left + i]]\n for i in range(n2):\n r += [A[mid + i]]\n l += [10 ** 18]\n r += [10 ** 18]\n i = 0\n j = 0\n ans += right - left\n for k in range(left, right):\n if l[i] <= r[j]:\n A[k] = l[i]\n i += 1\n else:\n A[k] = r[j]\n j += 1\n\n\ndef Msort(left, right):\n if left + 1 < right:\n mid = int((left + right) / 2)\n Msort(left, mid)\n Msort(mid, right)\n merge(left, mid, right)\n\n\ndef main():\n global ans\n global A\n n = int(input())\n A = list(map(int, input().split()))\n Msort(0, n)\n print(' '.join(list(map(str, A))))\n print(ans)\n\n\nmain()\n",
"step-5": "A = []\nans = 0\n\ndef merge(left, mid, right):\n\tglobal A\n\tglobal ans\n\tn1 = mid - left\n\tn2 = right - mid\n\tl = []\n\tr = []\n\tfor i in range(n1):\n\t\tl += [A[left + i]]\n\tfor i in range(n2):\n\t\tr += [A[mid + i]]\n\tl += [10**18]\n\tr += [10**18]\n\ti = 0\n\tj = 0\n\tans += right - left\n\tfor k in range(left, right):\n\t\tif l[i] <= r[j]:\n\t\t\tA[k] = l[i]\n\t\t\ti += 1\n\t\telse:\n\t\t\tA[k] = r[j]\n\t\t\tj += 1\n\n\ndef Msort(left, right):\n\tif left + 1 < right:\n\t\tmid = int((left + right)/2)\n\t\tMsort(left, mid)\n\t\tMsort(mid,right)\n\t\tmerge(left,mid,right)\n\ndef main():\n\tglobal ans\n\tglobal A\n\tn = int(input())\n\tA = list(map(int,input().split()))\n\tMsort(0,n)\n\tprint(\" \".join(list(map(str,A))))\n\tprint(ans)\n\nmain()\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
from cobra.model.fabric import HIfPol
from createMo import *
DEFAULT_AUTO_NEGOTIATION = 'on'
DEFAULT_SPEED = '10G'
DEFAULT_LINK_DEBOUNCE_INTERVAL = 100
AUTO_NEGOTIATION_CHOICES = ['on', 'off']
SPEED_CHOICES = ['100M', '1G', '10G', '40G']
def input_key_args(msg='\nPlease Specify Link Level Policy:'):
print msg
return input_raw_input("Link Level Policy Name", required=True)
def input_optional_args():
args = {}
args['atuo_negotiation'] = input_options('Auto Negotiation', DEFAULT_AUTO_NEGOTIATION, AUTO_NEGOTIATION_CHOICES)
args['speed'] = input_options('Speed', DEFAULT_SPEED, SPEED_CHOICES)
args['link_debounce_interval'] = input_options('Link Debounce Interval (msec)', str(DEFAULT_LINK_DEBOUNCE_INTERVAL), '', num_accept=True)
return args
def create_link_level_policy(parent_mo, link_level_policy, **args):
"""Create Link Level Policy"""
args = args['optional_args'] if 'optional_args' in args.keys() else args
# Create mo
if is_valid_key(args, 'atuo_negotiation'):
if args['atuo_negotiation'] or args['atuo_negotiation'] == 'on':
args['atuo_negotiation'] = 'on'
elif not args['atuo_negotiation'] or args['atuo_negotiation'] == 'off':
args['atuo_negotiation'] = 'off'
fabric_hifpol = HIfPol(parent_mo, link_level_policy,
autoNeg=get_value(args, 'atuo_negotiation', DEFAULT_AUTO_NEGOTIATION),
speed=get_value(args, 'speed', DEFAULT_SPEED),
linkDebounce=get_value(args, 'link_debounce_interval', DEFAULT_LINK_DEBOUNCE_INTERVAL),
)
return fabric_hifpol
class CreateLinkLevelPolicy(CreateMo):
def __init__(self):
self.description = 'Create Link Level Policy. The host interface policy. This specifies the layer 1 parameters of host facing ports. '
self.link_level_policy = None
super(CreateLinkLevelPolicy, self).__init__()
def set_cli_mode(self):
super(CreateLinkLevelPolicy, self).set_cli_mode()
self.parser_cli.add_argument('link_level_policy', help='The name of the interface policy. ')
self.parser_cli.add_argument('-a', '--atuo_negotiation', default= DEFAULT_AUTO_NEGOTIATION, choices=AUTO_NEGOTIATION_CHOICES, help='The policy auto-negotiation. Auto-negotiation is an optional function of the IEEE 802.3u Fast Ethernet standard that enables devices to automatically exchange information over a link about speed and duplex abilities.')
self.parser_cli.add_argument('-s', '--speed', default= DEFAULT_SPEED, choices=SPEED_CHOICES, help='The interface policy administrative port speed. The data transfer rate for the port should match the destination to which the port is linked. The administrative speed can be changed only for certain ports, and not all speeds are available on all systems. For more information, see the Hardware Installation Guide for your fabric interconnect.')
self.parser_cli.add_argument('-l', '--link_debounce_interval', default= DEFAULT_LINK_DEBOUNCE_INTERVAL, help='The interface policy administrative port link debounce interval. Enables the debounce timer for physical interface ports and sets it for a specified amount of time in milliseconds. The debounce timer is disabled if you specify the time to 0 ms.')
def read_key_args(self):
self.link_level_policy = self.args.pop('link_level_policy')
def wizard_mode_input_args(self):
self.args['link_level_policy'] = input_key_args()
if not self.delete:
self.args['optional_args'] = input_optional_args()
def delete_mo(self):
self.check_if_mo_exist('uni/infra/hintfpol-', self.link_level_policy, HIfPol, description='Link Level Policy')
super(CreateLinkLevelPolicy, self).delete_mo()
def main_function(self):
# Query to parent
self.look_up_mo('uni/infra/', '')
create_link_level_policy(self.mo, self.link_level_policy, optional_args=self.optional_args)
if __name__ == '__main__':
mo = CreateLinkLevelPolicy()
|
normal
|
{
"blob_id": "36ab827b889adcd4d54296e7da432d3b39d5a2e6",
"index": 2246,
"step-1": "from cobra.model.fabric import HIfPol\n\nfrom createMo import *\n\nDEFAULT_AUTO_NEGOTIATION = 'on'\nDEFAULT_SPEED = '10G'\nDEFAULT_LINK_DEBOUNCE_INTERVAL = 100\n\nAUTO_NEGOTIATION_CHOICES = ['on', 'off']\nSPEED_CHOICES = ['100M', '1G', '10G', '40G']\n\n\ndef input_key_args(msg='\\nPlease Specify Link Level Policy:'):\n print msg\n return input_raw_input(\"Link Level Policy Name\", required=True)\n\n\ndef input_optional_args():\n args = {}\n args['atuo_negotiation'] = input_options('Auto Negotiation', DEFAULT_AUTO_NEGOTIATION, AUTO_NEGOTIATION_CHOICES)\n args['speed'] = input_options('Speed', DEFAULT_SPEED, SPEED_CHOICES)\n args['link_debounce_interval'] = input_options('Link Debounce Interval (msec)', str(DEFAULT_LINK_DEBOUNCE_INTERVAL), '', num_accept=True)\n return args\n\n\ndef create_link_level_policy(parent_mo, link_level_policy, **args):\n \"\"\"Create Link Level Policy\"\"\"\n args = args['optional_args'] if 'optional_args' in args.keys() else args\n # Create mo\n\n if is_valid_key(args, 'atuo_negotiation'):\n if args['atuo_negotiation'] or args['atuo_negotiation'] == 'on':\n args['atuo_negotiation'] = 'on'\n elif not args['atuo_negotiation'] or args['atuo_negotiation'] == 'off':\n args['atuo_negotiation'] = 'off'\n\n fabric_hifpol = HIfPol(parent_mo, link_level_policy,\n autoNeg=get_value(args, 'atuo_negotiation', DEFAULT_AUTO_NEGOTIATION),\n speed=get_value(args, 'speed', DEFAULT_SPEED),\n linkDebounce=get_value(args, 'link_debounce_interval', DEFAULT_LINK_DEBOUNCE_INTERVAL),\n )\n return fabric_hifpol\n\n\nclass CreateLinkLevelPolicy(CreateMo):\n\n def __init__(self):\n self.description = 'Create Link Level Policy. The host interface policy. This specifies the layer 1 parameters of host facing ports. 
'\n self.link_level_policy = None\n super(CreateLinkLevelPolicy, self).__init__()\n\n def set_cli_mode(self):\n super(CreateLinkLevelPolicy, self).set_cli_mode()\n self.parser_cli.add_argument('link_level_policy', help='The name of the interface policy. ')\n self.parser_cli.add_argument('-a', '--atuo_negotiation', default= DEFAULT_AUTO_NEGOTIATION, choices=AUTO_NEGOTIATION_CHOICES, help='The policy auto-negotiation. Auto-negotiation is an optional function of the IEEE 802.3u Fast Ethernet standard that enables devices to automatically exchange information over a link about speed and duplex abilities.')\n self.parser_cli.add_argument('-s', '--speed', default= DEFAULT_SPEED, choices=SPEED_CHOICES, help='The interface policy administrative port speed. The data transfer rate for the port should match the destination to which the port is linked. The administrative speed can be changed only for certain ports, and not all speeds are available on all systems. For more information, see the Hardware Installation Guide for your fabric interconnect.')\n self.parser_cli.add_argument('-l', '--link_debounce_interval', default= DEFAULT_LINK_DEBOUNCE_INTERVAL, help='The interface policy administrative port link debounce interval. Enables the debounce timer for physical interface ports and sets it for a specified amount of time in milliseconds. 
The debounce timer is disabled if you specify the time to 0 ms.')\n\n def read_key_args(self):\n self.link_level_policy = self.args.pop('link_level_policy')\n\n def wizard_mode_input_args(self):\n self.args['link_level_policy'] = input_key_args()\n if not self.delete:\n self.args['optional_args'] = input_optional_args()\n\n def delete_mo(self):\n self.check_if_mo_exist('uni/infra/hintfpol-', self.link_level_policy, HIfPol, description='Link Level Policy')\n super(CreateLinkLevelPolicy, self).delete_mo()\n\n def main_function(self):\n # Query to parent\n self.look_up_mo('uni/infra/', '')\n create_link_level_policy(self.mo, self.link_level_policy, optional_args=self.optional_args)\n\nif __name__ == '__main__':\n mo = CreateLinkLevelPolicy()\n\n\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
list = input().split()
n = int(list[0])
k = int(list[1])
list.clear()
for i in range(0, n):
list.append("")
tmp = input().split()
list[i] = tmp[0] + list[int(tmp[1])-1]
for i in range(0, k):
start = input()
print(len([word for word in list if word.startswith(start)]))
|
normal
|
{
"blob_id": "1808be09c2730af5829bb0c7c0c7cfe9f80fe84c",
"index": 7546,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nlist.clear()\nfor i in range(0, n):\n list.append('')\n tmp = input().split()\n list[i] = tmp[0] + list[int(tmp[1]) - 1]\nfor i in range(0, k):\n start = input()\n print(len([word for word in list if word.startswith(start)]))\n",
"step-3": "list = input().split()\nn = int(list[0])\nk = int(list[1])\nlist.clear()\nfor i in range(0, n):\n list.append('')\n tmp = input().split()\n list[i] = tmp[0] + list[int(tmp[1]) - 1]\nfor i in range(0, k):\n start = input()\n print(len([word for word in list if word.startswith(start)]))\n",
"step-4": "list = input().split()\nn = int(list[0])\nk = int(list[1])\nlist.clear()\nfor i in range(0, n):\n list.append(\"\")\n tmp = input().split()\n list[i] = tmp[0] + list[int(tmp[1])-1]\nfor i in range(0, k):\n start = input()\n print(len([word for word in list if word.startswith(start)]))",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
class Player(object):
def __init__(self, player_num, px, py, sx, sy, start_direction):
self.player_num = player_num
self.rect = pygame.Rect(px, py, sx, sy)
self.direction = start_direction
self.moto = Moto(player_num, start_direction)
self.moto.rect.x = px
self.moto.rect.y = py
def moveRight(self):
if self.direction != 1:
self.direction = 0
self.moto.moveRight()
def moveLeft(self):
if self.direction != 0:
self.direction = 1
self.moto.moveLeft()
def moveUp(self):
if self.direction != 3:
self.direction = 2
self.moto.moveUp()
def moveDown(self):
if self.direction != 2:
self.direction = 3
self.moto.moveDown()
def moveOn(self):
if self.direction == 0:
self.move(2, 0)
if self.direction == 1:
self.move(-2, 0)
if self.direction == 2:
self.move(0, -2)
if self.direction == 3:
self.move(0, 2)
<|reserved_special_token_0|>
def move_single_axis(self, dx, dy):
self.rect.x += dx
self.rect.y += dy
Wall(self.player_num, (self.rect.centerx, self.rect.centery))
class Wall(object):
def __init__(self, player_num, pos):
Game.walls[player_num].append(self)
self.rect = pygame.Rect(pos[0], pos[1], 3, 3)
class Game:
walls = [[], []]
def main(self):
winner = 0
screen = pygame.display.set_mode((SCREEN_WIDTH, SCREEN_HEIGHT))
clock = pygame.time.Clock()
Game.walls = [[], []]
player = Player(0, SCREEN_WIDTH - 80, int(SCREEN_HEIGHT / 2), 2, 16, 1)
player2 = Player(1, 80, int(SCREEN_HEIGHT / 2), 2, 16, 0)
try:
pygame.joystick.init()
joysticks = [pygame.joystick.Joystick(x) for x in range(pygame.
joystick.get_count())]
joysticks[0].init()
joysticks[1].init()
player1_joystick = joysticks[0]
player2_joystick = joysticks[1]
except IndexError:
player1_joystick = None
player2_joystick = None
end = pygame.image.load('number3.png')
screen.fill((0, 0, 0))
screen.blit(end, (0.5 * SCREEN_WIDTH - 0.5 * 500, 0.5 *
SCREEN_HEIGHT - 0.5 * 500))
pygame.display.flip()
pygame.time.wait(1000)
end = pygame.image.load('number2.png')
screen.fill((0, 0, 0))
screen.blit(end, (0.5 * SCREEN_WIDTH - 0.5 * 500, 0.5 *
SCREEN_HEIGHT - 0.5 * 500))
pygame.display.flip()
pygame.time.wait(1000)
end = pygame.image.load('number1.png')
screen.fill((0, 0, 0))
screen.blit(end, (0.5 * SCREEN_WIDTH - 0.5 * 500, 0.5 *
SCREEN_HEIGHT - 0.5 * 500))
pygame.display.flip()
pygame.time.wait(1000)
pygame.display.flip()
running = True
while running:
clock.tick(60)
for e in pygame.event.get():
if e.type == pygame.QUIT:
running = False
if e.type == pygame.KEYDOWN and e.key == pygame.K_ESCAPE:
running = False
try:
if e.type == pygame.locals.JOYAXISMOTION:
player1jx, player1jy = player1_joystick.get_axis(0
), player1_joystick.get_axis(1)
if player1jx < 0:
player2.moveLeft()
if player1jx > 0:
player2.moveRight()
if player1jy < 0:
player2.moveUp()
if player1jy > 0:
player2.moveDown()
player2jx, player2jy = player2_joystick.get_axis(0
), player2_joystick.get_axis(1)
if player2jx < 0:
player.moveLeft()
if player2jx > 0:
player.moveRight()
if player2jy < 0:
player.moveUp()
if player2jy > 0:
player.moveDown()
except:
pass
key = pygame.key.get_pressed()
if key[pygame.K_LEFT]:
player.moveLeft()
if key[pygame.K_RIGHT]:
player.moveRight()
if key[pygame.K_UP]:
player.moveUp()
if key[pygame.K_DOWN]:
player.moveDown()
player.moveOn()
key = pygame.key.get_pressed()
if key[pygame.K_a]:
player2.moveLeft()
if key[pygame.K_d]:
player2.moveRight()
if key[pygame.K_w]:
player2.moveUp()
if key[pygame.K_s]:
player2.moveDown()
player2.moveOn()
if player.moto.rect.x < 0 or player.moto.rect.x > SCREEN_WIDTH:
winner = 2
running = False
if player2.moto.rect.x < 0 or player2.moto.rect.x > SCREEN_WIDTH:
winner = 1
running = False
if player.moto.rect.y < 0 or player.moto.rect.y > SCREEN_HEIGHT:
winner = 2
running = False
if player2.moto.rect.y < 0 or player2.moto.rect.y > SCREEN_HEIGHT:
winner = 1
running = False
screen.fill((0, 0, 0))
counter1 = 0
counter2 = 0
coll_range = len(Game.walls[0]) - (player.moto.rect.width / 2 + 10)
coll_range_2 = len(Game.walls[1]) - (player2.moto.rect.width /
2 + 10)
for wall in Game.walls[0]:
if player2.moto.rect.colliderect(wall.rect):
winner = 1
running = False
if counter1 < coll_range and player.moto.rect.colliderect(wall
.rect):
winner = 2
running = False
counter1 += 1
pygame.draw.rect(screen, (255, 0, 0), wall.rect)
for wall in Game.walls[1]:
if player.moto.rect.colliderect(wall.rect):
winner = 2
running = False
if counter2 < coll_range_2 and player2.moto.rect.colliderect(
wall.rect):
winner = 1
running = False
counter2 += 1
pygame.draw.rect(screen, (0, 0, 255), wall.rect)
pygame.draw.rect(screen, (255, 200, 0), player.rect)
screen.blit(player.moto.image, (player.moto.rect.x, player.moto
.rect.y))
pygame.draw.rect(screen, (255, 200, 0), player2.rect)
screen.blit(player2.moto.image, (player2.moto.rect.x, player2.
moto.rect.y))
pygame.display.flip()
print('Winner: ', winner)
running = True
clock = pygame.time.Clock()
sound = pygame.mixer.Sound('blast.wav')
sound.play(loops=0, maxtime=0, fade_ms=0)
while running:
clock.tick(60)
for e in pygame.event.get():
if e.type == pygame.JOYBUTTONDOWN:
player1Button = player1_joystick.get_button(0)
if player1Button > 0:
running = False
print('BACK TO MENU')
return True
player2Button = player2_joystick.get_button(0)
if player2Button > 0:
running = False
print('BACK TO MENU')
return True
if e.type == pygame.KEYDOWN and (e.key == pygame.K_KP_ENTER or
e.key == pygame.K_RETURN):
running = False
print('BACK TO MENU')
return True
end = pygame.image.load('gameover.png')
screen.blit(end, (0.5 * SCREEN_WIDTH - 0.5 * 1024, 0.5 *
SCREEN_HEIGHT - 0.5 * 768))
screen.fill((0, 0, 0))
screen.blit(end, (10, 10))
if winner == 2:
myfont = pygame.font.SysFont('monospace', 72)
label = myfont.render('Blue won!', 1, (0, 0, 225))
screen.blit(label, (0.5 * SCREEN_WIDTH - 0.5 * 500, 0.5 *
SCREEN_HEIGHT - 0.5 * 750))
else:
myfont = pygame.font.SysFont('monospace', 72)
label = myfont.render('Red won!', 1, (255, 0, 0))
screen.blit(label, (0.5 * SCREEN_WIDTH - 0.5 * 500, 0.5 *
SCREEN_HEIGHT - 0.5 * 750))
pygame.display.flip()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Moto(pygame.sprite.Sprite):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class Player(object):
def __init__(self, player_num, px, py, sx, sy, start_direction):
self.player_num = player_num
self.rect = pygame.Rect(px, py, sx, sy)
self.direction = start_direction
self.moto = Moto(player_num, start_direction)
self.moto.rect.x = px
self.moto.rect.y = py
def moveRight(self):
if self.direction != 1:
self.direction = 0
self.moto.moveRight()
def moveLeft(self):
if self.direction != 0:
self.direction = 1
self.moto.moveLeft()
def moveUp(self):
if self.direction != 3:
self.direction = 2
self.moto.moveUp()
def moveDown(self):
if self.direction != 2:
self.direction = 3
self.moto.moveDown()
def moveOn(self):
if self.direction == 0:
self.move(2, 0)
if self.direction == 1:
self.move(-2, 0)
if self.direction == 2:
self.move(0, -2)
if self.direction == 3:
self.move(0, 2)
def move(self, dx, dy):
if dx != 0:
self.move_single_axis(dx, 0)
self.moto.move_single_axis(dx, 0)
if dy != 0:
self.move_single_axis(0, dy)
self.moto.move_single_axis(0, dy)
def move_single_axis(self, dx, dy):
self.rect.x += dx
self.rect.y += dy
Wall(self.player_num, (self.rect.centerx, self.rect.centery))
class Wall(object):
def __init__(self, player_num, pos):
Game.walls[player_num].append(self)
self.rect = pygame.Rect(pos[0], pos[1], 3, 3)
class Game:
walls = [[], []]
def main(self):
winner = 0
screen = pygame.display.set_mode((SCREEN_WIDTH, SCREEN_HEIGHT))
clock = pygame.time.Clock()
Game.walls = [[], []]
player = Player(0, SCREEN_WIDTH - 80, int(SCREEN_HEIGHT / 2), 2, 16, 1)
player2 = Player(1, 80, int(SCREEN_HEIGHT / 2), 2, 16, 0)
try:
pygame.joystick.init()
joysticks = [pygame.joystick.Joystick(x) for x in range(pygame.
joystick.get_count())]
joysticks[0].init()
joysticks[1].init()
player1_joystick = joysticks[0]
player2_joystick = joysticks[1]
except IndexError:
player1_joystick = None
player2_joystick = None
end = pygame.image.load('number3.png')
screen.fill((0, 0, 0))
screen.blit(end, (0.5 * SCREEN_WIDTH - 0.5 * 500, 0.5 *
SCREEN_HEIGHT - 0.5 * 500))
pygame.display.flip()
pygame.time.wait(1000)
end = pygame.image.load('number2.png')
screen.fill((0, 0, 0))
screen.blit(end, (0.5 * SCREEN_WIDTH - 0.5 * 500, 0.5 *
SCREEN_HEIGHT - 0.5 * 500))
pygame.display.flip()
pygame.time.wait(1000)
end = pygame.image.load('number1.png')
screen.fill((0, 0, 0))
screen.blit(end, (0.5 * SCREEN_WIDTH - 0.5 * 500, 0.5 *
SCREEN_HEIGHT - 0.5 * 500))
pygame.display.flip()
pygame.time.wait(1000)
pygame.display.flip()
running = True
while running:
clock.tick(60)
for e in pygame.event.get():
if e.type == pygame.QUIT:
running = False
if e.type == pygame.KEYDOWN and e.key == pygame.K_ESCAPE:
running = False
try:
if e.type == pygame.locals.JOYAXISMOTION:
player1jx, player1jy = player1_joystick.get_axis(0
), player1_joystick.get_axis(1)
if player1jx < 0:
player2.moveLeft()
if player1jx > 0:
player2.moveRight()
if player1jy < 0:
player2.moveUp()
if player1jy > 0:
player2.moveDown()
player2jx, player2jy = player2_joystick.get_axis(0
), player2_joystick.get_axis(1)
if player2jx < 0:
player.moveLeft()
if player2jx > 0:
player.moveRight()
if player2jy < 0:
player.moveUp()
if player2jy > 0:
player.moveDown()
except:
pass
key = pygame.key.get_pressed()
if key[pygame.K_LEFT]:
player.moveLeft()
if key[pygame.K_RIGHT]:
player.moveRight()
if key[pygame.K_UP]:
player.moveUp()
if key[pygame.K_DOWN]:
player.moveDown()
player.moveOn()
key = pygame.key.get_pressed()
if key[pygame.K_a]:
player2.moveLeft()
if key[pygame.K_d]:
player2.moveRight()
if key[pygame.K_w]:
player2.moveUp()
if key[pygame.K_s]:
player2.moveDown()
player2.moveOn()
if player.moto.rect.x < 0 or player.moto.rect.x > SCREEN_WIDTH:
winner = 2
running = False
if player2.moto.rect.x < 0 or player2.moto.rect.x > SCREEN_WIDTH:
winner = 1
running = False
if player.moto.rect.y < 0 or player.moto.rect.y > SCREEN_HEIGHT:
winner = 2
running = False
if player2.moto.rect.y < 0 or player2.moto.rect.y > SCREEN_HEIGHT:
winner = 1
running = False
screen.fill((0, 0, 0))
counter1 = 0
counter2 = 0
coll_range = len(Game.walls[0]) - (player.moto.rect.width / 2 + 10)
coll_range_2 = len(Game.walls[1]) - (player2.moto.rect.width /
2 + 10)
for wall in Game.walls[0]:
if player2.moto.rect.colliderect(wall.rect):
winner = 1
running = False
if counter1 < coll_range and player.moto.rect.colliderect(wall
.rect):
winner = 2
running = False
counter1 += 1
pygame.draw.rect(screen, (255, 0, 0), wall.rect)
for wall in Game.walls[1]:
if player.moto.rect.colliderect(wall.rect):
winner = 2
running = False
if counter2 < coll_range_2 and player2.moto.rect.colliderect(
wall.rect):
winner = 1
running = False
counter2 += 1
pygame.draw.rect(screen, (0, 0, 255), wall.rect)
pygame.draw.rect(screen, (255, 200, 0), player.rect)
screen.blit(player.moto.image, (player.moto.rect.x, player.moto
.rect.y))
pygame.draw.rect(screen, (255, 200, 0), player2.rect)
screen.blit(player2.moto.image, (player2.moto.rect.x, player2.
moto.rect.y))
pygame.display.flip()
print('Winner: ', winner)
running = True
clock = pygame.time.Clock()
sound = pygame.mixer.Sound('blast.wav')
sound.play(loops=0, maxtime=0, fade_ms=0)
while running:
clock.tick(60)
for e in pygame.event.get():
if e.type == pygame.JOYBUTTONDOWN:
player1Button = player1_joystick.get_button(0)
if player1Button > 0:
running = False
print('BACK TO MENU')
return True
player2Button = player2_joystick.get_button(0)
if player2Button > 0:
running = False
print('BACK TO MENU')
return True
if e.type == pygame.KEYDOWN and (e.key == pygame.K_KP_ENTER or
e.key == pygame.K_RETURN):
running = False
print('BACK TO MENU')
return True
end = pygame.image.load('gameover.png')
screen.blit(end, (0.5 * SCREEN_WIDTH - 0.5 * 1024, 0.5 *
SCREEN_HEIGHT - 0.5 * 768))
screen.fill((0, 0, 0))
screen.blit(end, (10, 10))
if winner == 2:
myfont = pygame.font.SysFont('monospace', 72)
label = myfont.render('Blue won!', 1, (0, 0, 225))
screen.blit(label, (0.5 * SCREEN_WIDTH - 0.5 * 500, 0.5 *
SCREEN_HEIGHT - 0.5 * 750))
else:
myfont = pygame.font.SysFont('monospace', 72)
label = myfont.render('Red won!', 1, (255, 0, 0))
screen.blit(label, (0.5 * SCREEN_WIDTH - 0.5 * 500, 0.5 *
SCREEN_HEIGHT - 0.5 * 750))
pygame.display.flip()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Moto(pygame.sprite.Sprite):
<|reserved_special_token_0|>
def move_single_axis(self, dx, dy):
self.rect.x += dx
self.rect.y += dy
def moveRight(self):
self.direction = 0
self.image = pygame.transform.rotate(self.orig_image, 0)
def moveLeft(self):
self.direction = 1
self.image = pygame.transform.rotate(self.orig_image, 0)
def moveUp(self):
self.direction = 2
self.image = pygame.transform.rotate(self.orig_image, 90)
<|reserved_special_token_0|>
class Player(object):
def __init__(self, player_num, px, py, sx, sy, start_direction):
self.player_num = player_num
self.rect = pygame.Rect(px, py, sx, sy)
self.direction = start_direction
self.moto = Moto(player_num, start_direction)
self.moto.rect.x = px
self.moto.rect.y = py
def moveRight(self):
if self.direction != 1:
self.direction = 0
self.moto.moveRight()
def moveLeft(self):
if self.direction != 0:
self.direction = 1
self.moto.moveLeft()
def moveUp(self):
if self.direction != 3:
self.direction = 2
self.moto.moveUp()
def moveDown(self):
if self.direction != 2:
self.direction = 3
self.moto.moveDown()
def moveOn(self):
if self.direction == 0:
self.move(2, 0)
if self.direction == 1:
self.move(-2, 0)
if self.direction == 2:
self.move(0, -2)
if self.direction == 3:
self.move(0, 2)
def move(self, dx, dy):
if dx != 0:
self.move_single_axis(dx, 0)
self.moto.move_single_axis(dx, 0)
if dy != 0:
self.move_single_axis(0, dy)
self.moto.move_single_axis(0, dy)
def move_single_axis(self, dx, dy):
self.rect.x += dx
self.rect.y += dy
Wall(self.player_num, (self.rect.centerx, self.rect.centery))
class Wall(object):
def __init__(self, player_num, pos):
Game.walls[player_num].append(self)
self.rect = pygame.Rect(pos[0], pos[1], 3, 3)
class Game:
walls = [[], []]
def main(self):
winner = 0
screen = pygame.display.set_mode((SCREEN_WIDTH, SCREEN_HEIGHT))
clock = pygame.time.Clock()
Game.walls = [[], []]
player = Player(0, SCREEN_WIDTH - 80, int(SCREEN_HEIGHT / 2), 2, 16, 1)
player2 = Player(1, 80, int(SCREEN_HEIGHT / 2), 2, 16, 0)
try:
pygame.joystick.init()
joysticks = [pygame.joystick.Joystick(x) for x in range(pygame.
joystick.get_count())]
joysticks[0].init()
joysticks[1].init()
player1_joystick = joysticks[0]
player2_joystick = joysticks[1]
except IndexError:
player1_joystick = None
player2_joystick = None
end = pygame.image.load('number3.png')
screen.fill((0, 0, 0))
screen.blit(end, (0.5 * SCREEN_WIDTH - 0.5 * 500, 0.5 *
SCREEN_HEIGHT - 0.5 * 500))
pygame.display.flip()
pygame.time.wait(1000)
end = pygame.image.load('number2.png')
screen.fill((0, 0, 0))
screen.blit(end, (0.5 * SCREEN_WIDTH - 0.5 * 500, 0.5 *
SCREEN_HEIGHT - 0.5 * 500))
pygame.display.flip()
pygame.time.wait(1000)
end = pygame.image.load('number1.png')
screen.fill((0, 0, 0))
screen.blit(end, (0.5 * SCREEN_WIDTH - 0.5 * 500, 0.5 *
SCREEN_HEIGHT - 0.5 * 500))
pygame.display.flip()
pygame.time.wait(1000)
pygame.display.flip()
running = True
while running:
clock.tick(60)
for e in pygame.event.get():
if e.type == pygame.QUIT:
running = False
if e.type == pygame.KEYDOWN and e.key == pygame.K_ESCAPE:
running = False
try:
if e.type == pygame.locals.JOYAXISMOTION:
player1jx, player1jy = player1_joystick.get_axis(0
), player1_joystick.get_axis(1)
if player1jx < 0:
player2.moveLeft()
if player1jx > 0:
player2.moveRight()
if player1jy < 0:
player2.moveUp()
if player1jy > 0:
player2.moveDown()
player2jx, player2jy = player2_joystick.get_axis(0
), player2_joystick.get_axis(1)
if player2jx < 0:
player.moveLeft()
if player2jx > 0:
player.moveRight()
if player2jy < 0:
player.moveUp()
if player2jy > 0:
player.moveDown()
except:
pass
key = pygame.key.get_pressed()
if key[pygame.K_LEFT]:
player.moveLeft()
if key[pygame.K_RIGHT]:
player.moveRight()
if key[pygame.K_UP]:
player.moveUp()
if key[pygame.K_DOWN]:
player.moveDown()
player.moveOn()
key = pygame.key.get_pressed()
if key[pygame.K_a]:
player2.moveLeft()
if key[pygame.K_d]:
player2.moveRight()
if key[pygame.K_w]:
player2.moveUp()
if key[pygame.K_s]:
player2.moveDown()
player2.moveOn()
if player.moto.rect.x < 0 or player.moto.rect.x > SCREEN_WIDTH:
winner = 2
running = False
if player2.moto.rect.x < 0 or player2.moto.rect.x > SCREEN_WIDTH:
winner = 1
running = False
if player.moto.rect.y < 0 or player.moto.rect.y > SCREEN_HEIGHT:
winner = 2
running = False
if player2.moto.rect.y < 0 or player2.moto.rect.y > SCREEN_HEIGHT:
winner = 1
running = False
screen.fill((0, 0, 0))
counter1 = 0
counter2 = 0
coll_range = len(Game.walls[0]) - (player.moto.rect.width / 2 + 10)
coll_range_2 = len(Game.walls[1]) - (player2.moto.rect.width /
2 + 10)
for wall in Game.walls[0]:
if player2.moto.rect.colliderect(wall.rect):
winner = 1
running = False
if counter1 < coll_range and player.moto.rect.colliderect(wall
.rect):
winner = 2
running = False
counter1 += 1
pygame.draw.rect(screen, (255, 0, 0), wall.rect)
for wall in Game.walls[1]:
if player.moto.rect.colliderect(wall.rect):
winner = 2
running = False
if counter2 < coll_range_2 and player2.moto.rect.colliderect(
wall.rect):
winner = 1
running = False
counter2 += 1
pygame.draw.rect(screen, (0, 0, 255), wall.rect)
pygame.draw.rect(screen, (255, 200, 0), player.rect)
screen.blit(player.moto.image, (player.moto.rect.x, player.moto
.rect.y))
pygame.draw.rect(screen, (255, 200, 0), player2.rect)
screen.blit(player2.moto.image, (player2.moto.rect.x, player2.
moto.rect.y))
pygame.display.flip()
print('Winner: ', winner)
running = True
clock = pygame.time.Clock()
sound = pygame.mixer.Sound('blast.wav')
sound.play(loops=0, maxtime=0, fade_ms=0)
while running:
clock.tick(60)
for e in pygame.event.get():
if e.type == pygame.JOYBUTTONDOWN:
player1Button = player1_joystick.get_button(0)
if player1Button > 0:
running = False
print('BACK TO MENU')
return True
player2Button = player2_joystick.get_button(0)
if player2Button > 0:
running = False
print('BACK TO MENU')
return True
if e.type == pygame.KEYDOWN and (e.key == pygame.K_KP_ENTER or
e.key == pygame.K_RETURN):
running = False
print('BACK TO MENU')
return True
end = pygame.image.load('gameover.png')
screen.blit(end, (0.5 * SCREEN_WIDTH - 0.5 * 1024, 0.5 *
SCREEN_HEIGHT - 0.5 * 768))
screen.fill((0, 0, 0))
screen.blit(end, (10, 10))
if winner == 2:
myfont = pygame.font.SysFont('monospace', 72)
label = myfont.render('Blue won!', 1, (0, 0, 225))
screen.blit(label, (0.5 * SCREEN_WIDTH - 0.5 * 500, 0.5 *
SCREEN_HEIGHT - 0.5 * 750))
else:
myfont = pygame.font.SysFont('monospace', 72)
label = myfont.render('Red won!', 1, (255, 0, 0))
screen.blit(label, (0.5 * SCREEN_WIDTH - 0.5 * 500, 0.5 *
SCREEN_HEIGHT - 0.5 * 750))
pygame.display.flip()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
SCREEN_WIDTH = 1280
SCREEN_HEIGHT = 1024
class Moto(pygame.sprite.Sprite):
def __init__(self, player_num, start_direction):
pygame.sprite.Sprite.__init__(self)
self.image = pygame.image.load('motor' + str(player_num) + '.png'
).convert()
self.orig_image = self.image
self.rect = self.image.get_rect()
self.direction = start_direction
def move_single_axis(self, dx, dy):
self.rect.x += dx
self.rect.y += dy
def moveRight(self):
self.direction = 0
self.image = pygame.transform.rotate(self.orig_image, 0)
def moveLeft(self):
self.direction = 1
self.image = pygame.transform.rotate(self.orig_image, 0)
def moveUp(self):
self.direction = 2
self.image = pygame.transform.rotate(self.orig_image, 90)
def moveDown(self):
self.direction = 3
self.image = pygame.transform.rotate(self.orig_image, 90)
class Player(object):
def __init__(self, player_num, px, py, sx, sy, start_direction):
self.player_num = player_num
self.rect = pygame.Rect(px, py, sx, sy)
self.direction = start_direction
self.moto = Moto(player_num, start_direction)
self.moto.rect.x = px
self.moto.rect.y = py
def moveRight(self):
if self.direction != 1:
self.direction = 0
self.moto.moveRight()
def moveLeft(self):
if self.direction != 0:
self.direction = 1
self.moto.moveLeft()
def moveUp(self):
if self.direction != 3:
self.direction = 2
self.moto.moveUp()
def moveDown(self):
if self.direction != 2:
self.direction = 3
self.moto.moveDown()
def moveOn(self):
if self.direction == 0:
self.move(2, 0)
if self.direction == 1:
self.move(-2, 0)
if self.direction == 2:
self.move(0, -2)
if self.direction == 3:
self.move(0, 2)
def move(self, dx, dy):
if dx != 0:
self.move_single_axis(dx, 0)
self.moto.move_single_axis(dx, 0)
if dy != 0:
self.move_single_axis(0, dy)
self.moto.move_single_axis(0, dy)
def move_single_axis(self, dx, dy):
self.rect.x += dx
self.rect.y += dy
Wall(self.player_num, (self.rect.centerx, self.rect.centery))
class Wall(object):
def __init__(self, player_num, pos):
Game.walls[player_num].append(self)
self.rect = pygame.Rect(pos[0], pos[1], 3, 3)
class Game:
walls = [[], []]
def main(self):
winner = 0
screen = pygame.display.set_mode((SCREEN_WIDTH, SCREEN_HEIGHT))
clock = pygame.time.Clock()
Game.walls = [[], []]
player = Player(0, SCREEN_WIDTH - 80, int(SCREEN_HEIGHT / 2), 2, 16, 1)
player2 = Player(1, 80, int(SCREEN_HEIGHT / 2), 2, 16, 0)
try:
pygame.joystick.init()
joysticks = [pygame.joystick.Joystick(x) for x in range(pygame.
joystick.get_count())]
joysticks[0].init()
joysticks[1].init()
player1_joystick = joysticks[0]
player2_joystick = joysticks[1]
except IndexError:
player1_joystick = None
player2_joystick = None
end = pygame.image.load('number3.png')
screen.fill((0, 0, 0))
screen.blit(end, (0.5 * SCREEN_WIDTH - 0.5 * 500, 0.5 *
SCREEN_HEIGHT - 0.5 * 500))
pygame.display.flip()
pygame.time.wait(1000)
end = pygame.image.load('number2.png')
screen.fill((0, 0, 0))
screen.blit(end, (0.5 * SCREEN_WIDTH - 0.5 * 500, 0.5 *
SCREEN_HEIGHT - 0.5 * 500))
pygame.display.flip()
pygame.time.wait(1000)
end = pygame.image.load('number1.png')
screen.fill((0, 0, 0))
screen.blit(end, (0.5 * SCREEN_WIDTH - 0.5 * 500, 0.5 *
SCREEN_HEIGHT - 0.5 * 500))
pygame.display.flip()
pygame.time.wait(1000)
pygame.display.flip()
running = True
while running:
clock.tick(60)
for e in pygame.event.get():
if e.type == pygame.QUIT:
running = False
if e.type == pygame.KEYDOWN and e.key == pygame.K_ESCAPE:
running = False
try:
if e.type == pygame.locals.JOYAXISMOTION:
player1jx, player1jy = player1_joystick.get_axis(0
), player1_joystick.get_axis(1)
if player1jx < 0:
player2.moveLeft()
if player1jx > 0:
player2.moveRight()
if player1jy < 0:
player2.moveUp()
if player1jy > 0:
player2.moveDown()
player2jx, player2jy = player2_joystick.get_axis(0
), player2_joystick.get_axis(1)
if player2jx < 0:
player.moveLeft()
if player2jx > 0:
player.moveRight()
if player2jy < 0:
player.moveUp()
if player2jy > 0:
player.moveDown()
except:
pass
key = pygame.key.get_pressed()
if key[pygame.K_LEFT]:
player.moveLeft()
if key[pygame.K_RIGHT]:
player.moveRight()
if key[pygame.K_UP]:
player.moveUp()
if key[pygame.K_DOWN]:
player.moveDown()
player.moveOn()
key = pygame.key.get_pressed()
if key[pygame.K_a]:
player2.moveLeft()
if key[pygame.K_d]:
player2.moveRight()
if key[pygame.K_w]:
player2.moveUp()
if key[pygame.K_s]:
player2.moveDown()
player2.moveOn()
if player.moto.rect.x < 0 or player.moto.rect.x > SCREEN_WIDTH:
winner = 2
running = False
if player2.moto.rect.x < 0 or player2.moto.rect.x > SCREEN_WIDTH:
winner = 1
running = False
if player.moto.rect.y < 0 or player.moto.rect.y > SCREEN_HEIGHT:
winner = 2
running = False
if player2.moto.rect.y < 0 or player2.moto.rect.y > SCREEN_HEIGHT:
winner = 1
running = False
screen.fill((0, 0, 0))
counter1 = 0
counter2 = 0
coll_range = len(Game.walls[0]) - (player.moto.rect.width / 2 + 10)
coll_range_2 = len(Game.walls[1]) - (player2.moto.rect.width /
2 + 10)
for wall in Game.walls[0]:
if player2.moto.rect.colliderect(wall.rect):
winner = 1
running = False
if counter1 < coll_range and player.moto.rect.colliderect(wall
.rect):
winner = 2
running = False
counter1 += 1
pygame.draw.rect(screen, (255, 0, 0), wall.rect)
for wall in Game.walls[1]:
if player.moto.rect.colliderect(wall.rect):
winner = 2
running = False
if counter2 < coll_range_2 and player2.moto.rect.colliderect(
wall.rect):
winner = 1
running = False
counter2 += 1
pygame.draw.rect(screen, (0, 0, 255), wall.rect)
pygame.draw.rect(screen, (255, 200, 0), player.rect)
screen.blit(player.moto.image, (player.moto.rect.x, player.moto
.rect.y))
pygame.draw.rect(screen, (255, 200, 0), player2.rect)
screen.blit(player2.moto.image, (player2.moto.rect.x, player2.
moto.rect.y))
pygame.display.flip()
print('Winner: ', winner)
running = True
clock = pygame.time.Clock()
sound = pygame.mixer.Sound('blast.wav')
sound.play(loops=0, maxtime=0, fade_ms=0)
while running:
clock.tick(60)
for e in pygame.event.get():
if e.type == pygame.JOYBUTTONDOWN:
player1Button = player1_joystick.get_button(0)
if player1Button > 0:
running = False
print('BACK TO MENU')
return True
player2Button = player2_joystick.get_button(0)
if player2Button > 0:
running = False
print('BACK TO MENU')
return True
if e.type == pygame.KEYDOWN and (e.key == pygame.K_KP_ENTER or
e.key == pygame.K_RETURN):
running = False
print('BACK TO MENU')
return True
end = pygame.image.load('gameover.png')
screen.blit(end, (0.5 * SCREEN_WIDTH - 0.5 * 1024, 0.5 *
SCREEN_HEIGHT - 0.5 * 768))
screen.fill((0, 0, 0))
screen.blit(end, (10, 10))
if winner == 2:
myfont = pygame.font.SysFont('monospace', 72)
label = myfont.render('Blue won!', 1, (0, 0, 225))
screen.blit(label, (0.5 * SCREEN_WIDTH - 0.5 * 500, 0.5 *
SCREEN_HEIGHT - 0.5 * 750))
else:
myfont = pygame.font.SysFont('monospace', 72)
label = myfont.render('Red won!', 1, (255, 0, 0))
screen.blit(label, (0.5 * SCREEN_WIDTH - 0.5 * 500, 0.5 *
SCREEN_HEIGHT - 0.5 * 750))
pygame.display.flip()
<|reserved_special_token_1|>
import os
import sys
import random
import pygame
import time
from pygame import locals
# Display resolution in pixels; also used as the playing-field bounds.
SCREEN_WIDTH = 1280
SCREEN_HEIGHT = 1024
class Moto(pygame.sprite.Sprite):
    """Sprite for one player's motorbike: image, position rect and heading.

    Directions: 0 = right, 1 = left, 2 = up, 3 = down.
    """

    def __init__(self, player_num, start_direction):
        pygame.sprite.Sprite.__init__(self)
        # Per-player bike image; keep the pristine copy so every rotation
        # starts from the unrotated original instead of compounding.
        self.image = pygame.image.load("motor" + str(player_num) + ".png").convert()
        self.orig_image = self.image
        # Position is driven externally via rect.x / rect.y.
        self.rect = self.image.get_rect()
        self.direction = start_direction

    def move_single_axis(self, dx, dy):
        """Shift the sprite's rect by (dx, dy) pixels."""
        self.rect.x += dx
        self.rect.y += dy

    def _turn(self, direction, angle):
        # Record the new heading and rotate the clean source image.
        self.direction = direction
        self.image = pygame.transform.rotate(self.orig_image, angle)

    def moveRight(self):
        self._turn(0, 0)

    def moveLeft(self):
        # NOTE(review): left reuses the 0-degree rotation of "right" —
        # presumably the sprite art is horizontally symmetric; confirm.
        self._turn(1, 0)

    def moveUp(self):
        self._turn(2, 90)

    def moveDown(self):
        # NOTE(review): down reuses the 90-degree rotation of "up".
        self._turn(3, 90)
# Player: one light-cycle rider (thin collision rect plus its Moto sprite).
class Player(object):
    """One light-cycle player: a thin collision rect plus its Moto sprite.

    Directions: 0 = right, 1 = left, 2 = up, 3 = down.  The turn methods
    refuse a direct 180-degree reversal, mirroring classic Tron rules.
    """

    # Per-heading velocity in pixels per frame: direction -> (dx, dy).
    _STEP = {0: (2, 0), 1: (-2, 0), 2: (0, -2), 3: (0, 2)}

    def __init__(self, player_num, px, py, sx, sy, start_direction):
        self.player_num = player_num
        self.rect = pygame.Rect(px, py, sx, sy)
        self.direction = start_direction
        self.moto = Moto(player_num, start_direction)
        self.moto.rect.x = px
        self.moto.rect.y = py

    def moveRight(self):
        # Ignore "right" while heading left (no instant reversal).
        if self.direction != 1:
            self.direction = 0
            self.moto.moveRight()

    def moveLeft(self):
        if self.direction != 0:
            self.direction = 1
            self.moto.moveLeft()

    def moveUp(self):
        if self.direction != 3:
            self.direction = 2
            self.moto.moveUp()

    def moveDown(self):
        if self.direction != 2:
            self.direction = 3
            self.moto.moveDown()

    def moveOn(self):
        """Advance one frame along the current heading."""
        dx, dy = self._STEP.get(self.direction, (0, 0))
        if dx or dy:
            self.move(dx, dy)

    def move(self, dx, dy):
        """Move the rect and the sprite, one axis at a time."""
        for ax, ay in ((dx, 0), (0, dy)):
            if ax != 0 or ay != 0:
                self.move_single_axis(ax, ay)
                self.moto.move_single_axis(ax, ay)

    def move_single_axis(self, dx, dy):
        self.rect.x += dx
        self.rect.y += dy
        # Drop a trail segment at the new centre — this grows the wall.
        Wall(self.player_num, (self.rect.centerx, self.rect.centery))
# Nice class to hold a wall rect
class Wall(object):
    """A single 3x3 trail segment; registers itself in the owner's wall list."""

    def __init__(self, player_num, pos):
        x, y = pos
        Game.walls[player_num].append(self)
        self.rect = pygame.Rect(x, y, 3, 3)
# MAIN
class Game:
    """Runs one round of the two-player light-cycle game.

    ``walls`` holds the two players' trail segments: walls[0] for player 1
    (red), walls[1] for player 2 (blue).
    """
    walls = [[], []]
    def main(self):
        """Play a round; returns True when a player confirms the game-over screen.

        winner is 1 (red / player) or 2 (blue / player2); 0 only if the
        window is closed before anyone crashes.
        """
        winner = 0
        screen = pygame.display.set_mode((SCREEN_WIDTH, SCREEN_HEIGHT))
        clock = pygame.time.Clock()
        # walls for 2 players: lists in list (reset per round)
        Game.walls = [[], []]
        # starting positions: player on the right heading left, player2 on
        # the left heading right
        player = Player(0, SCREEN_WIDTH - 80, int(SCREEN_HEIGHT / 2), 2, 16, 1)
        player2 = Player(1, 80, int(SCREEN_HEIGHT / 2), 2, 16, 0)
        # JOYSTICK setup: falls back to keyboard-only (None) when fewer
        # than two sticks are attached
        try:
            pygame.joystick.init()
            joysticks = [pygame.joystick.Joystick(x) for x in range(pygame.joystick.get_count())]
            joysticks[0].init()
            joysticks[1].init()
            player1_joystick = joysticks[0]
            player2_joystick = joysticks[1]
        except IndexError:
            player1_joystick = None
            player2_joystick = None
        # 3-2-1 countdown: blit each digit centred for one second
        end = pygame.image.load('number3.png')
        screen.fill((0, 0, 0))
        screen.blit(end, ((0.5 * SCREEN_WIDTH) - (0.5 * 500), (0.5 * SCREEN_HEIGHT) - (0.5 * 500)))
        pygame.display.flip()
        pygame.time.wait(1000)
        end = pygame.image.load('number2.png')
        screen.fill((0, 0, 0))
        screen.blit(end, ((0.5 * SCREEN_WIDTH) - (0.5 * 500), (0.5 * SCREEN_HEIGHT) - (0.5 * 500)))
        pygame.display.flip()
        pygame.time.wait(1000)
        end = pygame.image.load('number1.png')
        screen.fill((0, 0, 0))
        screen.blit(end, ((0.5 * SCREEN_WIDTH) - (0.5 * 500), (0.5 * SCREEN_HEIGHT) - (0.5 * 500)))
        pygame.display.flip()
        pygame.time.wait(1000)
        # end = pygame.image.load('arcade.jpg').convert()
        # screen.blit(end, ((0.5 * SCREEN_WIDTH) - (0.5 * 500), (0.5 * SCREEN_HEIGHT) - (0.5 * 500)))
        pygame.display.flip()
        # background_image = pygame.transform.scale(pygame.image.load('arcade.jpg').convert(), (1280, 1024))
        # screen.blit(background_image, [0, 0])
        # Main game loop, fixed at 60 FPS
        running = True
        while running:
            clock.tick(60)
            for e in pygame.event.get():
                if e.type == pygame.QUIT:
                    running = False
                if e.type == pygame.KEYDOWN and e.key == pygame.K_ESCAPE:
                    running = False
                # JOYSTICK input.
                # NOTE(review): player1_joystick steers player2 and vice
                # versa — possibly deliberate seating/wiring; confirm.
                # NOTE(review): the bare except silently swallows attribute
                # errors when no joysticks are attached (both are None).
                try:
                    if e.type == pygame.locals.JOYAXISMOTION:
                        player1jx, player1jy = player1_joystick.get_axis(0), player1_joystick.get_axis(1)
                        if player1jx < 0:
                            player2.moveLeft()
                        if player1jx > 0:
                            player2.moveRight()
                        if player1jy < 0:
                            player2.moveUp()
                        if player1jy > 0:
                            player2.moveDown()
                        player2jx, player2jy = player2_joystick.get_axis(0), player2_joystick.get_axis(1)
                        if player2jx < 0:
                            player.moveLeft()
                        if player2jx > 0:
                            player.moveRight()
                        if player2jy < 0:
                            player.moveUp()
                        if player2jy > 0:
                            player.moveDown()
                except:
                    pass
            # PLAYER 1: arrow keys
            # Move the player if an arrow key is pressed
            key = pygame.key.get_pressed()
            if key[pygame.K_LEFT]:
                player.moveLeft()
            if key[pygame.K_RIGHT]:
                player.moveRight()
            if key[pygame.K_UP]:
                player.moveUp()
            if key[pygame.K_DOWN]:
                player.moveDown()
            player.moveOn()
            # PLAYER 2: WASD keys
            key = pygame.key.get_pressed()
            if key[pygame.K_a]:
                player2.moveLeft()
            if key[pygame.K_d]:
                player2.moveRight()
            if key[pygame.K_w]:
                player2.moveUp()
            if key[pygame.K_s]:
                player2.moveDown()
            player2.moveOn()
            # check borders: leaving the screen loses the round
            if player.moto.rect.x < 0 or player.moto.rect.x > SCREEN_WIDTH:
                winner = 2
                running = False
            if player2.moto.rect.x < 0 or player2.moto.rect.x > SCREEN_WIDTH:
                winner = 1
                running = False
            if player.moto.rect.y < 0 or player.moto.rect.y > SCREEN_HEIGHT:
                winner = 2
                running = False
            if player2.moto.rect.y < 0 or player2.moto.rect.y > SCREEN_HEIGHT:
                winner = 1
                running = False
            # Draw the scene
            # screen.blit(background_image, [0, 0])
            # pygame.display.flip()
            screen.fill((0, 0, 0))
            # Player 1 walls.  coll_range excludes the newest trail segments
            # (roughly the bike's own length) so a rider doesn't instantly
            # collide with the wall just laid under itself — TODO confirm
            # the "+ 10" margin against the sprite size.
            counter1 = 0
            counter2 = 0
            coll_range = len(Game.walls[0]) - (player.moto.rect.width / 2 + 10)
            coll_range_2 = len(Game.walls[1]) - (player2.moto.rect.width / 2 + 10)
            for wall in Game.walls[0]:
                # Hitting the opponent's trail always counts
                if player2.moto.rect.colliderect(wall.rect):
                    winner = 1
                    running = False
                # Own trail only counts outside the fresh-segment window
                if (counter1 < coll_range) and player.moto.rect.colliderect(wall.rect):
                    winner = 2
                    running = False
                counter1 += 1
                pygame.draw.rect(screen, (255, 0, 0), wall.rect)
            # Player 2 walls (same logic, colors swapped)
            for wall in Game.walls[1]:
                if player.moto.rect.colliderect(wall.rect):
                    winner = 2
                    running = False
                if (counter2 < coll_range_2) and player2.moto.rect.colliderect(wall.rect):
                    winner = 1
                    running = False
                counter2 += 1
                pygame.draw.rect(screen, (0, 0, 255), wall.rect)
            # Player 1
            pygame.draw.rect(screen, (255, 200, 0), player.rect)
            screen.blit(player.moto.image, (player.moto.rect.x, player.moto.rect.y))
            # Player 2
            pygame.draw.rect(screen, (255, 200, 0), player2.rect)
            screen.blit(player2.moto.image, (player2.moto.rect.x, player2.moto.rect.y))
            pygame.display.flip()
        # GAME OVER: crash sound, then wait for fire button / Enter
        print("Winner: ", winner)
        running = True
        clock = pygame.time.Clock()
        sound = pygame.mixer.Sound('blast.wav')
        sound.play(loops=0, maxtime=0, fade_ms=0)
        while running:
            clock.tick(60)
            for e in pygame.event.get():
                if e.type == pygame.JOYBUTTONDOWN:
                    player1Button = player1_joystick.get_button(0)
                    if (player1Button > 0):
                        running = False
                        print("BACK TO MENU")
                        return True
                    player2Button = player2_joystick.get_button(0)
                    if (player2Button > 0):
                        running = False
                        print("BACK TO MENU")
                        return True
                if e.type == pygame.KEYDOWN and (e.key == pygame.K_KP_ENTER or e.key == pygame.K_RETURN):
                    running = False
                    print("BACK TO MENU")
                    return True
            # NOTE(review): this first blit is erased by the fill on the
            # next line; only the (10, 10) blit below is visible.
            end = pygame.image.load('gameover.png')
            screen.blit(end, ((0.5 * SCREEN_WIDTH) - (0.5 * 1024), (0.5 * SCREEN_HEIGHT) - (0.5 * 768)))
            screen.fill((0, 0, 0))
            screen.blit(end, (10, 10))
            if winner == 2:
                myfont = pygame.font.SysFont("monospace", 72)
                label = myfont.render('Blue won!', 1, (0, 0, 225))
                screen.blit(label, ((0.5 * SCREEN_WIDTH) - (0.5 * 500), (0.5 * SCREEN_HEIGHT) - (0.5 * 750)))
            else:
                myfont = pygame.font.SysFont("monospace", 72)
                label = myfont.render('Red won!', 1, (255, 0, 0))
                screen.blit(label, ((0.5 * SCREEN_WIDTH) - (0.5 * 500), (0.5 * SCREEN_HEIGHT) - (0.5 * 750)))
            pygame.display.flip()
|
flexible
|
{
"blob_id": "1d1f1c9b70ca487b48593c85c3e0b5afc10f0b07",
"index": 6642,
"step-1": "<mask token>\n\n\nclass Player(object):\n\n def __init__(self, player_num, px, py, sx, sy, start_direction):\n self.player_num = player_num\n self.rect = pygame.Rect(px, py, sx, sy)\n self.direction = start_direction\n self.moto = Moto(player_num, start_direction)\n self.moto.rect.x = px\n self.moto.rect.y = py\n\n def moveRight(self):\n if self.direction != 1:\n self.direction = 0\n self.moto.moveRight()\n\n def moveLeft(self):\n if self.direction != 0:\n self.direction = 1\n self.moto.moveLeft()\n\n def moveUp(self):\n if self.direction != 3:\n self.direction = 2\n self.moto.moveUp()\n\n def moveDown(self):\n if self.direction != 2:\n self.direction = 3\n self.moto.moveDown()\n\n def moveOn(self):\n if self.direction == 0:\n self.move(2, 0)\n if self.direction == 1:\n self.move(-2, 0)\n if self.direction == 2:\n self.move(0, -2)\n if self.direction == 3:\n self.move(0, 2)\n <mask token>\n\n def move_single_axis(self, dx, dy):\n self.rect.x += dx\n self.rect.y += dy\n Wall(self.player_num, (self.rect.centerx, self.rect.centery))\n\n\nclass Wall(object):\n\n def __init__(self, player_num, pos):\n Game.walls[player_num].append(self)\n self.rect = pygame.Rect(pos[0], pos[1], 3, 3)\n\n\nclass Game:\n walls = [[], []]\n\n def main(self):\n winner = 0\n screen = pygame.display.set_mode((SCREEN_WIDTH, SCREEN_HEIGHT))\n clock = pygame.time.Clock()\n Game.walls = [[], []]\n player = Player(0, SCREEN_WIDTH - 80, int(SCREEN_HEIGHT / 2), 2, 16, 1)\n player2 = Player(1, 80, int(SCREEN_HEIGHT / 2), 2, 16, 0)\n try:\n pygame.joystick.init()\n joysticks = [pygame.joystick.Joystick(x) for x in range(pygame.\n joystick.get_count())]\n joysticks[0].init()\n joysticks[1].init()\n player1_joystick = joysticks[0]\n player2_joystick = joysticks[1]\n except IndexError:\n player1_joystick = None\n player2_joystick = None\n end = pygame.image.load('number3.png')\n screen.fill((0, 0, 0))\n screen.blit(end, (0.5 * SCREEN_WIDTH - 0.5 * 500, 0.5 *\n SCREEN_HEIGHT - 0.5 * 500))\n 
pygame.display.flip()\n pygame.time.wait(1000)\n end = pygame.image.load('number2.png')\n screen.fill((0, 0, 0))\n screen.blit(end, (0.5 * SCREEN_WIDTH - 0.5 * 500, 0.5 *\n SCREEN_HEIGHT - 0.5 * 500))\n pygame.display.flip()\n pygame.time.wait(1000)\n end = pygame.image.load('number1.png')\n screen.fill((0, 0, 0))\n screen.blit(end, (0.5 * SCREEN_WIDTH - 0.5 * 500, 0.5 *\n SCREEN_HEIGHT - 0.5 * 500))\n pygame.display.flip()\n pygame.time.wait(1000)\n pygame.display.flip()\n running = True\n while running:\n clock.tick(60)\n for e in pygame.event.get():\n if e.type == pygame.QUIT:\n running = False\n if e.type == pygame.KEYDOWN and e.key == pygame.K_ESCAPE:\n running = False\n try:\n if e.type == pygame.locals.JOYAXISMOTION:\n player1jx, player1jy = player1_joystick.get_axis(0\n ), player1_joystick.get_axis(1)\n if player1jx < 0:\n player2.moveLeft()\n if player1jx > 0:\n player2.moveRight()\n if player1jy < 0:\n player2.moveUp()\n if player1jy > 0:\n player2.moveDown()\n player2jx, player2jy = player2_joystick.get_axis(0\n ), player2_joystick.get_axis(1)\n if player2jx < 0:\n player.moveLeft()\n if player2jx > 0:\n player.moveRight()\n if player2jy < 0:\n player.moveUp()\n if player2jy > 0:\n player.moveDown()\n except:\n pass\n key = pygame.key.get_pressed()\n if key[pygame.K_LEFT]:\n player.moveLeft()\n if key[pygame.K_RIGHT]:\n player.moveRight()\n if key[pygame.K_UP]:\n player.moveUp()\n if key[pygame.K_DOWN]:\n player.moveDown()\n player.moveOn()\n key = pygame.key.get_pressed()\n if key[pygame.K_a]:\n player2.moveLeft()\n if key[pygame.K_d]:\n player2.moveRight()\n if key[pygame.K_w]:\n player2.moveUp()\n if key[pygame.K_s]:\n player2.moveDown()\n player2.moveOn()\n if player.moto.rect.x < 0 or player.moto.rect.x > SCREEN_WIDTH:\n winner = 2\n running = False\n if player2.moto.rect.x < 0 or player2.moto.rect.x > SCREEN_WIDTH:\n winner = 1\n running = False\n if player.moto.rect.y < 0 or player.moto.rect.y > SCREEN_HEIGHT:\n winner = 2\n running = False\n if 
player2.moto.rect.y < 0 or player2.moto.rect.y > SCREEN_HEIGHT:\n winner = 1\n running = False\n screen.fill((0, 0, 0))\n counter1 = 0\n counter2 = 0\n coll_range = len(Game.walls[0]) - (player.moto.rect.width / 2 + 10)\n coll_range_2 = len(Game.walls[1]) - (player2.moto.rect.width / \n 2 + 10)\n for wall in Game.walls[0]:\n if player2.moto.rect.colliderect(wall.rect):\n winner = 1\n running = False\n if counter1 < coll_range and player.moto.rect.colliderect(wall\n .rect):\n winner = 2\n running = False\n counter1 += 1\n pygame.draw.rect(screen, (255, 0, 0), wall.rect)\n for wall in Game.walls[1]:\n if player.moto.rect.colliderect(wall.rect):\n winner = 2\n running = False\n if counter2 < coll_range_2 and player2.moto.rect.colliderect(\n wall.rect):\n winner = 1\n running = False\n counter2 += 1\n pygame.draw.rect(screen, (0, 0, 255), wall.rect)\n pygame.draw.rect(screen, (255, 200, 0), player.rect)\n screen.blit(player.moto.image, (player.moto.rect.x, player.moto\n .rect.y))\n pygame.draw.rect(screen, (255, 200, 0), player2.rect)\n screen.blit(player2.moto.image, (player2.moto.rect.x, player2.\n moto.rect.y))\n pygame.display.flip()\n print('Winner: ', winner)\n running = True\n clock = pygame.time.Clock()\n sound = pygame.mixer.Sound('blast.wav')\n sound.play(loops=0, maxtime=0, fade_ms=0)\n while running:\n clock.tick(60)\n for e in pygame.event.get():\n if e.type == pygame.JOYBUTTONDOWN:\n player1Button = player1_joystick.get_button(0)\n if player1Button > 0:\n running = False\n print('BACK TO MENU')\n return True\n player2Button = player2_joystick.get_button(0)\n if player2Button > 0:\n running = False\n print('BACK TO MENU')\n return True\n if e.type == pygame.KEYDOWN and (e.key == pygame.K_KP_ENTER or\n e.key == pygame.K_RETURN):\n running = False\n print('BACK TO MENU')\n return True\n end = pygame.image.load('gameover.png')\n screen.blit(end, (0.5 * SCREEN_WIDTH - 0.5 * 1024, 0.5 *\n SCREEN_HEIGHT - 0.5 * 768))\n screen.fill((0, 0, 0))\n screen.blit(end, 
(10, 10))\n if winner == 2:\n myfont = pygame.font.SysFont('monospace', 72)\n label = myfont.render('Blue won!', 1, (0, 0, 225))\n screen.blit(label, (0.5 * SCREEN_WIDTH - 0.5 * 500, 0.5 *\n SCREEN_HEIGHT - 0.5 * 750))\n else:\n myfont = pygame.font.SysFont('monospace', 72)\n label = myfont.render('Red won!', 1, (255, 0, 0))\n screen.blit(label, (0.5 * SCREEN_WIDTH - 0.5 * 500, 0.5 *\n SCREEN_HEIGHT - 0.5 * 750))\n pygame.display.flip()\n",
"step-2": "<mask token>\n\n\nclass Moto(pygame.sprite.Sprite):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\nclass Player(object):\n\n def __init__(self, player_num, px, py, sx, sy, start_direction):\n self.player_num = player_num\n self.rect = pygame.Rect(px, py, sx, sy)\n self.direction = start_direction\n self.moto = Moto(player_num, start_direction)\n self.moto.rect.x = px\n self.moto.rect.y = py\n\n def moveRight(self):\n if self.direction != 1:\n self.direction = 0\n self.moto.moveRight()\n\n def moveLeft(self):\n if self.direction != 0:\n self.direction = 1\n self.moto.moveLeft()\n\n def moveUp(self):\n if self.direction != 3:\n self.direction = 2\n self.moto.moveUp()\n\n def moveDown(self):\n if self.direction != 2:\n self.direction = 3\n self.moto.moveDown()\n\n def moveOn(self):\n if self.direction == 0:\n self.move(2, 0)\n if self.direction == 1:\n self.move(-2, 0)\n if self.direction == 2:\n self.move(0, -2)\n if self.direction == 3:\n self.move(0, 2)\n\n def move(self, dx, dy):\n if dx != 0:\n self.move_single_axis(dx, 0)\n self.moto.move_single_axis(dx, 0)\n if dy != 0:\n self.move_single_axis(0, dy)\n self.moto.move_single_axis(0, dy)\n\n def move_single_axis(self, dx, dy):\n self.rect.x += dx\n self.rect.y += dy\n Wall(self.player_num, (self.rect.centerx, self.rect.centery))\n\n\nclass Wall(object):\n\n def __init__(self, player_num, pos):\n Game.walls[player_num].append(self)\n self.rect = pygame.Rect(pos[0], pos[1], 3, 3)\n\n\nclass Game:\n walls = [[], []]\n\n def main(self):\n winner = 0\n screen = pygame.display.set_mode((SCREEN_WIDTH, SCREEN_HEIGHT))\n clock = pygame.time.Clock()\n Game.walls = [[], []]\n player = Player(0, SCREEN_WIDTH - 80, int(SCREEN_HEIGHT / 2), 2, 16, 1)\n player2 = Player(1, 80, int(SCREEN_HEIGHT / 2), 2, 16, 0)\n try:\n pygame.joystick.init()\n joysticks = [pygame.joystick.Joystick(x) for x in range(pygame.\n joystick.get_count())]\n joysticks[0].init()\n 
joysticks[1].init()\n player1_joystick = joysticks[0]\n player2_joystick = joysticks[1]\n except IndexError:\n player1_joystick = None\n player2_joystick = None\n end = pygame.image.load('number3.png')\n screen.fill((0, 0, 0))\n screen.blit(end, (0.5 * SCREEN_WIDTH - 0.5 * 500, 0.5 *\n SCREEN_HEIGHT - 0.5 * 500))\n pygame.display.flip()\n pygame.time.wait(1000)\n end = pygame.image.load('number2.png')\n screen.fill((0, 0, 0))\n screen.blit(end, (0.5 * SCREEN_WIDTH - 0.5 * 500, 0.5 *\n SCREEN_HEIGHT - 0.5 * 500))\n pygame.display.flip()\n pygame.time.wait(1000)\n end = pygame.image.load('number1.png')\n screen.fill((0, 0, 0))\n screen.blit(end, (0.5 * SCREEN_WIDTH - 0.5 * 500, 0.5 *\n SCREEN_HEIGHT - 0.5 * 500))\n pygame.display.flip()\n pygame.time.wait(1000)\n pygame.display.flip()\n running = True\n while running:\n clock.tick(60)\n for e in pygame.event.get():\n if e.type == pygame.QUIT:\n running = False\n if e.type == pygame.KEYDOWN and e.key == pygame.K_ESCAPE:\n running = False\n try:\n if e.type == pygame.locals.JOYAXISMOTION:\n player1jx, player1jy = player1_joystick.get_axis(0\n ), player1_joystick.get_axis(1)\n if player1jx < 0:\n player2.moveLeft()\n if player1jx > 0:\n player2.moveRight()\n if player1jy < 0:\n player2.moveUp()\n if player1jy > 0:\n player2.moveDown()\n player2jx, player2jy = player2_joystick.get_axis(0\n ), player2_joystick.get_axis(1)\n if player2jx < 0:\n player.moveLeft()\n if player2jx > 0:\n player.moveRight()\n if player2jy < 0:\n player.moveUp()\n if player2jy > 0:\n player.moveDown()\n except:\n pass\n key = pygame.key.get_pressed()\n if key[pygame.K_LEFT]:\n player.moveLeft()\n if key[pygame.K_RIGHT]:\n player.moveRight()\n if key[pygame.K_UP]:\n player.moveUp()\n if key[pygame.K_DOWN]:\n player.moveDown()\n player.moveOn()\n key = pygame.key.get_pressed()\n if key[pygame.K_a]:\n player2.moveLeft()\n if key[pygame.K_d]:\n player2.moveRight()\n if key[pygame.K_w]:\n player2.moveUp()\n if key[pygame.K_s]:\n player2.moveDown()\n 
player2.moveOn()\n if player.moto.rect.x < 0 or player.moto.rect.x > SCREEN_WIDTH:\n winner = 2\n running = False\n if player2.moto.rect.x < 0 or player2.moto.rect.x > SCREEN_WIDTH:\n winner = 1\n running = False\n if player.moto.rect.y < 0 or player.moto.rect.y > SCREEN_HEIGHT:\n winner = 2\n running = False\n if player2.moto.rect.y < 0 or player2.moto.rect.y > SCREEN_HEIGHT:\n winner = 1\n running = False\n screen.fill((0, 0, 0))\n counter1 = 0\n counter2 = 0\n coll_range = len(Game.walls[0]) - (player.moto.rect.width / 2 + 10)\n coll_range_2 = len(Game.walls[1]) - (player2.moto.rect.width / \n 2 + 10)\n for wall in Game.walls[0]:\n if player2.moto.rect.colliderect(wall.rect):\n winner = 1\n running = False\n if counter1 < coll_range and player.moto.rect.colliderect(wall\n .rect):\n winner = 2\n running = False\n counter1 += 1\n pygame.draw.rect(screen, (255, 0, 0), wall.rect)\n for wall in Game.walls[1]:\n if player.moto.rect.colliderect(wall.rect):\n winner = 2\n running = False\n if counter2 < coll_range_2 and player2.moto.rect.colliderect(\n wall.rect):\n winner = 1\n running = False\n counter2 += 1\n pygame.draw.rect(screen, (0, 0, 255), wall.rect)\n pygame.draw.rect(screen, (255, 200, 0), player.rect)\n screen.blit(player.moto.image, (player.moto.rect.x, player.moto\n .rect.y))\n pygame.draw.rect(screen, (255, 200, 0), player2.rect)\n screen.blit(player2.moto.image, (player2.moto.rect.x, player2.\n moto.rect.y))\n pygame.display.flip()\n print('Winner: ', winner)\n running = True\n clock = pygame.time.Clock()\n sound = pygame.mixer.Sound('blast.wav')\n sound.play(loops=0, maxtime=0, fade_ms=0)\n while running:\n clock.tick(60)\n for e in pygame.event.get():\n if e.type == pygame.JOYBUTTONDOWN:\n player1Button = player1_joystick.get_button(0)\n if player1Button > 0:\n running = False\n print('BACK TO MENU')\n return True\n player2Button = player2_joystick.get_button(0)\n if player2Button > 0:\n running = False\n print('BACK TO MENU')\n return True\n if 
e.type == pygame.KEYDOWN and (e.key == pygame.K_KP_ENTER or\n e.key == pygame.K_RETURN):\n running = False\n print('BACK TO MENU')\n return True\n end = pygame.image.load('gameover.png')\n screen.blit(end, (0.5 * SCREEN_WIDTH - 0.5 * 1024, 0.5 *\n SCREEN_HEIGHT - 0.5 * 768))\n screen.fill((0, 0, 0))\n screen.blit(end, (10, 10))\n if winner == 2:\n myfont = pygame.font.SysFont('monospace', 72)\n label = myfont.render('Blue won!', 1, (0, 0, 225))\n screen.blit(label, (0.5 * SCREEN_WIDTH - 0.5 * 500, 0.5 *\n SCREEN_HEIGHT - 0.5 * 750))\n else:\n myfont = pygame.font.SysFont('monospace', 72)\n label = myfont.render('Red won!', 1, (255, 0, 0))\n screen.blit(label, (0.5 * SCREEN_WIDTH - 0.5 * 500, 0.5 *\n SCREEN_HEIGHT - 0.5 * 750))\n pygame.display.flip()\n",
"step-3": "<mask token>\n\n\nclass Moto(pygame.sprite.Sprite):\n <mask token>\n\n def move_single_axis(self, dx, dy):\n self.rect.x += dx\n self.rect.y += dy\n\n def moveRight(self):\n self.direction = 0\n self.image = pygame.transform.rotate(self.orig_image, 0)\n\n def moveLeft(self):\n self.direction = 1\n self.image = pygame.transform.rotate(self.orig_image, 0)\n\n def moveUp(self):\n self.direction = 2\n self.image = pygame.transform.rotate(self.orig_image, 90)\n <mask token>\n\n\nclass Player(object):\n\n def __init__(self, player_num, px, py, sx, sy, start_direction):\n self.player_num = player_num\n self.rect = pygame.Rect(px, py, sx, sy)\n self.direction = start_direction\n self.moto = Moto(player_num, start_direction)\n self.moto.rect.x = px\n self.moto.rect.y = py\n\n def moveRight(self):\n if self.direction != 1:\n self.direction = 0\n self.moto.moveRight()\n\n def moveLeft(self):\n if self.direction != 0:\n self.direction = 1\n self.moto.moveLeft()\n\n def moveUp(self):\n if self.direction != 3:\n self.direction = 2\n self.moto.moveUp()\n\n def moveDown(self):\n if self.direction != 2:\n self.direction = 3\n self.moto.moveDown()\n\n def moveOn(self):\n if self.direction == 0:\n self.move(2, 0)\n if self.direction == 1:\n self.move(-2, 0)\n if self.direction == 2:\n self.move(0, -2)\n if self.direction == 3:\n self.move(0, 2)\n\n def move(self, dx, dy):\n if dx != 0:\n self.move_single_axis(dx, 0)\n self.moto.move_single_axis(dx, 0)\n if dy != 0:\n self.move_single_axis(0, dy)\n self.moto.move_single_axis(0, dy)\n\n def move_single_axis(self, dx, dy):\n self.rect.x += dx\n self.rect.y += dy\n Wall(self.player_num, (self.rect.centerx, self.rect.centery))\n\n\nclass Wall(object):\n\n def __init__(self, player_num, pos):\n Game.walls[player_num].append(self)\n self.rect = pygame.Rect(pos[0], pos[1], 3, 3)\n\n\nclass Game:\n walls = [[], []]\n\n def main(self):\n winner = 0\n screen = pygame.display.set_mode((SCREEN_WIDTH, SCREEN_HEIGHT))\n clock = 
pygame.time.Clock()\n Game.walls = [[], []]\n player = Player(0, SCREEN_WIDTH - 80, int(SCREEN_HEIGHT / 2), 2, 16, 1)\n player2 = Player(1, 80, int(SCREEN_HEIGHT / 2), 2, 16, 0)\n try:\n pygame.joystick.init()\n joysticks = [pygame.joystick.Joystick(x) for x in range(pygame.\n joystick.get_count())]\n joysticks[0].init()\n joysticks[1].init()\n player1_joystick = joysticks[0]\n player2_joystick = joysticks[1]\n except IndexError:\n player1_joystick = None\n player2_joystick = None\n end = pygame.image.load('number3.png')\n screen.fill((0, 0, 0))\n screen.blit(end, (0.5 * SCREEN_WIDTH - 0.5 * 500, 0.5 *\n SCREEN_HEIGHT - 0.5 * 500))\n pygame.display.flip()\n pygame.time.wait(1000)\n end = pygame.image.load('number2.png')\n screen.fill((0, 0, 0))\n screen.blit(end, (0.5 * SCREEN_WIDTH - 0.5 * 500, 0.5 *\n SCREEN_HEIGHT - 0.5 * 500))\n pygame.display.flip()\n pygame.time.wait(1000)\n end = pygame.image.load('number1.png')\n screen.fill((0, 0, 0))\n screen.blit(end, (0.5 * SCREEN_WIDTH - 0.5 * 500, 0.5 *\n SCREEN_HEIGHT - 0.5 * 500))\n pygame.display.flip()\n pygame.time.wait(1000)\n pygame.display.flip()\n running = True\n while running:\n clock.tick(60)\n for e in pygame.event.get():\n if e.type == pygame.QUIT:\n running = False\n if e.type == pygame.KEYDOWN and e.key == pygame.K_ESCAPE:\n running = False\n try:\n if e.type == pygame.locals.JOYAXISMOTION:\n player1jx, player1jy = player1_joystick.get_axis(0\n ), player1_joystick.get_axis(1)\n if player1jx < 0:\n player2.moveLeft()\n if player1jx > 0:\n player2.moveRight()\n if player1jy < 0:\n player2.moveUp()\n if player1jy > 0:\n player2.moveDown()\n player2jx, player2jy = player2_joystick.get_axis(0\n ), player2_joystick.get_axis(1)\n if player2jx < 0:\n player.moveLeft()\n if player2jx > 0:\n player.moveRight()\n if player2jy < 0:\n player.moveUp()\n if player2jy > 0:\n player.moveDown()\n except:\n pass\n key = pygame.key.get_pressed()\n if key[pygame.K_LEFT]:\n player.moveLeft()\n if key[pygame.K_RIGHT]:\n 
player.moveRight()\n if key[pygame.K_UP]:\n player.moveUp()\n if key[pygame.K_DOWN]:\n player.moveDown()\n player.moveOn()\n key = pygame.key.get_pressed()\n if key[pygame.K_a]:\n player2.moveLeft()\n if key[pygame.K_d]:\n player2.moveRight()\n if key[pygame.K_w]:\n player2.moveUp()\n if key[pygame.K_s]:\n player2.moveDown()\n player2.moveOn()\n if player.moto.rect.x < 0 or player.moto.rect.x > SCREEN_WIDTH:\n winner = 2\n running = False\n if player2.moto.rect.x < 0 or player2.moto.rect.x > SCREEN_WIDTH:\n winner = 1\n running = False\n if player.moto.rect.y < 0 or player.moto.rect.y > SCREEN_HEIGHT:\n winner = 2\n running = False\n if player2.moto.rect.y < 0 or player2.moto.rect.y > SCREEN_HEIGHT:\n winner = 1\n running = False\n screen.fill((0, 0, 0))\n counter1 = 0\n counter2 = 0\n coll_range = len(Game.walls[0]) - (player.moto.rect.width / 2 + 10)\n coll_range_2 = len(Game.walls[1]) - (player2.moto.rect.width / \n 2 + 10)\n for wall in Game.walls[0]:\n if player2.moto.rect.colliderect(wall.rect):\n winner = 1\n running = False\n if counter1 < coll_range and player.moto.rect.colliderect(wall\n .rect):\n winner = 2\n running = False\n counter1 += 1\n pygame.draw.rect(screen, (255, 0, 0), wall.rect)\n for wall in Game.walls[1]:\n if player.moto.rect.colliderect(wall.rect):\n winner = 2\n running = False\n if counter2 < coll_range_2 and player2.moto.rect.colliderect(\n wall.rect):\n winner = 1\n running = False\n counter2 += 1\n pygame.draw.rect(screen, (0, 0, 255), wall.rect)\n pygame.draw.rect(screen, (255, 200, 0), player.rect)\n screen.blit(player.moto.image, (player.moto.rect.x, player.moto\n .rect.y))\n pygame.draw.rect(screen, (255, 200, 0), player2.rect)\n screen.blit(player2.moto.image, (player2.moto.rect.x, player2.\n moto.rect.y))\n pygame.display.flip()\n print('Winner: ', winner)\n running = True\n clock = pygame.time.Clock()\n sound = pygame.mixer.Sound('blast.wav')\n sound.play(loops=0, maxtime=0, fade_ms=0)\n while running:\n clock.tick(60)\n for e 
in pygame.event.get():\n if e.type == pygame.JOYBUTTONDOWN:\n player1Button = player1_joystick.get_button(0)\n if player1Button > 0:\n running = False\n print('BACK TO MENU')\n return True\n player2Button = player2_joystick.get_button(0)\n if player2Button > 0:\n running = False\n print('BACK TO MENU')\n return True\n if e.type == pygame.KEYDOWN and (e.key == pygame.K_KP_ENTER or\n e.key == pygame.K_RETURN):\n running = False\n print('BACK TO MENU')\n return True\n end = pygame.image.load('gameover.png')\n screen.blit(end, (0.5 * SCREEN_WIDTH - 0.5 * 1024, 0.5 *\n SCREEN_HEIGHT - 0.5 * 768))\n screen.fill((0, 0, 0))\n screen.blit(end, (10, 10))\n if winner == 2:\n myfont = pygame.font.SysFont('monospace', 72)\n label = myfont.render('Blue won!', 1, (0, 0, 225))\n screen.blit(label, (0.5 * SCREEN_WIDTH - 0.5 * 500, 0.5 *\n SCREEN_HEIGHT - 0.5 * 750))\n else:\n myfont = pygame.font.SysFont('monospace', 72)\n label = myfont.render('Red won!', 1, (255, 0, 0))\n screen.blit(label, (0.5 * SCREEN_WIDTH - 0.5 * 500, 0.5 *\n SCREEN_HEIGHT - 0.5 * 750))\n pygame.display.flip()\n",
"step-4": "<mask token>\nSCREEN_WIDTH = 1280\nSCREEN_HEIGHT = 1024\n\n\nclass Moto(pygame.sprite.Sprite):\n\n def __init__(self, player_num, start_direction):\n pygame.sprite.Sprite.__init__(self)\n self.image = pygame.image.load('motor' + str(player_num) + '.png'\n ).convert()\n self.orig_image = self.image\n self.rect = self.image.get_rect()\n self.direction = start_direction\n\n def move_single_axis(self, dx, dy):\n self.rect.x += dx\n self.rect.y += dy\n\n def moveRight(self):\n self.direction = 0\n self.image = pygame.transform.rotate(self.orig_image, 0)\n\n def moveLeft(self):\n self.direction = 1\n self.image = pygame.transform.rotate(self.orig_image, 0)\n\n def moveUp(self):\n self.direction = 2\n self.image = pygame.transform.rotate(self.orig_image, 90)\n\n def moveDown(self):\n self.direction = 3\n self.image = pygame.transform.rotate(self.orig_image, 90)\n\n\nclass Player(object):\n\n def __init__(self, player_num, px, py, sx, sy, start_direction):\n self.player_num = player_num\n self.rect = pygame.Rect(px, py, sx, sy)\n self.direction = start_direction\n self.moto = Moto(player_num, start_direction)\n self.moto.rect.x = px\n self.moto.rect.y = py\n\n def moveRight(self):\n if self.direction != 1:\n self.direction = 0\n self.moto.moveRight()\n\n def moveLeft(self):\n if self.direction != 0:\n self.direction = 1\n self.moto.moveLeft()\n\n def moveUp(self):\n if self.direction != 3:\n self.direction = 2\n self.moto.moveUp()\n\n def moveDown(self):\n if self.direction != 2:\n self.direction = 3\n self.moto.moveDown()\n\n def moveOn(self):\n if self.direction == 0:\n self.move(2, 0)\n if self.direction == 1:\n self.move(-2, 0)\n if self.direction == 2:\n self.move(0, -2)\n if self.direction == 3:\n self.move(0, 2)\n\n def move(self, dx, dy):\n if dx != 0:\n self.move_single_axis(dx, 0)\n self.moto.move_single_axis(dx, 0)\n if dy != 0:\n self.move_single_axis(0, dy)\n self.moto.move_single_axis(0, dy)\n\n def move_single_axis(self, dx, dy):\n self.rect.x += 
dx\n self.rect.y += dy\n Wall(self.player_num, (self.rect.centerx, self.rect.centery))\n\n\nclass Wall(object):\n\n def __init__(self, player_num, pos):\n Game.walls[player_num].append(self)\n self.rect = pygame.Rect(pos[0], pos[1], 3, 3)\n\n\nclass Game:\n walls = [[], []]\n\n def main(self):\n winner = 0\n screen = pygame.display.set_mode((SCREEN_WIDTH, SCREEN_HEIGHT))\n clock = pygame.time.Clock()\n Game.walls = [[], []]\n player = Player(0, SCREEN_WIDTH - 80, int(SCREEN_HEIGHT / 2), 2, 16, 1)\n player2 = Player(1, 80, int(SCREEN_HEIGHT / 2), 2, 16, 0)\n try:\n pygame.joystick.init()\n joysticks = [pygame.joystick.Joystick(x) for x in range(pygame.\n joystick.get_count())]\n joysticks[0].init()\n joysticks[1].init()\n player1_joystick = joysticks[0]\n player2_joystick = joysticks[1]\n except IndexError:\n player1_joystick = None\n player2_joystick = None\n end = pygame.image.load('number3.png')\n screen.fill((0, 0, 0))\n screen.blit(end, (0.5 * SCREEN_WIDTH - 0.5 * 500, 0.5 *\n SCREEN_HEIGHT - 0.5 * 500))\n pygame.display.flip()\n pygame.time.wait(1000)\n end = pygame.image.load('number2.png')\n screen.fill((0, 0, 0))\n screen.blit(end, (0.5 * SCREEN_WIDTH - 0.5 * 500, 0.5 *\n SCREEN_HEIGHT - 0.5 * 500))\n pygame.display.flip()\n pygame.time.wait(1000)\n end = pygame.image.load('number1.png')\n screen.fill((0, 0, 0))\n screen.blit(end, (0.5 * SCREEN_WIDTH - 0.5 * 500, 0.5 *\n SCREEN_HEIGHT - 0.5 * 500))\n pygame.display.flip()\n pygame.time.wait(1000)\n pygame.display.flip()\n running = True\n while running:\n clock.tick(60)\n for e in pygame.event.get():\n if e.type == pygame.QUIT:\n running = False\n if e.type == pygame.KEYDOWN and e.key == pygame.K_ESCAPE:\n running = False\n try:\n if e.type == pygame.locals.JOYAXISMOTION:\n player1jx, player1jy = player1_joystick.get_axis(0\n ), player1_joystick.get_axis(1)\n if player1jx < 0:\n player2.moveLeft()\n if player1jx > 0:\n player2.moveRight()\n if player1jy < 0:\n player2.moveUp()\n if player1jy > 0:\n 
player2.moveDown()\n player2jx, player2jy = player2_joystick.get_axis(0\n ), player2_joystick.get_axis(1)\n if player2jx < 0:\n player.moveLeft()\n if player2jx > 0:\n player.moveRight()\n if player2jy < 0:\n player.moveUp()\n if player2jy > 0:\n player.moveDown()\n except:\n pass\n key = pygame.key.get_pressed()\n if key[pygame.K_LEFT]:\n player.moveLeft()\n if key[pygame.K_RIGHT]:\n player.moveRight()\n if key[pygame.K_UP]:\n player.moveUp()\n if key[pygame.K_DOWN]:\n player.moveDown()\n player.moveOn()\n key = pygame.key.get_pressed()\n if key[pygame.K_a]:\n player2.moveLeft()\n if key[pygame.K_d]:\n player2.moveRight()\n if key[pygame.K_w]:\n player2.moveUp()\n if key[pygame.K_s]:\n player2.moveDown()\n player2.moveOn()\n if player.moto.rect.x < 0 or player.moto.rect.x > SCREEN_WIDTH:\n winner = 2\n running = False\n if player2.moto.rect.x < 0 or player2.moto.rect.x > SCREEN_WIDTH:\n winner = 1\n running = False\n if player.moto.rect.y < 0 or player.moto.rect.y > SCREEN_HEIGHT:\n winner = 2\n running = False\n if player2.moto.rect.y < 0 or player2.moto.rect.y > SCREEN_HEIGHT:\n winner = 1\n running = False\n screen.fill((0, 0, 0))\n counter1 = 0\n counter2 = 0\n coll_range = len(Game.walls[0]) - (player.moto.rect.width / 2 + 10)\n coll_range_2 = len(Game.walls[1]) - (player2.moto.rect.width / \n 2 + 10)\n for wall in Game.walls[0]:\n if player2.moto.rect.colliderect(wall.rect):\n winner = 1\n running = False\n if counter1 < coll_range and player.moto.rect.colliderect(wall\n .rect):\n winner = 2\n running = False\n counter1 += 1\n pygame.draw.rect(screen, (255, 0, 0), wall.rect)\n for wall in Game.walls[1]:\n if player.moto.rect.colliderect(wall.rect):\n winner = 2\n running = False\n if counter2 < coll_range_2 and player2.moto.rect.colliderect(\n wall.rect):\n winner = 1\n running = False\n counter2 += 1\n pygame.draw.rect(screen, (0, 0, 255), wall.rect)\n pygame.draw.rect(screen, (255, 200, 0), player.rect)\n screen.blit(player.moto.image, (player.moto.rect.x, 
player.moto\n .rect.y))\n pygame.draw.rect(screen, (255, 200, 0), player2.rect)\n screen.blit(player2.moto.image, (player2.moto.rect.x, player2.\n moto.rect.y))\n pygame.display.flip()\n print('Winner: ', winner)\n running = True\n clock = pygame.time.Clock()\n sound = pygame.mixer.Sound('blast.wav')\n sound.play(loops=0, maxtime=0, fade_ms=0)\n while running:\n clock.tick(60)\n for e in pygame.event.get():\n if e.type == pygame.JOYBUTTONDOWN:\n player1Button = player1_joystick.get_button(0)\n if player1Button > 0:\n running = False\n print('BACK TO MENU')\n return True\n player2Button = player2_joystick.get_button(0)\n if player2Button > 0:\n running = False\n print('BACK TO MENU')\n return True\n if e.type == pygame.KEYDOWN and (e.key == pygame.K_KP_ENTER or\n e.key == pygame.K_RETURN):\n running = False\n print('BACK TO MENU')\n return True\n end = pygame.image.load('gameover.png')\n screen.blit(end, (0.5 * SCREEN_WIDTH - 0.5 * 1024, 0.5 *\n SCREEN_HEIGHT - 0.5 * 768))\n screen.fill((0, 0, 0))\n screen.blit(end, (10, 10))\n if winner == 2:\n myfont = pygame.font.SysFont('monospace', 72)\n label = myfont.render('Blue won!', 1, (0, 0, 225))\n screen.blit(label, (0.5 * SCREEN_WIDTH - 0.5 * 500, 0.5 *\n SCREEN_HEIGHT - 0.5 * 750))\n else:\n myfont = pygame.font.SysFont('monospace', 72)\n label = myfont.render('Red won!', 1, (255, 0, 0))\n screen.blit(label, (0.5 * SCREEN_WIDTH - 0.5 * 500, 0.5 *\n SCREEN_HEIGHT - 0.5 * 750))\n pygame.display.flip()\n",
"step-5": "import os\nimport sys\nimport random\nimport pygame\nimport time\nfrom pygame import locals\n\nSCREEN_WIDTH = 1280\nSCREEN_HEIGHT = 1024\n\n\nclass Moto(pygame.sprite.Sprite):\n\n def __init__(self, player_num, start_direction):\n pygame.sprite.Sprite.__init__(self)\n\n self.image = pygame.image.load(\"motor\" + str(player_num) + \".png\").convert()\n self.orig_image = self.image\n\n # Fetch the rectangle object that has the dimensions of the image\n # Update the position of this object by setting the values of rect.x and rect.y\n self.rect = self.image.get_rect()\n\n self.direction = start_direction\n\n def move_single_axis(self, dx, dy):\n # Move the rect\n self.rect.x += dx\n self.rect.y += dy\n\n def moveRight(self):\n self.direction = 0\n self.image = pygame.transform.rotate(self.orig_image, 0)\n\n def moveLeft(self):\n self.direction = 1\n self.image = pygame.transform.rotate(self.orig_image, 0)\n\n def moveUp(self):\n self.direction = 2\n self.image = pygame.transform.rotate(self.orig_image, 90)\n\n def moveDown(self):\n self.direction = 3\n self.image = pygame.transform.rotate(self.orig_image, 90)\n\n\n# Class for the orange dude\nclass Player(object):\n\n def __init__(self, player_num, px, py, sx, sy, start_direction):\n self.player_num = player_num\n self.rect = pygame.Rect(px, py, sx, sy)\n self.direction = start_direction\n self.moto = Moto(player_num, start_direction)\n self.moto.rect.x = px\n self.moto.rect.y = py\n\n def moveRight(self):\n if self.direction != 1:\n self.direction = 0\n self.moto.moveRight()\n\n def moveLeft(self):\n if self.direction != 0:\n self.direction = 1\n self.moto.moveLeft()\n\n def moveUp(self):\n if self.direction != 3:\n self.direction = 2\n self.moto.moveUp()\n\n def moveDown(self):\n if self.direction != 2:\n self.direction = 3\n self.moto.moveDown()\n\n def moveOn(self):\n if self.direction == 0:\n self.move(2, 0)\n if self.direction == 1:\n self.move(-2, 0)\n if self.direction == 2:\n self.move(0, -2)\n if 
self.direction == 3:\n self.move(0, 2)\n\n def move(self, dx, dy):\n\n # Move each axis separately. Note that this checks for collisions both times.\n if dx != 0:\n self.move_single_axis(dx, 0)\n self.moto.move_single_axis(dx, 0)\n if dy != 0:\n self.move_single_axis(0, dy)\n self.moto.move_single_axis(0, dy)\n\n def move_single_axis(self, dx, dy):\n\n # Move the rect\n self.rect.x += dx\n self.rect.y += dy\n\n # Draw a wall (after the movement)\n Wall(self.player_num, (self.rect.centerx, self.rect.centery))\n\n\n# Nice class to hold a wall rect\nclass Wall(object):\n\n def __init__(self, player_num, pos):\n Game.walls[player_num].append(self)\n self.rect = pygame.Rect(pos[0], pos[1], 3, 3)\n\n\n# MAIN\nclass Game:\n\n walls = [[], []]\n\n def main(self):\n\n winner = 0\n\n screen = pygame.display.set_mode((SCREEN_WIDTH, SCREEN_HEIGHT))\n\n clock = pygame.time.Clock()\n # walls for 2 players: lists in list\n Game.walls = [[], []]\n # starting positions\n player = Player(0, SCREEN_WIDTH - 80, int(SCREEN_HEIGHT / 2), 2, 16, 1)\n player2 = Player(1, 80, int(SCREEN_HEIGHT / 2), 2, 16, 0)\n\n # JOYSTICK\n try:\n pygame.joystick.init()\n joysticks = [pygame.joystick.Joystick(x) for x in range(pygame.joystick.get_count())]\n joysticks[0].init()\n joysticks[1].init()\n player1_joystick = joysticks[0]\n player2_joystick = joysticks[1]\n except IndexError:\n player1_joystick = None\n player2_joystick = None\n\n end = pygame.image.load('number3.png')\n screen.fill((0, 0, 0))\n screen.blit(end, ((0.5 * SCREEN_WIDTH) - (0.5 * 500), (0.5 * SCREEN_HEIGHT) - (0.5 * 500)))\n pygame.display.flip()\n pygame.time.wait(1000)\n\n end = pygame.image.load('number2.png')\n screen.fill((0, 0, 0))\n screen.blit(end, ((0.5 * SCREEN_WIDTH) - (0.5 * 500), (0.5 * SCREEN_HEIGHT) - (0.5 * 500)))\n pygame.display.flip()\n pygame.time.wait(1000)\n\n end = pygame.image.load('number1.png')\n screen.fill((0, 0, 0))\n screen.blit(end, ((0.5 * SCREEN_WIDTH) - (0.5 * 500), (0.5 * SCREEN_HEIGHT) - (0.5 * 
500)))\n pygame.display.flip()\n pygame.time.wait(1000)\n\n # end = pygame.image.load('arcade.jpg').convert()\n # screen.blit(end, ((0.5 * SCREEN_WIDTH) - (0.5 * 500), (0.5 * SCREEN_HEIGHT) - (0.5 * 500)))\n pygame.display.flip()\n\n # background_image = pygame.transform.scale(pygame.image.load('arcade.jpg').convert(), (1280, 1024))\n # screen.blit(background_image, [0, 0])\n\n running = True\n while running:\n clock.tick(60)\n\n for e in pygame.event.get():\n if e.type == pygame.QUIT:\n running = False\n if e.type == pygame.KEYDOWN and e.key == pygame.K_ESCAPE:\n running = False\n\n # JOYSTICK\n try:\n if e.type == pygame.locals.JOYAXISMOTION:\n player1jx, player1jy = player1_joystick.get_axis(0), player1_joystick.get_axis(1)\n if player1jx < 0:\n player2.moveLeft()\n if player1jx > 0:\n player2.moveRight()\n if player1jy < 0:\n player2.moveUp()\n if player1jy > 0:\n player2.moveDown()\n player2jx, player2jy = player2_joystick.get_axis(0), player2_joystick.get_axis(1)\n if player2jx < 0:\n player.moveLeft()\n if player2jx > 0:\n player.moveRight()\n if player2jy < 0:\n player.moveUp()\n if player2jy > 0:\n player.moveDown()\n except:\n pass\n\n # PLAYER 1\n # Move the player if an arrow key is pressed\n key = pygame.key.get_pressed()\n if key[pygame.K_LEFT]:\n player.moveLeft()\n if key[pygame.K_RIGHT]:\n player.moveRight()\n if key[pygame.K_UP]:\n player.moveUp()\n if key[pygame.K_DOWN]:\n player.moveDown()\n\n player.moveOn()\n\n # PLAYER 2\n key = pygame.key.get_pressed()\n if key[pygame.K_a]:\n player2.moveLeft()\n if key[pygame.K_d]:\n player2.moveRight()\n if key[pygame.K_w]:\n player2.moveUp()\n if key[pygame.K_s]:\n player2.moveDown()\n\n player2.moveOn()\n\n # check borders\n if player.moto.rect.x < 0 or player.moto.rect.x > SCREEN_WIDTH:\n winner = 2\n running = False\n if player2.moto.rect.x < 0 or player2.moto.rect.x > SCREEN_WIDTH:\n winner = 1\n running = False\n if player.moto.rect.y < 0 or player.moto.rect.y > SCREEN_HEIGHT:\n winner = 2\n running 
= False\n if player2.moto.rect.y < 0 or player2.moto.rect.y > SCREEN_HEIGHT:\n winner = 1\n running = False\n # Draw the scene\n # screen.blit(background_image, [0, 0])\n # pygame.display.flip()\n screen.fill((0, 0, 0))\n\n # Player 1 walls\n counter1 = 0\n counter2 = 0\n coll_range = len(Game.walls[0]) - (player.moto.rect.width / 2 + 10)\n coll_range_2 = len(Game.walls[1]) - (player2.moto.rect.width / 2 + 10)\n for wall in Game.walls[0]:\n if player2.moto.rect.colliderect(wall.rect):\n winner = 1\n running = False\n if (counter1 < coll_range) and player.moto.rect.colliderect(wall.rect):\n winner = 2\n running = False\n counter1 += 1\n pygame.draw.rect(screen, (255, 0, 0), wall.rect)\n # Player 2 walls\n for wall in Game.walls[1]:\n if player.moto.rect.colliderect(wall.rect):\n winner = 2\n running = False\n if (counter2 < coll_range_2) and player2.moto.rect.colliderect(wall.rect):\n winner = 1\n running = False\n counter2 += 1\n pygame.draw.rect(screen, (0, 0, 255), wall.rect)\n\n # Player 1\n pygame.draw.rect(screen, (255, 200, 0), player.rect)\n screen.blit(player.moto.image, (player.moto.rect.x, player.moto.rect.y))\n\n # Player 2\n pygame.draw.rect(screen, (255, 200, 0), player2.rect)\n screen.blit(player2.moto.image, (player2.moto.rect.x, player2.moto.rect.y))\n\n pygame.display.flip()\n\n # GAME OVER\n print(\"Winner: \", winner)\n running = True\n clock = pygame.time.Clock()\n sound = pygame.mixer.Sound('blast.wav')\n sound.play(loops=0, maxtime=0, fade_ms=0)\n\n while running:\n\n clock.tick(60)\n\n for e in pygame.event.get():\n if e.type == pygame.JOYBUTTONDOWN:\n player1Button = player1_joystick.get_button(0)\n if (player1Button > 0):\n running = False\n print(\"BACK TO MENU\")\n return True\n player2Button = player2_joystick.get_button(0)\n if (player2Button > 0):\n running = False\n print(\"BACK TO MENU\")\n return True\n\n if e.type == pygame.KEYDOWN and (e.key == pygame.K_KP_ENTER or e.key == pygame.K_RETURN):\n running = False\n print(\"BACK TO 
MENU\")\n return True\n\n end = pygame.image.load('gameover.png')\n screen.blit(end, ((0.5 * SCREEN_WIDTH) - (0.5 * 1024), (0.5 * SCREEN_HEIGHT) - (0.5 * 768)))\n screen.fill((0, 0, 0))\n screen.blit(end, (10, 10))\n if winner == 2:\n myfont = pygame.font.SysFont(\"monospace\", 72)\n label = myfont.render('Blue won!', 1, (0, 0, 225))\n screen.blit(label, ((0.5 * SCREEN_WIDTH) - (0.5 * 500), (0.5 * SCREEN_HEIGHT) - (0.5 * 750)))\n else:\n myfont = pygame.font.SysFont(\"monospace\", 72)\n label = myfont.render('Red won!', 1, (255, 0, 0))\n screen.blit(label, ((0.5 * SCREEN_WIDTH) - (0.5 * 500), (0.5 * SCREEN_HEIGHT) - (0.5 * 750)))\n\n pygame.display.flip()\n",
"step-ids": [
13,
15,
19,
22,
24
]
}
|
[
13,
15,
19,
22,
24
] |
<|reserved_special_token_0|>
def remove_zero_bars(dgm):
"""
remove zero bars from diagram
"""
inds = dgm[:, 0] != dgm[:, 1]
return dgm[inds, :]
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def remove_filler(dgm, val=np.inf):
"""
remove filler rows from diagram
"""
inds = dgm[:, 0] != val
return dgm[inds, :]
def remove_zero_bars(dgm):
"""
remove zero bars from diagram
"""
inds = dgm[:, 0] != dgm[:, 1]
return dgm[inds, :]
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def remove_filler(dgm, val=np.inf):
"""
remove filler rows from diagram
"""
inds = dgm[:, 0] != val
return dgm[inds, :]
def remove_zero_bars(dgm):
"""
remove zero bars from diagram
"""
inds = dgm[:, 0] != dgm[:, 1]
return dgm[inds, :]
def remove_infinite_bars(dgm, issub):
"""
remove infinite bars from diagram
"""
if issub:
inds = dgm[:, 1] != np.inf
return dgm[inds, :]
else:
inds = dgm[:, 1] != -np.inf
return dgm[inds, :]
<|reserved_special_token_1|>
import torch
import numpy as np
def remove_filler(dgm, val=np.inf):
"""
remove filler rows from diagram
"""
inds = dgm[:, 0] != val
return dgm[inds, :]
def remove_zero_bars(dgm):
"""
remove zero bars from diagram
"""
inds = dgm[:, 0] != dgm[:, 1]
return dgm[inds, :]
def remove_infinite_bars(dgm, issub):
"""
remove infinite bars from diagram
"""
if issub:
inds = dgm[:, 1] != np.inf
return dgm[inds, :]
else:
inds = dgm[:, 1] != -np.inf
return dgm[inds, :]
<|reserved_special_token_1|>
# processing functions for diagrams
import torch
import numpy as np
def remove_filler(dgm, val=np.inf):
"""
remove filler rows from diagram
"""
inds = (dgm[:,0] != val)
return dgm[inds,:]
def remove_zero_bars(dgm):
"""
remove zero bars from diagram
"""
inds = dgm[:,0] != dgm[:,1]
return dgm[inds,:]
def remove_infinite_bars(dgm, issub):
"""
remove infinite bars from diagram
"""
if issub:
inds = dgm[:, 1] != np.inf
return dgm[inds,:]
else:
inds = dgm[:, 1] != -np.inf
return dgm[inds,:]
|
flexible
|
{
"blob_id": "ac459bff6d4281ce07b70dbccde3243412ddb414",
"index": 3155,
"step-1": "<mask token>\n\n\ndef remove_zero_bars(dgm):\n \"\"\"\n remove zero bars from diagram\n \"\"\"\n inds = dgm[:, 0] != dgm[:, 1]\n return dgm[inds, :]\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef remove_filler(dgm, val=np.inf):\n \"\"\"\n remove filler rows from diagram\n \"\"\"\n inds = dgm[:, 0] != val\n return dgm[inds, :]\n\n\ndef remove_zero_bars(dgm):\n \"\"\"\n remove zero bars from diagram\n \"\"\"\n inds = dgm[:, 0] != dgm[:, 1]\n return dgm[inds, :]\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef remove_filler(dgm, val=np.inf):\n \"\"\"\n remove filler rows from diagram\n \"\"\"\n inds = dgm[:, 0] != val\n return dgm[inds, :]\n\n\ndef remove_zero_bars(dgm):\n \"\"\"\n remove zero bars from diagram\n \"\"\"\n inds = dgm[:, 0] != dgm[:, 1]\n return dgm[inds, :]\n\n\ndef remove_infinite_bars(dgm, issub):\n \"\"\"\n remove infinite bars from diagram\n \"\"\"\n if issub:\n inds = dgm[:, 1] != np.inf\n return dgm[inds, :]\n else:\n inds = dgm[:, 1] != -np.inf\n return dgm[inds, :]\n",
"step-4": "import torch\nimport numpy as np\n\n\ndef remove_filler(dgm, val=np.inf):\n \"\"\"\n remove filler rows from diagram\n \"\"\"\n inds = dgm[:, 0] != val\n return dgm[inds, :]\n\n\ndef remove_zero_bars(dgm):\n \"\"\"\n remove zero bars from diagram\n \"\"\"\n inds = dgm[:, 0] != dgm[:, 1]\n return dgm[inds, :]\n\n\ndef remove_infinite_bars(dgm, issub):\n \"\"\"\n remove infinite bars from diagram\n \"\"\"\n if issub:\n inds = dgm[:, 1] != np.inf\n return dgm[inds, :]\n else:\n inds = dgm[:, 1] != -np.inf\n return dgm[inds, :]\n",
"step-5": "# processing functions for diagrams\r\n\r\nimport torch\r\nimport numpy as np\r\n\r\ndef remove_filler(dgm, val=np.inf):\r\n \"\"\"\r\n remove filler rows from diagram\r\n \"\"\"\r\n inds = (dgm[:,0] != val)\r\n return dgm[inds,:]\r\n\r\n\r\ndef remove_zero_bars(dgm):\r\n \"\"\"\r\n remove zero bars from diagram\r\n \"\"\"\r\n inds = dgm[:,0] != dgm[:,1]\r\n return dgm[inds,:]\r\n\r\n\r\ndef remove_infinite_bars(dgm, issub):\r\n \"\"\"\r\n remove infinite bars from diagram\r\n \"\"\"\r\n if issub:\r\n inds = dgm[:, 1] != np.inf\r\n return dgm[inds,:]\r\n else:\r\n inds = dgm[:, 1] != -np.inf\r\n return dgm[inds,:]\r\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
# __author__ = 'Vasudev Gupta'
import tf_lightning as tl
import tensorflow as tf
class TestModel(tl.LightningModule):
# just a random model with random dataset
def __init__(self):
# simple test model
super().__init__()
self.model = tf.keras.Sequential([
tf.keras.layers.Dense(5),
tf.keras.layers.Dense(2)
])
def call(self, dataset):
return self.model(dataset)
def configure_optimizers(self):
return tf.keras.optimizers.Adam(0.1),
def training_step(self, batch, batch_idx, optimizer_idx):
pred = self(batch)
loss = tf.reduce_mean(pred)
log = {'batch_idx': batch_idx, 'tr_loss': loss}
result = tl.TrainResult(
loss, self.model.trainable_variables, log=log)
return result
def validation_step(self, batch, batch_idx, optimizer_idx):
pred = self(batch)
loss = tf.reduce_mean(pred)
log = {'batch_idx': batch_idx, 'val_loss': loss}
result = tl.EvalResult(loss, log=log)
return result
def checkpointer(self):
return tf.train.Checkpoint(m=self.model,
opt0=self.optimizer_0)
class TestDataLoader(tl.LightningDataModule):
# using random dataset
def __init__(self):
self.batch_size = 32
def setup(self):
self.tr_dataset = tf.random.normal((256, 7))
self.val_dataset = tf.random.normal((64, 7))
def train_dataloader(self):
dataset = tf.data.Dataset.from_tensor_slices(
self.tr_dataset).batch(self.batch_size)
return dataset
def val_dataloader(self):
dataset = tf.data.Dataset.from_tensor_slices(
self.val_dataset).batch(self.batch_size)
return dataset
if __name__ == '__main__':
model = TestModel()
dataloader = TestDataLoader()
trainer = tl.Trainer()
trainer.fit(model, dataloader)
|
normal
|
{
"blob_id": "f2397ba3fe1452238f251111f35b06b4a93e0359",
"index": 2441,
"step-1": "<mask token>\n\n\nclass TestModel(tl.LightningModule):\n <mask token>\n <mask token>\n <mask token>\n\n def training_step(self, batch, batch_idx, optimizer_idx):\n pred = self(batch)\n loss = tf.reduce_mean(pred)\n log = {'batch_idx': batch_idx, 'tr_loss': loss}\n result = tl.TrainResult(loss, self.model.trainable_variables, log=log)\n return result\n <mask token>\n <mask token>\n\n\nclass TestDataLoader(tl.LightningDataModule):\n\n def __init__(self):\n self.batch_size = 32\n\n def setup(self):\n self.tr_dataset = tf.random.normal((256, 7))\n self.val_dataset = tf.random.normal((64, 7))\n\n def train_dataloader(self):\n dataset = tf.data.Dataset.from_tensor_slices(self.tr_dataset).batch(\n self.batch_size)\n return dataset\n\n def val_dataloader(self):\n dataset = tf.data.Dataset.from_tensor_slices(self.val_dataset).batch(\n self.batch_size)\n return dataset\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass TestModel(tl.LightningModule):\n\n def __init__(self):\n super().__init__()\n self.model = tf.keras.Sequential([tf.keras.layers.Dense(5), tf.\n keras.layers.Dense(2)])\n\n def call(self, dataset):\n return self.model(dataset)\n\n def configure_optimizers(self):\n return tf.keras.optimizers.Adam(0.1),\n\n def training_step(self, batch, batch_idx, optimizer_idx):\n pred = self(batch)\n loss = tf.reduce_mean(pred)\n log = {'batch_idx': batch_idx, 'tr_loss': loss}\n result = tl.TrainResult(loss, self.model.trainable_variables, log=log)\n return result\n\n def validation_step(self, batch, batch_idx, optimizer_idx):\n pred = self(batch)\n loss = tf.reduce_mean(pred)\n log = {'batch_idx': batch_idx, 'val_loss': loss}\n result = tl.EvalResult(loss, log=log)\n return result\n <mask token>\n\n\nclass TestDataLoader(tl.LightningDataModule):\n\n def __init__(self):\n self.batch_size = 32\n\n def setup(self):\n self.tr_dataset = tf.random.normal((256, 7))\n self.val_dataset = tf.random.normal((64, 7))\n\n def train_dataloader(self):\n dataset = tf.data.Dataset.from_tensor_slices(self.tr_dataset).batch(\n self.batch_size)\n return dataset\n\n def val_dataloader(self):\n dataset = tf.data.Dataset.from_tensor_slices(self.val_dataset).batch(\n self.batch_size)\n return dataset\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass TestModel(tl.LightningModule):\n\n def __init__(self):\n super().__init__()\n self.model = tf.keras.Sequential([tf.keras.layers.Dense(5), tf.\n keras.layers.Dense(2)])\n\n def call(self, dataset):\n return self.model(dataset)\n\n def configure_optimizers(self):\n return tf.keras.optimizers.Adam(0.1),\n\n def training_step(self, batch, batch_idx, optimizer_idx):\n pred = self(batch)\n loss = tf.reduce_mean(pred)\n log = {'batch_idx': batch_idx, 'tr_loss': loss}\n result = tl.TrainResult(loss, self.model.trainable_variables, log=log)\n return result\n\n def validation_step(self, batch, batch_idx, optimizer_idx):\n pred = self(batch)\n loss = tf.reduce_mean(pred)\n log = {'batch_idx': batch_idx, 'val_loss': loss}\n result = tl.EvalResult(loss, log=log)\n return result\n\n def checkpointer(self):\n return tf.train.Checkpoint(m=self.model, opt0=self.optimizer_0)\n\n\nclass TestDataLoader(tl.LightningDataModule):\n\n def __init__(self):\n self.batch_size = 32\n\n def setup(self):\n self.tr_dataset = tf.random.normal((256, 7))\n self.val_dataset = tf.random.normal((64, 7))\n\n def train_dataloader(self):\n dataset = tf.data.Dataset.from_tensor_slices(self.tr_dataset).batch(\n self.batch_size)\n return dataset\n\n def val_dataloader(self):\n dataset = tf.data.Dataset.from_tensor_slices(self.val_dataset).batch(\n self.batch_size)\n return dataset\n\n\n<mask token>\n",
"step-4": "import tf_lightning as tl\nimport tensorflow as tf\n\n\nclass TestModel(tl.LightningModule):\n\n def __init__(self):\n super().__init__()\n self.model = tf.keras.Sequential([tf.keras.layers.Dense(5), tf.\n keras.layers.Dense(2)])\n\n def call(self, dataset):\n return self.model(dataset)\n\n def configure_optimizers(self):\n return tf.keras.optimizers.Adam(0.1),\n\n def training_step(self, batch, batch_idx, optimizer_idx):\n pred = self(batch)\n loss = tf.reduce_mean(pred)\n log = {'batch_idx': batch_idx, 'tr_loss': loss}\n result = tl.TrainResult(loss, self.model.trainable_variables, log=log)\n return result\n\n def validation_step(self, batch, batch_idx, optimizer_idx):\n pred = self(batch)\n loss = tf.reduce_mean(pred)\n log = {'batch_idx': batch_idx, 'val_loss': loss}\n result = tl.EvalResult(loss, log=log)\n return result\n\n def checkpointer(self):\n return tf.train.Checkpoint(m=self.model, opt0=self.optimizer_0)\n\n\nclass TestDataLoader(tl.LightningDataModule):\n\n def __init__(self):\n self.batch_size = 32\n\n def setup(self):\n self.tr_dataset = tf.random.normal((256, 7))\n self.val_dataset = tf.random.normal((64, 7))\n\n def train_dataloader(self):\n dataset = tf.data.Dataset.from_tensor_slices(self.tr_dataset).batch(\n self.batch_size)\n return dataset\n\n def val_dataloader(self):\n dataset = tf.data.Dataset.from_tensor_slices(self.val_dataset).batch(\n self.batch_size)\n return dataset\n\n\nif __name__ == '__main__':\n model = TestModel()\n dataloader = TestDataLoader()\n trainer = tl.Trainer()\n trainer.fit(model, dataloader)\n",
"step-5": "# __author__ = 'Vasudev Gupta'\n\nimport tf_lightning as tl\nimport tensorflow as tf\n\n\nclass TestModel(tl.LightningModule):\n # just a random model with random dataset\n\n def __init__(self):\n # simple test model\n super().__init__()\n\n self.model = tf.keras.Sequential([\n tf.keras.layers.Dense(5),\n tf.keras.layers.Dense(2)\n ])\n\n def call(self, dataset):\n return self.model(dataset)\n\n def configure_optimizers(self):\n return tf.keras.optimizers.Adam(0.1),\n\n def training_step(self, batch, batch_idx, optimizer_idx):\n\n pred = self(batch)\n loss = tf.reduce_mean(pred)\n\n log = {'batch_idx': batch_idx, 'tr_loss': loss}\n result = tl.TrainResult(\n loss, self.model.trainable_variables, log=log)\n\n return result\n\n def validation_step(self, batch, batch_idx, optimizer_idx):\n\n pred = self(batch)\n loss = tf.reduce_mean(pred)\n\n log = {'batch_idx': batch_idx, 'val_loss': loss}\n result = tl.EvalResult(loss, log=log)\n\n return result\n\n def checkpointer(self):\n return tf.train.Checkpoint(m=self.model,\n opt0=self.optimizer_0)\n\n\nclass TestDataLoader(tl.LightningDataModule):\n # using random dataset\n\n def __init__(self):\n self.batch_size = 32\n\n def setup(self):\n self.tr_dataset = tf.random.normal((256, 7))\n self.val_dataset = tf.random.normal((64, 7))\n\n def train_dataloader(self):\n dataset = tf.data.Dataset.from_tensor_slices(\n self.tr_dataset).batch(self.batch_size)\n return dataset\n\n def val_dataloader(self):\n dataset = tf.data.Dataset.from_tensor_slices(\n self.val_dataset).batch(self.batch_size)\n return dataset\n\n\nif __name__ == '__main__':\n\n model = TestModel()\n\n dataloader = TestDataLoader()\n\n trainer = tl.Trainer()\n\n trainer.fit(model, dataloader)\n",
"step-ids": [
7,
11,
12,
14,
15
]
}
|
[
7,
11,
12,
14,
15
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def merge(L1, L2):
if L1 == []:
return L2
if L2 == []:
return L1
x1, R1 = L1[0], L1[1:]
x2, R2 = L2[0], L2[1:]
if x1 <= x2:
return [x1] + merge(R1, L2)
else:
return [x2] + merge(L1, R2)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def sort(L):
n = len(L)
if n < 2:
return L
L1, L2 = L[:n // 2], L[n // 2:]
return merge(sort(L1), sort(L2))
def merge(L1, L2):
if L1 == []:
return L2
if L2 == []:
return L1
x1, R1 = L1[0], L1[1:]
x2, R2 = L2[0], L2[1:]
if x1 <= x2:
return [x1] + merge(R1, L2)
else:
return [x2] + merge(L1, R2)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def sort(L):
n = len(L)
if n < 2:
return L
L1, L2 = L[:n // 2], L[n // 2:]
return merge(sort(L1), sort(L2))
def merge(L1, L2):
if L1 == []:
return L2
if L2 == []:
return L1
x1, R1 = L1[0], L1[1:]
x2, R2 = L2[0], L2[1:]
if x1 <= x2:
return [x1] + merge(R1, L2)
else:
return [x2] + merge(L1, R2)
print(sort([9, 7, 8, 0, 5, 6, 4, 1, 2, 3]))
|
flexible
|
{
"blob_id": "056636e2220e529d3f66872a4a48c0984cda1ce4",
"index": 6617,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef merge(L1, L2):\n if L1 == []:\n return L2\n if L2 == []:\n return L1\n x1, R1 = L1[0], L1[1:]\n x2, R2 = L2[0], L2[1:]\n if x1 <= x2:\n return [x1] + merge(R1, L2)\n else:\n return [x2] + merge(L1, R2)\n\n\n<mask token>\n",
"step-3": "def sort(L):\n n = len(L)\n if n < 2:\n return L\n L1, L2 = L[:n // 2], L[n // 2:]\n return merge(sort(L1), sort(L2))\n\n\ndef merge(L1, L2):\n if L1 == []:\n return L2\n if L2 == []:\n return L1\n x1, R1 = L1[0], L1[1:]\n x2, R2 = L2[0], L2[1:]\n if x1 <= x2:\n return [x1] + merge(R1, L2)\n else:\n return [x2] + merge(L1, R2)\n\n\n<mask token>\n",
"step-4": "def sort(L):\n n = len(L)\n if n < 2:\n return L\n L1, L2 = L[:n // 2], L[n // 2:]\n return merge(sort(L1), sort(L2))\n\n\ndef merge(L1, L2):\n if L1 == []:\n return L2\n if L2 == []:\n return L1\n x1, R1 = L1[0], L1[1:]\n x2, R2 = L2[0], L2[1:]\n if x1 <= x2:\n return [x1] + merge(R1, L2)\n else:\n return [x2] + merge(L1, R2)\n\n\nprint(sort([9, 7, 8, 0, 5, 6, 4, 1, 2, 3]))\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
class Player(pygame.sprite.Sprite):
def __init__(self, group):
super().__init__(group)
self.weapon = Weapon(self, 'Green laser gun')
self.image = load_image('player.jpg', -1)
self.rect = self.image.get_rect()
self.coords = self.rect.x, self.rect.y = 75, 635
self.mask = pygame.mask.from_surface(self.image)
<|reserved_special_token_0|>
def move(self, side):
x = self.rect.x
if x < 630 and side == 'right':
x += 70
if x > 35 and side == 'left':
x -= 70
self.rect.x = x
class Enemy(pygame.sprite.Sprite):
global enemies_count, MiniG_rate, EnemyG_rate, MetalM_rate
def __init__(self, group):
super().__init__(group)
if enemies_count >= 30 and enemies_count % MetalM_rate == 0:
self.type = 'MM'
self.hp = 24
self.image = pygame.transform.scale(load_image('Metal_Man.png',
-1), (120, 140))
self.rect = self.image.get_rect()
self.coords = self.rect.x, self.rect.y = random.randrange(10,
560, 70), 140
self.mask = pygame.mask.from_surface(self.image)
elif enemies_count >= 15 and enemies_count % EnemyG_rate == 0:
self.type = 'EG'
self.hp = 6
self.image = pygame.transform.scale(load_image(
'Enemy_glider.png', -1), (70, 70))
self.rect = self.image.get_rect()
self.coords = self.rect.x, self.rect.y = random.randrange(0,
700, 70), 140
self.mask = pygame.mask.from_surface(self.image)
else:
self.type = 'MG'
self.hp = 4
self.image = pygame.transform.scale(load_image(
'Mini_glider.png', -1), (70, 70))
self.rect = self.image.get_rect()
self.coords = self.rect.x, self.rect.y = random.randrange(0,
700, 70), 140
self.mask = pygame.mask.from_surface(self.image)
def death_check(self):
global killed, score, coins, FPS
if self.hp <= 0:
killed += 1
if self.type == 'MM':
score += 30
coins += 15
FPS += 10
elif self.type == 'EG':
score += 15
coins += 5
elif self.type == 'MG':
score += 10
coins += 2
self.kill()
def move(self):
self.rect.y += 1
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Bullet(pygame.sprite.Sprite):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def hit(self, enemy):
enemy.hp -= self.damage
self.kill()
class Weapon:
def __init__(self, player, kind):
self.kind = kind
self.ability = None
self.player = player
if self.kind == 'Green laser gun':
self.damage = 2
self.price = 0
elif self.kind == 'Purple laser gun':
self.damage = 4
self.price = 50
elif self.kind == 'Plasma gun':
self.damage = 8
self.price = 150
self.ability = 'Rage'
def shoot(self, enemy_sprites):
bullet = Bullet(enemy_sprites, self.player.rect.x, self.damage,
self.kind)
class Player(pygame.sprite.Sprite):
def __init__(self, group):
super().__init__(group)
self.weapon = Weapon(self, 'Green laser gun')
self.image = load_image('player.jpg', -1)
self.rect = self.image.get_rect()
self.coords = self.rect.x, self.rect.y = 75, 635
self.mask = pygame.mask.from_surface(self.image)
def shoot(self, enemy_sprites):
self.weapon.shoot(enemy_sprites)
def move(self, side):
x = self.rect.x
if x < 630 and side == 'right':
x += 70
if x > 35 and side == 'left':
x -= 70
self.rect.x = x
class Enemy(pygame.sprite.Sprite):
global enemies_count, MiniG_rate, EnemyG_rate, MetalM_rate
def __init__(self, group):
super().__init__(group)
if enemies_count >= 30 and enemies_count % MetalM_rate == 0:
self.type = 'MM'
self.hp = 24
self.image = pygame.transform.scale(load_image('Metal_Man.png',
-1), (120, 140))
self.rect = self.image.get_rect()
self.coords = self.rect.x, self.rect.y = random.randrange(10,
560, 70), 140
self.mask = pygame.mask.from_surface(self.image)
elif enemies_count >= 15 and enemies_count % EnemyG_rate == 0:
self.type = 'EG'
self.hp = 6
self.image = pygame.transform.scale(load_image(
'Enemy_glider.png', -1), (70, 70))
self.rect = self.image.get_rect()
self.coords = self.rect.x, self.rect.y = random.randrange(0,
700, 70), 140
self.mask = pygame.mask.from_surface(self.image)
else:
self.type = 'MG'
self.hp = 4
self.image = pygame.transform.scale(load_image(
'Mini_glider.png', -1), (70, 70))
self.rect = self.image.get_rect()
self.coords = self.rect.x, self.rect.y = random.randrange(0,
700, 70), 140
self.mask = pygame.mask.from_surface(self.image)
def death_check(self):
global killed, score, coins, FPS
if self.hp <= 0:
killed += 1
if self.type == 'MM':
score += 30
coins += 15
FPS += 10
elif self.type == 'EG':
score += 15
coins += 5
elif self.type == 'MG':
score += 10
coins += 2
self.kill()
def move(self):
self.rect.y += 1
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Board:
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class Bullet(pygame.sprite.Sprite):
def __init__(self, enemy_sprites, x, damage, kind):
super().__init__(bullet_sprites)
self.damage = damage
if kind == 'Green laser gun':
self.image = load_image('green.png', -1)
elif kind == 'Purple laser gun':
self.image = load_image('purple.png', -1)
elif kind == 'Plasma gun':
self.image = pygame.transform.scale(load_image('plasma.png', -1
), (25, 25))
self.rect = self.image.get_rect()
self.coords = self.rect.x, self.rect.y = x + 30, 665
self.mask = pygame.mask.from_surface(self.image)
self.fly(enemy_sprites)
def fly(self, enemy_sprites):
if self.rect.y >= 140:
self.rect.y -= 1
for enemy in enemy_sprites:
if pygame.sprite.collide_mask(enemy, self):
self.hit(enemy)
else:
self.kill()
def hit(self, enemy):
enemy.hp -= self.damage
self.kill()
class Weapon:
def __init__(self, player, kind):
self.kind = kind
self.ability = None
self.player = player
if self.kind == 'Green laser gun':
self.damage = 2
self.price = 0
elif self.kind == 'Purple laser gun':
self.damage = 4
self.price = 50
elif self.kind == 'Plasma gun':
self.damage = 8
self.price = 150
self.ability = 'Rage'
def shoot(self, enemy_sprites):
bullet = Bullet(enemy_sprites, self.player.rect.x, self.damage,
self.kind)
class Player(pygame.sprite.Sprite):
def __init__(self, group):
super().__init__(group)
self.weapon = Weapon(self, 'Green laser gun')
self.image = load_image('player.jpg', -1)
self.rect = self.image.get_rect()
self.coords = self.rect.x, self.rect.y = 75, 635
self.mask = pygame.mask.from_surface(self.image)
def shoot(self, enemy_sprites):
self.weapon.shoot(enemy_sprites)
def move(self, side):
x = self.rect.x
if x < 630 and side == 'right':
x += 70
if x > 35 and side == 'left':
x -= 70
self.rect.x = x
class Enemy(pygame.sprite.Sprite):
global enemies_count, MiniG_rate, EnemyG_rate, MetalM_rate
def __init__(self, group):
super().__init__(group)
if enemies_count >= 30 and enemies_count % MetalM_rate == 0:
self.type = 'MM'
self.hp = 24
self.image = pygame.transform.scale(load_image('Metal_Man.png',
-1), (120, 140))
self.rect = self.image.get_rect()
self.coords = self.rect.x, self.rect.y = random.randrange(10,
560, 70), 140
self.mask = pygame.mask.from_surface(self.image)
elif enemies_count >= 15 and enemies_count % EnemyG_rate == 0:
self.type = 'EG'
self.hp = 6
self.image = pygame.transform.scale(load_image(
'Enemy_glider.png', -1), (70, 70))
self.rect = self.image.get_rect()
self.coords = self.rect.x, self.rect.y = random.randrange(0,
700, 70), 140
self.mask = pygame.mask.from_surface(self.image)
else:
self.type = 'MG'
self.hp = 4
self.image = pygame.transform.scale(load_image(
'Mini_glider.png', -1), (70, 70))
self.rect = self.image.get_rect()
self.coords = self.rect.x, self.rect.y = random.randrange(0,
700, 70), 140
self.mask = pygame.mask.from_surface(self.image)
def death_check(self):
global killed, score, coins, FPS
if self.hp <= 0:
killed += 1
if self.type == 'MM':
score += 30
coins += 15
FPS += 10
elif self.type == 'EG':
score += 15
coins += 5
elif self.type == 'MG':
score += 10
coins += 2
self.kill()
def move(self):
self.rect.y += 1
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def info_print():
global score, killed, coins
font = pygame.font.Font(None, 30)
text_coord = 2
pygame.draw.rect(screen, (100, 100, 100), (0, 0, 200, 100), 3)
pygame.draw.rect(screen, (150, 150, 150), (3, 3, 194, 94), 3)
pygame.draw.rect(screen, (250, 250, 250), (5, 5, 190, 90))
text = [f'Счёт: {score}', f'Убито: {killed}', f'Монеты: {coins}']
for line in text:
string_rendered = font.render(line, 1, (50, 50, 50))
intro_rect = string_rendered.get_rect()
text_coord += 10
intro_rect.top = text_coord
intro_rect.x = 10
text_coord += intro_rect.height
screen.blit(string_rendered, intro_rect)
class Board:
def __init__(self, screen, width, height):
self.width = width
self.height = height
self.board = [([0] * width) for _ in range(height)]
self.left = 0
self.top = 0
self.cell_size = 70
self.screen = screen
def set_view(self, left, top, cell_size):
self.left = left
self.top = top
self.cell_size = cell_size
def render(self):
tp, pp = [[0, 140], [17, 105], [35, 140]], [[17, 105], [35, 140], [
52, 105]]
for y in range(self.height):
for x in range(self.width):
if y >= 2:
pygame.draw.rect(self.screen, (100, 100, 100), (x *
self.cell_size, y * self.cell_size, self.cell_size,
self.cell_size), 1)
pygame.draw.rect(self.screen, (150, 150, 150), (x *
self.cell_size + 1, y * self.cell_size + 1, self.
cell_size - 2, self.cell_size - 2), 2)
pygame.draw.rect(self.screen, (250, 250, 250), (x *
self.cell_size + 3, y * self.cell_size + 3, self.
cell_size - 4, self.cell_size - 4))
for i in range(self.width * 2 - 1):
pygame.draw.polygon(screen, (0, 230, 200), pp)
pp[0][1] += 2
pp[0][0] += 4
pp[1][1] -= 3
pp[2][1] += 2
pp[2][0] -= 4
pygame.draw.polygon(screen, (0, 125, 200), pp)
pp[0][1] += 4
pp[0][0] += 6
pp[1][1] -= 7
pp[2][1] += 4
pp[2][0] -= 6
pygame.draw.polygon(screen, (0, 230, 200), pp)
pp[0][1] -= 6
pp[0][0] -= 10
pp[1][1] += 10
pp[2][1] -= 6
pp[2][0] += 10
for point in pp:
point[0] += 35
for i in range(self.width * 2):
pygame.draw.polygon(screen, (100, 100, 100), tp)
tp[0][1] -= 2
tp[0][0] += 4
tp[1][1] += 4
tp[2][1] -= 2
tp[2][0] -= 4
pygame.draw.polygon(screen, (150, 150, 150), tp)
tp[0][1] -= 2
tp[0][0] += 4
tp[1][1] += 4
tp[2][1] -= 2
tp[2][0] -= 4
pygame.draw.polygon(screen, (250, 250, 250), tp)
tp[0][1] += 4
tp[0][0] -= 8
tp[1][1] -= 8
tp[2][1] += 4
tp[2][0] += 8
for point in tp:
point[0] += 35
class Bullet(pygame.sprite.Sprite):
def __init__(self, enemy_sprites, x, damage, kind):
super().__init__(bullet_sprites)
self.damage = damage
if kind == 'Green laser gun':
self.image = load_image('green.png', -1)
elif kind == 'Purple laser gun':
self.image = load_image('purple.png', -1)
elif kind == 'Plasma gun':
self.image = pygame.transform.scale(load_image('plasma.png', -1
), (25, 25))
self.rect = self.image.get_rect()
self.coords = self.rect.x, self.rect.y = x + 30, 665
self.mask = pygame.mask.from_surface(self.image)
self.fly(enemy_sprites)
def fly(self, enemy_sprites):
if self.rect.y >= 140:
self.rect.y -= 1
for enemy in enemy_sprites:
if pygame.sprite.collide_mask(enemy, self):
self.hit(enemy)
else:
self.kill()
def hit(self, enemy):
enemy.hp -= self.damage
self.kill()
class Weapon:
def __init__(self, player, kind):
self.kind = kind
self.ability = None
self.player = player
if self.kind == 'Green laser gun':
self.damage = 2
self.price = 0
elif self.kind == 'Purple laser gun':
self.damage = 4
self.price = 50
elif self.kind == 'Plasma gun':
self.damage = 8
self.price = 150
self.ability = 'Rage'
def shoot(self, enemy_sprites):
bullet = Bullet(enemy_sprites, self.player.rect.x, self.damage,
self.kind)
class Player(pygame.sprite.Sprite):
def __init__(self, group):
super().__init__(group)
self.weapon = Weapon(self, 'Green laser gun')
self.image = load_image('player.jpg', -1)
self.rect = self.image.get_rect()
self.coords = self.rect.x, self.rect.y = 75, 635
self.mask = pygame.mask.from_surface(self.image)
def shoot(self, enemy_sprites):
self.weapon.shoot(enemy_sprites)
def move(self, side):
x = self.rect.x
if x < 630 and side == 'right':
x += 70
if x > 35 and side == 'left':
x -= 70
self.rect.x = x
class Enemy(pygame.sprite.Sprite):
    """Enemy sprite; the global spawn counter and rate divisors pick its type."""
    global enemies_count, MiniG_rate, EnemyG_rate, MetalM_rate
    def __init__(self, group):
        super().__init__(group)
        # Metal Man ('MM'): unlocked after 30 spawns, every MetalM_rate-th.
        if enemies_count >= 30 and enemies_count % MetalM_rate == 0:
            self.type = 'MM'
            self.hp = 24
            self.image = pygame.transform.scale(load_image('Metal_Man.png',
                -1), (120, 140))
            self.rect = self.image.get_rect()
            # Snap to a 70px column; y=140 is the top of the playfield.
            self.coords = self.rect.x, self.rect.y = random.randrange(10, 
                560, 70), 140
            self.mask = pygame.mask.from_surface(self.image)
        # Enemy glider ('EG'): unlocked after 15 spawns, every EnemyG_rate-th.
        elif enemies_count >= 15 and enemies_count % EnemyG_rate == 0:
            self.type = 'EG'
            self.hp = 6
            self.image = pygame.transform.scale(load_image(
                'Enemy_glider.png', -1), (70, 70))
            self.rect = self.image.get_rect()
            self.coords = self.rect.x, self.rect.y = random.randrange(0, 
                700, 70), 140
            self.mask = pygame.mask.from_surface(self.image)
        else:
            # Mini glider ('MG'): the default, weakest enemy.
            self.type = 'MG'
            self.hp = 4
            self.image = pygame.transform.scale(load_image(
                'Mini_glider.png', -1), (70, 70))
            self.rect = self.image.get_rect()
            self.coords = self.rect.x, self.rect.y = random.randrange(0, 
                700, 70), 140
            self.mask = pygame.mask.from_surface(self.image)
    def death_check(self):
        """Remove the enemy and award kills/score/coins when hp reaches 0."""
        global killed, score, coins, FPS
        if self.hp <= 0:
            killed += 1
            if self.type == 'MM':
                score += 30
                coins += 15
                # Killing a Metal Man also speeds up the game loop.
                FPS += 10
            elif self.type == 'EG':
                score += 15
                coins += 5
            elif self.type == 'MG':
                score += 10
                coins += 2
            self.kill()
    def move(self):
        """Descend one pixel per frame."""
        self.rect.y += 1
def game_over():
    """Show the game-over screen; A restarts the script, Esc quits."""
    global FPS, not_paused, score, killed, coins
    def text_print():
        # Render the title plus final stats over the background image.
        game_over = ' GAME OVER'
        intro_text = ['', 'Нажми клавишу A', 'чтобы сыграть еще раз', '',
            'Нажми на кнопку "Esc", ', 'чтобы выйти из игры',
            f'Счёт: {score}', f'Убито: {killed}', f'Монеты: {coins}']
        fon = pygame.transform.scale(load_image('fon.jpg'), (width, height))
        screen.blit(fon, (0, 0))
        font = pygame.font.Font(None, 50)
        text_coord = 40
        string_rendered = font.render(game_over, 1, pygame.Color('white'))
        intro_rect = string_rendered.get_rect()
        text_coord += 10
        intro_rect.top = text_coord
        intro_rect.x = 10
        text_coord += intro_rect.height
        screen.blit(string_rendered, intro_rect)
        font = pygame.font.Font(None, 30)
        for line in intro_text:
            string_rendered = font.render(line, 1, pygame.Color('white'))
            intro_rect = string_rendered.get_rect()
            text_coord += 10
            intro_rect.top = text_coord
            intro_rect.x = 10
            text_coord += intro_rect.height
            intro_rect.x += 10
            screen.blit(string_rendered, intro_rect)
    FPS = 30
    pygame.mouse.set_visible(True)
    text_print()
    while True:
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                terminate()
            elif event.type == pygame.KEYDOWN or event.type == pygame.MOUSEBUTTONDOWN:
                if event.type == pygame.KEYDOWN:
                    if event.key == pygame.K_ESCAPE:
                        terminate()
                    # 97 == pygame.K_a: restart by relaunching the script.
                    if event.key == 97:
                        pygame.quit()
                        # NOTE(review): shell relaunch; a path with spaces
                        # would break this command string.
                        subprocess.call('python' + ' проект.py', shell=True)
        if not_paused:
            pygame.display.flip()
        clock.tick(FPS)
    terminate()  # unreachable: the loop above never breaks
def terminate():
    """Shut down pygame and exit the process."""
    pygame.quit()
    sys.exit()
<|reserved_special_token_0|>
<|reserved_special_token_1|>
import sys
import pygame
import os
import random
import subprocess
# Game state and tuning values.
# FPS: frame rate (raised when Metal Men die); NEWENEMYSPAWN: event id used
# with pygame.time.set_timer; fst_spawn: spawn interval in milliseconds.
FPS, NEWENEMYSPAWN, fst_spawn, not_paused, coins, enemies_count, killed, score = 50, 30, 2000, True, 0, 0, 0, 0
# Spawn-rate divisors per enemy type (Mini glider, Enemy glider, Metal Man).
MiniG_rate, EnemyG_rate, MetalM_rate = 1, 5, 15
# Upgrade order used by the shop (index + 1 is the next purchase).
WEAPONS_LIST = ['Green laser gun', 'Purple laser gun', 'Plasma gun']
def load_image(name, colorkey=None):
    """Load an image from the ``data/`` directory.

    colorkey semantics:
      * -1        -> use the top-left pixel as the transparent colour;
      * other     -> use the given colour as the colorkey;
      * None      -> keep per-pixel alpha (``convert_alpha``).

    BUG FIX: the original called ``convert()`` unconditionally before the
    ``convert_alpha()`` branch, which discards per-pixel alpha; convert only
    in the colorkey branch instead (standard pygame recipe).
    """
    fullname = os.path.join('data', name)
    image = pygame.image.load(fullname)
    if colorkey is not None:
        image = image.convert()
        if colorkey == -1:
            colorkey = image.get_at((0, 0))
        image.set_colorkey(colorkey)
    else:
        image = image.convert_alpha()
    return image
def info_print():
    """Draw the score/kills/coins HUD panel in the top-left corner."""
    global score, killed, coins
    font = pygame.font.Font(None, 30)
    text_coord = 2
    # Framed panel: dark outer border, lighter inner border, white fill.
    pygame.draw.rect(screen, (100, 100, 100), (0, 0, 200, 100), 3)
    pygame.draw.rect(screen, (150, 150, 150), (3, 3, 194, 94), 3)
    pygame.draw.rect(screen, (250, 250, 250), (5, 5, 190, 90))
    text = [f'Счёт: {score}',
            f'Убито: {killed}',
            f'Монеты: {coins}']
    for line in text:
        string_rendered = font.render(line, 1, (50, 50, 50))
        intro_rect = string_rendered.get_rect()
        text_coord += 10
        intro_rect.top = text_coord
        intro_rect.x = 10
        text_coord += intro_rect.height
        screen.blit(string_rendered, intro_rect)
class Board:
    """Playfield renderer: a 70px cell grid plus a decorative zig-zag band.

    The ``board`` matrix is initialised but not read anywhere in this
    file — presumably reserved for future use (TODO confirm).
    """
    def __init__(self, screen, width, height):
        self.width = width
        self.height = height
        self.board = [[0] * width for _ in range(height)]
        self.left = 0
        self.top = 0
        self.cell_size = 70
        self.screen = screen
    def set_view(self, left, top, cell_size):
        """Reposition the grid origin and change the cell size."""
        self.left = left
        self.top = top
        self.cell_size = cell_size
    def render(self):
        """Draw the cell grid (rows >= 2) and two rows of triangles on top.

        ``tp``/``pp`` are triangle vertex lists that are mutated in place
        between polygon draws to produce the layered border effect; the
        statement order is load-bearing.
        """
        tp, pp = [[0, 140], [17, 105], [35, 140]], [[17, 105], [35, 140], [52, 105]]
        for y in range(self.height):
            for x in range(self.width):
                if y >= 2:
                    # Each cell: dark outline, lighter inset, white interior.
                    pygame.draw.rect(self.screen, (100, 100, 100), (
                        x * self.cell_size, y * self.cell_size, self.cell_size, self.cell_size),
                        1)
                    pygame.draw.rect(self.screen, (150, 150, 150), (
                        x * self.cell_size + 1, y * self.cell_size + 1, self.cell_size - 2,
                        self.cell_size - 2), 2)
                    pygame.draw.rect(self.screen, (250, 250, 250), (
                        x * self.cell_size + 3, y * self.cell_size + 3, self.cell_size - 4,
                        self.cell_size - 4))
        # Downward-pointing triangles (teal/blue band), shifted 35px per step.
        for i in range(self.width * 2 - 1):
            pygame.draw.polygon(screen, (0, 230, 200), pp)
            pp[0][1] += 2
            pp[0][0] += 4
            pp[1][1] -= 3
            pp[2][1] += 2
            pp[2][0] -= 4
            pygame.draw.polygon(screen, (0, 125, 200), pp)
            pp[0][1] += 4
            pp[0][0] += 6
            pp[1][1] -= 7
            pp[2][1] += 4
            pp[2][0] -= 6
            pygame.draw.polygon(screen, (0, 230, 200), pp)
            # Undo the shrinking offsets before advancing to the next slot.
            pp[0][1] -= 6
            pp[0][0] -= 10
            pp[1][1] += 10
            pp[2][1] -= 6
            pp[2][0] += 10
            for point in pp:
                point[0] += 35
        # Upward-pointing triangles (grey band), same layered technique.
        for i in range(self.width * 2):
            pygame.draw.polygon(screen, (100, 100, 100), tp)
            tp[0][1] -= 2
            tp[0][0] += 4
            tp[1][1] += 4
            tp[2][1] -= 2
            tp[2][0] -= 4
            pygame.draw.polygon(screen, (150, 150, 150), tp)
            tp[0][1] -= 2
            tp[0][0] += 4
            tp[1][1] += 4
            tp[2][1] -= 2
            tp[2][0] -= 4
            pygame.draw.polygon(screen, (250, 250, 250), tp)
            tp[0][1] += 4
            tp[0][0] -= 8
            tp[1][1] -= 8
            tp[2][1] += 4
            tp[2][0] += 8
            for point in tp:
                point[0] += 35
class Bullet(pygame.sprite.Sprite):
    """Player projectile; the weapon kind selects the sprite image."""
    def __init__(self, enemy_sprites, x, damage, kind):
        # Bullets join the module-level ``bullet_sprites`` group; the
        # ``enemy_sprites`` argument is only used for collision checks.
        super().__init__(bullet_sprites)
        self.damage = damage
        if kind == 'Green laser gun':
            self.image = load_image("green.png", -1)
        elif kind == 'Purple laser gun':
            self.image = load_image("purple.png", -1)
        elif kind == 'Plasma gun':
            self.image = pygame.transform.scale(load_image("plasma.png", -1), (25, 25))
        self.rect = self.image.get_rect()
        # Spawn above the gun barrel: +30px from the player's x, fixed y=665.
        self.coords = self.rect.x, self.rect.y = x + 30, 665
        self.mask = pygame.mask.from_surface(self.image)
        self.fly(enemy_sprites)
    def fly(self, enemy_sprites):
        """Move one pixel up; despawn above y=140 or on an enemy hit."""
        if self.rect.y >= 140:
            self.rect.y -= 1
            for enemy in enemy_sprites:
                if pygame.sprite.collide_mask(enemy, self):
                    self.hit(enemy)
        else:
            self.kill()
    def hit(self, enemy):
        """Deal this bullet's damage to *enemy* and remove the bullet."""
        enemy.hp -= self.damage
        self.kill()
class Weapon:
    """Player weapon: fixed damage/price stats per kind, fires bullets."""

    # kind -> (damage, price, special ability); covers every entry of
    # WEAPONS_LIST used by this file.
    _STATS = {
        'Green laser gun': (2, 0, None),
        'Purple laser gun': (4, 50, None),
        'Plasma gun': (8, 150, 'Rage'),
    }

    def __init__(self, player, kind):
        self.kind = kind
        self.player = player
        self.damage, self.price, self.ability = self._STATS[kind]

    def shoot(self, enemy_sprites):
        """Spawn one bullet at the owning player's current x position."""
        Bullet(enemy_sprites, self.player.rect.x, self.damage, self.kind)
class Player(pygame.sprite.Sprite):
    """Player ship: owns a weapon, moves on a 70px grid, fires bullets."""
    def __init__(self, group):
        super().__init__(group)
        self.weapon = Weapon(self, 'Green laser gun')
        self.image = load_image("player.jpg", -1)
        self.rect = self.image.get_rect()
        # Start position: x=75 (second column), fixed row y=635.
        self.coords = self.rect.x, self.rect.y = 75, 635
        self.mask = pygame.mask.from_surface(self.image)
    def shoot(self, enemy_sprites):
        """Delegate firing to the currently equipped weapon."""
        self.weapon.shoot(enemy_sprites)
    def move(self, side):
        """Step one 70px cell to *side* ('left'/'right'), clamped to board."""
        x = self.rect.x
        if x < 630 and side == 'right':
            x += 70
        if x > 35 and side == 'left':
            x -= 70
        self.rect.x = x
class Enemy(pygame.sprite.Sprite):
    """Enemy sprite; the global spawn counter and rate divisors pick its type."""
    global enemies_count, MiniG_rate, EnemyG_rate, MetalM_rate
    def __init__(self, group):
        super().__init__(group)
        # Metal Man ('MM'): unlocked after 30 spawns, every MetalM_rate-th.
        if enemies_count >= 30 and enemies_count % MetalM_rate == 0:
            self.type = 'MM'
            self.hp = 24
            self.image = pygame.transform.scale(load_image("Metal_Man.png", -1), (120, 140))
            self.rect = self.image.get_rect()
            # Snap to a 70px column; y=140 is the top of the playfield.
            self.coords = self.rect.x, self.rect.y = random.randrange(10, 560, 70), 140
            self.mask = pygame.mask.from_surface(self.image)
        # Enemy glider ('EG'): unlocked after 15 spawns, every EnemyG_rate-th.
        elif enemies_count >= 15 and enemies_count % EnemyG_rate == 0:
            self.type = 'EG'
            self.hp = 6
            self.image = pygame.transform.scale(load_image('Enemy_glider.png', -1), (70, 70))
            self.rect = self.image.get_rect()
            self.coords = self.rect.x, self.rect.y = random.randrange(0, 700, 70), 140
            self.mask = pygame.mask.from_surface(self.image)
        else:
            # Mini glider ('MG'): the default, weakest enemy.
            self.type = 'MG'
            self.hp = 4
            self.image = pygame.transform.scale(load_image('Mini_glider.png', -1), (70, 70))
            self.rect = self.image.get_rect()
            self.coords = self.rect.x, self.rect.y = random.randrange(0, 700, 70), 140
            self.mask = pygame.mask.from_surface(self.image)
    def death_check(self):
        """Remove the enemy and award kills/score/coins when hp reaches 0."""
        global killed, score, coins, FPS
        if self.hp <= 0:
            killed += 1
            if self.type == 'MM':
                score += 30
                coins += 15
                # Killing a Metal Man also speeds up the game loop.
                FPS += 10
            elif self.type == 'EG':
                score += 15
                coins += 5
            elif self.type == 'MG':
                score += 10
                coins += 2
            self.kill()
    def move(self):
        """Descend one pixel per frame."""
        self.rect.y += 1
def game_over():
    """Show the game-over screen; A restarts the script, Esc quits."""
    global FPS, not_paused, score, killed, coins
    def text_print():
        # Render the title plus final stats over the background image.
        game_over = ' GAME OVER'
        intro_text = ["",
                      "Нажми клавишу A",
                      "чтобы сыграть еще раз",
                      '',
                      'Нажми на кнопку "Esc", ',
                      'чтобы выйти из игры',
                      f'Счёт: {score}',
                      f'Убито: {killed}',
                      f'Монеты: {coins}']
        fon = pygame.transform.scale(load_image('fon.jpg'), (width, height))
        screen.blit(fon, (0, 0))
        font = pygame.font.Font(None, 50)
        text_coord = 40
        string_rendered = font.render(game_over, 1, pygame.Color('white'))
        intro_rect = string_rendered.get_rect()
        text_coord += 10
        intro_rect.top = text_coord
        intro_rect.x = 10
        text_coord += intro_rect.height
        screen.blit(string_rendered, intro_rect)
        font = pygame.font.Font(None, 30)
        for line in intro_text:
            string_rendered = font.render(line, 1, pygame.Color('white'))
            intro_rect = string_rendered.get_rect()
            text_coord += 10
            intro_rect.top = text_coord
            intro_rect.x = 10
            text_coord += intro_rect.height
            intro_rect.x += 10
            screen.blit(string_rendered, intro_rect)
    FPS = 30
    pygame.mouse.set_visible(True)
    text_print()
    while True:
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                terminate()
            elif event.type == pygame.KEYDOWN or event.type == pygame.MOUSEBUTTONDOWN:
                if event.type == pygame.KEYDOWN:
                    if event.key == pygame.K_ESCAPE:
                        terminate()
                    # 97 == pygame.K_a: restart by relaunching the script.
                    if event.key == 97:
                        pygame.quit()
                        # NOTE(review): shell relaunch; a path with spaces
                        # would break this command string.
                        subprocess.call("python" + " проект.py", shell=True)
        if not_paused:
            pygame.display.flip()
        clock.tick(FPS)
    terminate()  # unreachable: the loop above never breaks
def terminate():
    """Shut down pygame and exit the process."""
    pygame.quit()
    sys.exit()
def start_screen(screen, width, height):
    """Show the title/instructions screen until the game is started."""
    global FPS, not_paused
    def text_print():
        intro_text = [" SPACE SOLDIER", "",
                      " Нажми любую клавишу,",
                      " чтобы начать игру",
                      ' Нажимай на кнопки стрелок, чтобы перемещать персонажа',
                      ' Не дай врагу пролететь мимо тебя!',
                      ' Нажми на кнопку "Esc", ',
                      ' чтобы открыть меню паузы',
                      ' или попасть в магазин']
        fon = pygame.transform.scale(load_image('fon.jpg'), (width, height))
        font = pygame.font.Font(None, 30)
        text_coord = 50
        screen.blit(fon, (0, 0))
        for line in intro_text:
            string_rendered = font.render(line, 1, pygame.Color('black'))
            intro_rect = string_rendered.get_rect()
            text_coord += 10
            intro_rect.top = text_coord
            intro_rect.x = 10
            text_coord += intro_rect.height
            screen.blit(string_rendered, intro_rect)
    pygame.mouse.set_visible(True)
    text_print()
    while True:
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                terminate()
            elif event.type == pygame.KEYDOWN or event.type == pygame.MOUSEBUTTONDOWN:
                if event.type == pygame.KEYDOWN:
                    # Esc opens the pause menu, then redraws this screen.
                    if event.key == pygame.K_ESCAPE:
                        pause_menu(screen, width, height)
                        text_print()
                else:
                    # NOTE(review): only a mouse click reaches this start
                    # branch; non-Esc key presses are ignored despite the
                    # "press any key" text — confirm intended.
                    pygame.mouse.set_visible(False)
                    return
        if not_paused:
            pygame.display.flip()
        clock.tick(FPS)
    terminate()  # unreachable: the loop above never breaks
def pause_menu(screen, width, height):
    """Pause screen: S opens the shop, C resumes, Esc closes the menu.

    Fixes relative to the original:
      * typo in the user-facing text ("продолжжить" -> "продолжить");
      * the 'C' (continue) branch now also resets ``not_paused`` and hides
        the mouse — previously it returned with ``not_paused`` still False
        (the caller sets it before opening this menu), freezing the game.
    """
    global FPS, not_paused

    def text_print():
        intro_text = ["Нажми на кнопку 'S',",
                      "чтобы открыть магазин",
                      '',
                      "Нажми на кнопку 'C',",
                      "чтобы продолжить игру",
                      '',
                      "УПРАВЛЕНИЕ",
                      '',
                      'Нажимай на кнопки стрелок, чтобы перемещать персонажа',
                      '',
                      'Не дай врагу пролететь мимо тебя!',
                      '',
                      'Нажми на кнопку "Esc", ',
                      'чтобы закрыть меню паузы']
        fon = pygame.transform.scale(load_image('fon.jpg'), (width, height))
        font = pygame.font.Font(None, 30)
        text_coord = 50
        screen.blit(fon, (0, 0))
        for line in intro_text:
            string_rendered = font.render(line, 1, pygame.Color('black'))
            intro_rect = string_rendered.get_rect()
            text_coord += 10
            intro_rect.top = text_coord
            intro_rect.x = 10
            text_coord += intro_rect.height
            screen.blit(string_rendered, intro_rect)

    pygame.mouse.set_visible(True)
    fon = pygame.transform.scale(load_image('fon.jpg'), (width, height))
    screen.blit(fon, (0, 0))
    text_print()
    while True:
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                terminate()
            if event.type == pygame.KEYDOWN:
                if event.key == pygame.K_ESCAPE:
                    not_paused = True
                    pygame.mouse.set_visible(False)
                    return
                # 115 == pygame.K_s: open the shop, then keep the menu up.
                if event.key == 115:
                    shop(screen, width, height)
                # 99 == pygame.K_c: resume the game.
                if event.key == 99:
                    not_paused = True
                    pygame.mouse.set_visible(False)
                    return
        pygame.display.flip()
        clock.tick(FPS)
    terminate()  # unreachable: the loop above never breaks
def shop(screen, width, height):
    """Weapon shop screen: shows current/next weapon stats; U buys, Esc exits.

    Uses module globals: player, coins, WEAPONS_LIST, clock, FPS.

    Fixes relative to the original:
      * the Plasma gun's advertised damage was 6, but ``Weapon`` gives it 8;
      * after a purchase the offer (``next_weapon``/``next_damage``/
        ``next_price``) and the screen are refreshed — previously they went
        stale, showing old stats and letting the player buy the Plasma gun
        at the Purple laser gun's price.
    """
    global FPS, not_paused, WEAPONS_LIST, coins

    def next_upgrade():
        # Return (kind, damage, price) of the next purchasable weapon, or
        # placeholder strings when the best weapon is already owned.
        if player.weapon.kind != 'Plasma gun':
            kind = WEAPONS_LIST[WEAPONS_LIST.index(player.weapon.kind) + 1]
            if kind == 'Purple laser gun':
                return kind, 4, 50
            # BUG FIX: the Plasma gun deals 8 damage (see Weapon), not 6.
            return kind, 8, 150
        return ('Вы имеете лучшее оружие', 'Наносимый урон максимальный',
                'Покупать больше нечего')

    def text_print():
        intro_text = [" Нажми на кнопку 'U',",
                      "чтобы улучшить свое оружие",
                      'Нажми на кнопку "Esc", ',
                      'чтобы выйти из магазина', '',
                      'Текущее оружие:',
                      f'{player.weapon.kind}',
                      'Наносимый урон:',
                      f'{player.weapon.damage}',
                      'Следующее улучшение:',
                      f'{next_weapon}',
                      'Урон:',
                      f'{next_damage}',
                      'Стоимость:',
                      f'{next_price}',
                      'Ваши монеты:',
                      f'{coins}']
        fon = pygame.transform.scale(load_image('fon.jpg'), (width, height))
        font = pygame.font.Font(None, 30)
        text_coord = 50
        screen.blit(fon, (0, 0))
        for line in intro_text:
            string_rendered = font.render(line, 1, pygame.Color('black'))
            intro_rect = string_rendered.get_rect()
            text_coord += 10
            intro_rect.top = text_coord
            intro_rect.x = 10
            text_coord += intro_rect.height
            screen.blit(string_rendered, intro_rect)

    next_weapon, next_damage, next_price = next_upgrade()
    pygame.mouse.set_visible(True)
    fon = pygame.transform.scale(load_image('fon.jpg'), (width, height))
    screen.blit(fon, (0, 0))
    text_print()
    while True:
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                terminate()
            if event.type == pygame.KEYDOWN:
                if event.key == pygame.K_ESCAPE:
                    pygame.mouse.set_visible(False)
                    screen.blit(fon, (0, 0))
                    return
                # 117 == pygame.K_u: buy the next weapon when affordable.
                if (event.key == 117 and player.weapon.kind != 'Plasma gun'
                        and coins >= next_price):
                    coins -= next_price
                    player.weapon = Weapon(player, next_weapon)
                    # Refresh the offer and redraw the stats after buying.
                    next_weapon, next_damage, next_price = next_upgrade()
                    text_print()
        pygame.display.flip()
        clock.tick(FPS)
    terminate()  # unreachable: the loop above never breaks
# --- module-level game setup -------------------------------------------
pygame.init()
size = width, height = 700, 700
screen = pygame.display.set_mode(size)
pygame.display.set_caption('SPACE SOLDIER')
pygame.display.set_icon(load_image("icon.png", -1))
fon1 = pygame.transform.scale(load_image('fon1.png'), (700, 400))
board = Board(screen, 10, 10)
pygame.mouse.set_visible(True)
enemy_sprites = pygame.sprite.Group()
player_sprites = pygame.sprite.Group()
bullet_sprites = pygame.sprite.Group()
player = Player(player_sprites)
enemy_li = [Enemy(enemy_sprites)]
clock = pygame.time.Clock()
start_screen(screen, width, height)
# Fire the NEWENEMYSPAWN event every fst_spawn milliseconds.
pygame.time.set_timer(NEWENEMYSPAWN, fst_spawn)
# --- main loop ---------------------------------------------------------
while True:
    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            terminate()
        if event.type == pygame.KEYDOWN:
            if event.key == pygame.K_ESCAPE:
                not_paused = False
                pause_menu(screen, width, height)
            if not_paused:
                # NOTE(review): 275/276/115 are pygame-1 keycodes for right
                # arrow, left arrow and 's' — confirm the pygame version.
                if event.key == 275:
                    player.move('right')
                elif event.key == 276:
                    player.move('left')
                if event.key == 115:
                    player.shoot(enemy_sprites)
        if not_paused and event.type == NEWENEMYSPAWN:
            enemy_li.append(Enemy(enemy_sprites))
            enemies_count += 1
    if not_paused:
        screen.blit(fon1, (0, 0))
        board.render()
        player_sprites.draw(screen)
        enemy_sprites.draw(screen)
        bullet_sprites.draw(screen)
        for enemy in enemy_sprites:
            # Metal Man is taller, so it "reaches" the player sooner.
            if enemy.type != 'MM':
                lim = 630
            else:
                lim = 560
            if enemy.rect.y <= lim:
                enemy.move()
            else:
                game_over()
        for bullet in bullet_sprites:
            bullet.fly(enemy_sprites)
        # BUG FIX: death_check() previously ran inside the bullet loop on
        # the loop-leaked ``enemy`` variable (only the last enemy iterated
        # above), so other enemies at 0 hp were never removed or scored.
        # Check every enemy each frame instead.
        for enemy in enemy_sprites:
            enemy.death_check()
        info_print()
        pygame.display.flip()
        clock.tick(FPS)
terminate()  # unreachable: the loop above never breaks
|
flexible
|
{
"blob_id": "244191087fcab2a6f03bf024708484b9838731ed",
"index": 9301,
"step-1": "<mask token>\n\n\nclass Player(pygame.sprite.Sprite):\n\n def __init__(self, group):\n super().__init__(group)\n self.weapon = Weapon(self, 'Green laser gun')\n self.image = load_image('player.jpg', -1)\n self.rect = self.image.get_rect()\n self.coords = self.rect.x, self.rect.y = 75, 635\n self.mask = pygame.mask.from_surface(self.image)\n <mask token>\n\n def move(self, side):\n x = self.rect.x\n if x < 630 and side == 'right':\n x += 70\n if x > 35 and side == 'left':\n x -= 70\n self.rect.x = x\n\n\nclass Enemy(pygame.sprite.Sprite):\n global enemies_count, MiniG_rate, EnemyG_rate, MetalM_rate\n\n def __init__(self, group):\n super().__init__(group)\n if enemies_count >= 30 and enemies_count % MetalM_rate == 0:\n self.type = 'MM'\n self.hp = 24\n self.image = pygame.transform.scale(load_image('Metal_Man.png',\n -1), (120, 140))\n self.rect = self.image.get_rect()\n self.coords = self.rect.x, self.rect.y = random.randrange(10, \n 560, 70), 140\n self.mask = pygame.mask.from_surface(self.image)\n elif enemies_count >= 15 and enemies_count % EnemyG_rate == 0:\n self.type = 'EG'\n self.hp = 6\n self.image = pygame.transform.scale(load_image(\n 'Enemy_glider.png', -1), (70, 70))\n self.rect = self.image.get_rect()\n self.coords = self.rect.x, self.rect.y = random.randrange(0, \n 700, 70), 140\n self.mask = pygame.mask.from_surface(self.image)\n else:\n self.type = 'MG'\n self.hp = 4\n self.image = pygame.transform.scale(load_image(\n 'Mini_glider.png', -1), (70, 70))\n self.rect = self.image.get_rect()\n self.coords = self.rect.x, self.rect.y = random.randrange(0, \n 700, 70), 140\n self.mask = pygame.mask.from_surface(self.image)\n\n def death_check(self):\n global killed, score, coins, FPS\n if self.hp <= 0:\n killed += 1\n if self.type == 'MM':\n score += 30\n coins += 15\n FPS += 10\n elif self.type == 'EG':\n score += 15\n coins += 5\n elif self.type == 'MG':\n score += 10\n coins += 2\n self.kill()\n\n def move(self):\n self.rect.y += 1\n\n\n<mask 
token>\n",
"step-2": "<mask token>\n\n\nclass Bullet(pygame.sprite.Sprite):\n <mask token>\n <mask token>\n\n def hit(self, enemy):\n enemy.hp -= self.damage\n self.kill()\n\n\nclass Weapon:\n\n def __init__(self, player, kind):\n self.kind = kind\n self.ability = None\n self.player = player\n if self.kind == 'Green laser gun':\n self.damage = 2\n self.price = 0\n elif self.kind == 'Purple laser gun':\n self.damage = 4\n self.price = 50\n elif self.kind == 'Plasma gun':\n self.damage = 8\n self.price = 150\n self.ability = 'Rage'\n\n def shoot(self, enemy_sprites):\n bullet = Bullet(enemy_sprites, self.player.rect.x, self.damage,\n self.kind)\n\n\nclass Player(pygame.sprite.Sprite):\n\n def __init__(self, group):\n super().__init__(group)\n self.weapon = Weapon(self, 'Green laser gun')\n self.image = load_image('player.jpg', -1)\n self.rect = self.image.get_rect()\n self.coords = self.rect.x, self.rect.y = 75, 635\n self.mask = pygame.mask.from_surface(self.image)\n\n def shoot(self, enemy_sprites):\n self.weapon.shoot(enemy_sprites)\n\n def move(self, side):\n x = self.rect.x\n if x < 630 and side == 'right':\n x += 70\n if x > 35 and side == 'left':\n x -= 70\n self.rect.x = x\n\n\nclass Enemy(pygame.sprite.Sprite):\n global enemies_count, MiniG_rate, EnemyG_rate, MetalM_rate\n\n def __init__(self, group):\n super().__init__(group)\n if enemies_count >= 30 and enemies_count % MetalM_rate == 0:\n self.type = 'MM'\n self.hp = 24\n self.image = pygame.transform.scale(load_image('Metal_Man.png',\n -1), (120, 140))\n self.rect = self.image.get_rect()\n self.coords = self.rect.x, self.rect.y = random.randrange(10, \n 560, 70), 140\n self.mask = pygame.mask.from_surface(self.image)\n elif enemies_count >= 15 and enemies_count % EnemyG_rate == 0:\n self.type = 'EG'\n self.hp = 6\n self.image = pygame.transform.scale(load_image(\n 'Enemy_glider.png', -1), (70, 70))\n self.rect = self.image.get_rect()\n self.coords = self.rect.x, self.rect.y = random.randrange(0, \n 700, 70), 140\n 
self.mask = pygame.mask.from_surface(self.image)\n else:\n self.type = 'MG'\n self.hp = 4\n self.image = pygame.transform.scale(load_image(\n 'Mini_glider.png', -1), (70, 70))\n self.rect = self.image.get_rect()\n self.coords = self.rect.x, self.rect.y = random.randrange(0, \n 700, 70), 140\n self.mask = pygame.mask.from_surface(self.image)\n\n def death_check(self):\n global killed, score, coins, FPS\n if self.hp <= 0:\n killed += 1\n if self.type == 'MM':\n score += 30\n coins += 15\n FPS += 10\n elif self.type == 'EG':\n score += 15\n coins += 5\n elif self.type == 'MG':\n score += 10\n coins += 2\n self.kill()\n\n def move(self):\n self.rect.y += 1\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass Board:\n <mask token>\n <mask token>\n <mask token>\n\n\nclass Bullet(pygame.sprite.Sprite):\n\n def __init__(self, enemy_sprites, x, damage, kind):\n super().__init__(bullet_sprites)\n self.damage = damage\n if kind == 'Green laser gun':\n self.image = load_image('green.png', -1)\n elif kind == 'Purple laser gun':\n self.image = load_image('purple.png', -1)\n elif kind == 'Plasma gun':\n self.image = pygame.transform.scale(load_image('plasma.png', -1\n ), (25, 25))\n self.rect = self.image.get_rect()\n self.coords = self.rect.x, self.rect.y = x + 30, 665\n self.mask = pygame.mask.from_surface(self.image)\n self.fly(enemy_sprites)\n\n def fly(self, enemy_sprites):\n if self.rect.y >= 140:\n self.rect.y -= 1\n for enemy in enemy_sprites:\n if pygame.sprite.collide_mask(enemy, self):\n self.hit(enemy)\n else:\n self.kill()\n\n def hit(self, enemy):\n enemy.hp -= self.damage\n self.kill()\n\n\nclass Weapon:\n\n def __init__(self, player, kind):\n self.kind = kind\n self.ability = None\n self.player = player\n if self.kind == 'Green laser gun':\n self.damage = 2\n self.price = 0\n elif self.kind == 'Purple laser gun':\n self.damage = 4\n self.price = 50\n elif self.kind == 'Plasma gun':\n self.damage = 8\n self.price = 150\n self.ability = 'Rage'\n\n def shoot(self, enemy_sprites):\n bullet = Bullet(enemy_sprites, self.player.rect.x, self.damage,\n self.kind)\n\n\nclass Player(pygame.sprite.Sprite):\n\n def __init__(self, group):\n super().__init__(group)\n self.weapon = Weapon(self, 'Green laser gun')\n self.image = load_image('player.jpg', -1)\n self.rect = self.image.get_rect()\n self.coords = self.rect.x, self.rect.y = 75, 635\n self.mask = pygame.mask.from_surface(self.image)\n\n def shoot(self, enemy_sprites):\n self.weapon.shoot(enemy_sprites)\n\n def move(self, side):\n x = self.rect.x\n if x < 630 and side == 'right':\n x += 70\n if x > 35 and side == 'left':\n x -= 70\n self.rect.x = x\n\n\nclass 
Enemy(pygame.sprite.Sprite):\n global enemies_count, MiniG_rate, EnemyG_rate, MetalM_rate\n\n def __init__(self, group):\n super().__init__(group)\n if enemies_count >= 30 and enemies_count % MetalM_rate == 0:\n self.type = 'MM'\n self.hp = 24\n self.image = pygame.transform.scale(load_image('Metal_Man.png',\n -1), (120, 140))\n self.rect = self.image.get_rect()\n self.coords = self.rect.x, self.rect.y = random.randrange(10, \n 560, 70), 140\n self.mask = pygame.mask.from_surface(self.image)\n elif enemies_count >= 15 and enemies_count % EnemyG_rate == 0:\n self.type = 'EG'\n self.hp = 6\n self.image = pygame.transform.scale(load_image(\n 'Enemy_glider.png', -1), (70, 70))\n self.rect = self.image.get_rect()\n self.coords = self.rect.x, self.rect.y = random.randrange(0, \n 700, 70), 140\n self.mask = pygame.mask.from_surface(self.image)\n else:\n self.type = 'MG'\n self.hp = 4\n self.image = pygame.transform.scale(load_image(\n 'Mini_glider.png', -1), (70, 70))\n self.rect = self.image.get_rect()\n self.coords = self.rect.x, self.rect.y = random.randrange(0, \n 700, 70), 140\n self.mask = pygame.mask.from_surface(self.image)\n\n def death_check(self):\n global killed, score, coins, FPS\n if self.hp <= 0:\n killed += 1\n if self.type == 'MM':\n score += 30\n coins += 15\n FPS += 10\n elif self.type == 'EG':\n score += 15\n coins += 5\n elif self.type == 'MG':\n score += 10\n coins += 2\n self.kill()\n\n def move(self):\n self.rect.y += 1\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\ndef info_print():\n global score, killed, coins\n font = pygame.font.Font(None, 30)\n text_coord = 2\n pygame.draw.rect(screen, (100, 100, 100), (0, 0, 200, 100), 3)\n pygame.draw.rect(screen, (150, 150, 150), (3, 3, 194, 94), 3)\n pygame.draw.rect(screen, (250, 250, 250), (5, 5, 190, 90))\n text = [f'Счёт: {score}', f'Убито: {killed}', f'Монеты: {coins}']\n for line in text:\n string_rendered = font.render(line, 1, (50, 50, 50))\n intro_rect = string_rendered.get_rect()\n text_coord += 10\n intro_rect.top = text_coord\n intro_rect.x = 10\n text_coord += intro_rect.height\n screen.blit(string_rendered, intro_rect)\n\n\nclass Board:\n\n def __init__(self, screen, width, height):\n self.width = width\n self.height = height\n self.board = [([0] * width) for _ in range(height)]\n self.left = 0\n self.top = 0\n self.cell_size = 70\n self.screen = screen\n\n def set_view(self, left, top, cell_size):\n self.left = left\n self.top = top\n self.cell_size = cell_size\n\n def render(self):\n tp, pp = [[0, 140], [17, 105], [35, 140]], [[17, 105], [35, 140], [\n 52, 105]]\n for y in range(self.height):\n for x in range(self.width):\n if y >= 2:\n pygame.draw.rect(self.screen, (100, 100, 100), (x *\n self.cell_size, y * self.cell_size, self.cell_size,\n self.cell_size), 1)\n pygame.draw.rect(self.screen, (150, 150, 150), (x *\n self.cell_size + 1, y * self.cell_size + 1, self.\n cell_size - 2, self.cell_size - 2), 2)\n pygame.draw.rect(self.screen, (250, 250, 250), (x *\n self.cell_size + 3, y * self.cell_size + 3, self.\n cell_size - 4, self.cell_size - 4))\n for i in range(self.width * 2 - 1):\n pygame.draw.polygon(screen, (0, 230, 200), pp)\n pp[0][1] += 2\n pp[0][0] += 4\n pp[1][1] -= 3\n pp[2][1] += 2\n pp[2][0] -= 4\n pygame.draw.polygon(screen, (0, 125, 200), pp)\n pp[0][1] += 4\n pp[0][0] += 6\n pp[1][1] -= 7\n pp[2][1] += 4\n pp[2][0] -= 6\n pygame.draw.polygon(screen, (0, 230, 200), pp)\n pp[0][1] -= 6\n pp[0][0] -= 10\n pp[1][1] += 10\n 
pp[2][1] -= 6\n pp[2][0] += 10\n for point in pp:\n point[0] += 35\n for i in range(self.width * 2):\n pygame.draw.polygon(screen, (100, 100, 100), tp)\n tp[0][1] -= 2\n tp[0][0] += 4\n tp[1][1] += 4\n tp[2][1] -= 2\n tp[2][0] -= 4\n pygame.draw.polygon(screen, (150, 150, 150), tp)\n tp[0][1] -= 2\n tp[0][0] += 4\n tp[1][1] += 4\n tp[2][1] -= 2\n tp[2][0] -= 4\n pygame.draw.polygon(screen, (250, 250, 250), tp)\n tp[0][1] += 4\n tp[0][0] -= 8\n tp[1][1] -= 8\n tp[2][1] += 4\n tp[2][0] += 8\n for point in tp:\n point[0] += 35\n\n\nclass Bullet(pygame.sprite.Sprite):\n\n def __init__(self, enemy_sprites, x, damage, kind):\n super().__init__(bullet_sprites)\n self.damage = damage\n if kind == 'Green laser gun':\n self.image = load_image('green.png', -1)\n elif kind == 'Purple laser gun':\n self.image = load_image('purple.png', -1)\n elif kind == 'Plasma gun':\n self.image = pygame.transform.scale(load_image('plasma.png', -1\n ), (25, 25))\n self.rect = self.image.get_rect()\n self.coords = self.rect.x, self.rect.y = x + 30, 665\n self.mask = pygame.mask.from_surface(self.image)\n self.fly(enemy_sprites)\n\n def fly(self, enemy_sprites):\n if self.rect.y >= 140:\n self.rect.y -= 1\n for enemy in enemy_sprites:\n if pygame.sprite.collide_mask(enemy, self):\n self.hit(enemy)\n else:\n self.kill()\n\n def hit(self, enemy):\n enemy.hp -= self.damage\n self.kill()\n\n\nclass Weapon:\n\n def __init__(self, player, kind):\n self.kind = kind\n self.ability = None\n self.player = player\n if self.kind == 'Green laser gun':\n self.damage = 2\n self.price = 0\n elif self.kind == 'Purple laser gun':\n self.damage = 4\n self.price = 50\n elif self.kind == 'Plasma gun':\n self.damage = 8\n self.price = 150\n self.ability = 'Rage'\n\n def shoot(self, enemy_sprites):\n bullet = Bullet(enemy_sprites, self.player.rect.x, self.damage,\n self.kind)\n\n\nclass Player(pygame.sprite.Sprite):\n\n def __init__(self, group):\n super().__init__(group)\n self.weapon = Weapon(self, 'Green laser 
gun')\n self.image = load_image('player.jpg', -1)\n self.rect = self.image.get_rect()\n self.coords = self.rect.x, self.rect.y = 75, 635\n self.mask = pygame.mask.from_surface(self.image)\n\n def shoot(self, enemy_sprites):\n self.weapon.shoot(enemy_sprites)\n\n def move(self, side):\n x = self.rect.x\n if x < 630 and side == 'right':\n x += 70\n if x > 35 and side == 'left':\n x -= 70\n self.rect.x = x\n\n\nclass Enemy(pygame.sprite.Sprite):\n global enemies_count, MiniG_rate, EnemyG_rate, MetalM_rate\n\n def __init__(self, group):\n super().__init__(group)\n if enemies_count >= 30 and enemies_count % MetalM_rate == 0:\n self.type = 'MM'\n self.hp = 24\n self.image = pygame.transform.scale(load_image('Metal_Man.png',\n -1), (120, 140))\n self.rect = self.image.get_rect()\n self.coords = self.rect.x, self.rect.y = random.randrange(10, \n 560, 70), 140\n self.mask = pygame.mask.from_surface(self.image)\n elif enemies_count >= 15 and enemies_count % EnemyG_rate == 0:\n self.type = 'EG'\n self.hp = 6\n self.image = pygame.transform.scale(load_image(\n 'Enemy_glider.png', -1), (70, 70))\n self.rect = self.image.get_rect()\n self.coords = self.rect.x, self.rect.y = random.randrange(0, \n 700, 70), 140\n self.mask = pygame.mask.from_surface(self.image)\n else:\n self.type = 'MG'\n self.hp = 4\n self.image = pygame.transform.scale(load_image(\n 'Mini_glider.png', -1), (70, 70))\n self.rect = self.image.get_rect()\n self.coords = self.rect.x, self.rect.y = random.randrange(0, \n 700, 70), 140\n self.mask = pygame.mask.from_surface(self.image)\n\n def death_check(self):\n global killed, score, coins, FPS\n if self.hp <= 0:\n killed += 1\n if self.type == 'MM':\n score += 30\n coins += 15\n FPS += 10\n elif self.type == 'EG':\n score += 15\n coins += 5\n elif self.type == 'MG':\n score += 10\n coins += 2\n self.kill()\n\n def move(self):\n self.rect.y += 1\n\n\ndef game_over():\n global FPS, not_paused, score, killed, coins\n\n def text_print():\n game_over = ' GAME OVER'\n 
intro_text = ['', 'Нажми клавишу A', 'чтобы сыграть еще раз', '',\n 'Нажми на кнопку \"Esc\", ', 'чтобы выйти из игры',\n f'Счёт: {score}', f'Убито: {killed}', f'Монеты: {coins}']\n fon = pygame.transform.scale(load_image('fon.jpg'), (width, height))\n screen.blit(fon, (0, 0))\n font = pygame.font.Font(None, 50)\n text_coord = 40\n string_rendered = font.render(game_over, 1, pygame.Color('white'))\n intro_rect = string_rendered.get_rect()\n text_coord += 10\n intro_rect.top = text_coord\n intro_rect.x = 10\n text_coord += intro_rect.height\n screen.blit(string_rendered, intro_rect)\n font = pygame.font.Font(None, 30)\n for line in intro_text:\n string_rendered = font.render(line, 1, pygame.Color('white'))\n intro_rect = string_rendered.get_rect()\n text_coord += 10\n intro_rect.top = text_coord\n intro_rect.x = 10\n text_coord += intro_rect.height\n intro_rect.x += 10\n screen.blit(string_rendered, intro_rect)\n FPS = 30\n pygame.mouse.set_visible(True)\n text_print()\n while True:\n for event in pygame.event.get():\n if event.type == pygame.QUIT:\n terminate()\n elif event.type == pygame.KEYDOWN or event.type == pygame.MOUSEBUTTONDOWN:\n if event.type == pygame.KEYDOWN:\n if event.key == pygame.K_ESCAPE:\n terminate()\n if event.key == 97:\n pygame.quit()\n subprocess.call('python' + ' проект.py', shell=True)\n if not_paused:\n pygame.display.flip()\n clock.tick(FPS)\n terminate()\n\n\ndef terminate():\n pygame.quit()\n sys.exit()\n\n\n<mask token>\n",
"step-5": "import sys\r\nimport pygame\r\nimport os\r\nimport random\r\nimport subprocess\r\n\r\nFPS, NEWENEMYSPAWN, fst_spawn, not_paused, coins, enemies_count, killed, score = 50, 30, 2000, True, 0, 0, 0, 0\r\nMiniG_rate, EnemyG_rate, MetalM_rate = 1, 5, 15\r\nWEAPONS_LIST = ['Green laser gun', 'Purple laser gun', 'Plasma gun']\r\n\r\n\r\ndef load_image(name, colorkey=None):\r\n fullname = os.path.join('data', name)\r\n image = pygame.image.load(fullname).convert()\r\n if colorkey is not None:\r\n if colorkey == -1:\r\n colorkey = image.get_at((0, 0))\r\n image.set_colorkey(colorkey)\r\n else:\r\n image = image.convert_alpha()\r\n return image\r\n\r\n\r\ndef info_print():\r\n global score, killed, coins\r\n\r\n font = pygame.font.Font(None, 30)\r\n text_coord = 2\r\n pygame.draw.rect(screen, (100, 100, 100), (0, 0, 200, 100), 3)\r\n pygame.draw.rect(screen, (150, 150, 150), (3, 3, 194, 94), 3)\r\n pygame.draw.rect(screen, (250, 250, 250), (5, 5, 190, 90))\r\n text = [f'Счёт: {score}',\r\n f'Убито: {killed}',\r\n f'Монеты: {coins}']\r\n for line in text:\r\n string_rendered = font.render(line, 1, (50, 50, 50))\r\n intro_rect = string_rendered.get_rect()\r\n text_coord += 10\r\n intro_rect.top = text_coord\r\n intro_rect.x = 10\r\n text_coord += intro_rect.height\r\n screen.blit(string_rendered, intro_rect)\r\n\r\n\r\nclass Board:\r\n\r\n def __init__(self, screen, width, height):\r\n self.width = width\r\n self.height = height\r\n self.board = [[0] * width for _ in range(height)]\r\n self.left = 0\r\n self.top = 0\r\n self.cell_size = 70\r\n self.screen = screen\r\n\r\n def set_view(self, left, top, cell_size):\r\n self.left = left\r\n self.top = top\r\n self.cell_size = cell_size\r\n\r\n def render(self):\r\n tp, pp = [[0, 140], [17, 105], [35, 140]], [[17, 105], [35, 140], [52, 105]]\r\n for y in range(self.height):\r\n for x in range(self.width):\r\n if y >= 2:\r\n pygame.draw.rect(self.screen, (100, 100, 100), (\r\n x * self.cell_size, y * self.cell_size, 
self.cell_size, self.cell_size),\r\n 1)\r\n pygame.draw.rect(self.screen, (150, 150, 150), (\r\n x * self.cell_size + 1, y * self.cell_size + 1, self.cell_size - 2,\r\n self.cell_size - 2), 2)\r\n pygame.draw.rect(self.screen, (250, 250, 250), (\r\n x * self.cell_size + 3, y * self.cell_size + 3, self.cell_size - 4,\r\n self.cell_size - 4))\r\n for i in range(self.width * 2 - 1):\r\n pygame.draw.polygon(screen, (0, 230, 200), pp)\r\n pp[0][1] += 2\r\n pp[0][0] += 4\r\n pp[1][1] -= 3\r\n pp[2][1] += 2\r\n pp[2][0] -= 4\r\n pygame.draw.polygon(screen, (0, 125, 200), pp)\r\n pp[0][1] += 4\r\n pp[0][0] += 6\r\n pp[1][1] -= 7\r\n pp[2][1] += 4\r\n pp[2][0] -= 6\r\n pygame.draw.polygon(screen, (0, 230, 200), pp)\r\n pp[0][1] -= 6\r\n pp[0][0] -= 10\r\n pp[1][1] += 10\r\n pp[2][1] -= 6\r\n pp[2][0] += 10\r\n for point in pp:\r\n point[0] += 35\r\n for i in range(self.width * 2):\r\n pygame.draw.polygon(screen, (100, 100, 100), tp)\r\n tp[0][1] -= 2\r\n tp[0][0] += 4\r\n tp[1][1] += 4\r\n tp[2][1] -= 2\r\n tp[2][0] -= 4\r\n pygame.draw.polygon(screen, (150, 150, 150), tp)\r\n tp[0][1] -= 2\r\n tp[0][0] += 4\r\n tp[1][1] += 4\r\n tp[2][1] -= 2\r\n tp[2][0] -= 4\r\n pygame.draw.polygon(screen, (250, 250, 250), tp)\r\n tp[0][1] += 4\r\n tp[0][0] -= 8\r\n tp[1][1] -= 8\r\n tp[2][1] += 4\r\n tp[2][0] += 8\r\n for point in tp:\r\n point[0] += 35\r\n\r\n\r\nclass Bullet(pygame.sprite.Sprite):\r\n\r\n def __init__(self, enemy_sprites, x, damage, kind):\r\n super().__init__(bullet_sprites)\r\n self.damage = damage\r\n if kind == 'Green laser gun':\r\n self.image = load_image(\"green.png\", -1)\r\n elif kind == 'Purple laser gun':\r\n self.image = load_image(\"purple.png\", -1)\r\n elif kind == 'Plasma gun':\r\n self.image = pygame.transform.scale(load_image(\"plasma.png\", -1), (25, 25))\r\n self.rect = self.image.get_rect()\r\n self.coords = self.rect.x, self.rect.y = x + 30, 665\r\n self.mask = pygame.mask.from_surface(self.image)\r\n self.fly(enemy_sprites)\r\n\r\n def fly(self, 
enemy_sprites):\r\n if self.rect.y >= 140:\r\n self.rect.y -= 1\r\n for enemy in enemy_sprites:\r\n if pygame.sprite.collide_mask(enemy, self):\r\n self.hit(enemy)\r\n else:\r\n self.kill()\r\n\r\n def hit(self, enemy):\r\n enemy.hp -= self.damage\r\n self.kill()\r\n\r\n\r\nclass Weapon:\r\n\r\n def __init__(self, player, kind):\r\n self.kind = kind\r\n self.ability = None\r\n self.player = player\r\n if self.kind == 'Green laser gun':\r\n self.damage = 2\r\n self.price = 0\r\n elif self.kind == 'Purple laser gun':\r\n self.damage = 4\r\n self.price = 50\r\n elif self.kind == 'Plasma gun':\r\n self.damage = 8\r\n self.price = 150\r\n self.ability = 'Rage'\r\n\r\n def shoot(self, enemy_sprites):\r\n bullet = Bullet(enemy_sprites, self.player.rect.x, self.damage, self.kind)\r\n\r\n\r\nclass Player(pygame.sprite.Sprite):\r\n\r\n def __init__(self, group):\r\n super().__init__(group)\r\n self.weapon = Weapon(self, 'Green laser gun')\r\n self.image = load_image(\"player.jpg\", -1)\r\n self.rect = self.image.get_rect()\r\n self.coords = self.rect.x, self.rect.y = 75, 635\r\n self.mask = pygame.mask.from_surface(self.image)\r\n\r\n def shoot(self, enemy_sprites):\r\n self.weapon.shoot(enemy_sprites)\r\n\r\n def move(self, side):\r\n x = self.rect.x\r\n if x < 630 and side == 'right':\r\n x += 70\r\n if x > 35 and side == 'left':\r\n x -= 70\r\n self.rect.x = x\r\n\r\n\r\nclass Enemy(pygame.sprite.Sprite):\r\n global enemies_count, MiniG_rate, EnemyG_rate, MetalM_rate\r\n\r\n def __init__(self, group):\r\n super().__init__(group)\r\n if enemies_count >= 30 and enemies_count % MetalM_rate == 0:\r\n self.type = 'MM'\r\n self.hp = 24\r\n self.image = pygame.transform.scale(load_image(\"Metal_Man.png\", -1), (120, 140))\r\n self.rect = self.image.get_rect()\r\n self.coords = self.rect.x, self.rect.y = random.randrange(10, 560, 70), 140\r\n self.mask = pygame.mask.from_surface(self.image)\r\n elif enemies_count >= 15 and enemies_count % EnemyG_rate == 0:\r\n self.type = 
'EG'\r\n self.hp = 6\r\n self.image = pygame.transform.scale(load_image('Enemy_glider.png', -1), (70, 70))\r\n self.rect = self.image.get_rect()\r\n self.coords = self.rect.x, self.rect.y = random.randrange(0, 700, 70), 140\r\n self.mask = pygame.mask.from_surface(self.image)\r\n else:\r\n self.type = 'MG'\r\n self.hp = 4\r\n self.image = pygame.transform.scale(load_image('Mini_glider.png', -1), (70, 70))\r\n self.rect = self.image.get_rect()\r\n self.coords = self.rect.x, self.rect.y = random.randrange(0, 700, 70), 140\r\n self.mask = pygame.mask.from_surface(self.image)\r\n\r\n def death_check(self):\r\n global killed, score, coins, FPS\r\n\r\n if self.hp <= 0:\r\n killed += 1\r\n if self.type == 'MM':\r\n score += 30\r\n coins += 15\r\n FPS += 10\r\n elif self.type == 'EG':\r\n score += 15\r\n coins += 5\r\n elif self.type == 'MG':\r\n score += 10\r\n coins += 2\r\n self.kill()\r\n\r\n def move(self):\r\n self.rect.y += 1\r\n\r\n\r\ndef game_over():\r\n global FPS, not_paused, score, killed, coins\r\n\r\n def text_print():\r\n game_over = ' GAME OVER'\r\n intro_text = [\"\",\r\n \"Нажми клавишу A\",\r\n \"чтобы сыграть еще раз\",\r\n '',\r\n 'Нажми на кнопку \"Esc\", ',\r\n 'чтобы выйти из игры',\r\n f'Счёт: {score}',\r\n f'Убито: {killed}',\r\n f'Монеты: {coins}']\r\n\r\n fon = pygame.transform.scale(load_image('fon.jpg'), (width, height))\r\n screen.blit(fon, (0, 0))\r\n font = pygame.font.Font(None, 50)\r\n text_coord = 40\r\n string_rendered = font.render(game_over, 1, pygame.Color('white'))\r\n intro_rect = string_rendered.get_rect()\r\n text_coord += 10\r\n intro_rect.top = text_coord\r\n intro_rect.x = 10\r\n text_coord += intro_rect.height\r\n screen.blit(string_rendered, intro_rect)\r\n font = pygame.font.Font(None, 30)\r\n for line in intro_text:\r\n string_rendered = font.render(line, 1, pygame.Color('white'))\r\n intro_rect = string_rendered.get_rect()\r\n text_coord += 10\r\n intro_rect.top = text_coord\r\n intro_rect.x = 10\r\n text_coord += 
intro_rect.height\r\n intro_rect.x += 10\r\n screen.blit(string_rendered, intro_rect)\r\n\r\n FPS = 30\r\n pygame.mouse.set_visible(True)\r\n text_print()\r\n while True:\r\n for event in pygame.event.get():\r\n if event.type == pygame.QUIT:\r\n terminate()\r\n elif event.type == pygame.KEYDOWN or event.type == pygame.MOUSEBUTTONDOWN:\r\n if event.type == pygame.KEYDOWN:\r\n if event.key == pygame.K_ESCAPE:\r\n terminate()\r\n if event.key == 97:\r\n pygame.quit()\r\n subprocess.call(\"python\" + \" проект.py\", shell=True)\r\n if not_paused:\r\n pygame.display.flip()\r\n clock.tick(FPS)\r\n terminate()\r\n\r\n\r\ndef terminate():\r\n pygame.quit()\r\n sys.exit()\r\n\r\n\r\ndef start_screen(screen, width, height):\r\n global FPS, not_paused\r\n\r\n def text_print():\r\n intro_text = [\" SPACE SOLDIER\", \"\",\r\n \" Нажми любую клавишу,\",\r\n \" чтобы начать игру\",\r\n ' Нажимай на кнопки стрелок, чтобы перемещать персонажа',\r\n ' Не дай врагу пролететь мимо тебя!',\r\n ' Нажми на кнопку \"Esc\", ',\r\n ' чтобы открыть меню паузы',\r\n ' или попасть в магазин']\r\n\r\n fon = pygame.transform.scale(load_image('fon.jpg'), (width, height))\r\n font = pygame.font.Font(None, 30)\r\n text_coord = 50\r\n screen.blit(fon, (0, 0))\r\n for line in intro_text:\r\n string_rendered = font.render(line, 1, pygame.Color('black'))\r\n intro_rect = string_rendered.get_rect()\r\n text_coord += 10\r\n intro_rect.top = text_coord\r\n intro_rect.x = 10\r\n text_coord += intro_rect.height\r\n screen.blit(string_rendered, intro_rect)\r\n\r\n pygame.mouse.set_visible(True)\r\n text_print()\r\n while True:\r\n for event in pygame.event.get():\r\n if event.type == pygame.QUIT:\r\n terminate()\r\n elif event.type == pygame.KEYDOWN or event.type == pygame.MOUSEBUTTONDOWN:\r\n if event.type == pygame.KEYDOWN:\r\n if event.key == pygame.K_ESCAPE:\r\n pause_menu(screen, width, height)\r\n text_print()\r\n else:\r\n pygame.mouse.set_visible(False)\r\n return\r\n if not_paused:\r\n 
pygame.display.flip()\r\n clock.tick(FPS)\r\n terminate()\r\n\r\n\r\ndef pause_menu(screen, width, height):\r\n global FPS, not_paused\r\n\r\n def text_print():\r\n intro_text = [\"Нажми на кнопку 'S',\",\r\n \"чтобы открыть магазин\",\r\n '',\r\n \"Нажми на кнопку 'C',\",\r\n \"чтобы продолжжить игру\",\r\n '',\r\n \"УПРАВЛЕНИЕ\",\r\n '',\r\n 'Нажимай на кнопки стрелок, чтобы перемещать персонажа',\r\n '',\r\n 'Не дай врагу пролететь мимо тебя!',\r\n '',\r\n 'Нажми на кнопку \"Esc\", ',\r\n 'чтобы закрыть меню паузы']\r\n\r\n fon = pygame.transform.scale(load_image('fon.jpg'), (width, height))\r\n font = pygame.font.Font(None, 30)\r\n text_coord = 50\r\n screen.blit(fon, (0, 0))\r\n for line in intro_text:\r\n string_rendered = font.render(line, 1, pygame.Color('black'))\r\n intro_rect = string_rendered.get_rect()\r\n text_coord += 10\r\n intro_rect.top = text_coord\r\n intro_rect.x = 10\r\n text_coord += intro_rect.height\r\n screen.blit(string_rendered, intro_rect)\r\n\r\n pygame.mouse.set_visible(True)\r\n fon = pygame.transform.scale(load_image('fon.jpg'), (width, height))\r\n screen.blit(fon, (0, 0))\r\n text_print()\r\n while True:\r\n for event in pygame.event.get():\r\n if event.type == pygame.QUIT:\r\n terminate()\r\n if event.type == pygame.KEYDOWN:\r\n if event.key == pygame.K_ESCAPE:\r\n not_paused = True\r\n pygame.mouse.set_visible(False)\r\n return\r\n if event.key == 115:\r\n shop(screen, width, height)\r\n if event.key == 99:\r\n return\r\n pygame.display.flip()\r\n clock.tick(FPS)\r\n terminate()\r\n\r\n\r\ndef shop(screen, width, height):\r\n global FPS, not_paused, WEAPONS_LIST, coins\r\n\r\n def text_print():\r\n intro_text = [\" Нажми на кнопку 'U',\",\r\n \"чтобы улучшить свое оружие\",\r\n 'Нажми на кнопку \"Esc\", ',\r\n 'чтобы выйти из магазина', '',\r\n 'Текущее оружие:',\r\n f'{player.weapon.kind}',\r\n 'Наносимый урон:',\r\n f'{player.weapon.damage}',\r\n 'Следующее улучшение:',\r\n f'{next_weapon}',\r\n 'Урон:',\r\n 
f'{next_damage}',\r\n 'Стоимость:',\r\n f'{next_price}',\r\n 'Ваши монеты:',\r\n f'{coins}']\r\n\r\n fon = pygame.transform.scale(load_image('fon.jpg'), (width, height))\r\n font = pygame.font.Font(None, 30)\r\n text_coord = 50\r\n screen.blit(fon, (0, 0))\r\n for line in intro_text:\r\n string_rendered = font.render(line, 1, pygame.Color('black'))\r\n intro_rect = string_rendered.get_rect()\r\n text_coord += 10\r\n intro_rect.top = text_coord\r\n intro_rect.x = 10\r\n text_coord += intro_rect.height\r\n screen.blit(string_rendered, intro_rect)\r\n\r\n if player.weapon.kind != 'Plasma gun':\r\n next_weapon = WEAPONS_LIST[WEAPONS_LIST.index(player.weapon.kind) + 1]\r\n if next_weapon == 'Purple laser gun':\r\n next_damage = 4\r\n next_price = 50\r\n else:\r\n next_damage = 6\r\n next_price = 150\r\n else:\r\n next_weapon = 'Вы имеете лучшее оружие'\r\n next_damage = 'Наносимый урон максимальный'\r\n next_price = 'Покупать больше нечего'\r\n\r\n pygame.mouse.set_visible(True)\r\n fon = pygame.transform.scale(load_image('fon.jpg'), (width, height))\r\n screen.blit(fon, (0, 0))\r\n text_print()\r\n while True:\r\n for event in pygame.event.get():\r\n if event.type == pygame.QUIT:\r\n terminate()\r\n if event.type == pygame.KEYDOWN:\r\n if event.key == pygame.K_ESCAPE:\r\n pygame.mouse.set_visible(False)\r\n screen.blit(fon, (0, 0))\r\n return\r\n if event.key == 117 and player.weapon.kind != 'Plasma gun' and coins >= next_price:\r\n coins -= next_price\r\n player.weapon = Weapon(player, WEAPONS_LIST[WEAPONS_LIST.index(player.weapon.kind) + 1])\r\n pygame.display.flip()\r\n clock.tick(FPS)\r\n terminate()\r\n\r\n\r\npygame.init()\r\nsize = width, height = 700, 700\r\nscreen = pygame.display.set_mode(size)\r\npygame.display.set_caption('SPACE SOLDIER')\r\npygame.display.set_icon(load_image(\"icon.png\", -1))\r\nfon1 = pygame.transform.scale(load_image('fon1.png'), (700, 400))\r\nboard = Board(screen, 10, 10)\r\npygame.mouse.set_visible(True)\r\nenemy_sprites = 
pygame.sprite.Group()\r\nplayer_sprites = pygame.sprite.Group()\r\nbullet_sprites = pygame.sprite.Group()\r\nplayer = Player(player_sprites)\r\nenemy_li = [Enemy(enemy_sprites)]\r\nclock = pygame.time.Clock()\r\nstart_screen(screen, width, height)\r\npygame.time.set_timer(NEWENEMYSPAWN, fst_spawn)\r\nwhile True:\r\n for event in pygame.event.get():\r\n if event.type == pygame.QUIT:\r\n terminate()\r\n if event.type == pygame.KEYDOWN:\r\n if event.key == pygame.K_ESCAPE:\r\n not_paused = False\r\n pause_menu(screen, width, height)\r\n if not_paused:\r\n if event.key == 275:\r\n player.move('right')\r\n elif event.key == 276:\r\n player.move('left')\r\n if event.key == 115:\r\n player.shoot(enemy_sprites)\r\n if not_paused and event.type == NEWENEMYSPAWN:\r\n enemy_li.append(Enemy(enemy_sprites))\r\n enemies_count += 1\r\n\r\n if not_paused:\r\n screen.blit(fon1, (0, 0))\r\n board.render()\r\n player_sprites.draw(screen)\r\n enemy_sprites.draw(screen)\r\n bullet_sprites.draw(screen)\r\n for enemy in enemy_sprites:\r\n if enemy.type != 'MM':\r\n lim = 630\r\n else:\r\n lim = 560\r\n if enemy.rect.y <= lim:\r\n enemy.move()\r\n else:\r\n game_over()\r\n for bullet in bullet_sprites:\r\n bullet.fly(enemy_sprites)\r\n enemy.death_check()\r\n info_print()\r\n pygame.display.flip()\r\n clock.tick(FPS)\r\nterminate()\r\n",
"step-ids": [
7,
13,
16,
22,
30
]
}
|
[
7,
13,
16,
22,
30
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
print(' whats your name boi ?')
<|reserved_special_token_0|>
if name == 'arrya':
print('u are a boi')
elif name == 'jon':
print('basterd')
elif name == 'ned':
print('you are dead man')
elif name == 'rob':
print('the king in the north')
else:
print('carry on')
<|reserved_special_token_1|>
print(' whats your name boi ?')
name = input()
if name == 'arrya':
print('u are a boi')
elif name == 'jon':
print('basterd')
elif name == 'ned':
print('you are dead man')
elif name == 'rob':
print('the king in the north')
else:
print('carry on')
<|reserved_special_token_1|>
print(" whats your name boi ?")
name = input();
if name == "arrya":
print("u are a boi");
elif name == "jon":
print("basterd")
elif name == "ned":
print("you are dead man")
elif name == "rob":
print("the king in the north")
else:
print("carry on")
|
flexible
|
{
"blob_id": "483a5e95a7bfca2cc6b1e7e81740620468fb5623",
"index": 9646,
"step-1": "<mask token>\n",
"step-2": "print(' whats your name boi ?')\n<mask token>\nif name == 'arrya':\n print('u are a boi')\nelif name == 'jon':\n print('basterd')\nelif name == 'ned':\n print('you are dead man')\nelif name == 'rob':\n print('the king in the north')\nelse:\n print('carry on')\n",
"step-3": "print(' whats your name boi ?')\nname = input()\nif name == 'arrya':\n print('u are a boi')\nelif name == 'jon':\n print('basterd')\nelif name == 'ned':\n print('you are dead man')\nelif name == 'rob':\n print('the king in the north')\nelse:\n print('carry on')\n",
"step-4": "print(\" whats your name boi ?\")\r\nname = input();\r\nif name == \"arrya\":\r\n print(\"u are a boi\");\r\nelif name == \"jon\":\r\n print(\"basterd\")\r\nelif name == \"ned\":\r\n print(\"you are dead man\")\r\nelif name == \"rob\":\r\n print(\"the king in the north\")\r\nelse:\r\n print(\"carry on\")\r\n\r\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
#!/usr/bin/python
# -*- coding: UTF-8 -*-
from connect import Connect
class Resource:
def __init__(self, row: tuple):
self.video_path = row[0]
self.pic_path = row[1]
|
normal
|
{
"blob_id": "65aa27addaec6014fe5fd66df2c0d3632231a314",
"index": 3124,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Resource:\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Resource:\n\n def __init__(self, row: tuple):\n self.video_path = row[0]\n self.pic_path = row[1]\n",
"step-4": "from connect import Connect\n\n\nclass Resource:\n\n def __init__(self, row: tuple):\n self.video_path = row[0]\n self.pic_path = row[1]\n",
"step-5": "#!/usr/bin/python\n# -*- coding: UTF-8 -*-\nfrom connect import Connect\n\n\nclass Resource:\n def __init__(self, row: tuple):\n self.video_path = row[0]\n self.pic_path = row[1]\n\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import cv2
import numpy as np
from matplotlib import pyplot as plt
#cargar la imagen a analizar
imagen= cv2.imread("tomate22.jpg")
#cv2.imshow("Original", imagen)
#cv2.waitKey(0)
# Convertimos en escala de grise
gris = cv2.cvtColor(imagen, cv2.COLOR_BGR2GRAY)
#cv2.imshow("En gris", gris)
#cv2.waitKey(0)
# Aplicar suavizado Gaussiano
gaussiana = cv2.GaussianBlur(gris, (3,3), 0)
#cv2.imshow("Gaussiano", gaussiana)
#cv2.waitKey(0)
#detectamos los bordes con canny
sigma=0.9
v=np.median(gaussiana)
lower=int(max(0,(1.0-sigma)*v))
upper=int(min(255,(1.0+sigma)*v))
canny = cv2.Canny(gaussiana, lower, upper)
plt.subplot(121),plt.imshow(canny,cmap = 'gray')
plt.title('Canny'), plt.xticks([]), plt.yticks([])
#cv2.imshow("Canny", canny)
#cv2.waitKey(0)
#dilatacion
#kernel = np.ones((5,5),np.uint8)
kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (5,5))
dilation = cv2.dilate(canny,kernel,iterations = 1)
#cv2.imshow("Dilatado", dilation)
#cv2.waitKey(0)
#buscamos los contornos
(_,contornos,_) = cv2.findContours(dilation.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
cv2.drawContours(imagen,contornos,-1,(255,0,0), 2)
cv2.imshow("contornos", imagen)
cv2.waitKey(0)
for x in range (len(contornos)):
#mascara
mask=np.zeros_like(imagen)
out=np.zeros_like(imagen)
cv2.drawContours(mask, [contornos[x]], 0, (255,0,0), -1) #con ese -1 al final hace que pinte todo de azul lo que esta dentro del borde
for i in range (imagen.shape[0]): #para recorrer todas las columnas .shape[0]
for j in range (imagen.shape[1]): #para recorrer todas las filas .shape[1]
if mask[i,j,0]==255:
out[i,j]=imagen[i,j]
cv2.imshow("contorno", out)
#histograma
color = ('b','g','r')
for i,col in enumerate(color):
histr = cv2.calcHist([out],[i],None,[256],[1,256])
plt.plot(histr,color = col)
plt.xlim([0,256])
plt.show()
print("Es contorno de tomate?")
c=cv2.waitKey(0) & 0xFF
if (c==ord("t") ):
print("Histograma guardado como valido")
if (c==ord("n") ):
print("Histograma guardado como no valido")
cv2.destroyAllWindows()
|
normal
|
{
"blob_id": "9f42a9d0ca622d6c4e2cf20bc2e494262c16055b",
"index": 7744,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nplt.subplot(121), plt.imshow(canny, cmap='gray')\nplt.title('Canny'), plt.xticks([]), plt.yticks([])\n<mask token>\ncv2.drawContours(imagen, contornos, -1, (255, 0, 0), 2)\ncv2.imshow('contornos', imagen)\ncv2.waitKey(0)\nfor x in range(len(contornos)):\n mask = np.zeros_like(imagen)\n out = np.zeros_like(imagen)\n cv2.drawContours(mask, [contornos[x]], 0, (255, 0, 0), -1)\n for i in range(imagen.shape[0]):\n for j in range(imagen.shape[1]):\n if mask[i, j, 0] == 255:\n out[i, j] = imagen[i, j]\n cv2.imshow('contorno', out)\n color = 'b', 'g', 'r'\n for i, col in enumerate(color):\n histr = cv2.calcHist([out], [i], None, [256], [1, 256])\n plt.plot(histr, color=col)\n plt.xlim([0, 256])\n plt.show()\n print('Es contorno de tomate?')\n c = cv2.waitKey(0) & 255\n if c == ord('t'):\n print('Histograma guardado como valido')\n if c == ord('n'):\n print('Histograma guardado como no valido')\ncv2.destroyAllWindows()\n",
"step-3": "<mask token>\nimagen = cv2.imread('tomate22.jpg')\ngris = cv2.cvtColor(imagen, cv2.COLOR_BGR2GRAY)\ngaussiana = cv2.GaussianBlur(gris, (3, 3), 0)\nsigma = 0.9\nv = np.median(gaussiana)\nlower = int(max(0, (1.0 - sigma) * v))\nupper = int(min(255, (1.0 + sigma) * v))\ncanny = cv2.Canny(gaussiana, lower, upper)\nplt.subplot(121), plt.imshow(canny, cmap='gray')\nplt.title('Canny'), plt.xticks([]), plt.yticks([])\nkernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (5, 5))\ndilation = cv2.dilate(canny, kernel, iterations=1)\n_, contornos, _ = cv2.findContours(dilation.copy(), cv2.RETR_EXTERNAL, cv2.\n CHAIN_APPROX_SIMPLE)\ncv2.drawContours(imagen, contornos, -1, (255, 0, 0), 2)\ncv2.imshow('contornos', imagen)\ncv2.waitKey(0)\nfor x in range(len(contornos)):\n mask = np.zeros_like(imagen)\n out = np.zeros_like(imagen)\n cv2.drawContours(mask, [contornos[x]], 0, (255, 0, 0), -1)\n for i in range(imagen.shape[0]):\n for j in range(imagen.shape[1]):\n if mask[i, j, 0] == 255:\n out[i, j] = imagen[i, j]\n cv2.imshow('contorno', out)\n color = 'b', 'g', 'r'\n for i, col in enumerate(color):\n histr = cv2.calcHist([out], [i], None, [256], [1, 256])\n plt.plot(histr, color=col)\n plt.xlim([0, 256])\n plt.show()\n print('Es contorno de tomate?')\n c = cv2.waitKey(0) & 255\n if c == ord('t'):\n print('Histograma guardado como valido')\n if c == ord('n'):\n print('Histograma guardado como no valido')\ncv2.destroyAllWindows()\n",
"step-4": "import cv2\nimport numpy as np\nfrom matplotlib import pyplot as plt\nimagen = cv2.imread('tomate22.jpg')\ngris = cv2.cvtColor(imagen, cv2.COLOR_BGR2GRAY)\ngaussiana = cv2.GaussianBlur(gris, (3, 3), 0)\nsigma = 0.9\nv = np.median(gaussiana)\nlower = int(max(0, (1.0 - sigma) * v))\nupper = int(min(255, (1.0 + sigma) * v))\ncanny = cv2.Canny(gaussiana, lower, upper)\nplt.subplot(121), plt.imshow(canny, cmap='gray')\nplt.title('Canny'), plt.xticks([]), plt.yticks([])\nkernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (5, 5))\ndilation = cv2.dilate(canny, kernel, iterations=1)\n_, contornos, _ = cv2.findContours(dilation.copy(), cv2.RETR_EXTERNAL, cv2.\n CHAIN_APPROX_SIMPLE)\ncv2.drawContours(imagen, contornos, -1, (255, 0, 0), 2)\ncv2.imshow('contornos', imagen)\ncv2.waitKey(0)\nfor x in range(len(contornos)):\n mask = np.zeros_like(imagen)\n out = np.zeros_like(imagen)\n cv2.drawContours(mask, [contornos[x]], 0, (255, 0, 0), -1)\n for i in range(imagen.shape[0]):\n for j in range(imagen.shape[1]):\n if mask[i, j, 0] == 255:\n out[i, j] = imagen[i, j]\n cv2.imshow('contorno', out)\n color = 'b', 'g', 'r'\n for i, col in enumerate(color):\n histr = cv2.calcHist([out], [i], None, [256], [1, 256])\n plt.plot(histr, color=col)\n plt.xlim([0, 256])\n plt.show()\n print('Es contorno de tomate?')\n c = cv2.waitKey(0) & 255\n if c == ord('t'):\n print('Histograma guardado como valido')\n if c == ord('n'):\n print('Histograma guardado como no valido')\ncv2.destroyAllWindows()\n",
"step-5": "import cv2\nimport numpy as np\nfrom matplotlib import pyplot as plt\n\n\n#cargar la imagen a analizar\nimagen= cv2.imread(\"tomate22.jpg\")\n#cv2.imshow(\"Original\", imagen)\n#cv2.waitKey(0)\n\n# Convertimos en escala de grise\ngris = cv2.cvtColor(imagen, cv2.COLOR_BGR2GRAY)\n#cv2.imshow(\"En gris\", gris)\n#cv2.waitKey(0)\n\n# Aplicar suavizado Gaussiano\ngaussiana = cv2.GaussianBlur(gris, (3,3), 0)\n#cv2.imshow(\"Gaussiano\", gaussiana)\n#cv2.waitKey(0)\n\n#detectamos los bordes con canny\nsigma=0.9\nv=np.median(gaussiana)\nlower=int(max(0,(1.0-sigma)*v))\nupper=int(min(255,(1.0+sigma)*v))\ncanny = cv2.Canny(gaussiana, lower, upper)\nplt.subplot(121),plt.imshow(canny,cmap = 'gray')\nplt.title('Canny'), plt.xticks([]), plt.yticks([])\n#cv2.imshow(\"Canny\", canny)\n#cv2.waitKey(0)\n\n#dilatacion\n#kernel = np.ones((5,5),np.uint8)\nkernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (5,5))\ndilation = cv2.dilate(canny,kernel,iterations = 1)\n#cv2.imshow(\"Dilatado\", dilation)\n#cv2.waitKey(0)\n\n#buscamos los contornos\n(_,contornos,_) = cv2.findContours(dilation.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)\n\ncv2.drawContours(imagen,contornos,-1,(255,0,0), 2)\ncv2.imshow(\"contornos\", imagen)\ncv2.waitKey(0)\n\nfor x in range (len(contornos)):\n \n #mascara\n mask=np.zeros_like(imagen)\n out=np.zeros_like(imagen)\n cv2.drawContours(mask, [contornos[x]], 0, (255,0,0), -1) #con ese -1 al final hace que pinte todo de azul lo que esta dentro del borde\n for i in range (imagen.shape[0]): #para recorrer todas las columnas .shape[0]\n for j in range (imagen.shape[1]): #para recorrer todas las filas .shape[1]\n if mask[i,j,0]==255: \n out[i,j]=imagen[i,j] \n \n cv2.imshow(\"contorno\", out)\n \n #histograma \n color = ('b','g','r')\n for i,col in enumerate(color):\n histr = cv2.calcHist([out],[i],None,[256],[1,256])\n plt.plot(histr,color = col)\n plt.xlim([0,256])\n plt.show()\n\n print(\"Es contorno de tomate?\")\n c=cv2.waitKey(0) & 0xFF\n if 
(c==ord(\"t\") ):\n \n print(\"Histograma guardado como valido\")\n \n if (c==ord(\"n\") ):\n\n print(\"Histograma guardado como no valido\")\n \ncv2.destroyAllWindows()\n\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# list audio files
import glob
def listFiles(path):
return glob.glob(path + '*.wav')
import random
def getNextFile(files):
return random.choice(files)
import pyaudio
import wave
CHUNK = 1024
def getRandomFile(folder = 'test/'):
files = listFiles(folder)
filename = getNextFile(files)
return filename
def play(filename):
# opem file
f = wave.open(filename, 'rb')
p = pyaudio.PyAudio()
#open stream
stream = p.open(format = p.get_format_from_width(f.getsampwidth()),
channels = f.getnchannels(),
rate = f.getframerate(),
output = True)
# read data
data = f.readframes(CHUNK)
#play stream
while data:
stream.write(data)
data = f.readframes(CHUNK)
#stop stream
stream.stop_stream()
stream.close()
#close PyAudio
p.terminate()
|
normal
|
{
"blob_id": "a3bcd383656284a2236e79b5d5d7acdfe433a13b",
"index": 8409,
"step-1": "<mask token>\n\n\ndef getNextFile(files):\n return random.choice(files)\n\n\n<mask token>\n\n\ndef getRandomFile(folder='test/'):\n files = listFiles(folder)\n filename = getNextFile(files)\n return filename\n\n\ndef play(filename):\n f = wave.open(filename, 'rb')\n p = pyaudio.PyAudio()\n stream = p.open(format=p.get_format_from_width(f.getsampwidth()),\n channels=f.getnchannels(), rate=f.getframerate(), output=True)\n data = f.readframes(CHUNK)\n while data:\n stream.write(data)\n data = f.readframes(CHUNK)\n stream.stop_stream()\n stream.close()\n p.terminate()\n",
"step-2": "<mask token>\n\n\ndef listFiles(path):\n return glob.glob(path + '*.wav')\n\n\n<mask token>\n\n\ndef getNextFile(files):\n return random.choice(files)\n\n\n<mask token>\n\n\ndef getRandomFile(folder='test/'):\n files = listFiles(folder)\n filename = getNextFile(files)\n return filename\n\n\ndef play(filename):\n f = wave.open(filename, 'rb')\n p = pyaudio.PyAudio()\n stream = p.open(format=p.get_format_from_width(f.getsampwidth()),\n channels=f.getnchannels(), rate=f.getframerate(), output=True)\n data = f.readframes(CHUNK)\n while data:\n stream.write(data)\n data = f.readframes(CHUNK)\n stream.stop_stream()\n stream.close()\n p.terminate()\n",
"step-3": "<mask token>\n\n\ndef listFiles(path):\n return glob.glob(path + '*.wav')\n\n\n<mask token>\n\n\ndef getNextFile(files):\n return random.choice(files)\n\n\n<mask token>\nCHUNK = 1024\n\n\ndef getRandomFile(folder='test/'):\n files = listFiles(folder)\n filename = getNextFile(files)\n return filename\n\n\ndef play(filename):\n f = wave.open(filename, 'rb')\n p = pyaudio.PyAudio()\n stream = p.open(format=p.get_format_from_width(f.getsampwidth()),\n channels=f.getnchannels(), rate=f.getframerate(), output=True)\n data = f.readframes(CHUNK)\n while data:\n stream.write(data)\n data = f.readframes(CHUNK)\n stream.stop_stream()\n stream.close()\n p.terminate()\n",
"step-4": "import glob\n\n\ndef listFiles(path):\n return glob.glob(path + '*.wav')\n\n\nimport random\n\n\ndef getNextFile(files):\n return random.choice(files)\n\n\nimport pyaudio\nimport wave\nCHUNK = 1024\n\n\ndef getRandomFile(folder='test/'):\n files = listFiles(folder)\n filename = getNextFile(files)\n return filename\n\n\ndef play(filename):\n f = wave.open(filename, 'rb')\n p = pyaudio.PyAudio()\n stream = p.open(format=p.get_format_from_width(f.getsampwidth()),\n channels=f.getnchannels(), rate=f.getframerate(), output=True)\n data = f.readframes(CHUNK)\n while data:\n stream.write(data)\n data = f.readframes(CHUNK)\n stream.stop_stream()\n stream.close()\n p.terminate()\n",
"step-5": "# list audio files\nimport glob\ndef listFiles(path):\n return glob.glob(path + '*.wav')\n\nimport random\ndef getNextFile(files):\n return random.choice(files)\n\nimport pyaudio\nimport wave\nCHUNK = 1024\n\ndef getRandomFile(folder = 'test/'):\n files = listFiles(folder)\n filename = getNextFile(files)\n return filename\n\ndef play(filename):\n # opem file\n f = wave.open(filename, 'rb')\n p = pyaudio.PyAudio()\n #open stream \n stream = p.open(format = p.get_format_from_width(f.getsampwidth()), \n channels = f.getnchannels(), \n rate = f.getframerate(), \n output = True)\n # read data\n data = f.readframes(CHUNK)\n #play stream \n while data:\n stream.write(data) \n data = f.readframes(CHUNK)\n #stop stream \n stream.stop_stream() \n stream.close() \n #close PyAudio \n p.terminate()\n ",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
<|reserved_special_token_0|>
class Timer(object):
<|reserved_special_token_0|>
def reset(self):
self.time_ = 0.0
self.start_ = 0.0
def start(self):
self.start_ = time.clock()
def end(self):
self.time_ += time.clock() - self.start_
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Timer(object):
def __init__(self):
self.time_ = 0.0
self.start_ = 0.0
def reset(self):
self.time_ = 0.0
self.start_ = 0.0
def start(self):
self.start_ = time.clock()
def end(self):
self.time_ += time.clock() - self.start_
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Timer(object):
def __init__(self):
self.time_ = 0.0
self.start_ = 0.0
def reset(self):
self.time_ = 0.0
self.start_ = 0.0
def start(self):
self.start_ = time.clock()
def end(self):
self.time_ += time.clock() - self.start_
def timing(timer):
"""Decorator for timing.
Example:
timer = Timer()
@timing(timer)
def foo():
pass
:param timer:
"""
def real_timing(function):
def advice(*args, **kwargs):
timer.start()
result = function(*args, **kwargs)
timer.end()
return result
return advice
return real_timing
<|reserved_special_token_1|>
import time
class Timer(object):
def __init__(self):
self.time_ = 0.0
self.start_ = 0.0
def reset(self):
self.time_ = 0.0
self.start_ = 0.0
def start(self):
self.start_ = time.clock()
def end(self):
self.time_ += time.clock() - self.start_
def timing(timer):
"""Decorator for timing.
Example:
timer = Timer()
@timing(timer)
def foo():
pass
:param timer:
"""
def real_timing(function):
def advice(*args, **kwargs):
timer.start()
result = function(*args, **kwargs)
timer.end()
return result
return advice
return real_timing
<|reserved_special_token_1|>
#!/usr/bin/env python
# coding: utf-8
import time
class Timer(object):
def __init__(self):
self.time_ = 0.
self.start_ = 0.
def reset(self):
self.time_ = 0.
self.start_ = 0.
def start(self):
self.start_ = time.clock()
def end(self):
self.time_ += time.clock() - self.start_
def timing(timer):
    """Decorator factory that records a function's runtime on *timer*.

    Example:
        timer = Timer()
        @timing(timer)
        def foo():
            pass

    :param timer: object exposing start() and end() methods (e.g. Timer).
        Note: end() is only reached on normal return — if the wrapped
        function raises, the timer is left running (matches the original
        behavior).
    """
    import functools  # local import keeps this block self-contained

    def real_timing(function):
        @functools.wraps(function)  # preserve __name__/__doc__ of the wrapped function
        def advice(*args, **kwargs):
            timer.start()
            result = function(*args, **kwargs)
            timer.end()
            return result
        return advice
    return real_timing
|
flexible
|
{
"blob_id": "0cf5b009f384d2ca7162b5a88699afb3702ae1f6",
"index": 1147,
"step-1": "<mask token>\n\n\nclass Timer(object):\n <mask token>\n\n def reset(self):\n self.time_ = 0.0\n self.start_ = 0.0\n\n def start(self):\n self.start_ = time.clock()\n\n def end(self):\n self.time_ += time.clock() - self.start_\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Timer(object):\n\n def __init__(self):\n self.time_ = 0.0\n self.start_ = 0.0\n\n def reset(self):\n self.time_ = 0.0\n self.start_ = 0.0\n\n def start(self):\n self.start_ = time.clock()\n\n def end(self):\n self.time_ += time.clock() - self.start_\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass Timer(object):\n\n def __init__(self):\n self.time_ = 0.0\n self.start_ = 0.0\n\n def reset(self):\n self.time_ = 0.0\n self.start_ = 0.0\n\n def start(self):\n self.start_ = time.clock()\n\n def end(self):\n self.time_ += time.clock() - self.start_\n\n\ndef timing(timer):\n \"\"\"Decorator for timing.\n\n Example:\n timer = Timer()\n @timing(timer)\n def foo():\n pass\n\n :param timer:\n \"\"\"\n\n def real_timing(function):\n\n def advice(*args, **kwargs):\n timer.start()\n result = function(*args, **kwargs)\n timer.end()\n return result\n return advice\n return real_timing\n",
"step-4": "import time\n\n\nclass Timer(object):\n\n def __init__(self):\n self.time_ = 0.0\n self.start_ = 0.0\n\n def reset(self):\n self.time_ = 0.0\n self.start_ = 0.0\n\n def start(self):\n self.start_ = time.clock()\n\n def end(self):\n self.time_ += time.clock() - self.start_\n\n\ndef timing(timer):\n \"\"\"Decorator for timing.\n\n Example:\n timer = Timer()\n @timing(timer)\n def foo():\n pass\n\n :param timer:\n \"\"\"\n\n def real_timing(function):\n\n def advice(*args, **kwargs):\n timer.start()\n result = function(*args, **kwargs)\n timer.end()\n return result\n return advice\n return real_timing\n",
"step-5": "#!/usr/bin/env python\n# coding: utf-8\nimport time\n\n\nclass Timer(object):\n def __init__(self):\n self.time_ = 0.\n self.start_ = 0.\n\n def reset(self):\n self.time_ = 0.\n self.start_ = 0.\n\n def start(self):\n self.start_ = time.clock()\n\n def end(self):\n self.time_ += time.clock() - self.start_\n\n\ndef timing(timer):\n \"\"\"Decorator for timing.\n\n Example:\n timer = Timer()\n @timing(timer)\n def foo():\n pass\n\n :param timer:\n \"\"\"\n\n def real_timing(function):\n def advice(*args, **kwargs):\n timer.start()\n result = function(*args, **kwargs)\n timer.end()\n return result\n return advice\n return real_timing\n",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
from elements import Node, Bar, Material, Group, Load
from pprint import pprint
# query
# next((e for e in result['coordinates']['nodes'] if e.n == int(el[0])), None)
class Reader():
    # NOTE(review): parses a .fem-style text file whose sections are headed by
    # lines starting with '*' (e.g. *COORDINATES). Relies on Node/Bar/Material/
    # Group/Load from the sibling `elements` module.
    def read(self, filePath):
        """
        Reads text file with nodes and returns the result dict with all objects
        and their nested properties
        """
        # Accumulator for every parsed section; 'count' fields hold the header
        # counts from the file, the lists hold the constructed objects.
        result = {
            'coordinates': {
                'count': 0,
                'nodes': []
            },
            'element_groups': {
                'number_of_elements': 0,
                'count': 0,
                'groups': []
            },
            'bars': [],
            'materials': {
                'count': 0,
                'materials': []
            },
            'geometric_properties': {
                'count': 0
            },
            'bcnodes': {
                'count': 0
            },
            'loads': {
                'count': 0
            }
        }
        # print(result['coordinates']['nodes'])
        with open(filePath,'r') as f:
            lines = f.readlines()
            # NOTE(review): elementCounter is never used below.
            elementCounter = 0
            groupCounter = 0
            geometricCounter = 0
            for line in lines:
                line = line.strip()
                el = line.split(' ')
                if len(line) == 0:
                    continue
                if len(line) != 0 and line[0] == "*":
                    # Section header: '*NAME' selects the branch below until
                    # the next header line.
                    section = line[1:].lower()
                    continue
                if section == 'coordinates':
                    # One count line, then 'id x y' rows -> Node objects.
                    if len(el) == 1 :
                        result[section]['count'] = el[0]
                    else:
                        result[section]['nodes'].append(Node(int(el[0]), float(el[1]), float(el[2])))
                elif section == 'element_groups':
                    # NOTE(review): header test uses len(line) == 1 (one character),
                    # unlike the other sections which test len(el) — confirm the
                    # group count is always a single digit in the input format.
                    if len(line) == 1:
                        result[section]['count'] = int(el[0])
                    else:
                        result[section]['groups'].append(Group(el[0], el[1], el[2]))
                        result[section]['number_of_elements'] += int(el[1])
                elif section == 'incidences':
                    # Each row 'id startNode endNode' becomes a Bar; bars are
                    # assigned to groups in file order, advancing to the next
                    # group once the current one's amount is exhausted.
                    groups = result['element_groups']['groups']
                    nodes = result['coordinates']['nodes']
                    print(el)
                    currentGroup = groups[groupCounter]
                    if (currentGroup.amount == 0):
                        groupCounter += 1
                        currentGroup = groups[groupCounter]
                    print("Group n: {} count: {}".format(currentGroup.n, currentGroup.amount))
                    # Node ids are 1-based in the file; the nodes list is 0-based.
                    bar = Bar(el[0], nodes[int(el[1])-1], nodes[int(el[2])-1], groups[groupCounter])
                    print(
                        """
                        Bar {} created
                        Start node: {} End Node: {} Group: {}
                        """.format(bar.id, bar.startNode.n, bar.endNode.n, bar.group))
                    result['bars'].append(bar)
                    currentGroup.amount -= 1
                elif section == 'materials':
                    # Materials pair with groups by position: the i-th material
                    # row is attached to the i-th group.
                    if len(el) == 1:
                        result[section]['count'] = el[0]
                        groupCounter = 0
                    else:
                        material = Material(el[0], el[1], el[2])
                        result[section]['materials'].append(material)
                        result['element_groups']['groups'][groupCounter].setMaterial(material)
                        groupCounter += 1
                elif section == 'geometric_properties':
                    # First row is the count; subsequent rows set the section
                    # area of the matching group (again paired by position).
                    if geometricCounter == 0:
                        result[section]['count'] = el[0]
                    else:
                        result['element_groups']['groups'][geometricCounter - 1].setSectionArea(
                            el[0]
                        )
                    geometricCounter += 1
                elif section == 'bcnodes':
                    # Boundary conditions: linear search for the node by its id,
                    # then record the restriction code on it.
                    if len(el) == 1:
                        result[section]['count'] = el[0]
                    else:
                        nodeIndex = next((e for e, item in enumerate(
                            result['coordinates']['nodes']) if item.n == int(el[0])), None
                        )
                        result['coordinates']['nodes'][nodeIndex].setRestriction(int(el[1]))
                elif section == 'loads':
                    # Loads: same id lookup as bcnodes, then attach a Load.
                    if len(el) == 1:
                        result[section]['count'] = el[0]
                    else:
                        load = Load(el[1], el[2])
                        nodeIndex = next((e for e, item in enumerate(
                            result['coordinates']['nodes']) if item.n == int(el[0])), None
                        )
                        result['coordinates']['nodes'][nodeIndex].addLoad(load)
            # Post-pass: build each bar's local array (see Bar.createLocalArray).
            for bar in result['bars']:
                bar.createLocalArray()
            print('---------- Parsing complete! ----------')
            pprint(result)
            print('---------------------------------------')
            return result
# reader = Reader()
# reader.read("./arquivoentrada.fem")
|
normal
|
{
"blob_id": "c796123fbbf3adcde59779a104dcafb30a673a79",
"index": 6422,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Reader:\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Reader:\n\n def read(self, filePath):\n \"\"\"\n Reads text file with nodes and returns the result dict with all objects\n and their nested properties\n \"\"\"\n result = {'coordinates': {'count': 0, 'nodes': []},\n 'element_groups': {'number_of_elements': 0, 'count': 0,\n 'groups': []}, 'bars': [], 'materials': {'count': 0,\n 'materials': []}, 'geometric_properties': {'count': 0},\n 'bcnodes': {'count': 0}, 'loads': {'count': 0}}\n with open(filePath, 'r') as f:\n lines = f.readlines()\n elementCounter = 0\n groupCounter = 0\n geometricCounter = 0\n for line in lines:\n line = line.strip()\n el = line.split(' ')\n if len(line) == 0:\n continue\n if len(line) != 0 and line[0] == '*':\n section = line[1:].lower()\n continue\n if section == 'coordinates':\n if len(el) == 1:\n result[section]['count'] = el[0]\n else:\n result[section]['nodes'].append(Node(int(el[0]),\n float(el[1]), float(el[2])))\n elif section == 'element_groups':\n if len(line) == 1:\n result[section]['count'] = int(el[0])\n else:\n result[section]['groups'].append(Group(el[0], el[1],\n el[2]))\n result[section]['number_of_elements'] += int(el[1])\n elif section == 'incidences':\n groups = result['element_groups']['groups']\n nodes = result['coordinates']['nodes']\n print(el)\n currentGroup = groups[groupCounter]\n if currentGroup.amount == 0:\n groupCounter += 1\n currentGroup = groups[groupCounter]\n print('Group n: {} count: {}'.format(currentGroup.n,\n currentGroup.amount))\n bar = Bar(el[0], nodes[int(el[1]) - 1], nodes[int(el[2]\n ) - 1], groups[groupCounter])\n print(\n \"\"\"\n Bar {} created \n Start node: {} End Node: {} Group: {}\n \"\"\"\n .format(bar.id, bar.startNode.n, bar.endNode.n, bar\n .group))\n result['bars'].append(bar)\n currentGroup.amount -= 1\n elif section == 'materials':\n if len(el) == 1:\n result[section]['count'] = el[0]\n groupCounter = 0\n else:\n material = Material(el[0], el[1], el[2])\n 
result[section]['materials'].append(material)\n result['element_groups']['groups'][groupCounter\n ].setMaterial(material)\n groupCounter += 1\n elif section == 'geometric_properties':\n if geometricCounter == 0:\n result[section]['count'] = el[0]\n else:\n result['element_groups']['groups'][geometricCounter - 1\n ].setSectionArea(el[0])\n geometricCounter += 1\n elif section == 'bcnodes':\n if len(el) == 1:\n result[section]['count'] = el[0]\n else:\n nodeIndex = next((e for e, item in enumerate(result\n ['coordinates']['nodes']) if item.n == int(el[0\n ])), None)\n result['coordinates']['nodes'][nodeIndex\n ].setRestriction(int(el[1]))\n elif section == 'loads':\n if len(el) == 1:\n result[section]['count'] = el[0]\n else:\n load = Load(el[1], el[2])\n nodeIndex = next((e for e, item in enumerate(result\n ['coordinates']['nodes']) if item.n == int(el[0\n ])), None)\n result['coordinates']['nodes'][nodeIndex].addLoad(load)\n for bar in result['bars']:\n bar.createLocalArray()\n print('---------- Parsing complete! ----------')\n pprint(result)\n print('---------------------------------------')\n return result\n",
"step-4": "from elements import Node, Bar, Material, Group, Load\nfrom pprint import pprint\n\n\nclass Reader:\n\n def read(self, filePath):\n \"\"\"\n Reads text file with nodes and returns the result dict with all objects\n and their nested properties\n \"\"\"\n result = {'coordinates': {'count': 0, 'nodes': []},\n 'element_groups': {'number_of_elements': 0, 'count': 0,\n 'groups': []}, 'bars': [], 'materials': {'count': 0,\n 'materials': []}, 'geometric_properties': {'count': 0},\n 'bcnodes': {'count': 0}, 'loads': {'count': 0}}\n with open(filePath, 'r') as f:\n lines = f.readlines()\n elementCounter = 0\n groupCounter = 0\n geometricCounter = 0\n for line in lines:\n line = line.strip()\n el = line.split(' ')\n if len(line) == 0:\n continue\n if len(line) != 0 and line[0] == '*':\n section = line[1:].lower()\n continue\n if section == 'coordinates':\n if len(el) == 1:\n result[section]['count'] = el[0]\n else:\n result[section]['nodes'].append(Node(int(el[0]),\n float(el[1]), float(el[2])))\n elif section == 'element_groups':\n if len(line) == 1:\n result[section]['count'] = int(el[0])\n else:\n result[section]['groups'].append(Group(el[0], el[1],\n el[2]))\n result[section]['number_of_elements'] += int(el[1])\n elif section == 'incidences':\n groups = result['element_groups']['groups']\n nodes = result['coordinates']['nodes']\n print(el)\n currentGroup = groups[groupCounter]\n if currentGroup.amount == 0:\n groupCounter += 1\n currentGroup = groups[groupCounter]\n print('Group n: {} count: {}'.format(currentGroup.n,\n currentGroup.amount))\n bar = Bar(el[0], nodes[int(el[1]) - 1], nodes[int(el[2]\n ) - 1], groups[groupCounter])\n print(\n \"\"\"\n Bar {} created \n Start node: {} End Node: {} Group: {}\n \"\"\"\n .format(bar.id, bar.startNode.n, bar.endNode.n, bar\n .group))\n result['bars'].append(bar)\n currentGroup.amount -= 1\n elif section == 'materials':\n if len(el) == 1:\n result[section]['count'] = el[0]\n groupCounter = 0\n else:\n material = 
Material(el[0], el[1], el[2])\n result[section]['materials'].append(material)\n result['element_groups']['groups'][groupCounter\n ].setMaterial(material)\n groupCounter += 1\n elif section == 'geometric_properties':\n if geometricCounter == 0:\n result[section]['count'] = el[0]\n else:\n result['element_groups']['groups'][geometricCounter - 1\n ].setSectionArea(el[0])\n geometricCounter += 1\n elif section == 'bcnodes':\n if len(el) == 1:\n result[section]['count'] = el[0]\n else:\n nodeIndex = next((e for e, item in enumerate(result\n ['coordinates']['nodes']) if item.n == int(el[0\n ])), None)\n result['coordinates']['nodes'][nodeIndex\n ].setRestriction(int(el[1]))\n elif section == 'loads':\n if len(el) == 1:\n result[section]['count'] = el[0]\n else:\n load = Load(el[1], el[2])\n nodeIndex = next((e for e, item in enumerate(result\n ['coordinates']['nodes']) if item.n == int(el[0\n ])), None)\n result['coordinates']['nodes'][nodeIndex].addLoad(load)\n for bar in result['bars']:\n bar.createLocalArray()\n print('---------- Parsing complete! ----------')\n pprint(result)\n print('---------------------------------------')\n return result\n",
"step-5": "from elements import Node, Bar, Material, Group, Load\nfrom pprint import pprint\n\n# query\n# next((e for e in result['coordinates']['nodes'] if e.n == int(el[0])), None)\n\nclass Reader():\n def read(self, filePath):\n \"\"\"\n Reads text file with nodes and returns the result dict with all objects\n and their nested properties\n \"\"\"\n \n result = {\n 'coordinates': {\n 'count': 0,\n 'nodes': []\n },\n 'element_groups': { \n 'number_of_elements': 0,\n 'count': 0,\n 'groups': []\n },\n 'bars': [],\n 'materials': {\n 'count': 0,\n 'materials': []\n },\n 'geometric_properties': {\n 'count': 0\n },\n 'bcnodes': {\n 'count': 0\n },\n 'loads': {\n 'count': 0\n }\n }\n # print(result['coordinates']['nodes'])\n \n with open(filePath,'r') as f:\n lines = f.readlines()\n elementCounter = 0\n groupCounter = 0\n geometricCounter = 0\n\n for line in lines:\n line = line.strip()\n el = line.split(' ')\n \n if len(line) == 0:\n continue\n\n if len(line) != 0 and line[0] == \"*\":\n section = line[1:].lower()\n continue\n \n if section == 'coordinates':\n if len(el) == 1 :\n result[section]['count'] = el[0]\n else:\n result[section]['nodes'].append(Node(int(el[0]), float(el[1]), float(el[2])))\n \n elif section == 'element_groups':\n if len(line) == 1:\n result[section]['count'] = int(el[0])\n else: \n result[section]['groups'].append(Group(el[0], el[1], el[2]))\n result[section]['number_of_elements'] += int(el[1])\n\n elif section == 'incidences':\n groups = result['element_groups']['groups']\n nodes = result['coordinates']['nodes']\n print(el)\n\n currentGroup = groups[groupCounter]\n if (currentGroup.amount == 0):\n groupCounter += 1\n currentGroup = groups[groupCounter]\n \n print(\"Group n: {} count: {}\".format(currentGroup.n, currentGroup.amount))\n \n bar = Bar(el[0], nodes[int(el[1])-1], nodes[int(el[2])-1], groups[groupCounter])\n print(\n \"\"\"\n Bar {} created \n Start node: {} End Node: {} Group: {}\n \"\"\".format(bar.id, bar.startNode.n, 
bar.endNode.n, bar.group))\n result['bars'].append(bar)\n currentGroup.amount -= 1\n \n elif section == 'materials':\n if len(el) == 1:\n result[section]['count'] = el[0]\n groupCounter = 0\n else:\n material = Material(el[0], el[1], el[2])\n result[section]['materials'].append(material)\n result['element_groups']['groups'][groupCounter].setMaterial(material)\n groupCounter += 1\n\n elif section == 'geometric_properties':\n if geometricCounter == 0:\n result[section]['count'] = el[0]\n else:\n result['element_groups']['groups'][geometricCounter - 1].setSectionArea(\n el[0]\n )\n geometricCounter += 1\n\n elif section == 'bcnodes':\n if len(el) == 1:\n result[section]['count'] = el[0]\n else:\n nodeIndex = next((e for e, item in enumerate(\n result['coordinates']['nodes']) if item.n == int(el[0])), None\n )\n result['coordinates']['nodes'][nodeIndex].setRestriction(int(el[1]))\n\n elif section == 'loads':\n if len(el) == 1:\n result[section]['count'] = el[0]\n else:\n load = Load(el[1], el[2])\n nodeIndex = next((e for e, item in enumerate(\n result['coordinates']['nodes']) if item.n == int(el[0])), None\n )\n result['coordinates']['nodes'][nodeIndex].addLoad(load)\n\n for bar in result['bars']:\n bar.createLocalArray()\n\n print('---------- Parsing complete! ----------')\n pprint(result)\n print('---------------------------------------')\n\n return result\n \n\n# reader = Reader()\n# reader.read(\"./arquivoentrada.fem\")\n\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
print(auto.head())
sns.pairplot(auto, kind='reg', hue='origin')
plt.show()
<|reserved_special_token_1|>
# EXERCISE:
# Plotting distributions pairwise (2)
# In this exercise, you will generate pairwise joint distributions again. This time, you will make two particular
# additions:
# - You will display regressions as well as scatter plots in the off-diagonal subplots. You will do this with the
# argument kind='reg' (where 'reg' means 'regression'). Another option for kind is 'scatter' (the default) that
# plots scatter plots in the off-diagonal subplots.
# - You will also visualize the joint distributions separated by continent of origin. You will do this with the
# keyword argument hue specifying the 'origin'.
# INSTRUCTIONS:
# - Plot the pairwise joint distributions separated by continent of origin and display the regressions.
# CODE:
# Print the first 5 rows of the DataFrame
print(auto.head())
# Plot the pairwise joint distributions grouped by 'origin' along with regression lines
sns.pairplot(auto, kind='reg', hue='origin')
# Display the plot
plt.show()
|
flexible
|
{
"blob_id": "0eaaa81d3c8bc61368701e1916b42ede88b90d04",
"index": 412,
"step-1": "<mask token>\n",
"step-2": "print(auto.head())\nsns.pairplot(auto, kind='reg', hue='origin')\nplt.show()\n",
"step-3": "# EXERCISE:\n\n# Plotting distributions pairwise (2)\n\n# In this exercise, you will generate pairwise joint distributions again. This time, you will make two particular\n# additions:\n\n# - You will display regressions as well as scatter plots in the off-diagonal subplots. You will do this with the\n# argument kind='reg' (where 'reg' means 'regression'). Another option for kind is 'scatter' (the default) that\n# plots scatter plots in the off-diagonal subplots.\n# - You will also visualize the joint distributions separated by continent of origin. You will do this with the\n# keyword argument hue specifying the 'origin'.\n\n\n# INSTRUCTIONS:\n\n# - Plot the pairwise joint distributions separated by continent of origin and display the regressions.\n\n\n# CODE:\n\n# Print the first 5 rows of the DataFrame\nprint(auto.head())\n\n# Plot the pairwise joint distributions grouped by 'origin' along with regression lines\nsns.pairplot(auto, kind='reg', hue='origin')\n\n# Display the plot\nplt.show()\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import django.core.validators
class Migration(migrations.Migration):
    # Schema migration for the Registration app: switches the user date
    # columns to auto-managed values and bounds the skill percentage.
    dependencies = [
        ('Registration', '0015_auto_20150525_1815'),
    ]
    operations = [
        migrations.AlterField(
            model_name='user',
            name='created_date',
            # Set once, when the row is first inserted.
            field=models.DateField(auto_now_add=True),
        ),
        migrations.AlterField(
            model_name='user',
            name='last_login',
            field=models.DateTimeField(null=True, verbose_name='last login', blank=True),
        ),
        migrations.AlterField(
            model_name='user',
            name='modified_date',
            # Refreshed automatically on every save().
            field=models.DateField(auto_now=True),
        ),
        migrations.AlterField(
            model_name='user_skills',
            name='percentage',
            # Constrained to 0-100 via validators (applied at model
            # validation time, not as a database-level CHECK constraint).
            field=models.PositiveSmallIntegerField(default=0, validators=[django.core.validators.MinValueValidator(0), django.core.validators.MaxValueValidator(100)]),
        ),
    ]
|
normal
|
{
"blob_id": "7a1be5c9c48413ba1969631e99ecb45cf15ef613",
"index": 559,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('Registration', '0015_auto_20150525_1815')]\n operations = [migrations.AlterField(model_name='user', name=\n 'created_date', field=models.DateField(auto_now_add=True)),\n migrations.AlterField(model_name='user', name='last_login', field=\n models.DateTimeField(null=True, verbose_name='last login', blank=\n True)), migrations.AlterField(model_name='user', name=\n 'modified_date', field=models.DateField(auto_now=True)), migrations\n .AlterField(model_name='user_skills', name='percentage', field=\n models.PositiveSmallIntegerField(default=0, validators=[django.core\n .validators.MinValueValidator(0), django.core.validators.\n MaxValueValidator(100)]))]\n",
"step-4": "from __future__ import unicode_literals\nfrom django.db import migrations, models\nimport django.core.validators\n\n\nclass Migration(migrations.Migration):\n dependencies = [('Registration', '0015_auto_20150525_1815')]\n operations = [migrations.AlterField(model_name='user', name=\n 'created_date', field=models.DateField(auto_now_add=True)),\n migrations.AlterField(model_name='user', name='last_login', field=\n models.DateTimeField(null=True, verbose_name='last login', blank=\n True)), migrations.AlterField(model_name='user', name=\n 'modified_date', field=models.DateField(auto_now=True)), migrations\n .AlterField(model_name='user_skills', name='percentage', field=\n models.PositiveSmallIntegerField(default=0, validators=[django.core\n .validators.MinValueValidator(0), django.core.validators.\n MaxValueValidator(100)]))]\n",
"step-5": "# -*- coding: utf-8 -*-\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\nimport django.core.validators\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('Registration', '0015_auto_20150525_1815'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='user',\n name='created_date',\n field=models.DateField(auto_now_add=True),\n ),\n migrations.AlterField(\n model_name='user',\n name='last_login',\n field=models.DateTimeField(null=True, verbose_name='last login', blank=True),\n ),\n migrations.AlterField(\n model_name='user',\n name='modified_date',\n field=models.DateField(auto_now=True),\n ),\n migrations.AlterField(\n model_name='user_skills',\n name='percentage',\n field=models.PositiveSmallIntegerField(default=0, validators=[django.core.validators.MinValueValidator(0), django.core.validators.MaxValueValidator(100)]),\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# coding: utf8
from __future__ import unicode_literals
from nltk.tag import stanford
from .SequenceTagger import SequenceTagger
class POSTagger(SequenceTagger):
    """
    >>> tagger = POSTagger(model='resources/postagger.model')
    >>> tagger.tag(['من', 'به', 'مدرسه', 'رفته_بودم', '.'])
    [('من', 'PRO'), ('به', 'P'), ('مدرسه', 'N'), ('رفته_بودم', 'V'), ('.', 'PUNC')]
    """
    # Part-of-speech tagger for Persian. All behavior is inherited from
    # SequenceTagger; this subclass exists to document the expected usage
    # via the doctest above.
class StanfordPOSTagger(stanford.StanfordPOSTagger):
    """
    >>> tagger = StanfordPOSTagger(model_filename='resources/persian.tagger', path_to_jar='resources/stanford-postagger.jar')
    >>> tagger.tag(['من', 'به', 'مدرسه', 'رفته_بودم', '.'])
    [('من', 'PRO'), ('به', 'P'), ('مدرسه', 'N'), ('رفته_بودم', 'V'), ('.', 'PUNC')]
    """
    def __init__(self, model_filename, path_to_jar, *args, **kwargs):
        # '/' separates token from tag in the Stanford tagger's output format.
        self._SEPARATOR = '/'
        # NOTE(review): super() is anchored at stanford.StanfordPOSTagger, which
        # skips that class's own __init__ in the MRO and goes straight to its
        # base initializer — looks deliberate, but confirm the intent.
        super(stanford.StanfordPOSTagger, self).__init__(model_filename=model_filename, path_to_jar=path_to_jar, *args, **kwargs)
    def tag(self, tokens):
        # Convenience wrapper: tag a single sentence via tag_sents.
        return self.tag_sents([tokens])[0]
    def tag_sents(self, sentences):
        # Replace internal spaces with underscores so multi-word tokens survive
        # the external tagger's whitespace tokenization.
        refined = map(lambda s: [w.replace(' ', '_') for w in s], sentences)
        return super(stanford.StanfordPOSTagger, self).tag_sents(refined)
|
normal
|
{
"blob_id": "1ac3630e6433a2d11c716b558640cab7c559f6ba",
"index": 4483,
"step-1": "<mask token>\n\n\nclass StanfordPOSTagger(stanford.StanfordPOSTagger):\n <mask token>\n\n def __init__(self, model_filename, path_to_jar, *args, **kwargs):\n self._SEPARATOR = '/'\n super(stanford.StanfordPOSTagger, self).__init__(*args,\n model_filename=model_filename, path_to_jar=path_to_jar, **kwargs)\n <mask token>\n\n def tag_sents(self, sentences):\n refined = map(lambda s: [w.replace(' ', '_') for w in s], sentences)\n return super(stanford.StanfordPOSTagger, self).tag_sents(refined)\n",
"step-2": "<mask token>\n\n\nclass StanfordPOSTagger(stanford.StanfordPOSTagger):\n \"\"\"\n\t>>> tagger = StanfordPOSTagger(model_filename='resources/persian.tagger', path_to_jar='resources/stanford-postagger.jar')\n\t>>> tagger.tag(['من', 'به', 'مدرسه', 'رفته_بودم', '.'])\n\t[('من', 'PRO'), ('به', 'P'), ('مدرسه', 'N'), ('رفته_بودم', 'V'), ('.', 'PUNC')]\n\t\"\"\"\n\n def __init__(self, model_filename, path_to_jar, *args, **kwargs):\n self._SEPARATOR = '/'\n super(stanford.StanfordPOSTagger, self).__init__(*args,\n model_filename=model_filename, path_to_jar=path_to_jar, **kwargs)\n\n def tag(self, tokens):\n return self.tag_sents([tokens])[0]\n\n def tag_sents(self, sentences):\n refined = map(lambda s: [w.replace(' ', '_') for w in s], sentences)\n return super(stanford.StanfordPOSTagger, self).tag_sents(refined)\n",
"step-3": "<mask token>\n\n\nclass POSTagger(SequenceTagger):\n \"\"\"\n\t>>> tagger = POSTagger(model='resources/postagger.model')\n\t>>> tagger.tag(['من', 'به', 'مدرسه', 'رفته_بودم', '.'])\n\t[('من', 'PRO'), ('به', 'P'), ('مدرسه', 'N'), ('رفته_بودم', 'V'), ('.', 'PUNC')]\n\t\"\"\"\n\n\nclass StanfordPOSTagger(stanford.StanfordPOSTagger):\n \"\"\"\n\t>>> tagger = StanfordPOSTagger(model_filename='resources/persian.tagger', path_to_jar='resources/stanford-postagger.jar')\n\t>>> tagger.tag(['من', 'به', 'مدرسه', 'رفته_بودم', '.'])\n\t[('من', 'PRO'), ('به', 'P'), ('مدرسه', 'N'), ('رفته_بودم', 'V'), ('.', 'PUNC')]\n\t\"\"\"\n\n def __init__(self, model_filename, path_to_jar, *args, **kwargs):\n self._SEPARATOR = '/'\n super(stanford.StanfordPOSTagger, self).__init__(*args,\n model_filename=model_filename, path_to_jar=path_to_jar, **kwargs)\n\n def tag(self, tokens):\n return self.tag_sents([tokens])[0]\n\n def tag_sents(self, sentences):\n refined = map(lambda s: [w.replace(' ', '_') for w in s], sentences)\n return super(stanford.StanfordPOSTagger, self).tag_sents(refined)\n",
"step-4": "from __future__ import unicode_literals\nfrom nltk.tag import stanford\nfrom .SequenceTagger import SequenceTagger\n\n\nclass POSTagger(SequenceTagger):\n \"\"\"\n\t>>> tagger = POSTagger(model='resources/postagger.model')\n\t>>> tagger.tag(['من', 'به', 'مدرسه', 'رفته_بودم', '.'])\n\t[('من', 'PRO'), ('به', 'P'), ('مدرسه', 'N'), ('رفته_بودم', 'V'), ('.', 'PUNC')]\n\t\"\"\"\n\n\nclass StanfordPOSTagger(stanford.StanfordPOSTagger):\n \"\"\"\n\t>>> tagger = StanfordPOSTagger(model_filename='resources/persian.tagger', path_to_jar='resources/stanford-postagger.jar')\n\t>>> tagger.tag(['من', 'به', 'مدرسه', 'رفته_بودم', '.'])\n\t[('من', 'PRO'), ('به', 'P'), ('مدرسه', 'N'), ('رفته_بودم', 'V'), ('.', 'PUNC')]\n\t\"\"\"\n\n def __init__(self, model_filename, path_to_jar, *args, **kwargs):\n self._SEPARATOR = '/'\n super(stanford.StanfordPOSTagger, self).__init__(*args,\n model_filename=model_filename, path_to_jar=path_to_jar, **kwargs)\n\n def tag(self, tokens):\n return self.tag_sents([tokens])[0]\n\n def tag_sents(self, sentences):\n refined = map(lambda s: [w.replace(' ', '_') for w in s], sentences)\n return super(stanford.StanfordPOSTagger, self).tag_sents(refined)\n",
"step-5": "# coding: utf8\n\nfrom __future__ import unicode_literals\nfrom nltk.tag import stanford\nfrom .SequenceTagger import SequenceTagger\n\n\nclass POSTagger(SequenceTagger):\n\t\"\"\"\n\t>>> tagger = POSTagger(model='resources/postagger.model')\n\t>>> tagger.tag(['من', 'به', 'مدرسه', 'رفته_بودم', '.'])\n\t[('من', 'PRO'), ('به', 'P'), ('مدرسه', 'N'), ('رفته_بودم', 'V'), ('.', 'PUNC')]\n\t\"\"\"\n\n\nclass StanfordPOSTagger(stanford.StanfordPOSTagger):\n\t\"\"\"\n\t>>> tagger = StanfordPOSTagger(model_filename='resources/persian.tagger', path_to_jar='resources/stanford-postagger.jar')\n\t>>> tagger.tag(['من', 'به', 'مدرسه', 'رفته_بودم', '.'])\n\t[('من', 'PRO'), ('به', 'P'), ('مدرسه', 'N'), ('رفته_بودم', 'V'), ('.', 'PUNC')]\n\t\"\"\"\n\n\tdef __init__(self, model_filename, path_to_jar, *args, **kwargs):\n\t\tself._SEPARATOR = '/'\n\t\tsuper(stanford.StanfordPOSTagger, self).__init__(model_filename=model_filename, path_to_jar=path_to_jar, *args, **kwargs)\n\n\tdef tag(self, tokens):\n\t\treturn self.tag_sents([tokens])[0]\n\n\tdef tag_sents(self, sentences):\n\t\trefined = map(lambda s: [w.replace(' ', '_') for w in s], sentences)\n\t\treturn super(stanford.StanfordPOSTagger, self).tag_sents(refined)\n",
"step-ids": [
3,
5,
7,
8,
9
]
}
|
[
3,
5,
7,
8,
9
] |
/Users/apple/anaconda/lib/python3.5/operator.py
|
normal
|
{
"blob_id": "b4a267873c5823ecfa62a5e90b67c37f9cca3cd2",
"index": 8181,
"step-1": "/Users/apple/anaconda/lib/python3.5/operator.py",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
while n > 0:
r = n % 10
sum = sum + r
n = n // 10
print('The total sum of digits is:', sum)
<|reserved_special_token_1|>
def digit_sum(value):
    """Return the sum of the decimal digits of a non-negative integer.

    Repeatedly strips the last digit with divmod(value, 10); returns 0
    when value is 0 (the loop body never runs).
    """
    total = 0
    while value > 0:
        value, digit = divmod(value, 10)
        total += digit
    return total


if __name__ == '__main__':
    # Same interactive behavior as the original script, but the module is now
    # importable without prompting, and no longer shadows the builtin `sum`.
    n = int(input('Enter a number:\n'))
    print('The total sum of digits is:', digit_sum(n))
<|reserved_special_token_1|>
#Q7. Write a program to calculate the sum of digits of a given number.
n=int(input("Enter a number:\n"))
sum=0
while(n>0):
r=n%10
sum=sum+r
n=n//10
print("The total sum of digits is:",sum)
|
flexible
|
{
"blob_id": "78e3750a1bbe9f2f6680937729c1a810bd29fd4d",
"index": 4232,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nwhile n > 0:\n r = n % 10\n sum = sum + r\n n = n // 10\nprint('The total sum of digits is:', sum)\n",
"step-3": "n = int(input('Enter a number:\\n'))\nsum = 0\nwhile n > 0:\n r = n % 10\n sum = sum + r\n n = n // 10\nprint('The total sum of digits is:', sum)\n",
"step-4": "#Q7. Write a program to calculate the sum of digits of a given number.\r\n\r\nn=int(input(\"Enter a number:\\n\"))\r\nsum=0\r\nwhile(n>0):\r\n r=n%10\r\n sum=sum+r\r\n n=n//10\r\nprint(\"The total sum of digits is:\",sum)\r\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from django.urls import path
from . import views

# Route table for the park app: the bare path serves the home view and
# 'login/' serves the login view. The `name=` values allow URL reversing
# in templates, e.g. {% url 'park-home' %}.
urlpatterns = [
    path('', views.home, name ='park-home'),
    path('login/', views.login, name ='park-login'),
]
|
normal
|
{
"blob_id": "2fd490ca54f5d038997cec59a3e07c3f2c2d2538",
"index": 6757,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nurlpatterns = [path('', views.home, name='park-home'), path('login/', views\n .login, name='park-login')]\n",
"step-3": "from django.urls import path\nfrom . import views\nurlpatterns = [path('', views.home, name='park-home'), path('login/', views\n .login, name='park-login')]\n",
"step-4": "from django.urls import path\nfrom . import views\n\n\nurlpatterns = [\n path('', views.home, name ='park-home'), \n path('login/', views.login, name ='park-login'), \n]",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
def swap(a,b):
    # NOTE(review): despite its name this helper only prints its arguments;
    # it never swaps or returns anything, and the script below does not call it.
    print(a,b)
# Read both values as strings from the user.
a=input("enter a value 1 : ")
b=input("enter b value 2 : ")
# Tuple assignment swaps the two references without a temporary variable.
a,b=b,a
print("the vaalues after swaping the variables are below:")
print("the value of a is : ",a)
print("the value of b is : ",b)
|
normal
|
{
"blob_id": "4fbe4d474e10e08eafee3bcc6173f8cd6b797dde",
"index": 3203,
"step-1": "<mask token>\n",
"step-2": "def swap(a, b):\n print(a, b)\n\n\n<mask token>\n",
"step-3": "def swap(a, b):\n print(a, b)\n\n\n<mask token>\nprint('the vaalues after swaping the variables are below:')\nprint('the value of a is : ', a)\nprint('the value of b is : ', b)\n",
"step-4": "def swap(a, b):\n print(a, b)\n\n\na = input('enter a value 1 : ')\nb = input('enter b value 2 : ')\na, b = b, a\nprint('the vaalues after swaping the variables are below:')\nprint('the value of a is : ', a)\nprint('the value of b is : ', b)\n",
"step-5": "def swap(a,b):\n print(a,b)\na=input(\"enter a value 1 : \")\nb=input(\"enter b value 2 : \")\na,b=b,a\nprint(\"the vaalues after swaping the variables are below:\")\nprint(\"the value of a is : \",a)\nprint(\"the value of b is : \",b)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
class AmplitudeLogger:
<|reserved_special_token_0|>
async def log_event(self, event):
event = {'api_key': self.api_key, 'events': [event]}
try:
validate(instance=event, schema=self.api_schema)
except ValidationError:
log.error('Invalid payload', exc_info=True)
return None
async with aiohttp_client.post(API_URL, data=json.dumps(event)
) as resp:
if resp.status != 200:
log.warn('Failed to log event', exc_info=True)
return resp
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class AmplitudeLogger:
def __init__(self, api_key: str):
self.api_key = api_key
self.api_schema = json.loads(pkg_resources.read_text(resources,
'schema.json'))
async def log_event(self, event):
event = {'api_key': self.api_key, 'events': [event]}
try:
validate(instance=event, schema=self.api_schema)
except ValidationError:
log.error('Invalid payload', exc_info=True)
return None
async with aiohttp_client.post(API_URL, data=json.dumps(event)
) as resp:
if resp.status != 200:
log.warn('Failed to log event', exc_info=True)
return resp
<|reserved_special_token_1|>
<|reserved_special_token_0|>
log = logging.getLogger('amplitude-client')
API_URL = 'https://api.amplitude.com/2/httpapi'
class AmplitudeLogger:
def __init__(self, api_key: str):
self.api_key = api_key
self.api_schema = json.loads(pkg_resources.read_text(resources,
'schema.json'))
async def log_event(self, event):
event = {'api_key': self.api_key, 'events': [event]}
try:
validate(instance=event, schema=self.api_schema)
except ValidationError:
log.error('Invalid payload', exc_info=True)
return None
async with aiohttp_client.post(API_URL, data=json.dumps(event)
) as resp:
if resp.status != 200:
log.warn('Failed to log event', exc_info=True)
return resp
<|reserved_special_token_1|>
from . import resources
from jsonschema import validate
from jsonschema.exceptions import ValidationError
import aiohttp_client
import importlib.resources as pkg_resources
import json
import logging
log = logging.getLogger('amplitude-client')
API_URL = 'https://api.amplitude.com/2/httpapi'
class AmplitudeLogger:
def __init__(self, api_key: str):
self.api_key = api_key
self.api_schema = json.loads(pkg_resources.read_text(resources,
'schema.json'))
async def log_event(self, event):
event = {'api_key': self.api_key, 'events': [event]}
try:
validate(instance=event, schema=self.api_schema)
except ValidationError:
log.error('Invalid payload', exc_info=True)
return None
async with aiohttp_client.post(API_URL, data=json.dumps(event)
) as resp:
if resp.status != 200:
log.warn('Failed to log event', exc_info=True)
return resp
<|reserved_special_token_1|>
from . import resources
from jsonschema import validate
from jsonschema.exceptions import ValidationError
import aiohttp_client
import importlib.resources as pkg_resources
import json
import logging
log = logging.getLogger("amplitude-client")
API_URL = "https://api.amplitude.com/2/httpapi"
class AmplitudeLogger:
def __init__(self, api_key: str):
self.api_key = api_key
self.api_schema = json.loads(pkg_resources.read_text(resources, "schema.json"))
async def log_event(self, event):
# Amplitude API requires (user_id OR device_id) AND event_type
event = {"api_key": self.api_key, "events": [event]}
try:
validate(instance=event, schema=self.api_schema)
except ValidationError:
log.error("Invalid payload", exc_info=True)
return None
async with aiohttp_client.post(API_URL, data=json.dumps(event)) as resp:
if resp.status != 200:
log.warn("Failed to log event", exc_info=True)
return resp
|
flexible
|
{
"blob_id": "d32f009f373249b7b602ac36f29982273a2ed192",
"index": 2289,
"step-1": "<mask token>\n\n\nclass AmplitudeLogger:\n <mask token>\n\n async def log_event(self, event):\n event = {'api_key': self.api_key, 'events': [event]}\n try:\n validate(instance=event, schema=self.api_schema)\n except ValidationError:\n log.error('Invalid payload', exc_info=True)\n return None\n async with aiohttp_client.post(API_URL, data=json.dumps(event)\n ) as resp:\n if resp.status != 200:\n log.warn('Failed to log event', exc_info=True)\n return resp\n",
"step-2": "<mask token>\n\n\nclass AmplitudeLogger:\n\n def __init__(self, api_key: str):\n self.api_key = api_key\n self.api_schema = json.loads(pkg_resources.read_text(resources,\n 'schema.json'))\n\n async def log_event(self, event):\n event = {'api_key': self.api_key, 'events': [event]}\n try:\n validate(instance=event, schema=self.api_schema)\n except ValidationError:\n log.error('Invalid payload', exc_info=True)\n return None\n async with aiohttp_client.post(API_URL, data=json.dumps(event)\n ) as resp:\n if resp.status != 200:\n log.warn('Failed to log event', exc_info=True)\n return resp\n",
"step-3": "<mask token>\nlog = logging.getLogger('amplitude-client')\nAPI_URL = 'https://api.amplitude.com/2/httpapi'\n\n\nclass AmplitudeLogger:\n\n def __init__(self, api_key: str):\n self.api_key = api_key\n self.api_schema = json.loads(pkg_resources.read_text(resources,\n 'schema.json'))\n\n async def log_event(self, event):\n event = {'api_key': self.api_key, 'events': [event]}\n try:\n validate(instance=event, schema=self.api_schema)\n except ValidationError:\n log.error('Invalid payload', exc_info=True)\n return None\n async with aiohttp_client.post(API_URL, data=json.dumps(event)\n ) as resp:\n if resp.status != 200:\n log.warn('Failed to log event', exc_info=True)\n return resp\n",
"step-4": "from . import resources\nfrom jsonschema import validate\nfrom jsonschema.exceptions import ValidationError\nimport aiohttp_client\nimport importlib.resources as pkg_resources\nimport json\nimport logging\nlog = logging.getLogger('amplitude-client')\nAPI_URL = 'https://api.amplitude.com/2/httpapi'\n\n\nclass AmplitudeLogger:\n\n def __init__(self, api_key: str):\n self.api_key = api_key\n self.api_schema = json.loads(pkg_resources.read_text(resources,\n 'schema.json'))\n\n async def log_event(self, event):\n event = {'api_key': self.api_key, 'events': [event]}\n try:\n validate(instance=event, schema=self.api_schema)\n except ValidationError:\n log.error('Invalid payload', exc_info=True)\n return None\n async with aiohttp_client.post(API_URL, data=json.dumps(event)\n ) as resp:\n if resp.status != 200:\n log.warn('Failed to log event', exc_info=True)\n return resp\n",
"step-5": "from . import resources\nfrom jsonschema import validate\nfrom jsonschema.exceptions import ValidationError\n\nimport aiohttp_client\nimport importlib.resources as pkg_resources\nimport json\nimport logging\n\nlog = logging.getLogger(\"amplitude-client\")\n\nAPI_URL = \"https://api.amplitude.com/2/httpapi\"\n\n\nclass AmplitudeLogger:\n def __init__(self, api_key: str):\n self.api_key = api_key\n\n self.api_schema = json.loads(pkg_resources.read_text(resources, \"schema.json\"))\n\n async def log_event(self, event):\n # Amplitude API requires (user_id OR device_id) AND event_type\n\n event = {\"api_key\": self.api_key, \"events\": [event]}\n\n try:\n validate(instance=event, schema=self.api_schema)\n except ValidationError:\n log.error(\"Invalid payload\", exc_info=True)\n return None\n\n async with aiohttp_client.post(API_URL, data=json.dumps(event)) as resp:\n if resp.status != 200:\n log.warn(\"Failed to log event\", exc_info=True)\n\n return resp\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
def zero_pad(values, max_m):
m = len(values)
values += [0] * (max_m - m)
def solve_with_solver(values_copy, n):
return xpress_solver(values_copy, n)
def solve_with_net(values_copy, n):
start = time.time()
sum_vals = sum(values_copy)
new_values = [(val / sum_vals) for val in values_copy]
pred = net(torch.FloatTensor([float(n)] + new_values))
pred_num = float(pred.data[0])
final_result = pred_num * sum_vals
end = time.time()
return final_result, end - start
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def split_to_train_validation(path_to_data):
dataset = CustomDataset(path_to_data)
print(len(dataset))
batch_size = 300
validation_split = 0.2
shuffle_dataset = True
random_seed = 56
dataset_size = len(dataset)
indices = list(range(dataset_size))
split = int(np.floor(validation_split * dataset_size))
if shuffle_dataset:
np.random.seed(random_seed)
np.random.shuffle(indices)
train_indices, val_indices = indices[split:], indices[:split]
print(len(train_indices), len(val_indices))
train_sampler = SubsetRandomSampler(train_indices)
valid_sampler = SubsetRandomSampler(val_indices)
train_loader = DataLoader(dataset, batch_size=batch_size, sampler=
train_sampler)
validation_loader = DataLoader(dataset, batch_size=batch_size, sampler=
valid_sampler)
print(len(train_loader), len(validation_loader))
return train_loader, validation_loader
<|reserved_special_token_0|>
def compute_loss(dataloader, net):
loss = 0
if torch.cuda.is_available():
net.cuda()
net.eval()
n_batches = 0
with torch.no_grad():
for x, y in dataloader:
n_batches += 1
if torch.cuda.is_available():
x = x.cuda()
y = y.cuda()
pred = net(x)
loss += loss_func(pred, y).item()
loss = loss / n_batches
return loss
<|reserved_special_token_0|>
def zero_pad(values, max_m):
m = len(values)
values += [0] * (max_m - m)
def solve_with_solver(values_copy, n):
return xpress_solver(values_copy, n)
def solve_with_net(values_copy, n):
start = time.time()
sum_vals = sum(values_copy)
new_values = [(val / sum_vals) for val in values_copy]
pred = net(torch.FloatTensor([float(n)] + new_values))
pred_num = float(pred.data[0])
final_result = pred_num * sum_vals
end = time.time()
return final_result, end - start
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def split_to_train_validation(path_to_data):
dataset = CustomDataset(path_to_data)
print(len(dataset))
batch_size = 300
validation_split = 0.2
shuffle_dataset = True
random_seed = 56
dataset_size = len(dataset)
indices = list(range(dataset_size))
split = int(np.floor(validation_split * dataset_size))
if shuffle_dataset:
np.random.seed(random_seed)
np.random.shuffle(indices)
train_indices, val_indices = indices[split:], indices[:split]
print(len(train_indices), len(val_indices))
train_sampler = SubsetRandomSampler(train_indices)
valid_sampler = SubsetRandomSampler(val_indices)
train_loader = DataLoader(dataset, batch_size=batch_size, sampler=
train_sampler)
validation_loader = DataLoader(dataset, batch_size=batch_size, sampler=
valid_sampler)
print(len(train_loader), len(validation_loader))
return train_loader, validation_loader
<|reserved_special_token_0|>
def compute_loss(dataloader, net):
loss = 0
if torch.cuda.is_available():
net.cuda()
net.eval()
n_batches = 0
with torch.no_grad():
for x, y in dataloader:
n_batches += 1
if torch.cuda.is_available():
x = x.cuda()
y = y.cuda()
pred = net(x)
loss += loss_func(pred, y).item()
loss = loss / n_batches
return loss
<|reserved_special_token_0|>
if torch.cuda.is_available():
net.cuda()
for epoch in pbar:
if len(validation_loss_vs_epoch) > 1:
print('epoch', epoch, ' val loss:' + '{0:.5f}'.format(
validation_loss_vs_epoch[-1]))
net.train()
for x, y in train_loader:
y = y.to(torch.float32)
if torch.cuda.is_available():
x = x.cuda()
y = y.cuda()
optimizer.zero_grad()
pred = net(x)
loss = loss_func(pred, y)
loss.backward()
optimizer.step()
net.eval()
valid_loss = compute_loss(validation_loader, net)
validation_loss_vs_epoch.append(valid_loss)
def zero_pad(values, max_m):
m = len(values)
values += [0] * (max_m - m)
def solve_with_solver(values_copy, n):
return xpress_solver(values_copy, n)
def solve_with_net(values_copy, n):
start = time.time()
sum_vals = sum(values_copy)
new_values = [(val / sum_vals) for val in values_copy]
pred = net(torch.FloatTensor([float(n)] + new_values))
pred_num = float(pred.data[0])
final_result = pred_num * sum_vals
end = time.time()
return final_result, end - start
def test_net(path):
max_m = 100
filelist = glob.glob(path + '/*.json')
print(len(filelist))
test_result = dict()
filelist_len = len(filelist)
for count, filename in enumerate(filelist):
n, m, max_val = get_params_from_filename(filename)
data_list_in_file = []
with open(filename) as jsonFile:
data_list_in_file = json.load(jsonFile)
idx = random.randint(0, len(data_list_in_file) - 1)
example = data_list_in_file[idx]
values = example[0]['values']
values_copy = copy.deepcopy(values)
values_copy.sort(reverse=True)
solver_result, solver_time = solve_with_solver(values_copy, n)
zero_pad(values_copy, max_m)
net_result, net_time = solve_with_net(values_copy, n)
test_result[str((n, m, max_val))] = {'values_idx': idx,
'solver_result': solver_result, 'solver_time': solver_time,
'net_result': net_result, 'net_time': net_time}
if count % 20 == 0:
print(count, 'out of', filelist_len)
test_result_path = './TestResults/test_results.json'
with open(test_result_path, 'w+') as json_file:
json.dump(test_result, json_file, indent=4)
test_net(path_to_data)
<|reserved_special_token_1|>
import random
import glob
import json
import time
from torch.utils.data import Dataset, DataLoader, SubsetRandomSampler
from SimpleDataLoader import CustomDataset, get_params_from_filename
import numpy as np
from DNN_model import Net
import torch.optim as optim
import torch.nn as nn
import torch
from tqdm import tqdm
from MMS_compute import xpress_solver
import copy
path_to_data = 'Dataset'
def split_to_train_validation(path_to_data):
dataset = CustomDataset(path_to_data)
print(len(dataset))
batch_size = 300
validation_split = 0.2
shuffle_dataset = True
random_seed = 56
dataset_size = len(dataset)
indices = list(range(dataset_size))
split = int(np.floor(validation_split * dataset_size))
if shuffle_dataset:
np.random.seed(random_seed)
np.random.shuffle(indices)
train_indices, val_indices = indices[split:], indices[:split]
print(len(train_indices), len(val_indices))
train_sampler = SubsetRandomSampler(train_indices)
valid_sampler = SubsetRandomSampler(val_indices)
train_loader = DataLoader(dataset, batch_size=batch_size, sampler=
train_sampler)
validation_loader = DataLoader(dataset, batch_size=batch_size, sampler=
valid_sampler)
print(len(train_loader), len(validation_loader))
return train_loader, validation_loader
train_loader, validation_loader = split_to_train_validation(path_to_data)
net = Net()
loss_func = nn.MSELoss()
optimizer = optim.Adam(net.parameters(), lr=0.0001)
def compute_loss(dataloader, net):
loss = 0
if torch.cuda.is_available():
net.cuda()
net.eval()
n_batches = 0
with torch.no_grad():
for x, y in dataloader:
n_batches += 1
if torch.cuda.is_available():
x = x.cuda()
y = y.cuda()
pred = net(x)
loss += loss_func(pred, y).item()
loss = loss / n_batches
return loss
n_epochs = 50
pbar = tqdm(range(n_epochs))
validation_loss_vs_epoch = []
if torch.cuda.is_available():
net.cuda()
for epoch in pbar:
if len(validation_loss_vs_epoch) > 1:
print('epoch', epoch, ' val loss:' + '{0:.5f}'.format(
validation_loss_vs_epoch[-1]))
net.train()
for x, y in train_loader:
y = y.to(torch.float32)
if torch.cuda.is_available():
x = x.cuda()
y = y.cuda()
optimizer.zero_grad()
pred = net(x)
loss = loss_func(pred, y)
loss.backward()
optimizer.step()
net.eval()
valid_loss = compute_loss(validation_loader, net)
validation_loss_vs_epoch.append(valid_loss)
def zero_pad(values, max_m):
m = len(values)
values += [0] * (max_m - m)
def solve_with_solver(values_copy, n):
return xpress_solver(values_copy, n)
def solve_with_net(values_copy, n):
start = time.time()
sum_vals = sum(values_copy)
new_values = [(val / sum_vals) for val in values_copy]
pred = net(torch.FloatTensor([float(n)] + new_values))
pred_num = float(pred.data[0])
final_result = pred_num * sum_vals
end = time.time()
return final_result, end - start
def test_net(path):
max_m = 100
filelist = glob.glob(path + '/*.json')
print(len(filelist))
test_result = dict()
filelist_len = len(filelist)
for count, filename in enumerate(filelist):
n, m, max_val = get_params_from_filename(filename)
data_list_in_file = []
with open(filename) as jsonFile:
data_list_in_file = json.load(jsonFile)
idx = random.randint(0, len(data_list_in_file) - 1)
example = data_list_in_file[idx]
values = example[0]['values']
values_copy = copy.deepcopy(values)
values_copy.sort(reverse=True)
solver_result, solver_time = solve_with_solver(values_copy, n)
zero_pad(values_copy, max_m)
net_result, net_time = solve_with_net(values_copy, n)
test_result[str((n, m, max_val))] = {'values_idx': idx,
'solver_result': solver_result, 'solver_time': solver_time,
'net_result': net_result, 'net_time': net_time}
if count % 20 == 0:
print(count, 'out of', filelist_len)
test_result_path = './TestResults/test_results.json'
with open(test_result_path, 'w+') as json_file:
json.dump(test_result, json_file, indent=4)
test_net(path_to_data)
<|reserved_special_token_1|>
import random
import glob
import json
import time
from torch.utils.data import Dataset, DataLoader, SubsetRandomSampler
from SimpleDataLoader import CustomDataset, get_params_from_filename
import numpy as np
from DNN_model import Net
import torch.optim as optim
import torch.nn as nn
import torch
from tqdm import tqdm
from MMS_compute import xpress_solver
import copy
path_to_data = 'Dataset'
def split_to_train_validation(path_to_data):
dataset = CustomDataset(path_to_data)
print(len(dataset))
batch_size = 300
validation_split = 0.2
shuffle_dataset = True
random_seed= 56
dataset_size = len(dataset)
indices = list(range(dataset_size))
split = int(np.floor(validation_split * dataset_size))
if shuffle_dataset :
np.random.seed(random_seed)
np.random.shuffle(indices)
train_indices, val_indices = indices[split:], indices[:split]
print(len(train_indices), len(val_indices))
# Creating PT data samplers and loaders:
train_sampler = SubsetRandomSampler(train_indices)
valid_sampler = SubsetRandomSampler(val_indices)
train_loader = DataLoader(dataset, batch_size=batch_size,
sampler=train_sampler)
validation_loader = DataLoader(dataset, batch_size=batch_size,
sampler=valid_sampler)
print(len(train_loader), len(validation_loader))
return train_loader, validation_loader
train_loader, validation_loader = split_to_train_validation(path_to_data)
net = Net()
loss_func = nn.MSELoss()
# loss_func = nn.L1Loss()
optimizer = optim.Adam(net.parameters(), lr=1e-4)
def compute_loss(dataloader, net):
loss = 0
if torch.cuda.is_available():
net.cuda()
net.eval()
n_batches = 0
with torch.no_grad():
for x, y in dataloader:
n_batches += 1
if torch.cuda.is_available():
x = x.cuda()
y = y.cuda()
pred = net(x)
loss += loss_func(pred, y).item()
loss = loss / n_batches
return loss
n_epochs = 50
pbar = tqdm(range(n_epochs))
validation_loss_vs_epoch = []
if torch.cuda.is_available():
net.cuda()
for epoch in pbar:
if len(validation_loss_vs_epoch) > 1:
print('epoch', epoch, ' val loss:' + '{0:.5f}'.format(validation_loss_vs_epoch[-1]))
net.train() # put the net into "training mode"
for x, y in train_loader:
y = y.to(torch.float32)
if torch.cuda.is_available():
x = x.cuda()
y = y.cuda()
optimizer.zero_grad()
pred = net(x)
loss = loss_func(pred, y)
loss.backward()
optimizer.step()
net.eval() # put the net into evaluation mode
valid_loss = compute_loss(validation_loader, net)
validation_loss_vs_epoch.append(valid_loss)
# n = 5
# m = 50
# max_val = 100
# values = [random.randrange(0, max_val + 1) for _ in range(m)]
# values.sort(reverse=True)
# values += [0]*50
# mms = xpress_solver(values,n)[0]
# sum_vals = sum(values)
# new_values = [val/sum_vals for val in values]
# pred = net(torch.FloatTensor([float(n)]+new_values))
# pred_num = float(pred.data[0])
# print(pred, mms, pred*sum_vals)
# print(pred_num*sum_vals)
def zero_pad(values, max_m):
m = len(values)
values += [0] * (max_m - m)
def solve_with_solver(values_copy, n):
return xpress_solver(values_copy, n)
def solve_with_net(values_copy, n):
start = time.time()
sum_vals = sum(values_copy)
new_values = [val / sum_vals for val in values_copy]
pred = net(torch.FloatTensor([float(n)] + new_values))
pred_num = float(pred.data[0])
final_result = pred_num*sum_vals
end = time.time()
return final_result, end-start
def test_net(path):
    """Compare the network's MMS estimate against the exact solver.

    For every `*.json` dataset file under `path`, pick one random example,
    solve it both with the Xpress solver and with the trained network, and
    dump per-instance results/timings to ./TestResults/test_results.json.
    """
    max_m = 100  # network input width: values are zero-padded to this length
    filelist = glob.glob(path + '/*.json')
    print(len(filelist))
    test_result = dict()
    filelist_len = len(filelist)
    for count, filename in enumerate(filelist):
        # (n agents, m items, max item value) are encoded in the file name.
        n, m, max_val = get_params_from_filename(filename)
        data_list_in_file = []
        with open(filename) as jsonFile:
            data_list_in_file = json.load(jsonFile)
        # Sample one example per file rather than evaluating all of them.
        idx = random.randint(0, len(data_list_in_file)-1)
        example=data_list_in_file[idx]
        values = example[0]["values"]
        # Deep-copy so the loaded data is not mutated by sort/pad below.
        values_copy = copy.deepcopy(values)
        values_copy.sort(reverse=True)
        solver_result, solver_time = solve_with_solver(values_copy, n)
        # Pad only after solving exactly; the net expects fixed-length input.
        zero_pad(values_copy, max_m)
        net_result, net_time = solve_with_net(values_copy, n)
        test_result[str((n, m, max_val))] = {
            'values_idx': idx,
            'solver_result': solver_result,
            'solver_time':solver_time,
            'net_result':net_result,
            'net_time':net_time
        }
        if count % 20 == 0:
            print(count, 'out of', filelist_len)
    test_result_path = './TestResults/test_results.json'
    with open(test_result_path, 'w+') as json_file:
        json.dump(test_result, json_file, indent=4)
test_net(path_to_data)
|
flexible
|
{
"blob_id": "1f63f9234596787e4859b740d3a7fbfaacc9c0c8",
"index": 9930,
"step-1": "<mask token>\n\n\ndef zero_pad(values, max_m):\n m = len(values)\n values += [0] * (max_m - m)\n\n\ndef solve_with_solver(values_copy, n):\n return xpress_solver(values_copy, n)\n\n\ndef solve_with_net(values_copy, n):\n start = time.time()\n sum_vals = sum(values_copy)\n new_values = [(val / sum_vals) for val in values_copy]\n pred = net(torch.FloatTensor([float(n)] + new_values))\n pred_num = float(pred.data[0])\n final_result = pred_num * sum_vals\n end = time.time()\n return final_result, end - start\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef split_to_train_validation(path_to_data):\n dataset = CustomDataset(path_to_data)\n print(len(dataset))\n batch_size = 300\n validation_split = 0.2\n shuffle_dataset = True\n random_seed = 56\n dataset_size = len(dataset)\n indices = list(range(dataset_size))\n split = int(np.floor(validation_split * dataset_size))\n if shuffle_dataset:\n np.random.seed(random_seed)\n np.random.shuffle(indices)\n train_indices, val_indices = indices[split:], indices[:split]\n print(len(train_indices), len(val_indices))\n train_sampler = SubsetRandomSampler(train_indices)\n valid_sampler = SubsetRandomSampler(val_indices)\n train_loader = DataLoader(dataset, batch_size=batch_size, sampler=\n train_sampler)\n validation_loader = DataLoader(dataset, batch_size=batch_size, sampler=\n valid_sampler)\n print(len(train_loader), len(validation_loader))\n return train_loader, validation_loader\n\n\n<mask token>\n\n\ndef compute_loss(dataloader, net):\n loss = 0\n if torch.cuda.is_available():\n net.cuda()\n net.eval()\n n_batches = 0\n with torch.no_grad():\n for x, y in dataloader:\n n_batches += 1\n if torch.cuda.is_available():\n x = x.cuda()\n y = y.cuda()\n pred = net(x)\n loss += loss_func(pred, y).item()\n loss = loss / n_batches\n return loss\n\n\n<mask token>\n\n\ndef zero_pad(values, max_m):\n m = len(values)\n values += [0] * (max_m - m)\n\n\ndef solve_with_solver(values_copy, n):\n return xpress_solver(values_copy, n)\n\n\ndef solve_with_net(values_copy, n):\n start = time.time()\n sum_vals = sum(values_copy)\n new_values = [(val / sum_vals) for val in values_copy]\n pred = net(torch.FloatTensor([float(n)] + new_values))\n pred_num = float(pred.data[0])\n final_result = pred_num * sum_vals\n end = time.time()\n return final_result, end - start\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef split_to_train_validation(path_to_data):\n dataset = CustomDataset(path_to_data)\n print(len(dataset))\n batch_size = 300\n validation_split = 0.2\n shuffle_dataset = True\n random_seed = 56\n dataset_size = len(dataset)\n indices = list(range(dataset_size))\n split = int(np.floor(validation_split * dataset_size))\n if shuffle_dataset:\n np.random.seed(random_seed)\n np.random.shuffle(indices)\n train_indices, val_indices = indices[split:], indices[:split]\n print(len(train_indices), len(val_indices))\n train_sampler = SubsetRandomSampler(train_indices)\n valid_sampler = SubsetRandomSampler(val_indices)\n train_loader = DataLoader(dataset, batch_size=batch_size, sampler=\n train_sampler)\n validation_loader = DataLoader(dataset, batch_size=batch_size, sampler=\n valid_sampler)\n print(len(train_loader), len(validation_loader))\n return train_loader, validation_loader\n\n\n<mask token>\n\n\ndef compute_loss(dataloader, net):\n loss = 0\n if torch.cuda.is_available():\n net.cuda()\n net.eval()\n n_batches = 0\n with torch.no_grad():\n for x, y in dataloader:\n n_batches += 1\n if torch.cuda.is_available():\n x = x.cuda()\n y = y.cuda()\n pred = net(x)\n loss += loss_func(pred, y).item()\n loss = loss / n_batches\n return loss\n\n\n<mask token>\nif torch.cuda.is_available():\n net.cuda()\nfor epoch in pbar:\n if len(validation_loss_vs_epoch) > 1:\n print('epoch', epoch, ' val loss:' + '{0:.5f}'.format(\n validation_loss_vs_epoch[-1]))\n net.train()\n for x, y in train_loader:\n y = y.to(torch.float32)\n if torch.cuda.is_available():\n x = x.cuda()\n y = y.cuda()\n optimizer.zero_grad()\n pred = net(x)\n loss = loss_func(pred, y)\n loss.backward()\n optimizer.step()\n net.eval()\n valid_loss = compute_loss(validation_loader, net)\n validation_loss_vs_epoch.append(valid_loss)\n\n\ndef zero_pad(values, max_m):\n m = len(values)\n values += [0] * (max_m - m)\n\n\ndef solve_with_solver(values_copy, n):\n return xpress_solver(values_copy, 
n)\n\n\ndef solve_with_net(values_copy, n):\n start = time.time()\n sum_vals = sum(values_copy)\n new_values = [(val / sum_vals) for val in values_copy]\n pred = net(torch.FloatTensor([float(n)] + new_values))\n pred_num = float(pred.data[0])\n final_result = pred_num * sum_vals\n end = time.time()\n return final_result, end - start\n\n\ndef test_net(path):\n max_m = 100\n filelist = glob.glob(path + '/*.json')\n print(len(filelist))\n test_result = dict()\n filelist_len = len(filelist)\n for count, filename in enumerate(filelist):\n n, m, max_val = get_params_from_filename(filename)\n data_list_in_file = []\n with open(filename) as jsonFile:\n data_list_in_file = json.load(jsonFile)\n idx = random.randint(0, len(data_list_in_file) - 1)\n example = data_list_in_file[idx]\n values = example[0]['values']\n values_copy = copy.deepcopy(values)\n values_copy.sort(reverse=True)\n solver_result, solver_time = solve_with_solver(values_copy, n)\n zero_pad(values_copy, max_m)\n net_result, net_time = solve_with_net(values_copy, n)\n test_result[str((n, m, max_val))] = {'values_idx': idx,\n 'solver_result': solver_result, 'solver_time': solver_time,\n 'net_result': net_result, 'net_time': net_time}\n if count % 20 == 0:\n print(count, 'out of', filelist_len)\n test_result_path = './TestResults/test_results.json'\n with open(test_result_path, 'w+') as json_file:\n json.dump(test_result, json_file, indent=4)\n\n\ntest_net(path_to_data)\n",
"step-4": "import random\nimport glob\nimport json\nimport time\nfrom torch.utils.data import Dataset, DataLoader, SubsetRandomSampler\nfrom SimpleDataLoader import CustomDataset, get_params_from_filename\nimport numpy as np\nfrom DNN_model import Net\nimport torch.optim as optim\nimport torch.nn as nn\nimport torch\nfrom tqdm import tqdm\nfrom MMS_compute import xpress_solver\nimport copy\npath_to_data = 'Dataset'\n\n\ndef split_to_train_validation(path_to_data):\n dataset = CustomDataset(path_to_data)\n print(len(dataset))\n batch_size = 300\n validation_split = 0.2\n shuffle_dataset = True\n random_seed = 56\n dataset_size = len(dataset)\n indices = list(range(dataset_size))\n split = int(np.floor(validation_split * dataset_size))\n if shuffle_dataset:\n np.random.seed(random_seed)\n np.random.shuffle(indices)\n train_indices, val_indices = indices[split:], indices[:split]\n print(len(train_indices), len(val_indices))\n train_sampler = SubsetRandomSampler(train_indices)\n valid_sampler = SubsetRandomSampler(val_indices)\n train_loader = DataLoader(dataset, batch_size=batch_size, sampler=\n train_sampler)\n validation_loader = DataLoader(dataset, batch_size=batch_size, sampler=\n valid_sampler)\n print(len(train_loader), len(validation_loader))\n return train_loader, validation_loader\n\n\ntrain_loader, validation_loader = split_to_train_validation(path_to_data)\nnet = Net()\nloss_func = nn.MSELoss()\noptimizer = optim.Adam(net.parameters(), lr=0.0001)\n\n\ndef compute_loss(dataloader, net):\n loss = 0\n if torch.cuda.is_available():\n net.cuda()\n net.eval()\n n_batches = 0\n with torch.no_grad():\n for x, y in dataloader:\n n_batches += 1\n if torch.cuda.is_available():\n x = x.cuda()\n y = y.cuda()\n pred = net(x)\n loss += loss_func(pred, y).item()\n loss = loss / n_batches\n return loss\n\n\nn_epochs = 50\npbar = tqdm(range(n_epochs))\nvalidation_loss_vs_epoch = []\nif torch.cuda.is_available():\n net.cuda()\nfor epoch in pbar:\n if 
len(validation_loss_vs_epoch) > 1:\n print('epoch', epoch, ' val loss:' + '{0:.5f}'.format(\n validation_loss_vs_epoch[-1]))\n net.train()\n for x, y in train_loader:\n y = y.to(torch.float32)\n if torch.cuda.is_available():\n x = x.cuda()\n y = y.cuda()\n optimizer.zero_grad()\n pred = net(x)\n loss = loss_func(pred, y)\n loss.backward()\n optimizer.step()\n net.eval()\n valid_loss = compute_loss(validation_loader, net)\n validation_loss_vs_epoch.append(valid_loss)\n\n\ndef zero_pad(values, max_m):\n m = len(values)\n values += [0] * (max_m - m)\n\n\ndef solve_with_solver(values_copy, n):\n return xpress_solver(values_copy, n)\n\n\ndef solve_with_net(values_copy, n):\n start = time.time()\n sum_vals = sum(values_copy)\n new_values = [(val / sum_vals) for val in values_copy]\n pred = net(torch.FloatTensor([float(n)] + new_values))\n pred_num = float(pred.data[0])\n final_result = pred_num * sum_vals\n end = time.time()\n return final_result, end - start\n\n\ndef test_net(path):\n max_m = 100\n filelist = glob.glob(path + '/*.json')\n print(len(filelist))\n test_result = dict()\n filelist_len = len(filelist)\n for count, filename in enumerate(filelist):\n n, m, max_val = get_params_from_filename(filename)\n data_list_in_file = []\n with open(filename) as jsonFile:\n data_list_in_file = json.load(jsonFile)\n idx = random.randint(0, len(data_list_in_file) - 1)\n example = data_list_in_file[idx]\n values = example[0]['values']\n values_copy = copy.deepcopy(values)\n values_copy.sort(reverse=True)\n solver_result, solver_time = solve_with_solver(values_copy, n)\n zero_pad(values_copy, max_m)\n net_result, net_time = solve_with_net(values_copy, n)\n test_result[str((n, m, max_val))] = {'values_idx': idx,\n 'solver_result': solver_result, 'solver_time': solver_time,\n 'net_result': net_result, 'net_time': net_time}\n if count % 20 == 0:\n print(count, 'out of', filelist_len)\n test_result_path = './TestResults/test_results.json'\n with open(test_result_path, 'w+') as 
json_file:\n json.dump(test_result, json_file, indent=4)\n\n\ntest_net(path_to_data)\n",
"step-5": "import random\nimport glob\nimport json\nimport time\n\nfrom torch.utils.data import Dataset, DataLoader, SubsetRandomSampler\nfrom SimpleDataLoader import CustomDataset, get_params_from_filename\nimport numpy as np\nfrom DNN_model import Net\nimport torch.optim as optim\nimport torch.nn as nn\nimport torch\nfrom tqdm import tqdm\nfrom MMS_compute import xpress_solver\nimport copy\n\n\npath_to_data = 'Dataset'\n\ndef split_to_train_validation(path_to_data):\n\n dataset = CustomDataset(path_to_data)\n print(len(dataset))\n\n batch_size = 300\n validation_split = 0.2\n shuffle_dataset = True\n random_seed= 56\n dataset_size = len(dataset)\n indices = list(range(dataset_size))\n split = int(np.floor(validation_split * dataset_size))\n if shuffle_dataset :\n np.random.seed(random_seed)\n np.random.shuffle(indices)\n train_indices, val_indices = indices[split:], indices[:split]\n print(len(train_indices), len(val_indices))\n\n # Creating PT data samplers and loaders:\n train_sampler = SubsetRandomSampler(train_indices)\n valid_sampler = SubsetRandomSampler(val_indices)\n\n train_loader = DataLoader(dataset, batch_size=batch_size,\n sampler=train_sampler)\n validation_loader = DataLoader(dataset, batch_size=batch_size,\n sampler=valid_sampler)\n\n print(len(train_loader), len(validation_loader))\n return train_loader, validation_loader\n\n\ntrain_loader, validation_loader = split_to_train_validation(path_to_data)\n\nnet = Net()\n\n\n\n\n\nloss_func = nn.MSELoss()\n# loss_func = nn.L1Loss()\noptimizer = optim.Adam(net.parameters(), lr=1e-4)\n\n\ndef compute_loss(dataloader, net):\n loss = 0\n\n if torch.cuda.is_available():\n net.cuda()\n net.eval()\n\n n_batches = 0\n with torch.no_grad():\n for x, y in dataloader:\n n_batches += 1\n\n if torch.cuda.is_available():\n x = x.cuda()\n y = y.cuda()\n pred = net(x)\n\n loss += loss_func(pred, y).item()\n\n loss = loss / n_batches\n return loss\n\n\n\n\nn_epochs = 50\n\npbar = 
tqdm(range(n_epochs))\nvalidation_loss_vs_epoch = []\n\nif torch.cuda.is_available():\n net.cuda()\n\nfor epoch in pbar:\n\n if len(validation_loss_vs_epoch) > 1:\n print('epoch', epoch, ' val loss:' + '{0:.5f}'.format(validation_loss_vs_epoch[-1]))\n\n net.train() # put the net into \"training mode\"\n for x, y in train_loader:\n y = y.to(torch.float32)\n\n if torch.cuda.is_available():\n x = x.cuda()\n y = y.cuda()\n\n optimizer.zero_grad()\n pred = net(x)\n loss = loss_func(pred, y)\n loss.backward()\n optimizer.step()\n\n net.eval() # put the net into evaluation mode\n\n valid_loss = compute_loss(validation_loader, net)\n\n validation_loss_vs_epoch.append(valid_loss)\n\n# n = 5\n# m = 50\n# max_val = 100\n# values = [random.randrange(0, max_val + 1) for _ in range(m)]\n# values.sort(reverse=True)\n# values += [0]*50\n# mms = xpress_solver(values,n)[0]\n# sum_vals = sum(values)\n# new_values = [val/sum_vals for val in values]\n# pred = net(torch.FloatTensor([float(n)]+new_values))\n# pred_num = float(pred.data[0])\n# print(pred, mms, pred*sum_vals)\n# print(pred_num*sum_vals)\n\n\ndef zero_pad(values, max_m):\n m = len(values)\n values += [0] * (max_m - m)\n\n\ndef solve_with_solver(values_copy, n):\n return xpress_solver(values_copy, n)\n\n\n\ndef solve_with_net(values_copy, n):\n start = time.time()\n sum_vals = sum(values_copy)\n new_values = [val / sum_vals for val in values_copy]\n pred = net(torch.FloatTensor([float(n)] + new_values))\n pred_num = float(pred.data[0])\n final_result = pred_num*sum_vals\n end = time.time()\n return final_result, end-start\n\ndef test_net(path):\n max_m = 100\n filelist = glob.glob(path + '/*.json')\n print(len(filelist))\n\n test_result = dict()\n filelist_len = len(filelist)\n for count, filename in enumerate(filelist):\n n, m, max_val = get_params_from_filename(filename)\n data_list_in_file = []\n with open(filename) as jsonFile:\n data_list_in_file = json.load(jsonFile)\n idx = random.randint(0, 
len(data_list_in_file)-1)\n example=data_list_in_file[idx]\n values = example[0][\"values\"]\n values_copy = copy.deepcopy(values)\n values_copy.sort(reverse=True)\n solver_result, solver_time = solve_with_solver(values_copy, n)\n\n zero_pad(values_copy, max_m)\n net_result, net_time = solve_with_net(values_copy, n)\n test_result[str((n, m, max_val))] = {\n 'values_idx': idx,\n 'solver_result': solver_result,\n 'solver_time':solver_time,\n 'net_result':net_result,\n 'net_time':net_time\n }\n if count % 20 == 0:\n print(count, 'out of', filelist_len)\n test_result_path = './TestResults/test_results.json'\n with open(test_result_path, 'w+') as json_file:\n json.dump(test_result, json_file, indent=4)\n\ntest_net(path_to_data)",
"step-ids": [
3,
5,
7,
9,
10
]
}
|
[
3,
5,
7,
9,
10
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
with open(tree_csv, 'rU') as csvinput:
with open('../harbordvillage/outfile.csv', 'w+') as csvoutput:
writer = csv.writer(csvoutput, quoting=csv.QUOTE_NONNUMERIC)
reader = csv.reader(csvinput)
all = []
row = next(reader)
row.append('Address')
all.append(row)
for row in reader:
add = '%s %s %s %s' % (row[1], row[0], 'Toronto', 'Canada')
results = Geocoder.geocode(add)
row[0] = results.route
ind = results[0].coordinates
lat = ind[0]
lon = ind[1]
ind = str(lat) + ' ' + str(lon)
print(ind)
mypoint = fromstr('POINT(' + ind + ')')
try:
row.append(mypoint)
except:
pass
all.append(row)
print(row)
writer.writerows(all)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
tree_csv = os.path.abspath('../harbordvillage/Inventory2009_test.csv')
with open(tree_csv, 'rU') as csvinput:
with open('../harbordvillage/outfile.csv', 'w+') as csvoutput:
writer = csv.writer(csvoutput, quoting=csv.QUOTE_NONNUMERIC)
reader = csv.reader(csvinput)
all = []
row = next(reader)
row.append('Address')
all.append(row)
for row in reader:
add = '%s %s %s %s' % (row[1], row[0], 'Toronto', 'Canada')
results = Geocoder.geocode(add)
row[0] = results.route
ind = results[0].coordinates
lat = ind[0]
lon = ind[1]
ind = str(lat) + ' ' + str(lon)
print(ind)
mypoint = fromstr('POINT(' + ind + ')')
try:
row.append(mypoint)
except:
pass
all.append(row)
print(row)
writer.writerows(all)
<|reserved_special_token_1|>
import os
import time
from django.contrib.gis.geos import fromstr
import csv
from pygeocoder import Geocoder
# Input CSV of a tree inventory; each row is geocoded and re-written with a
# GEOS point appended.
tree_csv = os.path.abspath('../harbordvillage/Inventory2009_test.csv')
# NOTE(review): the 'U' open mode was removed in Python 3.11; csv files
# should be opened with newline='' — confirm the target Python version.
with open(tree_csv, 'rU') as csvinput:
    with open('../harbordvillage/outfile.csv', 'w+') as csvoutput:
        writer = csv.writer(csvoutput, quoting=csv.QUOTE_NONNUMERIC)
        reader = csv.reader(csvinput)
        all = []
        # Copy the header row and add a column name for the geocoded address.
        row = next(reader)
        row.append('Address')
        all.append(row)
        for row in reader:
            # Free-form address; presumably row[1] is the street number and
            # row[0] the street name — TODO confirm against the CSV schema.
            add = '%s %s %s %s' % (row[1], row[0], 'Toronto', 'Canada')
            results = Geocoder.geocode(add)
            # Replace the raw street field with the canonical route name.
            row[0] = results.route
            ind = results[0].coordinates
            lat = ind[0]
            lon = ind[1]
            ind = str(lat) + ' ' + str(lon)
            print(ind)
            # NOTE(review): WKT convention is POINT(x y) = (lon lat); this
            # writes latitude first — confirm that is intended downstream.
            mypoint = fromstr('POINT(' + ind + ')')
            try:
                row.append(mypoint)
            except:
                pass
            all.append(row)
            print(row)
        writer.writerows(all)
<|reserved_special_token_1|>
import os
# didnt endup using this
import time
# from django.contrib.gis.utils import LayerMapping
from django.contrib.gis.geos import fromstr
# from models import Harbord
import csv
from pygeocoder import Geocoder
# from django.contrib.gis.geos import (Point, fromstr, fromfile,
# GEOSGeometry, MultiPoint, MultiPolygon, Polygon)
# Input CSV of a tree inventory; each row is geocoded and re-written with a
# GEOS point appended.
tree_csv = os.path.abspath('../harbordvillage/Inventory2009_test.csv')
#Setup
# NOTE(review): the 'U' open mode was removed in Python 3.11; csv files
# should be opened with newline='' — confirm the target Python version.
with open(tree_csv, "rU") as csvinput:
    with open("../harbordvillage/outfile.csv","w+") as csvoutput:
        writer = csv.writer(csvoutput,quoting=csv.QUOTE_NONNUMERIC)
        reader = csv.reader(csvinput)
        all = []
        # Copy the header row and add a column name for the geocoded address.
        row = next(reader)
        row.append('Address')
        all.append(row)
        for row in reader:
            # Free-form address; presumably row[1] is the street number and
            # row[0] the street name — TODO confirm against the CSV schema.
            add=("%s %s %s %s" % (row[1], row[0], 'Toronto', 'Canada'))
            # pygeocode stuff
            # time.sleep(1)
            results = Geocoder.geocode(add)
            row[0] = results.route
            # print(isinstance(results, basestring))
            ind = results[0].coordinates
            lat=ind[0]
            lon=ind[1]
            ind= str(lat) + ' ' + str(lon)
            print(ind)
            # NOTE(review): WKT convention is POINT(x y) = (lon lat); this
            # writes latitude first — confirm that is intended downstream.
            mypoint = fromstr('POINT('+ ind + ')')
            # print(type(mypoint))
            try:
                row.append(mypoint)
            except:
                pass
            all.append(row)
            print(row)
            # row.append(results.cooridnates)
            # print(row)
        writer.writerows(all)
|
flexible
|
{
"blob_id": "40b9114e4348bab5d76d68a937b3abe95a90c230",
"index": 4130,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nwith open(tree_csv, 'rU') as csvinput:\n with open('../harbordvillage/outfile.csv', 'w+') as csvoutput:\n writer = csv.writer(csvoutput, quoting=csv.QUOTE_NONNUMERIC)\n reader = csv.reader(csvinput)\n all = []\n row = next(reader)\n row.append('Address')\n all.append(row)\n for row in reader:\n add = '%s %s %s %s' % (row[1], row[0], 'Toronto', 'Canada')\n results = Geocoder.geocode(add)\n row[0] = results.route\n ind = results[0].coordinates\n lat = ind[0]\n lon = ind[1]\n ind = str(lat) + ' ' + str(lon)\n print(ind)\n mypoint = fromstr('POINT(' + ind + ')')\n try:\n row.append(mypoint)\n except:\n pass\n all.append(row)\n print(row)\n writer.writerows(all)\n",
"step-3": "<mask token>\ntree_csv = os.path.abspath('../harbordvillage/Inventory2009_test.csv')\nwith open(tree_csv, 'rU') as csvinput:\n with open('../harbordvillage/outfile.csv', 'w+') as csvoutput:\n writer = csv.writer(csvoutput, quoting=csv.QUOTE_NONNUMERIC)\n reader = csv.reader(csvinput)\n all = []\n row = next(reader)\n row.append('Address')\n all.append(row)\n for row in reader:\n add = '%s %s %s %s' % (row[1], row[0], 'Toronto', 'Canada')\n results = Geocoder.geocode(add)\n row[0] = results.route\n ind = results[0].coordinates\n lat = ind[0]\n lon = ind[1]\n ind = str(lat) + ' ' + str(lon)\n print(ind)\n mypoint = fromstr('POINT(' + ind + ')')\n try:\n row.append(mypoint)\n except:\n pass\n all.append(row)\n print(row)\n writer.writerows(all)\n",
"step-4": "import os\nimport time\nfrom django.contrib.gis.geos import fromstr\nimport csv\nfrom pygeocoder import Geocoder\ntree_csv = os.path.abspath('../harbordvillage/Inventory2009_test.csv')\nwith open(tree_csv, 'rU') as csvinput:\n with open('../harbordvillage/outfile.csv', 'w+') as csvoutput:\n writer = csv.writer(csvoutput, quoting=csv.QUOTE_NONNUMERIC)\n reader = csv.reader(csvinput)\n all = []\n row = next(reader)\n row.append('Address')\n all.append(row)\n for row in reader:\n add = '%s %s %s %s' % (row[1], row[0], 'Toronto', 'Canada')\n results = Geocoder.geocode(add)\n row[0] = results.route\n ind = results[0].coordinates\n lat = ind[0]\n lon = ind[1]\n ind = str(lat) + ' ' + str(lon)\n print(ind)\n mypoint = fromstr('POINT(' + ind + ')')\n try:\n row.append(mypoint)\n except:\n pass\n all.append(row)\n print(row)\n writer.writerows(all)\n",
"step-5": "import os\n# didnt endup using this\n\nimport time\n# from django.contrib.gis.utils import LayerMapping\nfrom django.contrib.gis.geos import fromstr\n# from models import Harbord\n\nimport csv\nfrom pygeocoder import Geocoder\n# from django.contrib.gis.geos import (Point, fromstr, fromfile, \n# GEOSGeometry, MultiPoint, MultiPolygon, Polygon)\n\n\ntree_csv = os.path.abspath('../harbordvillage/Inventory2009_test.csv')\n\n #Setup\nwith open(tree_csv, \"rU\") as csvinput:\n with open(\"../harbordvillage/outfile.csv\",\"w+\") as csvoutput:\n writer = csv.writer(csvoutput,quoting=csv.QUOTE_NONNUMERIC)\n reader = csv.reader(csvinput)\n\n all = []\n row = next(reader)\n row.append('Address')\n all.append(row)\n\n for row in reader:\n add=(\"%s %s %s %s\" % (row[1], row[0], 'Toronto', 'Canada'))\n\n # pygeocode stuff\n # time.sleep(1)\n results = Geocoder.geocode(add)\n row[0] = results.route\n # print(isinstance(results, basestring))\n ind = results[0].coordinates\n lat=ind[0]\n lon=ind[1]\n ind= str(lat) + ' ' + str(lon)\n print(ind)\n mypoint = fromstr('POINT('+ ind + ')')\n # print(type(mypoint))\n try:\n row.append(mypoint)\n except:\n pass\n\n\n all.append(row)\n print(row)\n # row.append(results.cooridnates)\n # print(row)\n\n writer.writerows(all)\n\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# Copyright The Cloud Custodian Authors.
# SPDX-License-Identifier: Apache-2.0
from c7n_azure.provider import resources
from c7n_azure.resources.arm import ArmResourceManager
from c7n.utils import type_schema
from c7n.filters.core import ValueFilter
@resources.register('mysql-flexibleserver')
class MySQLFlexibleServer(ArmResourceManager):
    """Cloud Custodian resource for Azure Database for MySQL flexible servers.

    Enumerates servers via the MySQL flexible-servers management client's
    `servers.list` operation.
    """

    class resource_type(ArmResourceManager.resource_type):
        # Metadata consumed by the c7n ARM resource framework.
        doc_groups = ['Databases']

        service = 'azure.mgmt.rdbms.mysql_flexibleservers'
        client = 'MySQLManagementClient'
        enum_spec = ('servers', 'list', None)
        default_report_fields = (
            'name',
            'location',
            'resourceGroup'
        )
        resource_type = 'Microsoft.DBForMySQL/flexibleservers/configurations'
@MySQLFlexibleServer.filter_registry.register('server-parameter')
class ServerParametersFilter(ValueFilter):
    """Filter by configuration parameter for mysql flexible server

    :example:

    Example JSON document showing the data format provided to the filter

    .. code-block:: json

        {
          "value": "TLSv1.2"
          "description": "Which protocols the server permits for encrypted
                          connections. By default, TLS 1.2 is enforced",
          "defaultValue": "TLSv1.2",
          "dataType": "Set",
          "allowedValues": "TLSv1,TLSv1.1,TLSv1.2",
          "source": "system-default",
          "isReadOnly": "False",
          "isConfigPendingRestart": "False",
          "isDynamicConfig": "False",
        }

    :example:

    Find Mysql Flexible servers with tls_version not set to TLSV1.2

    .. code-block:: yaml

        policies:
          - name: mysql-flexible-server-tls-version
            resource: azure.mysql-flexibleserver
            filters:
              - type: server-parameter
                name: tls_version
                key: value
                op: eq
                value: 'TLSv1.2'

    """

    # NOTE(review): 'allowed_value' is not a standard JSON-schema keyword
    # (the standard restriction keyword is 'enum'); it is likely ignored by
    # schema validation — confirm intent.
    schema = type_schema(
        'server-parameter',
        required=['type', 'name'],
        rinherit=ValueFilter.schema,
        name={
            'type': 'string',
            'allowed_value': ['TLSv1.2']
        },
    )

    def __call__(self, resource):
        # Cache the fetched configuration on the resource under a c7n-prefixed
        # key so repeated evaluations don't re-query the Azure API.
        key = f'c7n:config-params:{self.data["name"]}'
        if key not in resource['properties']:
            client = self.manager.get_client()
            # Lazily fetch the single named configuration for this server.
            query = client.configurations.get(
                resource['resourceGroup'],
                resource['name'],
                self.data["name"]
            )
            resource['properties'][key] = query.serialize(True).get('properties')
        # Delegate the actual key/op/value matching to ValueFilter against the
        # cached configuration properties dict.
        return super().__call__(resource['properties'].get(key))
|
normal
|
{
"blob_id": "b9bc6a9dbb3dbe51fbae45078bd499fb97fa003f",
"index": 3950,
"step-1": "<mask token>\n\n\n@MySQLFlexibleServer.filter_registry.register('server-parameter')\nclass ServerParametersFilter(ValueFilter):\n <mask token>\n schema = type_schema('server-parameter', required=['type', 'name'],\n rinherit=ValueFilter.schema, name={'type': 'string',\n 'allowed_value': ['TLSv1.2']})\n\n def __call__(self, resource):\n key = f\"c7n:config-params:{self.data['name']}\"\n if key not in resource['properties']:\n client = self.manager.get_client()\n query = client.configurations.get(resource['resourceGroup'],\n resource['name'], self.data['name'])\n resource['properties'][key] = query.serialize(True).get(\n 'properties')\n return super().__call__(resource['properties'].get(key))\n",
"step-2": "<mask token>\n\n\n@MySQLFlexibleServer.filter_registry.register('server-parameter')\nclass ServerParametersFilter(ValueFilter):\n \"\"\"Filter by configuration parameter for mysql flexible server\n\n :example:\n\n Example JSON document showing the data format provided to the filter\n\n .. code-block:: json\n\n {\n \"value\": \"TLSv1.2\"\n \"description\": \"Which protocols the server permits for encrypted\n connections. By default, TLS 1.2 is enforced\",\n \"defaultValue\": \"TLSv1.2\",\n \"dataType\": \"Set\",\n \"allowedValues\": \"TLSv1,TLSv1.1,TLSv1.2\",\n \"source\": \"system-default\",\n \"isReadOnly\": \"False\",\n \"isConfigPendingRestart\": \"False\",\n \"isDynamicConfig\": \"False\",\n }\n\n :example:\n\n Find Mysql Flexible servers with tls_version not set to TLSV1.2\n\n .. code-block:: yaml\n\n policies:\n - name: mysql-flexible-server-tls-version\n resource: azure.mysql-flexibleserver\n filters:\n - type: server-parameter\n name: tls_version\n key: value\n op: eq\n value: 'TLSv1.2'\n\n \"\"\"\n schema = type_schema('server-parameter', required=['type', 'name'],\n rinherit=ValueFilter.schema, name={'type': 'string',\n 'allowed_value': ['TLSv1.2']})\n\n def __call__(self, resource):\n key = f\"c7n:config-params:{self.data['name']}\"\n if key not in resource['properties']:\n client = self.manager.get_client()\n query = client.configurations.get(resource['resourceGroup'],\n resource['name'], self.data['name'])\n resource['properties'][key] = query.serialize(True).get(\n 'properties')\n return super().__call__(resource['properties'].get(key))\n",
"step-3": "<mask token>\n\n\n@resources.register('mysql-flexibleserver')\nclass MySQLFlexibleServer(ArmResourceManager):\n\n\n class resource_type(ArmResourceManager.resource_type):\n doc_groups = ['Databases']\n service = 'azure.mgmt.rdbms.mysql_flexibleservers'\n client = 'MySQLManagementClient'\n enum_spec = 'servers', 'list', None\n default_report_fields = 'name', 'location', 'resourceGroup'\n resource_type = 'Microsoft.DBForMySQL/flexibleservers/configurations'\n\n\n@MySQLFlexibleServer.filter_registry.register('server-parameter')\nclass ServerParametersFilter(ValueFilter):\n \"\"\"Filter by configuration parameter for mysql flexible server\n\n :example:\n\n Example JSON document showing the data format provided to the filter\n\n .. code-block:: json\n\n {\n \"value\": \"TLSv1.2\"\n \"description\": \"Which protocols the server permits for encrypted\n connections. By default, TLS 1.2 is enforced\",\n \"defaultValue\": \"TLSv1.2\",\n \"dataType\": \"Set\",\n \"allowedValues\": \"TLSv1,TLSv1.1,TLSv1.2\",\n \"source\": \"system-default\",\n \"isReadOnly\": \"False\",\n \"isConfigPendingRestart\": \"False\",\n \"isDynamicConfig\": \"False\",\n }\n\n :example:\n\n Find Mysql Flexible servers with tls_version not set to TLSV1.2\n\n .. 
code-block:: yaml\n\n policies:\n - name: mysql-flexible-server-tls-version\n resource: azure.mysql-flexibleserver\n filters:\n - type: server-parameter\n name: tls_version\n key: value\n op: eq\n value: 'TLSv1.2'\n\n \"\"\"\n schema = type_schema('server-parameter', required=['type', 'name'],\n rinherit=ValueFilter.schema, name={'type': 'string',\n 'allowed_value': ['TLSv1.2']})\n\n def __call__(self, resource):\n key = f\"c7n:config-params:{self.data['name']}\"\n if key not in resource['properties']:\n client = self.manager.get_client()\n query = client.configurations.get(resource['resourceGroup'],\n resource['name'], self.data['name'])\n resource['properties'][key] = query.serialize(True).get(\n 'properties')\n return super().__call__(resource['properties'].get(key))\n",
"step-4": "from c7n_azure.provider import resources\nfrom c7n_azure.resources.arm import ArmResourceManager\nfrom c7n.utils import type_schema\nfrom c7n.filters.core import ValueFilter\n\n\n@resources.register('mysql-flexibleserver')\nclass MySQLFlexibleServer(ArmResourceManager):\n\n\n class resource_type(ArmResourceManager.resource_type):\n doc_groups = ['Databases']\n service = 'azure.mgmt.rdbms.mysql_flexibleservers'\n client = 'MySQLManagementClient'\n enum_spec = 'servers', 'list', None\n default_report_fields = 'name', 'location', 'resourceGroup'\n resource_type = 'Microsoft.DBForMySQL/flexibleservers/configurations'\n\n\n@MySQLFlexibleServer.filter_registry.register('server-parameter')\nclass ServerParametersFilter(ValueFilter):\n \"\"\"Filter by configuration parameter for mysql flexible server\n\n :example:\n\n Example JSON document showing the data format provided to the filter\n\n .. code-block:: json\n\n {\n \"value\": \"TLSv1.2\"\n \"description\": \"Which protocols the server permits for encrypted\n connections. By default, TLS 1.2 is enforced\",\n \"defaultValue\": \"TLSv1.2\",\n \"dataType\": \"Set\",\n \"allowedValues\": \"TLSv1,TLSv1.1,TLSv1.2\",\n \"source\": \"system-default\",\n \"isReadOnly\": \"False\",\n \"isConfigPendingRestart\": \"False\",\n \"isDynamicConfig\": \"False\",\n }\n\n :example:\n\n Find Mysql Flexible servers with tls_version not set to TLSV1.2\n\n .. 
code-block:: yaml\n\n policies:\n - name: mysql-flexible-server-tls-version\n resource: azure.mysql-flexibleserver\n filters:\n - type: server-parameter\n name: tls_version\n key: value\n op: eq\n value: 'TLSv1.2'\n\n \"\"\"\n schema = type_schema('server-parameter', required=['type', 'name'],\n rinherit=ValueFilter.schema, name={'type': 'string',\n 'allowed_value': ['TLSv1.2']})\n\n def __call__(self, resource):\n key = f\"c7n:config-params:{self.data['name']}\"\n if key not in resource['properties']:\n client = self.manager.get_client()\n query = client.configurations.get(resource['resourceGroup'],\n resource['name'], self.data['name'])\n resource['properties'][key] = query.serialize(True).get(\n 'properties')\n return super().__call__(resource['properties'].get(key))\n",
"step-5": "# Copyright The Cloud Custodian Authors.\n# SPDX-License-Identifier: Apache-2.0\n\nfrom c7n_azure.provider import resources\nfrom c7n_azure.resources.arm import ArmResourceManager\nfrom c7n.utils import type_schema\nfrom c7n.filters.core import ValueFilter\n\n\n@resources.register('mysql-flexibleserver')\nclass MySQLFlexibleServer(ArmResourceManager):\n\n class resource_type(ArmResourceManager.resource_type):\n doc_groups = ['Databases']\n\n service = 'azure.mgmt.rdbms.mysql_flexibleservers'\n client = 'MySQLManagementClient'\n enum_spec = ('servers', 'list', None)\n default_report_fields = (\n 'name',\n 'location',\n 'resourceGroup'\n )\n resource_type = 'Microsoft.DBForMySQL/flexibleservers/configurations'\n\n\n@MySQLFlexibleServer.filter_registry.register('server-parameter')\nclass ServerParametersFilter(ValueFilter):\n \"\"\"Filter by configuration parameter for mysql flexible server\n\n :example:\n\n Example JSON document showing the data format provided to the filter\n\n .. code-block:: json\n\n {\n \"value\": \"TLSv1.2\"\n \"description\": \"Which protocols the server permits for encrypted\n connections. By default, TLS 1.2 is enforced\",\n \"defaultValue\": \"TLSv1.2\",\n \"dataType\": \"Set\",\n \"allowedValues\": \"TLSv1,TLSv1.1,TLSv1.2\",\n \"source\": \"system-default\",\n \"isReadOnly\": \"False\",\n \"isConfigPendingRestart\": \"False\",\n \"isDynamicConfig\": \"False\",\n }\n\n :example:\n\n Find Mysql Flexible servers with tls_version not set to TLSV1.2\n\n .. 
code-block:: yaml\n\n policies:\n - name: mysql-flexible-server-tls-version\n resource: azure.mysql-flexibleserver\n filters:\n - type: server-parameter\n name: tls_version\n key: value\n op: eq\n value: 'TLSv1.2'\n\n \"\"\"\n\n schema = type_schema(\n 'server-parameter',\n required=['type', 'name'],\n rinherit=ValueFilter.schema,\n name={\n 'type': 'string',\n 'allowed_value': ['TLSv1.2']\n },\n )\n\n def __call__(self, resource):\n key = f'c7n:config-params:{self.data[\"name\"]}'\n if key not in resource['properties']:\n client = self.manager.get_client()\n query = client.configurations.get(\n resource['resourceGroup'],\n resource['name'],\n self.data[\"name\"]\n )\n\n resource['properties'][key] = query.serialize(True).get('properties')\n\n return super().__call__(resource['properties'].get(key))\n\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
print(data)
<|reserved_special_token_0|>
print(mx, my)
<|reserved_special_token_0|>
for i in range(len(x)):
num += (x[i] - mx) * (y[i] - my)
den += (x[i] - mx) ** 2
<|reserved_special_token_0|>
print(beta1, beta0)
<|reserved_special_token_0|>
plt.scatter(x, y)
plt.plot([min(x), max(x)], [min(Y_predict), max(Y_predict)], color='red')
plt.show()
<|reserved_special_token_0|>
for i in range(len(x)):
xdata = beta1 * x[i] + beta0
ycap.append(xdata)
print(ycap)
<|reserved_special_token_0|>
for i in range(len(y)):
l = y[i] - ycap[i]
residue.append(l)
print(residue)
<|reserved_special_token_0|>
print(residualsum)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
data = pd.read_csv('regression.csv')
print(data)
x = data.iloc[:, 0]
y = data.iloc[:, 1]
mx = data['X1'].mean()
my = data['Y'].mean()
print(mx, my)
num, den = 0, 0
for i in range(len(x)):
num += (x[i] - mx) * (y[i] - my)
den += (x[i] - mx) ** 2
beta1 = num / den
beta0 = my - beta1 * mx
print(beta1, beta0)
Y_predict = beta1 * x + beta0
plt.scatter(x, y)
plt.plot([min(x), max(x)], [min(Y_predict), max(Y_predict)], color='red')
plt.show()
ycap = []
for i in range(len(x)):
xdata = beta1 * x[i] + beta0
ycap.append(xdata)
print(ycap)
residue = []
for i in range(len(y)):
l = y[i] - ycap[i]
residue.append(l)
print(residue)
residualsum = sum(residue)
print(residualsum)
<|reserved_special_token_1|>
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
data = pd.read_csv('regression.csv')
print(data)
x = data.iloc[:, 0]
y = data.iloc[:, 1]
mx = data['X1'].mean()
my = data['Y'].mean()
print(mx, my)
num, den = 0, 0
for i in range(len(x)):
num += (x[i] - mx) * (y[i] - my)
den += (x[i] - mx) ** 2
beta1 = num / den
beta0 = my - beta1 * mx
print(beta1, beta0)
Y_predict = beta1 * x + beta0
plt.scatter(x, y)
plt.plot([min(x), max(x)], [min(Y_predict), max(Y_predict)], color='red')
plt.show()
ycap = []
for i in range(len(x)):
xdata = beta1 * x[i] + beta0
ycap.append(xdata)
print(ycap)
residue = []
for i in range(len(y)):
l = y[i] - ycap[i]
residue.append(l)
print(residue)
residualsum = sum(residue)
print(residualsum)
<|reserved_special_token_1|>
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
data=pd.read_csv('regression.csv')
print(data)
x=data.iloc[:,0]
y=data.iloc[:,1]
mx=data['X1'].mean()
my=data['Y'].mean()
print(mx,my)
num, den = 0,0
for i in range(len(x)):
num += (x[i] - mx)*(y[i]-my)
den += (x[i]-mx)**2
beta1 = num/den
beta0 =my-(beta1*mx)
print(beta1,beta0)
Y_predict=beta1*x + beta0
plt.scatter(x,y)
plt.plot([min(x),max(x)],[min(Y_predict),max(Y_predict)], color='red')
plt.show()
ycap = []
for i in range(len(x)):
xdata =( beta1*x[i])+ beta0
ycap.append(xdata)
print(ycap)
residue=[]
for i in range(len(y)):
l = y[i] - ycap[i]
residue.append(l)
print(residue)
residualsum=sum(residue)
print(residualsum)
|
flexible
|
{
"blob_id": "ca6b064dbd8200c49665eaa944fdf1fc80c25726",
"index": 1047,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(data)\n<mask token>\nprint(mx, my)\n<mask token>\nfor i in range(len(x)):\n num += (x[i] - mx) * (y[i] - my)\n den += (x[i] - mx) ** 2\n<mask token>\nprint(beta1, beta0)\n<mask token>\nplt.scatter(x, y)\nplt.plot([min(x), max(x)], [min(Y_predict), max(Y_predict)], color='red')\nplt.show()\n<mask token>\nfor i in range(len(x)):\n xdata = beta1 * x[i] + beta0\n ycap.append(xdata)\nprint(ycap)\n<mask token>\nfor i in range(len(y)):\n l = y[i] - ycap[i]\n residue.append(l)\nprint(residue)\n<mask token>\nprint(residualsum)\n",
"step-3": "<mask token>\ndata = pd.read_csv('regression.csv')\nprint(data)\nx = data.iloc[:, 0]\ny = data.iloc[:, 1]\nmx = data['X1'].mean()\nmy = data['Y'].mean()\nprint(mx, my)\nnum, den = 0, 0\nfor i in range(len(x)):\n num += (x[i] - mx) * (y[i] - my)\n den += (x[i] - mx) ** 2\nbeta1 = num / den\nbeta0 = my - beta1 * mx\nprint(beta1, beta0)\nY_predict = beta1 * x + beta0\nplt.scatter(x, y)\nplt.plot([min(x), max(x)], [min(Y_predict), max(Y_predict)], color='red')\nplt.show()\nycap = []\nfor i in range(len(x)):\n xdata = beta1 * x[i] + beta0\n ycap.append(xdata)\nprint(ycap)\nresidue = []\nfor i in range(len(y)):\n l = y[i] - ycap[i]\n residue.append(l)\nprint(residue)\nresidualsum = sum(residue)\nprint(residualsum)\n",
"step-4": "import pandas as pd\nimport numpy as np\nimport matplotlib.pyplot as plt\ndata = pd.read_csv('regression.csv')\nprint(data)\nx = data.iloc[:, 0]\ny = data.iloc[:, 1]\nmx = data['X1'].mean()\nmy = data['Y'].mean()\nprint(mx, my)\nnum, den = 0, 0\nfor i in range(len(x)):\n num += (x[i] - mx) * (y[i] - my)\n den += (x[i] - mx) ** 2\nbeta1 = num / den\nbeta0 = my - beta1 * mx\nprint(beta1, beta0)\nY_predict = beta1 * x + beta0\nplt.scatter(x, y)\nplt.plot([min(x), max(x)], [min(Y_predict), max(Y_predict)], color='red')\nplt.show()\nycap = []\nfor i in range(len(x)):\n xdata = beta1 * x[i] + beta0\n ycap.append(xdata)\nprint(ycap)\nresidue = []\nfor i in range(len(y)):\n l = y[i] - ycap[i]\n residue.append(l)\nprint(residue)\nresidualsum = sum(residue)\nprint(residualsum)\n",
"step-5": "import pandas as pd\r\nimport numpy as np\r\nimport matplotlib.pyplot as plt\r\ndata=pd.read_csv('regression.csv')\r\nprint(data)\r\nx=data.iloc[:,0]\r\ny=data.iloc[:,1]\r\nmx=data['X1'].mean()\r\nmy=data['Y'].mean()\r\nprint(mx,my)\r\n\r\nnum, den = 0,0\r\nfor i in range(len(x)):\r\n num += (x[i] - mx)*(y[i]-my)\r\n den += (x[i]-mx)**2\r\nbeta1 = num/den\r\nbeta0 =my-(beta1*mx)\r\nprint(beta1,beta0)\r\nY_predict=beta1*x + beta0\r\nplt.scatter(x,y)\r\n\r\nplt.plot([min(x),max(x)],[min(Y_predict),max(Y_predict)], color='red')\r\nplt.show()\r\n\r\nycap = []\r\nfor i in range(len(x)):\r\n xdata =( beta1*x[i])+ beta0\r\n ycap.append(xdata)\r\nprint(ycap)\r\nresidue=[]\r\nfor i in range(len(y)):\r\n l = y[i] - ycap[i]\r\n residue.append(l)\r\nprint(residue)\r\nresidualsum=sum(residue)\r\nprint(residualsum)\r\n\r\n\r\n\r\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
#!/usr/bin/env python
from setuptools import setup
import NagAconda
setup(name=NagAconda.__name__,
version=NagAconda.__version__,
description="NagAconda is a Python Nagios wrapper.",
long_description=open('README').read(),
author='Steven Schlegel',
author_email='steven@schlegel.tech',
license='New BSD License',
url='https://github.com/SchlegelS0208/NagAconda',
packages=['NagAconda'],
tests_require=['nose>=0.11',],
install_requires=['Sphinx'],
test_suite = 'nose.collector',
platforms = 'any',
classifiers = [
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Documentation',
'Topic :: Software Development :: Libraries',
'Topic :: System :: Monitoring',
'Topic :: System :: Systems Administration',
'Topic :: Utilities',
],
)
|
normal
|
{
"blob_id": "c3719f30bcf13061134b34b0925dfa2af4535f14",
"index": 7854,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nsetup(name=NagAconda.__name__, version=NagAconda.__version__, description=\n 'NagAconda is a Python Nagios wrapper.', long_description=open('README'\n ).read(), author='Steven Schlegel', author_email='steven@schlegel.tech',\n license='New BSD License', url=\n 'https://github.com/SchlegelS0208/NagAconda', packages=['NagAconda'],\n tests_require=['nose>=0.11'], install_requires=['Sphinx'], test_suite=\n 'nose.collector', platforms='any', classifiers=[\n 'Development Status :: 4 - Beta', 'Environment :: Console',\n 'Intended Audience :: Developers',\n 'Intended Audience :: System Administrators',\n 'License :: OSI Approved :: BSD License',\n 'Operating System :: OS Independent', 'Programming Language :: Python',\n 'Topic :: Documentation', 'Topic :: Software Development :: Libraries',\n 'Topic :: System :: Monitoring',\n 'Topic :: System :: Systems Administration', 'Topic :: Utilities'])\n",
"step-3": "from setuptools import setup\nimport NagAconda\nsetup(name=NagAconda.__name__, version=NagAconda.__version__, description=\n 'NagAconda is a Python Nagios wrapper.', long_description=open('README'\n ).read(), author='Steven Schlegel', author_email='steven@schlegel.tech',\n license='New BSD License', url=\n 'https://github.com/SchlegelS0208/NagAconda', packages=['NagAconda'],\n tests_require=['nose>=0.11'], install_requires=['Sphinx'], test_suite=\n 'nose.collector', platforms='any', classifiers=[\n 'Development Status :: 4 - Beta', 'Environment :: Console',\n 'Intended Audience :: Developers',\n 'Intended Audience :: System Administrators',\n 'License :: OSI Approved :: BSD License',\n 'Operating System :: OS Independent', 'Programming Language :: Python',\n 'Topic :: Documentation', 'Topic :: Software Development :: Libraries',\n 'Topic :: System :: Monitoring',\n 'Topic :: System :: Systems Administration', 'Topic :: Utilities'])\n",
"step-4": "#!/usr/bin/env python\n\nfrom setuptools import setup\nimport NagAconda\n\nsetup(name=NagAconda.__name__,\n version=NagAconda.__version__,\n description=\"NagAconda is a Python Nagios wrapper.\",\n long_description=open('README').read(),\n author='Steven Schlegel',\n author_email='steven@schlegel.tech',\n license='New BSD License',\n url='https://github.com/SchlegelS0208/NagAconda',\n packages=['NagAconda'],\n tests_require=['nose>=0.11',],\n install_requires=['Sphinx'],\n test_suite = 'nose.collector',\n platforms = 'any',\n classifiers = [\n 'Development Status :: 4 - Beta',\n 'Environment :: Console',\n 'Intended Audience :: Developers',\n 'Intended Audience :: System Administrators',\n 'License :: OSI Approved :: BSD License',\n 'Operating System :: OS Independent',\n 'Programming Language :: Python',\n 'Topic :: Documentation',\n 'Topic :: Software Development :: Libraries',\n 'Topic :: System :: Monitoring',\n 'Topic :: System :: Systems Administration',\n 'Topic :: Utilities',\n ],\n )\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
# 0=RED, 1=GREEN, 2=BLUE, 3=ALPHA
#import tkinter as tk
#import tkinter.ttk as ttk
#from tkcolorpicker import askcolor
import time
c1 = [0,0,0,0] #this color
c2 = [0,0,0] #over this color
c3 = [0,0,0] #result
cont='y'
#--------------------------------
while cont=='y':
print('--enter underlay color in r,g,b--')
c2[0]=int(input('red: '))
c2[1]=int(input('green: '))
c2[2]=int(input('blue: '))
print('')
print('--enter desired color in r,g,b--')
c3[0]=int(input('red: '))
c3[1]=int(input('green: '))
c3[2]=int(input('blue: '))
print('')
#--------------------------------
alpha = 0
r = -1
g = -1
b = -1
while alpha < 1 and r < 0 or g < 0 or b < 0 or r > 255 or g > 255 or b > 255:
alpha+= 1/256
inv = 1 / alpha
r = c3[0] * inv + c2[0] * (1 - inv)
g = c3[1] * inv + c2[1] * (1 - inv)
b = c3[2] * inv + c2[2] * (1 - inv)
print('---result---')
print('red:', round(r))
print('green:', round(g))
print('blue:', round(b))
print('alpha:', round(alpha*256))
print('------------')
print('')
cont=input('again? y/n')
print('')
|
normal
|
{
"blob_id": "5fa8ae36c4b4a5bffa64f4c65b74b74b29ba246f",
"index": 4578,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nwhile cont == 'y':\n print('--enter underlay color in r,g,b--')\n c2[0] = int(input('red: '))\n c2[1] = int(input('green: '))\n c2[2] = int(input('blue: '))\n print('')\n print('--enter desired color in r,g,b--')\n c3[0] = int(input('red: '))\n c3[1] = int(input('green: '))\n c3[2] = int(input('blue: '))\n print('')\n alpha = 0\n r = -1\n g = -1\n b = -1\n while (alpha < 1 and r < 0 or g < 0 or b < 0 or r > 255 or g > 255 or b >\n 255):\n alpha += 1 / 256\n inv = 1 / alpha\n r = c3[0] * inv + c2[0] * (1 - inv)\n g = c3[1] * inv + c2[1] * (1 - inv)\n b = c3[2] * inv + c2[2] * (1 - inv)\n print('---result---')\n print('red:', round(r))\n print('green:', round(g))\n print('blue:', round(b))\n print('alpha:', round(alpha * 256))\n print('------------')\n print('')\n cont = input('again? y/n')\n print('')\n",
"step-3": "<mask token>\nc1 = [0, 0, 0, 0]\nc2 = [0, 0, 0]\nc3 = [0, 0, 0]\ncont = 'y'\nwhile cont == 'y':\n print('--enter underlay color in r,g,b--')\n c2[0] = int(input('red: '))\n c2[1] = int(input('green: '))\n c2[2] = int(input('blue: '))\n print('')\n print('--enter desired color in r,g,b--')\n c3[0] = int(input('red: '))\n c3[1] = int(input('green: '))\n c3[2] = int(input('blue: '))\n print('')\n alpha = 0\n r = -1\n g = -1\n b = -1\n while (alpha < 1 and r < 0 or g < 0 or b < 0 or r > 255 or g > 255 or b >\n 255):\n alpha += 1 / 256\n inv = 1 / alpha\n r = c3[0] * inv + c2[0] * (1 - inv)\n g = c3[1] * inv + c2[1] * (1 - inv)\n b = c3[2] * inv + c2[2] * (1 - inv)\n print('---result---')\n print('red:', round(r))\n print('green:', round(g))\n print('blue:', round(b))\n print('alpha:', round(alpha * 256))\n print('------------')\n print('')\n cont = input('again? y/n')\n print('')\n",
"step-4": "import time\nc1 = [0, 0, 0, 0]\nc2 = [0, 0, 0]\nc3 = [0, 0, 0]\ncont = 'y'\nwhile cont == 'y':\n print('--enter underlay color in r,g,b--')\n c2[0] = int(input('red: '))\n c2[1] = int(input('green: '))\n c2[2] = int(input('blue: '))\n print('')\n print('--enter desired color in r,g,b--')\n c3[0] = int(input('red: '))\n c3[1] = int(input('green: '))\n c3[2] = int(input('blue: '))\n print('')\n alpha = 0\n r = -1\n g = -1\n b = -1\n while (alpha < 1 and r < 0 or g < 0 or b < 0 or r > 255 or g > 255 or b >\n 255):\n alpha += 1 / 256\n inv = 1 / alpha\n r = c3[0] * inv + c2[0] * (1 - inv)\n g = c3[1] * inv + c2[1] * (1 - inv)\n b = c3[2] * inv + c2[2] * (1 - inv)\n print('---result---')\n print('red:', round(r))\n print('green:', round(g))\n print('blue:', round(b))\n print('alpha:', round(alpha * 256))\n print('------------')\n print('')\n cont = input('again? y/n')\n print('')\n",
"step-5": "# 0=RED, 1=GREEN, 2=BLUE, 3=ALPHA\r\n\r\n#import tkinter as tk\r\n#import tkinter.ttk as ttk\r\n#from tkcolorpicker import askcolor\r\nimport time\r\n\r\nc1 = [0,0,0,0] #this color\r\nc2 = [0,0,0] #over this color\r\nc3 = [0,0,0] #result\r\n\r\ncont='y'\r\n\r\n#--------------------------------\r\n\r\nwhile cont=='y':\r\n print('--enter underlay color in r,g,b--')\r\n c2[0]=int(input('red: '))\r\n c2[1]=int(input('green: '))\r\n c2[2]=int(input('blue: '))\r\n print('')\r\n \r\n print('--enter desired color in r,g,b--')\r\n c3[0]=int(input('red: '))\r\n c3[1]=int(input('green: '))\r\n c3[2]=int(input('blue: '))\r\n print('')\r\n \r\n #--------------------------------\r\n\r\n alpha = 0\r\n r = -1\r\n g = -1\r\n b = -1\r\n\r\n while alpha < 1 and r < 0 or g < 0 or b < 0 or r > 255 or g > 255 or b > 255:\r\n alpha+= 1/256\r\n inv = 1 / alpha\r\n r = c3[0] * inv + c2[0] * (1 - inv)\r\n g = c3[1] * inv + c2[1] * (1 - inv)\r\n b = c3[2] * inv + c2[2] * (1 - inv)\r\n\r\n print('---result---')\r\n print('red:', round(r))\r\n print('green:', round(g))\r\n print('blue:', round(b))\r\n print('alpha:', round(alpha*256))\r\n print('------------')\r\n print('')\r\n\r\n cont=input('again? y/n')\r\n print('')\r\n\r\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# coding: utf-8
from pyquery import PyQuery as pq
html = '''
<div id="container">
<ul class="list">
<li class="item-0">first item</li>
<li class="item-1"><a href="link2.html">second item</a></li>
<li class="item-0 active"><a href="link3.html">third item</a></li>
<li class="item-1 active"><a href="link4.html">fourth item</a></li>
<li class="item-0"><a href="link5.html">fifth item</a></li>
</ul
</div>
'''
# 获取属性
# 第一种方法
doc = pq(html)
a = doc('.item-0.active a')
print(a, type(a))
print(a.attr('href'))
# 第二种方法
print(a.attr.href)
|
normal
|
{
"blob_id": "02ab822dacb26d623a474fa45ebb034f9c1291b8",
"index": 1604,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(a, type(a))\nprint(a.attr('href'))\nprint(a.attr.href)\n",
"step-3": "<mask token>\nhtml = \"\"\"\n <div id=\"container\">\n <ul class=\"list\">\n <li class=\"item-0\">first item</li>\n <li class=\"item-1\"><a href=\"link2.html\">second item</a></li>\n <li class=\"item-0 active\"><a href=\"link3.html\">third item</a></li>\n <li class=\"item-1 active\"><a href=\"link4.html\">fourth item</a></li>\n <li class=\"item-0\"><a href=\"link5.html\">fifth item</a></li>\n </ul\n </div>\n\"\"\"\ndoc = pq(html)\na = doc('.item-0.active a')\nprint(a, type(a))\nprint(a.attr('href'))\nprint(a.attr.href)\n",
"step-4": "from pyquery import PyQuery as pq\nhtml = \"\"\"\n <div id=\"container\">\n <ul class=\"list\">\n <li class=\"item-0\">first item</li>\n <li class=\"item-1\"><a href=\"link2.html\">second item</a></li>\n <li class=\"item-0 active\"><a href=\"link3.html\">third item</a></li>\n <li class=\"item-1 active\"><a href=\"link4.html\">fourth item</a></li>\n <li class=\"item-0\"><a href=\"link5.html\">fifth item</a></li>\n </ul\n </div>\n\"\"\"\ndoc = pq(html)\na = doc('.item-0.active a')\nprint(a, type(a))\nprint(a.attr('href'))\nprint(a.attr.href)\n",
"step-5": "# coding: utf-8\n\nfrom pyquery import PyQuery as pq\n\n\nhtml = '''\n <div id=\"container\">\n <ul class=\"list\">\n <li class=\"item-0\">first item</li>\n <li class=\"item-1\"><a href=\"link2.html\">second item</a></li>\n <li class=\"item-0 active\"><a href=\"link3.html\">third item</a></li>\n <li class=\"item-1 active\"><a href=\"link4.html\">fourth item</a></li>\n <li class=\"item-0\"><a href=\"link5.html\">fifth item</a></li>\n </ul\n </div>\n'''\n# 获取属性\n# 第一种方法\ndoc = pq(html)\na = doc('.item-0.active a')\nprint(a, type(a))\nprint(a.attr('href'))\n\n# 第二种方法\nprint(a.attr.href)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
@app.route('/')
def index():
result_plot = compute_model_output()
return render_template('index.html', graphJSON=result_plot)
def compute_model_output():
num_steps = 500
init_inf = 5
t_inc = 5
t_inf = 9
r_t = 2.5
rho = 1.0
kappa_0 = 0.0
kappa = 0.0
n_pop = 2000
seir = model.SEIRModel(num_steps, n_pop, init_inf, t_inc, t_inf, r_t,
rho, kappa_0, kappa)
s, e, i, r = seir.run()
days = np.linspace(0, num_steps, num_steps)
trace_0 = pgo.Scatter(x=days, y=s, mode='lines', name='s', line=dict(
color='rgba(128, 223, 255, 1)'))
trace_1 = pgo.Scatter(x=days, y=e, mode='lines', name='e', line=dict(
color='rgba(200, 100, 0, 1)'))
trace_2 = pgo.Scatter(x=days, y=i, mode='lines', name='i', line=dict(
color='rgba(180, 0, 0, 1)'))
trace_3 = pgo.Scatter(x=days, y=r, mode='lines', name='r', line=dict(
color='rgba(0, 100, 50, 1)'))
data = [trace_0, trace_1, trace_2, trace_3]
graphJSON = json.dumps(data, cls=plotly.utils.PlotlyJSONEncoder)
return graphJSON
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
sys.path.insert(0, parentdir)
sys.path.append(os.path.join(parentdir, 'utils'))
<|reserved_special_token_0|>
@app.route('/')
def index():
result_plot = compute_model_output()
return render_template('index.html', graphJSON=result_plot)
def compute_model_output():
num_steps = 500
init_inf = 5
t_inc = 5
t_inf = 9
r_t = 2.5
rho = 1.0
kappa_0 = 0.0
kappa = 0.0
n_pop = 2000
seir = model.SEIRModel(num_steps, n_pop, init_inf, t_inc, t_inf, r_t,
rho, kappa_0, kappa)
s, e, i, r = seir.run()
days = np.linspace(0, num_steps, num_steps)
trace_0 = pgo.Scatter(x=days, y=s, mode='lines', name='s', line=dict(
color='rgba(128, 223, 255, 1)'))
trace_1 = pgo.Scatter(x=days, y=e, mode='lines', name='e', line=dict(
color='rgba(200, 100, 0, 1)'))
trace_2 = pgo.Scatter(x=days, y=i, mode='lines', name='i', line=dict(
color='rgba(180, 0, 0, 1)'))
trace_3 = pgo.Scatter(x=days, y=r, mode='lines', name='r', line=dict(
color='rgba(0, 100, 50, 1)'))
data = [trace_0, trace_1, trace_2, trace_3]
graphJSON = json.dumps(data, cls=plotly.utils.PlotlyJSONEncoder)
return graphJSON
<|reserved_special_token_0|>
if __name__ == '__main__':
app.run(debug=True)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
currentdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.
currentframe())))
parentdir = os.path.dirname(currentdir)
sys.path.insert(0, parentdir)
sys.path.append(os.path.join(parentdir, 'utils'))
<|reserved_special_token_0|>
app = Flask(__name__)
@app.route('/')
def index():
result_plot = compute_model_output()
return render_template('index.html', graphJSON=result_plot)
def compute_model_output():
num_steps = 500
init_inf = 5
t_inc = 5
t_inf = 9
r_t = 2.5
rho = 1.0
kappa_0 = 0.0
kappa = 0.0
n_pop = 2000
seir = model.SEIRModel(num_steps, n_pop, init_inf, t_inc, t_inf, r_t,
rho, kappa_0, kappa)
s, e, i, r = seir.run()
days = np.linspace(0, num_steps, num_steps)
trace_0 = pgo.Scatter(x=days, y=s, mode='lines', name='s', line=dict(
color='rgba(128, 223, 255, 1)'))
trace_1 = pgo.Scatter(x=days, y=e, mode='lines', name='e', line=dict(
color='rgba(200, 100, 0, 1)'))
trace_2 = pgo.Scatter(x=days, y=i, mode='lines', name='i', line=dict(
color='rgba(180, 0, 0, 1)'))
trace_3 = pgo.Scatter(x=days, y=r, mode='lines', name='r', line=dict(
color='rgba(0, 100, 50, 1)'))
data = [trace_0, trace_1, trace_2, trace_3]
graphJSON = json.dumps(data, cls=plotly.utils.PlotlyJSONEncoder)
return graphJSON
<|reserved_special_token_0|>
if __name__ == '__main__':
app.run(debug=True)
<|reserved_special_token_1|>
import os, sys, inspect
currentdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.
currentframe())))
parentdir = os.path.dirname(currentdir)
sys.path.insert(0, parentdir)
sys.path.append(os.path.join(parentdir, 'utils'))
from flask import Flask, render_template
import numpy as np
import plotly
import plotly.graph_objs as pgo
import json
from utils import model
app = Flask(__name__)
@app.route('/')
def index():
result_plot = compute_model_output()
return render_template('index.html', graphJSON=result_plot)
def compute_model_output():
num_steps = 500
init_inf = 5
t_inc = 5
t_inf = 9
r_t = 2.5
rho = 1.0
kappa_0 = 0.0
kappa = 0.0
n_pop = 2000
seir = model.SEIRModel(num_steps, n_pop, init_inf, t_inc, t_inf, r_t,
rho, kappa_0, kappa)
s, e, i, r = seir.run()
days = np.linspace(0, num_steps, num_steps)
trace_0 = pgo.Scatter(x=days, y=s, mode='lines', name='s', line=dict(
color='rgba(128, 223, 255, 1)'))
trace_1 = pgo.Scatter(x=days, y=e, mode='lines', name='e', line=dict(
color='rgba(200, 100, 0, 1)'))
trace_2 = pgo.Scatter(x=days, y=i, mode='lines', name='i', line=dict(
color='rgba(180, 0, 0, 1)'))
trace_3 = pgo.Scatter(x=days, y=r, mode='lines', name='r', line=dict(
color='rgba(0, 100, 50, 1)'))
data = [trace_0, trace_1, trace_2, trace_3]
graphJSON = json.dumps(data, cls=plotly.utils.PlotlyJSONEncoder)
return graphJSON
<|reserved_special_token_0|>
if __name__ == '__main__':
app.run(debug=True)
<|reserved_special_token_1|>
# Set up path references and dependencies.
import os, sys, inspect
currentdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
parentdir = os.path.dirname(currentdir)
sys.path.insert(0, parentdir)
sys.path.append(os.path.join(parentdir, "utils"))
# Import important helper libraries.
from flask import Flask, render_template
import numpy as np
import plotly
import plotly.graph_objs as pgo
import json
# Import modules created to serve the project.
#from utils import DB_interface as DBI
#from utils import path_config as pc
from utils import model
app = Flask(__name__)
# Global variable
#DAYS = 500
@app.route('/')
def index():
    """Serve the landing page, embedding the SEIR plot data as Plotly JSON."""
    graph = compute_model_output()
    return render_template("index.html", graphJSON=graph)
def compute_model_output():
    """Simulate the SEIR model and return its curves as Plotly JSON.

    Returns:
        str: JSON list of four ``plotly.graph_objs.Scatter`` line traces,
        one per compartment (s, e, i, r).
    """
    # Fixed model parameters for now; r_t could later be randomized.
    num_steps = 500
    init_inf = 5
    t_inc = 5
    t_inf = 9
    r_t = 2.5
    rho = 1.0
    kappa_0 = 0.0
    kappa = 0.0
    n_pop = 2000

    seir = model.SEIRModel(num_steps, n_pop, init_inf, t_inc, t_inf, r_t,
                           rho, kappa_0, kappa)
    s, e, i, r = seir.run()

    days = np.linspace(0, num_steps, num_steps)

    # Build the four line traces from (name, series, colour) triples.
    series = (
        ('s', s, 'rgba(128, 223, 255, 1)'),
        ('e', e, 'rgba(200, 100, 0, 1)'),
        ('i', i, 'rgba(180, 0, 0, 1)'),
        ('r', r, 'rgba(0, 100, 50, 1)'),
    )
    data = [
        pgo.Scatter(x=days, y=ys, mode='lines', name=name,
                    line=dict(color=colour))
        for name, ys, colour in series
    ]
    return json.dumps(data, cls=plotly.utils.PlotlyJSONEncoder)
"""
@app.callback(
Output('test','children')
[Input('val_num_steps', 'num_steps')]
)
@app.route('/start_bckgrnd_update')
def start_bckgrnd_update():
p = Process(target=bckgrnd_update, name="background_update")
p.start()
#p.join()
now = datetime.now()
user = {'username': 'MSE!'}
posts = [
{
'author': {'username': 'Paul'},
'body': 'Henrik has the update just been started?'
},
{
'author': {'username': 'Henrik'},
'body': 'You bet your sweet ass it has!'
},
{
'author': {'username': 'Paul'},
'body': 'So what time was is when it started?'
},
{
'author': {'username': 'Henrik'},
'body': 'It was exactly %s !' % now
}
]
return render_template("start_bckgrnd_update.html", title="home", user = user, posts=posts)
def bckgrnd_update():
global updating
updating = True
while updating:
print(datetime.now())
print("updating RKI DBs now")
DB = DBI.DB_interface()
DB.update_RKI_csv()
DB.update_RKI_landkreise_csv()
day = 24 * 3600
time.sleep(day)
"""
if __name__ == "__main__":
    # Development entry point: Flask debug server, not for production use.
    app.run(debug=True)
|
flexible
|
{
"blob_id": "7d099012584b84e9767bf0ce9d9df1596ca3bbab",
"index": 542,
"step-1": "<mask token>\n\n\n@app.route('/')\ndef index():\n result_plot = compute_model_output()\n return render_template('index.html', graphJSON=result_plot)\n\n\ndef compute_model_output():\n num_steps = 500\n init_inf = 5\n t_inc = 5\n t_inf = 9\n r_t = 2.5\n rho = 1.0\n kappa_0 = 0.0\n kappa = 0.0\n n_pop = 2000\n seir = model.SEIRModel(num_steps, n_pop, init_inf, t_inc, t_inf, r_t,\n rho, kappa_0, kappa)\n s, e, i, r = seir.run()\n days = np.linspace(0, num_steps, num_steps)\n trace_0 = pgo.Scatter(x=days, y=s, mode='lines', name='s', line=dict(\n color='rgba(128, 223, 255, 1)'))\n trace_1 = pgo.Scatter(x=days, y=e, mode='lines', name='e', line=dict(\n color='rgba(200, 100, 0, 1)'))\n trace_2 = pgo.Scatter(x=days, y=i, mode='lines', name='i', line=dict(\n color='rgba(180, 0, 0, 1)'))\n trace_3 = pgo.Scatter(x=days, y=r, mode='lines', name='r', line=dict(\n color='rgba(0, 100, 50, 1)'))\n data = [trace_0, trace_1, trace_2, trace_3]\n graphJSON = json.dumps(data, cls=plotly.utils.PlotlyJSONEncoder)\n return graphJSON\n\n\n<mask token>\n",
"step-2": "<mask token>\nsys.path.insert(0, parentdir)\nsys.path.append(os.path.join(parentdir, 'utils'))\n<mask token>\n\n\n@app.route('/')\ndef index():\n result_plot = compute_model_output()\n return render_template('index.html', graphJSON=result_plot)\n\n\ndef compute_model_output():\n num_steps = 500\n init_inf = 5\n t_inc = 5\n t_inf = 9\n r_t = 2.5\n rho = 1.0\n kappa_0 = 0.0\n kappa = 0.0\n n_pop = 2000\n seir = model.SEIRModel(num_steps, n_pop, init_inf, t_inc, t_inf, r_t,\n rho, kappa_0, kappa)\n s, e, i, r = seir.run()\n days = np.linspace(0, num_steps, num_steps)\n trace_0 = pgo.Scatter(x=days, y=s, mode='lines', name='s', line=dict(\n color='rgba(128, 223, 255, 1)'))\n trace_1 = pgo.Scatter(x=days, y=e, mode='lines', name='e', line=dict(\n color='rgba(200, 100, 0, 1)'))\n trace_2 = pgo.Scatter(x=days, y=i, mode='lines', name='i', line=dict(\n color='rgba(180, 0, 0, 1)'))\n trace_3 = pgo.Scatter(x=days, y=r, mode='lines', name='r', line=dict(\n color='rgba(0, 100, 50, 1)'))\n data = [trace_0, trace_1, trace_2, trace_3]\n graphJSON = json.dumps(data, cls=plotly.utils.PlotlyJSONEncoder)\n return graphJSON\n\n\n<mask token>\nif __name__ == '__main__':\n app.run(debug=True)\n",
"step-3": "<mask token>\ncurrentdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.\n currentframe())))\nparentdir = os.path.dirname(currentdir)\nsys.path.insert(0, parentdir)\nsys.path.append(os.path.join(parentdir, 'utils'))\n<mask token>\napp = Flask(__name__)\n\n\n@app.route('/')\ndef index():\n result_plot = compute_model_output()\n return render_template('index.html', graphJSON=result_plot)\n\n\ndef compute_model_output():\n num_steps = 500\n init_inf = 5\n t_inc = 5\n t_inf = 9\n r_t = 2.5\n rho = 1.0\n kappa_0 = 0.0\n kappa = 0.0\n n_pop = 2000\n seir = model.SEIRModel(num_steps, n_pop, init_inf, t_inc, t_inf, r_t,\n rho, kappa_0, kappa)\n s, e, i, r = seir.run()\n days = np.linspace(0, num_steps, num_steps)\n trace_0 = pgo.Scatter(x=days, y=s, mode='lines', name='s', line=dict(\n color='rgba(128, 223, 255, 1)'))\n trace_1 = pgo.Scatter(x=days, y=e, mode='lines', name='e', line=dict(\n color='rgba(200, 100, 0, 1)'))\n trace_2 = pgo.Scatter(x=days, y=i, mode='lines', name='i', line=dict(\n color='rgba(180, 0, 0, 1)'))\n trace_3 = pgo.Scatter(x=days, y=r, mode='lines', name='r', line=dict(\n color='rgba(0, 100, 50, 1)'))\n data = [trace_0, trace_1, trace_2, trace_3]\n graphJSON = json.dumps(data, cls=plotly.utils.PlotlyJSONEncoder)\n return graphJSON\n\n\n<mask token>\nif __name__ == '__main__':\n app.run(debug=True)\n",
"step-4": "import os, sys, inspect\ncurrentdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.\n currentframe())))\nparentdir = os.path.dirname(currentdir)\nsys.path.insert(0, parentdir)\nsys.path.append(os.path.join(parentdir, 'utils'))\nfrom flask import Flask, render_template\nimport numpy as np\nimport plotly\nimport plotly.graph_objs as pgo\nimport json\nfrom utils import model\napp = Flask(__name__)\n\n\n@app.route('/')\ndef index():\n result_plot = compute_model_output()\n return render_template('index.html', graphJSON=result_plot)\n\n\ndef compute_model_output():\n num_steps = 500\n init_inf = 5\n t_inc = 5\n t_inf = 9\n r_t = 2.5\n rho = 1.0\n kappa_0 = 0.0\n kappa = 0.0\n n_pop = 2000\n seir = model.SEIRModel(num_steps, n_pop, init_inf, t_inc, t_inf, r_t,\n rho, kappa_0, kappa)\n s, e, i, r = seir.run()\n days = np.linspace(0, num_steps, num_steps)\n trace_0 = pgo.Scatter(x=days, y=s, mode='lines', name='s', line=dict(\n color='rgba(128, 223, 255, 1)'))\n trace_1 = pgo.Scatter(x=days, y=e, mode='lines', name='e', line=dict(\n color='rgba(200, 100, 0, 1)'))\n trace_2 = pgo.Scatter(x=days, y=i, mode='lines', name='i', line=dict(\n color='rgba(180, 0, 0, 1)'))\n trace_3 = pgo.Scatter(x=days, y=r, mode='lines', name='r', line=dict(\n color='rgba(0, 100, 50, 1)'))\n data = [trace_0, trace_1, trace_2, trace_3]\n graphJSON = json.dumps(data, cls=plotly.utils.PlotlyJSONEncoder)\n return graphJSON\n\n\n<mask token>\nif __name__ == '__main__':\n app.run(debug=True)\n",
"step-5": "# Set up path references and dependencies.\nimport os, sys, inspect\ncurrentdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))\nparentdir = os.path.dirname(currentdir)\nsys.path.insert(0, parentdir)\nsys.path.append(os.path.join(parentdir, \"utils\"))\n\n# Import important helper libraries.\nfrom flask import Flask, render_template\nimport numpy as np\n\nimport plotly\nimport plotly.graph_objs as pgo\nimport json\n\n# Import modules created to serve the project.\n#from utils import DB_interface as DBI\n#from utils import path_config as pc\nfrom utils import model\n\napp = Flask(__name__)\n\n# Global variable\n#DAYS = 500\n\n@app.route('/')\ndef index():\n result_plot = compute_model_output()\n return render_template(\"index.html\", graphJSON=result_plot)\n\ndef compute_model_output():\n num_steps = 500\n init_inf = 5\n t_inc = 5\n t_inf = 9\n r_t = 2.5 #np.random.normal(2.5, 1.0)\n rho = 1.0\n kappa_0 = 0.0\n kappa = 0.0\n\n n_pop = 2000\n\n seir = model.SEIRModel(num_steps,n_pop, init_inf, t_inc, t_inf, r_t, rho, kappa_0, kappa)\n\n s, e, i, r = seir.run()\n\n days = np.linspace(0, num_steps, num_steps)\n\n trace_0 = pgo.Scatter(x=days, y=s, mode='lines', name='s', line=dict(color='rgba(128, 223, 255, 1)'))\n trace_1 = pgo.Scatter(x=days, y=e, mode='lines', name='e', line=dict(color='rgba(200, 100, 0, 1)'))\n trace_2 = pgo.Scatter(x=days, y=i, mode='lines', name='i', line=dict(color='rgba(180, 0, 0, 1)'))\n trace_3 = pgo.Scatter(x=days, y=r, mode='lines', name='r', line=dict(color='rgba(0, 100, 50, 1)'))\n\n data = [trace_0, trace_1, trace_2, trace_3]\n\n graphJSON = json.dumps(data, cls=plotly.utils.PlotlyJSONEncoder)\n\n return (graphJSON)\n\n\"\"\"\n@app.callback(\n Output('test','children')\n [Input('val_num_steps', 'num_steps')]\n)\n\n\n@app.route('/start_bckgrnd_update')\ndef start_bckgrnd_update():\n p = Process(target=bckgrnd_update, name=\"background_update\")\n p.start()\n #p.join()\n now = datetime.now()\n user = 
{'username': 'MSE!'}\n posts = [\n {\n 'author': {'username': 'Paul'},\n 'body': 'Henrik has the update just been started?'\n },\n {\n 'author': {'username': 'Henrik'},\n 'body': 'You bet your sweet ass it has!'\n },\n {\n 'author': {'username': 'Paul'},\n 'body': 'So what time was is when it started?'\n },\n {\n 'author': {'username': 'Henrik'},\n 'body': 'It was exactly %s !' % now\n }\n\n ]\n return render_template(\"start_bckgrnd_update.html\", title=\"home\", user = user, posts=posts)\n\ndef bckgrnd_update():\n global updating\n updating = True\n while updating:\n print(datetime.now())\n print(\"updating RKI DBs now\")\n DB = DBI.DB_interface()\n DB.update_RKI_csv()\n DB.update_RKI_landkreise_csv()\n day = 24 * 3600\n time.sleep(day)\n\"\"\"\n\nif __name__ == \"__main__\":\n app.run(debug=True)\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
import datetime
import operator
import geopy
from django.db import models
from django.db.models import Q
from django.db.models.query import QuerySet
from django.db.models import permalink
from django.contrib.auth.models import User
geocoder = geopy.geocoders.Google()
class City(models.Model):
    """A city in which apartments can be listed; names are unique."""

    name = models.CharField(max_length=30, unique=True)

    class Meta:
        ordering = ('name',)
        # Default pluralization would be "citys".
        verbose_name_plural = "cities"

    def __unicode__(self):
        return self.name
class Area(models.Model):
    """A named district/neighbourhood belonging to a City."""

    name = models.CharField(max_length=50)
    city = models.ForeignKey(City)

    class Meta:
        # Area names may repeat across cities but are unique within one.
        unique_together = ('name', 'city')
        ordering = ('name',)

    def __unicode__(self):
        return self.name
class ApartmentQuerySet(QuerySet):
    """QuerySet with apartment-specific filters."""

    def available(self):
        """Keep only listings marked available whose tenant account is active."""
        return self.filter(is_available=True, tenant__is_active=True)
class ApartmentManager(models.Manager):
    """Default manager that layers apartment-specific queries on top of
    the base queryset."""

    def get_query_set(self):
        # Pre-join area and city so listing pages avoid N+1 queries.
        return ApartmentQuerySet(self.model).select_related(
            'area', 'area__city')

    def available(self):
        """Return only apartments that are currently rentable."""
        return self.get_query_set().available()

    def search(self, search):
        """Free-text search over area name, city name, postcode and address.

        Blank, whitespace-only or falsy input yields an empty queryset.
        """
        # `reduce` is not a builtin in Python 3; import it explicitly
        # (functools.reduce exists since 2.6, so this is safe either way).
        from functools import reduce
        if search:
            search = search.strip()
        if not search:
            return self.none()
        search_fields = (
            'area__name__iexact',
            'area__city__name__iexact',
            'postcode__iexact',
            'address__icontains',
        )
        # OR the per-field criteria together into a single filter.
        criteria = [Q(**{field : search})
                        for field in search_fields]
        return self.filter(reduce(operator.or_, criteria))
class Apartment(models.Model):
TYPE_APARTMENT = 1
TYPE_DETACHED = 2
TYPE_SEMIDETACHED = 3
TYPE_COTTAGE =4
TYPE_CHOICES = (
(TYPE_APARTMENT, "Apartment"),
(TYPE_DETACHED, "Detached house"),
(TYPE_SEMIDETACHED, "Semi-detached house"),
(TYPE_COTTAGE, "Cottage"),
)
SAUNA_NONE = 1
SAUNA_SHARED = 2
SAUNA_OWN = 3
SAUNA_CHOICES = (
(SAUNA_NONE, "No sauna"),
(SAUNA_SHARED, "Shared/communal sauna"),
(SAUNA_OWN, "Own sauna"),
)
ROOM_CHOICES = (
(1, "1 room"),
(2, "2 rooms"),
(3, "3 rooms"),
(4, "4 rooms"),
)
LANDLORD_TENANT = 1
LANDLORD_PRIVATE = 2
LANDLORD_AGENCY = 3
LANDLORD_CHOICES = (
(LANDLORD_TENANT, "Occupant"),
(LANDLORD_PRIVATE, "Private landlord"),
(LANDLORD_AGENCY, "Rental agency"),
)
area = models.ForeignKey(Area)
tenant = models.ForeignKey(User)
landlord = models.IntegerField(
choices=LANDLORD_CHOICES,
default=LANDLORD_PRIVATE,
)
agency = models.CharField(max_length=100, null=True, blank=True)
agency_website = models.URLField(null=True, blank=True)
address = models.CharField(max_length=100)
postcode = models.CharField(max_length=7)
latitude = models.FloatField(null=True, blank=True)
longitude = models.FloatField(null=True, blank=True)
added_on = models.DateTimeField(auto_now_add=True)
is_available = models.BooleanField(default=True)
available_from = models.DateField(null=True, blank=True)
available_to = models.DateField(null=True, blank=True)
is_shared = models.BooleanField('Shared accomodation', default=False)
type = models.IntegerField(
choices=TYPE_CHOICES,
default=TYPE_APARTMENT,
)
num_rooms = models.IntegerField('Rooms', choices=ROOM_CHOICES)
floor = models.IntegerField(null=True, blank=True)
lift = models.BooleanField(default=False)
num_floors = models.IntegerField(null=True, blank=True)
sauna = models.IntegerField(
choices=SAUNA_CHOICES,
default=SAUNA_NONE,
)
rent_pcm = models.DecimalField(
decimal_places=2,
max_digits=8,
)
deposit = models.DecimalField(
decimal_places=2,
max_digits=8,
null=True,
blank=True
)
smoking = models.BooleanField(default=False)
pets = models.BooleanField(default=False)
size = models.FloatField('Size (sqm)')
garden_size = models.FloatField(null=True, blank=True)
furnished = models.BooleanField(default=False)
cable = models.BooleanField(default=False)
broadband = models.BooleanField(default=False)
satellite = models.BooleanField(default=False)
balcony = models.BooleanField(default=False)
parking = models.BooleanField(default=False)
garage = models.BooleanField(default=False)
bike_storage = models.BooleanField(default=False)
extra_storage = models.BooleanField(default=False)
gym = models.BooleanField(default=False)
laundry = models.BooleanField(default=False)
description = models.TextField(null=True, blank=True)
kitchen_amenities = models.TextField(null=True, blank=True)
furniture = models.TextField(null=True, blank=True)
heating = models.TextField(null=True, blank=True)
other_amenities = models.TextField(null=True, blank=True)
objects = ApartmentManager()
def __unicode__(self):
return self.get_full_address()
@permalink
def get_absolute_url(self):
return ('apartments:detail', [str(self.id)])
def get_full_address(self):
return "{0}, {1} {2}".format(
self.address,
self.postcode,
self.area.city.name.upper()
)
def is_agency_landlord(self):
return self.landlord == self.LANDLORD_AGENCY
def get_location(self):
searchable = "{0}, {1} {2}, Finland".format(
self.address,
self.postcode,
self.area.city,
)
address, (lat, lng) = geocoder.geocode(
searchable,
exactly_one=True
)
return lat, lng
    def save(self, *args, **kwargs):
        # Re-geocode on every save so the stored coordinates track address
        # edits.  NOTE(review): this is a network call per save and will
        # raise if the geocoder fails — confirm that is acceptable.
        self.latitude, self.longitude = self.get_location()
        super(Apartment, self).save(*args, **kwargs)
|
normal
|
{
"blob_id": "89ba805e47a9727573e1e25371a70fb887ee170d",
"index": 9141,
"step-1": "<mask token>\n\n\nclass Area(models.Model):\n <mask token>\n <mask token>\n\n\n class Meta:\n unique_together = 'name', 'city'\n ordering = 'name',\n <mask token>\n\n\nclass ApartmentQuerySet(QuerySet):\n\n def available(self):\n return self.filter(is_available=True, tenant__is_active=True)\n\n\nclass ApartmentManager(models.Manager):\n\n def get_query_set(self):\n return ApartmentQuerySet(self.model).select_related('area',\n 'area__city')\n\n def available(self):\n return self.get_query_set().available()\n\n def search(self, search):\n if search:\n search = search.strip()\n if not search:\n return self.none()\n search_fields = ('area__name__iexact', 'area__city__name__iexact',\n 'postcode__iexact', 'address__icontains')\n criteria = [Q(**{field: search}) for field in search_fields]\n return self.filter(reduce(operator.or_, criteria))\n\n\nclass Apartment(models.Model):\n TYPE_APARTMENT = 1\n TYPE_DETACHED = 2\n TYPE_SEMIDETACHED = 3\n TYPE_COTTAGE = 4\n TYPE_CHOICES = (TYPE_APARTMENT, 'Apartment'), (TYPE_DETACHED,\n 'Detached house'), (TYPE_SEMIDETACHED, 'Semi-detached house'), (\n TYPE_COTTAGE, 'Cottage')\n SAUNA_NONE = 1\n SAUNA_SHARED = 2\n SAUNA_OWN = 3\n SAUNA_CHOICES = (SAUNA_NONE, 'No sauna'), (SAUNA_SHARED,\n 'Shared/communal sauna'), (SAUNA_OWN, 'Own sauna')\n ROOM_CHOICES = (1, '1 room'), (2, '2 rooms'), (3, '3 rooms'), (4, '4 rooms'\n )\n LANDLORD_TENANT = 1\n LANDLORD_PRIVATE = 2\n LANDLORD_AGENCY = 3\n LANDLORD_CHOICES = (LANDLORD_TENANT, 'Occupant'), (LANDLORD_PRIVATE,\n 'Private landlord'), (LANDLORD_AGENCY, 'Rental agency')\n area = models.ForeignKey(Area)\n tenant = models.ForeignKey(User)\n landlord = models.IntegerField(choices=LANDLORD_CHOICES, default=\n LANDLORD_PRIVATE)\n agency = models.CharField(max_length=100, null=True, blank=True)\n agency_website = models.URLField(null=True, blank=True)\n address = models.CharField(max_length=100)\n postcode = models.CharField(max_length=7)\n latitude = models.FloatField(null=True, 
blank=True)\n longitude = models.FloatField(null=True, blank=True)\n added_on = models.DateTimeField(auto_now_add=True)\n is_available = models.BooleanField(default=True)\n available_from = models.DateField(null=True, blank=True)\n available_to = models.DateField(null=True, blank=True)\n is_shared = models.BooleanField('Shared accomodation', default=False)\n type = models.IntegerField(choices=TYPE_CHOICES, default=TYPE_APARTMENT)\n num_rooms = models.IntegerField('Rooms', choices=ROOM_CHOICES)\n floor = models.IntegerField(null=True, blank=True)\n lift = models.BooleanField(default=False)\n num_floors = models.IntegerField(null=True, blank=True)\n sauna = models.IntegerField(choices=SAUNA_CHOICES, default=SAUNA_NONE)\n rent_pcm = models.DecimalField(decimal_places=2, max_digits=8)\n deposit = models.DecimalField(decimal_places=2, max_digits=8, null=True,\n blank=True)\n smoking = models.BooleanField(default=False)\n pets = models.BooleanField(default=False)\n size = models.FloatField('Size (sqm)')\n garden_size = models.FloatField(null=True, blank=True)\n furnished = models.BooleanField(default=False)\n cable = models.BooleanField(default=False)\n broadband = models.BooleanField(default=False)\n satellite = models.BooleanField(default=False)\n balcony = models.BooleanField(default=False)\n parking = models.BooleanField(default=False)\n garage = models.BooleanField(default=False)\n bike_storage = models.BooleanField(default=False)\n extra_storage = models.BooleanField(default=False)\n gym = models.BooleanField(default=False)\n laundry = models.BooleanField(default=False)\n description = models.TextField(null=True, blank=True)\n kitchen_amenities = models.TextField(null=True, blank=True)\n furniture = models.TextField(null=True, blank=True)\n heating = models.TextField(null=True, blank=True)\n other_amenities = models.TextField(null=True, blank=True)\n objects = ApartmentManager()\n\n def __unicode__(self):\n return self.get_full_address()\n\n @permalink\n def 
get_absolute_url(self):\n return 'apartments:detail', [str(self.id)]\n\n def get_full_address(self):\n return '{0}, {1} {2}'.format(self.address, self.postcode, self.area\n .city.name.upper())\n\n def is_agency_landlord(self):\n return self.landlord == self.LANDLORD_AGENCY\n\n def get_location(self):\n searchable = '{0}, {1} {2}, Finland'.format(self.address, self.\n postcode, self.area.city)\n address, (lat, lng) = geocoder.geocode(searchable, exactly_one=True)\n return lat, lng\n\n def save(self, *args, **kwargs):\n self.latitude, self.longitude = self.get_location()\n super(Apartment, self).save(*args, **kwargs)\n",
"step-2": "<mask token>\n\n\nclass City(models.Model):\n <mask token>\n\n\n class Meta:\n ordering = 'name',\n verbose_name_plural = 'cities'\n\n def __unicode__(self):\n return self.name\n\n\nclass Area(models.Model):\n name = models.CharField(max_length=50)\n city = models.ForeignKey(City)\n\n\n class Meta:\n unique_together = 'name', 'city'\n ordering = 'name',\n\n def __unicode__(self):\n return self.name\n\n\nclass ApartmentQuerySet(QuerySet):\n\n def available(self):\n return self.filter(is_available=True, tenant__is_active=True)\n\n\nclass ApartmentManager(models.Manager):\n\n def get_query_set(self):\n return ApartmentQuerySet(self.model).select_related('area',\n 'area__city')\n\n def available(self):\n return self.get_query_set().available()\n\n def search(self, search):\n if search:\n search = search.strip()\n if not search:\n return self.none()\n search_fields = ('area__name__iexact', 'area__city__name__iexact',\n 'postcode__iexact', 'address__icontains')\n criteria = [Q(**{field: search}) for field in search_fields]\n return self.filter(reduce(operator.or_, criteria))\n\n\nclass Apartment(models.Model):\n TYPE_APARTMENT = 1\n TYPE_DETACHED = 2\n TYPE_SEMIDETACHED = 3\n TYPE_COTTAGE = 4\n TYPE_CHOICES = (TYPE_APARTMENT, 'Apartment'), (TYPE_DETACHED,\n 'Detached house'), (TYPE_SEMIDETACHED, 'Semi-detached house'), (\n TYPE_COTTAGE, 'Cottage')\n SAUNA_NONE = 1\n SAUNA_SHARED = 2\n SAUNA_OWN = 3\n SAUNA_CHOICES = (SAUNA_NONE, 'No sauna'), (SAUNA_SHARED,\n 'Shared/communal sauna'), (SAUNA_OWN, 'Own sauna')\n ROOM_CHOICES = (1, '1 room'), (2, '2 rooms'), (3, '3 rooms'), (4, '4 rooms'\n )\n LANDLORD_TENANT = 1\n LANDLORD_PRIVATE = 2\n LANDLORD_AGENCY = 3\n LANDLORD_CHOICES = (LANDLORD_TENANT, 'Occupant'), (LANDLORD_PRIVATE,\n 'Private landlord'), (LANDLORD_AGENCY, 'Rental agency')\n area = models.ForeignKey(Area)\n tenant = models.ForeignKey(User)\n landlord = models.IntegerField(choices=LANDLORD_CHOICES, default=\n LANDLORD_PRIVATE)\n agency = 
models.CharField(max_length=100, null=True, blank=True)\n agency_website = models.URLField(null=True, blank=True)\n address = models.CharField(max_length=100)\n postcode = models.CharField(max_length=7)\n latitude = models.FloatField(null=True, blank=True)\n longitude = models.FloatField(null=True, blank=True)\n added_on = models.DateTimeField(auto_now_add=True)\n is_available = models.BooleanField(default=True)\n available_from = models.DateField(null=True, blank=True)\n available_to = models.DateField(null=True, blank=True)\n is_shared = models.BooleanField('Shared accomodation', default=False)\n type = models.IntegerField(choices=TYPE_CHOICES, default=TYPE_APARTMENT)\n num_rooms = models.IntegerField('Rooms', choices=ROOM_CHOICES)\n floor = models.IntegerField(null=True, blank=True)\n lift = models.BooleanField(default=False)\n num_floors = models.IntegerField(null=True, blank=True)\n sauna = models.IntegerField(choices=SAUNA_CHOICES, default=SAUNA_NONE)\n rent_pcm = models.DecimalField(decimal_places=2, max_digits=8)\n deposit = models.DecimalField(decimal_places=2, max_digits=8, null=True,\n blank=True)\n smoking = models.BooleanField(default=False)\n pets = models.BooleanField(default=False)\n size = models.FloatField('Size (sqm)')\n garden_size = models.FloatField(null=True, blank=True)\n furnished = models.BooleanField(default=False)\n cable = models.BooleanField(default=False)\n broadband = models.BooleanField(default=False)\n satellite = models.BooleanField(default=False)\n balcony = models.BooleanField(default=False)\n parking = models.BooleanField(default=False)\n garage = models.BooleanField(default=False)\n bike_storage = models.BooleanField(default=False)\n extra_storage = models.BooleanField(default=False)\n gym = models.BooleanField(default=False)\n laundry = models.BooleanField(default=False)\n description = models.TextField(null=True, blank=True)\n kitchen_amenities = models.TextField(null=True, blank=True)\n furniture = 
models.TextField(null=True, blank=True)\n heating = models.TextField(null=True, blank=True)\n other_amenities = models.TextField(null=True, blank=True)\n objects = ApartmentManager()\n\n def __unicode__(self):\n return self.get_full_address()\n\n @permalink\n def get_absolute_url(self):\n return 'apartments:detail', [str(self.id)]\n\n def get_full_address(self):\n return '{0}, {1} {2}'.format(self.address, self.postcode, self.area\n .city.name.upper())\n\n def is_agency_landlord(self):\n return self.landlord == self.LANDLORD_AGENCY\n\n def get_location(self):\n searchable = '{0}, {1} {2}, Finland'.format(self.address, self.\n postcode, self.area.city)\n address, (lat, lng) = geocoder.geocode(searchable, exactly_one=True)\n return lat, lng\n\n def save(self, *args, **kwargs):\n self.latitude, self.longitude = self.get_location()\n super(Apartment, self).save(*args, **kwargs)\n",
"step-3": "<mask token>\n\n\nclass City(models.Model):\n name = models.CharField(max_length=30, unique=True)\n\n\n class Meta:\n ordering = 'name',\n verbose_name_plural = 'cities'\n\n def __unicode__(self):\n return self.name\n\n\nclass Area(models.Model):\n name = models.CharField(max_length=50)\n city = models.ForeignKey(City)\n\n\n class Meta:\n unique_together = 'name', 'city'\n ordering = 'name',\n\n def __unicode__(self):\n return self.name\n\n\nclass ApartmentQuerySet(QuerySet):\n\n def available(self):\n return self.filter(is_available=True, tenant__is_active=True)\n\n\nclass ApartmentManager(models.Manager):\n\n def get_query_set(self):\n return ApartmentQuerySet(self.model).select_related('area',\n 'area__city')\n\n def available(self):\n return self.get_query_set().available()\n\n def search(self, search):\n if search:\n search = search.strip()\n if not search:\n return self.none()\n search_fields = ('area__name__iexact', 'area__city__name__iexact',\n 'postcode__iexact', 'address__icontains')\n criteria = [Q(**{field: search}) for field in search_fields]\n return self.filter(reduce(operator.or_, criteria))\n\n\nclass Apartment(models.Model):\n TYPE_APARTMENT = 1\n TYPE_DETACHED = 2\n TYPE_SEMIDETACHED = 3\n TYPE_COTTAGE = 4\n TYPE_CHOICES = (TYPE_APARTMENT, 'Apartment'), (TYPE_DETACHED,\n 'Detached house'), (TYPE_SEMIDETACHED, 'Semi-detached house'), (\n TYPE_COTTAGE, 'Cottage')\n SAUNA_NONE = 1\n SAUNA_SHARED = 2\n SAUNA_OWN = 3\n SAUNA_CHOICES = (SAUNA_NONE, 'No sauna'), (SAUNA_SHARED,\n 'Shared/communal sauna'), (SAUNA_OWN, 'Own sauna')\n ROOM_CHOICES = (1, '1 room'), (2, '2 rooms'), (3, '3 rooms'), (4, '4 rooms'\n )\n LANDLORD_TENANT = 1\n LANDLORD_PRIVATE = 2\n LANDLORD_AGENCY = 3\n LANDLORD_CHOICES = (LANDLORD_TENANT, 'Occupant'), (LANDLORD_PRIVATE,\n 'Private landlord'), (LANDLORD_AGENCY, 'Rental agency')\n area = models.ForeignKey(Area)\n tenant = models.ForeignKey(User)\n landlord = models.IntegerField(choices=LANDLORD_CHOICES, default=\n 
LANDLORD_PRIVATE)\n agency = models.CharField(max_length=100, null=True, blank=True)\n agency_website = models.URLField(null=True, blank=True)\n address = models.CharField(max_length=100)\n postcode = models.CharField(max_length=7)\n latitude = models.FloatField(null=True, blank=True)\n longitude = models.FloatField(null=True, blank=True)\n added_on = models.DateTimeField(auto_now_add=True)\n is_available = models.BooleanField(default=True)\n available_from = models.DateField(null=True, blank=True)\n available_to = models.DateField(null=True, blank=True)\n is_shared = models.BooleanField('Shared accomodation', default=False)\n type = models.IntegerField(choices=TYPE_CHOICES, default=TYPE_APARTMENT)\n num_rooms = models.IntegerField('Rooms', choices=ROOM_CHOICES)\n floor = models.IntegerField(null=True, blank=True)\n lift = models.BooleanField(default=False)\n num_floors = models.IntegerField(null=True, blank=True)\n sauna = models.IntegerField(choices=SAUNA_CHOICES, default=SAUNA_NONE)\n rent_pcm = models.DecimalField(decimal_places=2, max_digits=8)\n deposit = models.DecimalField(decimal_places=2, max_digits=8, null=True,\n blank=True)\n smoking = models.BooleanField(default=False)\n pets = models.BooleanField(default=False)\n size = models.FloatField('Size (sqm)')\n garden_size = models.FloatField(null=True, blank=True)\n furnished = models.BooleanField(default=False)\n cable = models.BooleanField(default=False)\n broadband = models.BooleanField(default=False)\n satellite = models.BooleanField(default=False)\n balcony = models.BooleanField(default=False)\n parking = models.BooleanField(default=False)\n garage = models.BooleanField(default=False)\n bike_storage = models.BooleanField(default=False)\n extra_storage = models.BooleanField(default=False)\n gym = models.BooleanField(default=False)\n laundry = models.BooleanField(default=False)\n description = models.TextField(null=True, blank=True)\n kitchen_amenities = models.TextField(null=True, blank=True)\n 
furniture = models.TextField(null=True, blank=True)\n heating = models.TextField(null=True, blank=True)\n other_amenities = models.TextField(null=True, blank=True)\n objects = ApartmentManager()\n\n def __unicode__(self):\n return self.get_full_address()\n\n @permalink\n def get_absolute_url(self):\n return 'apartments:detail', [str(self.id)]\n\n def get_full_address(self):\n return '{0}, {1} {2}'.format(self.address, self.postcode, self.area\n .city.name.upper())\n\n def is_agency_landlord(self):\n return self.landlord == self.LANDLORD_AGENCY\n\n def get_location(self):\n searchable = '{0}, {1} {2}, Finland'.format(self.address, self.\n postcode, self.area.city)\n address, (lat, lng) = geocoder.geocode(searchable, exactly_one=True)\n return lat, lng\n\n def save(self, *args, **kwargs):\n self.latitude, self.longitude = self.get_location()\n super(Apartment, self).save(*args, **kwargs)\n",
"step-4": "<mask token>\ngeocoder = geopy.geocoders.Google()\n\n\nclass City(models.Model):\n name = models.CharField(max_length=30, unique=True)\n\n\n class Meta:\n ordering = 'name',\n verbose_name_plural = 'cities'\n\n def __unicode__(self):\n return self.name\n\n\nclass Area(models.Model):\n name = models.CharField(max_length=50)\n city = models.ForeignKey(City)\n\n\n class Meta:\n unique_together = 'name', 'city'\n ordering = 'name',\n\n def __unicode__(self):\n return self.name\n\n\nclass ApartmentQuerySet(QuerySet):\n\n def available(self):\n return self.filter(is_available=True, tenant__is_active=True)\n\n\nclass ApartmentManager(models.Manager):\n\n def get_query_set(self):\n return ApartmentQuerySet(self.model).select_related('area',\n 'area__city')\n\n def available(self):\n return self.get_query_set().available()\n\n def search(self, search):\n if search:\n search = search.strip()\n if not search:\n return self.none()\n search_fields = ('area__name__iexact', 'area__city__name__iexact',\n 'postcode__iexact', 'address__icontains')\n criteria = [Q(**{field: search}) for field in search_fields]\n return self.filter(reduce(operator.or_, criteria))\n\n\nclass Apartment(models.Model):\n TYPE_APARTMENT = 1\n TYPE_DETACHED = 2\n TYPE_SEMIDETACHED = 3\n TYPE_COTTAGE = 4\n TYPE_CHOICES = (TYPE_APARTMENT, 'Apartment'), (TYPE_DETACHED,\n 'Detached house'), (TYPE_SEMIDETACHED, 'Semi-detached house'), (\n TYPE_COTTAGE, 'Cottage')\n SAUNA_NONE = 1\n SAUNA_SHARED = 2\n SAUNA_OWN = 3\n SAUNA_CHOICES = (SAUNA_NONE, 'No sauna'), (SAUNA_SHARED,\n 'Shared/communal sauna'), (SAUNA_OWN, 'Own sauna')\n ROOM_CHOICES = (1, '1 room'), (2, '2 rooms'), (3, '3 rooms'), (4, '4 rooms'\n )\n LANDLORD_TENANT = 1\n LANDLORD_PRIVATE = 2\n LANDLORD_AGENCY = 3\n LANDLORD_CHOICES = (LANDLORD_TENANT, 'Occupant'), (LANDLORD_PRIVATE,\n 'Private landlord'), (LANDLORD_AGENCY, 'Rental agency')\n area = models.ForeignKey(Area)\n tenant = models.ForeignKey(User)\n landlord = 
models.IntegerField(choices=LANDLORD_CHOICES, default=\n LANDLORD_PRIVATE)\n agency = models.CharField(max_length=100, null=True, blank=True)\n agency_website = models.URLField(null=True, blank=True)\n address = models.CharField(max_length=100)\n postcode = models.CharField(max_length=7)\n latitude = models.FloatField(null=True, blank=True)\n longitude = models.FloatField(null=True, blank=True)\n added_on = models.DateTimeField(auto_now_add=True)\n is_available = models.BooleanField(default=True)\n available_from = models.DateField(null=True, blank=True)\n available_to = models.DateField(null=True, blank=True)\n is_shared = models.BooleanField('Shared accomodation', default=False)\n type = models.IntegerField(choices=TYPE_CHOICES, default=TYPE_APARTMENT)\n num_rooms = models.IntegerField('Rooms', choices=ROOM_CHOICES)\n floor = models.IntegerField(null=True, blank=True)\n lift = models.BooleanField(default=False)\n num_floors = models.IntegerField(null=True, blank=True)\n sauna = models.IntegerField(choices=SAUNA_CHOICES, default=SAUNA_NONE)\n rent_pcm = models.DecimalField(decimal_places=2, max_digits=8)\n deposit = models.DecimalField(decimal_places=2, max_digits=8, null=True,\n blank=True)\n smoking = models.BooleanField(default=False)\n pets = models.BooleanField(default=False)\n size = models.FloatField('Size (sqm)')\n garden_size = models.FloatField(null=True, blank=True)\n furnished = models.BooleanField(default=False)\n cable = models.BooleanField(default=False)\n broadband = models.BooleanField(default=False)\n satellite = models.BooleanField(default=False)\n balcony = models.BooleanField(default=False)\n parking = models.BooleanField(default=False)\n garage = models.BooleanField(default=False)\n bike_storage = models.BooleanField(default=False)\n extra_storage = models.BooleanField(default=False)\n gym = models.BooleanField(default=False)\n laundry = models.BooleanField(default=False)\n description = models.TextField(null=True, blank=True)\n 
kitchen_amenities = models.TextField(null=True, blank=True)\n furniture = models.TextField(null=True, blank=True)\n heating = models.TextField(null=True, blank=True)\n other_amenities = models.TextField(null=True, blank=True)\n objects = ApartmentManager()\n\n def __unicode__(self):\n return self.get_full_address()\n\n @permalink\n def get_absolute_url(self):\n return 'apartments:detail', [str(self.id)]\n\n def get_full_address(self):\n return '{0}, {1} {2}'.format(self.address, self.postcode, self.area\n .city.name.upper())\n\n def is_agency_landlord(self):\n return self.landlord == self.LANDLORD_AGENCY\n\n def get_location(self):\n searchable = '{0}, {1} {2}, Finland'.format(self.address, self.\n postcode, self.area.city)\n address, (lat, lng) = geocoder.geocode(searchable, exactly_one=True)\n return lat, lng\n\n def save(self, *args, **kwargs):\n self.latitude, self.longitude = self.get_location()\n super(Apartment, self).save(*args, **kwargs)\n",
"step-5": "import datetime\nimport operator\n\nimport geopy\n\nfrom django.db import models\nfrom django.db.models import Q\nfrom django.db.models.query import QuerySet\nfrom django.db.models import permalink\nfrom django.contrib.auth.models import User\n\n\ngeocoder = geopy.geocoders.Google()\n\n\nclass City(models.Model):\n\n name = models.CharField(max_length=30, unique=True)\n\n class Meta:\n ordering = ('name',)\n verbose_name_plural = \"cities\"\n\n def __unicode__(self):\n return self.name\n\n\nclass Area(models.Model):\n\n name = models.CharField(max_length=50)\n city = models.ForeignKey(City)\n\n class Meta:\n unique_together = ('name', 'city')\n ordering = ('name',)\n\n def __unicode__(self):\n return self.name\n\n\nclass ApartmentQuerySet(QuerySet):\n\n def available(self):\n\n return self.filter(\n is_available=True,\n tenant__is_active=True,\n )\n\n\nclass ApartmentManager(models.Manager):\n\n def get_query_set(self):\n return ApartmentQuerySet(self.model).select_related(\n 'area', 'area__city')\n\n def available(self):\n return self.get_query_set().available()\n\n def search(self, search):\n\n if search:\n search = search.strip()\n\n if not search:\n return self.none()\n\n search_fields = (\n 'area__name__iexact',\n 'area__city__name__iexact',\n 'postcode__iexact',\n 'address__icontains',\n )\n\n criteria = [Q(**{field : search}) \n for field in search_fields]\n\n return self.filter(reduce(operator.or_, criteria))\n \n\nclass Apartment(models.Model):\n\n TYPE_APARTMENT = 1\n TYPE_DETACHED = 2\n TYPE_SEMIDETACHED = 3\n TYPE_COTTAGE =4 \n\n TYPE_CHOICES = (\n (TYPE_APARTMENT, \"Apartment\"),\n (TYPE_DETACHED, \"Detached house\"),\n (TYPE_SEMIDETACHED, \"Semi-detached house\"),\n (TYPE_COTTAGE, \"Cottage\"),\n )\n\n SAUNA_NONE = 1\n SAUNA_SHARED = 2\n SAUNA_OWN = 3\n\n SAUNA_CHOICES = (\n (SAUNA_NONE, \"No sauna\"),\n (SAUNA_SHARED, \"Shared/communal sauna\"),\n (SAUNA_OWN, \"Own sauna\"),\n )\n\n ROOM_CHOICES = (\n (1, \"1 room\"),\n (2, \"2 rooms\"),\n 
(3, \"3 rooms\"),\n (4, \"4 rooms\"),\n )\n\n LANDLORD_TENANT = 1\n LANDLORD_PRIVATE = 2\n LANDLORD_AGENCY = 3\n\n LANDLORD_CHOICES = (\n (LANDLORD_TENANT, \"Occupant\"),\n (LANDLORD_PRIVATE, \"Private landlord\"),\n (LANDLORD_AGENCY, \"Rental agency\"),\n )\n\n area = models.ForeignKey(Area)\n tenant = models.ForeignKey(User)\n \n landlord = models.IntegerField(\n choices=LANDLORD_CHOICES,\n default=LANDLORD_PRIVATE,\n )\n\n agency = models.CharField(max_length=100, null=True, blank=True)\n agency_website = models.URLField(null=True, blank=True)\n\n address = models.CharField(max_length=100)\n postcode = models.CharField(max_length=7)\n\n latitude = models.FloatField(null=True, blank=True)\n longitude = models.FloatField(null=True, blank=True)\n\n added_on = models.DateTimeField(auto_now_add=True)\n is_available = models.BooleanField(default=True)\n available_from = models.DateField(null=True, blank=True)\n available_to = models.DateField(null=True, blank=True)\n\n is_shared = models.BooleanField('Shared accomodation', default=False)\n\n type = models.IntegerField(\n choices=TYPE_CHOICES,\n default=TYPE_APARTMENT,\n )\n\n num_rooms = models.IntegerField('Rooms', choices=ROOM_CHOICES)\n floor = models.IntegerField(null=True, blank=True)\n lift = models.BooleanField(default=False)\n num_floors = models.IntegerField(null=True, blank=True)\n\n sauna = models.IntegerField(\n choices=SAUNA_CHOICES,\n default=SAUNA_NONE,\n )\n\n rent_pcm = models.DecimalField(\n decimal_places=2,\n max_digits=8,\n )\n\n deposit = models.DecimalField(\n decimal_places=2,\n max_digits=8,\n null=True,\n blank=True\n )\n\n smoking = models.BooleanField(default=False)\n pets = models.BooleanField(default=False)\n\n size = models.FloatField('Size (sqm)')\n garden_size = models.FloatField(null=True, blank=True)\n\n furnished = models.BooleanField(default=False)\n cable = models.BooleanField(default=False)\n broadband = models.BooleanField(default=False)\n satellite = 
models.BooleanField(default=False)\n balcony = models.BooleanField(default=False)\n\n parking = models.BooleanField(default=False)\n garage = models.BooleanField(default=False)\n bike_storage = models.BooleanField(default=False)\n extra_storage = models.BooleanField(default=False)\n gym = models.BooleanField(default=False)\n laundry = models.BooleanField(default=False)\n\n description = models.TextField(null=True, blank=True)\n kitchen_amenities = models.TextField(null=True, blank=True)\n furniture = models.TextField(null=True, blank=True)\n heating = models.TextField(null=True, blank=True)\n other_amenities = models.TextField(null=True, blank=True)\n\n\n objects = ApartmentManager()\n\n\n def __unicode__(self):\n return self.get_full_address()\n\n @permalink\n def get_absolute_url(self):\n return ('apartments:detail', [str(self.id)])\n\n def get_full_address(self):\n\n return \"{0}, {1} {2}\".format(\n self.address,\n self.postcode,\n self.area.city.name.upper()\n )\n\n \n def is_agency_landlord(self):\n return self.landlord == self.LANDLORD_AGENCY\n\n def get_location(self):\n\n searchable = \"{0}, {1} {2}, Finland\".format(\n self.address,\n self.postcode,\n self.area.city,\n )\n\n address, (lat, lng) = geocoder.geocode(\n searchable,\n exactly_one=True\n )\n\n return lat, lng\n\n def save(self, *args, **kwargs):\n self.latitude, self.longitude = self.get_location()\n super(Apartment, self).save(*args, **kwargs)\n\n \n",
"step-ids": [
15,
19,
20,
21,
23
]
}
|
[
15,
19,
20,
21,
23
] |
#!/usr/bin/python
# Copyright (c) 2020 Maryushi3
import emoji_data_python as edp
import sys
import pyautogui
from Xlib import display
from PyQt5.QtWidgets import QApplication, QGridLayout, QLabel, QLineEdit, QScrollArea, QSizePolicy, QStackedLayout, QVBoxLayout, QWidget
from PyQt5.QtCore import QEvent, QSettings, Qt, pyqtSignal
from PyQt5.QtGui import QFont
from PyQt5 import QtTest
# --- Module-wide mutable state --------------------------------------------
# Grid holding the emoji labels; created in EmojiPickerWindow.initUI().
emojiGridLayout = None
# The top-level window instance; set in EmojiPickerWindow.initUI().
mainWindow = None
# Fixed grid geometry: at most columns * rows emoji are visible at once.
emojiGridColumnCount = 5
emojiGridRowCount = 4
# Derived counts, recomputed by fill_grid_with_char_list() on every search.
emojiToShowCount = 0
fullRowsCount = 0
lastRowEmojiCount = 0
# Point size used for the emoji labels.
emojiFontSize = 20
# [row, column] of the currently highlighted emoji.
selectedEmojiPosition = list((0,0))
# Set just before a deliberate exit (after a paste).
willExitOnItsOwn = False
# Character currently under the selection highlight.
selectedEmojiChar=''
# QSettings handle and the recently-used emoji list it persists.
settingsFile = None
historyList = []
# Whether the last search matched anything (controls the stacked layout page).
foundAnyEmoji = True
# QStackedLayout switching between the grid page and the "no emoji" label.
layoutStack = None
# Shared font for all emoji labels.
font = QFont()
font.setPointSize(emojiFontSize)
# Exits without visible lag: hide the window first so teardown work does not
# leave a dead frame on screen, then terminate the interpreter.
def quitNicely():
    mainWindow.hide()
    quit()
# Current pointer coordinates, read straight from the X server via Xlib.
def mousePosition():
    data = display.Display().screen().root.query_pointer()._data
    return data["root_x"], data["root_y"]
# Records the chosen emoji in history, copies it to the clipboard and pastes
# it into the previously focused window via a synthetic Ctrl+V, then exits.
def execute_emoji(char):
    add_char_to_history(char)
    global willExitOnItsOwn
    willExitOnItsOwn = True
    mainWindow.hide()
    QApplication.clipboard().setText(char)
    pyautogui.hotkey("ctrl","v")
    # Give the paste target time to read the clipboard before the app exits.
    QtTest.QTest.qWait(250)
    quit()
# Fills the emoji grid with the given characters (truncated to one screenful)
# and refreshes the module-level counters used by the wraparound navigation.
def fill_grid_with_char_list(charList):
    # for wraparound
    global emojiToShowCount
    global fullRowsCount
    global lastRowEmojiCount
    # Never show more than one grid page worth of emoji.
    emojiToShowCount = min(len(charList),(emojiGridColumnCount*emojiGridRowCount))
    fullRowsCount = emojiToShowCount//emojiGridColumnCount
    lastRowEmojiCount = emojiToShowCount%emojiGridColumnCount
    global foundAnyEmoji
    # Switch the stacked layout between the grid page (index 0) and the
    # "no emoji found" page (index 1).
    if emojiToShowCount>0:
        foundAnyEmoji = True
        layoutStack.setCurrentIndex(0)
    else:
        foundAnyEmoji = False
        layoutStack.setCurrentIndex(1)
    # clear grid: reparenting to None detaches the previous labels
    global emojiGridLayout
    for i in reversed(range(emojiGridLayout.count())):
        emojiGridLayout.itemAt(i).widget().setParent(None)
    # fill with new chars
    rowIdx = 0
    colIdx = 0
    for emoji in charList:
        # Stop once the visible page is full.
        if rowIdx>emojiGridRowCount-1:
            break;
        label = QClickableLabel(emoji)
        label.clicked.connect(execute_emoji)
        label.setFont(font)
        label.setAlignment(Qt.AlignCenter)
        label.setMinimumHeight(49)
        emojiGridLayout.addWidget(label,rowIdx,colIdx)
        emojiGridLayout.setAlignment(label,Qt.AlignTop)
        # Advance column-first, wrapping to the start of the next row.
        if colIdx < emojiGridColumnCount-1:
            colIdx+=1
        else:
            colIdx=0
            rowIdx+=1
    emojiGridLayout.setContentsMargins(0,0,0,0)
    emojiGridLayout.setHorizontalSpacing(0)
    emojiGridLayout.setVerticalSpacing(0)
    # Start with the top-left emoji selected.
    if emojiToShowCount>0:
        highlight_emoji([0,0])
# Searches the emoji database by name and shows the matches in the grid;
# an empty or whitespace-only query falls back to the recently-used history.
# (Removed the dead local `selectedEmoji = (0,0)` — it was never read; the
# real selection state lives in the module-level selectedEmojiPosition.)
def execute_search(text):
    if not text or text.isspace():
        fill_grid_with_history()
        return
    foundEmoji = edp.find_by_name(text)
    charList = [emoji.char for emoji in foundEmoji]
    fill_grid_with_char_list(charList)
# Moves the selection highlight onto whichever label the mouse is hovering.
def emoji_hovered(hoveredLabel):
    grid = hoveredLabel.parentWidget().layout()
    row, column, _rowSpan, _colSpan = grid.getItemPosition(grid.indexOf(hoveredLabel))
    highlight_emoji([row, column])
# Moves the highlight to newPosition ([row, col]).  Out-of-range positions
# are wrapped back onto the grid (the edge cases differ depending on whether
# the last row is completely filled), then the previously and newly selected
# labels are restyled.  Mutates newPosition in place.
def highlight_emoji(newPosition):
    global selectedEmojiPosition
    # grid is filled to a full rectangle (last row fills the window horizontally)
    if lastRowEmojiCount==0:
        if newPosition[0]<0:
            newPosition[0]=fullRowsCount-1
        elif newPosition[1]<0:
            newPosition[1]=emojiGridColumnCount-1
        elif newPosition[0]>fullRowsCount-1:
            newPosition[0]=0
        elif newPosition[1]>emojiGridColumnCount-1:
            newPosition[1]=0
    # last row is not full: each edge needs a separate rule because the
    # bottom row is shorter than the others
    else:
        #horizontal wraparound through RIGHT edge for full rows
        if (newPosition[0]<fullRowsCount) and (newPosition[1]>emojiGridColumnCount-1):
            newPosition[1]=0
        #horizontal wraparound through LEFT edge for full rows
        elif (newPosition[0]<fullRowsCount) and (newPosition[1]<0):
            newPosition[1]=emojiGridColumnCount-1
        #horizontal wraparound through right edge for NON FULL rows
        # (the last conjunct restricts this to purely horizontal moves)
        elif (newPosition[0]==fullRowsCount) and (newPosition[1]>lastRowEmojiCount-1) and ((selectedEmojiPosition[0]-newPosition[0])==0):
            newPosition[1]=0
        #horizontal wraparound through LEFT edge for NON FULL rows
        elif (newPosition[0]>=fullRowsCount) and (newPosition[1]<0):
            newPosition[1]=lastRowEmojiCount-1
        #vertical wraparound through BOTTOM edge for full cols
        elif (newPosition[0]>fullRowsCount) and (newPosition[1]<lastRowEmojiCount):
            newPosition[0]=0
        #vertical wraparound through TOP edge for full cols
        elif (newPosition[0]<0) and (newPosition[1]<lastRowEmojiCount):
            newPosition[0]=fullRowsCount
        #vertical wraparound through BOTTOM edge for NON FULL cols
        elif (newPosition[0]>fullRowsCount-1) and (newPosition[1]>lastRowEmojiCount-1):
            newPosition[0]=0
        #vertical wraparound through TOP edge for NON FULL cols
        elif (newPosition[0]<0) and (newPosition[1]>lastRowEmojiCount-1):
            newPosition[0]=fullRowsCount-1
    oldPosition = selectedEmojiPosition
    selectedEmojiPosition = newPosition
    # Clear the highlight from the previously selected label, if it exists.
    widgetToDeselect = emojiGridLayout.itemAtPosition(oldPosition[0],oldPosition[1])
    if widgetToDeselect:
        widgetToDeselect = widgetToDeselect.widget()
        widgetToDeselect.setStyleSheet("")
    global selectedEmojiChar
    # Highlight the new label and remember its character for later pasting.
    widgetToSelect = emojiGridLayout.itemAtPosition(selectedEmojiPosition[0],selectedEmojiPosition[1])
    if widgetToSelect:
        widgetToSelect = widgetToSelect.widget()
        selectedEmojiChar = widgetToSelect.text()
        widgetToSelect.setStyleSheet("QLabel{background-color: palette(highlight);}")
# Translates an arrow-key direction name into a [row, col] step and moves the
# selection highlight accordingly; unknown direction strings are ignored.
def move_selection(direction):
    offsets = {
        "right": (0, 1),
        "left": (0, -1),
        "up": (-1, 0),
        "down": (1, 0),
    }
    offset = offsets.get(direction)
    if offset is not None:
        highlight_emoji([axis + step for axis, step in zip(selectedEmojiPosition, offset)])
# Window-level key handler: Escape closes the picker without pasting.
def on_key(key):
    # test for a specific key
    if key == Qt.Key_Escape:
        quitNicely()
# Moves `char` to the front of the most-recently-used emoji history, keeps
# the list at most one grid page long, and persists it to the settings file.
# (Dropped the needless `global settingsFile` — the function only calls a
# method on it, never rebinds it — and replaced the temp-list prepend dance
# with a single expression.)
def add_char_to_history(char):
    global historyList
    if not historyList:
        # Covers both an empty list and the None that QSettings.value()
        # returns before anything has ever been saved.
        historyList = [char]
    else:
        if char in historyList:
            historyList.remove(char)
        # Prepend and truncate to one visible screenful of emoji.
        historyList = ([char] + historyList)[:(emojiGridColumnCount*emojiGridRowCount)]
    settingsFile.setValue('history/history',historyList)
# Shows the recently-used emoji in the grid.  On a first run the persisted
# history is None (QSettings.value() returns None for a missing key, see
# initSettings), which used to crash fill_grid_with_char_list's len() call —
# fall back to an empty list instead.
def fill_grid_with_history():
    fill_grid_with_char_list(historyList or [])
# main app window class with inits
class EmojiPickerWindow(QWidget):
    def __init__(self):
        """Create the picker window centered horizontally on the pointer."""
        super().__init__()
        # focus handling
        self.installEventFilter(self)
        self.title = 'Emoji picker \(^o^)/'
        # Fixed window size in pixels.
        self.width = 281
        self.height = 251
        # start with text box centered at mouse pointer position
        self.left, self.top = mousePosition()
        self.left -= self.width//2
        # NOTE(review): 24 appears to be the bottom text-entry's height, so
        # the entry lands under the pointer — confirm.
        self.top += (24-self.height)
        self.initSettings()
        self.initUI()
def initUI(self):
# topmost window layout
layout = QVBoxLayout()
global layoutStack
layoutStack = QStackedLayout()
layoutStackWidget = QWidget()
layoutStackWidget.setLayout(layoutStack)
# scroll area setup shenanigans
scrollArea = QScrollArea()
gridWidget = QWidget()
global emojiGridLayout
emojiGridLayout = QGridLayout(gridWidget)
emojiGridLayout.setAlignment(Qt.AlignTop | Qt.AlignLeft)
# stretch grid to widget
for col in range(emojiGridColumnCount):
emojiGridLayout.setColumnStretch(col,1)
for row in range(emojiGridRowCount):
emojiGridLayout.setRowStretch(row,1)
scrollArea.setWidget(gridWidget)
scrollArea.setWidgetResizable(True)
layoutStack.addWidget(scrollArea)
# info to show when no emoji has been found
noEmojiFoundLabel = QLabel("No emoji found 🙁")
noEmojiFoundLabel.setAlignment(Qt.AlignCenter | Qt.AlignHCenter | Qt.AlignVCenter)
layoutStack.addWidget(noEmojiFoundLabel)
layout.addWidget(layoutStackWidget)
# fill with a placeholder for now (smiling or smile)
# execute_search('smil')
fill_grid_with_history()
# bottom text entry
lineEdit = QLineEditWithArrows()
lineEdit.textChanged.connect(execute_search)
layout.addWidget(lineEdit)
# align it to the bottom, so that it won't stay centered vertically
layout.setAlignment(lineEdit, Qt.AlignBottom)
self.setLayout(layout)
self.setWindowTitle(self.title)
self.setGeometry(self.left, self.top, self.width, self.height)
self.setFixedSize(self.width, self.height)
self.setWindowFlags(Qt.FramelessWindowHint | Qt.WindowStaysOnTopHint)
# needed for filling the grid out from the outside
global mainWindow
mainWindow = self
# esc handling
self.keyPressed.connect(on_key)
self.show()
lineEdit.setFocus()
def initSettings(self):
global settingsFile
global historyList
settingsFile = QSettings("emoji-picker-qtpy", "history");
historyList = settingsFile.value('history/history')
# key handling
keyPressed = pyqtSignal(int)
def keyPressEvent(self, event):
super(EmojiPickerWindow, self).keyPressEvent(event)
self.keyPressed.emit(event.key())
# focus handling
global willExitOnItsOwn
def eventFilter(self, object, event):
if event.type()== QEvent.WindowDeactivate or event.type()== QEvent.FocusOut:
if (not willExitOnItsOwn):
quitNicely()
return False
# clickable label
class QClickableLabel(QLabel):
    """QLabel that emits its own text when clicked and reports hovering."""

    # emitted with the label's text (the emoji character) on mouse press
    clicked = pyqtSignal(str)

    def __init__(self, parent=None):
        super().__init__(parent)

    def mousePressEvent(self, ev):
        # forward the emoji character to whoever is listening
        self.clicked.emit(self.text())

    def enterEvent(self, ev):
        # hovering moves the grid selection onto this label
        emoji_hovered(self)
# keyboard handling override for QlineEdit
class QLineEditWithArrows(QLineEdit):
    """Search box that also steers the emoji grid with arrow/enter/tab keys."""

    def keyPressEvent(self, ev):
        global selectedEmojiChar
        global foundAnyEmoji
        key = ev.key()

        # arrow keys move the grid highlight
        direction = {
            Qt.Key_Right: "right",
            Qt.Key_Left: "left",
            Qt.Key_Up: "up",
            Qt.Key_Down: "down",
        }.get(key)
        if direction is not None:
            move_selection(direction)

        # enter pastes the selected emoji, or quits when nothing matched
        if key in (Qt.Key_Return, Qt.Key_Enter):
            if foundAnyEmoji:
                execute_emoji(selectedEmojiChar)
            else:
                quitNicely()

        # swallow tab; every other key still reaches the normal line edit
        if key != Qt.Key_Tab:
            QLineEdit.keyPressEvent(self, ev)
if __name__ == '__main__':
    # spin up the Qt application and hand control to its event loop
    app = QApplication(sys.argv)
    window = EmojiPickerWindow()
    sys.exit(app.exec_())
|
normal
|
{
"blob_id": "c860c1fa6e7610c60077f0eab1572895a23393fd",
"index": 3725,
"step-1": "<mask token>\n\n\ndef fill_grid_with_char_list(charList):\n global emojiToShowCount\n global fullRowsCount\n global lastRowEmojiCount\n emojiToShowCount = min(len(charList), emojiGridColumnCount *\n emojiGridRowCount)\n fullRowsCount = emojiToShowCount // emojiGridColumnCount\n lastRowEmojiCount = emojiToShowCount % emojiGridColumnCount\n global foundAnyEmoji\n if emojiToShowCount > 0:\n foundAnyEmoji = True\n layoutStack.setCurrentIndex(0)\n else:\n foundAnyEmoji = False\n layoutStack.setCurrentIndex(1)\n global emojiGridLayout\n for i in reversed(range(emojiGridLayout.count())):\n emojiGridLayout.itemAt(i).widget().setParent(None)\n rowIdx = 0\n colIdx = 0\n for emoji in charList:\n if rowIdx > emojiGridRowCount - 1:\n break\n label = QClickableLabel(emoji)\n label.clicked.connect(execute_emoji)\n label.setFont(font)\n label.setAlignment(Qt.AlignCenter)\n label.setMinimumHeight(49)\n emojiGridLayout.addWidget(label, rowIdx, colIdx)\n emojiGridLayout.setAlignment(label, Qt.AlignTop)\n if colIdx < emojiGridColumnCount - 1:\n colIdx += 1\n else:\n colIdx = 0\n rowIdx += 1\n emojiGridLayout.setContentsMargins(0, 0, 0, 0)\n emojiGridLayout.setHorizontalSpacing(0)\n emojiGridLayout.setVerticalSpacing(0)\n if emojiToShowCount > 0:\n highlight_emoji([0, 0])\n\n\n<mask token>\n\n\ndef emoji_hovered(hoveredLabel):\n parentGrid = hoveredLabel.parentWidget().layout()\n hoveredIndex = parentGrid.indexOf(hoveredLabel)\n hoveredRow, hoveredColumn, _, _ = parentGrid.getItemPosition(hoveredIndex)\n highlight_emoji([hoveredRow, hoveredColumn])\n\n\n<mask token>\n\n\ndef on_key(key):\n if key == Qt.Key_Escape:\n quitNicely()\n\n\n<mask token>\n\n\nclass EmojiPickerWindow(QWidget):\n\n def __init__(self):\n super().__init__()\n self.installEventFilter(self)\n self.title = 'Emoji picker \(^o^)/'\n self.width = 281\n self.height = 251\n self.left, self.top = mousePosition()\n self.left -= self.width // 2\n self.top += 24 - self.height\n self.initSettings()\n 
self.initUI()\n\n def initUI(self):\n layout = QVBoxLayout()\n global layoutStack\n layoutStack = QStackedLayout()\n layoutStackWidget = QWidget()\n layoutStackWidget.setLayout(layoutStack)\n scrollArea = QScrollArea()\n gridWidget = QWidget()\n global emojiGridLayout\n emojiGridLayout = QGridLayout(gridWidget)\n emojiGridLayout.setAlignment(Qt.AlignTop | Qt.AlignLeft)\n for col in range(emojiGridColumnCount):\n emojiGridLayout.setColumnStretch(col, 1)\n for row in range(emojiGridRowCount):\n emojiGridLayout.setRowStretch(row, 1)\n scrollArea.setWidget(gridWidget)\n scrollArea.setWidgetResizable(True)\n layoutStack.addWidget(scrollArea)\n noEmojiFoundLabel = QLabel('No emoji found 🙁')\n noEmojiFoundLabel.setAlignment(Qt.AlignCenter | Qt.AlignHCenter |\n Qt.AlignVCenter)\n layoutStack.addWidget(noEmojiFoundLabel)\n layout.addWidget(layoutStackWidget)\n fill_grid_with_history()\n lineEdit = QLineEditWithArrows()\n lineEdit.textChanged.connect(execute_search)\n layout.addWidget(lineEdit)\n layout.setAlignment(lineEdit, Qt.AlignBottom)\n self.setLayout(layout)\n self.setWindowTitle(self.title)\n self.setGeometry(self.left, self.top, self.width, self.height)\n self.setFixedSize(self.width, self.height)\n self.setWindowFlags(Qt.FramelessWindowHint | Qt.WindowStaysOnTopHint)\n global mainWindow\n mainWindow = self\n self.keyPressed.connect(on_key)\n self.show()\n lineEdit.setFocus()\n\n def initSettings(self):\n global settingsFile\n global historyList\n settingsFile = QSettings('emoji-picker-qtpy', 'history')\n historyList = settingsFile.value('history/history')\n keyPressed = pyqtSignal(int)\n\n def keyPressEvent(self, event):\n super(EmojiPickerWindow, self).keyPressEvent(event)\n self.keyPressed.emit(event.key())\n global willExitOnItsOwn\n\n def eventFilter(self, object, event):\n if event.type() == QEvent.WindowDeactivate or event.type(\n ) == QEvent.FocusOut:\n if not willExitOnItsOwn:\n quitNicely()\n return False\n\n\nclass QClickableLabel(QLabel):\n clicked = 
pyqtSignal(str)\n\n def __init__(self, parent=None):\n QLabel.__init__(self, parent)\n\n def mousePressEvent(self, ev):\n self.clicked.emit(self.text())\n\n def enterEvent(self, ev):\n emoji_hovered(self)\n\n\nclass QLineEditWithArrows(QLineEdit):\n\n def keyPressEvent(self, ev):\n global selectedEmojiChar\n global foundAnyEmoji\n if ev.key() == Qt.Key_Right:\n move_selection('right')\n if ev.key() == Qt.Key_Left:\n move_selection('left')\n if ev.key() == Qt.Key_Up:\n move_selection('up')\n if ev.key() == Qt.Key_Down:\n move_selection('down')\n if ev.key() == Qt.Key_Return or ev.key() == Qt.Key_Enter:\n if foundAnyEmoji:\n execute_emoji(selectedEmojiChar)\n else:\n quitNicely()\n if ev.key() == Qt.Key_Tab:\n pass\n else:\n QLineEdit.keyPressEvent(self, ev)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef quitNicely():\n mainWindow.hide()\n quit()\n\n\n<mask token>\n\n\ndef fill_grid_with_char_list(charList):\n global emojiToShowCount\n global fullRowsCount\n global lastRowEmojiCount\n emojiToShowCount = min(len(charList), emojiGridColumnCount *\n emojiGridRowCount)\n fullRowsCount = emojiToShowCount // emojiGridColumnCount\n lastRowEmojiCount = emojiToShowCount % emojiGridColumnCount\n global foundAnyEmoji\n if emojiToShowCount > 0:\n foundAnyEmoji = True\n layoutStack.setCurrentIndex(0)\n else:\n foundAnyEmoji = False\n layoutStack.setCurrentIndex(1)\n global emojiGridLayout\n for i in reversed(range(emojiGridLayout.count())):\n emojiGridLayout.itemAt(i).widget().setParent(None)\n rowIdx = 0\n colIdx = 0\n for emoji in charList:\n if rowIdx > emojiGridRowCount - 1:\n break\n label = QClickableLabel(emoji)\n label.clicked.connect(execute_emoji)\n label.setFont(font)\n label.setAlignment(Qt.AlignCenter)\n label.setMinimumHeight(49)\n emojiGridLayout.addWidget(label, rowIdx, colIdx)\n emojiGridLayout.setAlignment(label, Qt.AlignTop)\n if colIdx < emojiGridColumnCount - 1:\n colIdx += 1\n else:\n colIdx = 0\n rowIdx += 1\n emojiGridLayout.setContentsMargins(0, 0, 0, 0)\n emojiGridLayout.setHorizontalSpacing(0)\n emojiGridLayout.setVerticalSpacing(0)\n if emojiToShowCount > 0:\n highlight_emoji([0, 0])\n\n\ndef execute_search(text):\n selectedEmoji = 0, 0\n if not text or text.isspace():\n fill_grid_with_history()\n return\n foundEmoji = edp.find_by_name(text)\n charList = [emoji.char for emoji in foundEmoji]\n fill_grid_with_char_list(charList)\n\n\ndef emoji_hovered(hoveredLabel):\n parentGrid = hoveredLabel.parentWidget().layout()\n hoveredIndex = parentGrid.indexOf(hoveredLabel)\n hoveredRow, hoveredColumn, _, _ = parentGrid.getItemPosition(hoveredIndex)\n highlight_emoji([hoveredRow, hoveredColumn])\n\n\n<mask token>\n\n\ndef on_key(key):\n if key == Qt.Key_Escape:\n quitNicely()\n\n\n<mask token>\n\n\nclass 
EmojiPickerWindow(QWidget):\n\n def __init__(self):\n super().__init__()\n self.installEventFilter(self)\n self.title = 'Emoji picker \(^o^)/'\n self.width = 281\n self.height = 251\n self.left, self.top = mousePosition()\n self.left -= self.width // 2\n self.top += 24 - self.height\n self.initSettings()\n self.initUI()\n\n def initUI(self):\n layout = QVBoxLayout()\n global layoutStack\n layoutStack = QStackedLayout()\n layoutStackWidget = QWidget()\n layoutStackWidget.setLayout(layoutStack)\n scrollArea = QScrollArea()\n gridWidget = QWidget()\n global emojiGridLayout\n emojiGridLayout = QGridLayout(gridWidget)\n emojiGridLayout.setAlignment(Qt.AlignTop | Qt.AlignLeft)\n for col in range(emojiGridColumnCount):\n emojiGridLayout.setColumnStretch(col, 1)\n for row in range(emojiGridRowCount):\n emojiGridLayout.setRowStretch(row, 1)\n scrollArea.setWidget(gridWidget)\n scrollArea.setWidgetResizable(True)\n layoutStack.addWidget(scrollArea)\n noEmojiFoundLabel = QLabel('No emoji found 🙁')\n noEmojiFoundLabel.setAlignment(Qt.AlignCenter | Qt.AlignHCenter |\n Qt.AlignVCenter)\n layoutStack.addWidget(noEmojiFoundLabel)\n layout.addWidget(layoutStackWidget)\n fill_grid_with_history()\n lineEdit = QLineEditWithArrows()\n lineEdit.textChanged.connect(execute_search)\n layout.addWidget(lineEdit)\n layout.setAlignment(lineEdit, Qt.AlignBottom)\n self.setLayout(layout)\n self.setWindowTitle(self.title)\n self.setGeometry(self.left, self.top, self.width, self.height)\n self.setFixedSize(self.width, self.height)\n self.setWindowFlags(Qt.FramelessWindowHint | Qt.WindowStaysOnTopHint)\n global mainWindow\n mainWindow = self\n self.keyPressed.connect(on_key)\n self.show()\n lineEdit.setFocus()\n\n def initSettings(self):\n global settingsFile\n global historyList\n settingsFile = QSettings('emoji-picker-qtpy', 'history')\n historyList = settingsFile.value('history/history')\n keyPressed = pyqtSignal(int)\n\n def keyPressEvent(self, event):\n super(EmojiPickerWindow, 
self).keyPressEvent(event)\n self.keyPressed.emit(event.key())\n global willExitOnItsOwn\n\n def eventFilter(self, object, event):\n if event.type() == QEvent.WindowDeactivate or event.type(\n ) == QEvent.FocusOut:\n if not willExitOnItsOwn:\n quitNicely()\n return False\n\n\nclass QClickableLabel(QLabel):\n clicked = pyqtSignal(str)\n\n def __init__(self, parent=None):\n QLabel.__init__(self, parent)\n\n def mousePressEvent(self, ev):\n self.clicked.emit(self.text())\n\n def enterEvent(self, ev):\n emoji_hovered(self)\n\n\nclass QLineEditWithArrows(QLineEdit):\n\n def keyPressEvent(self, ev):\n global selectedEmojiChar\n global foundAnyEmoji\n if ev.key() == Qt.Key_Right:\n move_selection('right')\n if ev.key() == Qt.Key_Left:\n move_selection('left')\n if ev.key() == Qt.Key_Up:\n move_selection('up')\n if ev.key() == Qt.Key_Down:\n move_selection('down')\n if ev.key() == Qt.Key_Return or ev.key() == Qt.Key_Enter:\n if foundAnyEmoji:\n execute_emoji(selectedEmojiChar)\n else:\n quitNicely()\n if ev.key() == Qt.Key_Tab:\n pass\n else:\n QLineEdit.keyPressEvent(self, ev)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef quitNicely():\n mainWindow.hide()\n quit()\n\n\n<mask token>\n\n\ndef fill_grid_with_char_list(charList):\n global emojiToShowCount\n global fullRowsCount\n global lastRowEmojiCount\n emojiToShowCount = min(len(charList), emojiGridColumnCount *\n emojiGridRowCount)\n fullRowsCount = emojiToShowCount // emojiGridColumnCount\n lastRowEmojiCount = emojiToShowCount % emojiGridColumnCount\n global foundAnyEmoji\n if emojiToShowCount > 0:\n foundAnyEmoji = True\n layoutStack.setCurrentIndex(0)\n else:\n foundAnyEmoji = False\n layoutStack.setCurrentIndex(1)\n global emojiGridLayout\n for i in reversed(range(emojiGridLayout.count())):\n emojiGridLayout.itemAt(i).widget().setParent(None)\n rowIdx = 0\n colIdx = 0\n for emoji in charList:\n if rowIdx > emojiGridRowCount - 1:\n break\n label = QClickableLabel(emoji)\n label.clicked.connect(execute_emoji)\n label.setFont(font)\n label.setAlignment(Qt.AlignCenter)\n label.setMinimumHeight(49)\n emojiGridLayout.addWidget(label, rowIdx, colIdx)\n emojiGridLayout.setAlignment(label, Qt.AlignTop)\n if colIdx < emojiGridColumnCount - 1:\n colIdx += 1\n else:\n colIdx = 0\n rowIdx += 1\n emojiGridLayout.setContentsMargins(0, 0, 0, 0)\n emojiGridLayout.setHorizontalSpacing(0)\n emojiGridLayout.setVerticalSpacing(0)\n if emojiToShowCount > 0:\n highlight_emoji([0, 0])\n\n\ndef execute_search(text):\n selectedEmoji = 0, 0\n if not text or text.isspace():\n fill_grid_with_history()\n return\n foundEmoji = edp.find_by_name(text)\n charList = [emoji.char for emoji in foundEmoji]\n fill_grid_with_char_list(charList)\n\n\ndef emoji_hovered(hoveredLabel):\n parentGrid = hoveredLabel.parentWidget().layout()\n hoveredIndex = parentGrid.indexOf(hoveredLabel)\n hoveredRow, hoveredColumn, _, _ = parentGrid.getItemPosition(hoveredIndex)\n highlight_emoji([hoveredRow, hoveredColumn])\n\n\ndef highlight_emoji(newPosition):\n global selectedEmojiPosition\n if lastRowEmojiCount == 0:\n if newPosition[0] < 0:\n 
newPosition[0] = fullRowsCount - 1\n elif newPosition[1] < 0:\n newPosition[1] = emojiGridColumnCount - 1\n elif newPosition[0] > fullRowsCount - 1:\n newPosition[0] = 0\n elif newPosition[1] > emojiGridColumnCount - 1:\n newPosition[1] = 0\n elif newPosition[0] < fullRowsCount and newPosition[1\n ] > emojiGridColumnCount - 1:\n newPosition[1] = 0\n elif newPosition[0] < fullRowsCount and newPosition[1] < 0:\n newPosition[1] = emojiGridColumnCount - 1\n elif newPosition[0] == fullRowsCount and newPosition[1\n ] > lastRowEmojiCount - 1 and selectedEmojiPosition[0] - newPosition[0\n ] == 0:\n newPosition[1] = 0\n elif newPosition[0] >= fullRowsCount and newPosition[1] < 0:\n newPosition[1] = lastRowEmojiCount - 1\n elif newPosition[0] > fullRowsCount and newPosition[1] < lastRowEmojiCount:\n newPosition[0] = 0\n elif newPosition[0] < 0 and newPosition[1] < lastRowEmojiCount:\n newPosition[0] = fullRowsCount\n elif newPosition[0] > fullRowsCount - 1 and newPosition[1\n ] > lastRowEmojiCount - 1:\n newPosition[0] = 0\n elif newPosition[0] < 0 and newPosition[1] > lastRowEmojiCount - 1:\n newPosition[0] = fullRowsCount - 1\n oldPosition = selectedEmojiPosition\n selectedEmojiPosition = newPosition\n widgetToDeselect = emojiGridLayout.itemAtPosition(oldPosition[0],\n oldPosition[1])\n if widgetToDeselect:\n widgetToDeselect = widgetToDeselect.widget()\n widgetToDeselect.setStyleSheet('')\n global selectedEmojiChar\n widgetToSelect = emojiGridLayout.itemAtPosition(selectedEmojiPosition[0\n ], selectedEmojiPosition[1])\n if widgetToSelect:\n widgetToSelect = widgetToSelect.widget()\n selectedEmojiChar = widgetToSelect.text()\n widgetToSelect.setStyleSheet(\n 'QLabel{background-color: palette(highlight);}')\n\n\n<mask token>\n\n\ndef on_key(key):\n if key == Qt.Key_Escape:\n quitNicely()\n\n\n<mask token>\n\n\ndef fill_grid_with_history():\n fill_grid_with_char_list(historyList)\n\n\nclass EmojiPickerWindow(QWidget):\n\n def __init__(self):\n super().__init__()\n 
self.installEventFilter(self)\n self.title = 'Emoji picker \(^o^)/'\n self.width = 281\n self.height = 251\n self.left, self.top = mousePosition()\n self.left -= self.width // 2\n self.top += 24 - self.height\n self.initSettings()\n self.initUI()\n\n def initUI(self):\n layout = QVBoxLayout()\n global layoutStack\n layoutStack = QStackedLayout()\n layoutStackWidget = QWidget()\n layoutStackWidget.setLayout(layoutStack)\n scrollArea = QScrollArea()\n gridWidget = QWidget()\n global emojiGridLayout\n emojiGridLayout = QGridLayout(gridWidget)\n emojiGridLayout.setAlignment(Qt.AlignTop | Qt.AlignLeft)\n for col in range(emojiGridColumnCount):\n emojiGridLayout.setColumnStretch(col, 1)\n for row in range(emojiGridRowCount):\n emojiGridLayout.setRowStretch(row, 1)\n scrollArea.setWidget(gridWidget)\n scrollArea.setWidgetResizable(True)\n layoutStack.addWidget(scrollArea)\n noEmojiFoundLabel = QLabel('No emoji found 🙁')\n noEmojiFoundLabel.setAlignment(Qt.AlignCenter | Qt.AlignHCenter |\n Qt.AlignVCenter)\n layoutStack.addWidget(noEmojiFoundLabel)\n layout.addWidget(layoutStackWidget)\n fill_grid_with_history()\n lineEdit = QLineEditWithArrows()\n lineEdit.textChanged.connect(execute_search)\n layout.addWidget(lineEdit)\n layout.setAlignment(lineEdit, Qt.AlignBottom)\n self.setLayout(layout)\n self.setWindowTitle(self.title)\n self.setGeometry(self.left, self.top, self.width, self.height)\n self.setFixedSize(self.width, self.height)\n self.setWindowFlags(Qt.FramelessWindowHint | Qt.WindowStaysOnTopHint)\n global mainWindow\n mainWindow = self\n self.keyPressed.connect(on_key)\n self.show()\n lineEdit.setFocus()\n\n def initSettings(self):\n global settingsFile\n global historyList\n settingsFile = QSettings('emoji-picker-qtpy', 'history')\n historyList = settingsFile.value('history/history')\n keyPressed = pyqtSignal(int)\n\n def keyPressEvent(self, event):\n super(EmojiPickerWindow, self).keyPressEvent(event)\n self.keyPressed.emit(event.key())\n global 
willExitOnItsOwn\n\n def eventFilter(self, object, event):\n if event.type() == QEvent.WindowDeactivate or event.type(\n ) == QEvent.FocusOut:\n if not willExitOnItsOwn:\n quitNicely()\n return False\n\n\nclass QClickableLabel(QLabel):\n clicked = pyqtSignal(str)\n\n def __init__(self, parent=None):\n QLabel.__init__(self, parent)\n\n def mousePressEvent(self, ev):\n self.clicked.emit(self.text())\n\n def enterEvent(self, ev):\n emoji_hovered(self)\n\n\nclass QLineEditWithArrows(QLineEdit):\n\n def keyPressEvent(self, ev):\n global selectedEmojiChar\n global foundAnyEmoji\n if ev.key() == Qt.Key_Right:\n move_selection('right')\n if ev.key() == Qt.Key_Left:\n move_selection('left')\n if ev.key() == Qt.Key_Up:\n move_selection('up')\n if ev.key() == Qt.Key_Down:\n move_selection('down')\n if ev.key() == Qt.Key_Return or ev.key() == Qt.Key_Enter:\n if foundAnyEmoji:\n execute_emoji(selectedEmojiChar)\n else:\n quitNicely()\n if ev.key() == Qt.Key_Tab:\n pass\n else:\n QLineEdit.keyPressEvent(self, ev)\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\ndef quitNicely():\n mainWindow.hide()\n quit()\n\n\ndef mousePosition():\n pointerData = display.Display().screen().root.query_pointer()._data\n return pointerData['root_x'], pointerData['root_y']\n\n\ndef execute_emoji(char):\n add_char_to_history(char)\n global willExitOnItsOwn\n willExitOnItsOwn = True\n mainWindow.hide()\n QApplication.clipboard().setText(char)\n pyautogui.hotkey('ctrl', 'v')\n QtTest.QTest.qWait(250)\n quit()\n\n\ndef fill_grid_with_char_list(charList):\n global emojiToShowCount\n global fullRowsCount\n global lastRowEmojiCount\n emojiToShowCount = min(len(charList), emojiGridColumnCount *\n emojiGridRowCount)\n fullRowsCount = emojiToShowCount // emojiGridColumnCount\n lastRowEmojiCount = emojiToShowCount % emojiGridColumnCount\n global foundAnyEmoji\n if emojiToShowCount > 0:\n foundAnyEmoji = True\n layoutStack.setCurrentIndex(0)\n else:\n foundAnyEmoji = False\n layoutStack.setCurrentIndex(1)\n global emojiGridLayout\n for i in reversed(range(emojiGridLayout.count())):\n emojiGridLayout.itemAt(i).widget().setParent(None)\n rowIdx = 0\n colIdx = 0\n for emoji in charList:\n if rowIdx > emojiGridRowCount - 1:\n break\n label = QClickableLabel(emoji)\n label.clicked.connect(execute_emoji)\n label.setFont(font)\n label.setAlignment(Qt.AlignCenter)\n label.setMinimumHeight(49)\n emojiGridLayout.addWidget(label, rowIdx, colIdx)\n emojiGridLayout.setAlignment(label, Qt.AlignTop)\n if colIdx < emojiGridColumnCount - 1:\n colIdx += 1\n else:\n colIdx = 0\n rowIdx += 1\n emojiGridLayout.setContentsMargins(0, 0, 0, 0)\n emojiGridLayout.setHorizontalSpacing(0)\n emojiGridLayout.setVerticalSpacing(0)\n if emojiToShowCount > 0:\n highlight_emoji([0, 0])\n\n\ndef execute_search(text):\n selectedEmoji = 0, 0\n if not text or text.isspace():\n fill_grid_with_history()\n return\n foundEmoji = edp.find_by_name(text)\n charList = [emoji.char for emoji in foundEmoji]\n fill_grid_with_char_list(charList)\n\n\ndef 
emoji_hovered(hoveredLabel):\n parentGrid = hoveredLabel.parentWidget().layout()\n hoveredIndex = parentGrid.indexOf(hoveredLabel)\n hoveredRow, hoveredColumn, _, _ = parentGrid.getItemPosition(hoveredIndex)\n highlight_emoji([hoveredRow, hoveredColumn])\n\n\ndef highlight_emoji(newPosition):\n global selectedEmojiPosition\n if lastRowEmojiCount == 0:\n if newPosition[0] < 0:\n newPosition[0] = fullRowsCount - 1\n elif newPosition[1] < 0:\n newPosition[1] = emojiGridColumnCount - 1\n elif newPosition[0] > fullRowsCount - 1:\n newPosition[0] = 0\n elif newPosition[1] > emojiGridColumnCount - 1:\n newPosition[1] = 0\n elif newPosition[0] < fullRowsCount and newPosition[1\n ] > emojiGridColumnCount - 1:\n newPosition[1] = 0\n elif newPosition[0] < fullRowsCount and newPosition[1] < 0:\n newPosition[1] = emojiGridColumnCount - 1\n elif newPosition[0] == fullRowsCount and newPosition[1\n ] > lastRowEmojiCount - 1 and selectedEmojiPosition[0] - newPosition[0\n ] == 0:\n newPosition[1] = 0\n elif newPosition[0] >= fullRowsCount and newPosition[1] < 0:\n newPosition[1] = lastRowEmojiCount - 1\n elif newPosition[0] > fullRowsCount and newPosition[1] < lastRowEmojiCount:\n newPosition[0] = 0\n elif newPosition[0] < 0 and newPosition[1] < lastRowEmojiCount:\n newPosition[0] = fullRowsCount\n elif newPosition[0] > fullRowsCount - 1 and newPosition[1\n ] > lastRowEmojiCount - 1:\n newPosition[0] = 0\n elif newPosition[0] < 0 and newPosition[1] > lastRowEmojiCount - 1:\n newPosition[0] = fullRowsCount - 1\n oldPosition = selectedEmojiPosition\n selectedEmojiPosition = newPosition\n widgetToDeselect = emojiGridLayout.itemAtPosition(oldPosition[0],\n oldPosition[1])\n if widgetToDeselect:\n widgetToDeselect = widgetToDeselect.widget()\n widgetToDeselect.setStyleSheet('')\n global selectedEmojiChar\n widgetToSelect = emojiGridLayout.itemAtPosition(selectedEmojiPosition[0\n ], selectedEmojiPosition[1])\n if widgetToSelect:\n widgetToSelect = widgetToSelect.widget()\n 
selectedEmojiChar = widgetToSelect.text()\n widgetToSelect.setStyleSheet(\n 'QLabel{background-color: palette(highlight);}')\n\n\n<mask token>\n\n\ndef on_key(key):\n if key == Qt.Key_Escape:\n quitNicely()\n\n\ndef add_char_to_history(char):\n global settingsFile\n global historyList\n if not historyList:\n historyList = [char]\n else:\n if char in historyList:\n historyList.remove(char)\n tempList = [char]\n tempList.extend(historyList)\n historyList = tempList[:emojiGridColumnCount * emojiGridRowCount]\n settingsFile.setValue('history/history', historyList)\n\n\ndef fill_grid_with_history():\n fill_grid_with_char_list(historyList)\n\n\nclass EmojiPickerWindow(QWidget):\n\n def __init__(self):\n super().__init__()\n self.installEventFilter(self)\n self.title = 'Emoji picker \(^o^)/'\n self.width = 281\n self.height = 251\n self.left, self.top = mousePosition()\n self.left -= self.width // 2\n self.top += 24 - self.height\n self.initSettings()\n self.initUI()\n\n def initUI(self):\n layout = QVBoxLayout()\n global layoutStack\n layoutStack = QStackedLayout()\n layoutStackWidget = QWidget()\n layoutStackWidget.setLayout(layoutStack)\n scrollArea = QScrollArea()\n gridWidget = QWidget()\n global emojiGridLayout\n emojiGridLayout = QGridLayout(gridWidget)\n emojiGridLayout.setAlignment(Qt.AlignTop | Qt.AlignLeft)\n for col in range(emojiGridColumnCount):\n emojiGridLayout.setColumnStretch(col, 1)\n for row in range(emojiGridRowCount):\n emojiGridLayout.setRowStretch(row, 1)\n scrollArea.setWidget(gridWidget)\n scrollArea.setWidgetResizable(True)\n layoutStack.addWidget(scrollArea)\n noEmojiFoundLabel = QLabel('No emoji found 🙁')\n noEmojiFoundLabel.setAlignment(Qt.AlignCenter | Qt.AlignHCenter |\n Qt.AlignVCenter)\n layoutStack.addWidget(noEmojiFoundLabel)\n layout.addWidget(layoutStackWidget)\n fill_grid_with_history()\n lineEdit = QLineEditWithArrows()\n lineEdit.textChanged.connect(execute_search)\n layout.addWidget(lineEdit)\n layout.setAlignment(lineEdit, 
Qt.AlignBottom)\n self.setLayout(layout)\n self.setWindowTitle(self.title)\n self.setGeometry(self.left, self.top, self.width, self.height)\n self.setFixedSize(self.width, self.height)\n self.setWindowFlags(Qt.FramelessWindowHint | Qt.WindowStaysOnTopHint)\n global mainWindow\n mainWindow = self\n self.keyPressed.connect(on_key)\n self.show()\n lineEdit.setFocus()\n\n def initSettings(self):\n global settingsFile\n global historyList\n settingsFile = QSettings('emoji-picker-qtpy', 'history')\n historyList = settingsFile.value('history/history')\n keyPressed = pyqtSignal(int)\n\n def keyPressEvent(self, event):\n super(EmojiPickerWindow, self).keyPressEvent(event)\n self.keyPressed.emit(event.key())\n global willExitOnItsOwn\n\n def eventFilter(self, object, event):\n if event.type() == QEvent.WindowDeactivate or event.type(\n ) == QEvent.FocusOut:\n if not willExitOnItsOwn:\n quitNicely()\n return False\n\n\nclass QClickableLabel(QLabel):\n clicked = pyqtSignal(str)\n\n def __init__(self, parent=None):\n QLabel.__init__(self, parent)\n\n def mousePressEvent(self, ev):\n self.clicked.emit(self.text())\n\n def enterEvent(self, ev):\n emoji_hovered(self)\n\n\nclass QLineEditWithArrows(QLineEdit):\n\n def keyPressEvent(self, ev):\n global selectedEmojiChar\n global foundAnyEmoji\n if ev.key() == Qt.Key_Right:\n move_selection('right')\n if ev.key() == Qt.Key_Left:\n move_selection('left')\n if ev.key() == Qt.Key_Up:\n move_selection('up')\n if ev.key() == Qt.Key_Down:\n move_selection('down')\n if ev.key() == Qt.Key_Return or ev.key() == Qt.Key_Enter:\n if foundAnyEmoji:\n execute_emoji(selectedEmojiChar)\n else:\n quitNicely()\n if ev.key() == Qt.Key_Tab:\n pass\n else:\n QLineEdit.keyPressEvent(self, ev)\n\n\n<mask token>\n",
"step-5": "#!/usr/bin/python\n# Copyright (c) 2020 Maryushi3\n\nimport emoji_data_python as edp\nimport sys\nimport pyautogui\nfrom Xlib import display\nfrom PyQt5.QtWidgets import QApplication, QGridLayout, QLabel, QLineEdit, QScrollArea, QSizePolicy, QStackedLayout, QVBoxLayout, QWidget\nfrom PyQt5.QtCore import QEvent, QSettings, Qt, pyqtSignal\nfrom PyQt5.QtGui import QFont\nfrom PyQt5 import QtTest\n\n\n# globals\nemojiGridLayout = None\nmainWindow = None\nemojiGridColumnCount = 5\nemojiGridRowCount = 4\nemojiToShowCount = 0\nfullRowsCount = 0\nlastRowEmojiCount = 0\nemojiFontSize = 20\nselectedEmojiPosition = list((0,0))\nwillExitOnItsOwn = False\nselectedEmojiChar=''\nsettingsFile = None\nhistoryList = []\nfoundAnyEmoji = True\nlayoutStack = None\n\nfont = QFont()\nfont.setPointSize(emojiFontSize)\n\n# quits without a lag\ndef quitNicely():\n mainWindow.hide()\n quit()\n\n# gets mouse position from Xlib\ndef mousePosition():\n pointerData = display.Display().screen().root.query_pointer()._data\n return pointerData[\"root_x\"], pointerData[\"root_y\"]\n\n# copies and pastes selected emoji\ndef execute_emoji(char):\n add_char_to_history(char)\n global willExitOnItsOwn\n willExitOnItsOwn = True\n mainWindow.hide()\n QApplication.clipboard().setText(char)\n pyautogui.hotkey(\"ctrl\",\"v\")\n QtTest.QTest.qWait(250)\n quit()\n\n# fills grid with given char list and takes care of layout and counting\ndef fill_grid_with_char_list(charList):\n\n # for wraparound\n global emojiToShowCount\n global fullRowsCount\n global lastRowEmojiCount\n emojiToShowCount = min(len(charList),(emojiGridColumnCount*emojiGridRowCount))\n fullRowsCount = emojiToShowCount//emojiGridColumnCount\n lastRowEmojiCount = emojiToShowCount%emojiGridColumnCount\n \n global foundAnyEmoji\n if emojiToShowCount>0:\n foundAnyEmoji = True\n layoutStack.setCurrentIndex(0)\n else:\n foundAnyEmoji = False\n layoutStack.setCurrentIndex(1)\n\n\n # clear grid\n global emojiGridLayout\n for i in 
reversed(range(emojiGridLayout.count())): \n emojiGridLayout.itemAt(i).widget().setParent(None)\n\n # fill with new chars\n rowIdx = 0\n colIdx = 0\n for emoji in charList:\n if rowIdx>emojiGridRowCount-1:\n break;\n\n label = QClickableLabel(emoji)\n label.clicked.connect(execute_emoji)\n label.setFont(font)\n label.setAlignment(Qt.AlignCenter)\n label.setMinimumHeight(49)\n emojiGridLayout.addWidget(label,rowIdx,colIdx)\n emojiGridLayout.setAlignment(label,Qt.AlignTop)\n if colIdx < emojiGridColumnCount-1:\n colIdx+=1\n else:\n colIdx=0\n rowIdx+=1\n\n emojiGridLayout.setContentsMargins(0,0,0,0)\n emojiGridLayout.setHorizontalSpacing(0)\n emojiGridLayout.setVerticalSpacing(0)\n\n if emojiToShowCount>0:\n highlight_emoji([0,0])\n\n# searches for emoji, and passes them to fill_grid_with_char_list\ndef execute_search(text):\n selectedEmoji = (0,0)\n if not text or text.isspace():\n fill_grid_with_history()\n return\n\n foundEmoji = edp.find_by_name(text)\n charList = [emoji.char for emoji in foundEmoji]\n\n fill_grid_with_char_list(charList)\n\n# handles what to do after hovering over a given label\ndef emoji_hovered(hoveredLabel):\n parentGrid = hoveredLabel.parentWidget().layout()\n hoveredIndex = parentGrid.indexOf(hoveredLabel)\n hoveredRow, hoveredColumn, _, _ = parentGrid.getItemPosition(hoveredIndex)\n highlight_emoji([hoveredRow,hoveredColumn])\n\n# selects, sets style and handles wraparound\ndef highlight_emoji(newPosition):\n global selectedEmojiPosition\n\n # grid is filled to a full rectangle (last row fills the window horizontally)\n if lastRowEmojiCount==0:\n if newPosition[0]<0:\n newPosition[0]=fullRowsCount-1\n elif newPosition[1]<0:\n newPosition[1]=emojiGridColumnCount-1\n elif newPosition[0]>fullRowsCount-1:\n newPosition[0]=0\n elif newPosition[1]>emojiGridColumnCount-1:\n newPosition[1]=0\n # last row is not full\n else:\n #horizontal wraparound through RIGHT edge for full rows\n if (newPosition[0]<fullRowsCount) and 
(newPosition[1]>emojiGridColumnCount-1):\n newPosition[1]=0\n #horizontal wraparound through LEFT edge for full rows\n elif (newPosition[0]<fullRowsCount) and (newPosition[1]<0):\n newPosition[1]=emojiGridColumnCount-1\n #horizontal wraparound through right edge for NON FULL rows\n elif (newPosition[0]==fullRowsCount) and (newPosition[1]>lastRowEmojiCount-1) and ((selectedEmojiPosition[0]-newPosition[0])==0):\n newPosition[1]=0\n #horizontal wraparound through LEFT edge for NON FULL rows\n elif (newPosition[0]>=fullRowsCount) and (newPosition[1]<0):\n newPosition[1]=lastRowEmojiCount-1\n #vertical wraparound through BOTTOM edge for full cols\n elif (newPosition[0]>fullRowsCount) and (newPosition[1]<lastRowEmojiCount):\n newPosition[0]=0\n #vertical wraparound through TOP edge for full cols\n elif (newPosition[0]<0) and (newPosition[1]<lastRowEmojiCount):\n newPosition[0]=fullRowsCount\n #vertical wraparound through BOTTOM edge for NON FULL cols\n elif (newPosition[0]>fullRowsCount-1) and (newPosition[1]>lastRowEmojiCount-1):\n newPosition[0]=0\n #vertical wraparound through TOP edge for NON FULL cols\n elif (newPosition[0]<0) and (newPosition[1]>lastRowEmojiCount-1):\n newPosition[0]=fullRowsCount-1\n\n oldPosition = selectedEmojiPosition\n selectedEmojiPosition = newPosition\n \n widgetToDeselect = emojiGridLayout.itemAtPosition(oldPosition[0],oldPosition[1])\n if widgetToDeselect:\n widgetToDeselect = widgetToDeselect.widget()\n widgetToDeselect.setStyleSheet(\"\")\n\n global selectedEmojiChar\n widgetToSelect = emojiGridLayout.itemAtPosition(selectedEmojiPosition[0],selectedEmojiPosition[1])\n if widgetToSelect:\n widgetToSelect = widgetToSelect.widget()\n selectedEmojiChar = widgetToSelect.text()\n widgetToSelect.setStyleSheet(\"QLabel{background-color: palette(highlight);}\")\n \n# handles direction where to move emoji selection\ndef move_selection(direction):\n if direction==\"right\":\n highlight_emoji([sum(x) for x in zip(selectedEmojiPosition, [0,1])])\n 
elif direction==\"left\":\n highlight_emoji([sum(x) for x in zip(selectedEmojiPosition, [0,-1])])\n elif direction==\"up\":\n highlight_emoji([sum(x) for x in zip(selectedEmojiPosition, [-1,0])])\n elif direction==\"down\":\n highlight_emoji([sum(x) for x in zip(selectedEmojiPosition, [1,0])])\n\n# handles Esc\ndef on_key(key):\n # test for a specific key\n if key == Qt.Key_Escape:\n quitNicely()\n\n# adds given emoji to history and saves it to config file\ndef add_char_to_history(char):\n global settingsFile\n global historyList\n if not historyList:\n historyList = [char]\n else:\n if char in historyList:\n historyList.remove(char)\n \n tempList = [char]\n tempList.extend(historyList)\n historyList = tempList[:(emojiGridColumnCount*emojiGridRowCount)] \n \n settingsFile.setValue('history/history',historyList)\n\n# wrapper around filling the grid\ndef fill_grid_with_history():\n fill_grid_with_char_list(historyList)\n\n# main app window class with inits\nclass EmojiPickerWindow(QWidget):\n\n def __init__(self):\n super().__init__()\n\n # focus handling\n self.installEventFilter(self)\n\n self.title = 'Emoji picker \(^o^)/'\n self.width = 281\n self.height = 251\n\n # start with text box centered at mouse pointer position\n self.left, self.top = mousePosition() \n self.left -= self.width//2\n self.top += (24-self.height)\n\n self.initSettings()\n self.initUI()\n \n def initUI(self):\n # topmost window layout \n layout = QVBoxLayout() \n\n global layoutStack\n layoutStack = QStackedLayout()\n layoutStackWidget = QWidget()\n layoutStackWidget.setLayout(layoutStack)\n\n # scroll area setup shenanigans\n scrollArea = QScrollArea() \n gridWidget = QWidget()\n global emojiGridLayout\n emojiGridLayout = QGridLayout(gridWidget)\n emojiGridLayout.setAlignment(Qt.AlignTop | Qt.AlignLeft)\n\n\n\n # stretch grid to widget\n for col in range(emojiGridColumnCount):\n emojiGridLayout.setColumnStretch(col,1)\n for row in range(emojiGridRowCount):\n 
emojiGridLayout.setRowStretch(row,1)\n\n scrollArea.setWidget(gridWidget)\n scrollArea.setWidgetResizable(True)\n layoutStack.addWidget(scrollArea)\n\n # info to show when no emoji has been found\n noEmojiFoundLabel = QLabel(\"No emoji found 🙁\")\n noEmojiFoundLabel.setAlignment(Qt.AlignCenter | Qt.AlignHCenter | Qt.AlignVCenter)\n layoutStack.addWidget(noEmojiFoundLabel) \n layout.addWidget(layoutStackWidget)\n\n # fill with a placeholder for now (smiling or smile)\n # execute_search('smil')\n fill_grid_with_history()\n # bottom text entry\n lineEdit = QLineEditWithArrows() \n lineEdit.textChanged.connect(execute_search)\n layout.addWidget(lineEdit)\n\n # align it to the bottom, so that it won't stay centered vertically\n layout.setAlignment(lineEdit, Qt.AlignBottom)\n\n\n self.setLayout(layout)\n self.setWindowTitle(self.title)\n self.setGeometry(self.left, self.top, self.width, self.height)\n self.setFixedSize(self.width, self.height)\n self.setWindowFlags(Qt.FramelessWindowHint | Qt.WindowStaysOnTopHint)\n\n # needed for filling the grid out from the outside\n global mainWindow\n mainWindow = self\n\n # esc handling\n self.keyPressed.connect(on_key)\n\n self.show()\n lineEdit.setFocus()\n \n def initSettings(self):\n global settingsFile\n global historyList\n settingsFile = QSettings(\"emoji-picker-qtpy\", \"history\");\n historyList = settingsFile.value('history/history')\n\n # key handling\n keyPressed = pyqtSignal(int)\n def keyPressEvent(self, event):\n super(EmojiPickerWindow, self).keyPressEvent(event)\n self.keyPressed.emit(event.key())\n\n # focus handling\n global willExitOnItsOwn\n def eventFilter(self, object, event):\n if event.type()== QEvent.WindowDeactivate or event.type()== QEvent.FocusOut:\n if (not willExitOnItsOwn):\n quitNicely()\n return False\n\n# clickable label\nclass QClickableLabel(QLabel):\n clicked=pyqtSignal(str)\n def __init__(self, parent=None):\n QLabel.__init__(self, parent)\n\n def mousePressEvent(self, ev):\n 
self.clicked.emit(self.text()) \n\n def enterEvent(self, ev):\n emoji_hovered(self)\n\n# keyboard handling override for QlineEdit\nclass QLineEditWithArrows(QLineEdit):\n def keyPressEvent(self, ev):\n global selectedEmojiChar\n global foundAnyEmoji\n if(ev.key() == Qt.Key_Right):\n move_selection(\"right\")\n if(ev.key() == Qt.Key_Left):\n move_selection(\"left\")\n if(ev.key() == Qt.Key_Up):\n move_selection(\"up\")\n if(ev.key() == Qt.Key_Down):\n move_selection(\"down\")\n if(ev.key() == Qt.Key_Return or ev.key() == Qt.Key_Enter):\n if foundAnyEmoji:\n execute_emoji(selectedEmojiChar)\n else:\n quitNicely()\n if(ev.key() == Qt.Key_Tab):\n pass\n else:\n QLineEdit.keyPressEvent(self,ev)\n \nif __name__ == '__main__':\n app = QApplication(sys.argv)\n ex = EmojiPickerWindow()\n sys.exit(app.exec_())\n",
"step-ids": [
17,
19,
21,
24,
29
]
}
|
[
17,
19,
21,
24,
29
] |
# Create your models here.
from django.db import models
from django.utils import timezone
from django.db import models
# Create your models here.
#필드 개수가 다르다.
class Post(models.Model):
    """A blog post written by a site user."""

    # The author of this Post; on_delete=CASCADE means the posts are deleted
    # together with the referenced auth.User (the posts "belong to" the user).
    author = models.ForeignKey('auth.User', on_delete=models.CASCADE)
    title = models.CharField(max_length=200)  # title of the blog article
    text = models.TextField()  # body text with no length limit
    # Filled in automatically at creation time (required field with a default).
    created_date = models.DateTimeField(
        default=timezone.now)  # date and time of creation
    # May be left empty: blank=True is the form-level rule, null=True the DB-level one.
    published_date = models.DateTimeField(
        blank=True, null=True)  # allows the field to be blank in forms; NULL in the database

    def publish(self):
        """Mark the post as published right now and persist the change."""
        # set published_date to the current time and save
        self.published_date = timezone.now()
        self.save()

    def __str__(self):
        """Use the title as the human-readable representation."""
        return self.title
|
normal
|
{
"blob_id": "fe5398b03d2f0cfc7c972677faa0ea3ec701469e",
"index": 7858,
"step-1": "<mask token>\n\n\nclass Post(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def publish(self):\n self.published_date = timezone.now()\n self.save()\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass Post(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def publish(self):\n self.published_date = timezone.now()\n self.save()\n\n def __str__(self):\n return self.title\n",
"step-3": "<mask token>\n\n\nclass Post(models.Model):\n author = models.ForeignKey('auth.User', on_delete=models.CASCADE)\n title = models.CharField(max_length=200)\n text = models.TextField()\n created_date = models.DateTimeField(default=timezone.now)\n published_date = models.DateTimeField(blank=True, null=True)\n\n def publish(self):\n self.published_date = timezone.now()\n self.save()\n\n def __str__(self):\n return self.title\n",
"step-4": "from django.db import models\nfrom django.utils import timezone\nfrom django.db import models\n\n\nclass Post(models.Model):\n author = models.ForeignKey('auth.User', on_delete=models.CASCADE)\n title = models.CharField(max_length=200)\n text = models.TextField()\n created_date = models.DateTimeField(default=timezone.now)\n published_date = models.DateTimeField(blank=True, null=True)\n\n def publish(self):\n self.published_date = timezone.now()\n self.save()\n\n def __str__(self):\n return self.title\n",
"step-5": "# Create your models here.\nfrom django.db import models\nfrom django.utils import timezone\nfrom django.db import models\n\n# Create your models here.\n#필드 개수가 다르다.\n\nclass Post(models.Model):\n #이 Post의 저자이다라는 의미, CASCADE : 종속이라는 의미\n author = models.ForeignKey('auth.User', on_delete=models.CASCADE)\n title = models.CharField(max_length=200) #블로그 기사의 제목\n text = models.TextField() # 글자수에 제한 없는 텍스트\n #생성자를 만들때마다, 반드시 필수 파라미터가 존재해야한다.\n created_date = models.DateTimeField(\n default=timezone.now) # 날짜와 시간\n #Null Field를 허용\n published_date = models.DateTimeField(\n blank=True, null=True) # 필드가 폼에서 빈 채로 저장되는 것을 허용, null은 DB 관점\n\n def publish(self):\n #published_data를 지금날짜로 바꾸고 save\n self.published_date = timezone.now()\n self.save()\n\n def __str__(self):\n return self.title",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def DFS(idx, cost, cur_loc):
global min_cost
if min_cost < cost:
return
if idx == N and arr[cur_loc][0]:
if min_cost > cost + arr[cur_loc][0]:
min_cost = cost + arr[cur_loc][0]
return
for i in range(1, N):
if way[i] or not arr[cur_loc][i]:
continue
way[i] = 1
DFS(idx + 1, cost + arr[cur_loc][i], i)
way[i] = 0
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def DFS(idx, cost, cur_loc):
global min_cost
if min_cost < cost:
return
if idx == N and arr[cur_loc][0]:
if min_cost > cost + arr[cur_loc][0]:
min_cost = cost + arr[cur_loc][0]
return
for i in range(1, N):
if way[i] or not arr[cur_loc][i]:
continue
way[i] = 1
DFS(idx + 1, cost + arr[cur_loc][i], i)
way[i] = 0
<|reserved_special_token_0|>
DFS(1, 0, 0)
print(min_cost)
<|reserved_special_token_1|>
def DFS(idx, cost, cur_loc):
global min_cost
if min_cost < cost:
return
if idx == N and arr[cur_loc][0]:
if min_cost > cost + arr[cur_loc][0]:
min_cost = cost + arr[cur_loc][0]
return
for i in range(1, N):
if way[i] or not arr[cur_loc][i]:
continue
way[i] = 1
DFS(idx + 1, cost + arr[cur_loc][i], i)
way[i] = 0
N = int(input())
arr = [list(map(int, input().split())) for _ in range(N)]
way = [0] * N
min_cost = 100 * N
DFS(1, 0, 0)
print(min_cost)
<|reserved_special_token_1|>
# 1-[2-3-4-5]-1
# Traveling-salesman style search: explore city orderings like permutations
# and prune with backtracking as soon as a partial tour is already too costly.
def DFS(idx, cost, cur_loc):
    """Depth-first search over tours that start and end at city 0.

    idx     -- number of cities visited so far (city 0 counts as the first)
    cost    -- accumulated travel cost of the partial tour
    cur_loc -- index of the city we are currently at
    Updates the global `min_cost` with the cheapest complete tour found.
    Uses globals N (city count), arr (cost matrix) and way (visited flags).
    """
    global min_cost
    # prune: this partial tour already costs more than the best complete one
    if min_cost < cost: return
    # all cities visited and there is a road back to the start (0 means no road)
    if idx == N and arr[cur_loc][0]:
        if min_cost > cost + arr[cur_loc][0]:
            min_cost = cost + arr[cur_loc][0]
        return
    for i in range(1, N):
        # skip cities already on the tour and missing roads (a 0 entry)
        if way[i] or not arr[cur_loc][i] : continue
        way[i] =1
        DFS(idx+1, cost+arr[cur_loc][i], i)
        way[i] = 0

N = int(input())
# N x N cost matrix; arr[a][b] == 0 means there is no road from a to b
arr = [list(map(int, input().split())) for _ in range(N)]
way = [0] * N          # visited flag per city (city 0 is the fixed start)
min_cost = 100 * N     # initial upper bound -- assumes each leg costs <= 100; TODO confirm input bounds
DFS(1, 0, 0)
print(min_cost)
|
flexible
|
{
"blob_id": "4ff7e83c6e85a041578a8b3471cbbb7e0c2543e6",
"index": 2663,
"step-1": "<mask token>\n",
"step-2": "def DFS(idx, cost, cur_loc):\n global min_cost\n if min_cost < cost:\n return\n if idx == N and arr[cur_loc][0]:\n if min_cost > cost + arr[cur_loc][0]:\n min_cost = cost + arr[cur_loc][0]\n return\n for i in range(1, N):\n if way[i] or not arr[cur_loc][i]:\n continue\n way[i] = 1\n DFS(idx + 1, cost + arr[cur_loc][i], i)\n way[i] = 0\n\n\n<mask token>\n",
"step-3": "def DFS(idx, cost, cur_loc):\n global min_cost\n if min_cost < cost:\n return\n if idx == N and arr[cur_loc][0]:\n if min_cost > cost + arr[cur_loc][0]:\n min_cost = cost + arr[cur_loc][0]\n return\n for i in range(1, N):\n if way[i] or not arr[cur_loc][i]:\n continue\n way[i] = 1\n DFS(idx + 1, cost + arr[cur_loc][i], i)\n way[i] = 0\n\n\n<mask token>\nDFS(1, 0, 0)\nprint(min_cost)\n",
"step-4": "def DFS(idx, cost, cur_loc):\n global min_cost\n if min_cost < cost:\n return\n if idx == N and arr[cur_loc][0]:\n if min_cost > cost + arr[cur_loc][0]:\n min_cost = cost + arr[cur_loc][0]\n return\n for i in range(1, N):\n if way[i] or not arr[cur_loc][i]:\n continue\n way[i] = 1\n DFS(idx + 1, cost + arr[cur_loc][i], i)\n way[i] = 0\n\n\nN = int(input())\narr = [list(map(int, input().split())) for _ in range(N)]\nway = [0] * N\nmin_cost = 100 * N\nDFS(1, 0, 0)\nprint(min_cost)\n",
"step-5": "# 1-[2-3-4-5]-1\n# 순열로 돌리고, 백트래킹으로 걷어내기\n\ndef DFS(idx, cost, cur_loc):\n global min_cost\n if min_cost < cost: return\n if idx == N and arr[cur_loc][0]:\n if min_cost > cost + arr[cur_loc][0]:\n min_cost = cost + arr[cur_loc][0]\n return\n for i in range(1, N):\n if way[i] or not arr[cur_loc][i] : continue\n way[i] =1\n DFS(idx+1, cost+arr[cur_loc][i], i)\n way[i] = 0\n\nN = int(input())\narr = [list(map(int, input().split())) for _ in range(N)]\nway = [0] * N\nmin_cost = 100 * N\nDFS(1, 0, 0)\nprint(min_cost)",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import os
import json
import librosa
# Constants
# Dataset used for training
DATASET_PATH = "dataset"
# Where the data is stored
JSON_PATH = "data.json"
# Number of samples considered to preprocess data
SAMPLES_TO_CONSIDER = 22050 # 1 sec worth of sound
# Main function to preprocess the data
def prepare_dataset(dataset_path, json_path, n_mfcc=13, hop_length=512, n_fft=2048):
    """Extract MFCC features for every audio file under *dataset_path*.

    Walks the dataset tree (one sub-directory per keyword/category), loads
    each audio file, keeps only files at least 1 second long, truncates them
    to exactly 1 second, extracts MFCCs and dumps everything to *json_path*.

    :param dataset_path: root folder with one sub-folder per keyword
    :param json_path: output path for the JSON feature file
    :param n_mfcc: number of MFCC coefficients per frame
    :param hop_length: hop length (in samples) between analysis frames
    :param n_fft: FFT window size (in samples)
    """
    # accumulated output
    data = {
        "mappings": [],  # keyword/category names (one per sub-directory)
        "labels": [],    # category index for each audio file
        "MFCCs": [],     # MFCC matrix (frames x n_mfcc) for each audio file
        "files": []      # filename with path for each audio file
    }

    # walk the folder structure recursively, top-down
    for i, (dir_path, dir_names, filenames) in enumerate(os.walk(dataset_path)):
        # Skip the dataset root itself; only sub-directories hold categories.
        # FIX: the original used `is not` (object identity), which only worked
        # by accident in CPython -- compare by value instead.
        if dir_path != dataset_path:
            # category name is the sub-directory name, e.g. dataset/wahad -> wahad
            # FIX: os.path.basename is portable; splitting on "\\" was Windows-only
            category = os.path.basename(dir_path)
            data["mappings"].append(category)
            print(f"Processing {category}")

            # loop through the files in this category and extract MFCCs
            for f in filenames:
                file_path = os.path.join(dir_path, f)

                # load audio file
                signal, sr = librosa.load(file_path)

                # only keep audio that is at least 1 second long
                if len(signal) >= SAMPLES_TO_CONSIDER:
                    # enforce an exactly 1 sec. long signal
                    signal = signal[:SAMPLES_TO_CONSIDER]

                    # extract the MFCCs
                    MFCCs = librosa.feature.mfcc(signal, n_mfcc=n_mfcc,
                                                 hop_length=hop_length, n_fft=n_fft)

                    # store data; `i - 1` because enumerate index 0 is the skipped root
                    data["labels"].append(i - 1)
                    data["MFCCs"].append(MFCCs.T.tolist())
                    data["files"].append(file_path)
                    print(f"{file_path}: {i - 1}")

    # persist everything as JSON
    with open(json_path, "w", encoding="utf-8") as fp:
        json.dump(data, fp, indent=4)


if __name__ == "__main__":
    prepare_dataset(DATASET_PATH, JSON_PATH)
|
normal
|
{
"blob_id": "ba808d23f6a8226f40e1c214012a1535ee1e9e98",
"index": 2947,
"step-1": "<mask token>\n\n\ndef prepare_dataset(dataset_path, json_path, n_mfcc=13, hop_length=512,\n n_fft=2048):\n data = {'mappings': [], 'labels': [], 'MFCCs': [], 'files': []}\n for i, (dir_path, dir_names, filenames) in enumerate(os.walk(dataset_path)\n ):\n if dir_path is not dataset_path:\n category = dir_path.split('\\\\')[-1]\n data['mappings'].append(category)\n print(f'Processing {category}')\n for f in filenames:\n file_path = os.path.join(dir_path, f)\n signal, sr = librosa.load(file_path)\n if len(signal) >= SAMPLES_TO_CONSIDER:\n signal = signal[:SAMPLES_TO_CONSIDER]\n MFCCs = librosa.feature.mfcc(signal, n_mfcc=n_mfcc,\n hop_length=hop_length, n_fft=n_fft)\n data['labels'].append(i - 1)\n data['MFCCs'].append(MFCCs.T.tolist())\n data['files'].append(file_path)\n print(f'{file_path}: {i - 1}')\n with open(json_path, 'w') as fp:\n json.dump(data, fp, indent=4)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef prepare_dataset(dataset_path, json_path, n_mfcc=13, hop_length=512,\n n_fft=2048):\n data = {'mappings': [], 'labels': [], 'MFCCs': [], 'files': []}\n for i, (dir_path, dir_names, filenames) in enumerate(os.walk(dataset_path)\n ):\n if dir_path is not dataset_path:\n category = dir_path.split('\\\\')[-1]\n data['mappings'].append(category)\n print(f'Processing {category}')\n for f in filenames:\n file_path = os.path.join(dir_path, f)\n signal, sr = librosa.load(file_path)\n if len(signal) >= SAMPLES_TO_CONSIDER:\n signal = signal[:SAMPLES_TO_CONSIDER]\n MFCCs = librosa.feature.mfcc(signal, n_mfcc=n_mfcc,\n hop_length=hop_length, n_fft=n_fft)\n data['labels'].append(i - 1)\n data['MFCCs'].append(MFCCs.T.tolist())\n data['files'].append(file_path)\n print(f'{file_path}: {i - 1}')\n with open(json_path, 'w') as fp:\n json.dump(data, fp, indent=4)\n\n\nif __name__ == '__main__':\n prepare_dataset(DATASET_PATH, JSON_PATH)\n",
"step-3": "<mask token>\nDATASET_PATH = 'dataset'\nJSON_PATH = 'data.json'\nSAMPLES_TO_CONSIDER = 22050\n\n\ndef prepare_dataset(dataset_path, json_path, n_mfcc=13, hop_length=512,\n n_fft=2048):\n data = {'mappings': [], 'labels': [], 'MFCCs': [], 'files': []}\n for i, (dir_path, dir_names, filenames) in enumerate(os.walk(dataset_path)\n ):\n if dir_path is not dataset_path:\n category = dir_path.split('\\\\')[-1]\n data['mappings'].append(category)\n print(f'Processing {category}')\n for f in filenames:\n file_path = os.path.join(dir_path, f)\n signal, sr = librosa.load(file_path)\n if len(signal) >= SAMPLES_TO_CONSIDER:\n signal = signal[:SAMPLES_TO_CONSIDER]\n MFCCs = librosa.feature.mfcc(signal, n_mfcc=n_mfcc,\n hop_length=hop_length, n_fft=n_fft)\n data['labels'].append(i - 1)\n data['MFCCs'].append(MFCCs.T.tolist())\n data['files'].append(file_path)\n print(f'{file_path}: {i - 1}')\n with open(json_path, 'w') as fp:\n json.dump(data, fp, indent=4)\n\n\nif __name__ == '__main__':\n prepare_dataset(DATASET_PATH, JSON_PATH)\n",
"step-4": "import os\nimport json\nimport librosa\nDATASET_PATH = 'dataset'\nJSON_PATH = 'data.json'\nSAMPLES_TO_CONSIDER = 22050\n\n\ndef prepare_dataset(dataset_path, json_path, n_mfcc=13, hop_length=512,\n n_fft=2048):\n data = {'mappings': [], 'labels': [], 'MFCCs': [], 'files': []}\n for i, (dir_path, dir_names, filenames) in enumerate(os.walk(dataset_path)\n ):\n if dir_path is not dataset_path:\n category = dir_path.split('\\\\')[-1]\n data['mappings'].append(category)\n print(f'Processing {category}')\n for f in filenames:\n file_path = os.path.join(dir_path, f)\n signal, sr = librosa.load(file_path)\n if len(signal) >= SAMPLES_TO_CONSIDER:\n signal = signal[:SAMPLES_TO_CONSIDER]\n MFCCs = librosa.feature.mfcc(signal, n_mfcc=n_mfcc,\n hop_length=hop_length, n_fft=n_fft)\n data['labels'].append(i - 1)\n data['MFCCs'].append(MFCCs.T.tolist())\n data['files'].append(file_path)\n print(f'{file_path}: {i - 1}')\n with open(json_path, 'w') as fp:\n json.dump(data, fp, indent=4)\n\n\nif __name__ == '__main__':\n prepare_dataset(DATASET_PATH, JSON_PATH)\n",
"step-5": "import os\nimport json\nimport librosa\n\n# Constants\n# Dataset used for training\nDATASET_PATH = \"dataset\"\n# Where the data is stored\nJSON_PATH = \"data.json\"\n# Number of samples considered to preprocess data\nSAMPLES_TO_CONSIDER = 22050 # 1 sec worth of sound\n\n\n# Main function to preprocess the data\ndef prepare_dataset(dataset_path, json_path, n_mfcc=13, hop_length=512, n_fft=2048):\n # create data dictionary\n data = {\n \"mappings\": [], # keywords\n \"labels\": [], # a value for each audio file in the dataset\n \"MFCCs\": [], # MFCC for each audio file\n \"files\": [] # filenames with path for each audio file\n }\n # loop through all the sub-dirs\n # walk through a folder structure recursively top-down\n for i, (dir_path, dir_names, filenames) in enumerate(os.walk(dataset_path)):\n # we need to ensure that we are not at root level\n if dir_path is not dataset_path:\n # update mappings\n category = dir_path.split(\"\\\\\")[-1] # category name ex: dataset\\\\wahad -> [dataset, wahad]\n data[\"mappings\"].append(category)\n print(f\"Processing {category}\")\n\n # loop through filenames and extract MFCCs\n for f in filenames:\n # get file path\n file_path = os.path.join(dir_path, f) # gives us the whole file path\n\n # load audio file\n signal, sr = librosa.load(file_path)\n\n # ensure the audio file is at least 1 second\n if len(signal) >= SAMPLES_TO_CONSIDER:\n # enforce on 1 sec. long signal\n signal = signal[:SAMPLES_TO_CONSIDER]\n\n # extract the MFCCs\n MFCCs = librosa.feature.mfcc(signal, n_mfcc=n_mfcc,\n hop_length=hop_length, n_fft=n_fft)\n\n # store data\n data[\"labels\"].append(i - 1)\n data[\"MFCCs\"].append(MFCCs.T.tolist())\n data[\"files\"].append(file_path)\n print(f\"{file_path}: {i - 1}\")\n\n # store in json file\n with open(json_path, \"w\") as fp:\n json.dump(data, fp, indent=4)\n\n\nif __name__ == \"__main__\":\n prepare_dataset(DATASET_PATH, JSON_PATH)\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
class _PULPIER:
<|reserved_special_token_0|>
<|reserved_special_token_1|>
class _PULPIER:
def __init__(self):
self.name = 'PULPIER'
self.definitions = pulpy
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.basic = ['pulpy']
<|reserved_special_token_1|>
# class header
class _PULPIER():
    """Container object for the word "PULPIER" and its associated metadata."""
    def __init__(self,):
        self.name = "PULPIER"
        # NOTE(review): `pulpy` is not defined in this chunk -- presumably a
        # module-level definitions object provided elsewhere; confirm before use.
        self.definitions = pulpy

        self.parents = []      # related parent entries (empty by default)
        self.childen = []      # related child entries (sic: "childen")
        self.properties = []
        self.jsondata = {}

        self.basic = ['pulpy']  # base form(s) of the word
|
flexible
|
{
"blob_id": "a1d1056f302cf7bc050537dd8cc53cdb2da7e989",
"index": 5507,
"step-1": "<mask token>\n",
"step-2": "class _PULPIER:\n <mask token>\n",
"step-3": "class _PULPIER:\n\n def __init__(self):\n self.name = 'PULPIER'\n self.definitions = pulpy\n self.parents = []\n self.childen = []\n self.properties = []\n self.jsondata = {}\n self.basic = ['pulpy']\n",
"step-4": "\n\n#calss header\nclass _PULPIER():\n\tdef __init__(self,): \n\t\tself.name = \"PULPIER\"\n\t\tself.definitions = pulpy\n\n\t\tself.parents = []\n\t\tself.childen = []\n\t\tself.properties = []\n\t\tself.jsondata = {}\n\n\n\t\tself.basic = ['pulpy']\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
#!/usr/bin/env python
"""
This code is for testing the region growing.

NOTE: written for Python 2 (uses `print` statements). It loads a z-statistic
volume, runs region growing from a fixed seed voxel over a sweep of
thresholds, plots region size versus threshold, and saves the region grown
at the last threshold.
"""
import os
import sys
import time
import nibabel as nib
import region_growing as rg
import matplotlib.pyplot as plt
import numpy as np

# load the z-statistic volume to grow regions in
img = nib.load("zstat1.nii.gz")
data = img.get_data()
#test coor [36,60,28] [21,39,30] [23,38,30]
# seed voxel coordinate and an upper bound on region size
coor = [23,38,30]
num = 10000

size_list = []
st = time.time()

# sweep the growing threshold from 0.1 to 4.9 in steps of 0.1
for t in range(1,50):
    t = t/10.0
    print t
    # rg.region_growing presumably returns (region image, region size);
    # the trailing 6 looks like the voxel-neighbourhood connectivity -- TODO confirm
    region_img,size = rg.region_growing(data,coor,float(t),num,6)
    print "Totoal time is :%s"%(time.time()-st)
    size_list.append([t,size])

print size_list
size_list = np.array(size_list)

# plot region size against threshold
plt.plot(size_list[:,0],size_list[:,1],'ro')
plt.show()

# save only the region from the LAST threshold (region_img from the final pass)
result = img
result._data = region_img
nib.save(result,"region.nii.gz")
|
normal
|
{
"blob_id": "6bcddd1b2ec8653400f710e5cab552d4bec75b6b",
"index": 1162,
"step-1": "#!/usr/bin/env python\n\"\"\"\nThis code is fot testing the region growing.\n\"\"\"\nimport os\nimport sys\nimport time\nimport nibabel as nib\nimport region_growing as rg\nimport matplotlib.pyplot as plt \nimport numpy as np\n\nimg = nib.load(\"zstat1.nii.gz\")\ndata = img.get_data()\n#test coor [36,60,28] [21,39,30] [23,38,30]\ncoor = [23,38,30]\nnum = 10000\n\nsize_list = []\nst = time.time()\n\nfor t in range(1,50):\n t = t/10.0\n print t\n region_img,size = rg.region_growing(data,coor,float(t),num,6)\n print \"Totoal time is :%s\"%(time.time()-st)\n size_list.append([t,size])\n\nprint size_list\nsize_list = np.array(size_list)\n\nplt.plot(size_list[:,0],size_list[:,1],'ro')\nplt.show()\n\nresult = img\nresult._data = region_img\nnib.save(result,\"region.nii.gz\")\n\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
#https://docs.python.org/3.4/library/itertools.html#module-itertools
# Sample weighted edge list of (src, dst, weight) tuples.
l = [(1, 2, 9), (1, 3, 12), (2, 3, 8), (2, 4, 4), (2, 5, 7), (3, 5, 5), (3, 6, 2), (4, 5, 2), (4, 7, 10),
     (5, 6, 11), (5, 7, 2), (6, 8, 4), (7, 8, 4), (7, 9, 3), (8, 9, 13)]
# Sample list of country names (not referenced elsewhere in this chunk).
b = ['America', 'Sudan', 'Srilanka', 'Pakistan', 'Nepal', 'India', 'France']
from itertools import groupby, filterfalse, dropwhile, cycle, count, repeat, chain, takewhile, islice, zip_longest
from collections import defaultdict
#NOTE- always use itertools with sorted list if index of element is not issue to your solution
def itertools_groupby_example(list_of_nodes):
    """Group consecutive (src, dst, ...) tuples by their first element.

    FIX: the original iterated the module-level global `l` and silently
    ignored its `list_of_nodes` parameter; the parameter is now used.

    NOTE: groupby() only merges *consecutive* items with equal keys, so the
    input should already be sorted by the grouping key.

    :param list_of_nodes: iterable of tuples whose first element is the key
    :return: dict mapping each key to a list of groups (each group a list)
    """
    graph = defaultdict(list)
    for key, group in groupby(list_of_nodes, lambda x: x[0]):
        graph[key].append(list(group))
    result = dict(graph)
    print(result)
    return result  # returned as well so callers/tests can inspect it
def itertools_false_filter_example(iterator):
    """Print and return the items for which x > 10 is False (i.e. items <= 10).

    Uses itertools.filterfalse, which keeps exactly the items the predicate
    rejects.

    :param iterator: iterable of comparable numbers
    :return: list of the kept items (also printed, as before)
    """
    kept = list(filterfalse(lambda x: x > 10, iterator))  # was a manual append loop
    print(kept)
    return kept
def itertools_dropwhile_example(iterator):
    """Drop the leading items while x > 10, then print and return the rest.

    Unlike filterfalse, dropwhile only skips the *initial* run of matching
    items; once the predicate fails, everything after is kept unconditionally.

    :param iterator: iterable of comparable numbers
    :return: list of the remaining items (also printed, as before)
    """
    remaining = list(dropwhile(lambda x: x > 10, iterator))  # was a manual append loop
    print(remaining)
    return remaining
def itertools_takewhile_example(iterator):
    """Print and return the leading items while x > 10 (stops at first failure).

    :param iterator: iterable of comparable numbers
    :return: list of the taken prefix (also printed, as before)
    """
    print(iterator)  # kept from the original: echoes the raw input first
    prefix = list(takewhile(lambda x: x > 10, iterator))  # was a manual append loop
    print(prefix)
    return prefix
def itertools_cycle_example(iterator):
    """Endlessly print the elements of *iterator* using itertools.cycle.

    WARNING: cycle() repeats the iterable forever, so for any non-empty
    input this loop never terminates; interrupt manually (Ctrl+C).
    """
    for item in cycle(iterator):
        print(item)
def itertools_count_example():
    """Print the infinite sequence 1, 2, 3, ... using itertools.count.

    WARNING: count() is an infinite iterator, so this loop never terminates;
    interrupt manually (Ctrl+C).
    """
    for item in count(start=1, step=1):
        print(item)
def itertools_repeat_example():
    """Print the value 10 five times using itertools.repeat(10, 5).

    FIX: the original printed the literal 3 and ignored the loop variable,
    which contradicted the repeat(10, 5) demo; print the repeated value.
    """
    for item in repeat(10, 5):
        print(item)
def itertools_chain_example(iterator1, iterator2):
    """Print and return the concatenation of two iterables via itertools.chain.

    :param iterator1: first iterable
    :param iterator2: second iterable, appended after the first
    :return: list of all items of *iterator1* followed by *iterator2*
    """
    combined = list(chain(iterator1, iterator2))  # was a manual append loop
    print(combined)
    return combined
def itertools_islice_example(iterator):
    """Print and return every 2nd item from the first 10 items of *iterator*.

    islice(it, 0, 10, 2) lazily yields positions 0, 2, 4, 6, 8 without
    materializing a slice of the underlying iterable.

    :param iterator: any iterable
    :return: list of the selected items (also printed, as before)
    """
    selected = list(islice(iterator, 0, 10, 2))  # was a manual append loop
    print(selected)
    return selected
def itertools_chain_from_iterable_examaple():
    """Flatten one level of nesting with chain.from_iterable; print/return it.

    NOTE: the misspelled name ("examaple") is kept deliberately so any
    existing callers keep working.

    :return: the flattened list [2, 3, 4, 2, 5, 6]
    """
    flattened = list(chain.from_iterable([[2, 3, 4], [2, 5, 6]]))  # was a manual append loop
    print(flattened)
    return flattened
def itertools_zip_longest():
    """Demonstrate zip_longest: walk three lists of different lengths in
    lockstep, padding the exhausted ones with None, printing each tuple."""
    colors = ['red', 'orange', 'yellow', 'green', 'blue']
    numbers = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
    letters = ['a', 'b', 'c']
    # zip_longest keeps going until the LONGEST input (numbers) is exhausted
    zipped = zip_longest(colors, numbers, letters, fillvalue=None)
    for triple in zipped:
        print(triple)
# Sample inputs for the demo functions above.
iterator = [11,15,2,5,8,10,50,8,2,3,90,80,100]
iterator1 = [0,10,20,30,40,50,60,70,80,90,100,5]
iterator2 = ['a','b','c']
# Uncomment one of the calls below to run the corresponding demo
# (the cycle and count demos loop forever -- interrupt with Ctrl+C):
#itertools_false_filter_example(iterator1)
#itertools_dropwhile_example(iterator1)
#itertools_cycle_example(iterator1)
#itertools_count_example()
#itertools_repeat_example()
#itertools_chain_example(iterator1, iterator2)
#itertools_takewhile_example(iterator)
#itertools_islice_example(iterator)
#itertools_chain_from_iterable_examaple()
#itertools_zip_longest()
|
normal
|
{
"blob_id": "629353392e3a4f346f734543ae3f2b8dc616a6c3",
"index": 5816,
"step-1": "<mask token>\n\n\ndef itertools_groupby_example(list_of_nodes):\n graph = defaultdict(list)\n for key, group in groupby(l, lambda x: x[0]):\n graph[key].append(list(group))\n print(dict(graph))\n\n\ndef itertools_false_filter_example(iterator):\n l = []\n for item in filterfalse(lambda x: x > 10, iterator):\n l.append(item)\n print(l)\n\n\ndef itertools_dropwhile_example(iterator):\n l = []\n for item in dropwhile(lambda x: x > 10, iterator):\n l.append(item)\n print(l)\n\n\ndef itertools_takewhile_example(iterator):\n l = []\n print(iterator)\n for item in takewhile(lambda x: x > 10, iterator):\n l.append(item)\n print(l)\n\n\ndef itertools_cycle_example(iterator):\n for item in cycle(iterator):\n print(item)\n\n\n<mask token>\n\n\ndef itertools_chain_from_iterable_examaple():\n l = []\n for item in chain.from_iterable([[2, 3, 4], [2, 5, 6]]):\n l.append(item)\n print(l)\n\n\ndef itertools_zip_longest():\n l1 = ['red', 'orange', 'yellow', 'green', 'blue']\n l2 = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]\n l3 = ['a', 'b', 'c']\n for item in zip_longest(l1, l2, l3, fillvalue=None):\n print(item)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef itertools_groupby_example(list_of_nodes):\n graph = defaultdict(list)\n for key, group in groupby(l, lambda x: x[0]):\n graph[key].append(list(group))\n print(dict(graph))\n\n\ndef itertools_false_filter_example(iterator):\n l = []\n for item in filterfalse(lambda x: x > 10, iterator):\n l.append(item)\n print(l)\n\n\ndef itertools_dropwhile_example(iterator):\n l = []\n for item in dropwhile(lambda x: x > 10, iterator):\n l.append(item)\n print(l)\n\n\ndef itertools_takewhile_example(iterator):\n l = []\n print(iterator)\n for item in takewhile(lambda x: x > 10, iterator):\n l.append(item)\n print(l)\n\n\ndef itertools_cycle_example(iterator):\n for item in cycle(iterator):\n print(item)\n\n\ndef itertools_count_example():\n for item in count(start=1, step=1):\n print(item)\n\n\n<mask token>\n\n\ndef itertools_chain_example(iterator1, iterator2):\n l = []\n for item in chain(iterator1, iterator2):\n l.append(item)\n print(l)\n\n\ndef itertools_islice_example(iterator):\n l = []\n for item in islice(iterator, 0, 10, 2):\n l.append(item)\n print(l)\n\n\ndef itertools_chain_from_iterable_examaple():\n l = []\n for item in chain.from_iterable([[2, 3, 4], [2, 5, 6]]):\n l.append(item)\n print(l)\n\n\ndef itertools_zip_longest():\n l1 = ['red', 'orange', 'yellow', 'green', 'blue']\n l2 = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]\n l3 = ['a', 'b', 'c']\n for item in zip_longest(l1, l2, l3, fillvalue=None):\n print(item)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef itertools_groupby_example(list_of_nodes):\n graph = defaultdict(list)\n for key, group in groupby(l, lambda x: x[0]):\n graph[key].append(list(group))\n print(dict(graph))\n\n\ndef itertools_false_filter_example(iterator):\n l = []\n for item in filterfalse(lambda x: x > 10, iterator):\n l.append(item)\n print(l)\n\n\ndef itertools_dropwhile_example(iterator):\n l = []\n for item in dropwhile(lambda x: x > 10, iterator):\n l.append(item)\n print(l)\n\n\ndef itertools_takewhile_example(iterator):\n l = []\n print(iterator)\n for item in takewhile(lambda x: x > 10, iterator):\n l.append(item)\n print(l)\n\n\ndef itertools_cycle_example(iterator):\n for item in cycle(iterator):\n print(item)\n\n\ndef itertools_count_example():\n for item in count(start=1, step=1):\n print(item)\n\n\ndef itertools_repeat_example():\n for item in repeat(10, 5):\n print(3)\n\n\ndef itertools_chain_example(iterator1, iterator2):\n l = []\n for item in chain(iterator1, iterator2):\n l.append(item)\n print(l)\n\n\ndef itertools_islice_example(iterator):\n l = []\n for item in islice(iterator, 0, 10, 2):\n l.append(item)\n print(l)\n\n\ndef itertools_chain_from_iterable_examaple():\n l = []\n for item in chain.from_iterable([[2, 3, 4], [2, 5, 6]]):\n l.append(item)\n print(l)\n\n\ndef itertools_zip_longest():\n l1 = ['red', 'orange', 'yellow', 'green', 'blue']\n l2 = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]\n l3 = ['a', 'b', 'c']\n for item in zip_longest(l1, l2, l3, fillvalue=None):\n print(item)\n\n\n<mask token>\n",
"step-4": "l = [(1, 2, 9), (1, 3, 12), (2, 3, 8), (2, 4, 4), (2, 5, 7), (3, 5, 5), (3,\n 6, 2), (4, 5, 2), (4, 7, 10), (5, 6, 11), (5, 7, 2), (6, 8, 4), (7, 8, \n 4), (7, 9, 3), (8, 9, 13)]\nb = ['America', 'Sudan', 'Srilanka', 'Pakistan', 'Nepal', 'India', 'France']\n<mask token>\n\n\ndef itertools_groupby_example(list_of_nodes):\n graph = defaultdict(list)\n for key, group in groupby(l, lambda x: x[0]):\n graph[key].append(list(group))\n print(dict(graph))\n\n\ndef itertools_false_filter_example(iterator):\n l = []\n for item in filterfalse(lambda x: x > 10, iterator):\n l.append(item)\n print(l)\n\n\ndef itertools_dropwhile_example(iterator):\n l = []\n for item in dropwhile(lambda x: x > 10, iterator):\n l.append(item)\n print(l)\n\n\ndef itertools_takewhile_example(iterator):\n l = []\n print(iterator)\n for item in takewhile(lambda x: x > 10, iterator):\n l.append(item)\n print(l)\n\n\ndef itertools_cycle_example(iterator):\n for item in cycle(iterator):\n print(item)\n\n\ndef itertools_count_example():\n for item in count(start=1, step=1):\n print(item)\n\n\ndef itertools_repeat_example():\n for item in repeat(10, 5):\n print(3)\n\n\ndef itertools_chain_example(iterator1, iterator2):\n l = []\n for item in chain(iterator1, iterator2):\n l.append(item)\n print(l)\n\n\ndef itertools_islice_example(iterator):\n l = []\n for item in islice(iterator, 0, 10, 2):\n l.append(item)\n print(l)\n\n\ndef itertools_chain_from_iterable_examaple():\n l = []\n for item in chain.from_iterable([[2, 3, 4], [2, 5, 6]]):\n l.append(item)\n print(l)\n\n\ndef itertools_zip_longest():\n l1 = ['red', 'orange', 'yellow', 'green', 'blue']\n l2 = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]\n l3 = ['a', 'b', 'c']\n for item in zip_longest(l1, l2, l3, fillvalue=None):\n print(item)\n\n\niterator = [11, 15, 2, 5, 8, 10, 50, 8, 2, 3, 90, 80, 100]\niterator1 = [0, 10, 20, 30, 40, 50, 60, 70, 80, 90, 100, 5]\niterator2 = ['a', 'b', 'c']\n",
"step-5": "#https://docs.python.org/3.4/library/itertools.html#module-itertools\n\n\nl = [(1, 2, 9), (1, 3, 12), (2, 3, 8), (2, 4, 4), (2, 5, 7), (3, 5, 5), (3, 6, 2), (4, 5, 2), (4, 7, 10),\n (5, 6, 11), (5, 7, 2), (6, 8, 4), (7, 8, 4), (7, 9, 3), (8, 9, 13)]\n\nb = ['America', 'Sudan', 'Srilanka', 'Pakistan', 'Nepal', 'India', 'France']\n\nfrom itertools import groupby, filterfalse, dropwhile, cycle, count, repeat, chain, takewhile, islice, zip_longest\nfrom collections import defaultdict\n#NOTE- always use itertools with sorted list if index of element is not issue to your solution\n\ndef itertools_groupby_example(list_of_nodes):\n\tgraph = defaultdict(list)\n\tfor key, group in groupby(l, lambda x: x[0]):\n\t\t\tgraph[key].append(list(group))\n\tprint(dict(graph))\n\ndef itertools_false_filter_example(iterator):\n\tl = []\n\tfor item in filterfalse(lambda x :x>10, iterator):\n\t\tl.append(item)\n\tprint(l)\n\ndef itertools_dropwhile_example(iterator):\n\tl = []\n\tfor item in dropwhile(lambda x: x>10, iterator):\n\t\tl.append(item)\n\tprint(l)\n\ndef itertools_takewhile_example(iterator):\n\tl = []\n\tprint(iterator)\n\tfor item in takewhile(lambda x: x>10, iterator):\n\t\tl.append(item)\n\tprint(l)\n\ndef itertools_cycle_example(iterator):\n\tfor item in cycle(iterator):\n\t\tprint(item)\n\ndef itertools_count_example():\n\tfor item in count(start=1, step=1):\n\t\tprint(item)\n\ndef itertools_repeat_example():\n\tfor item in repeat(10, 5):\n\t\tprint(3)\n\ndef itertools_chain_example(iterator1, iterator2):\n\tl = []\n\tfor item in chain(iterator1, iterator2):\n\t\tl.append(item)\n\tprint(l)\n\ndef itertools_islice_example(iterator):\n\tl = []\n\tfor item in islice(iterator, 0, 10, 2):\n\t\tl.append(item)\n\tprint(l)\n\ndef itertools_chain_from_iterable_examaple():\n\tl = []\n\tfor item in chain.from_iterable([[2,3,4],[2,5,6]]):\n\t\tl.append(item)\n\tprint(l)\n\ndef itertools_zip_longest():\n\tl1 = ['red', 'orange', 'yellow', 'green', 'blue']\n\tl2 = [1, 2, 3, 
4, 5, 6, 7, 8, 9, 10,]\n\tl3 = ['a','b','c']\n\n\tfor item in zip_longest(l1, l2, l3, fillvalue=None):\n\t\tprint(item)\n\niterator = [11,15,2,5,8,10,50,8,2,3,90,80,100]\niterator1 = [0,10,20,30,40,50,60,70,80,90,100,5]\niterator2 = ['a','b','c']\n\n#itertools_false_filter_example(iterator1)\n#itertools_dropwhile_example(iterator1)\n#itertools_cycle_example(iterator1)\n#itertools_count_example()\n#itertools_repeat_example()\n#itertools_chain_example(iterator1, iterator2)\n#itertools_takewhile_example(iterator)\n#itertools_islice_example(iterator)\n#itertools_chain_from_iterable_examaple()\n#itertools_zip_longest()",
"step-ids": [
7,
10,
11,
12,
14
]
}
|
[
7,
10,
11,
12,
14
] |
#!/bin/python
from flask import Flask, jsonify, request
import subprocess
import os
app = Flask(__name__)
# Last text submitted to /play; /replay reports it back in its response.
text = ""
# Usage hint served by the index route.
greetings = "'/play' and '/replay'\n"
@app.route('/')
def index():
    """Return a short usage hint listing the available endpoints."""
    return greetings
@app.route('/play', methods=['POST'])
def play():
    """Speak the POSTed request body via play.sh and remember it for /replay.

    Returns JSON ``{'played': True, 'text': <text>}`` with HTTP 201.
    """
    global text
    text = request.data.decode('utf-8')
    # Security fix: pass the text as a separate argv element (no shell)
    # instead of interpolating it into a shell string. The request body is
    # untrusted, and the previous os.system('./play.sh "' + text + '"')
    # allowed arbitrary shell-command injection.
    subprocess.run(['./play.sh', text])
    return jsonify({'played': True, "text" : text}), 201
@app.route('/replay')
def replay():
    """Re-run replay.sh and return the last played text as JSON (HTTP 200)."""
    # 'text' is only read here; it is set by the /play handler.
    global text
    os.system('./replay.sh')
    return jsonify({'replayed': True, "text" : text}), 200
if __name__ == '__main__':
    # NOTE(review): debug=True enables Werkzeug's interactive debugger and
    # auto-reloader while listening on all interfaces (0.0.0.0) — do not run
    # this configuration on a publicly reachable host.
    app.run(host='0.0.0.0', debug=True)
|
normal
|
{
"blob_id": "956e63bf06255df4a36b5fa97aa62c0ed805c3f3",
"index": 9452,
"step-1": "<mask token>\n\n\n@app.route('/')\ndef index():\n return greetings\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\n@app.route('/')\ndef index():\n return greetings\n\n\n@app.route('/play', methods=['POST'])\ndef play():\n global text\n text = request.data.decode('utf-8')\n os.system('./play.sh \"' + text + '\"')\n return jsonify({'played': True, 'text': text}), 201\n\n\n@app.route('/replay')\ndef replay():\n global text\n os.system('./replay.sh')\n return jsonify({'replayed': True, 'text': text}), 200\n\n\nif __name__ == '__main__':\n app.run(host='0.0.0.0', debug=True)\n",
"step-3": "<mask token>\napp = Flask(__name__)\ntext = ''\ngreetings = \"'/play' and '/replay'\\n\"\n\n\n@app.route('/')\ndef index():\n return greetings\n\n\n@app.route('/play', methods=['POST'])\ndef play():\n global text\n text = request.data.decode('utf-8')\n os.system('./play.sh \"' + text + '\"')\n return jsonify({'played': True, 'text': text}), 201\n\n\n@app.route('/replay')\ndef replay():\n global text\n os.system('./replay.sh')\n return jsonify({'replayed': True, 'text': text}), 200\n\n\nif __name__ == '__main__':\n app.run(host='0.0.0.0', debug=True)\n",
"step-4": "from flask import Flask, jsonify, request\nimport subprocess\nimport os\napp = Flask(__name__)\ntext = ''\ngreetings = \"'/play' and '/replay'\\n\"\n\n\n@app.route('/')\ndef index():\n return greetings\n\n\n@app.route('/play', methods=['POST'])\ndef play():\n global text\n text = request.data.decode('utf-8')\n os.system('./play.sh \"' + text + '\"')\n return jsonify({'played': True, 'text': text}), 201\n\n\n@app.route('/replay')\ndef replay():\n global text\n os.system('./replay.sh')\n return jsonify({'replayed': True, 'text': text}), 200\n\n\nif __name__ == '__main__':\n app.run(host='0.0.0.0', debug=True)\n",
"step-5": "#!/bin/python\nfrom flask import Flask, jsonify, request\nimport subprocess\nimport os\n\napp = Flask(__name__)\n\ntext = \"\"\ngreetings = \"'/play' and '/replay'\\n\"\n\n@app.route('/')\ndef index():\n return greetings\n\n@app.route('/play', methods=['POST'])\ndef play():\n global text\n text = request.data.decode('utf-8') \n os.system('./play.sh \"' + text + '\"')\n return jsonify({'played': True, \"text\" : text}), 201\n\n@app.route('/replay')\ndef replay():\n global text\n os.system('./replay.sh')\n return jsonify({'replayed': True, \"text\" : text}), 200\n\nif __name__ == '__main__':\n app.run(host='0.0.0.0', debug=True)\n",
"step-ids": [
1,
4,
5,
6,
7
]
}
|
[
1,
4,
5,
6,
7
] |
import random
import tqdm
from keras.models import load_model
from ModelUtil import precision, recall, f1
from tqdm import tqdm
import cv2 as cv
import numpy as np
import os
import pandas as pd
from PIL import Image
os.environ['CUDA_VISIBLE_DEVICES']='1'  # restrict Keras/TensorFlow to GPU 1
# Trained model and dataset locations (absolute paths on the training host).
model_path = '/home/bo/Project/densenet.hdf5'
train_img_path = '/home/bo/Project/Eyes_data/first_train/'
test_img_path = '/home/bo/Project/Eyes_data/first_test/'
# Labels indexed by image name (first CSV column); malformed rows are skipped.
# NOTE(review): error_bad_lines is deprecated in newer pandas — confirm the
# installed version, or migrate to on_bad_lines='skip'.
label_df = pd.read_csv('/home/bo/Project/Eyes_data/first_label.csv', error_bad_lines=False, index_col=0)
# Images are resized to SIZE x SIZE before being fed to the network.
SIZE = 224
def preprocess_image(image_path, desired_size=SIZE):
    """Open the image at *image_path* and resize it to a square of
    *desired_size* x *desired_size* pixels (default 224) using the
    LANCZOS resampling filter.

    :param image_path: path of the image file
    :param desired_size: edge length of the square output image
    :return: the resized PIL image
    """
    img = Image.open(image_path)
    return img.resize((desired_size, desired_size), resample=Image.LANCZOS)
def set_data(img_path, dataframe):
    """Load every image in *img_path* and pair it with its label.

    Fixes: the original built an ``image_names`` array that was never
    returned (its docstring claimed it was) and listed the directory twice.

    :param img_path: path of the images' folder (must end with a separator,
        since file names are appended to it directly)
    :param dataframe: DataFrame indexed by image name (without extension)
        with a 'level' column holding the label
    :return: tuple ``(x_, y_)`` — images as a (N, SIZE, SIZE, 3) uint8
        array and labels as a (N,) float array
    """
    file_names = os.listdir(img_path)
    N = len(file_names)
    x_ = np.empty((N, SIZE, SIZE, 3), dtype=np.uint8)
    y_ = np.empty(N)
    for i, img_name in enumerate(tqdm(file_names)):
        x_[i, :, :, :] = preprocess_image(img_path + img_name)
        # Labels are keyed by the file name with its extension stripped.
        y_[i] = dataframe.loc[img_name.split('.')[0], 'level']
    return x_, y_
def predict(X):
    """Load the saved densenet model (with its custom metrics) and return
    its predictions for *X*."""
    custom = {'precision': precision, 'recall': recall, 'f1': f1}
    model = load_model(model_path, custom_objects=custom)
    return model.predict(X)
def sobel(img_set):
    """Return a copy of *img_set* in which every image is replaced by its
    Sobel gradient magnitude (|gx| and |gy| blended 50/50)."""
    out = np.empty(img_set.shape)
    for idx, image in enumerate(tqdm(img_set)):
        as_float = np.float32(image)
        gx = cv.convertScaleAbs(cv.Sobel(as_float, cv.CV_32F, 1, 0))
        gy = cv.convertScaleAbs(cv.Sobel(as_float, cv.CV_32F, 0, 1))
        out[idx, :] = cv.addWeighted(gx, 0.5, gy, 0.5, 0)
    return out
def canny(img_set):
    """Return a copy of *img_set* where each image is masked by its own
    Canny edge map (non-edge pixels zeroed).

    Per image: Gaussian blur -> grayscale -> Canny(50, 150) -> bitwise AND
    of the original image with the edge mask.

    Fix: removed a leftover debug ``print(dst)`` that dumped every full
    image array to stdout on each iteration.
    """
    ret = np.empty(img_set.shape)
    for i, image in enumerate(tqdm(img_set)):
        blurred = cv.GaussianBlur(np.float32(image), (3, 3), 0)
        gray = cv.cvtColor(blurred, cv.COLOR_RGB2GRAY)
        edge_output = cv.Canny(gray, 50, 150)
        # Keep only the pixels that lie on detected edges.
        dst = cv.bitwise_and(image, image, mask=edge_output)
        ret[i, :] = dst
    return ret
def scharr(img_set):
    """Return a copy of *img_set* in which every image is replaced by its
    Scharr gradient magnitude (|gx| and |gy| blended 50/50)."""
    out = np.empty(img_set.shape)
    for idx, image in enumerate(tqdm(img_set)):
        as_float = np.float32(image)
        gx = cv.convertScaleAbs(cv.Scharr(as_float, cv.CV_32F, 1, 0))
        gy = cv.convertScaleAbs(cv.Scharr(as_float, cv.CV_32F, 0, 1))
        out[idx, :] = cv.addWeighted(gx, 0.5, gy, 0.5, 0)
    return out
def laplace(img_set):
    """Return a copy of *img_set* with each image replaced by the absolute
    value of its Laplacian (3x3 kernel)."""
    filtered = np.empty(img_set.shape)
    for idx, image in enumerate(tqdm(img_set)):
        lap = cv.Laplacian(np.float32(image), cv.CV_32F, ksize=3)
        filtered[idx, :] = cv.convertScaleAbs(lap)
    return filtered
def sp_noise(img_set, prob=0.1):
    """Return a copy of *img_set* with salt-and-pepper noise applied.

    Each pixel independently becomes 0 (pepper) with probability *prob*,
    255 (salt) with probability *prob*, and is kept unchanged otherwise.
    """
    noisy = np.empty(img_set.shape)
    salt_threshold = 1 - prob
    for idx, image in enumerate(tqdm(img_set)):
        result = np.zeros(image.shape, np.uint8)
        for row in range(image.shape[0]):
            for col in range(image.shape[1]):
                draw = random.random()
                if draw < prob:
                    result[row][col] = 0
                elif draw > salt_threshold:
                    result[row][col] = 255
                else:
                    result[row][col] = image[row][col]
        noisy[idx, :] = result
    return noisy
def gasuss_noise(img_set, mean=0, var=0.01):
    """Return a copy of *img_set* with additive Gaussian noise.

    Each image is scaled to [0, 1], noise drawn from N(mean, sqrt(var)) is
    added, the sum is clipped back into range and rescaled to uint8.
    """
    noisy = np.empty(img_set.shape)
    for idx, image in enumerate(tqdm(img_set)):
        scaled = np.array(image / 255, dtype=float)
        perturbed = scaled + np.random.normal(mean, var ** 0.5, scaled.shape)
        # Mirror the original clip bounds: [-1, 1] when any value went
        # negative, [0, 1] otherwise.
        lower = -1.0 if perturbed.min() < 0 else 0.0
        clipped = np.clip(perturbed, lower, 1.0)
        noisy[idx, :] = np.uint8(clipped * 255)
    return noisy
def ouput_csv(X_, Y_, csv_path):
    """Predict on *X_* with the saved model and write the predictions,
    together with the 'level' (Y_[:, 0]) and 'label' (Y_[:, 1]) columns,
    to *csv_path* without an index column. The frame is also printed."""
    custom = {'precision': precision, 'recall': recall, 'f1': f1}
    model = load_model(model_path, custom_objects=custom)
    frame = pd.DataFrame(model.predict(X_))
    frame['level'] = Y_[:, 0]
    frame['label'] = Y_[:, 1]
    print(frame)
    frame.to_csv(csv_path, index=False)
## if you would like to use sobel
# Build the combined dataset: training images are flagged 1 (in-set) and
# test images 0 (out-of-set); the flag is appended next to the 'level' label.
x_train, y_train = set_data(train_img_path,label_df)
y_in = np.c_[y_train, np.ones(y_train.shape[0])]
x_test, y_test = set_data(test_img_path,label_df)
y_out = np.c_[y_test, np.zeros(y_test.shape[0])]
# Apply the Sobel operator to both splits before scoring them with the model.
X_ = np.r_[sobel(x_train), sobel(x_test)]
Y_ = np.r_[y_in, y_out]
ouput_csv(X_, Y_, 'sobel_eye.csv')
## original output without operator
# x_train, y_train = set_data(train_img_path,label_df)
# y_in = np.c_[y_train, np.ones(y_train.shape[0])]
# x_test, y_test = set_data(test_img_path,label_df)
# y_out = np.c_[y_test, np.zeros(y_test.shape[0])]
#
# X_ = np.r_[x_train, x_test]
# Y_ = np.r_[y_in, y_out]
#
# ouput_csv(X_, Y_, 'sobel_eye.csv')
|
normal
|
{
"blob_id": "c2b3594d25e2d1670d9b99e0d3484c680f59421f",
"index": 9465,
"step-1": "<mask token>\n\n\ndef preprocess_image(image_path, desired_size=SIZE):\n \"\"\"\n Resize the picture to the desired size\n :param image_path: the path of image folder\n :param desired_size: the size that image will be cropped as. The default size is 224*224\n :return: the cropped image\n \"\"\"\n im = Image.open(image_path)\n im = im.resize((desired_size,) * 2, resample=Image.LANCZOS)\n return im\n\n\ndef set_data(img_path, dataframe):\n \"\"\"\n Correspond the image to the label and return them.\n :param img_path: the path of images' folder\n :param dataframe: the .csv file that shows relation between image and label\n :return: Image, Label and the name of Image\n \"\"\"\n N = len(os.listdir(img_path))\n x_ = np.empty((N, SIZE, SIZE, 3), dtype=np.uint8)\n y_ = np.empty(N)\n image_names = np.empty(N, dtype=np.dtype(('U', 15)))\n for i, img_name in enumerate(tqdm(os.listdir(img_path))):\n x_[i, :, :, :] = preprocess_image(img_path + img_name)\n y_[i] = dataframe.loc[img_name.split('.')[0], 'level']\n image_names[i] = img_name\n return x_, y_\n\n\ndef predict(X):\n model = load_model(model_path, custom_objects={'precision': precision,\n 'recall': recall, 'f1': f1})\n ret = model.predict(X)\n return ret\n\n\ndef sobel(img_set):\n ret = np.empty(img_set.shape)\n for i, img in enumerate(tqdm(img_set)):\n grad_x = cv.Sobel(np.float32(img), cv.CV_32F, 1, 0)\n grad_y = cv.Sobel(np.float32(img), cv.CV_32F, 0, 1)\n gradx = cv.convertScaleAbs(grad_x)\n grady = cv.convertScaleAbs(grad_y)\n gradxy = cv.addWeighted(gradx, 0.5, grady, 0.5, 0)\n ret[i, :] = gradxy\n return ret\n\n\ndef canny(img_set):\n ret = np.empty(img_set.shape)\n for i, image in enumerate(tqdm(img_set)):\n blurred = cv.GaussianBlur(np.float32(image), (3, 3), 0)\n gray = cv.cvtColor(blurred, cv.COLOR_RGB2GRAY)\n edge_output = cv.Canny(gray, 50, 150)\n dst = cv.bitwise_and(image, image, mask=edge_output)\n print(dst)\n ret[i, :] = dst\n return ret\n\n\ndef scharr(img_set):\n ret = 
np.empty(img_set.shape)\n for i, img in enumerate(tqdm(img_set)):\n grad_x = cv.Scharr(np.float32(img), cv.CV_32F, 1, 0)\n grad_y = cv.Scharr(np.float32(img), cv.CV_32F, 0, 1)\n gradx = cv.convertScaleAbs(grad_x)\n grady = cv.convertScaleAbs(grad_y)\n gradxy = cv.addWeighted(gradx, 0.5, grady, 0.5, 0)\n ret[i, :] = gradxy\n return ret\n\n\ndef laplace(img_set):\n ret = np.empty(img_set.shape)\n for i, img in enumerate(tqdm(img_set)):\n gray_lap = cv.Laplacian(np.float32(img), cv.CV_32F, ksize=3)\n dst = cv.convertScaleAbs(gray_lap)\n ret[i, :] = dst\n return ret\n\n\n<mask token>\n\n\ndef gasuss_noise(img_set, mean=0, var=0.01):\n ret = np.empty(img_set.shape)\n for m, image in enumerate(tqdm(img_set)):\n image = np.array(image / 255, dtype=float)\n noise = np.random.normal(mean, var ** 0.5, image.shape)\n out = image + noise\n if out.min() < 0:\n low_clip = -1.0\n else:\n low_clip = 0.0\n out = np.clip(out, low_clip, 1.0)\n out = np.uint8(out * 255)\n ret[m, :] = out\n return ret\n\n\ndef ouput_csv(X_, Y_, csv_path):\n model = load_model(model_path, custom_objects={'precision': precision,\n 'recall': recall, 'f1': f1})\n data = model.predict(X_)\n dataDF = pd.DataFrame(data)\n dataDF['level'] = Y_[:, 0]\n dataDF['label'] = Y_[:, 1]\n print(dataDF)\n dataDF.to_csv(csv_path, index=False)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef preprocess_image(image_path, desired_size=SIZE):\n \"\"\"\n Resize the picture to the desired size\n :param image_path: the path of image folder\n :param desired_size: the size that image will be cropped as. The default size is 224*224\n :return: the cropped image\n \"\"\"\n im = Image.open(image_path)\n im = im.resize((desired_size,) * 2, resample=Image.LANCZOS)\n return im\n\n\ndef set_data(img_path, dataframe):\n \"\"\"\n Correspond the image to the label and return them.\n :param img_path: the path of images' folder\n :param dataframe: the .csv file that shows relation between image and label\n :return: Image, Label and the name of Image\n \"\"\"\n N = len(os.listdir(img_path))\n x_ = np.empty((N, SIZE, SIZE, 3), dtype=np.uint8)\n y_ = np.empty(N)\n image_names = np.empty(N, dtype=np.dtype(('U', 15)))\n for i, img_name in enumerate(tqdm(os.listdir(img_path))):\n x_[i, :, :, :] = preprocess_image(img_path + img_name)\n y_[i] = dataframe.loc[img_name.split('.')[0], 'level']\n image_names[i] = img_name\n return x_, y_\n\n\ndef predict(X):\n model = load_model(model_path, custom_objects={'precision': precision,\n 'recall': recall, 'f1': f1})\n ret = model.predict(X)\n return ret\n\n\ndef sobel(img_set):\n ret = np.empty(img_set.shape)\n for i, img in enumerate(tqdm(img_set)):\n grad_x = cv.Sobel(np.float32(img), cv.CV_32F, 1, 0)\n grad_y = cv.Sobel(np.float32(img), cv.CV_32F, 0, 1)\n gradx = cv.convertScaleAbs(grad_x)\n grady = cv.convertScaleAbs(grad_y)\n gradxy = cv.addWeighted(gradx, 0.5, grady, 0.5, 0)\n ret[i, :] = gradxy\n return ret\n\n\ndef canny(img_set):\n ret = np.empty(img_set.shape)\n for i, image in enumerate(tqdm(img_set)):\n blurred = cv.GaussianBlur(np.float32(image), (3, 3), 0)\n gray = cv.cvtColor(blurred, cv.COLOR_RGB2GRAY)\n edge_output = cv.Canny(gray, 50, 150)\n dst = cv.bitwise_and(image, image, mask=edge_output)\n print(dst)\n ret[i, :] = dst\n return ret\n\n\ndef scharr(img_set):\n ret = 
np.empty(img_set.shape)\n for i, img in enumerate(tqdm(img_set)):\n grad_x = cv.Scharr(np.float32(img), cv.CV_32F, 1, 0)\n grad_y = cv.Scharr(np.float32(img), cv.CV_32F, 0, 1)\n gradx = cv.convertScaleAbs(grad_x)\n grady = cv.convertScaleAbs(grad_y)\n gradxy = cv.addWeighted(gradx, 0.5, grady, 0.5, 0)\n ret[i, :] = gradxy\n return ret\n\n\ndef laplace(img_set):\n ret = np.empty(img_set.shape)\n for i, img in enumerate(tqdm(img_set)):\n gray_lap = cv.Laplacian(np.float32(img), cv.CV_32F, ksize=3)\n dst = cv.convertScaleAbs(gray_lap)\n ret[i, :] = dst\n return ret\n\n\ndef sp_noise(img_set, prob=0.1):\n ret = np.empty(img_set.shape)\n for m, image in enumerate(tqdm(img_set)):\n out = np.zeros(image.shape, np.uint8)\n thres = 1 - prob\n for i in range(image.shape[0]):\n for j in range(image.shape[1]):\n rdn = random.random()\n if rdn < prob:\n out[i][j] = 0\n elif rdn > thres:\n out[i][j] = 255\n else:\n out[i][j] = image[i][j]\n ret[m, :] = out\n return ret\n\n\ndef gasuss_noise(img_set, mean=0, var=0.01):\n ret = np.empty(img_set.shape)\n for m, image in enumerate(tqdm(img_set)):\n image = np.array(image / 255, dtype=float)\n noise = np.random.normal(mean, var ** 0.5, image.shape)\n out = image + noise\n if out.min() < 0:\n low_clip = -1.0\n else:\n low_clip = 0.0\n out = np.clip(out, low_clip, 1.0)\n out = np.uint8(out * 255)\n ret[m, :] = out\n return ret\n\n\ndef ouput_csv(X_, Y_, csv_path):\n model = load_model(model_path, custom_objects={'precision': precision,\n 'recall': recall, 'f1': f1})\n data = model.predict(X_)\n dataDF = pd.DataFrame(data)\n dataDF['level'] = Y_[:, 0]\n dataDF['label'] = Y_[:, 1]\n print(dataDF)\n dataDF.to_csv(csv_path, index=False)\n\n\n<mask token>\n",
"step-3": "<mask token>\nos.environ['CUDA_VISIBLE_DEVICES'] = '1'\nmodel_path = '/home/bo/Project/densenet.hdf5'\ntrain_img_path = '/home/bo/Project/Eyes_data/first_train/'\ntest_img_path = '/home/bo/Project/Eyes_data/first_test/'\nlabel_df = pd.read_csv('/home/bo/Project/Eyes_data/first_label.csv',\n error_bad_lines=False, index_col=0)\nSIZE = 224\n\n\ndef preprocess_image(image_path, desired_size=SIZE):\n \"\"\"\n Resize the picture to the desired size\n :param image_path: the path of image folder\n :param desired_size: the size that image will be cropped as. The default size is 224*224\n :return: the cropped image\n \"\"\"\n im = Image.open(image_path)\n im = im.resize((desired_size,) * 2, resample=Image.LANCZOS)\n return im\n\n\ndef set_data(img_path, dataframe):\n \"\"\"\n Correspond the image to the label and return them.\n :param img_path: the path of images' folder\n :param dataframe: the .csv file that shows relation between image and label\n :return: Image, Label and the name of Image\n \"\"\"\n N = len(os.listdir(img_path))\n x_ = np.empty((N, SIZE, SIZE, 3), dtype=np.uint8)\n y_ = np.empty(N)\n image_names = np.empty(N, dtype=np.dtype(('U', 15)))\n for i, img_name in enumerate(tqdm(os.listdir(img_path))):\n x_[i, :, :, :] = preprocess_image(img_path + img_name)\n y_[i] = dataframe.loc[img_name.split('.')[0], 'level']\n image_names[i] = img_name\n return x_, y_\n\n\ndef predict(X):\n model = load_model(model_path, custom_objects={'precision': precision,\n 'recall': recall, 'f1': f1})\n ret = model.predict(X)\n return ret\n\n\ndef sobel(img_set):\n ret = np.empty(img_set.shape)\n for i, img in enumerate(tqdm(img_set)):\n grad_x = cv.Sobel(np.float32(img), cv.CV_32F, 1, 0)\n grad_y = cv.Sobel(np.float32(img), cv.CV_32F, 0, 1)\n gradx = cv.convertScaleAbs(grad_x)\n grady = cv.convertScaleAbs(grad_y)\n gradxy = cv.addWeighted(gradx, 0.5, grady, 0.5, 0)\n ret[i, :] = gradxy\n return ret\n\n\ndef canny(img_set):\n ret = np.empty(img_set.shape)\n for i, image 
in enumerate(tqdm(img_set)):\n blurred = cv.GaussianBlur(np.float32(image), (3, 3), 0)\n gray = cv.cvtColor(blurred, cv.COLOR_RGB2GRAY)\n edge_output = cv.Canny(gray, 50, 150)\n dst = cv.bitwise_and(image, image, mask=edge_output)\n print(dst)\n ret[i, :] = dst\n return ret\n\n\ndef scharr(img_set):\n ret = np.empty(img_set.shape)\n for i, img in enumerate(tqdm(img_set)):\n grad_x = cv.Scharr(np.float32(img), cv.CV_32F, 1, 0)\n grad_y = cv.Scharr(np.float32(img), cv.CV_32F, 0, 1)\n gradx = cv.convertScaleAbs(grad_x)\n grady = cv.convertScaleAbs(grad_y)\n gradxy = cv.addWeighted(gradx, 0.5, grady, 0.5, 0)\n ret[i, :] = gradxy\n return ret\n\n\ndef laplace(img_set):\n ret = np.empty(img_set.shape)\n for i, img in enumerate(tqdm(img_set)):\n gray_lap = cv.Laplacian(np.float32(img), cv.CV_32F, ksize=3)\n dst = cv.convertScaleAbs(gray_lap)\n ret[i, :] = dst\n return ret\n\n\ndef sp_noise(img_set, prob=0.1):\n ret = np.empty(img_set.shape)\n for m, image in enumerate(tqdm(img_set)):\n out = np.zeros(image.shape, np.uint8)\n thres = 1 - prob\n for i in range(image.shape[0]):\n for j in range(image.shape[1]):\n rdn = random.random()\n if rdn < prob:\n out[i][j] = 0\n elif rdn > thres:\n out[i][j] = 255\n else:\n out[i][j] = image[i][j]\n ret[m, :] = out\n return ret\n\n\ndef gasuss_noise(img_set, mean=0, var=0.01):\n ret = np.empty(img_set.shape)\n for m, image in enumerate(tqdm(img_set)):\n image = np.array(image / 255, dtype=float)\n noise = np.random.normal(mean, var ** 0.5, image.shape)\n out = image + noise\n if out.min() < 0:\n low_clip = -1.0\n else:\n low_clip = 0.0\n out = np.clip(out, low_clip, 1.0)\n out = np.uint8(out * 255)\n ret[m, :] = out\n return ret\n\n\ndef ouput_csv(X_, Y_, csv_path):\n model = load_model(model_path, custom_objects={'precision': precision,\n 'recall': recall, 'f1': f1})\n data = model.predict(X_)\n dataDF = pd.DataFrame(data)\n dataDF['level'] = Y_[:, 0]\n dataDF['label'] = Y_[:, 1]\n print(dataDF)\n dataDF.to_csv(csv_path, 
index=False)\n\n\nx_train, y_train = set_data(train_img_path, label_df)\ny_in = np.c_[y_train, np.ones(y_train.shape[0])]\nx_test, y_test = set_data(test_img_path, label_df)\ny_out = np.c_[y_test, np.zeros(y_test.shape[0])]\nX_ = np.r_[sobel(x_train), sobel(x_test)]\nY_ = np.r_[y_in, y_out]\nouput_csv(X_, Y_, 'sobel_eye.csv')\n",
"step-4": "import random\nimport tqdm\nfrom keras.models import load_model\nfrom ModelUtil import precision, recall, f1\nfrom tqdm import tqdm\nimport cv2 as cv\nimport numpy as np\nimport os\nimport pandas as pd\nfrom PIL import Image\nos.environ['CUDA_VISIBLE_DEVICES'] = '1'\nmodel_path = '/home/bo/Project/densenet.hdf5'\ntrain_img_path = '/home/bo/Project/Eyes_data/first_train/'\ntest_img_path = '/home/bo/Project/Eyes_data/first_test/'\nlabel_df = pd.read_csv('/home/bo/Project/Eyes_data/first_label.csv',\n error_bad_lines=False, index_col=0)\nSIZE = 224\n\n\ndef preprocess_image(image_path, desired_size=SIZE):\n \"\"\"\n Resize the picture to the desired size\n :param image_path: the path of image folder\n :param desired_size: the size that image will be cropped as. The default size is 224*224\n :return: the cropped image\n \"\"\"\n im = Image.open(image_path)\n im = im.resize((desired_size,) * 2, resample=Image.LANCZOS)\n return im\n\n\ndef set_data(img_path, dataframe):\n \"\"\"\n Correspond the image to the label and return them.\n :param img_path: the path of images' folder\n :param dataframe: the .csv file that shows relation between image and label\n :return: Image, Label and the name of Image\n \"\"\"\n N = len(os.listdir(img_path))\n x_ = np.empty((N, SIZE, SIZE, 3), dtype=np.uint8)\n y_ = np.empty(N)\n image_names = np.empty(N, dtype=np.dtype(('U', 15)))\n for i, img_name in enumerate(tqdm(os.listdir(img_path))):\n x_[i, :, :, :] = preprocess_image(img_path + img_name)\n y_[i] = dataframe.loc[img_name.split('.')[0], 'level']\n image_names[i] = img_name\n return x_, y_\n\n\ndef predict(X):\n model = load_model(model_path, custom_objects={'precision': precision,\n 'recall': recall, 'f1': f1})\n ret = model.predict(X)\n return ret\n\n\ndef sobel(img_set):\n ret = np.empty(img_set.shape)\n for i, img in enumerate(tqdm(img_set)):\n grad_x = cv.Sobel(np.float32(img), cv.CV_32F, 1, 0)\n grad_y = cv.Sobel(np.float32(img), cv.CV_32F, 0, 1)\n gradx = 
cv.convertScaleAbs(grad_x)\n grady = cv.convertScaleAbs(grad_y)\n gradxy = cv.addWeighted(gradx, 0.5, grady, 0.5, 0)\n ret[i, :] = gradxy\n return ret\n\n\ndef canny(img_set):\n ret = np.empty(img_set.shape)\n for i, image in enumerate(tqdm(img_set)):\n blurred = cv.GaussianBlur(np.float32(image), (3, 3), 0)\n gray = cv.cvtColor(blurred, cv.COLOR_RGB2GRAY)\n edge_output = cv.Canny(gray, 50, 150)\n dst = cv.bitwise_and(image, image, mask=edge_output)\n print(dst)\n ret[i, :] = dst\n return ret\n\n\ndef scharr(img_set):\n ret = np.empty(img_set.shape)\n for i, img in enumerate(tqdm(img_set)):\n grad_x = cv.Scharr(np.float32(img), cv.CV_32F, 1, 0)\n grad_y = cv.Scharr(np.float32(img), cv.CV_32F, 0, 1)\n gradx = cv.convertScaleAbs(grad_x)\n grady = cv.convertScaleAbs(grad_y)\n gradxy = cv.addWeighted(gradx, 0.5, grady, 0.5, 0)\n ret[i, :] = gradxy\n return ret\n\n\ndef laplace(img_set):\n ret = np.empty(img_set.shape)\n for i, img in enumerate(tqdm(img_set)):\n gray_lap = cv.Laplacian(np.float32(img), cv.CV_32F, ksize=3)\n dst = cv.convertScaleAbs(gray_lap)\n ret[i, :] = dst\n return ret\n\n\ndef sp_noise(img_set, prob=0.1):\n ret = np.empty(img_set.shape)\n for m, image in enumerate(tqdm(img_set)):\n out = np.zeros(image.shape, np.uint8)\n thres = 1 - prob\n for i in range(image.shape[0]):\n for j in range(image.shape[1]):\n rdn = random.random()\n if rdn < prob:\n out[i][j] = 0\n elif rdn > thres:\n out[i][j] = 255\n else:\n out[i][j] = image[i][j]\n ret[m, :] = out\n return ret\n\n\ndef gasuss_noise(img_set, mean=0, var=0.01):\n ret = np.empty(img_set.shape)\n for m, image in enumerate(tqdm(img_set)):\n image = np.array(image / 255, dtype=float)\n noise = np.random.normal(mean, var ** 0.5, image.shape)\n out = image + noise\n if out.min() < 0:\n low_clip = -1.0\n else:\n low_clip = 0.0\n out = np.clip(out, low_clip, 1.0)\n out = np.uint8(out * 255)\n ret[m, :] = out\n return ret\n\n\ndef ouput_csv(X_, Y_, csv_path):\n model = load_model(model_path, 
custom_objects={'precision': precision,\n 'recall': recall, 'f1': f1})\n data = model.predict(X_)\n dataDF = pd.DataFrame(data)\n dataDF['level'] = Y_[:, 0]\n dataDF['label'] = Y_[:, 1]\n print(dataDF)\n dataDF.to_csv(csv_path, index=False)\n\n\nx_train, y_train = set_data(train_img_path, label_df)\ny_in = np.c_[y_train, np.ones(y_train.shape[0])]\nx_test, y_test = set_data(test_img_path, label_df)\ny_out = np.c_[y_test, np.zeros(y_test.shape[0])]\nX_ = np.r_[sobel(x_train), sobel(x_test)]\nY_ = np.r_[y_in, y_out]\nouput_csv(X_, Y_, 'sobel_eye.csv')\n",
"step-5": "\nimport random\nimport tqdm\nfrom keras.models import load_model\nfrom ModelUtil import precision, recall, f1\nfrom tqdm import tqdm\nimport cv2 as cv\nimport numpy as np\nimport os\nimport pandas as pd\nfrom PIL import Image\n\n\nos.environ['CUDA_VISIBLE_DEVICES']='1'\n\n\nmodel_path = '/home/bo/Project/densenet.hdf5'\ntrain_img_path = '/home/bo/Project/Eyes_data/first_train/'\ntest_img_path = '/home/bo/Project/Eyes_data/first_test/'\nlabel_df = pd.read_csv('/home/bo/Project/Eyes_data/first_label.csv', error_bad_lines=False, index_col=0)\n\nSIZE = 224\n\n\ndef preprocess_image(image_path, desired_size=SIZE):\n \"\"\"\n Resize the picture to the desired size\n :param image_path: the path of image folder\n :param desired_size: the size that image will be cropped as. The default size is 224*224\n :return: the cropped image\n \"\"\"\n im = Image.open(image_path)\n im = im.resize((desired_size,) * 2, resample=Image.LANCZOS)\n\n return im\n\ndef set_data(img_path, dataframe):\n \"\"\"\n Correspond the image to the label and return them.\n :param img_path: the path of images' folder\n :param dataframe: the .csv file that shows relation between image and label\n :return: Image, Label and the name of Image\n \"\"\"\n N = len(os.listdir(img_path))\n x_ = np.empty((N, SIZE, SIZE, 3), dtype=np.uint8)\n y_ = np.empty(N)\n image_names = np.empty(N, dtype=np.dtype(('U', 15)))\n for i, img_name in enumerate(tqdm(os.listdir(img_path))):\n x_[i, :, :, :] = preprocess_image(img_path + img_name)\n y_[i] = dataframe.loc[img_name.split('.')[0], 'level']\n image_names[i] = img_name\n\n return x_, y_\n\n\ndef predict(X):\n model = load_model(model_path,\n custom_objects={'precision': precision, 'recall': recall, 'f1': f1})\n ret = model.predict(X)\n\n return ret\n\ndef sobel(img_set):\n\n ret = np.empty(img_set.shape)\n for i, img in enumerate(tqdm(img_set)):\n grad_x = cv.Sobel(np.float32(img), cv.CV_32F, 1, 0)\n grad_y = cv.Sobel(np.float32(img), cv.CV_32F, 0, 1)\n gradx = 
cv.convertScaleAbs(grad_x)\n grady = cv.convertScaleAbs(grad_y)\n gradxy = cv.addWeighted(gradx, 0.5, grady, 0.5, 0)\n ret[i, :] = gradxy\n return ret\n\n\ndef canny(img_set):\n\n ret = np.empty(img_set.shape)\n for i, image in enumerate(tqdm(img_set)):\n blurred = cv.GaussianBlur(np.float32(image), (3, 3), 0)\n gray = cv.cvtColor(blurred, cv.COLOR_RGB2GRAY)\n edge_output = cv.Canny(gray, 50, 150)\n dst = cv.bitwise_and(image, image, mask=edge_output)\n print(dst)\n ret[i, :] = dst\n\n return ret\n\n\ndef scharr(img_set):\n\n ret = np.empty(img_set.shape)\n for i, img in enumerate(tqdm(img_set)):\n grad_x = cv.Scharr(np.float32(img), cv.CV_32F, 1, 0)\n grad_y = cv.Scharr(np.float32(img), cv.CV_32F, 0, 1)\n gradx = cv.convertScaleAbs(grad_x)\n grady = cv.convertScaleAbs(grad_y)\n gradxy = cv.addWeighted(gradx, 0.5, grady, 0.5, 0)\n ret[i, :] = gradxy\n\n\n return ret\n\ndef laplace(img_set):\n\n ret = np.empty(img_set.shape)\n for i, img in enumerate(tqdm(img_set)):\n gray_lap = cv.Laplacian(np.float32(img), cv.CV_32F, ksize=3)\n dst = cv.convertScaleAbs(gray_lap)\n ret[i, :] = dst\n\n return ret\n\n\ndef sp_noise(img_set, prob=0.1):\n ret = np.empty(img_set.shape)\n for m, image in enumerate(tqdm(img_set)):\n out = np.zeros(image.shape, np.uint8)\n thres = 1 - prob\n for i in range(image.shape[0]):\n for j in range(image.shape[1]):\n rdn = random.random()\n if rdn < prob:\n out[i][j] = 0\n elif rdn > thres:\n out[i][j] = 255\n else:\n out[i][j] = image[i][j]\n ret[m,:] = out\n\n return ret\n\ndef gasuss_noise(img_set, mean=0, var=0.01):\n ret = np.empty(img_set.shape)\n for m, image in enumerate(tqdm(img_set)):\n image = np.array(image/255, dtype=float)\n noise = np.random.normal(mean, var ** 0.5, image.shape)\n out = image + noise\n if out.min() < 0:\n low_clip = -1.\n else:\n low_clip = 0.\n out = np.clip(out, low_clip, 1.0)\n out = np.uint8(out*255)\n ret[m, :] = out\n return ret\n\ndef ouput_csv(X_, Y_, csv_path):\n model = load_model(model_path,\n 
custom_objects={'precision': precision, 'recall': recall, 'f1': f1})\n data = model.predict(X_)\n dataDF = pd.DataFrame(data)\n dataDF['level'] = Y_[:, 0]\n dataDF['label'] = Y_[:, 1]\n print(dataDF)\n dataDF.to_csv(csv_path, index=False)\n\n\n\n## if you would like to use sobel\nx_train, y_train = set_data(train_img_path,label_df)\ny_in = np.c_[y_train, np.ones(y_train.shape[0])]\nx_test, y_test = set_data(test_img_path,label_df)\ny_out = np.c_[y_test, np.zeros(y_test.shape[0])]\n\nX_ = np.r_[sobel(x_train), sobel(x_test)]\nY_ = np.r_[y_in, y_out]\n\nouput_csv(X_, Y_, 'sobel_eye.csv')\n\n## original output without operator\n# x_train, y_train = set_data(train_img_path,label_df)\n# y_in = np.c_[y_train, np.ones(y_train.shape[0])]\n# x_test, y_test = set_data(test_img_path,label_df)\n# y_out = np.c_[y_test, np.zeros(y_test.shape[0])]\n#\n# X_ = np.r_[x_train, x_test]\n# Y_ = np.r_[y_in, y_out]\n#\n# ouput_csv(X_, Y_, 'sobel_eye.csv')\n",
"step-ids": [
9,
10,
12,
13,
14
]
}
|
[
9,
10,
12,
13,
14
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def main():
graphics = BreakoutGraphics()
lives = NUM_LIVES
graphics.window.add(graphics.scoreboard, 0, graphics.window_height)
while True:
pause(FRAME_RATE)
if graphics.ball_fall_down():
lives -= 1
if lives > 0:
graphics.reset_ball()
else:
graphics.game_over()
break
if graphics.you_win():
break
vx = graphics.getx()
vy = graphics.gety()
graphics.ball.move(vx, vy)
graphics.boundary()
graphics.collision()
if __name__ == '__main__':
main()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
FRAME_RATE = 1000 / 120
NUM_LIVES = 3
def main():
graphics = BreakoutGraphics()
lives = NUM_LIVES
graphics.window.add(graphics.scoreboard, 0, graphics.window_height)
while True:
pause(FRAME_RATE)
if graphics.ball_fall_down():
lives -= 1
if lives > 0:
graphics.reset_ball()
else:
graphics.game_over()
break
if graphics.you_win():
break
vx = graphics.getx()
vy = graphics.gety()
graphics.ball.move(vx, vy)
graphics.boundary()
graphics.collision()
if __name__ == '__main__':
main()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
from campy.gui.events.timer import pause
from breakoutgraphics import BreakoutGraphics
FRAME_RATE = 1000 / 120
NUM_LIVES = 3
def main():
graphics = BreakoutGraphics()
lives = NUM_LIVES
graphics.window.add(graphics.scoreboard, 0, graphics.window_height)
while True:
pause(FRAME_RATE)
if graphics.ball_fall_down():
lives -= 1
if lives > 0:
graphics.reset_ball()
else:
graphics.game_over()
break
if graphics.you_win():
break
vx = graphics.getx()
vy = graphics.gety()
graphics.ball.move(vx, vy)
graphics.boundary()
graphics.collision()
if __name__ == '__main__':
main()
<|reserved_special_token_1|>
"""
stanCode Breakout Project
Adapted from Eric Roberts's Breakout by
Sonja Johnson-Yu, Kylie Jue, Nick Bowman,
and Jerry Liao
YOUR DESCRIPTION HERE
"""
from campy.gui.events.timer import pause
from breakoutgraphics import BreakoutGraphics
FRAME_RATE = 1000 / 120 # 120 frames per second.
NUM_LIVES = 3
def main():
graphics = BreakoutGraphics()
lives = NUM_LIVES # 生命
graphics.window.add(graphics.scoreboard, 0, graphics.window_height) # 計分板
# Add animation loop here!
while True:
pause(FRAME_RATE)
if graphics.ball_fall_down():
lives -= 1
if lives > 0:
graphics.reset_ball()
else:
graphics.game_over()
break
if graphics.you_win():
break
vx = graphics.getx()
vy = graphics.gety()
graphics.ball.move(vx, vy)
graphics.boundary()
graphics.collision()
if __name__ == '__main__':
main()
|
flexible
|
{
"blob_id": "b218f5e401510f844006cb6079737b54aa86827b",
"index": 2194,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef main():\n graphics = BreakoutGraphics()\n lives = NUM_LIVES\n graphics.window.add(graphics.scoreboard, 0, graphics.window_height)\n while True:\n pause(FRAME_RATE)\n if graphics.ball_fall_down():\n lives -= 1\n if lives > 0:\n graphics.reset_ball()\n else:\n graphics.game_over()\n break\n if graphics.you_win():\n break\n vx = graphics.getx()\n vy = graphics.gety()\n graphics.ball.move(vx, vy)\n graphics.boundary()\n graphics.collision()\n\n\nif __name__ == '__main__':\n main()\n",
"step-3": "<mask token>\nFRAME_RATE = 1000 / 120\nNUM_LIVES = 3\n\n\ndef main():\n graphics = BreakoutGraphics()\n lives = NUM_LIVES\n graphics.window.add(graphics.scoreboard, 0, graphics.window_height)\n while True:\n pause(FRAME_RATE)\n if graphics.ball_fall_down():\n lives -= 1\n if lives > 0:\n graphics.reset_ball()\n else:\n graphics.game_over()\n break\n if graphics.you_win():\n break\n vx = graphics.getx()\n vy = graphics.gety()\n graphics.ball.move(vx, vy)\n graphics.boundary()\n graphics.collision()\n\n\nif __name__ == '__main__':\n main()\n",
"step-4": "<mask token>\nfrom campy.gui.events.timer import pause\nfrom breakoutgraphics import BreakoutGraphics\nFRAME_RATE = 1000 / 120\nNUM_LIVES = 3\n\n\ndef main():\n graphics = BreakoutGraphics()\n lives = NUM_LIVES\n graphics.window.add(graphics.scoreboard, 0, graphics.window_height)\n while True:\n pause(FRAME_RATE)\n if graphics.ball_fall_down():\n lives -= 1\n if lives > 0:\n graphics.reset_ball()\n else:\n graphics.game_over()\n break\n if graphics.you_win():\n break\n vx = graphics.getx()\n vy = graphics.gety()\n graphics.ball.move(vx, vy)\n graphics.boundary()\n graphics.collision()\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "\"\"\"\nstanCode Breakout Project\nAdapted from Eric Roberts's Breakout by\nSonja Johnson-Yu, Kylie Jue, Nick Bowman,\nand Jerry Liao\n\nYOUR DESCRIPTION HERE\n\"\"\"\n\nfrom campy.gui.events.timer import pause\nfrom breakoutgraphics import BreakoutGraphics\n\nFRAME_RATE = 1000 / 120 # 120 frames per second.\nNUM_LIVES = 3\n\n\ndef main():\n graphics = BreakoutGraphics()\n lives = NUM_LIVES # 生命\n\n graphics.window.add(graphics.scoreboard, 0, graphics.window_height) # 計分板\n\n # Add animation loop here!\n while True:\n pause(FRAME_RATE)\n if graphics.ball_fall_down():\n lives -= 1\n if lives > 0:\n graphics.reset_ball()\n else:\n graphics.game_over()\n break\n if graphics.you_win():\n break\n vx = graphics.getx()\n vy = graphics.gety()\n graphics.ball.move(vx, vy)\n graphics.boundary()\n graphics.collision()\n\n\nif __name__ == '__main__':\n main()\n",
"step-ids": [
0,
2,
3,
4,
5
]
}
|
[
0,
2,
3,
4,
5
] |
from django import template
from apps.account.models import User, Follow, RequestFollow
from apps.post.models import Post
register = template.Library()
@register.inclusion_tag('user/user_list.html')
def user_list():
"""show user name list"""
users = User.objects.all()
return {"users": users}
# @register.inclusion_tag('user/following_post_list.html')
# def following_post_list(pk):
# """show user following
# input:pk user
# output: list following
# """
# following = Follow.objects.following(pk)
# posts = Post.objects.filter(author__email__in=following).values('title', 'author__email')
# return {'posts': posts}
# @register.simple_tag()
# def send_request_follow(pk_login_user, pk_other_user):
# """
# Follow the user
# :param pk_login_user:
# :param pk_other_user:
# :return: message
# """
# return RequestFollow.objects.request_following_user(pk_login_user, pk_other_user)
@register.simple_tag()
def accept_request(pk_login_user, pk_other_user):
RequestFollow.objects.accept_request(pk_login_user, pk_other_user)
return "accept request"
@register.simple_tag()
def delete_request(pk_login_user, pk_other_user):
RequestFollow.objects.delete_request(pk_login_user, pk_other_user)
return "delete request"
@register.simple_tag()
def count_followers(pk):
""" count followers user"""
followers = Follow.objects.followers(pk)
return len(followers)
@register.simple_tag()
def count_following(pk):
""" count following user"""
following = Follow.objects.following(pk)
return len(following)
|
normal
|
{
"blob_id": "999c19fd760ffc482a15f5a14e188d416fcc5f21",
"index": 7218,
"step-1": "<mask token>\n\n\n@register.inclusion_tag('user/user_list.html')\ndef user_list():\n \"\"\"show user name list\"\"\"\n users = User.objects.all()\n return {'users': users}\n\n\n@register.simple_tag()\ndef accept_request(pk_login_user, pk_other_user):\n RequestFollow.objects.accept_request(pk_login_user, pk_other_user)\n return 'accept request'\n\n\n<mask token>\n\n\n@register.simple_tag()\ndef count_followers(pk):\n \"\"\" count followers user\"\"\"\n followers = Follow.objects.followers(pk)\n return len(followers)\n\n\n@register.simple_tag()\ndef count_following(pk):\n \"\"\" count following user\"\"\"\n following = Follow.objects.following(pk)\n return len(following)\n",
"step-2": "<mask token>\n\n\n@register.inclusion_tag('user/user_list.html')\ndef user_list():\n \"\"\"show user name list\"\"\"\n users = User.objects.all()\n return {'users': users}\n\n\n@register.simple_tag()\ndef accept_request(pk_login_user, pk_other_user):\n RequestFollow.objects.accept_request(pk_login_user, pk_other_user)\n return 'accept request'\n\n\n@register.simple_tag()\ndef delete_request(pk_login_user, pk_other_user):\n RequestFollow.objects.delete_request(pk_login_user, pk_other_user)\n return 'delete request'\n\n\n@register.simple_tag()\ndef count_followers(pk):\n \"\"\" count followers user\"\"\"\n followers = Follow.objects.followers(pk)\n return len(followers)\n\n\n@register.simple_tag()\ndef count_following(pk):\n \"\"\" count following user\"\"\"\n following = Follow.objects.following(pk)\n return len(following)\n",
"step-3": "<mask token>\nregister = template.Library()\n\n\n@register.inclusion_tag('user/user_list.html')\ndef user_list():\n \"\"\"show user name list\"\"\"\n users = User.objects.all()\n return {'users': users}\n\n\n@register.simple_tag()\ndef accept_request(pk_login_user, pk_other_user):\n RequestFollow.objects.accept_request(pk_login_user, pk_other_user)\n return 'accept request'\n\n\n@register.simple_tag()\ndef delete_request(pk_login_user, pk_other_user):\n RequestFollow.objects.delete_request(pk_login_user, pk_other_user)\n return 'delete request'\n\n\n@register.simple_tag()\ndef count_followers(pk):\n \"\"\" count followers user\"\"\"\n followers = Follow.objects.followers(pk)\n return len(followers)\n\n\n@register.simple_tag()\ndef count_following(pk):\n \"\"\" count following user\"\"\"\n following = Follow.objects.following(pk)\n return len(following)\n",
"step-4": "from django import template\nfrom apps.account.models import User, Follow, RequestFollow\nfrom apps.post.models import Post\nregister = template.Library()\n\n\n@register.inclusion_tag('user/user_list.html')\ndef user_list():\n \"\"\"show user name list\"\"\"\n users = User.objects.all()\n return {'users': users}\n\n\n@register.simple_tag()\ndef accept_request(pk_login_user, pk_other_user):\n RequestFollow.objects.accept_request(pk_login_user, pk_other_user)\n return 'accept request'\n\n\n@register.simple_tag()\ndef delete_request(pk_login_user, pk_other_user):\n RequestFollow.objects.delete_request(pk_login_user, pk_other_user)\n return 'delete request'\n\n\n@register.simple_tag()\ndef count_followers(pk):\n \"\"\" count followers user\"\"\"\n followers = Follow.objects.followers(pk)\n return len(followers)\n\n\n@register.simple_tag()\ndef count_following(pk):\n \"\"\" count following user\"\"\"\n following = Follow.objects.following(pk)\n return len(following)\n",
"step-5": "from django import template\n\nfrom apps.account.models import User, Follow, RequestFollow\nfrom apps.post.models import Post\n\nregister = template.Library()\n\n\n@register.inclusion_tag('user/user_list.html')\ndef user_list():\n \"\"\"show user name list\"\"\"\n users = User.objects.all()\n return {\"users\": users}\n\n\n# @register.inclusion_tag('user/following_post_list.html')\n# def following_post_list(pk):\n# \"\"\"show user following\n# input:pk user\n# output: list following\n# \"\"\"\n# following = Follow.objects.following(pk)\n# posts = Post.objects.filter(author__email__in=following).values('title', 'author__email')\n# return {'posts': posts}\n\n\n# @register.simple_tag()\n# def send_request_follow(pk_login_user, pk_other_user):\n# \"\"\"\n# Follow the user\n# :param pk_login_user:\n# :param pk_other_user:\n# :return: message\n# \"\"\"\n# return RequestFollow.objects.request_following_user(pk_login_user, pk_other_user)\n\n\n@register.simple_tag()\ndef accept_request(pk_login_user, pk_other_user):\n RequestFollow.objects.accept_request(pk_login_user, pk_other_user)\n return \"accept request\"\n\n\n@register.simple_tag()\ndef delete_request(pk_login_user, pk_other_user):\n RequestFollow.objects.delete_request(pk_login_user, pk_other_user)\n return \"delete request\"\n\n\n@register.simple_tag()\ndef count_followers(pk):\n \"\"\" count followers user\"\"\"\n\n followers = Follow.objects.followers(pk)\n return len(followers)\n\n\n@register.simple_tag()\ndef count_following(pk):\n \"\"\" count following user\"\"\"\n following = Follow.objects.following(pk)\n return len(following)\n",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
<|reserved_special_token_0|>
class TestTaniHub:
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class TestTaniHub:
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def test_tanihub_number_2(self):
self.driver.get('http://timvroom.com/selenium/playground/')
title_page = self.driver.title
answer_box_1 = self.driver.find_element_by_id('answer1')
answer_box_1.send_keys(title_page)
name_txt_box = self.driver.find_element_by_id('name')
name_txt_box.send_keys('Kilgore Trout')
occupation_dropdown = self.driver.find_element_by_id('occupation')
Select(occupation_dropdown).select_by_value('scifiauthor')
list_blue_box = self.driver.find_elements_by_class_name('bluebox')
answer_box_4 = self.driver.find_element_by_id('answer4')
answer_box_4.send_keys(str(len(list_blue_box)))
click_me_link = self.driver.find_element_by_xpath(
"//a[text()='click me']")
click_me_link.click()
red_box_element = self.driver.find_element_by_id('redbox')
answer_box_6 = self.driver.find_element_by_id('answer6')
answer_box_6.send_keys(str(red_box_element.get_attribute('class')))
self.driver.execute_script('return ran_this_js_function()')
value_script = self.driver.execute_script(
'return got_return_from_js_function()')
answer_box_8 = self.driver.find_element_by_id('answer8')
answer_box_8.send_keys(str(value_script))
wrote_book_rdbtn = self.driver.find_element_by_xpath(
"//input[@type='radio' and @name='wrotebook']")
wrote_book_rdbtn.click()
answer_box_10 = self.driver.find_element_by_id('answer10')
orange_box = self.driver.find_element_by_id('orangebox').location
green_box = self.driver.find_element_by_id('greenbox').location
answer_box_11 = self.driver.find_element_by_id('answer11')
if green_box['y'] > orange_box['y']:
answer_box_11.send_keys('orange')
else:
answer_box_11.send_keys('green')
answer_box_10.send_keys(str(red_box_element.text))
self.driver.set_window_size(850, 650)
answer_box_13 = self.driver.find_element_by_id('answer13')
answer_box_14 = self.driver.find_element_by_id('answer14')
try:
is_here_element = self.driver.find_element_by_id('ishere')
if is_here_element.is_displayed():
answer_box_13.send_keys('yes')
else:
answer_box_13.send_keys('no')
except:
answer_box_13.send_keys('no')
try:
purple_box = self.driver.find_element_by_id('purplebox')
if purple_box.is_displayed():
answer_box_14.send_keys('yes')
else:
answer_box_14.send_keys('no')
except:
answer_box_14.send_keys('no')
click_then_wait_link = self.driver.find_element_by_xpath(
"//a[text()='click then wait']")
click_then_wait_link.click()
WebDriverWait(self.driver, 20).until(expected_conditions.
element_to_be_clickable((By.XPATH,
"//a[text()='click after wait']")))
click_after_wait_link = self.driver.find_element_by_xpath(
"//a[text()='click after wait']")
click_after_wait_link.click()
self.driver.switch_to.alert.accept()
submit_button = self.driver.find_element_by_id('submitbutton')
submit_button.click()
check_results = self.driver.find_element_by_id('checkresults')
check_results.click()
self.driver.quit()
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class TestTaniHub:
driver = webdriver.Chrome(executable_path='/usr/local/bin/chromedriver')
def test_tanihub_number_1(self):
self.driver.get('https://tanihub.com/')
jabodetabek_option = self.driver.find_element_by_xpath(
"//p[text()='Jabodetabek']")
user_image_button = self.driver.find_element_by_xpath(
"//img[@alt='profile']")
time.sleep(5)
jabodetabek_option.click()
time.sleep(2)
assert user_image_button.is_displayed()
user_image_button.click()
email_text_box = self.driver.find_element_by_xpath(
"//input[@type='email' and @id='input-icon-3']")
assert email_text_box.is_displayed()
email_text_box.send_keys('testinguser@mailinator.com')
selanjutnya_msk_btn = self.driver.find_element_by_xpath(
"//button[@type='submit' and @id='Button-2']")
assert selanjutnya_msk_btn.is_enabled()
selanjutnya_msk_btn.click()
time.sleep(2)
password_txt_box = self.driver.find_element_by_xpath(
"//input[@type='password' and @id='input-password-4']")
assert password_txt_box.is_displayed()
password_txt_box.send_keys('admin123')
selanjutnya_msk_btn.click()
search_text_box = self.driver.find_element_by_xpath(
"//input[@id='input-icon-3' and @type='text']")
assert search_text_box.is_displayed()
search_text_box.send_keys('Minyak Goreng Rose Brand 2 L Karton')
search_text_box.send_keys(Keys.ENTER)
time.sleep(5)
search_result_first_cart_button = self.driver.find_element_by_xpath(
"//button[@id='CardProduct-1601' and @type='button']")
search_result_first_cart_button.click()
keranjang_btn = self.driver.find_element_by_xpath(
"//button[@id='Button-2' and @type='button']/span")
assert keranjang_btn.is_displayed()
keranjang_btn.click()
time.sleep(5)
checkout_btn = self.driver.find_element_by_xpath(
"//button[text()='Checkout' and @type='button']")
assert checkout_btn.is_displayed()
checkout_btn.click()
time.sleep(5)
self.driver.quit()
def test_tanihub_number_2(self):
self.driver.get('http://timvroom.com/selenium/playground/')
title_page = self.driver.title
answer_box_1 = self.driver.find_element_by_id('answer1')
answer_box_1.send_keys(title_page)
name_txt_box = self.driver.find_element_by_id('name')
name_txt_box.send_keys('Kilgore Trout')
occupation_dropdown = self.driver.find_element_by_id('occupation')
Select(occupation_dropdown).select_by_value('scifiauthor')
list_blue_box = self.driver.find_elements_by_class_name('bluebox')
answer_box_4 = self.driver.find_element_by_id('answer4')
answer_box_4.send_keys(str(len(list_blue_box)))
click_me_link = self.driver.find_element_by_xpath(
"//a[text()='click me']")
click_me_link.click()
red_box_element = self.driver.find_element_by_id('redbox')
answer_box_6 = self.driver.find_element_by_id('answer6')
answer_box_6.send_keys(str(red_box_element.get_attribute('class')))
self.driver.execute_script('return ran_this_js_function()')
value_script = self.driver.execute_script(
'return got_return_from_js_function()')
answer_box_8 = self.driver.find_element_by_id('answer8')
answer_box_8.send_keys(str(value_script))
wrote_book_rdbtn = self.driver.find_element_by_xpath(
"//input[@type='radio' and @name='wrotebook']")
wrote_book_rdbtn.click()
answer_box_10 = self.driver.find_element_by_id('answer10')
orange_box = self.driver.find_element_by_id('orangebox').location
green_box = self.driver.find_element_by_id('greenbox').location
answer_box_11 = self.driver.find_element_by_id('answer11')
if green_box['y'] > orange_box['y']:
answer_box_11.send_keys('orange')
else:
answer_box_11.send_keys('green')
answer_box_10.send_keys(str(red_box_element.text))
self.driver.set_window_size(850, 650)
answer_box_13 = self.driver.find_element_by_id('answer13')
answer_box_14 = self.driver.find_element_by_id('answer14')
try:
is_here_element = self.driver.find_element_by_id('ishere')
if is_here_element.is_displayed():
answer_box_13.send_keys('yes')
else:
answer_box_13.send_keys('no')
except:
answer_box_13.send_keys('no')
try:
purple_box = self.driver.find_element_by_id('purplebox')
if purple_box.is_displayed():
answer_box_14.send_keys('yes')
else:
answer_box_14.send_keys('no')
except:
answer_box_14.send_keys('no')
click_then_wait_link = self.driver.find_element_by_xpath(
"//a[text()='click then wait']")
click_then_wait_link.click()
WebDriverWait(self.driver, 20).until(expected_conditions.
element_to_be_clickable((By.XPATH,
"//a[text()='click after wait']")))
click_after_wait_link = self.driver.find_element_by_xpath(
"//a[text()='click after wait']")
click_after_wait_link.click()
self.driver.switch_to.alert.accept()
submit_button = self.driver.find_element_by_id('submitbutton')
submit_button.click()
check_results = self.driver.find_element_by_id('checkresults')
check_results.click()
self.driver.quit()
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class TestTaniHub:
driver = webdriver.Chrome(executable_path='/usr/local/bin/chromedriver')
def test_tanihub_number_1(self):
self.driver.get('https://tanihub.com/')
jabodetabek_option = self.driver.find_element_by_xpath(
"//p[text()='Jabodetabek']")
user_image_button = self.driver.find_element_by_xpath(
"//img[@alt='profile']")
time.sleep(5)
jabodetabek_option.click()
time.sleep(2)
assert user_image_button.is_displayed()
user_image_button.click()
email_text_box = self.driver.find_element_by_xpath(
"//input[@type='email' and @id='input-icon-3']")
assert email_text_box.is_displayed()
email_text_box.send_keys('testinguser@mailinator.com')
selanjutnya_msk_btn = self.driver.find_element_by_xpath(
"//button[@type='submit' and @id='Button-2']")
assert selanjutnya_msk_btn.is_enabled()
selanjutnya_msk_btn.click()
time.sleep(2)
password_txt_box = self.driver.find_element_by_xpath(
"//input[@type='password' and @id='input-password-4']")
assert password_txt_box.is_displayed()
password_txt_box.send_keys('admin123')
selanjutnya_msk_btn.click()
search_text_box = self.driver.find_element_by_xpath(
"//input[@id='input-icon-3' and @type='text']")
assert search_text_box.is_displayed()
search_text_box.send_keys('Minyak Goreng Rose Brand 2 L Karton')
search_text_box.send_keys(Keys.ENTER)
time.sleep(5)
search_result_first_cart_button = self.driver.find_element_by_xpath(
"//button[@id='CardProduct-1601' and @type='button']")
search_result_first_cart_button.click()
keranjang_btn = self.driver.find_element_by_xpath(
"//button[@id='Button-2' and @type='button']/span")
assert keranjang_btn.is_displayed()
keranjang_btn.click()
time.sleep(5)
checkout_btn = self.driver.find_element_by_xpath(
"//button[text()='Checkout' and @type='button']")
assert checkout_btn.is_displayed()
checkout_btn.click()
time.sleep(5)
self.driver.quit()
def test_tanihub_number_2(self):
self.driver.get('http://timvroom.com/selenium/playground/')
title_page = self.driver.title
answer_box_1 = self.driver.find_element_by_id('answer1')
answer_box_1.send_keys(title_page)
name_txt_box = self.driver.find_element_by_id('name')
name_txt_box.send_keys('Kilgore Trout')
occupation_dropdown = self.driver.find_element_by_id('occupation')
Select(occupation_dropdown).select_by_value('scifiauthor')
list_blue_box = self.driver.find_elements_by_class_name('bluebox')
answer_box_4 = self.driver.find_element_by_id('answer4')
answer_box_4.send_keys(str(len(list_blue_box)))
click_me_link = self.driver.find_element_by_xpath(
"//a[text()='click me']")
click_me_link.click()
red_box_element = self.driver.find_element_by_id('redbox')
answer_box_6 = self.driver.find_element_by_id('answer6')
answer_box_6.send_keys(str(red_box_element.get_attribute('class')))
self.driver.execute_script('return ran_this_js_function()')
value_script = self.driver.execute_script(
'return got_return_from_js_function()')
answer_box_8 = self.driver.find_element_by_id('answer8')
answer_box_8.send_keys(str(value_script))
wrote_book_rdbtn = self.driver.find_element_by_xpath(
"//input[@type='radio' and @name='wrotebook']")
wrote_book_rdbtn.click()
answer_box_10 = self.driver.find_element_by_id('answer10')
orange_box = self.driver.find_element_by_id('orangebox').location
green_box = self.driver.find_element_by_id('greenbox').location
answer_box_11 = self.driver.find_element_by_id('answer11')
if green_box['y'] > orange_box['y']:
answer_box_11.send_keys('orange')
else:
answer_box_11.send_keys('green')
answer_box_10.send_keys(str(red_box_element.text))
self.driver.set_window_size(850, 650)
answer_box_13 = self.driver.find_element_by_id('answer13')
answer_box_14 = self.driver.find_element_by_id('answer14')
try:
is_here_element = self.driver.find_element_by_id('ishere')
if is_here_element.is_displayed():
answer_box_13.send_keys('yes')
else:
answer_box_13.send_keys('no')
except:
answer_box_13.send_keys('no')
try:
purple_box = self.driver.find_element_by_id('purplebox')
if purple_box.is_displayed():
answer_box_14.send_keys('yes')
else:
answer_box_14.send_keys('no')
except:
answer_box_14.send_keys('no')
click_then_wait_link = self.driver.find_element_by_xpath(
"//a[text()='click then wait']")
click_then_wait_link.click()
WebDriverWait(self.driver, 20).until(expected_conditions.
element_to_be_clickable((By.XPATH,
"//a[text()='click after wait']")))
click_after_wait_link = self.driver.find_element_by_xpath(
"//a[text()='click after wait']")
click_after_wait_link.click()
self.driver.switch_to.alert.accept()
submit_button = self.driver.find_element_by_id('submitbutton')
submit_button.click()
check_results = self.driver.find_element_by_id('checkresults')
check_results.click()
self.driver.quit()
def test_selenium_number_1():
TestTaniHub().test_tanihub_number_1()
def test_selenium_number_2():
TestTaniHub().test_tanihub_number_2()
<|reserved_special_token_1|>
import time
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support.select import Select
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.support import expected_conditions
class TestTaniHub():
    """End-to-end browser checks for tanihub.com and the selenium playground page.

    The driver is started lazily, one fresh Chrome session per test.  The
    original code built the driver as a class attribute, which launched a
    browser as a side effect of importing this module and shared one session
    across both tests, so the second test ran against a driver the first
    test had already quit().
    """
    driver = None  # created by _start_driver() at the start of each test

    def _start_driver(self):
        # One fresh browser session per test run.
        self.driver = webdriver.Chrome(executable_path='/usr/local/bin/chromedriver')

    def test_tanihub_number_1(self):
        """Log in to tanihub.com, search a product, add it to the cart and start checkout."""
        self._start_driver()
        self.driver.get('https://tanihub.com/')
        jabodetabek_option = self.driver.find_element_by_xpath("//p[text()='Jabodetabek']")
        user_image_button = self.driver.find_element_by_xpath("//img[@alt='profile']")
        time.sleep(5)  # wait for the region-selection overlay to settle
        jabodetabek_option.click()
        time.sleep(2)
        assert user_image_button.is_displayed()
        user_image_button.click()
        email_text_box = self.driver.find_element_by_xpath("//input[@type='email' and @id='input-icon-3']")
        assert email_text_box.is_displayed()
        email_text_box.send_keys('testinguser@mailinator.com')
        selanjutnya_msk_btn = self.driver.find_element_by_xpath("//button[@type='submit' and @id='Button-2']")
        assert selanjutnya_msk_btn.is_enabled()
        selanjutnya_msk_btn.click()
        time.sleep(2)
        password_txt_box = self.driver.find_element_by_xpath("//input[@type='password' and @id='input-password-4']")
        assert password_txt_box.is_displayed()
        password_txt_box.send_keys('admin123')
        selanjutnya_msk_btn.click()
        search_text_box = self.driver.find_element_by_xpath("//input[@id='input-icon-3' and @type='text']")
        assert search_text_box.is_displayed()
        search_text_box.send_keys('Minyak Goreng Rose Brand 2 L Karton')
        search_text_box.send_keys(Keys.ENTER)
        time.sleep(5)  # wait for search results to render
        search_result_first_cart_button = self.driver.find_element_by_xpath("//button[@id='CardProduct-1601' and @type='button']")
        search_result_first_cart_button.click()
        keranjang_btn = self.driver.find_element_by_xpath("//button[@id='Button-2' and @type='button']/span")
        assert keranjang_btn.is_displayed()
        keranjang_btn.click()
        time.sleep(5)
        checkout_btn = self.driver.find_element_by_xpath("//button[text()='Checkout' and @type='button']")
        assert checkout_btn.is_displayed()
        checkout_btn.click()
        time.sleep(5)
        self.driver.quit()

    def test_tanihub_number_2(self):
        """Answer the questions on timvroom.com's selenium playground and submit."""
        self._start_driver()
        self.driver.get('http://timvroom.com/selenium/playground/')
        title_page = self.driver.title
        answer_box_1 = self.driver.find_element_by_id("answer1")
        answer_box_1.send_keys(title_page)
        name_txt_box = self.driver.find_element_by_id("name")
        name_txt_box.send_keys('Kilgore Trout')
        occupation_dropdown = self.driver.find_element_by_id("occupation")
        Select(occupation_dropdown).select_by_value('scifiauthor')
        list_blue_box = self.driver.find_elements_by_class_name("bluebox")
        answer_box_4 = self.driver.find_element_by_id("answer4")
        answer_box_4.send_keys(str(len(list_blue_box)))
        click_me_link = self.driver.find_element_by_xpath("//a[text()='click me']")
        click_me_link.click()
        red_box_element = self.driver.find_element_by_id("redbox")
        answer_box_6 = self.driver.find_element_by_id("answer6")
        answer_box_6.send_keys(str(red_box_element.get_attribute("class")))
        self.driver.execute_script('return ran_this_js_function()')
        value_script = self.driver.execute_script('return got_return_from_js_function()')
        answer_box_8 = self.driver.find_element_by_id("answer8")
        answer_box_8.send_keys(str(value_script))
        wrote_book_rdbtn = self.driver.find_element_by_xpath("//input[@type='radio' and @name='wrotebook']")
        wrote_book_rdbtn.click()
        answer_box_10 = self.driver.find_element_by_id("answer10")
        orange_box = self.driver.find_element_by_id("orangebox").location
        green_box = self.driver.find_element_by_id("greenbox").location
        answer_box_11 = self.driver.find_element_by_id("answer11")
        # The box with the smaller y coordinate sits higher on the page.
        if green_box['y'] > orange_box['y']:
            answer_box_11.send_keys('orange')
        else:
            answer_box_11.send_keys('green')
        answer_box_10.send_keys(str(red_box_element.text))
        self.driver.set_window_size(850, 650)
        answer_box_13 = self.driver.find_element_by_id("answer13")
        answer_box_14 = self.driver.find_element_by_id("answer14")
        try:
            is_here_element = self.driver.find_element_by_id("ishere")
            answer_box_13.send_keys('yes' if is_here_element.is_displayed() else 'no')
        except Exception:  # element absent entirely (was a bare except)
            answer_box_13.send_keys('no')
        try:
            purple_box = self.driver.find_element_by_id("purplebox")
            answer_box_14.send_keys('yes' if purple_box.is_displayed() else 'no')
        except Exception:  # element absent entirely (was a bare except)
            answer_box_14.send_keys('no')
        click_then_wait_link = self.driver.find_element_by_xpath("//a[text()='click then wait']")
        click_then_wait_link.click()
        # Explicit wait: the second link only becomes clickable after a delay.
        WebDriverWait(self.driver, 20).until(expected_conditions.element_to_be_clickable((By.XPATH, "//a[text()='click after wait']")))
        click_after_wait_link = self.driver.find_element_by_xpath("//a[text()='click after wait']")
        click_after_wait_link.click()
        self.driver.switch_to.alert.accept()
        submit_button = self.driver.find_element_by_id("submitbutton")
        submit_button.click()
        check_results = self.driver.find_element_by_id("checkresults")
        check_results.click()
        self.driver.quit()
def test_selenium_number_1():
    # Pytest entry point: delegates to the tanihub.com end-to-end flow.
    TestTaniHub().test_tanihub_number_1()
def test_selenium_number_2():
    # Pytest entry point: delegates to the selenium-playground flow.
    TestTaniHub().test_tanihub_number_2()
|
flexible
|
{
"blob_id": "777dc2056443f0404ccb75d570f2ddc3a3aa747b",
"index": 6669,
"step-1": "<mask token>\n\n\nclass TestTaniHub:\n <mask token>\n <mask token>\n <mask token>\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass TestTaniHub:\n <mask token>\n <mask token>\n\n def test_tanihub_number_2(self):\n self.driver.get('http://timvroom.com/selenium/playground/')\n title_page = self.driver.title\n answer_box_1 = self.driver.find_element_by_id('answer1')\n answer_box_1.send_keys(title_page)\n name_txt_box = self.driver.find_element_by_id('name')\n name_txt_box.send_keys('Kilgore Trout')\n occupation_dropdown = self.driver.find_element_by_id('occupation')\n Select(occupation_dropdown).select_by_value('scifiauthor')\n list_blue_box = self.driver.find_elements_by_class_name('bluebox')\n answer_box_4 = self.driver.find_element_by_id('answer4')\n answer_box_4.send_keys(str(len(list_blue_box)))\n click_me_link = self.driver.find_element_by_xpath(\n \"//a[text()='click me']\")\n click_me_link.click()\n red_box_element = self.driver.find_element_by_id('redbox')\n answer_box_6 = self.driver.find_element_by_id('answer6')\n answer_box_6.send_keys(str(red_box_element.get_attribute('class')))\n self.driver.execute_script('return ran_this_js_function()')\n value_script = self.driver.execute_script(\n 'return got_return_from_js_function()')\n answer_box_8 = self.driver.find_element_by_id('answer8')\n answer_box_8.send_keys(str(value_script))\n wrote_book_rdbtn = self.driver.find_element_by_xpath(\n \"//input[@type='radio' and @name='wrotebook']\")\n wrote_book_rdbtn.click()\n answer_box_10 = self.driver.find_element_by_id('answer10')\n orange_box = self.driver.find_element_by_id('orangebox').location\n green_box = self.driver.find_element_by_id('greenbox').location\n answer_box_11 = self.driver.find_element_by_id('answer11')\n if green_box['y'] > orange_box['y']:\n answer_box_11.send_keys('orange')\n else:\n answer_box_11.send_keys('green')\n answer_box_10.send_keys(str(red_box_element.text))\n self.driver.set_window_size(850, 650)\n answer_box_13 = self.driver.find_element_by_id('answer13')\n answer_box_14 = self.driver.find_element_by_id('answer14')\n try:\n 
is_here_element = self.driver.find_element_by_id('ishere')\n if is_here_element.is_displayed():\n answer_box_13.send_keys('yes')\n else:\n answer_box_13.send_keys('no')\n except:\n answer_box_13.send_keys('no')\n try:\n purple_box = self.driver.find_element_by_id('purplebox')\n if purple_box.is_displayed():\n answer_box_14.send_keys('yes')\n else:\n answer_box_14.send_keys('no')\n except:\n answer_box_14.send_keys('no')\n click_then_wait_link = self.driver.find_element_by_xpath(\n \"//a[text()='click then wait']\")\n click_then_wait_link.click()\n WebDriverWait(self.driver, 20).until(expected_conditions.\n element_to_be_clickable((By.XPATH,\n \"//a[text()='click after wait']\")))\n click_after_wait_link = self.driver.find_element_by_xpath(\n \"//a[text()='click after wait']\")\n click_after_wait_link.click()\n self.driver.switch_to.alert.accept()\n submit_button = self.driver.find_element_by_id('submitbutton')\n submit_button.click()\n check_results = self.driver.find_element_by_id('checkresults')\n check_results.click()\n self.driver.quit()\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass TestTaniHub:\n driver = webdriver.Chrome(executable_path='/usr/local/bin/chromedriver')\n\n def test_tanihub_number_1(self):\n self.driver.get('https://tanihub.com/')\n jabodetabek_option = self.driver.find_element_by_xpath(\n \"//p[text()='Jabodetabek']\")\n user_image_button = self.driver.find_element_by_xpath(\n \"//img[@alt='profile']\")\n time.sleep(5)\n jabodetabek_option.click()\n time.sleep(2)\n assert user_image_button.is_displayed()\n user_image_button.click()\n email_text_box = self.driver.find_element_by_xpath(\n \"//input[@type='email' and @id='input-icon-3']\")\n assert email_text_box.is_displayed()\n email_text_box.send_keys('testinguser@mailinator.com')\n selanjutnya_msk_btn = self.driver.find_element_by_xpath(\n \"//button[@type='submit' and @id='Button-2']\")\n assert selanjutnya_msk_btn.is_enabled()\n selanjutnya_msk_btn.click()\n time.sleep(2)\n password_txt_box = self.driver.find_element_by_xpath(\n \"//input[@type='password' and @id='input-password-4']\")\n assert password_txt_box.is_displayed()\n password_txt_box.send_keys('admin123')\n selanjutnya_msk_btn.click()\n search_text_box = self.driver.find_element_by_xpath(\n \"//input[@id='input-icon-3' and @type='text']\")\n assert search_text_box.is_displayed()\n search_text_box.send_keys('Minyak Goreng Rose Brand 2 L Karton')\n search_text_box.send_keys(Keys.ENTER)\n time.sleep(5)\n search_result_first_cart_button = self.driver.find_element_by_xpath(\n \"//button[@id='CardProduct-1601' and @type='button']\")\n search_result_first_cart_button.click()\n keranjang_btn = self.driver.find_element_by_xpath(\n \"//button[@id='Button-2' and @type='button']/span\")\n assert keranjang_btn.is_displayed()\n keranjang_btn.click()\n time.sleep(5)\n checkout_btn = self.driver.find_element_by_xpath(\n \"//button[text()='Checkout' and @type='button']\")\n assert checkout_btn.is_displayed()\n checkout_btn.click()\n time.sleep(5)\n self.driver.quit()\n\n def 
test_tanihub_number_2(self):\n self.driver.get('http://timvroom.com/selenium/playground/')\n title_page = self.driver.title\n answer_box_1 = self.driver.find_element_by_id('answer1')\n answer_box_1.send_keys(title_page)\n name_txt_box = self.driver.find_element_by_id('name')\n name_txt_box.send_keys('Kilgore Trout')\n occupation_dropdown = self.driver.find_element_by_id('occupation')\n Select(occupation_dropdown).select_by_value('scifiauthor')\n list_blue_box = self.driver.find_elements_by_class_name('bluebox')\n answer_box_4 = self.driver.find_element_by_id('answer4')\n answer_box_4.send_keys(str(len(list_blue_box)))\n click_me_link = self.driver.find_element_by_xpath(\n \"//a[text()='click me']\")\n click_me_link.click()\n red_box_element = self.driver.find_element_by_id('redbox')\n answer_box_6 = self.driver.find_element_by_id('answer6')\n answer_box_6.send_keys(str(red_box_element.get_attribute('class')))\n self.driver.execute_script('return ran_this_js_function()')\n value_script = self.driver.execute_script(\n 'return got_return_from_js_function()')\n answer_box_8 = self.driver.find_element_by_id('answer8')\n answer_box_8.send_keys(str(value_script))\n wrote_book_rdbtn = self.driver.find_element_by_xpath(\n \"//input[@type='radio' and @name='wrotebook']\")\n wrote_book_rdbtn.click()\n answer_box_10 = self.driver.find_element_by_id('answer10')\n orange_box = self.driver.find_element_by_id('orangebox').location\n green_box = self.driver.find_element_by_id('greenbox').location\n answer_box_11 = self.driver.find_element_by_id('answer11')\n if green_box['y'] > orange_box['y']:\n answer_box_11.send_keys('orange')\n else:\n answer_box_11.send_keys('green')\n answer_box_10.send_keys(str(red_box_element.text))\n self.driver.set_window_size(850, 650)\n answer_box_13 = self.driver.find_element_by_id('answer13')\n answer_box_14 = self.driver.find_element_by_id('answer14')\n try:\n is_here_element = self.driver.find_element_by_id('ishere')\n if 
is_here_element.is_displayed():\n answer_box_13.send_keys('yes')\n else:\n answer_box_13.send_keys('no')\n except:\n answer_box_13.send_keys('no')\n try:\n purple_box = self.driver.find_element_by_id('purplebox')\n if purple_box.is_displayed():\n answer_box_14.send_keys('yes')\n else:\n answer_box_14.send_keys('no')\n except:\n answer_box_14.send_keys('no')\n click_then_wait_link = self.driver.find_element_by_xpath(\n \"//a[text()='click then wait']\")\n click_then_wait_link.click()\n WebDriverWait(self.driver, 20).until(expected_conditions.\n element_to_be_clickable((By.XPATH,\n \"//a[text()='click after wait']\")))\n click_after_wait_link = self.driver.find_element_by_xpath(\n \"//a[text()='click after wait']\")\n click_after_wait_link.click()\n self.driver.switch_to.alert.accept()\n submit_button = self.driver.find_element_by_id('submitbutton')\n submit_button.click()\n check_results = self.driver.find_element_by_id('checkresults')\n check_results.click()\n self.driver.quit()\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\nclass TestTaniHub:\n driver = webdriver.Chrome(executable_path='/usr/local/bin/chromedriver')\n\n def test_tanihub_number_1(self):\n self.driver.get('https://tanihub.com/')\n jabodetabek_option = self.driver.find_element_by_xpath(\n \"//p[text()='Jabodetabek']\")\n user_image_button = self.driver.find_element_by_xpath(\n \"//img[@alt='profile']\")\n time.sleep(5)\n jabodetabek_option.click()\n time.sleep(2)\n assert user_image_button.is_displayed()\n user_image_button.click()\n email_text_box = self.driver.find_element_by_xpath(\n \"//input[@type='email' and @id='input-icon-3']\")\n assert email_text_box.is_displayed()\n email_text_box.send_keys('testinguser@mailinator.com')\n selanjutnya_msk_btn = self.driver.find_element_by_xpath(\n \"//button[@type='submit' and @id='Button-2']\")\n assert selanjutnya_msk_btn.is_enabled()\n selanjutnya_msk_btn.click()\n time.sleep(2)\n password_txt_box = self.driver.find_element_by_xpath(\n \"//input[@type='password' and @id='input-password-4']\")\n assert password_txt_box.is_displayed()\n password_txt_box.send_keys('admin123')\n selanjutnya_msk_btn.click()\n search_text_box = self.driver.find_element_by_xpath(\n \"//input[@id='input-icon-3' and @type='text']\")\n assert search_text_box.is_displayed()\n search_text_box.send_keys('Minyak Goreng Rose Brand 2 L Karton')\n search_text_box.send_keys(Keys.ENTER)\n time.sleep(5)\n search_result_first_cart_button = self.driver.find_element_by_xpath(\n \"//button[@id='CardProduct-1601' and @type='button']\")\n search_result_first_cart_button.click()\n keranjang_btn = self.driver.find_element_by_xpath(\n \"//button[@id='Button-2' and @type='button']/span\")\n assert keranjang_btn.is_displayed()\n keranjang_btn.click()\n time.sleep(5)\n checkout_btn = self.driver.find_element_by_xpath(\n \"//button[text()='Checkout' and @type='button']\")\n assert checkout_btn.is_displayed()\n checkout_btn.click()\n time.sleep(5)\n self.driver.quit()\n\n def 
test_tanihub_number_2(self):\n self.driver.get('http://timvroom.com/selenium/playground/')\n title_page = self.driver.title\n answer_box_1 = self.driver.find_element_by_id('answer1')\n answer_box_1.send_keys(title_page)\n name_txt_box = self.driver.find_element_by_id('name')\n name_txt_box.send_keys('Kilgore Trout')\n occupation_dropdown = self.driver.find_element_by_id('occupation')\n Select(occupation_dropdown).select_by_value('scifiauthor')\n list_blue_box = self.driver.find_elements_by_class_name('bluebox')\n answer_box_4 = self.driver.find_element_by_id('answer4')\n answer_box_4.send_keys(str(len(list_blue_box)))\n click_me_link = self.driver.find_element_by_xpath(\n \"//a[text()='click me']\")\n click_me_link.click()\n red_box_element = self.driver.find_element_by_id('redbox')\n answer_box_6 = self.driver.find_element_by_id('answer6')\n answer_box_6.send_keys(str(red_box_element.get_attribute('class')))\n self.driver.execute_script('return ran_this_js_function()')\n value_script = self.driver.execute_script(\n 'return got_return_from_js_function()')\n answer_box_8 = self.driver.find_element_by_id('answer8')\n answer_box_8.send_keys(str(value_script))\n wrote_book_rdbtn = self.driver.find_element_by_xpath(\n \"//input[@type='radio' and @name='wrotebook']\")\n wrote_book_rdbtn.click()\n answer_box_10 = self.driver.find_element_by_id('answer10')\n orange_box = self.driver.find_element_by_id('orangebox').location\n green_box = self.driver.find_element_by_id('greenbox').location\n answer_box_11 = self.driver.find_element_by_id('answer11')\n if green_box['y'] > orange_box['y']:\n answer_box_11.send_keys('orange')\n else:\n answer_box_11.send_keys('green')\n answer_box_10.send_keys(str(red_box_element.text))\n self.driver.set_window_size(850, 650)\n answer_box_13 = self.driver.find_element_by_id('answer13')\n answer_box_14 = self.driver.find_element_by_id('answer14')\n try:\n is_here_element = self.driver.find_element_by_id('ishere')\n if 
is_here_element.is_displayed():\n answer_box_13.send_keys('yes')\n else:\n answer_box_13.send_keys('no')\n except:\n answer_box_13.send_keys('no')\n try:\n purple_box = self.driver.find_element_by_id('purplebox')\n if purple_box.is_displayed():\n answer_box_14.send_keys('yes')\n else:\n answer_box_14.send_keys('no')\n except:\n answer_box_14.send_keys('no')\n click_then_wait_link = self.driver.find_element_by_xpath(\n \"//a[text()='click then wait']\")\n click_then_wait_link.click()\n WebDriverWait(self.driver, 20).until(expected_conditions.\n element_to_be_clickable((By.XPATH,\n \"//a[text()='click after wait']\")))\n click_after_wait_link = self.driver.find_element_by_xpath(\n \"//a[text()='click after wait']\")\n click_after_wait_link.click()\n self.driver.switch_to.alert.accept()\n submit_button = self.driver.find_element_by_id('submitbutton')\n submit_button.click()\n check_results = self.driver.find_element_by_id('checkresults')\n check_results.click()\n self.driver.quit()\n\n\ndef test_selenium_number_1():\n TestTaniHub().test_tanihub_number_1()\n\n\ndef test_selenium_number_2():\n TestTaniHub().test_tanihub_number_2()\n",
"step-5": "import time\n\nfrom selenium import webdriver\nfrom selenium.webdriver.common.by import By\nfrom selenium.webdriver.common.keys import Keys\nfrom selenium.webdriver.support.select import Select\nfrom selenium.webdriver.support.wait import WebDriverWait\nfrom selenium.webdriver.support import expected_conditions\n\n\nclass TestTaniHub():\n driver = webdriver.Chrome(executable_path='/usr/local/bin/chromedriver')\n\n def test_tanihub_number_1(self):\n self.driver.get('https://tanihub.com/')\n jabodetabek_option = self.driver.find_element_by_xpath(\"//p[text()='Jabodetabek']\")\n user_image_button = self.driver.find_element_by_xpath(\"//img[@alt='profile']\")\n time.sleep(5)\n jabodetabek_option.click()\n time.sleep(2)\n assert user_image_button.is_displayed()\n user_image_button.click()\n email_text_box = self.driver.find_element_by_xpath(\"//input[@type='email' and @id='input-icon-3']\")\n assert email_text_box.is_displayed()\n email_text_box.send_keys('testinguser@mailinator.com')\n selanjutnya_msk_btn = self.driver.find_element_by_xpath(\"//button[@type='submit' and @id='Button-2']\")\n assert selanjutnya_msk_btn.is_enabled()\n selanjutnya_msk_btn.click()\n time.sleep(2)\n password_txt_box = self.driver.find_element_by_xpath(\"//input[@type='password' and @id='input-password-4']\")\n assert password_txt_box.is_displayed()\n password_txt_box.send_keys('admin123')\n selanjutnya_msk_btn.click()\n search_text_box = self.driver.find_element_by_xpath(\"//input[@id='input-icon-3' and @type='text']\")\n assert search_text_box.is_displayed()\n search_text_box.send_keys('Minyak Goreng Rose Brand 2 L Karton')\n search_text_box.send_keys(Keys.ENTER)\n time.sleep(5)\n search_result_first_cart_button = self.driver.find_element_by_xpath(\"//button[@id='CardProduct-1601' and @type='button']\")\n # assert search_result_first_cart_button.is_displayed()\n search_result_first_cart_button.click()\n keranjang_btn = self.driver.find_element_by_xpath(\"//button[@id='Button-2' 
and @type='button']/span\")\n assert keranjang_btn.is_displayed()\n keranjang_btn.click()\n time.sleep(5)\n checkout_btn = self.driver.find_element_by_xpath(\"//button[text()='Checkout' and @type='button']\")\n assert checkout_btn.is_displayed()\n checkout_btn.click()\n time.sleep(5)\n self.driver.quit()\n\n def test_tanihub_number_2(self):\n self.driver.get('http://timvroom.com/selenium/playground/')\n title_page = self.driver.title\n answer_box_1 = self.driver.find_element_by_id(\"answer1\")\n answer_box_1.send_keys(title_page)\n name_txt_box = self.driver.find_element_by_id(\"name\")\n name_txt_box.send_keys('Kilgore Trout')\n occupation_dropdown = self.driver.find_element_by_id(\"occupation\")\n Select(occupation_dropdown).select_by_value('scifiauthor')\n list_blue_box = self.driver.find_elements_by_class_name(\"bluebox\")\n answer_box_4 = self.driver.find_element_by_id(\"answer4\")\n answer_box_4.send_keys(str(len(list_blue_box)))\n click_me_link = self.driver.find_element_by_xpath(\"//a[text()='click me']\")\n click_me_link.click()\n red_box_element = self.driver.find_element_by_id(\"redbox\")\n answer_box_6 = self.driver.find_element_by_id(\"answer6\")\n answer_box_6.send_keys(str(red_box_element.get_attribute(\"class\")))\n self.driver.execute_script('return ran_this_js_function()')\n value_script = self.driver.execute_script('return got_return_from_js_function()')\n answer_box_8 = self.driver.find_element_by_id(\"answer8\")\n answer_box_8.send_keys(str(value_script))\n wrote_book_rdbtn = self.driver.find_element_by_xpath(\"//input[@type='radio' and @name='wrotebook']\")\n wrote_book_rdbtn.click()\n answer_box_10 = self.driver.find_element_by_id(\"answer10\")\n orange_box = self.driver.find_element_by_id(\"orangebox\").location\n green_box = self.driver.find_element_by_id(\"greenbox\").location\n answer_box_11 = self.driver.find_element_by_id(\"answer11\")\n if green_box['y'] > orange_box['y']:\n answer_box_11.send_keys('orange')\n else:\n 
answer_box_11.send_keys('green')\n answer_box_10.send_keys(str(red_box_element.text))\n self.driver.set_window_size(850, 650)\n answer_box_13 = self.driver.find_element_by_id(\"answer13\")\n answer_box_14 = self.driver.find_element_by_id(\"answer14\")\n try:\n is_here_element = self.driver.find_element_by_id(\"ishere\")\n if is_here_element.is_displayed():\n answer_box_13.send_keys('yes')\n else:\n answer_box_13.send_keys('no')\n except:\n answer_box_13.send_keys('no')\n try:\n purple_box = self.driver.find_element_by_id(\"purplebox\")\n if purple_box.is_displayed():\n answer_box_14.send_keys('yes')\n else:\n answer_box_14.send_keys('no')\n except:\n answer_box_14.send_keys('no')\n click_then_wait_link = self.driver.find_element_by_xpath(\"//a[text()='click then wait']\")\n click_then_wait_link.click()\n WebDriverWait(self.driver, 20).until(expected_conditions.element_to_be_clickable((By.XPATH, \"//a[text()='click after wait']\")))\n click_after_wait_link = self.driver.find_element_by_xpath(\"//a[text()='click after wait']\")\n click_after_wait_link.click()\n self.driver.switch_to.alert.accept()\n submit_button = self.driver.find_element_by_id(\"submitbutton\")\n submit_button.click()\n check_results = self.driver.find_element_by_id(\"checkresults\")\n check_results.click()\n self.driver.quit()\n\ndef test_selenium_number_1():\n TestTaniHub().test_tanihub_number_1()\n\ndef test_selenium_number_2():\n TestTaniHub().test_tanihub_number_2()",
"step-ids": [
1,
2,
4,
6,
8
]
}
|
[
1,
2,
4,
6,
8
] |
from django.contrib.auth.mixins import LoginRequiredMixin
from django.core.urlresolvers import reverse_lazy, reverse
from django.db.models import Q
from django.http import Http404, HttpResponseRedirect
from django.shortcuts import render, redirect
from django.views.generic import ListView, DetailView, CreateView, UpdateView, DeleteView

from guardian.shortcuts import get_objects_for_user

from carga_horaria.models import Profesor, AsignaturaBase, Asignatura, Asistente
from carga_horaria.formsAlexis import ProfesorForm, AsignaturaBaseForm, AsignaturaCreateForm, AsignaturaUpdateForm, AsistenteForm
from .models import Persona
from .models import Fundacion
from .models import Colegio
from .models import Periodo
from .models import Nivel
class LevelFilterMixin(object):
    """Adds ``Nivel`` filtering to a ListView via the ``nivel`` GET parameter."""

    def get_context_data(self, *args, **kwargs):
        context = super().get_context_data(*args, **kwargs)
        # Expose the level choices (reversed declaration order) and the
        # currently selected level to the template.
        context['levels'] = list(reversed([(tag.name, tag.value) for tag in Nivel]))
        context['nivel_actual'] = self.request.GET.get('nivel')
        return context

    def get_queryset(self):
        queryset = super().get_queryset()
        nivel = self.request.GET.get('nivel')
        return queryset.filter(plan__nivel=nivel) if nivel else queryset
# FIXME: I will leave it like this for now,
# but it's still possible for somebody to poke object ids to see what they
# shouldn't see.  Fix this!
class SearchMixin(object):
    """Filters the queryset by the ``q`` GET parameter (accent-insensitive)."""

    def get_queryset(self):
        queryset = super(SearchMixin, self).get_queryset()
        query = self.request.GET.get('q', None)
        if not query:
            return queryset
        # Profesor and Asistente querysets search different related models.
        if queryset.model == Profesor:
            condition = (Q(persona__nombre__unaccent__icontains=query) |
                         Q(persona__rut__unaccent__icontains=query) |
                         Q(asignacionextra__descripcion__unaccent__icontains=query) |
                         Q(asignacionnoaula__descripcion__unaccent__icontains=query))
        else:
            condition = (Q(persona__nombre__unaccent__icontains=query) |
                         Q(persona__rut__unaccent__icontains=query) |
                         Q(asignacionasistente__descripcion__unaccent__icontains=query) |
                         Q(funcion__unaccent__icontains=query))
        return queryset.filter(condition)
def get_for_user(request, qs, lookup, user):
    """Restrict *qs* to the colegios *user* may see, within the session's periodo.

    ``lookup`` is the ORM path from the queryset's model to the colegio pk
    (e.g. ``'colegio__pk'``); the periodo filter key is derived from it by
    stripping the trailing ``'pk'``.  The original duplicated the switcher
    and filter logic in both branches; only the colegios source differs.
    """
    periodo = request.session.get('periodo', 2020)
    if user.is_superuser:
        colegios = [c.pk for c in Colegio.objects.all()]
    else:
        colegios = [c.pk for c in get_objects_for_user(user, "carga_horaria.change_colegio")]
    # Colegio switcher: when one colegio is pinned in the session, use only it.
    selected = request.session.get('colegio__pk', None)
    if selected:
        colegios = [selected]
    # NOTE(review): 'periode' looks like a typo for 'periodo' (the session key
    # and the model are spelled 'periodo') -- confirm against the Colegio model
    # before changing; kept as-is to preserve behavior.
    kwargs = {"{}__in".format(lookup): colegios,
              "{}periode".format(lookup[:-2]): periodo}
    return qs.filter(**kwargs).distinct()
class GetObjectsForUserMixin(object):
    """Limits a view's queryset to the colegios the user may access.

    Subclasses set ``lookup`` to the ORM path to the colegio pk
    (e.g. ``'colegio__pk'``).  The original duplicated the switcher and
    filter logic in both branches; only the colegios source differs.
    """

    def get_queryset(self):
        qs = super(GetObjectsForUserMixin, self).get_queryset()
        periodo = self.request.session.get('periodo', 2020)
        if self.request.user.is_superuser:
            colegios = [c.pk for c in Colegio.objects.all()]
        else:
            colegios = [c.pk for c in get_objects_for_user(self.request.user, "carga_horaria.change_colegio")]
        # Colegio switcher: when one colegio is pinned in the session, use only it.
        selected = self.request.session.get('colegio__pk', None)
        if selected:
            colegios = [selected]
        # NOTE(review): 'periode' looks like a typo for 'periodo' -- confirm
        # against the model before changing; kept as-is to preserve behavior.
        kwargs = {"{}__in".format(self.lookup): colegios,
                  "{}periode".format(self.lookup[:-2]): periodo}
        return qs.filter(**kwargs).distinct()
class ObjPermissionRequiredMixin(object):
    """Returns the object only when the user holds ``self.permission`` on it;
    raises Http404 otherwise."""

    def get_object(self, *args, **kwargs):
        obj = super(ObjPermissionRequiredMixin, self).get_object(*args, **kwargs)
        if not self.request.user.has_perm(self.permission, obj):
            raise Http404
        return obj
"""
Comienzo Crud Profesor
"""
class ProfesorListView(LoginRequiredMixin, SearchMixin, GetObjectsForUserMixin, ListView):
    """
    Paginated, searchable listing of Profesor records, restricted to the
    colegios the logged-in user may access.
    """
    model = Profesor
    lookup = 'colegio__pk'  # ORM path used by GetObjectsForUserMixin
    template_name = 'carga_horaria/profesor/listado_profesor.html'
    # NOTE(review): SearchMixin hardcodes its Q lookups, so search_fields
    # appears unused here -- confirm before relying on it.
    search_fields = ['nombre', 'horas']
    paginate_by = 6
class ProfesorDetailView(LoginRequiredMixin, DetailView):
    """
    Detail page for a single Profesor.
    """
    model = Profesor
    template_name = 'carga_horaria/profesor/detalle_profesor.html'
class ProfesorCreateView(LoginRequiredMixin, CreateView):
    """Creates a Profesor together with its underlying Persona record."""
    model = Profesor
    form_class = ProfesorForm
    template_name = 'carga_horaria/profesor/nuevo_profesor.html'
    success_url = reverse_lazy('carga-horaria:profesores')

    # Persona attributes copied verbatim from the form into update_or_create().
    PERSONA_FIELDS = ('nombre', 'direccion', 'comuna', 'nacionalidad',
                      'telefono', 'email_personal', 'email_institucional',
                      'estado_civil', 'discapacidad', 'recibe_pension',
                      'adventista', 'fecha_nacimiento')

    def get_form_kwargs(self, *args, **kwargs):
        """Passes user/colegio/fundacion context to the form when a colegio is pinned."""
        kwargs = super(ProfesorCreateView, self).get_form_kwargs(*args, **kwargs)
        kwargs['user'] = self.request.user
        colegio_pk = self.request.session.get('colegio__pk', None)
        if colegio_pk:
            # Reuse colegio_pk instead of re-reading the session a second time.
            kwargs['colegio'] = colegio_pk
            kwargs['fundacion'] = Colegio.objects.get(pk=colegio_pk).fundacion.pk
        return kwargs

    def form_valid(self, form):
        """Upserts the Persona keyed by rut, then saves the new Profesor."""
        profesor = form.save(commit=False)
        defaults = {field: form.cleaned_data[field] for field in self.PERSONA_FIELDS}
        profesor.persona, _ = Persona.objects.update_or_create(
            rut=form.cleaned_data['rut'], defaults=defaults)
        profesor.save()
        return redirect(reverse('carga-horaria:profesores'))
class ProfesorUpdateView(LoginRequiredMixin, UpdateView):
    """Edits a Profesor, upserting its underlying Persona record."""
    model = Profesor
    form_class = ProfesorForm
    template_name = 'carga_horaria/profesor/editar_profesor.html'

    # Persona attributes copied verbatim from the form into update_or_create().
    PERSONA_FIELDS = ('nombre', 'direccion', 'comuna', 'nacionalidad',
                      'telefono', 'email_personal', 'email_institucional',
                      'estado_civil', 'discapacidad', 'recibe_pension',
                      'adventista', 'fecha_nacimiento')

    def get_form_kwargs(self, *args, **kwargs):
        """Passes user/colegio/fundacion context to the form when a colegio is pinned."""
        kwargs = super(ProfesorUpdateView, self).get_form_kwargs(*args, **kwargs)
        kwargs['user'] = self.request.user
        colegio_pk = self.request.session.get('colegio__pk', None)
        if colegio_pk:
            # Reuse colegio_pk instead of re-reading the session a second time.
            kwargs['colegio'] = colegio_pk
            kwargs['fundacion'] = Colegio.objects.get(pk=colegio_pk).fundacion.pk
        return kwargs

    def form_valid(self, form):
        """Upserts the Persona keyed by rut, then saves the Profesor."""
        profesor = form.save(commit=False)
        defaults = {field: form.cleaned_data[field] for field in self.PERSONA_FIELDS}
        profesor.persona, _ = Persona.objects.update_or_create(
            rut=form.cleaned_data['rut'], defaults=defaults)
        profesor.save()
        return redirect(self.get_success_url())

    def get_success_url(self):
        return reverse('carga-horaria:profesor', kwargs={'pk': self.object.pk})
class ProfesorDeleteView(LoginRequiredMixin, DeleteView):
    """Deletes a Profesor; GET is treated like POST (no confirmation page)."""
    model = Profesor
    success_url = reverse_lazy('carga-horaria:profesores')
    def get(self, request, *args, **kwargs):
        # NOTE(review): deleting on GET skips the confirmation step and CSRF
        # protection -- confirm this shortcut is intended.
        return self.post(request, *args, **kwargs)
# """
# Comienzo Crud Curso
# """
# class CursoListView(ListView):
# """
# Listado de cursos
# """
# model = Curso
# template_name = 'carga_horaria/curso/listado_curso.html'
# search_fields = ['periodo', 'letra']
# paginate_by = 6
# class CursoDetailView(DetailView):
# """
# Detalle de curso
# """
# model = Curso
# template_name = 'carga_horaria/curso/detalle_curso.html'
# class CursoCreateView(CreateView):
# model = Curso
# form_class = CursoForm
# template_name = 'carga_horaria/curso/nuevo_curso.html'
# success_url = reverse_lazy('carga-horaria:cursos')
# class CursoUpdateView(UpdateView):
# model = Curso
# form_class = CursoForm
# template_name = 'carga_horaria/curso/editar_curso.html'
# def get_success_url(self):
# return reverse(
# 'carga-horaria:curso',
# kwargs={
# 'pk': self.object.pk,
# }
# )
# class CursoDeleteView(DeleteView):
# model = Curso
# success_url = reverse_lazy('carga-horaria:cursos')
# def get(self, request, *args, **kwargs):
# return self.post(request, *args, **kwargs)
"""
Comienzo Crud Asistente
"""
class AsistenteListView(LoginRequiredMixin, SearchMixin, GetObjectsForUserMixin, ListView):
    """
    Paginated, searchable listing of Asistente records, restricted to the
    colegios the logged-in user may access.
    """
    model = Asistente
    lookup = 'colegio__pk'  # ORM path used by GetObjectsForUserMixin
    template_name = 'carga_horaria/asistente/listado_asistente.html'
    # NOTE(review): SearchMixin hardcodes its Q lookups, so search_fields
    # appears unused here -- confirm before relying on it.
    search_fields = ['nombre', 'horas']
    paginate_by = 6
class AsistenteDetailView(LoginRequiredMixin, DetailView):
    """
    Detail page for a single Asistente.
    """
    model = Asistente
    template_name = 'carga_horaria/asistente/detalle_asistente.html'
class AsistenteCreateView(LoginRequiredMixin, CreateView):
    """Creates an Asistente together with its underlying Persona record."""
    model = Asistente
    form_class = AsistenteForm
    template_name = 'carga_horaria/asistente/nuevo_asistente.html'
    success_url = reverse_lazy('carga-horaria:asistentes')

    # Persona attributes copied verbatim from the form into update_or_create().
    PERSONA_FIELDS = ('nombre', 'direccion', 'comuna', 'nacionalidad',
                      'telefono', 'email_personal', 'email_institucional',
                      'estado_civil', 'discapacidad', 'recibe_pension',
                      'adventista', 'fecha_nacimiento')

    def get_form_kwargs(self, *args, **kwargs):
        """Passes user/colegio/fundacion context to the form when a colegio is pinned."""
        kwargs = super(AsistenteCreateView, self).get_form_kwargs(*args, **kwargs)
        kwargs['user'] = self.request.user
        colegio_pk = self.request.session.get('colegio__pk', None)
        if colegio_pk:
            # Reuse colegio_pk instead of re-reading the session a second time.
            kwargs['colegio'] = colegio_pk
            kwargs['fundacion'] = Colegio.objects.get(pk=colegio_pk).fundacion.pk
        return kwargs

    def form_valid(self, form):
        """Upserts the Persona keyed by rut, then saves the new Asistente."""
        asistente = form.save(commit=False)
        defaults = {field: form.cleaned_data[field] for field in self.PERSONA_FIELDS}
        asistente.persona, _ = Persona.objects.update_or_create(
            rut=form.cleaned_data['rut'], defaults=defaults)
        asistente.save()
        return redirect(reverse('carga-horaria:asistentes'))
class AsistenteUpdateView(LoginRequiredMixin, UpdateView):
    """
    Edit an Asistente, refreshing its backing Persona record (upserted
    by rut) from the submitted form data.
    """
    model = Asistente
    form_class = AsistenteForm
    template_name = 'carga_horaria/asistente/editar_asistente.html'

    # Persona fields copied verbatim from the cleaned form data into the
    # update_or_create defaults (was a 12-key literal dict repeated in the
    # matching CreateView).
    _PERSONA_FIELDS = ('nombre', 'direccion', 'comuna', 'nacionalidad',
                       'telefono', 'email_personal', 'email_institucional',
                       'estado_civil', 'discapacidad', 'recibe_pension',
                       'adventista', 'fecha_nacimiento')

    def get_success_url(self):
        """Redirect to the detail page of the asistente just edited."""
        return reverse(
            'carga-horaria:asistente',
            kwargs={
                'pk': self.object.pk,
            }
        )

    def form_valid(self, form):
        """Upsert the Persona keyed by rut, then save the asistente."""
        asistente = form.save(commit=False)
        defaults = {name: form.cleaned_data[name] for name in self._PERSONA_FIELDS}
        asistente.persona, _ = Persona.objects.update_or_create(
            rut=form.cleaned_data['rut'], defaults=defaults)
        asistente.save()
        return redirect(self.get_success_url())
class AsistenteDeleteView(LoginRequiredMixin, DeleteView):
    """
    Delete an Asistente; GET is delegated to POST so the deletion happens
    immediately, without rendering a confirmation template.
    """
    model = Asistente
    success_url = reverse_lazy('carga-horaria:asistentes')
    def get(self, request, *args, **kwargs):
        # NOTE(review): routing GET to POST skips the confirmation step and
        # lets a plain link delete objects (no CSRF check applies to GET) —
        # confirm this is intentional.
        return self.post(request, *args, **kwargs)
"""
Comienzo Crud Asignatura Base
"""
class AsignaturaBaseListView(LoginRequiredMixin, GetObjectsForUserMixin, ListView):
    """
    Paginated listing of AsignaturaBase, optionally filtered by level
    (?nivel=...), scoped via GetObjectsForUserMixin.
    """
    model = AsignaturaBase
    lookup = 'plan__colegio__pk'
    template_name = 'carga_horaria/asignaturabase/listado_asignaturabase.html'
    search_fields = ['nombre', 'plan']
    paginate_by = 10

    def get_context_data(self, *args, **kwargs):
        # Expose the level choices and the currently selected filter.
        context = super().get_context_data(*args, **kwargs)
        context['levels'] = [(level.name, level.value) for level in Nivel]
        context['nivel_actual'] = self.request.GET.get('nivel')
        return context

    def get_queryset(self):
        # Narrow the listing to a single level when one is requested.
        queryset = super().get_queryset()
        selected = self.request.GET.get('nivel')
        return queryset.filter(plan__nivel=selected) if selected else queryset
class AsignaturaBaseDetailView(LoginRequiredMixin, DetailView):
    """
    Detail page for a single AsignaturaBase (login required).
    """
    model = AsignaturaBase
    template_name = 'carga_horaria/asignaturabase/detalle_asignaturabase.html'
class AsignaturaBaseCreateView(LoginRequiredMixin, CreateView):
    """
    Create an AsignaturaBase scoped to the colegio selected in session.
    """
    model = AsignaturaBase
    form_class = AsignaturaBaseForm
    template_name = 'carga_horaria/asignaturabase/nuevo_asignaturabase.html'
    success_url = reverse_lazy('carga-horaria:asignaturasbase')

    def get_form_kwargs(self, *args, **kwargs):
        # Hand the form the requesting user plus the session's colegio pk.
        form_kwargs = super(AsignaturaBaseCreateView, self).get_form_kwargs(*args, **kwargs)
        form_kwargs['user'] = self.request.user
        form_kwargs['colegio'] = self.request.session.get('colegio__pk', None)
        return form_kwargs
class AsignaturaBaseUpdateView(LoginRequiredMixin, UpdateView):
    """
    Edit an AsignaturaBase; redirects back to its detail page on success.
    """
    model = AsignaturaBase
    form_class = AsignaturaBaseForm
    template_name = 'carga_horaria/asignaturabase/editar_asignaturabase.html'

    def get_success_url(self):
        # Detail view of the object just edited.
        return reverse('carga-horaria:asignaturabase', kwargs={'pk': self.object.pk})
class AsignaturaBaseDeleteView(LoginRequiredMixin, DeleteView):
    """
    Delete an AsignaturaBase; GET is delegated to POST so no confirmation
    template is rendered.
    """
    model = AsignaturaBase
    success_url = reverse_lazy('carga-horaria:asignaturasbase')
    def get(self, request, *args, **kwargs):
        # NOTE(review): GET deletes immediately (no confirmation, no CSRF
        # protection on GET) — confirm this is intentional.
        return self.post(request, *args, **kwargs)
"""
Comienzo Crud Asignatura
"""
class AsignaturaListView(LoginRequiredMixin, ListView):
    """
    Paginated listing of Asignatura, filterable by level (?nivel=...)
    and by periodo (?periodo=<pk>).
    """
    model = Asignatura
    template_name = 'carga_horaria/asignatura/listado_asignatura.html'
    search_fields = ['base', 'periodo']
    paginate_by = 10

    def get_context_data(self, *args, **kwargs):
        context = super().get_context_data(*args, **kwargs)
        # Levels are presented in reverse declaration order here.
        context['levels'] = list(reversed([(level.name, level.value) for level in Nivel]))
        context['nivel_actual'] = self.request.GET.get('nivel')
        return context

    def get_queryset(self):
        # Apply each filter independently when present in the query string.
        queryset = super().get_queryset()
        selected_nivel = self.request.GET.get('nivel')
        if selected_nivel:
            queryset = queryset.filter(base__plan__nivel=selected_nivel)
        selected_periodo = self.request.GET.get('periodo')
        if selected_periodo:
            queryset = queryset.filter(periodo__pk=selected_periodo)
        return queryset
class AsignaturaDetailView(LoginRequiredMixin, DetailView):
    """
    Detail page for a single Asignatura, rendered in the context of the
    periodo given in the URL.
    """
    model = Asignatura
    template_name = 'carga_horaria/asignatura/detalle_asignatura.html'

    def get_context_data(self, *args, **kwargs):
        context = super().get_context_data(*args, **kwargs)
        # The periodo comes from the URL kwargs, not from the asignatura.
        context['periodo'] = Periodo.objects.get(pk=self.kwargs['periodo_pk'])
        return context
class AsignaturaCreateView(LoginRequiredMixin, CreateView):
    """
    Create an Asignatura inside a Periodo, rejecting the form when the
    requested hours exceed the periodo's remaining budget.
    """
    model = Asignatura
    form_class = AsignaturaCreateForm
    template_name = 'carga_horaria/asignatura/nuevo_asignatura.html'

    def form_valid(self, form):
        # dirty validation: the requested hours must fit within what the
        # periodo still has available.
        periodo = Periodo.objects.get(pk=self.kwargs['pk'])
        requested = form.cleaned_data['horas']
        remaining = periodo.available
        if requested > remaining:
            form.add_error('horas', "Horas superan el tiempo disponible ({})".format(remaining))
            return self.form_invalid(form)
        self.object = form.save()
        self.object.periodos.add(periodo)
        return HttpResponseRedirect(self.get_success_url())

    def get_success_url(self):
        # Back to the periodo the asignatura was created under.
        return reverse('carga-horaria:periodo', kwargs={'pk': self.kwargs['pk']})
class AsignaturaUpdateView(LoginRequiredMixin, UpdateView):
    """
    Edit an Asignatura within a periodo, validating the hour change
    against the periodo's remaining budget and (when the asignatura is
    backed by a plan baseline) against the baseline minimum.
    """
    model = Asignatura
    form_class = AsignaturaUpdateForm
    template_name = 'carga_horaria/asignatura/editar_asignatura.html'
    def get_success_url(self):
        # Back to the periodo named in the URL, not derived from the object.
        return reverse('carga-horaria:periodo', kwargs={'pk': self.kwargs['periodo_pk']})
    def form_valid(self, form):
        # dirty validation
        periodo = Periodo.objects.get(pk=self.kwargs['periodo_pk'])
        horas = form.cleaned_data['horas']
        # Re-fetch the persisted hours from the DB — self.object may already
        # carry the form's new value — so delta is (new - stored).
        old_horas = Asignatura.objects.get(pk=self.object.pk).horas
        delta = horas - old_horas
        available = periodo.available
        if delta > available:
            # Only the increase must fit the remaining budget; the cap shown
            # to the user is available + old_horas.
            form.add_error('horas', "Horas superan el tiempo disponible ({})".format(available + old_horas))
            return self.form_invalid(form)
        elif self.object.base:
            # Baseline-backed asignaturas may not drop below the plan's
            # hours; JEC status of the colegio selects which baseline applies.
            if periodo.colegio.jec:
                horas_base = self.object.base.horas_jec
            else:
                horas_base = self.object.base.horas_nec
            if horas < horas_base:
                form.add_error('horas', "Horas deben ser como mínimo las del plan de estudios original ({})".format(horas_base))
                return self.form_invalid(form)
        return super().form_valid(form)
class AsignaturaDeleteView(LoginRequiredMixin, DeleteView):
    """
    Delete an Asignatura; GET is delegated to POST so the deletion runs
    immediately, then redirect back to the periodo from the URL.
    """
    model = Asignatura

    def get(self, request, *args, **kwargs):
        # Skip the confirmation template and delete right away.
        return self.post(request, *args, **kwargs)

    def get_success_url(self):
        return reverse('carga-horaria:periodo',
                       kwargs={'pk': self.kwargs['periodo_pk']})
|
normal
|
{
"blob_id": "d0d86d8b5b276218add6dd11a44d5c3951cc4e14",
"index": 3846,
"step-1": "<mask token>\n\n\nclass AsistenteDetailView(LoginRequiredMixin, DetailView):\n \"\"\"\n Detalle de Asistente\n \"\"\"\n model = Asistente\n template_name = 'carga_horaria/asistente/detalle_asistente.html'\n\n\nclass AsistenteCreateView(LoginRequiredMixin, CreateView):\n model = Asistente\n form_class = AsistenteForm\n template_name = 'carga_horaria/asistente/nuevo_asistente.html'\n success_url = reverse_lazy('carga-horaria:asistentes')\n\n def get_form_kwargs(self, *args, **kwargs):\n kwargs = super(AsistenteCreateView, self).get_form_kwargs(*args, **\n kwargs)\n colegio_pk = self.request.session.get('colegio__pk', None)\n if colegio_pk:\n kwargs.update({'user': self.request.user, 'colegio': colegio_pk,\n 'fundacion': Colegio.objects.get(pk=self.request.session.\n get('colegio__pk', None)).fundacion.pk})\n else:\n kwargs.update({'user': self.request.user})\n return kwargs\n\n def form_valid(self, form):\n asistente = form.save(commit=False)\n asistente.persona, _ = Persona.objects.update_or_create(rut=form.\n cleaned_data['rut'], defaults={'nombre': form.cleaned_data[\n 'nombre'], 'direccion': form.cleaned_data['direccion'],\n 'comuna': form.cleaned_data['comuna'], 'nacionalidad': form.\n cleaned_data['nacionalidad'], 'telefono': form.cleaned_data[\n 'telefono'], 'email_personal': form.cleaned_data[\n 'email_personal'], 'email_institucional': form.cleaned_data[\n 'email_institucional'], 'estado_civil': form.cleaned_data[\n 'estado_civil'], 'discapacidad': form.cleaned_data[\n 'discapacidad'], 'recibe_pension': form.cleaned_data[\n 'recibe_pension'], 'adventista': form.cleaned_data['adventista'\n ], 'fecha_nacimiento': form.cleaned_data['fecha_nacimiento']})\n asistente.save()\n return redirect(reverse('carga-horaria:asistentes'))\n\n\nclass AsistenteUpdateView(LoginRequiredMixin, UpdateView):\n model = Asistente\n form_class = AsistenteForm\n template_name = 'carga_horaria/asistente/editar_asistente.html'\n\n def get_success_url(self):\n return 
reverse('carga-horaria:asistente', kwargs={'pk': self.object.pk}\n )\n\n def form_valid(self, form):\n asistente = form.save(commit=False)\n asistente.persona, _ = Persona.objects.update_or_create(rut=form.\n cleaned_data['rut'], defaults={'nombre': form.cleaned_data[\n 'nombre'], 'direccion': form.cleaned_data['direccion'],\n 'comuna': form.cleaned_data['comuna'], 'nacionalidad': form.\n cleaned_data['nacionalidad'], 'telefono': form.cleaned_data[\n 'telefono'], 'email_personal': form.cleaned_data[\n 'email_personal'], 'email_institucional': form.cleaned_data[\n 'email_institucional'], 'estado_civil': form.cleaned_data[\n 'estado_civil'], 'discapacidad': form.cleaned_data[\n 'discapacidad'], 'recibe_pension': form.cleaned_data[\n 'recibe_pension'], 'adventista': form.cleaned_data['adventista'\n ], 'fecha_nacimiento': form.cleaned_data['fecha_nacimiento']})\n asistente.save()\n return redirect(self.get_success_url())\n\n\nclass AsistenteDeleteView(LoginRequiredMixin, DeleteView):\n model = Asistente\n success_url = reverse_lazy('carga-horaria:asistentes')\n\n def get(self, request, *args, **kwargs):\n return self.post(request, *args, **kwargs)\n\n\n<mask token>\n\n\nclass AsignaturaBaseListView(LoginRequiredMixin, GetObjectsForUserMixin,\n ListView):\n \"\"\"\n Listado de asignatura base\n \"\"\"\n model = AsignaturaBase\n lookup = 'plan__colegio__pk'\n template_name = 'carga_horaria/asignaturabase/listado_asignaturabase.html'\n search_fields = ['nombre', 'plan']\n paginate_by = 10\n\n def get_context_data(self, *args, **kwargs):\n ctx = super().get_context_data(*args, **kwargs)\n ctx['levels'] = [(tag.name, tag.value) for tag in Nivel]\n ctx['nivel_actual'] = self.request.GET.get('nivel')\n return ctx\n\n def get_queryset(self):\n qs = super().get_queryset()\n nivel = self.request.GET.get('nivel')\n if nivel:\n qs = qs.filter(plan__nivel=nivel)\n return qs\n\n\nclass AsignaturaBaseDetailView(LoginRequiredMixin, DetailView):\n \"\"\"\n Detalle de asignatura base\n 
\"\"\"\n model = AsignaturaBase\n template_name = 'carga_horaria/asignaturabase/detalle_asignaturabase.html'\n\n\nclass AsignaturaBaseCreateView(LoginRequiredMixin, CreateView):\n model = AsignaturaBase\n form_class = AsignaturaBaseForm\n template_name = 'carga_horaria/asignaturabase/nuevo_asignaturabase.html'\n success_url = reverse_lazy('carga-horaria:asignaturasbase')\n\n def get_form_kwargs(self, *args, **kwargs):\n kwargs = super(AsignaturaBaseCreateView, self).get_form_kwargs(*\n args, **kwargs)\n kwargs.update({'user': self.request.user, 'colegio': self.request.\n session.get('colegio__pk', None)})\n return kwargs\n\n\nclass AsignaturaBaseUpdateView(LoginRequiredMixin, UpdateView):\n model = AsignaturaBase\n form_class = AsignaturaBaseForm\n template_name = 'carga_horaria/asignaturabase/editar_asignaturabase.html'\n\n def get_success_url(self):\n return reverse('carga-horaria:asignaturabase', kwargs={'pk': self.\n object.pk})\n\n\nclass AsignaturaBaseDeleteView(LoginRequiredMixin, DeleteView):\n model = AsignaturaBase\n success_url = reverse_lazy('carga-horaria:asignaturasbase')\n\n def get(self, request, *args, **kwargs):\n return self.post(request, *args, **kwargs)\n\n\n<mask token>\n\n\nclass AsignaturaListView(LoginRequiredMixin, ListView):\n \"\"\"\n Listado de asignatura\n \"\"\"\n model = Asignatura\n template_name = 'carga_horaria/asignatura/listado_asignatura.html'\n search_fields = ['base', 'periodo']\n paginate_by = 10\n\n def get_context_data(self, *args, **kwargs):\n ctx = super().get_context_data(*args, **kwargs)\n ctx['levels'] = [(tag.name, tag.value) for tag in Nivel][::-1]\n ctx['nivel_actual'] = self.request.GET.get('nivel')\n return ctx\n\n def get_queryset(self):\n qs = super().get_queryset()\n nivel = self.request.GET.get('nivel')\n if nivel:\n qs = qs.filter(base__plan__nivel=nivel)\n periodo = self.request.GET.get('periodo')\n if periodo:\n qs = qs.filter(periodo__pk=periodo)\n return qs\n\n\nclass 
AsignaturaDetailView(LoginRequiredMixin, DetailView):\n \"\"\"\n Detalle de asignatura\n \"\"\"\n model = Asignatura\n template_name = 'carga_horaria/asignatura/detalle_asignatura.html'\n\n def get_context_data(self, *args, **kwargs):\n ctx = super().get_context_data(*args, **kwargs)\n ctx['periodo'] = Periodo.objects.get(pk=self.kwargs['periodo_pk'])\n return ctx\n\n\nclass AsignaturaCreateView(LoginRequiredMixin, CreateView):\n model = Asignatura\n form_class = AsignaturaCreateForm\n template_name = 'carga_horaria/asignatura/nuevo_asignatura.html'\n\n def form_valid(self, form):\n periodo = Periodo.objects.get(pk=self.kwargs['pk'])\n horas = form.cleaned_data['horas']\n available = periodo.available\n if horas > available:\n form.add_error('horas',\n 'Horas superan el tiempo disponible ({})'.format(available))\n return self.form_invalid(form)\n else:\n self.object = form.save()\n self.object.periodos.add(periodo)\n return HttpResponseRedirect(self.get_success_url())\n\n def get_success_url(self):\n return reverse('carga-horaria:periodo', kwargs={'pk': self.kwargs[\n 'pk']})\n\n\nclass AsignaturaUpdateView(LoginRequiredMixin, UpdateView):\n model = Asignatura\n form_class = AsignaturaUpdateForm\n template_name = 'carga_horaria/asignatura/editar_asignatura.html'\n\n def get_success_url(self):\n return reverse('carga-horaria:periodo', kwargs={'pk': self.kwargs[\n 'periodo_pk']})\n\n def form_valid(self, form):\n periodo = Periodo.objects.get(pk=self.kwargs['periodo_pk'])\n horas = form.cleaned_data['horas']\n old_horas = Asignatura.objects.get(pk=self.object.pk).horas\n delta = horas - old_horas\n available = periodo.available\n if delta > available:\n form.add_error('horas',\n 'Horas superan el tiempo disponible ({})'.format(available +\n old_horas))\n return self.form_invalid(form)\n elif self.object.base:\n if periodo.colegio.jec:\n horas_base = self.object.base.horas_jec\n else:\n horas_base = self.object.base.horas_nec\n if horas < horas_base:\n 
form.add_error('horas',\n 'Horas deben ser como mínimo las del plan de estudios original ({})'\n .format(horas_base))\n return self.form_invalid(form)\n return super().form_valid(form)\n\n\nclass AsignaturaDeleteView(LoginRequiredMixin, DeleteView):\n model = Asignatura\n\n def get(self, request, *args, **kwargs):\n return self.post(request, *args, **kwargs)\n\n def get_success_url(self):\n return reverse('carga-horaria:periodo', kwargs={'pk': self.kwargs[\n 'periodo_pk']})\n",
"step-2": "<mask token>\n\n\nclass AsistenteListView(LoginRequiredMixin, SearchMixin,\n GetObjectsForUserMixin, ListView):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\nclass AsistenteDetailView(LoginRequiredMixin, DetailView):\n \"\"\"\n Detalle de Asistente\n \"\"\"\n model = Asistente\n template_name = 'carga_horaria/asistente/detalle_asistente.html'\n\n\nclass AsistenteCreateView(LoginRequiredMixin, CreateView):\n model = Asistente\n form_class = AsistenteForm\n template_name = 'carga_horaria/asistente/nuevo_asistente.html'\n success_url = reverse_lazy('carga-horaria:asistentes')\n\n def get_form_kwargs(self, *args, **kwargs):\n kwargs = super(AsistenteCreateView, self).get_form_kwargs(*args, **\n kwargs)\n colegio_pk = self.request.session.get('colegio__pk', None)\n if colegio_pk:\n kwargs.update({'user': self.request.user, 'colegio': colegio_pk,\n 'fundacion': Colegio.objects.get(pk=self.request.session.\n get('colegio__pk', None)).fundacion.pk})\n else:\n kwargs.update({'user': self.request.user})\n return kwargs\n\n def form_valid(self, form):\n asistente = form.save(commit=False)\n asistente.persona, _ = Persona.objects.update_or_create(rut=form.\n cleaned_data['rut'], defaults={'nombre': form.cleaned_data[\n 'nombre'], 'direccion': form.cleaned_data['direccion'],\n 'comuna': form.cleaned_data['comuna'], 'nacionalidad': form.\n cleaned_data['nacionalidad'], 'telefono': form.cleaned_data[\n 'telefono'], 'email_personal': form.cleaned_data[\n 'email_personal'], 'email_institucional': form.cleaned_data[\n 'email_institucional'], 'estado_civil': form.cleaned_data[\n 'estado_civil'], 'discapacidad': form.cleaned_data[\n 'discapacidad'], 'recibe_pension': form.cleaned_data[\n 'recibe_pension'], 'adventista': form.cleaned_data['adventista'\n ], 'fecha_nacimiento': form.cleaned_data['fecha_nacimiento']})\n asistente.save()\n return redirect(reverse('carga-horaria:asistentes'))\n\n\nclass 
AsistenteUpdateView(LoginRequiredMixin, UpdateView):\n model = Asistente\n form_class = AsistenteForm\n template_name = 'carga_horaria/asistente/editar_asistente.html'\n\n def get_success_url(self):\n return reverse('carga-horaria:asistente', kwargs={'pk': self.object.pk}\n )\n\n def form_valid(self, form):\n asistente = form.save(commit=False)\n asistente.persona, _ = Persona.objects.update_or_create(rut=form.\n cleaned_data['rut'], defaults={'nombre': form.cleaned_data[\n 'nombre'], 'direccion': form.cleaned_data['direccion'],\n 'comuna': form.cleaned_data['comuna'], 'nacionalidad': form.\n cleaned_data['nacionalidad'], 'telefono': form.cleaned_data[\n 'telefono'], 'email_personal': form.cleaned_data[\n 'email_personal'], 'email_institucional': form.cleaned_data[\n 'email_institucional'], 'estado_civil': form.cleaned_data[\n 'estado_civil'], 'discapacidad': form.cleaned_data[\n 'discapacidad'], 'recibe_pension': form.cleaned_data[\n 'recibe_pension'], 'adventista': form.cleaned_data['adventista'\n ], 'fecha_nacimiento': form.cleaned_data['fecha_nacimiento']})\n asistente.save()\n return redirect(self.get_success_url())\n\n\nclass AsistenteDeleteView(LoginRequiredMixin, DeleteView):\n model = Asistente\n success_url = reverse_lazy('carga-horaria:asistentes')\n\n def get(self, request, *args, **kwargs):\n return self.post(request, *args, **kwargs)\n\n\n<mask token>\n\n\nclass AsignaturaBaseListView(LoginRequiredMixin, GetObjectsForUserMixin,\n ListView):\n \"\"\"\n Listado de asignatura base\n \"\"\"\n model = AsignaturaBase\n lookup = 'plan__colegio__pk'\n template_name = 'carga_horaria/asignaturabase/listado_asignaturabase.html'\n search_fields = ['nombre', 'plan']\n paginate_by = 10\n\n def get_context_data(self, *args, **kwargs):\n ctx = super().get_context_data(*args, **kwargs)\n ctx['levels'] = [(tag.name, tag.value) for tag in Nivel]\n ctx['nivel_actual'] = self.request.GET.get('nivel')\n return ctx\n\n def get_queryset(self):\n qs = super().get_queryset()\n 
nivel = self.request.GET.get('nivel')\n if nivel:\n qs = qs.filter(plan__nivel=nivel)\n return qs\n\n\nclass AsignaturaBaseDetailView(LoginRequiredMixin, DetailView):\n \"\"\"\n Detalle de asignatura base\n \"\"\"\n model = AsignaturaBase\n template_name = 'carga_horaria/asignaturabase/detalle_asignaturabase.html'\n\n\nclass AsignaturaBaseCreateView(LoginRequiredMixin, CreateView):\n model = AsignaturaBase\n form_class = AsignaturaBaseForm\n template_name = 'carga_horaria/asignaturabase/nuevo_asignaturabase.html'\n success_url = reverse_lazy('carga-horaria:asignaturasbase')\n\n def get_form_kwargs(self, *args, **kwargs):\n kwargs = super(AsignaturaBaseCreateView, self).get_form_kwargs(*\n args, **kwargs)\n kwargs.update({'user': self.request.user, 'colegio': self.request.\n session.get('colegio__pk', None)})\n return kwargs\n\n\nclass AsignaturaBaseUpdateView(LoginRequiredMixin, UpdateView):\n model = AsignaturaBase\n form_class = AsignaturaBaseForm\n template_name = 'carga_horaria/asignaturabase/editar_asignaturabase.html'\n\n def get_success_url(self):\n return reverse('carga-horaria:asignaturabase', kwargs={'pk': self.\n object.pk})\n\n\nclass AsignaturaBaseDeleteView(LoginRequiredMixin, DeleteView):\n model = AsignaturaBase\n success_url = reverse_lazy('carga-horaria:asignaturasbase')\n\n def get(self, request, *args, **kwargs):\n return self.post(request, *args, **kwargs)\n\n\n<mask token>\n\n\nclass AsignaturaListView(LoginRequiredMixin, ListView):\n \"\"\"\n Listado de asignatura\n \"\"\"\n model = Asignatura\n template_name = 'carga_horaria/asignatura/listado_asignatura.html'\n search_fields = ['base', 'periodo']\n paginate_by = 10\n\n def get_context_data(self, *args, **kwargs):\n ctx = super().get_context_data(*args, **kwargs)\n ctx['levels'] = [(tag.name, tag.value) for tag in Nivel][::-1]\n ctx['nivel_actual'] = self.request.GET.get('nivel')\n return ctx\n\n def get_queryset(self):\n qs = super().get_queryset()\n nivel = self.request.GET.get('nivel')\n 
if nivel:\n qs = qs.filter(base__plan__nivel=nivel)\n periodo = self.request.GET.get('periodo')\n if periodo:\n qs = qs.filter(periodo__pk=periodo)\n return qs\n\n\nclass AsignaturaDetailView(LoginRequiredMixin, DetailView):\n \"\"\"\n Detalle de asignatura\n \"\"\"\n model = Asignatura\n template_name = 'carga_horaria/asignatura/detalle_asignatura.html'\n\n def get_context_data(self, *args, **kwargs):\n ctx = super().get_context_data(*args, **kwargs)\n ctx['periodo'] = Periodo.objects.get(pk=self.kwargs['periodo_pk'])\n return ctx\n\n\nclass AsignaturaCreateView(LoginRequiredMixin, CreateView):\n model = Asignatura\n form_class = AsignaturaCreateForm\n template_name = 'carga_horaria/asignatura/nuevo_asignatura.html'\n\n def form_valid(self, form):\n periodo = Periodo.objects.get(pk=self.kwargs['pk'])\n horas = form.cleaned_data['horas']\n available = periodo.available\n if horas > available:\n form.add_error('horas',\n 'Horas superan el tiempo disponible ({})'.format(available))\n return self.form_invalid(form)\n else:\n self.object = form.save()\n self.object.periodos.add(periodo)\n return HttpResponseRedirect(self.get_success_url())\n\n def get_success_url(self):\n return reverse('carga-horaria:periodo', kwargs={'pk': self.kwargs[\n 'pk']})\n\n\nclass AsignaturaUpdateView(LoginRequiredMixin, UpdateView):\n model = Asignatura\n form_class = AsignaturaUpdateForm\n template_name = 'carga_horaria/asignatura/editar_asignatura.html'\n\n def get_success_url(self):\n return reverse('carga-horaria:periodo', kwargs={'pk': self.kwargs[\n 'periodo_pk']})\n\n def form_valid(self, form):\n periodo = Periodo.objects.get(pk=self.kwargs['periodo_pk'])\n horas = form.cleaned_data['horas']\n old_horas = Asignatura.objects.get(pk=self.object.pk).horas\n delta = horas - old_horas\n available = periodo.available\n if delta > available:\n form.add_error('horas',\n 'Horas superan el tiempo disponible ({})'.format(available +\n old_horas))\n return self.form_invalid(form)\n elif 
self.object.base:\n if periodo.colegio.jec:\n horas_base = self.object.base.horas_jec\n else:\n horas_base = self.object.base.horas_nec\n if horas < horas_base:\n form.add_error('horas',\n 'Horas deben ser como mínimo las del plan de estudios original ({})'\n .format(horas_base))\n return self.form_invalid(form)\n return super().form_valid(form)\n\n\nclass AsignaturaDeleteView(LoginRequiredMixin, DeleteView):\n model = Asignatura\n\n def get(self, request, *args, **kwargs):\n return self.post(request, *args, **kwargs)\n\n def get_success_url(self):\n return reverse('carga-horaria:periodo', kwargs={'pk': self.kwargs[\n 'periodo_pk']})\n",
"step-3": "<mask token>\n\n\nclass ProfesorDeleteView(LoginRequiredMixin, DeleteView):\n <mask token>\n <mask token>\n <mask token>\n\n\n<mask token>\n\n\nclass AsistenteListView(LoginRequiredMixin, SearchMixin,\n GetObjectsForUserMixin, ListView):\n \"\"\"\n Listado de asistentes\n \"\"\"\n model = Asistente\n lookup = 'colegio__pk'\n template_name = 'carga_horaria/asistente/listado_asistente.html'\n search_fields = ['nombre', 'horas']\n paginate_by = 6\n\n\nclass AsistenteDetailView(LoginRequiredMixin, DetailView):\n \"\"\"\n Detalle de Asistente\n \"\"\"\n model = Asistente\n template_name = 'carga_horaria/asistente/detalle_asistente.html'\n\n\nclass AsistenteCreateView(LoginRequiredMixin, CreateView):\n model = Asistente\n form_class = AsistenteForm\n template_name = 'carga_horaria/asistente/nuevo_asistente.html'\n success_url = reverse_lazy('carga-horaria:asistentes')\n\n def get_form_kwargs(self, *args, **kwargs):\n kwargs = super(AsistenteCreateView, self).get_form_kwargs(*args, **\n kwargs)\n colegio_pk = self.request.session.get('colegio__pk', None)\n if colegio_pk:\n kwargs.update({'user': self.request.user, 'colegio': colegio_pk,\n 'fundacion': Colegio.objects.get(pk=self.request.session.\n get('colegio__pk', None)).fundacion.pk})\n else:\n kwargs.update({'user': self.request.user})\n return kwargs\n\n def form_valid(self, form):\n asistente = form.save(commit=False)\n asistente.persona, _ = Persona.objects.update_or_create(rut=form.\n cleaned_data['rut'], defaults={'nombre': form.cleaned_data[\n 'nombre'], 'direccion': form.cleaned_data['direccion'],\n 'comuna': form.cleaned_data['comuna'], 'nacionalidad': form.\n cleaned_data['nacionalidad'], 'telefono': form.cleaned_data[\n 'telefono'], 'email_personal': form.cleaned_data[\n 'email_personal'], 'email_institucional': form.cleaned_data[\n 'email_institucional'], 'estado_civil': form.cleaned_data[\n 'estado_civil'], 'discapacidad': form.cleaned_data[\n 'discapacidad'], 'recibe_pension': 
form.cleaned_data[\n 'recibe_pension'], 'adventista': form.cleaned_data['adventista'\n ], 'fecha_nacimiento': form.cleaned_data['fecha_nacimiento']})\n asistente.save()\n return redirect(reverse('carga-horaria:asistentes'))\n\n\nclass AsistenteUpdateView(LoginRequiredMixin, UpdateView):\n model = Asistente\n form_class = AsistenteForm\n template_name = 'carga_horaria/asistente/editar_asistente.html'\n\n def get_success_url(self):\n return reverse('carga-horaria:asistente', kwargs={'pk': self.object.pk}\n )\n\n def form_valid(self, form):\n asistente = form.save(commit=False)\n asistente.persona, _ = Persona.objects.update_or_create(rut=form.\n cleaned_data['rut'], defaults={'nombre': form.cleaned_data[\n 'nombre'], 'direccion': form.cleaned_data['direccion'],\n 'comuna': form.cleaned_data['comuna'], 'nacionalidad': form.\n cleaned_data['nacionalidad'], 'telefono': form.cleaned_data[\n 'telefono'], 'email_personal': form.cleaned_data[\n 'email_personal'], 'email_institucional': form.cleaned_data[\n 'email_institucional'], 'estado_civil': form.cleaned_data[\n 'estado_civil'], 'discapacidad': form.cleaned_data[\n 'discapacidad'], 'recibe_pension': form.cleaned_data[\n 'recibe_pension'], 'adventista': form.cleaned_data['adventista'\n ], 'fecha_nacimiento': form.cleaned_data['fecha_nacimiento']})\n asistente.save()\n return redirect(self.get_success_url())\n\n\nclass AsistenteDeleteView(LoginRequiredMixin, DeleteView):\n model = Asistente\n success_url = reverse_lazy('carga-horaria:asistentes')\n\n def get(self, request, *args, **kwargs):\n return self.post(request, *args, **kwargs)\n\n\n<mask token>\n\n\nclass AsignaturaBaseListView(LoginRequiredMixin, GetObjectsForUserMixin,\n ListView):\n \"\"\"\n Listado de asignatura base\n \"\"\"\n model = AsignaturaBase\n lookup = 'plan__colegio__pk'\n template_name = 'carga_horaria/asignaturabase/listado_asignaturabase.html'\n search_fields = ['nombre', 'plan']\n paginate_by = 10\n\n def get_context_data(self, *args, 
**kwargs):\n ctx = super().get_context_data(*args, **kwargs)\n ctx['levels'] = [(tag.name, tag.value) for tag in Nivel]\n ctx['nivel_actual'] = self.request.GET.get('nivel')\n return ctx\n\n def get_queryset(self):\n qs = super().get_queryset()\n nivel = self.request.GET.get('nivel')\n if nivel:\n qs = qs.filter(plan__nivel=nivel)\n return qs\n\n\nclass AsignaturaBaseDetailView(LoginRequiredMixin, DetailView):\n \"\"\"\n Detalle de asignatura base\n \"\"\"\n model = AsignaturaBase\n template_name = 'carga_horaria/asignaturabase/detalle_asignaturabase.html'\n\n\nclass AsignaturaBaseCreateView(LoginRequiredMixin, CreateView):\n model = AsignaturaBase\n form_class = AsignaturaBaseForm\n template_name = 'carga_horaria/asignaturabase/nuevo_asignaturabase.html'\n success_url = reverse_lazy('carga-horaria:asignaturasbase')\n\n def get_form_kwargs(self, *args, **kwargs):\n kwargs = super(AsignaturaBaseCreateView, self).get_form_kwargs(*\n args, **kwargs)\n kwargs.update({'user': self.request.user, 'colegio': self.request.\n session.get('colegio__pk', None)})\n return kwargs\n\n\nclass AsignaturaBaseUpdateView(LoginRequiredMixin, UpdateView):\n model = AsignaturaBase\n form_class = AsignaturaBaseForm\n template_name = 'carga_horaria/asignaturabase/editar_asignaturabase.html'\n\n def get_success_url(self):\n return reverse('carga-horaria:asignaturabase', kwargs={'pk': self.\n object.pk})\n\n\nclass AsignaturaBaseDeleteView(LoginRequiredMixin, DeleteView):\n model = AsignaturaBase\n success_url = reverse_lazy('carga-horaria:asignaturasbase')\n\n def get(self, request, *args, **kwargs):\n return self.post(request, *args, **kwargs)\n\n\n<mask token>\n\n\nclass AsignaturaListView(LoginRequiredMixin, ListView):\n \"\"\"\n Listado de asignatura\n \"\"\"\n model = Asignatura\n template_name = 'carga_horaria/asignatura/listado_asignatura.html'\n search_fields = ['base', 'periodo']\n paginate_by = 10\n\n def get_context_data(self, *args, **kwargs):\n ctx = 
super().get_context_data(*args, **kwargs)\n ctx['levels'] = [(tag.name, tag.value) for tag in Nivel][::-1]\n ctx['nivel_actual'] = self.request.GET.get('nivel')\n return ctx\n\n def get_queryset(self):\n qs = super().get_queryset()\n nivel = self.request.GET.get('nivel')\n if nivel:\n qs = qs.filter(base__plan__nivel=nivel)\n periodo = self.request.GET.get('periodo')\n if periodo:\n qs = qs.filter(periodo__pk=periodo)\n return qs\n\n\nclass AsignaturaDetailView(LoginRequiredMixin, DetailView):\n \"\"\"\n Detalle de asignatura\n \"\"\"\n model = Asignatura\n template_name = 'carga_horaria/asignatura/detalle_asignatura.html'\n\n def get_context_data(self, *args, **kwargs):\n ctx = super().get_context_data(*args, **kwargs)\n ctx['periodo'] = Periodo.objects.get(pk=self.kwargs['periodo_pk'])\n return ctx\n\n\nclass AsignaturaCreateView(LoginRequiredMixin, CreateView):\n model = Asignatura\n form_class = AsignaturaCreateForm\n template_name = 'carga_horaria/asignatura/nuevo_asignatura.html'\n\n def form_valid(self, form):\n periodo = Periodo.objects.get(pk=self.kwargs['pk'])\n horas = form.cleaned_data['horas']\n available = periodo.available\n if horas > available:\n form.add_error('horas',\n 'Horas superan el tiempo disponible ({})'.format(available))\n return self.form_invalid(form)\n else:\n self.object = form.save()\n self.object.periodos.add(periodo)\n return HttpResponseRedirect(self.get_success_url())\n\n def get_success_url(self):\n return reverse('carga-horaria:periodo', kwargs={'pk': self.kwargs[\n 'pk']})\n\n\nclass AsignaturaUpdateView(LoginRequiredMixin, UpdateView):\n model = Asignatura\n form_class = AsignaturaUpdateForm\n template_name = 'carga_horaria/asignatura/editar_asignatura.html'\n\n def get_success_url(self):\n return reverse('carga-horaria:periodo', kwargs={'pk': self.kwargs[\n 'periodo_pk']})\n\n def form_valid(self, form):\n periodo = Periodo.objects.get(pk=self.kwargs['periodo_pk'])\n horas = form.cleaned_data['horas']\n old_horas = 
Asignatura.objects.get(pk=self.object.pk).horas\n delta = horas - old_horas\n available = periodo.available\n if delta > available:\n form.add_error('horas',\n 'Horas superan el tiempo disponible ({})'.format(available +\n old_horas))\n return self.form_invalid(form)\n elif self.object.base:\n if periodo.colegio.jec:\n horas_base = self.object.base.horas_jec\n else:\n horas_base = self.object.base.horas_nec\n if horas < horas_base:\n form.add_error('horas',\n 'Horas deben ser como mínimo las del plan de estudios original ({})'\n .format(horas_base))\n return self.form_invalid(form)\n return super().form_valid(form)\n\n\nclass AsignaturaDeleteView(LoginRequiredMixin, DeleteView):\n model = Asignatura\n\n def get(self, request, *args, **kwargs):\n return self.post(request, *args, **kwargs)\n\n def get_success_url(self):\n return reverse('carga-horaria:periodo', kwargs={'pk': self.kwargs[\n 'periodo_pk']})\n",
"step-4": "<mask token>\n\n\nclass ProfesorListView(LoginRequiredMixin, SearchMixin,\n GetObjectsForUserMixin, ListView):\n \"\"\"\n Listado de profesores\n \"\"\"\n model = Profesor\n lookup = 'colegio__pk'\n template_name = 'carga_horaria/profesor/listado_profesor.html'\n search_fields = ['nombre', 'horas']\n paginate_by = 6\n\n\nclass ProfesorDetailView(LoginRequiredMixin, DetailView):\n \"\"\"\n Detalle de Profesor\n \"\"\"\n model = Profesor\n template_name = 'carga_horaria/profesor/detalle_profesor.html'\n\n\nclass ProfesorCreateView(LoginRequiredMixin, CreateView):\n model = Profesor\n form_class = ProfesorForm\n template_name = 'carga_horaria/profesor/nuevo_profesor.html'\n success_url = reverse_lazy('carga-horaria:profesores')\n\n def get_form_kwargs(self, *args, **kwargs):\n kwargs = super(ProfesorCreateView, self).get_form_kwargs(*args, **\n kwargs)\n colegio_pk = self.request.session.get('colegio__pk', None)\n if colegio_pk:\n kwargs.update({'user': self.request.user, 'colegio': colegio_pk,\n 'fundacion': Colegio.objects.get(pk=self.request.session.\n get('colegio__pk', None)).fundacion.pk})\n else:\n kwargs.update({'user': self.request.user})\n return kwargs\n\n def form_valid(self, form):\n profesor = form.save(commit=False)\n profesor.persona, _ = Persona.objects.update_or_create(rut=form.\n cleaned_data['rut'], defaults={'nombre': form.cleaned_data[\n 'nombre'], 'direccion': form.cleaned_data['direccion'],\n 'comuna': form.cleaned_data['comuna'], 'nacionalidad': form.\n cleaned_data['nacionalidad'], 'telefono': form.cleaned_data[\n 'telefono'], 'email_personal': form.cleaned_data[\n 'email_personal'], 'email_institucional': form.cleaned_data[\n 'email_institucional'], 'estado_civil': form.cleaned_data[\n 'estado_civil'], 'discapacidad': form.cleaned_data[\n 'discapacidad'], 'recibe_pension': form.cleaned_data[\n 'recibe_pension'], 'adventista': form.cleaned_data['adventista'\n ], 'fecha_nacimiento': form.cleaned_data['fecha_nacimiento']})\n 
profesor.save()\n return redirect(reverse('carga-horaria:profesores'))\n\n\nclass ProfesorUpdateView(LoginRequiredMixin, UpdateView):\n model = Profesor\n form_class = ProfesorForm\n template_name = 'carga_horaria/profesor/editar_profesor.html'\n\n def get_form_kwargs(self, *args, **kwargs):\n kwargs = super(ProfesorUpdateView, self).get_form_kwargs(*args, **\n kwargs)\n colegio_pk = self.request.session.get('colegio__pk', None)\n if colegio_pk:\n kwargs.update({'user': self.request.user, 'colegio': colegio_pk,\n 'fundacion': Colegio.objects.get(pk=self.request.session.\n get('colegio__pk', None)).fundacion.pk})\n else:\n kwargs.update({'user': self.request.user})\n return kwargs\n\n def form_valid(self, form):\n profesor = form.save(commit=False)\n profesor.persona, _ = Persona.objects.update_or_create(rut=form.\n cleaned_data['rut'], defaults={'nombre': form.cleaned_data[\n 'nombre'], 'direccion': form.cleaned_data['direccion'],\n 'comuna': form.cleaned_data['comuna'], 'nacionalidad': form.\n cleaned_data['nacionalidad'], 'telefono': form.cleaned_data[\n 'telefono'], 'email_personal': form.cleaned_data[\n 'email_personal'], 'email_institucional': form.cleaned_data[\n 'email_institucional'], 'estado_civil': form.cleaned_data[\n 'estado_civil'], 'discapacidad': form.cleaned_data[\n 'discapacidad'], 'recibe_pension': form.cleaned_data[\n 'recibe_pension'], 'adventista': form.cleaned_data['adventista'\n ], 'fecha_nacimiento': form.cleaned_data['fecha_nacimiento']})\n profesor.save()\n return redirect(self.get_success_url())\n\n def get_success_url(self):\n return reverse('carga-horaria:profesor', kwargs={'pk': self.object.pk})\n\n\nclass ProfesorDeleteView(LoginRequiredMixin, DeleteView):\n model = Profesor\n success_url = reverse_lazy('carga-horaria:profesores')\n\n def get(self, request, *args, **kwargs):\n return self.post(request, *args, **kwargs)\n\n\n<mask token>\n\n\nclass AsistenteListView(LoginRequiredMixin, SearchMixin,\n GetObjectsForUserMixin, 
ListView):\n \"\"\"\n Listado de asistentes\n \"\"\"\n model = Asistente\n lookup = 'colegio__pk'\n template_name = 'carga_horaria/asistente/listado_asistente.html'\n search_fields = ['nombre', 'horas']\n paginate_by = 6\n\n\nclass AsistenteDetailView(LoginRequiredMixin, DetailView):\n \"\"\"\n Detalle de Asistente\n \"\"\"\n model = Asistente\n template_name = 'carga_horaria/asistente/detalle_asistente.html'\n\n\nclass AsistenteCreateView(LoginRequiredMixin, CreateView):\n model = Asistente\n form_class = AsistenteForm\n template_name = 'carga_horaria/asistente/nuevo_asistente.html'\n success_url = reverse_lazy('carga-horaria:asistentes')\n\n def get_form_kwargs(self, *args, **kwargs):\n kwargs = super(AsistenteCreateView, self).get_form_kwargs(*args, **\n kwargs)\n colegio_pk = self.request.session.get('colegio__pk', None)\n if colegio_pk:\n kwargs.update({'user': self.request.user, 'colegio': colegio_pk,\n 'fundacion': Colegio.objects.get(pk=self.request.session.\n get('colegio__pk', None)).fundacion.pk})\n else:\n kwargs.update({'user': self.request.user})\n return kwargs\n\n def form_valid(self, form):\n asistente = form.save(commit=False)\n asistente.persona, _ = Persona.objects.update_or_create(rut=form.\n cleaned_data['rut'], defaults={'nombre': form.cleaned_data[\n 'nombre'], 'direccion': form.cleaned_data['direccion'],\n 'comuna': form.cleaned_data['comuna'], 'nacionalidad': form.\n cleaned_data['nacionalidad'], 'telefono': form.cleaned_data[\n 'telefono'], 'email_personal': form.cleaned_data[\n 'email_personal'], 'email_institucional': form.cleaned_data[\n 'email_institucional'], 'estado_civil': form.cleaned_data[\n 'estado_civil'], 'discapacidad': form.cleaned_data[\n 'discapacidad'], 'recibe_pension': form.cleaned_data[\n 'recibe_pension'], 'adventista': form.cleaned_data['adventista'\n ], 'fecha_nacimiento': form.cleaned_data['fecha_nacimiento']})\n asistente.save()\n return redirect(reverse('carga-horaria:asistentes'))\n\n\nclass 
AsistenteUpdateView(LoginRequiredMixin, UpdateView):\n model = Asistente\n form_class = AsistenteForm\n template_name = 'carga_horaria/asistente/editar_asistente.html'\n\n def get_success_url(self):\n return reverse('carga-horaria:asistente', kwargs={'pk': self.object.pk}\n )\n\n def form_valid(self, form):\n asistente = form.save(commit=False)\n asistente.persona, _ = Persona.objects.update_or_create(rut=form.\n cleaned_data['rut'], defaults={'nombre': form.cleaned_data[\n 'nombre'], 'direccion': form.cleaned_data['direccion'],\n 'comuna': form.cleaned_data['comuna'], 'nacionalidad': form.\n cleaned_data['nacionalidad'], 'telefono': form.cleaned_data[\n 'telefono'], 'email_personal': form.cleaned_data[\n 'email_personal'], 'email_institucional': form.cleaned_data[\n 'email_institucional'], 'estado_civil': form.cleaned_data[\n 'estado_civil'], 'discapacidad': form.cleaned_data[\n 'discapacidad'], 'recibe_pension': form.cleaned_data[\n 'recibe_pension'], 'adventista': form.cleaned_data['adventista'\n ], 'fecha_nacimiento': form.cleaned_data['fecha_nacimiento']})\n asistente.save()\n return redirect(self.get_success_url())\n\n\nclass AsistenteDeleteView(LoginRequiredMixin, DeleteView):\n model = Asistente\n success_url = reverse_lazy('carga-horaria:asistentes')\n\n def get(self, request, *args, **kwargs):\n return self.post(request, *args, **kwargs)\n\n\n<mask token>\n\n\nclass AsignaturaBaseListView(LoginRequiredMixin, GetObjectsForUserMixin,\n ListView):\n \"\"\"\n Listado de asignatura base\n \"\"\"\n model = AsignaturaBase\n lookup = 'plan__colegio__pk'\n template_name = 'carga_horaria/asignaturabase/listado_asignaturabase.html'\n search_fields = ['nombre', 'plan']\n paginate_by = 10\n\n def get_context_data(self, *args, **kwargs):\n ctx = super().get_context_data(*args, **kwargs)\n ctx['levels'] = [(tag.name, tag.value) for tag in Nivel]\n ctx['nivel_actual'] = self.request.GET.get('nivel')\n return ctx\n\n def get_queryset(self):\n qs = super().get_queryset()\n 
nivel = self.request.GET.get('nivel')\n if nivel:\n qs = qs.filter(plan__nivel=nivel)\n return qs\n\n\nclass AsignaturaBaseDetailView(LoginRequiredMixin, DetailView):\n \"\"\"\n Detalle de asignatura base\n \"\"\"\n model = AsignaturaBase\n template_name = 'carga_horaria/asignaturabase/detalle_asignaturabase.html'\n\n\nclass AsignaturaBaseCreateView(LoginRequiredMixin, CreateView):\n model = AsignaturaBase\n form_class = AsignaturaBaseForm\n template_name = 'carga_horaria/asignaturabase/nuevo_asignaturabase.html'\n success_url = reverse_lazy('carga-horaria:asignaturasbase')\n\n def get_form_kwargs(self, *args, **kwargs):\n kwargs = super(AsignaturaBaseCreateView, self).get_form_kwargs(*\n args, **kwargs)\n kwargs.update({'user': self.request.user, 'colegio': self.request.\n session.get('colegio__pk', None)})\n return kwargs\n\n\nclass AsignaturaBaseUpdateView(LoginRequiredMixin, UpdateView):\n model = AsignaturaBase\n form_class = AsignaturaBaseForm\n template_name = 'carga_horaria/asignaturabase/editar_asignaturabase.html'\n\n def get_success_url(self):\n return reverse('carga-horaria:asignaturabase', kwargs={'pk': self.\n object.pk})\n\n\nclass AsignaturaBaseDeleteView(LoginRequiredMixin, DeleteView):\n model = AsignaturaBase\n success_url = reverse_lazy('carga-horaria:asignaturasbase')\n\n def get(self, request, *args, **kwargs):\n return self.post(request, *args, **kwargs)\n\n\n<mask token>\n\n\nclass AsignaturaListView(LoginRequiredMixin, ListView):\n \"\"\"\n Listado de asignatura\n \"\"\"\n model = Asignatura\n template_name = 'carga_horaria/asignatura/listado_asignatura.html'\n search_fields = ['base', 'periodo']\n paginate_by = 10\n\n def get_context_data(self, *args, **kwargs):\n ctx = super().get_context_data(*args, **kwargs)\n ctx['levels'] = [(tag.name, tag.value) for tag in Nivel][::-1]\n ctx['nivel_actual'] = self.request.GET.get('nivel')\n return ctx\n\n def get_queryset(self):\n qs = super().get_queryset()\n nivel = self.request.GET.get('nivel')\n 
if nivel:\n qs = qs.filter(base__plan__nivel=nivel)\n periodo = self.request.GET.get('periodo')\n if periodo:\n qs = qs.filter(periodo__pk=periodo)\n return qs\n\n\nclass AsignaturaDetailView(LoginRequiredMixin, DetailView):\n \"\"\"\n Detalle de asignatura\n \"\"\"\n model = Asignatura\n template_name = 'carga_horaria/asignatura/detalle_asignatura.html'\n\n def get_context_data(self, *args, **kwargs):\n ctx = super().get_context_data(*args, **kwargs)\n ctx['periodo'] = Periodo.objects.get(pk=self.kwargs['periodo_pk'])\n return ctx\n\n\nclass AsignaturaCreateView(LoginRequiredMixin, CreateView):\n model = Asignatura\n form_class = AsignaturaCreateForm\n template_name = 'carga_horaria/asignatura/nuevo_asignatura.html'\n\n def form_valid(self, form):\n periodo = Periodo.objects.get(pk=self.kwargs['pk'])\n horas = form.cleaned_data['horas']\n available = periodo.available\n if horas > available:\n form.add_error('horas',\n 'Horas superan el tiempo disponible ({})'.format(available))\n return self.form_invalid(form)\n else:\n self.object = form.save()\n self.object.periodos.add(periodo)\n return HttpResponseRedirect(self.get_success_url())\n\n def get_success_url(self):\n return reverse('carga-horaria:periodo', kwargs={'pk': self.kwargs[\n 'pk']})\n\n\nclass AsignaturaUpdateView(LoginRequiredMixin, UpdateView):\n model = Asignatura\n form_class = AsignaturaUpdateForm\n template_name = 'carga_horaria/asignatura/editar_asignatura.html'\n\n def get_success_url(self):\n return reverse('carga-horaria:periodo', kwargs={'pk': self.kwargs[\n 'periodo_pk']})\n\n def form_valid(self, form):\n periodo = Periodo.objects.get(pk=self.kwargs['periodo_pk'])\n horas = form.cleaned_data['horas']\n old_horas = Asignatura.objects.get(pk=self.object.pk).horas\n delta = horas - old_horas\n available = periodo.available\n if delta > available:\n form.add_error('horas',\n 'Horas superan el tiempo disponible ({})'.format(available +\n old_horas))\n return self.form_invalid(form)\n elif 
self.object.base:\n if periodo.colegio.jec:\n horas_base = self.object.base.horas_jec\n else:\n horas_base = self.object.base.horas_nec\n if horas < horas_base:\n form.add_error('horas',\n 'Horas deben ser como mínimo las del plan de estudios original ({})'\n .format(horas_base))\n return self.form_invalid(form)\n return super().form_valid(form)\n\n\nclass AsignaturaDeleteView(LoginRequiredMixin, DeleteView):\n model = Asignatura\n\n def get(self, request, *args, **kwargs):\n return self.post(request, *args, **kwargs)\n\n def get_success_url(self):\n return reverse('carga-horaria:periodo', kwargs={'pk': self.kwargs[\n 'periodo_pk']})\n",
"step-5": "from django.db.models import Q\nfrom django.contrib.auth.mixins import LoginRequiredMixin\nfrom django.http import HttpResponseRedirect\nfrom django.shortcuts import render, redirect\nfrom django.views.generic import ListView, DetailView, CreateView, UpdateView, DeleteView\nfrom carga_horaria.models import Profesor, AsignaturaBase, Asignatura, Asistente\nfrom carga_horaria.formsAlexis import ProfesorForm, AsignaturaBaseForm, AsignaturaCreateForm, AsignaturaUpdateForm, AsistenteForm\nfrom django.core.urlresolvers import reverse_lazy, reverse\nfrom guardian.shortcuts import get_objects_for_user\nfrom .models import Persona\nfrom .models import Fundacion\nfrom .models import Colegio\nfrom .models import Periodo\nfrom .models import Nivel\n\n\nclass LevelFilterMixin(object):\n def get_context_data(self, *args, **kwargs):\n ctx = super().get_context_data(*args, **kwargs)\n ctx['levels'] = [(tag.name, tag.value) for tag in Nivel][::-1]\n ctx['nivel_actual'] = self.request.GET.get('nivel')\n return ctx\n\n def get_queryset(self):\n qs = super().get_queryset()\n\n nivel = self.request.GET.get('nivel')\n if nivel:\n qs = qs.filter(plan__nivel=nivel)\n\n return qs\n\n\n\n# FIXME: I will leave it like this for now,\n# but it's still possible for somebody to poke object ids to see what shouldn't see\n# fix this!!1\n\n\nclass SearchMixin(object):\n def get_queryset(self):\n qs = super(SearchMixin, self).get_queryset()\n q = self.request.GET.get('q', None)\n if q:\n if qs.model == Profesor:\n qs = qs.filter(Q(persona__nombre__unaccent__icontains=q) | Q(persona__rut__unaccent__icontains=q) | Q(asignacionextra__descripcion__unaccent__icontains=q) | Q(asignacionnoaula__descripcion__unaccent__icontains=q))\n else:\n qs = qs.filter(Q(persona__nombre__unaccent__icontains=q) | Q(persona__rut__unaccent__icontains=q) | Q(asignacionasistente__descripcion__unaccent__icontains=q) | Q(funcion__unaccent__icontains=q))\n return qs\n\n\ndef get_for_user(request, qs, lookup, user):\n 
periodo = request.session.get('periodo', 2020)\n\n if not user.is_superuser:\n colegios = [c.pk for c in get_objects_for_user(user, \"carga_horaria.change_colegio\")]\n \n # new logic for colegio switcher\n selected = request.session.get('colegio__pk', None)\n if selected:\n colegios = [selected]\n # end\n \n kwargs = {\"{}__in\".format(lookup): colegios,\n \"{}periode\".format(lookup[:-2]): periodo}\n return qs.filter(**kwargs).distinct()\n else:\n colegios = [c.pk for c in Colegio.objects.all()]\n # new logic for colegio switcher\n selected = request.session.get('colegio__pk', None)\n if selected:\n colegios = [selected]\n # end\n \n kwargs = {\"{}__in\".format(lookup): colegios,\n \"{}periode\".format(lookup[:-2]): periodo}\n return qs.filter(**kwargs).distinct()\n \n \n\nclass GetObjectsForUserMixin(object):\n def get_queryset(self):\n qs = super(GetObjectsForUserMixin, self).get_queryset()\n periodo = self.request.session.get('periodo', 2020)\n\n if not self.request.user.is_superuser:\n colegios = [c.pk for c in get_objects_for_user(self.request.user, \"carga_horaria.change_colegio\")]\n\n # new logic for colegio switcher\n selected = self.request.session.get('colegio__pk', None)\n if selected:\n colegios = [selected]\n # end\n \n kwargs = {\"{}__in\".format(self.lookup): colegios,\n \"{}periode\".format(self.lookup[:-2]): periodo}\n return qs.filter(**kwargs).distinct()\n else:\n colegios = [c.pk for c in Colegio.objects.all()]\n # new logic for colegio switcher\n selected = self.request.session.get('colegio__pk', None)\n if selected:\n colegios = [selected]\n # end\n \n kwargs = {\"{}__in\".format(self.lookup): colegios,\n \"{}periode\".format(self.lookup[:-2]): periodo}\n return qs.filter(**kwargs).distinct()\n\n\nclass ObjPermissionRequiredMixin(object):\n def get_object(self, *args, **kwargs):\n obj = super(ObjPermissionRequiredMixin, self).get_object(*args, **kwargs)\n if self.request.user.has_perm(self.permission, obj):\n return obj\n else:\n raise 
Http404\n\n\n\"\"\"\n Comienzo Crud Profesor\n\"\"\"\nclass ProfesorListView(LoginRequiredMixin, SearchMixin, GetObjectsForUserMixin, ListView):\n \"\"\"\n Listado de profesores\n \"\"\"\n model = Profesor\n lookup = 'colegio__pk'\n template_name = 'carga_horaria/profesor/listado_profesor.html'\n search_fields = ['nombre', 'horas']\n paginate_by = 6\n\n\n\nclass ProfesorDetailView(LoginRequiredMixin, DetailView):\n \"\"\"\n Detalle de Profesor\n \"\"\"\n model = Profesor\n template_name = 'carga_horaria/profesor/detalle_profesor.html'\n\n\nclass ProfesorCreateView(LoginRequiredMixin, CreateView):\n model = Profesor\n form_class = ProfesorForm\n template_name = 'carga_horaria/profesor/nuevo_profesor.html'\n success_url = reverse_lazy('carga-horaria:profesores')\n\n def get_form_kwargs(self, *args, **kwargs):\n kwargs = super(ProfesorCreateView, self).get_form_kwargs(*args, **kwargs)\n colegio_pk = self.request.session.get('colegio__pk', None)\n if colegio_pk:\n kwargs.update({'user': self.request.user,\n 'colegio': colegio_pk,\n 'fundacion': Colegio.objects.get(pk=self.request.session.get('colegio__pk', None)).fundacion.pk})\n else:\n kwargs.update({'user': self.request.user})\n\n return kwargs\n\n def form_valid(self, form):\n profesor = form.save(commit=False)\n profesor.persona, _ = Persona.objects.update_or_create(rut=form.cleaned_data['rut'],\n defaults={'nombre': form.cleaned_data['nombre'],\n 'direccion': form.cleaned_data['direccion'],\n 'comuna': form.cleaned_data['comuna'],\n 'nacionalidad': form.cleaned_data['nacionalidad'],\n 'telefono': form.cleaned_data['telefono'],\n 'email_personal': form.cleaned_data['email_personal'],\n 'email_institucional': form.cleaned_data['email_institucional'],\n 'estado_civil': form.cleaned_data['estado_civil'],\n 'discapacidad': form.cleaned_data['discapacidad'],\n 'recibe_pension': form.cleaned_data['recibe_pension'],\n 'adventista': form.cleaned_data['adventista'],\n 'fecha_nacimiento': 
form.cleaned_data['fecha_nacimiento']})\n profesor.save()\n return redirect(reverse('carga-horaria:profesores'))\n\n\nclass ProfesorUpdateView(LoginRequiredMixin, UpdateView):\n model = Profesor\n form_class = ProfesorForm\n template_name = 'carga_horaria/profesor/editar_profesor.html'\n\n def get_form_kwargs(self, *args, **kwargs):\n kwargs = super(ProfesorUpdateView, self).get_form_kwargs(*args, **kwargs)\n colegio_pk = self.request.session.get('colegio__pk', None)\n if colegio_pk:\n kwargs.update({'user': self.request.user,\n 'colegio': colegio_pk,\n 'fundacion': Colegio.objects.get(pk=self.request.session.get('colegio__pk', None)).fundacion.pk})\n else:\n kwargs.update({'user': self.request.user})\n\n return kwargs\n\n def form_valid(self, form):\n profesor = form.save(commit=False)\n profesor.persona, _ = Persona.objects.update_or_create(rut=form.cleaned_data['rut'],\n defaults={'nombre': form.cleaned_data['nombre'],\n 'direccion': form.cleaned_data['direccion'],\n 'comuna': form.cleaned_data['comuna'],\n 'nacionalidad': form.cleaned_data['nacionalidad'],\n 'telefono': form.cleaned_data['telefono'],\n 'email_personal': form.cleaned_data['email_personal'],\n 'email_institucional': form.cleaned_data['email_institucional'],\n 'estado_civil': form.cleaned_data['estado_civil'],\n 'discapacidad': form.cleaned_data['discapacidad'],\n 'recibe_pension': form.cleaned_data['recibe_pension'],\n 'adventista': form.cleaned_data['adventista'],\n 'fecha_nacimiento': form.cleaned_data['fecha_nacimiento']})\n profesor.save()\n return redirect(self.get_success_url())\n\n\n def get_success_url(self):\n return reverse(\n 'carga-horaria:profesor',\n kwargs={\n 'pk': self.object.pk,\n }\n )\n\n\nclass ProfesorDeleteView(LoginRequiredMixin, DeleteView):\n model = Profesor\n success_url = reverse_lazy('carga-horaria:profesores')\n\n def get(self, request, *args, **kwargs):\n return self.post(request, *args, **kwargs)\n\n\n# \"\"\"\n# Comienzo Crud Curso\n# \"\"\"\n# class 
CursoListView(ListView):\n# \"\"\"\n# Listado de cursos\n# \"\"\"\n# model = Curso\n# template_name = 'carga_horaria/curso/listado_curso.html'\n# search_fields = ['periodo', 'letra']\n# paginate_by = 6\n\n\n# class CursoDetailView(DetailView):\n# \"\"\"\n# Detalle de curso\n# \"\"\"\n# model = Curso\n# template_name = 'carga_horaria/curso/detalle_curso.html'\n\n\n# class CursoCreateView(CreateView):\n# model = Curso\n# form_class = CursoForm\n# template_name = 'carga_horaria/curso/nuevo_curso.html'\n# success_url = reverse_lazy('carga-horaria:cursos')\n\n\n# class CursoUpdateView(UpdateView):\n# model = Curso\n# form_class = CursoForm\n# template_name = 'carga_horaria/curso/editar_curso.html'\n\n# def get_success_url(self):\n# return reverse(\n# 'carga-horaria:curso',\n# kwargs={\n# 'pk': self.object.pk,\n# }\n# )\n\n\n# class CursoDeleteView(DeleteView):\n# model = Curso\n# success_url = reverse_lazy('carga-horaria:cursos')\n\n# def get(self, request, *args, **kwargs):\n# return self.post(request, *args, **kwargs)\n\n\n\"\"\"\n Comienzo Crud Asistente\n\"\"\"\nclass AsistenteListView(LoginRequiredMixin, SearchMixin, GetObjectsForUserMixin, ListView):\n \"\"\"\n Listado de asistentes\n \"\"\"\n model = Asistente\n lookup = 'colegio__pk'\n template_name = 'carga_horaria/asistente/listado_asistente.html'\n search_fields = ['nombre', 'horas']\n paginate_by = 6\n\n\nclass AsistenteDetailView(LoginRequiredMixin, DetailView):\n \"\"\"\n Detalle de Asistente\n \"\"\"\n model = Asistente\n template_name = 'carga_horaria/asistente/detalle_asistente.html'\n\n\nclass AsistenteCreateView(LoginRequiredMixin, CreateView):\n model = Asistente\n form_class = AsistenteForm\n template_name = 'carga_horaria/asistente/nuevo_asistente.html'\n success_url = reverse_lazy('carga-horaria:asistentes')\n\n def get_form_kwargs(self, *args, **kwargs):\n kwargs = super(AsistenteCreateView, self).get_form_kwargs(*args, **kwargs)\n colegio_pk = self.request.session.get('colegio__pk', None)\n if 
colegio_pk:\n kwargs.update({'user': self.request.user,\n 'colegio': colegio_pk,\n 'fundacion': Colegio.objects.get(pk=self.request.session.get('colegio__pk', None)).fundacion.pk})\n else:\n kwargs.update({'user': self.request.user})\n\n return kwargs\n\n\n def form_valid(self, form):\n asistente = form.save(commit=False)\n asistente.persona, _ = Persona.objects.update_or_create(rut=form.cleaned_data['rut'],\n defaults={'nombre': form.cleaned_data['nombre'],\n 'direccion': form.cleaned_data['direccion'],\n 'comuna': form.cleaned_data['comuna'],\n 'nacionalidad': form.cleaned_data['nacionalidad'],\n 'telefono': form.cleaned_data['telefono'],\n 'email_personal': form.cleaned_data['email_personal'],\n 'email_institucional': form.cleaned_data['email_institucional'],\n 'estado_civil': form.cleaned_data['estado_civil'],\n 'discapacidad': form.cleaned_data['discapacidad'],\n 'recibe_pension': form.cleaned_data['recibe_pension'],\n 'adventista': form.cleaned_data['adventista'],\n 'fecha_nacimiento': form.cleaned_data['fecha_nacimiento']})\n asistente.save()\n return redirect(reverse('carga-horaria:asistentes'))\n\n\nclass AsistenteUpdateView(LoginRequiredMixin, UpdateView):\n model = Asistente\n form_class = AsistenteForm\n template_name = 'carga_horaria/asistente/editar_asistente.html'\n\n def get_success_url(self):\n return reverse(\n 'carga-horaria:asistente',\n kwargs={\n 'pk': self.object.pk,\n }\n )\n\n def form_valid(self, form):\n asistente = form.save(commit=False)\n asistente.persona, _ = Persona.objects.update_or_create(rut=form.cleaned_data['rut'],\n defaults={'nombre': form.cleaned_data['nombre'],\n 'direccion': form.cleaned_data['direccion'],\n 'comuna': form.cleaned_data['comuna'],\n 'nacionalidad': form.cleaned_data['nacionalidad'],\n 'telefono': form.cleaned_data['telefono'],\n 'email_personal': form.cleaned_data['email_personal'],\n 'email_institucional': form.cleaned_data['email_institucional'],\n 'estado_civil': form.cleaned_data['estado_civil'],\n 
'discapacidad': form.cleaned_data['discapacidad'],\n 'recibe_pension': form.cleaned_data['recibe_pension'],\n 'adventista': form.cleaned_data['adventista'],\n 'fecha_nacimiento': form.cleaned_data['fecha_nacimiento']})\n asistente.save()\n return redirect(self.get_success_url())\n\n\nclass AsistenteDeleteView(LoginRequiredMixin, DeleteView):\n model = Asistente\n success_url = reverse_lazy('carga-horaria:asistentes')\n\n def get(self, request, *args, **kwargs):\n return self.post(request, *args, **kwargs)\n\n\n\n\n\"\"\"\n Comienzo Crud Asignatura Base\n\"\"\"\nclass AsignaturaBaseListView(LoginRequiredMixin, GetObjectsForUserMixin, ListView):\n \"\"\"\n Listado de asignatura base\n \"\"\"\n model = AsignaturaBase\n lookup = 'plan__colegio__pk'\n template_name = 'carga_horaria/asignaturabase/listado_asignaturabase.html'\n search_fields = ['nombre', 'plan']\n paginate_by = 10\n\n def get_context_data(self, *args, **kwargs):\n ctx = super().get_context_data(*args, **kwargs)\n ctx['levels'] = [(tag.name, tag.value) for tag in Nivel]\n ctx['nivel_actual'] = self.request.GET.get('nivel')\n return ctx\n\n def get_queryset(self):\n qs = super().get_queryset()\n\n nivel = self.request.GET.get('nivel')\n if nivel:\n qs = qs.filter(plan__nivel=nivel)\n\n return qs\n\n\nclass AsignaturaBaseDetailView(LoginRequiredMixin, DetailView):\n \"\"\"\n Detalle de asignatura base\n \"\"\"\n model = AsignaturaBase\n template_name = 'carga_horaria/asignaturabase/detalle_asignaturabase.html'\n\n\nclass AsignaturaBaseCreateView(LoginRequiredMixin, CreateView):\n model = AsignaturaBase\n form_class = AsignaturaBaseForm\n template_name = 'carga_horaria/asignaturabase/nuevo_asignaturabase.html'\n success_url = reverse_lazy('carga-horaria:asignaturasbase')\n\n def get_form_kwargs(self, *args, **kwargs):\n kwargs = super(AsignaturaBaseCreateView, self).get_form_kwargs(*args, **kwargs)\n kwargs.update({'user': self.request.user,\n 'colegio': self.request.session.get('colegio__pk', None)})\n 
return kwargs\n\n\nclass AsignaturaBaseUpdateView(LoginRequiredMixin, UpdateView):\n model = AsignaturaBase\n form_class = AsignaturaBaseForm\n template_name = 'carga_horaria/asignaturabase/editar_asignaturabase.html'\n\n def get_success_url(self):\n return reverse(\n 'carga-horaria:asignaturabase',\n kwargs={\n 'pk': self.object.pk,\n }\n )\n\n\nclass AsignaturaBaseDeleteView(LoginRequiredMixin, DeleteView):\n model = AsignaturaBase\n success_url = reverse_lazy('carga-horaria:asignaturasbase')\n\n def get(self, request, *args, **kwargs):\n return self.post(request, *args, **kwargs)\n\n\n\"\"\"\n Comienzo Crud Asignatura\n\"\"\"\nclass AsignaturaListView(LoginRequiredMixin, ListView):\n \"\"\"\n Listado de asignatura\n \"\"\"\n model = Asignatura\n template_name = 'carga_horaria/asignatura/listado_asignatura.html'\n search_fields = ['base', 'periodo']\n paginate_by = 10\n\n def get_context_data(self, *args, **kwargs):\n ctx = super().get_context_data(*args, **kwargs)\n ctx['levels'] = [(tag.name, tag.value) for tag in Nivel][::-1]\n ctx['nivel_actual'] = self.request.GET.get('nivel')\n return ctx\n\n def get_queryset(self):\n qs = super().get_queryset()\n\n nivel = self.request.GET.get('nivel')\n if nivel:\n qs = qs.filter(base__plan__nivel=nivel)\n\n periodo = self.request.GET.get('periodo')\n if periodo:\n qs = qs.filter(periodo__pk=periodo)\n return qs\n\n\nclass AsignaturaDetailView(LoginRequiredMixin, DetailView):\n \"\"\"\n Detalle de asignatura\n \"\"\"\n model = Asignatura\n template_name = 'carga_horaria/asignatura/detalle_asignatura.html'\n\n def get_context_data(self, *args, **kwargs):\n ctx = super().get_context_data(*args, **kwargs)\n ctx['periodo'] = Periodo.objects.get(pk=self.kwargs['periodo_pk'])\n return ctx\n\nclass AsignaturaCreateView(LoginRequiredMixin, CreateView):\n model = Asignatura\n form_class = AsignaturaCreateForm\n template_name = 'carga_horaria/asignatura/nuevo_asignatura.html'\n\n def form_valid(self, form):\n # dirty validation\n 
periodo = Periodo.objects.get(pk=self.kwargs['pk'])\n horas = form.cleaned_data['horas']\n available = periodo.available\n if horas > available:\n form.add_error('horas', \"Horas superan el tiempo disponible ({})\".format(available))\n return self.form_invalid(form)\n else:\n self.object = form.save()\n self.object.periodos.add(periodo)\n return HttpResponseRedirect(self.get_success_url())\n\n def get_success_url(self):\n return reverse(\n 'carga-horaria:periodo',\n kwargs={\n 'pk': self.kwargs['pk'],\n }\n )\n\n\n\nclass AsignaturaUpdateView(LoginRequiredMixin, UpdateView):\n model = Asignatura\n form_class = AsignaturaUpdateForm\n template_name = 'carga_horaria/asignatura/editar_asignatura.html'\n\n def get_success_url(self):\n return reverse('carga-horaria:periodo', kwargs={'pk': self.kwargs['periodo_pk']})\n\n def form_valid(self, form):\n # dirty validation\n periodo = Periodo.objects.get(pk=self.kwargs['periodo_pk'])\n horas = form.cleaned_data['horas']\n old_horas = Asignatura.objects.get(pk=self.object.pk).horas\n delta = horas - old_horas\n available = periodo.available\n\n if delta > available:\n form.add_error('horas', \"Horas superan el tiempo disponible ({})\".format(available + old_horas))\n return self.form_invalid(form)\n elif self.object.base:\n if periodo.colegio.jec:\n horas_base = self.object.base.horas_jec\n else:\n horas_base = self.object.base.horas_nec\n\n if horas < horas_base:\n form.add_error('horas', \"Horas deben ser como mínimo las del plan de estudios original ({})\".format(horas_base))\n return self.form_invalid(form)\n\n return super().form_valid(form)\n\n\nclass AsignaturaDeleteView(LoginRequiredMixin, DeleteView):\n model = Asignatura\n\n def get(self, request, *args, **kwargs):\n return self.post(request, *args, **kwargs)\n\n def get_success_url(self):\n return reverse(\n 'carga-horaria:periodo',\n kwargs={\n 'pk': self.kwargs['periodo_pk'],\n }\n )\n",
"step-ids": [
52,
53,
56,
73,
85
]
}
|
[
52,
53,
56,
73,
85
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
router.register('users', views.CategoryView)
<|reserved_special_token_0|>
if settings.DEBUG:
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT
)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
router = routers.DefaultRouter()
router.register('users', views.CategoryView)
urlpatterns = [path('admin/', admin.site.urls), path('', views.index_view,
name='index_view'), path('about_view/', views.about_view, name=
'about_view'), path('shop_view/', views.shop_view, name='shop_view'),
path('checkout_view/', views.checkout_view, name='checkout_view'), path
('contactus_view/', views.contactus_view, name='contactus_view'), path(
'gallery_view/', views.gallery_view, name='gallery_view'), path(
'shopdetail_view/', views.shopdetail_view, name='shopdetail_view'),
path('cart_view/', views.cart_view, name='cart_view'), path(
'myaccount_view/', views.myaccount_view, name='myaccount_view'), path(
'wishlist_view/', views.wishlist_view, name='wishlist_view'), path(
'category/', views.category, name='category'), path(
'include_search_view/', views.include_search_view, name=
'include_search_view'), path('catsearch_view/<int:id>', views.
catsearch_view, name='catsearch_view'), path('home_view/', views.
home_view, name='home_view'), path('directcat_view/<int:id>', views.
directcat_view, name='directcat_view'), path('showlist/', views.
showlist, name='showlist'), path('product_search/', views.
product_search, name='product_search'), path('emailresponse/', views.
emailresponse, name='emailresponse'), path('accounts/', include(
'django.contrib.auth.urls')), path('signup/', views.signup_view, name=
'signup'), path('accounts/logout/', views.logout_view, name=
'logout_view'), path('myaccount_details/', views.myaccount_details,
name='myaccount_details'), path('sort/<str:key>', views.sort_view, name
='sort'), path('profile/', views.profile, name='profile'), path(
'change_password/', views.change_password, name='change_password'),
path('addcart/', views.add_to_cart, name='addcart'), path('cart_bag/',
views.cart_bag, name='cart_bag'), path('update_cart/', views.
update_cart, name='update_cart'), path('cart_del/<int:id>', views.
cart_del, name='cart_del'), path('', include(router.urls)), path(
'users/', include('rest_framework.urls', namespace='rest_framework'))]
if settings.DEBUG:
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT
)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
from django.conf import settings
from django.conf.urls.static import static
from django.contrib import admin
from django.db import router
from django.urls import path, include
from rest_framework import routers
from spencersapp import views
router = routers.DefaultRouter()
router.register('users', views.CategoryView)
urlpatterns = [path('admin/', admin.site.urls), path('', views.index_view,
name='index_view'), path('about_view/', views.about_view, name=
'about_view'), path('shop_view/', views.shop_view, name='shop_view'),
path('checkout_view/', views.checkout_view, name='checkout_view'), path
('contactus_view/', views.contactus_view, name='contactus_view'), path(
'gallery_view/', views.gallery_view, name='gallery_view'), path(
'shopdetail_view/', views.shopdetail_view, name='shopdetail_view'),
path('cart_view/', views.cart_view, name='cart_view'), path(
'myaccount_view/', views.myaccount_view, name='myaccount_view'), path(
'wishlist_view/', views.wishlist_view, name='wishlist_view'), path(
'category/', views.category, name='category'), path(
'include_search_view/', views.include_search_view, name=
'include_search_view'), path('catsearch_view/<int:id>', views.
catsearch_view, name='catsearch_view'), path('home_view/', views.
home_view, name='home_view'), path('directcat_view/<int:id>', views.
directcat_view, name='directcat_view'), path('showlist/', views.
showlist, name='showlist'), path('product_search/', views.
product_search, name='product_search'), path('emailresponse/', views.
emailresponse, name='emailresponse'), path('accounts/', include(
'django.contrib.auth.urls')), path('signup/', views.signup_view, name=
'signup'), path('accounts/logout/', views.logout_view, name=
'logout_view'), path('myaccount_details/', views.myaccount_details,
name='myaccount_details'), path('sort/<str:key>', views.sort_view, name
='sort'), path('profile/', views.profile, name='profile'), path(
'change_password/', views.change_password, name='change_password'),
path('addcart/', views.add_to_cart, name='addcart'), path('cart_bag/',
views.cart_bag, name='cart_bag'), path('update_cart/', views.
update_cart, name='update_cart'), path('cart_del/<int:id>', views.
cart_del, name='cart_del'), path('', include(router.urls)), path(
'users/', include('rest_framework.urls', namespace='rest_framework'))]
if settings.DEBUG:
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT
)
<|reserved_special_token_1|>
"""storeproject URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.conf import settings
from django.conf.urls.static import static
from django.contrib import admin
from django.db import router
from django.urls import path, include
from rest_framework import routers
from spencersapp import views
router = routers.DefaultRouter()
router.register(r'users', views.CategoryView)
urlpatterns = [
path('admin/', admin.site.urls),
path('', views.index_view, name='index_view'),
path('about_view/', views.about_view, name='about_view'),
path('shop_view/', views.shop_view, name='shop_view'),
path('checkout_view/', views.checkout_view, name='checkout_view'),
path('contactus_view/', views.contactus_view, name='contactus_view'),
path('gallery_view/', views.gallery_view, name='gallery_view'),
path('shopdetail_view/', views.shopdetail_view, name='shopdetail_view'),
path('cart_view/', views.cart_view, name='cart_view'),
path('myaccount_view/', views.myaccount_view, name='myaccount_view'),
path('wishlist_view/', views.wishlist_view, name='wishlist_view'),
path('category/', views.category, name='category'),
path('include_search_view/', views.include_search_view, name='include_search_view'),
path('catsearch_view/<int:id>', views.catsearch_view, name='catsearch_view'),
# path('registration_view/', views.registration_view, name='registration_view'),
# path('login_view/', views.login_view, name='login_view'),
path('home_view/', views.home_view, name='home_view'),
path('directcat_view/<int:id>', views.directcat_view, name='directcat_view'),
path('showlist/', views.showlist, name='showlist'),
path('product_search/', views.product_search, name='product_search'),
path('emailresponse/', views.emailresponse, name='emailresponse'),
# path('user_login/', views.user_login, name='user_login'),
path('accounts/', include('django.contrib.auth.urls')),
path('signup/', views.signup_view, name='signup'),
path('accounts/logout/', views.logout_view, name='logout_view'),
path('myaccount_details/', views.myaccount_details, name='myaccount_details'),
path('sort/<str:key>', views.sort_view, name='sort'),
path('profile/', views.profile, name='profile'),
path('change_password/', views.change_password, name='change_password'),
path('addcart/',views.add_to_cart, name='addcart'),
path('cart_bag/', views.cart_bag, name='cart_bag'),
path('update_cart/', views.update_cart, name='update_cart'),
path('cart_del/<int:id>', views.cart_del, name='cart_del'),
path('', include(router.urls)),
path('users/', include('rest_framework.urls', namespace='rest_framework'))
]
if settings.DEBUG:
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
flexible
|
{
"blob_id": "4a8fa195a573f8001e55b099a8882fe71bcca233",
"index": 8335,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nrouter.register('users', views.CategoryView)\n<mask token>\nif settings.DEBUG:\n urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT\n )\n",
"step-3": "<mask token>\nrouter = routers.DefaultRouter()\nrouter.register('users', views.CategoryView)\nurlpatterns = [path('admin/', admin.site.urls), path('', views.index_view,\n name='index_view'), path('about_view/', views.about_view, name=\n 'about_view'), path('shop_view/', views.shop_view, name='shop_view'),\n path('checkout_view/', views.checkout_view, name='checkout_view'), path\n ('contactus_view/', views.contactus_view, name='contactus_view'), path(\n 'gallery_view/', views.gallery_view, name='gallery_view'), path(\n 'shopdetail_view/', views.shopdetail_view, name='shopdetail_view'),\n path('cart_view/', views.cart_view, name='cart_view'), path(\n 'myaccount_view/', views.myaccount_view, name='myaccount_view'), path(\n 'wishlist_view/', views.wishlist_view, name='wishlist_view'), path(\n 'category/', views.category, name='category'), path(\n 'include_search_view/', views.include_search_view, name=\n 'include_search_view'), path('catsearch_view/<int:id>', views.\n catsearch_view, name='catsearch_view'), path('home_view/', views.\n home_view, name='home_view'), path('directcat_view/<int:id>', views.\n directcat_view, name='directcat_view'), path('showlist/', views.\n showlist, name='showlist'), path('product_search/', views.\n product_search, name='product_search'), path('emailresponse/', views.\n emailresponse, name='emailresponse'), path('accounts/', include(\n 'django.contrib.auth.urls')), path('signup/', views.signup_view, name=\n 'signup'), path('accounts/logout/', views.logout_view, name=\n 'logout_view'), path('myaccount_details/', views.myaccount_details,\n name='myaccount_details'), path('sort/<str:key>', views.sort_view, name\n ='sort'), path('profile/', views.profile, name='profile'), path(\n 'change_password/', views.change_password, name='change_password'),\n path('addcart/', views.add_to_cart, name='addcart'), path('cart_bag/',\n views.cart_bag, name='cart_bag'), path('update_cart/', views.\n update_cart, name='update_cart'), 
path('cart_del/<int:id>', views.\n cart_del, name='cart_del'), path('', include(router.urls)), path(\n 'users/', include('rest_framework.urls', namespace='rest_framework'))]\nif settings.DEBUG:\n urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT\n )\n",
"step-4": "<mask token>\nfrom django.conf import settings\nfrom django.conf.urls.static import static\nfrom django.contrib import admin\nfrom django.db import router\nfrom django.urls import path, include\nfrom rest_framework import routers\nfrom spencersapp import views\nrouter = routers.DefaultRouter()\nrouter.register('users', views.CategoryView)\nurlpatterns = [path('admin/', admin.site.urls), path('', views.index_view,\n name='index_view'), path('about_view/', views.about_view, name=\n 'about_view'), path('shop_view/', views.shop_view, name='shop_view'),\n path('checkout_view/', views.checkout_view, name='checkout_view'), path\n ('contactus_view/', views.contactus_view, name='contactus_view'), path(\n 'gallery_view/', views.gallery_view, name='gallery_view'), path(\n 'shopdetail_view/', views.shopdetail_view, name='shopdetail_view'),\n path('cart_view/', views.cart_view, name='cart_view'), path(\n 'myaccount_view/', views.myaccount_view, name='myaccount_view'), path(\n 'wishlist_view/', views.wishlist_view, name='wishlist_view'), path(\n 'category/', views.category, name='category'), path(\n 'include_search_view/', views.include_search_view, name=\n 'include_search_view'), path('catsearch_view/<int:id>', views.\n catsearch_view, name='catsearch_view'), path('home_view/', views.\n home_view, name='home_view'), path('directcat_view/<int:id>', views.\n directcat_view, name='directcat_view'), path('showlist/', views.\n showlist, name='showlist'), path('product_search/', views.\n product_search, name='product_search'), path('emailresponse/', views.\n emailresponse, name='emailresponse'), path('accounts/', include(\n 'django.contrib.auth.urls')), path('signup/', views.signup_view, name=\n 'signup'), path('accounts/logout/', views.logout_view, name=\n 'logout_view'), path('myaccount_details/', views.myaccount_details,\n name='myaccount_details'), path('sort/<str:key>', views.sort_view, name\n ='sort'), path('profile/', views.profile, name='profile'), path(\n 
'change_password/', views.change_password, name='change_password'),\n path('addcart/', views.add_to_cart, name='addcart'), path('cart_bag/',\n views.cart_bag, name='cart_bag'), path('update_cart/', views.\n update_cart, name='update_cart'), path('cart_del/<int:id>', views.\n cart_del, name='cart_del'), path('', include(router.urls)), path(\n 'users/', include('rest_framework.urls', namespace='rest_framework'))]\nif settings.DEBUG:\n urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT\n )\n",
"step-5": "\"\"\"storeproject URL Configuration\n\nThe `urlpatterns` list routes URLs to views. For more information please see:\n https://docs.djangoproject.com/en/3.1/topics/http/urls/\nExamples:\nFunction views\n 1. Add an import: from my_app import views\n 2. Add a URL to urlpatterns: path('', views.home, name='home')\nClass-based views\n 1. Add an import: from other_app.views import Home\n 2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')\nIncluding another URLconf\n 1. Import the include() function: from django.urls import include, path\n 2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))\n\"\"\"\nfrom django.conf import settings\nfrom django.conf.urls.static import static\nfrom django.contrib import admin\nfrom django.db import router\nfrom django.urls import path, include\nfrom rest_framework import routers\n\nfrom spencersapp import views\n\n\nrouter = routers.DefaultRouter()\nrouter.register(r'users', views.CategoryView)\n\nurlpatterns = [\n path('admin/', admin.site.urls),\n path('', views.index_view, name='index_view'),\n path('about_view/', views.about_view, name='about_view'),\n path('shop_view/', views.shop_view, name='shop_view'),\n path('checkout_view/', views.checkout_view, name='checkout_view'),\n path('contactus_view/', views.contactus_view, name='contactus_view'),\n path('gallery_view/', views.gallery_view, name='gallery_view'),\n path('shopdetail_view/', views.shopdetail_view, name='shopdetail_view'),\n path('cart_view/', views.cart_view, name='cart_view'),\n path('myaccount_view/', views.myaccount_view, name='myaccount_view'),\n path('wishlist_view/', views.wishlist_view, name='wishlist_view'),\n path('category/', views.category, name='category'),\n path('include_search_view/', views.include_search_view, name='include_search_view'),\n path('catsearch_view/<int:id>', views.catsearch_view, name='catsearch_view'),\n # path('registration_view/', views.registration_view, name='registration_view'),\n # 
path('login_view/', views.login_view, name='login_view'),\n path('home_view/', views.home_view, name='home_view'),\n path('directcat_view/<int:id>', views.directcat_view, name='directcat_view'),\n path('showlist/', views.showlist, name='showlist'),\n path('product_search/', views.product_search, name='product_search'),\n path('emailresponse/', views.emailresponse, name='emailresponse'),\n # path('user_login/', views.user_login, name='user_login'),\n path('accounts/', include('django.contrib.auth.urls')),\n path('signup/', views.signup_view, name='signup'),\n path('accounts/logout/', views.logout_view, name='logout_view'),\n path('myaccount_details/', views.myaccount_details, name='myaccount_details'),\n path('sort/<str:key>', views.sort_view, name='sort'),\n path('profile/', views.profile, name='profile'),\n path('change_password/', views.change_password, name='change_password'),\n path('addcart/',views.add_to_cart, name='addcart'),\n path('cart_bag/', views.cart_bag, name='cart_bag'),\n path('update_cart/', views.update_cart, name='update_cart'),\n path('cart_del/<int:id>', views.cart_del, name='cart_del'),\n path('', include(router.urls)),\n path('users/', include('rest_framework.urls', namespace='rest_framework'))\n]\nif settings.DEBUG:\n urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)\n\n\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.