file_name large_stringlengths 4 140 | prefix large_stringlengths 0 39k | suffix large_stringlengths 0 36.1k | middle large_stringlengths 0 29.4k | fim_type large_stringclasses 4
values |
|---|---|---|---|---|
index.py | import os
import traceback
import json
import requests
from flask import Flask, request
from cities_list import CITIES
from messages import get_message, search_keyword
token = os.environ.get('FB_ACCESS_TOKEN')
api_key = os.environ.get('WEATHER_API_KEY')
app = Flask(__name__)
def location_quick_reply(sender, text=None):
if not text:
text = get_message('location-button')
return {
"recipient": {
"id": sender
},
"message": {
"text": text,
"quick_replies": [
{
"content_type": "location",
}
]
}
}
def | (sender, type, payload):
return {
"recipient": {
"id": sender
},
"message": {
"attachment": {
"type": type,
"payload": payload,
}
}
}
def send_text(sender, text):
return {
"recipient": {
"id": sender
},
"message": {
"text": text
}
}
def send_message(payload):
requests.post('https://graph.facebook.com/v2.6/me/messages/?access_token=' + token, json=payload)
def send_weather_info(sender, **kwargs):
latitude = kwargs.pop('latitude', None)
longitude = kwargs.pop('longitude', None)
city_name = kwargs.pop('city_name', None)
if latitude and longitude:
query = 'lat={}&lon={}'.format(latitude, longitude)
elif city_name:
query = 'q={},br'.format(city_name.title())
url = 'http://api.openweathermap.org/data/2.5/weather?' \
'{}&appid={}&units={}&lang={}'.format(query,
api_key,
'metric',
'pt')
r = requests.get(url)
response = r.json()
print(response)
if 'cod' in response:
if response['cod'] != 200:
return 'error'
name = response['name']
weather = response['main']
wind = response['wind']
elements = [{
'title': name,
'subtitle': 'Temperatura: {} graus'.format(str(weather['temp']).replace('.',',')),
'image_url': 'https://cdn-images-1.medium.com/max/800/1*LkbHjhacSRDNDzupX7pgEQ.jpeg'
}]
for info in response['weather']:
description = info['description'].capitalize()
icon = info['icon']
weather_data = 'Umidade: {}%\n' \
'Pressão: {}\n' \
'Velocidade do vento: {}'.format(weather['humidity'],
weather['pressure'],
wind['speed'])
if 'visibility' in response:
weather_data = '{}\n Visibilidade: {}'.format(weather_data, response['visibility'])
elements.append({
'title': description,
'subtitle': weather_data,
'image_url': 'http://openweathermap.org/img/w/{}.png'.format(icon)
})
payload = send_attachment(sender,
'template',
{
"template_type": "list",
"top_element_style": "large",
"elements": elements,
"buttons": [
{
"title": "Fazer nova pesquisa",
"type": "postback",
"payload": "do_it_again"
}
]
})
send_message(payload)
return None
@app.route('/', methods=['GET', 'POST'])
def webhook():
if request.method == 'POST':
try:
data = json.loads(request.data.decode())
sender = data['entry'][0]['messaging'][0]['sender']['id']
print(data)
if 'message' in data['entry'][0]['messaging'][0]:
message = data['entry'][0]['messaging'][0]['message']
if 'postback' in data['entry'][0]['messaging'][0]:
# Action when user first enters the chat
payload = data['entry'][0]['messaging'][0]['postback']['payload']
if payload == 'begin_button':
message = send_text(sender, 'Olá, tudo bem? Vamos começar?')
send_message(message)
payload = location_quick_reply(sender)
send_message(payload)
return 'Ok'
# Resend the location button
if payload == 'do_it_again':
payload = location_quick_reply(sender)
send_message(payload)
if 'attachments' in message:
if 'payload' in message['attachments'][0]:
if 'coordinates' in message['attachments'][0]['payload']:
location = message['attachments'][0]['payload']['coordinates']
latitude = location['lat']
longitude = location['long']
send_weather_info(sender, latitude=latitude, longitude=longitude)
if _return == 'error':
message = send_text(sender, get_message('error'))
send_message(message)
payload = location_quick_reply(sender)
send_message(payload)
else:
text = message['text']
for city in CITIES:
if text.lower() in city:
_return = send_weather_info(sender, city_name=text)
if _return == 'error':
message = send_text(sender, get_message('error'))
send_message(message)
# Send location button
payload = location_quick_reply(sender)
send_message(payload)
return 'Ok'
# If text not in city list...
chat_message = search_keyword(text)
if chat_message:
# if found keyword, reply with chat stuff
message = send_text(sender, chat_message)
send_message(message)
else:
message = send_text(sender, get_message('not-a-city'))
send_message(message)
# Send location button
payload = location_quick_reply(sender)
send_message(payload)
except Exception as e:
print(traceback.format_exc())
elif request.method == 'GET':
if request.args.get('hub.verify_token') == os.environ.get('FB_VERIFY_TOKEN'):
return request.args.get('hub.challenge')
return "Wrong Verify Token"
return "Nothing"
if __name__ == '__main__':
app.run(debug=True)
| send_attachment | identifier_name |
index.py | import os
import traceback
import json
import requests
from flask import Flask, request
from cities_list import CITIES
from messages import get_message, search_keyword
token = os.environ.get('FB_ACCESS_TOKEN')
api_key = os.environ.get('WEATHER_API_KEY')
app = Flask(__name__)
def location_quick_reply(sender, text=None):
if not text:
text = get_message('location-button')
return {
"recipient": {
"id": sender
},
"message": {
"text": text,
"quick_replies": [
{
"content_type": "location",
}
]
}
}
def send_attachment(sender, type, payload):
|
def send_text(sender, text):
return {
"recipient": {
"id": sender
},
"message": {
"text": text
}
}
def send_message(payload):
requests.post('https://graph.facebook.com/v2.6/me/messages/?access_token=' + token, json=payload)
def send_weather_info(sender, **kwargs):
latitude = kwargs.pop('latitude', None)
longitude = kwargs.pop('longitude', None)
city_name = kwargs.pop('city_name', None)
if latitude and longitude:
query = 'lat={}&lon={}'.format(latitude, longitude)
elif city_name:
query = 'q={},br'.format(city_name.title())
url = 'http://api.openweathermap.org/data/2.5/weather?' \
'{}&appid={}&units={}&lang={}'.format(query,
api_key,
'metric',
'pt')
r = requests.get(url)
response = r.json()
print(response)
if 'cod' in response:
if response['cod'] != 200:
return 'error'
name = response['name']
weather = response['main']
wind = response['wind']
elements = [{
'title': name,
'subtitle': 'Temperatura: {} graus'.format(str(weather['temp']).replace('.',',')),
'image_url': 'https://cdn-images-1.medium.com/max/800/1*LkbHjhacSRDNDzupX7pgEQ.jpeg'
}]
for info in response['weather']:
description = info['description'].capitalize()
icon = info['icon']
weather_data = 'Umidade: {}%\n' \
'Pressão: {}\n' \
'Velocidade do vento: {}'.format(weather['humidity'],
weather['pressure'],
wind['speed'])
if 'visibility' in response:
weather_data = '{}\n Visibilidade: {}'.format(weather_data, response['visibility'])
elements.append({
'title': description,
'subtitle': weather_data,
'image_url': 'http://openweathermap.org/img/w/{}.png'.format(icon)
})
payload = send_attachment(sender,
'template',
{
"template_type": "list",
"top_element_style": "large",
"elements": elements,
"buttons": [
{
"title": "Fazer nova pesquisa",
"type": "postback",
"payload": "do_it_again"
}
]
})
send_message(payload)
return None
@app.route('/', methods=['GET', 'POST'])
def webhook():
if request.method == 'POST':
try:
data = json.loads(request.data.decode())
sender = data['entry'][0]['messaging'][0]['sender']['id']
print(data)
if 'message' in data['entry'][0]['messaging'][0]:
message = data['entry'][0]['messaging'][0]['message']
if 'postback' in data['entry'][0]['messaging'][0]:
# Action when user first enters the chat
payload = data['entry'][0]['messaging'][0]['postback']['payload']
if payload == 'begin_button':
message = send_text(sender, 'Olá, tudo bem? Vamos começar?')
send_message(message)
payload = location_quick_reply(sender)
send_message(payload)
return 'Ok'
# Resend the location button
if payload == 'do_it_again':
payload = location_quick_reply(sender)
send_message(payload)
if 'attachments' in message:
if 'payload' in message['attachments'][0]:
if 'coordinates' in message['attachments'][0]['payload']:
location = message['attachments'][0]['payload']['coordinates']
latitude = location['lat']
longitude = location['long']
send_weather_info(sender, latitude=latitude, longitude=longitude)
if _return == 'error':
message = send_text(sender, get_message('error'))
send_message(message)
payload = location_quick_reply(sender)
send_message(payload)
else:
text = message['text']
for city in CITIES:
if text.lower() in city:
_return = send_weather_info(sender, city_name=text)
if _return == 'error':
message = send_text(sender, get_message('error'))
send_message(message)
# Send location button
payload = location_quick_reply(sender)
send_message(payload)
return 'Ok'
# If text not in city list...
chat_message = search_keyword(text)
if chat_message:
# if found keyword, reply with chat stuff
message = send_text(sender, chat_message)
send_message(message)
else:
message = send_text(sender, get_message('not-a-city'))
send_message(message)
# Send location button
payload = location_quick_reply(sender)
send_message(payload)
except Exception as e:
print(traceback.format_exc())
elif request.method == 'GET':
if request.args.get('hub.verify_token') == os.environ.get('FB_VERIFY_TOKEN'):
return request.args.get('hub.challenge')
return "Wrong Verify Token"
return "Nothing"
if __name__ == '__main__':
app.run(debug=True)
| return {
"recipient": {
"id": sender
},
"message": {
"attachment": {
"type": type,
"payload": payload,
}
}
} | identifier_body |
lex-bad-char-literals.rs | // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
static c: char =
'\u539_' //~ ERROR: illegal character in numeric character escape
;
static c2: char =
'\Uffffffff' //~ ERROR: illegal numeric character escape
;
static c3: char =
'\x1' //~ ERROR: numeric character escape is too short
;
static c4: char =
'\u23q' //~ ERROR: illegal character in numeric character escape
;
//~^^ ERROR: numeric character escape is too short
static s: &'static str =
"\x1" //~ ERROR: numeric character escape is too short | ;
static c: char =
'\●' //~ ERROR: unknown character escape
;
static s: &'static str =
"\●" //~ ERROR: unknown character escape
;
// THIS MUST BE LAST, since unterminated character constants kill the lexer
static c: char =
'● //~ ERROR: unterminated character constant
; | ;
static s2: &'static str =
"\u23q" //~ ERROR: illegal character in numeric character escape
//~^ ERROR: numeric character escape is too short | random_line_split |
zigbee.py | #!/usr/bin/env python
# -*- coding:UTF-8 -*-
import urllib
import urllib2
import json
import serial
import time
import gpio
import re
import binascii
import threading
import datetime
import sys
# use your deviceID and apikey
deviceID="xxxxxxxxxx"
apikey = "xxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
key_pin = "gpio12"
s = ""
door = ""
PIR = ""
Leak = ""
Smoke = ""
Remote = ""
Door_mac = ""
PIR_mac = ""
Leak_mac = ""
Smoke_mac = ""
Remote_mac = ""
# use USB UART or UART on pcDuino to communicate with zigbee gateway
try:
ser = serial.Serial("/dev/ttyUSB0", 115200,timeout = 0.1)
except serial.serialutil.SerialException:
try:
ser = serial.Serial("/dev/ttyS1", 115200,timeout = 0.1)
with open("/sys/devices/virtual/misc/gpio/mode/gpio0",'w') as UART_RX:
UART_RX.write('3')
with open("/sys/devices/virtual/misc/gpio/mode/gpio1",'w') as UART_TX:
UART_TX.write('3')
except serial.serialutil.SerialException:
print "serial failed!"
exit()
def setup():
gpio.pinMode(key_pin,gpio.INPUT)
def key_interrupt():
val=gpio.digitalRead(key_pin)
if val==0:
time.sleep(0.010)
if val==0:
return '1'
return '0'
def http_post(data):
try:
url = 'http://www.linksprite.io/api/http'
jdata = json.dumps(data)
req = urllib2.Request(url, jdata)
req.add_header('Content-Type','application/json')
response = urllib2.urlopen(req)
return response.read()
except urllib2.URLError:
print "connect failed"
return "connect failed"
pass
def hexShow(argv):
result = ''
hLen = len(argv)
for i in xrange(hLen):
hvol = ord(argv[i])
hhex = '%02x'%hvol
result += hhex+' '
return result
def register():
|
def set_target(short_mac):
send = "0c fc 02 01 04 01 01 01 02"+short_mac+"02 0a"
s = send.replace(' ','')
a=binascii.a2b_hex(s)
while True:
ser.write(a)
recv=ser.readline()
rec=hexShow(recv)
a = rec.find("04 fd 02 01",0)
if a != -1:
print "set target ok"
break
time.sleep(0.2)
def gateway_mac():
while True:
ser.write('\x02')
ser.write('\x14')
ser.write('\x6f')
data = ser.readline()
dat = hexShow(data)
leng = len(dat)
if leng > 30:
a = dat.find("0c 15 00 6f",0)
if a != -1:
dt = dat[15:38]
return dt
break
time.sleep(1)
def bind(eq_mac,gat_mac):
send = "16 d8"+eq_mac+"01 01 00 03"+gat_mac+"01"
s = send.replace(' ','')
a=binascii.a2b_hex(s)
start = datetime.datetime.now()
while True:
ser.write(a)
recv=ser.readline()
rec=hexShow(recv)
b = rec.find("02 d9 00")
if b != -1:
print "bind ok"
break
time.sleep(0.2)
def cluster():
send = "08 FC 00 00 05 00 01 01 00"
s = send.replace(' ','')
a=binascii.a2b_hex(s)
start = datetime.datetime.now()
while True:
ser.write(a)
recv=ser.readline()
rec=hexShow(recv)
leng = len(rec)
finsh = datetime.datetime.now()
tim = (finsh-start).seconds
if tim > 5:
print "failure! please add again"
return "xxxx"
break
if leng > 30:
b = rec.find("0b fe 03")
c = rec.find("00 01 07 fe 03 00")
if b != -1:
return rec[b+30:b+35]
break
elif c != -1:
return "11 00"
time.sleep(0.2)
def report():
send = "11 FC 00 01 00 06 01 00 21 00 20 f0 00 f0 00 01 00 00"
s = send.replace(' ','')
a=binascii.a2b_hex(s)
while True:
ser.write(a)
recv=ser.readline()
rec=hexShow(recv)
leng = len(rec)
if leng > 15:
b = rec.find("06 fd 00")
if b != -1:
print "send report ok"
break
time.sleep(0.2)
def alarm():
line = ser.readline()
val = hexShow(line)
leng = len(val)
if leng >= 56:
#print val
po = val.find("fe 01")
if po != -1:
aa = val[po+21:po+26]
sta = val[po+46]
s = aa+sta
return s
return -1
def open_socket():
send = "05 FC 01 06 00 01"
s = send.replace(' ','')
a=binascii.a2b_hex(s)
def close_socket():
send = "05 FC 01 06 00 00"
s = send.replace(' ','')
a=binascii.a2b_hex(s)
def recovery():
global s
global PIR
s = '0'
PIR = '0'
values ={
"action":"update",
"apikey":apikey,
"deviceid":deviceID,
"params":
{
"PIR":PIR,
"SOS":s
}}
http_post(values)
def update(mac,sta):
global Door_mac
global PIR_mac
global Leak_mac
global Smoke_mac
global Remote_mac
global s
global door
global PIR
global Leak
global Smoke
global Remote
try:
f = open('door.txt','r')
Door_mac=f.read()
f.close()
except IOError:
pass
try:
f = open('pir.txt','r')
PIR_mac=f.read()
f.close()
except IOError:
pass
try:
f = open('leak.txt','r')
Leak_mac=f.read()
f.close()
except IOError:
pass
try:
f = open('smoke.txt','r')
Smoke_mac=f.read()
f.close()
except IOError:
pass
try:
f = open('remote.txt','r')
Remote_mac=f.read()
f.close()
except IOError:
pass
if mac == Door_mac:
door = sta
elif mac == PIR_mac:
PIR = sta
elif mac == Leak_mac:
Leak = sta
elif mac == Smoke_mac:
Smoke = sta
elif mac == Remote_mac:
Remote = sta
if sta == '1':
s = sta
else:
print "You should add the equipment first"
values ={
"action":"update",
"apikey":apikey,
"deviceid":deviceID,
"params":
{
"Door":door,
"PIR":PIR,
"Leak":Leak,
"Smoke":Smoke,
"Remote":Remote,
"SOS":s
}}
http_post(values)
if s == '1'or PIR == '1':
timer = threading.Timer(2,recovery)
timer.start()
def main():
global Door_mac
global PIR_mac
global Leak_mac
global Smoke_mac
global Remote_mac
setup()
if ser.isOpen() == True:
print "serial open succeed!"
else:
print "serial open failure!"
while True:
# If check the GPIO12's status, if it is high, excuete commands to
# add new zigbee device into zigbee gateway
a = key_interrupt()
if a == '1':
print "Add equipment!"
# Set gateway to allow adding device
val=register()
short = val[0:5]
print "short:"+short
mac = val[6:29]
print "mac:"+mac
# Get the gateway MAC address
gatmac=gateway_mac()
print "gatewaymac:"+gatmac
# Configure the communication with zigbee device
set_target(short)
# Bind the zigbee device
bind(mac,gatmac)
# Read the zone type to check the type of zigbee device
# which can identify the alarm information from different zigbee sensor.
zone_type=cluster()
print "zone_type:"+zone_type
if zone_type == "15 00":
Door_mac = short
f = open('door.txt','w')
f.write(short)
f.close()
report()
elif zone_type == "0d 00":
PIR_mac = short
f=open('pir.txt','w')
f.write(short)
f.close()
report()
elif zone_type == "2a 00":
Leak_mac = short
f=open('leak.txt','w')
f.write(short)
f.close()
report()
elif zone_type == "28 00":
Smoke_mac = short
f=open('smoke.txt','w')
f.write(short)
f.close()
report()
elif zone_type == "11 00":
Remote_mac = short
f=open('remote.txt','w')
f.write(short)
f.close()
report()
# Check the alarm information from zigbee sensor node
data=alarm()
if data != -1:
short_mac = data[0:5]
print"short mac:"+short_mac
status = data[5]
print"status:"+status
# upload the alarm information to linksprite.io server
update(short_mac,status)
time.sleep(0.2)
if __name__=='__main__':
try:
main()
except KeyboardInterrupt:
ser.close()
| while True:
ser.write('\x02')
ser.write('\x75')
ser.write('\x1e')
data = ser.readline()
val=hexShow(data)
leng = len(val)
if leng > 45:
a = val.find("0e fc 02 e1",1)
if a != -1:
print "add equipment ok"
b=a+12
mac = val[b:b+29]
return mac
break
time.sleep(0.2) | identifier_body |
zigbee.py | #!/usr/bin/env python
# -*- coding:UTF-8 -*-
import urllib
import urllib2
import json
import serial
import time
import gpio
import re
import binascii
import threading
import datetime
import sys
# use your deviceID and apikey
deviceID="xxxxxxxxxx"
apikey = "xxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
key_pin = "gpio12"
s = ""
door = ""
PIR = ""
Leak = ""
Smoke = ""
Remote = ""
Door_mac = ""
PIR_mac = ""
Leak_mac = ""
Smoke_mac = ""
Remote_mac = ""
# use USB UART or UART on pcDuino to communicate with zigbee gateway
try:
ser = serial.Serial("/dev/ttyUSB0", 115200,timeout = 0.1)
except serial.serialutil.SerialException:
try:
ser = serial.Serial("/dev/ttyS1", 115200,timeout = 0.1)
with open("/sys/devices/virtual/misc/gpio/mode/gpio0",'w') as UART_RX:
UART_RX.write('3')
with open("/sys/devices/virtual/misc/gpio/mode/gpio1",'w') as UART_TX:
UART_TX.write('3')
except serial.serialutil.SerialException:
print "serial failed!"
exit()
def setup():
gpio.pinMode(key_pin,gpio.INPUT)
def key_interrupt():
val=gpio.digitalRead(key_pin)
if val==0:
time.sleep(0.010)
if val==0:
return '1'
return '0'
def http_post(data):
try:
url = 'http://www.linksprite.io/api/http'
jdata = json.dumps(data)
req = urllib2.Request(url, jdata)
req.add_header('Content-Type','application/json')
response = urllib2.urlopen(req)
return response.read()
except urllib2.URLError:
print "connect failed"
return "connect failed"
pass
def hexShow(argv):
result = ''
hLen = len(argv)
for i in xrange(hLen):
hvol = ord(argv[i])
hhex = '%02x'%hvol
result += hhex+' '
return result
def register():
while True:
ser.write('\x02')
ser.write('\x75')
ser.write('\x1e')
data = ser.readline()
val=hexShow(data)
leng = len(val)
if leng > 45:
a = val.find("0e fc 02 e1",1)
if a != -1:
print "add equipment ok"
b=a+12
mac = val[b:b+29]
return mac
break
time.sleep(0.2)
def | (short_mac):
send = "0c fc 02 01 04 01 01 01 02"+short_mac+"02 0a"
s = send.replace(' ','')
a=binascii.a2b_hex(s)
while True:
ser.write(a)
recv=ser.readline()
rec=hexShow(recv)
a = rec.find("04 fd 02 01",0)
if a != -1:
print "set target ok"
break
time.sleep(0.2)
def gateway_mac():
while True:
ser.write('\x02')
ser.write('\x14')
ser.write('\x6f')
data = ser.readline()
dat = hexShow(data)
leng = len(dat)
if leng > 30:
a = dat.find("0c 15 00 6f",0)
if a != -1:
dt = dat[15:38]
return dt
break
time.sleep(1)
def bind(eq_mac,gat_mac):
send = "16 d8"+eq_mac+"01 01 00 03"+gat_mac+"01"
s = send.replace(' ','')
a=binascii.a2b_hex(s)
start = datetime.datetime.now()
while True:
ser.write(a)
recv=ser.readline()
rec=hexShow(recv)
b = rec.find("02 d9 00")
if b != -1:
print "bind ok"
break
time.sleep(0.2)
def cluster():
send = "08 FC 00 00 05 00 01 01 00"
s = send.replace(' ','')
a=binascii.a2b_hex(s)
start = datetime.datetime.now()
while True:
ser.write(a)
recv=ser.readline()
rec=hexShow(recv)
leng = len(rec)
finsh = datetime.datetime.now()
tim = (finsh-start).seconds
if tim > 5:
print "failure! please add again"
return "xxxx"
break
if leng > 30:
b = rec.find("0b fe 03")
c = rec.find("00 01 07 fe 03 00")
if b != -1:
return rec[b+30:b+35]
break
elif c != -1:
return "11 00"
time.sleep(0.2)
def report():
send = "11 FC 00 01 00 06 01 00 21 00 20 f0 00 f0 00 01 00 00"
s = send.replace(' ','')
a=binascii.a2b_hex(s)
while True:
ser.write(a)
recv=ser.readline()
rec=hexShow(recv)
leng = len(rec)
if leng > 15:
b = rec.find("06 fd 00")
if b != -1:
print "send report ok"
break
time.sleep(0.2)
def alarm():
line = ser.readline()
val = hexShow(line)
leng = len(val)
if leng >= 56:
#print val
po = val.find("fe 01")
if po != -1:
aa = val[po+21:po+26]
sta = val[po+46]
s = aa+sta
return s
return -1
def open_socket():
send = "05 FC 01 06 00 01"
s = send.replace(' ','')
a=binascii.a2b_hex(s)
def close_socket():
send = "05 FC 01 06 00 00"
s = send.replace(' ','')
a=binascii.a2b_hex(s)
def recovery():
global s
global PIR
s = '0'
PIR = '0'
values ={
"action":"update",
"apikey":apikey,
"deviceid":deviceID,
"params":
{
"PIR":PIR,
"SOS":s
}}
http_post(values)
def update(mac,sta):
global Door_mac
global PIR_mac
global Leak_mac
global Smoke_mac
global Remote_mac
global s
global door
global PIR
global Leak
global Smoke
global Remote
try:
f = open('door.txt','r')
Door_mac=f.read()
f.close()
except IOError:
pass
try:
f = open('pir.txt','r')
PIR_mac=f.read()
f.close()
except IOError:
pass
try:
f = open('leak.txt','r')
Leak_mac=f.read()
f.close()
except IOError:
pass
try:
f = open('smoke.txt','r')
Smoke_mac=f.read()
f.close()
except IOError:
pass
try:
f = open('remote.txt','r')
Remote_mac=f.read()
f.close()
except IOError:
pass
if mac == Door_mac:
door = sta
elif mac == PIR_mac:
PIR = sta
elif mac == Leak_mac:
Leak = sta
elif mac == Smoke_mac:
Smoke = sta
elif mac == Remote_mac:
Remote = sta
if sta == '1':
s = sta
else:
print "You should add the equipment first"
values ={
"action":"update",
"apikey":apikey,
"deviceid":deviceID,
"params":
{
"Door":door,
"PIR":PIR,
"Leak":Leak,
"Smoke":Smoke,
"Remote":Remote,
"SOS":s
}}
http_post(values)
if s == '1'or PIR == '1':
timer = threading.Timer(2,recovery)
timer.start()
def main():
global Door_mac
global PIR_mac
global Leak_mac
global Smoke_mac
global Remote_mac
setup()
if ser.isOpen() == True:
print "serial open succeed!"
else:
print "serial open failure!"
while True:
# If check the GPIO12's status, if it is high, excuete commands to
# add new zigbee device into zigbee gateway
a = key_interrupt()
if a == '1':
print "Add equipment!"
# Set gateway to allow adding device
val=register()
short = val[0:5]
print "short:"+short
mac = val[6:29]
print "mac:"+mac
# Get the gateway MAC address
gatmac=gateway_mac()
print "gatewaymac:"+gatmac
# Configure the communication with zigbee device
set_target(short)
# Bind the zigbee device
bind(mac,gatmac)
# Read the zone type to check the type of zigbee device
# which can identify the alarm information from different zigbee sensor.
zone_type=cluster()
print "zone_type:"+zone_type
if zone_type == "15 00":
Door_mac = short
f = open('door.txt','w')
f.write(short)
f.close()
report()
elif zone_type == "0d 00":
PIR_mac = short
f=open('pir.txt','w')
f.write(short)
f.close()
report()
elif zone_type == "2a 00":
Leak_mac = short
f=open('leak.txt','w')
f.write(short)
f.close()
report()
elif zone_type == "28 00":
Smoke_mac = short
f=open('smoke.txt','w')
f.write(short)
f.close()
report()
elif zone_type == "11 00":
Remote_mac = short
f=open('remote.txt','w')
f.write(short)
f.close()
report()
# Check the alarm information from zigbee sensor node
data=alarm()
if data != -1:
short_mac = data[0:5]
print"short mac:"+short_mac
status = data[5]
print"status:"+status
# upload the alarm information to linksprite.io server
update(short_mac,status)
time.sleep(0.2)
if __name__=='__main__':
try:
main()
except KeyboardInterrupt:
ser.close()
| set_target | identifier_name |
zigbee.py | #!/usr/bin/env python
# -*- coding:UTF-8 -*-
import urllib
import urllib2
import json
import serial
import time
import gpio
import re
import binascii
import threading
import datetime
import sys
# use your deviceID and apikey
deviceID="xxxxxxxxxx"
apikey = "xxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
key_pin = "gpio12"
s = ""
door = ""
PIR = ""
Leak = ""
Smoke = ""
Remote = ""
Door_mac = ""
PIR_mac = ""
Leak_mac = ""
Smoke_mac = ""
Remote_mac = ""
# use USB UART or UART on pcDuino to communicate with zigbee gateway
try:
ser = serial.Serial("/dev/ttyUSB0", 115200,timeout = 0.1)
except serial.serialutil.SerialException:
try:
ser = serial.Serial("/dev/ttyS1", 115200,timeout = 0.1)
with open("/sys/devices/virtual/misc/gpio/mode/gpio0",'w') as UART_RX:
UART_RX.write('3')
with open("/sys/devices/virtual/misc/gpio/mode/gpio1",'w') as UART_TX:
UART_TX.write('3')
except serial.serialutil.SerialException:
print "serial failed!"
exit()
def setup():
gpio.pinMode(key_pin,gpio.INPUT)
def key_interrupt():
val=gpio.digitalRead(key_pin)
if val==0:
time.sleep(0.010)
if val==0:
return '1'
return '0'
def http_post(data):
try:
url = 'http://www.linksprite.io/api/http'
jdata = json.dumps(data)
req = urllib2.Request(url, jdata)
req.add_header('Content-Type','application/json')
response = urllib2.urlopen(req)
return response.read()
except urllib2.URLError:
print "connect failed"
return "connect failed"
pass
def hexShow(argv):
result = ''
hLen = len(argv)
for i in xrange(hLen):
hvol = ord(argv[i])
hhex = '%02x'%hvol
result += hhex+' '
return result
def register():
while True:
ser.write('\x02')
ser.write('\x75')
ser.write('\x1e')
data = ser.readline()
val=hexShow(data)
leng = len(val)
if leng > 45:
a = val.find("0e fc 02 e1",1)
if a != -1:
print "add equipment ok"
b=a+12
mac = val[b:b+29]
return mac
break
time.sleep(0.2)
def set_target(short_mac):
send = "0c fc 02 01 04 01 01 01 02"+short_mac+"02 0a"
s = send.replace(' ','')
a=binascii.a2b_hex(s)
while True:
ser.write(a)
recv=ser.readline()
rec=hexShow(recv)
a = rec.find("04 fd 02 01",0)
if a != -1:
print "set target ok"
break
time.sleep(0.2)
def gateway_mac():
while True:
ser.write('\x02')
ser.write('\x14')
ser.write('\x6f')
data = ser.readline()
dat = hexShow(data)
leng = len(dat)
if leng > 30:
a = dat.find("0c 15 00 6f",0)
if a != -1:
dt = dat[15:38]
return dt
break
time.sleep(1)
def bind(eq_mac,gat_mac):
send = "16 d8"+eq_mac+"01 01 00 03"+gat_mac+"01"
s = send.replace(' ','')
a=binascii.a2b_hex(s)
start = datetime.datetime.now()
while True:
ser.write(a)
recv=ser.readline()
rec=hexShow(recv)
b = rec.find("02 d9 00")
if b != -1:
print "bind ok"
break
time.sleep(0.2)
def cluster():
send = "08 FC 00 00 05 00 01 01 00"
s = send.replace(' ','')
a=binascii.a2b_hex(s)
start = datetime.datetime.now()
while True:
ser.write(a)
recv=ser.readline()
rec=hexShow(recv)
leng = len(rec)
finsh = datetime.datetime.now()
tim = (finsh-start).seconds
if tim > 5:
print "failure! please add again"
return "xxxx"
break
if leng > 30:
b = rec.find("0b fe 03")
c = rec.find("00 01 07 fe 03 00")
if b != -1:
return rec[b+30:b+35]
break
elif c != -1:
return "11 00"
time.sleep(0.2)
def report():
send = "11 FC 00 01 00 06 01 00 21 00 20 f0 00 f0 00 01 00 00"
s = send.replace(' ','')
a=binascii.a2b_hex(s)
while True:
ser.write(a)
recv=ser.readline()
rec=hexShow(recv)
leng = len(rec)
if leng > 15:
b = rec.find("06 fd 00")
if b != -1:
print "send report ok"
break
time.sleep(0.2)
def alarm():
line = ser.readline()
val = hexShow(line)
leng = len(val)
if leng >= 56:
#print val
po = val.find("fe 01")
if po != -1:
aa = val[po+21:po+26]
sta = val[po+46]
s = aa+sta
return s
return -1
def open_socket():
send = "05 FC 01 06 00 01"
s = send.replace(' ','')
a=binascii.a2b_hex(s)
def close_socket():
send = "05 FC 01 06 00 00"
s = send.replace(' ','')
a=binascii.a2b_hex(s)
def recovery():
global s
global PIR
s = '0'
PIR = '0'
values ={
"action":"update",
"apikey":apikey,
"deviceid":deviceID,
"params":
{
"PIR":PIR,
"SOS":s
}}
http_post(values)
def update(mac,sta):
global Door_mac
global PIR_mac
global Leak_mac
global Smoke_mac
global Remote_mac
global s
global door
global PIR
global Leak
global Smoke
global Remote
try:
f = open('door.txt','r')
Door_mac=f.read()
f.close()
except IOError:
pass
try:
f = open('pir.txt','r')
PIR_mac=f.read()
f.close()
except IOError:
pass
try:
f = open('leak.txt','r')
Leak_mac=f.read()
f.close()
except IOError:
pass
try:
f = open('smoke.txt','r')
Smoke_mac=f.read()
f.close()
except IOError:
pass
try:
f = open('remote.txt','r')
Remote_mac=f.read()
f.close()
except IOError:
pass
if mac == Door_mac:
door = sta
elif mac == PIR_mac:
PIR = sta
elif mac == Leak_mac:
Leak = sta
elif mac == Smoke_mac:
Smoke = sta
elif mac == Remote_mac:
Remote = sta
if sta == '1':
s = sta
else:
print "You should add the equipment first"
values ={
"action":"update",
"apikey":apikey,
"deviceid":deviceID,
"params":
{
"Door":door,
"PIR":PIR,
"Leak":Leak,
"Smoke":Smoke,
"Remote":Remote,
"SOS":s
}}
http_post(values)
if s == '1'or PIR == '1':
timer = threading.Timer(2,recovery)
timer.start()
def main():
global Door_mac
global PIR_mac
global Leak_mac
global Smoke_mac
global Remote_mac
setup()
if ser.isOpen() == True:
|
else:
print "serial open failure!"
while True:
# If check the GPIO12's status, if it is high, excuete commands to
# add new zigbee device into zigbee gateway
a = key_interrupt()
if a == '1':
print "Add equipment!"
# Set gateway to allow adding device
val=register()
short = val[0:5]
print "short:"+short
mac = val[6:29]
print "mac:"+mac
# Get the gateway MAC address
gatmac=gateway_mac()
print "gatewaymac:"+gatmac
# Configure the communication with zigbee device
set_target(short)
# Bind the zigbee device
bind(mac,gatmac)
# Read the zone type to check the type of zigbee device
# which can identify the alarm information from different zigbee sensor.
zone_type=cluster()
print "zone_type:"+zone_type
if zone_type == "15 00":
Door_mac = short
f = open('door.txt','w')
f.write(short)
f.close()
report()
elif zone_type == "0d 00":
PIR_mac = short
f=open('pir.txt','w')
f.write(short)
f.close()
report()
elif zone_type == "2a 00":
Leak_mac = short
f=open('leak.txt','w')
f.write(short)
f.close()
report()
elif zone_type == "28 00":
Smoke_mac = short
f=open('smoke.txt','w')
f.write(short)
f.close()
report()
elif zone_type == "11 00":
Remote_mac = short
f=open('remote.txt','w')
f.write(short)
f.close()
report()
# Check the alarm information from zigbee sensor node
data=alarm()
if data != -1:
short_mac = data[0:5]
print"short mac:"+short_mac
status = data[5]
print"status:"+status
# upload the alarm information to linksprite.io server
update(short_mac,status)
time.sleep(0.2)
if __name__=='__main__':
try:
main()
except KeyboardInterrupt:
ser.close()
| print "serial open succeed!" | conditional_block |
zigbee.py | #!/usr/bin/env python
# -*- coding:UTF-8 -*-
import urllib
import urllib2
import json
import serial
import time
import gpio
import re
import binascii
import threading
import datetime
import sys
# use your deviceID and apikey
deviceID="xxxxxxxxxx"
apikey = "xxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
key_pin = "gpio12"
s = ""
door = ""
PIR = ""
Leak = ""
Smoke = ""
Remote = ""
Door_mac = ""
PIR_mac = ""
Leak_mac = ""
Smoke_mac = ""
Remote_mac = ""
# use USB UART or UART on pcDuino to communicate with zigbee gateway
try:
ser = serial.Serial("/dev/ttyUSB0", 115200,timeout = 0.1)
except serial.serialutil.SerialException:
try:
ser = serial.Serial("/dev/ttyS1", 115200,timeout = 0.1)
with open("/sys/devices/virtual/misc/gpio/mode/gpio0",'w') as UART_RX:
UART_RX.write('3')
with open("/sys/devices/virtual/misc/gpio/mode/gpio1",'w') as UART_TX:
UART_TX.write('3')
except serial.serialutil.SerialException:
print "serial failed!"
exit()
def setup():
gpio.pinMode(key_pin,gpio.INPUT)
def key_interrupt():
val=gpio.digitalRead(key_pin)
if val==0:
time.sleep(0.010)
if val==0:
return '1'
return '0'
def http_post(data):
try:
url = 'http://www.linksprite.io/api/http'
jdata = json.dumps(data)
req = urllib2.Request(url, jdata)
req.add_header('Content-Type','application/json')
response = urllib2.urlopen(req)
return response.read()
except urllib2.URLError:
print "connect failed"
return "connect failed"
pass
def hexShow(argv):
result = ''
hLen = len(argv)
for i in xrange(hLen):
hvol = ord(argv[i])
hhex = '%02x'%hvol
result += hhex+' '
return result
def register():
while True:
ser.write('\x02')
ser.write('\x75')
ser.write('\x1e')
data = ser.readline()
val=hexShow(data)
leng = len(val)
if leng > 45:
a = val.find("0e fc 02 e1",1)
if a != -1:
print "add equipment ok"
b=a+12
mac = val[b:b+29]
return mac
break
time.sleep(0.2)
def set_target(short_mac):
send = "0c fc 02 01 04 01 01 01 02"+short_mac+"02 0a"
s = send.replace(' ','')
a=binascii.a2b_hex(s)
while True:
ser.write(a)
recv=ser.readline()
rec=hexShow(recv)
a = rec.find("04 fd 02 01",0)
if a != -1:
print "set target ok"
break
time.sleep(0.2)
def gateway_mac():
while True:
ser.write('\x02')
ser.write('\x14')
ser.write('\x6f')
data = ser.readline()
dat = hexShow(data)
leng = len(dat)
if leng > 30:
a = dat.find("0c 15 00 6f",0)
if a != -1:
dt = dat[15:38]
return dt
break
time.sleep(1)
def bind(eq_mac,gat_mac):
send = "16 d8"+eq_mac+"01 01 00 03"+gat_mac+"01"
s = send.replace(' ','')
a=binascii.a2b_hex(s)
start = datetime.datetime.now()
while True:
ser.write(a)
recv=ser.readline()
rec=hexShow(recv)
b = rec.find("02 d9 00")
if b != -1:
print "bind ok"
break
time.sleep(0.2)
def cluster():
send = "08 FC 00 00 05 00 01 01 00"
s = send.replace(' ','')
a=binascii.a2b_hex(s)
start = datetime.datetime.now()
while True:
ser.write(a)
recv=ser.readline()
rec=hexShow(recv)
leng = len(rec)
finsh = datetime.datetime.now()
tim = (finsh-start).seconds
if tim > 5:
print "failure! please add again"
return "xxxx"
break
if leng > 30:
b = rec.find("0b fe 03")
c = rec.find("00 01 07 fe 03 00")
if b != -1:
return rec[b+30:b+35]
break
elif c != -1:
return "11 00"
time.sleep(0.2)
def report():
send = "11 FC 00 01 00 06 01 00 21 00 20 f0 00 f0 00 01 00 00"
s = send.replace(' ','')
a=binascii.a2b_hex(s)
while True:
ser.write(a)
recv=ser.readline()
rec=hexShow(recv)
leng = len(rec)
if leng > 15:
b = rec.find("06 fd 00")
if b != -1:
print "send report ok"
break
time.sleep(0.2)
def alarm():
line = ser.readline()
val = hexShow(line)
leng = len(val)
if leng >= 56:
#print val
po = val.find("fe 01")
if po != -1:
aa = val[po+21:po+26]
sta = val[po+46]
s = aa+sta
return s
return -1
def open_socket():
send = "05 FC 01 06 00 01"
s = send.replace(' ','')
a=binascii.a2b_hex(s)
def close_socket():
send = "05 FC 01 06 00 00"
s = send.replace(' ','')
a=binascii.a2b_hex(s)
def recovery():
global s
global PIR
s = '0'
PIR = '0'
values ={
"action":"update",
"apikey":apikey,
"deviceid":deviceID,
"params":
{
"PIR":PIR,
"SOS":s
}}
http_post(values)
def update(mac,sta):
global Door_mac
global PIR_mac
global Leak_mac
global Smoke_mac
global Remote_mac
global s
global door
global PIR
global Leak
global Smoke
global Remote
try:
f = open('door.txt','r')
Door_mac=f.read()
f.close()
except IOError:
pass
try:
f = open('pir.txt','r')
PIR_mac=f.read()
f.close()
except IOError:
pass
try:
f = open('leak.txt','r')
Leak_mac=f.read()
f.close()
except IOError:
pass
try:
f = open('smoke.txt','r')
Smoke_mac=f.read()
f.close()
except IOError:
pass
try:
f = open('remote.txt','r')
Remote_mac=f.read()
f.close()
except IOError:
pass
if mac == Door_mac:
door = sta
elif mac == PIR_mac:
PIR = sta
elif mac == Leak_mac:
Leak = sta
elif mac == Smoke_mac:
Smoke = sta
elif mac == Remote_mac:
Remote = sta
if sta == '1':
s = sta
else:
print "You should add the equipment first"
values ={
"action":"update",
"apikey":apikey,
"deviceid":deviceID,
"params":
{
"Door":door,
"PIR":PIR,
"Leak":Leak,
"Smoke":Smoke,
"Remote":Remote,
"SOS":s
}} |
def main():
global Door_mac
global PIR_mac
global Leak_mac
global Smoke_mac
global Remote_mac
setup()
if ser.isOpen() == True:
print "serial open succeed!"
else:
print "serial open failure!"
while True:
# If check the GPIO12's status, if it is high, excuete commands to
# add new zigbee device into zigbee gateway
a = key_interrupt()
if a == '1':
print "Add equipment!"
# Set gateway to allow adding device
val=register()
short = val[0:5]
print "short:"+short
mac = val[6:29]
print "mac:"+mac
# Get the gateway MAC address
gatmac=gateway_mac()
print "gatewaymac:"+gatmac
# Configure the communication with zigbee device
set_target(short)
# Bind the zigbee device
bind(mac,gatmac)
# Read the zone type to check the type of zigbee device
# which can identify the alarm information from different zigbee sensor.
zone_type=cluster()
print "zone_type:"+zone_type
if zone_type == "15 00":
Door_mac = short
f = open('door.txt','w')
f.write(short)
f.close()
report()
elif zone_type == "0d 00":
PIR_mac = short
f=open('pir.txt','w')
f.write(short)
f.close()
report()
elif zone_type == "2a 00":
Leak_mac = short
f=open('leak.txt','w')
f.write(short)
f.close()
report()
elif zone_type == "28 00":
Smoke_mac = short
f=open('smoke.txt','w')
f.write(short)
f.close()
report()
elif zone_type == "11 00":
Remote_mac = short
f=open('remote.txt','w')
f.write(short)
f.close()
report()
# Check the alarm information from zigbee sensor node
data=alarm()
if data != -1:
short_mac = data[0:5]
print"short mac:"+short_mac
status = data[5]
print"status:"+status
# upload the alarm information to linksprite.io server
update(short_mac,status)
time.sleep(0.2)
if __name__=='__main__':
try:
main()
except KeyboardInterrupt:
ser.close() | http_post(values)
if s == '1'or PIR == '1':
timer = threading.Timer(2,recovery)
timer.start() | random_line_split |
testall.py | import sys, os
import re
import unittest
import traceback
import pywin32_testutil
# A list of demos that depend on user-interface of *any* kind. Tests listed
# here are not suitable for unattended testing.
ui_demos = """GetSaveFileName print_desktop win32cred_demo win32gui_demo
win32gui_dialog win32gui_menu win32gui_taskbar
win32rcparser_demo winprocess win32console_demo
win32gui_devicenotify
NetValidatePasswordPolicy""".split()
# Other demos known as 'bad' (or at least highly unlikely to work)
# cerapi: no CE module is built (CE via pywin32 appears dead)
# desktopmanager: hangs (well, hangs for 60secs or so...)
bad_demos = "cerapi desktopmanager win32comport_demo".split()
argvs = {
"rastest": ("-l",),
}
# re to pull apart an exception line into the exception type and the args.
re_exception = re.compile("([a-zA-Z0-9_.]*): (.*)$")
def find_exception_in_output(data):
have_traceback = False
for line in data.splitlines():
line = line.decode('ascii') # not sure what the correct encoding is...
if line.startswith("Traceback ("):
have_traceback = True
continue
if line.startswith(" "):
continue
if have_traceback:
# first line not starting with a space since the traceback.
# must be the exception!
m = re_exception.match(line)
if m:
exc_type, args = m.groups()
# get hacky - get the *real* exception object from the name.
bits = exc_type.split(".", 1)
if len(bits) > 1:
mod = __import__(bits[0])
exc = getattr(mod, bits[1])
else:
# probably builtin
exc = eval(bits[0])
else:
# hrm - probably just an exception with no args
try:
exc = eval(line.strip())
args = "()"
except:
return None
# try and turn the args into real args.
try:
args = eval(args)
except:
pass
if not isinstance(args, tuple):
args = (args,)
# try and instantiate the exception.
try:
ret = exc(*args)
except:
ret = None
return ret
# apparently not - keep looking...
have_traceback = False
class TestRunner:
def __init__(self, argv):
self.argv = argv
def __call__(self):
try:
import subprocess
p = subprocess.Popen(self.argv,
stdout=subprocess.PIPE, | output, _ = p.communicate()
rc = p.returncode
except ImportError:
# py2.3?
fin, fout, ferr = os.popen3(" ".join(self.argv))
fin.close()
output = fout.read() + ferr.read()
fout.close()
rc = ferr.close()
if rc:
base = os.path.basename(self.argv[1])
# See if we can detect and reconstruct an exception in the output.
reconstituted = find_exception_in_output(output)
if reconstituted is not None:
raise reconstituted
raise AssertionError("%s failed with exit code %s. Output is:\n%s" % (base, rc, output))
def get_demo_tests():
import win32api
ret = []
demo_dir = os.path.abspath(os.path.join(os.path.dirname(win32api.__file__), "Demos"))
assert os.path.isdir(demo_dir), demo_dir
for name in os.listdir(demo_dir):
base, ext = os.path.splitext(name)
if ext != ".py" or base in ui_demos or base in bad_demos:
continue
argv = (sys.executable, os.path.join(demo_dir, base+".py")) + \
argvs.get(base, ())
ret.append(unittest.FunctionTestCase(TestRunner(argv), description="win32/demos/" + name))
return ret
def import_all():
# Some hacks for import order - dde depends on win32ui
try:
import win32ui
except ImportError:
pass # 'what-ev-a....'
import win32api
dir = os.path.dirname(win32api.__file__)
num = 0
is_debug = os.path.basename(win32api.__file__).endswith("_d")
for name in os.listdir(dir):
base, ext = os.path.splitext(name)
if (ext==".pyd") and \
name != "_winxptheme.pyd" and \
(is_debug and base.endswith("_d") or \
not is_debug and not base.endswith("_d")):
try:
__import__(base)
except:
print "FAILED to import", name
raise
num += 1
def suite():
# Loop over all .py files here, except me :)
try:
me = __file__
except NameError:
me = sys.argv[0]
me = os.path.abspath(me)
files = os.listdir(os.path.dirname(me))
suite = unittest.TestSuite()
suite.addTest(unittest.FunctionTestCase(import_all))
for file in files:
base, ext = os.path.splitext(file)
if ext=='.py' and os.path.basename(me) != file:
try:
mod = __import__(base)
except:
print "FAILED to import test module %r" % base
traceback.print_exc()
continue
if hasattr(mod, "suite"):
test = mod.suite()
else:
test = unittest.defaultTestLoader.loadTestsFromModule(mod)
suite.addTest(test)
for test in get_demo_tests():
suite.addTest(test)
return suite
class CustomLoader(pywin32_testutil.TestLoader):
def loadTestsFromModule(self, module):
return self.fixupTestsForLeakTests(suite())
if __name__=='__main__':
pywin32_testutil.testmain(testLoader=CustomLoader()) | stderr=subprocess.STDOUT) | random_line_split |
testall.py | import sys, os
import re
import unittest
import traceback
import pywin32_testutil
# A list of demos that depend on user-interface of *any* kind. Tests listed
# here are not suitable for unattended testing.
ui_demos = """GetSaveFileName print_desktop win32cred_demo win32gui_demo
win32gui_dialog win32gui_menu win32gui_taskbar
win32rcparser_demo winprocess win32console_demo
win32gui_devicenotify
NetValidatePasswordPolicy""".split()
# Other demos known as 'bad' (or at least highly unlikely to work)
# cerapi: no CE module is built (CE via pywin32 appears dead)
# desktopmanager: hangs (well, hangs for 60secs or so...)
bad_demos = "cerapi desktopmanager win32comport_demo".split()
argvs = {
"rastest": ("-l",),
}
# re to pull apart an exception line into the exception type and the args.
re_exception = re.compile("([a-zA-Z0-9_.]*): (.*)$")
def find_exception_in_output(data):
have_traceback = False
for line in data.splitlines():
line = line.decode('ascii') # not sure what the correct encoding is...
if line.startswith("Traceback ("):
have_traceback = True
continue
if line.startswith(" "):
continue
if have_traceback:
# first line not starting with a space since the traceback.
# must be the exception!
m = re_exception.match(line)
if m:
exc_type, args = m.groups()
# get hacky - get the *real* exception object from the name.
bits = exc_type.split(".", 1)
if len(bits) > 1:
mod = __import__(bits[0])
exc = getattr(mod, bits[1])
else:
# probably builtin
exc = eval(bits[0])
else:
# hrm - probably just an exception with no args
try:
exc = eval(line.strip())
args = "()"
except:
return None
# try and turn the args into real args.
try:
args = eval(args)
except:
pass
if not isinstance(args, tuple):
args = (args,)
# try and instantiate the exception.
try:
ret = exc(*args)
except:
ret = None
return ret
# apparently not - keep looking...
have_traceback = False
class TestRunner:
|
def get_demo_tests():
import win32api
ret = []
demo_dir = os.path.abspath(os.path.join(os.path.dirname(win32api.__file__), "Demos"))
assert os.path.isdir(demo_dir), demo_dir
for name in os.listdir(demo_dir):
base, ext = os.path.splitext(name)
if ext != ".py" or base in ui_demos or base in bad_demos:
continue
argv = (sys.executable, os.path.join(demo_dir, base+".py")) + \
argvs.get(base, ())
ret.append(unittest.FunctionTestCase(TestRunner(argv), description="win32/demos/" + name))
return ret
def import_all():
# Some hacks for import order - dde depends on win32ui
try:
import win32ui
except ImportError:
pass # 'what-ev-a....'
import win32api
dir = os.path.dirname(win32api.__file__)
num = 0
is_debug = os.path.basename(win32api.__file__).endswith("_d")
for name in os.listdir(dir):
base, ext = os.path.splitext(name)
if (ext==".pyd") and \
name != "_winxptheme.pyd" and \
(is_debug and base.endswith("_d") or \
not is_debug and not base.endswith("_d")):
try:
__import__(base)
except:
print "FAILED to import", name
raise
num += 1
def suite():
# Loop over all .py files here, except me :)
try:
me = __file__
except NameError:
me = sys.argv[0]
me = os.path.abspath(me)
files = os.listdir(os.path.dirname(me))
suite = unittest.TestSuite()
suite.addTest(unittest.FunctionTestCase(import_all))
for file in files:
base, ext = os.path.splitext(file)
if ext=='.py' and os.path.basename(me) != file:
try:
mod = __import__(base)
except:
print "FAILED to import test module %r" % base
traceback.print_exc()
continue
if hasattr(mod, "suite"):
test = mod.suite()
else:
test = unittest.defaultTestLoader.loadTestsFromModule(mod)
suite.addTest(test)
for test in get_demo_tests():
suite.addTest(test)
return suite
class CustomLoader(pywin32_testutil.TestLoader):
def loadTestsFromModule(self, module):
return self.fixupTestsForLeakTests(suite())
if __name__=='__main__':
pywin32_testutil.testmain(testLoader=CustomLoader())
| def __init__(self, argv):
self.argv = argv
def __call__(self):
try:
import subprocess
p = subprocess.Popen(self.argv,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
output, _ = p.communicate()
rc = p.returncode
except ImportError:
# py2.3?
fin, fout, ferr = os.popen3(" ".join(self.argv))
fin.close()
output = fout.read() + ferr.read()
fout.close()
rc = ferr.close()
if rc:
base = os.path.basename(self.argv[1])
# See if we can detect and reconstruct an exception in the output.
reconstituted = find_exception_in_output(output)
if reconstituted is not None:
raise reconstituted
raise AssertionError("%s failed with exit code %s. Output is:\n%s" % (base, rc, output)) | identifier_body |
testall.py | import sys, os
import re
import unittest
import traceback
import pywin32_testutil
# A list of demos that depend on user-interface of *any* kind. Tests listed
# here are not suitable for unattended testing.
ui_demos = """GetSaveFileName print_desktop win32cred_demo win32gui_demo
win32gui_dialog win32gui_menu win32gui_taskbar
win32rcparser_demo winprocess win32console_demo
win32gui_devicenotify
NetValidatePasswordPolicy""".split()
# Other demos known as 'bad' (or at least highly unlikely to work)
# cerapi: no CE module is built (CE via pywin32 appears dead)
# desktopmanager: hangs (well, hangs for 60secs or so...)
bad_demos = "cerapi desktopmanager win32comport_demo".split()
argvs = {
"rastest": ("-l",),
}
# re to pull apart an exception line into the exception type and the args.
re_exception = re.compile("([a-zA-Z0-9_.]*): (.*)$")
def find_exception_in_output(data):
have_traceback = False
for line in data.splitlines():
line = line.decode('ascii') # not sure what the correct encoding is...
if line.startswith("Traceback ("):
have_traceback = True
continue
if line.startswith(" "):
continue
if have_traceback:
# first line not starting with a space since the traceback.
# must be the exception!
m = re_exception.match(line)
if m:
exc_type, args = m.groups()
# get hacky - get the *real* exception object from the name.
bits = exc_type.split(".", 1)
if len(bits) > 1:
mod = __import__(bits[0])
exc = getattr(mod, bits[1])
else:
# probably builtin
exc = eval(bits[0])
else:
# hrm - probably just an exception with no args
try:
exc = eval(line.strip())
args = "()"
except:
return None
# try and turn the args into real args.
try:
args = eval(args)
except:
pass
if not isinstance(args, tuple):
args = (args,)
# try and instantiate the exception.
try:
ret = exc(*args)
except:
ret = None
return ret
# apparently not - keep looking...
have_traceback = False
class TestRunner:
def __init__(self, argv):
self.argv = argv
def __call__(self):
try:
import subprocess
p = subprocess.Popen(self.argv,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
output, _ = p.communicate()
rc = p.returncode
except ImportError:
# py2.3?
fin, fout, ferr = os.popen3(" ".join(self.argv))
fin.close()
output = fout.read() + ferr.read()
fout.close()
rc = ferr.close()
if rc:
base = os.path.basename(self.argv[1])
# See if we can detect and reconstruct an exception in the output.
reconstituted = find_exception_in_output(output)
if reconstituted is not None:
raise reconstituted
raise AssertionError("%s failed with exit code %s. Output is:\n%s" % (base, rc, output))
def get_demo_tests():
import win32api
ret = []
demo_dir = os.path.abspath(os.path.join(os.path.dirname(win32api.__file__), "Demos"))
assert os.path.isdir(demo_dir), demo_dir
for name in os.listdir(demo_dir):
base, ext = os.path.splitext(name)
if ext != ".py" or base in ui_demos or base in bad_demos:
continue
argv = (sys.executable, os.path.join(demo_dir, base+".py")) + \
argvs.get(base, ())
ret.append(unittest.FunctionTestCase(TestRunner(argv), description="win32/demos/" + name))
return ret
def import_all():
# Some hacks for import order - dde depends on win32ui
try:
import win32ui
except ImportError:
pass # 'what-ev-a....'
import win32api
dir = os.path.dirname(win32api.__file__)
num = 0
is_debug = os.path.basename(win32api.__file__).endswith("_d")
for name in os.listdir(dir):
base, ext = os.path.splitext(name)
if (ext==".pyd") and \
name != "_winxptheme.pyd" and \
(is_debug and base.endswith("_d") or \
not is_debug and not base.endswith("_d")):
try:
__import__(base)
except:
print "FAILED to import", name
raise
num += 1
def suite():
# Loop over all .py files here, except me :)
try:
me = __file__
except NameError:
me = sys.argv[0]
me = os.path.abspath(me)
files = os.listdir(os.path.dirname(me))
suite = unittest.TestSuite()
suite.addTest(unittest.FunctionTestCase(import_all))
for file in files:
base, ext = os.path.splitext(file)
if ext=='.py' and os.path.basename(me) != file:
try:
mod = __import__(base)
except:
print "FAILED to import test module %r" % base
traceback.print_exc()
continue
if hasattr(mod, "suite"):
test = mod.suite()
else:
|
suite.addTest(test)
for test in get_demo_tests():
suite.addTest(test)
return suite
class CustomLoader(pywin32_testutil.TestLoader):
def loadTestsFromModule(self, module):
return self.fixupTestsForLeakTests(suite())
if __name__=='__main__':
pywin32_testutil.testmain(testLoader=CustomLoader())
| test = unittest.defaultTestLoader.loadTestsFromModule(mod) | conditional_block |
testall.py | import sys, os
import re
import unittest
import traceback
import pywin32_testutil
# A list of demos that depend on user-interface of *any* kind. Tests listed
# here are not suitable for unattended testing.
ui_demos = """GetSaveFileName print_desktop win32cred_demo win32gui_demo
win32gui_dialog win32gui_menu win32gui_taskbar
win32rcparser_demo winprocess win32console_demo
win32gui_devicenotify
NetValidatePasswordPolicy""".split()
# Other demos known as 'bad' (or at least highly unlikely to work)
# cerapi: no CE module is built (CE via pywin32 appears dead)
# desktopmanager: hangs (well, hangs for 60secs or so...)
bad_demos = "cerapi desktopmanager win32comport_demo".split()
argvs = {
"rastest": ("-l",),
}
# re to pull apart an exception line into the exception type and the args.
re_exception = re.compile("([a-zA-Z0-9_.]*): (.*)$")
def find_exception_in_output(data):
have_traceback = False
for line in data.splitlines():
line = line.decode('ascii') # not sure what the correct encoding is...
if line.startswith("Traceback ("):
have_traceback = True
continue
if line.startswith(" "):
continue
if have_traceback:
# first line not starting with a space since the traceback.
# must be the exception!
m = re_exception.match(line)
if m:
exc_type, args = m.groups()
# get hacky - get the *real* exception object from the name.
bits = exc_type.split(".", 1)
if len(bits) > 1:
mod = __import__(bits[0])
exc = getattr(mod, bits[1])
else:
# probably builtin
exc = eval(bits[0])
else:
# hrm - probably just an exception with no args
try:
exc = eval(line.strip())
args = "()"
except:
return None
# try and turn the args into real args.
try:
args = eval(args)
except:
pass
if not isinstance(args, tuple):
args = (args,)
# try and instantiate the exception.
try:
ret = exc(*args)
except:
ret = None
return ret
# apparently not - keep looking...
have_traceback = False
class TestRunner:
def __init__(self, argv):
self.argv = argv
def __call__(self):
try:
import subprocess
p = subprocess.Popen(self.argv,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
output, _ = p.communicate()
rc = p.returncode
except ImportError:
# py2.3?
fin, fout, ferr = os.popen3(" ".join(self.argv))
fin.close()
output = fout.read() + ferr.read()
fout.close()
rc = ferr.close()
if rc:
base = os.path.basename(self.argv[1])
# See if we can detect and reconstruct an exception in the output.
reconstituted = find_exception_in_output(output)
if reconstituted is not None:
raise reconstituted
raise AssertionError("%s failed with exit code %s. Output is:\n%s" % (base, rc, output))
def get_demo_tests():
import win32api
ret = []
demo_dir = os.path.abspath(os.path.join(os.path.dirname(win32api.__file__), "Demos"))
assert os.path.isdir(demo_dir), demo_dir
for name in os.listdir(demo_dir):
base, ext = os.path.splitext(name)
if ext != ".py" or base in ui_demos or base in bad_demos:
continue
argv = (sys.executable, os.path.join(demo_dir, base+".py")) + \
argvs.get(base, ())
ret.append(unittest.FunctionTestCase(TestRunner(argv), description="win32/demos/" + name))
return ret
def | ():
# Some hacks for import order - dde depends on win32ui
try:
import win32ui
except ImportError:
pass # 'what-ev-a....'
import win32api
dir = os.path.dirname(win32api.__file__)
num = 0
is_debug = os.path.basename(win32api.__file__).endswith("_d")
for name in os.listdir(dir):
base, ext = os.path.splitext(name)
if (ext==".pyd") and \
name != "_winxptheme.pyd" and \
(is_debug and base.endswith("_d") or \
not is_debug and not base.endswith("_d")):
try:
__import__(base)
except:
print "FAILED to import", name
raise
num += 1
def suite():
# Loop over all .py files here, except me :)
try:
me = __file__
except NameError:
me = sys.argv[0]
me = os.path.abspath(me)
files = os.listdir(os.path.dirname(me))
suite = unittest.TestSuite()
suite.addTest(unittest.FunctionTestCase(import_all))
for file in files:
base, ext = os.path.splitext(file)
if ext=='.py' and os.path.basename(me) != file:
try:
mod = __import__(base)
except:
print "FAILED to import test module %r" % base
traceback.print_exc()
continue
if hasattr(mod, "suite"):
test = mod.suite()
else:
test = unittest.defaultTestLoader.loadTestsFromModule(mod)
suite.addTest(test)
for test in get_demo_tests():
suite.addTest(test)
return suite
class CustomLoader(pywin32_testutil.TestLoader):
def loadTestsFromModule(self, module):
return self.fixupTestsForLeakTests(suite())
if __name__=='__main__':
pywin32_testutil.testmain(testLoader=CustomLoader())
| import_all | identifier_name |
index.js | /*
* ATLauncher CLI - https://github.com/ATLauncher/ATLauncher-CLI | * (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
(function () {
'use strict';
module.exports = {
cacheManager: require('./cacheManager'),
minecraftVersionManager: require('./minecraftVersionManager'),
packManager: require('./packManager')
};
})(); | * Copyright (C) 2016 ATLauncher
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or | random_line_split |
accountsResultComponent.js | /*
* Copyright (C) 2013-2015 Uncharted Software Inc.
*
* Property of Uncharted(TM), formerly Oculus Info Inc.
* http://uncharted.software/
* | * the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
* of the Software, and to permit persons to whom the Software is furnished to do
* so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
define(
[
'lib/communication/accountsViewChannels',
'views/components/resultComponentBase'
],
function(
accountsChannel,
resultComponentBase
) {
//--------------------------------------------------------------------------------------------------------------
var _add = function(xfId, container, headerInfo, result, snippet) {
resultComponentBase.addSearchResult(
xfId,
container,
headerInfo,
result,
snippet,
accountsChannel.RESULT_SELECTION_CHANGE,
accountsChannel.RESULT_ENTITY_FULL_DETAILS_SHOW
);
};
//--------------------------------------------------------------------------------------------------------------
return {
addSearchResult : _add
};
}
); | * Released under the MIT License.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in | random_line_split |
relaycontrol.py | import paho.mqtt.client as mqtt
import json, time
import RPi.GPIO as GPIO
from time import sleep
# The script as below using BCM GPIO 00..nn numbers
GPIO.setmode(GPIO.BCM)
# Set relay pins as output
GPIO.setup(24, GPIO.OUT)
# ----- CHANGE THESE FOR YOUR SETUP -----
MQTT_HOST = "190.97.168.236"
MQTT_PORT = 1883
USERNAME = ''
PASSWORD = ""
# ---------------------------------------
def on_connect(client, userdata, rc):
print("\nConnected with result code " + str(rc) + "\n")
#Subscribing in on_connect() means that if we lose the connection and
# reconnect then subscriptions will be renewed.
client.subscribe("/iot/control/")
print("Subscribed to iotcontrol")
def on_message_iotrl(client, userdata, msg):
print("\n\t* Raspberry UPDATED ("+msg.topic+"): " + str(msg.payload))
if msg.payload == "gpio24on":
GPIO.output(24, GPIO.HIGH)
client.publish("/iot/status", "Relay gpio18on", 2)
if msg.payload == "gpio24off":
GPIO.output(24, GPIO.LOW)
client.publish("/iot/status", "Relay gpio18off", 2)
def command_error():
|
client = mqtt.Client(client_id="rasp-g1")
# Callback declarations (functions run based on certain messages)
client.on_connect = on_connect
client.message_callback_add("/iot/control/", on_message_iotrl)
# This is where the MQTT service connects and starts listening for messages
client.username_pw_set(USERNAME, PASSWORD)
client.connect(MQTT_HOST, MQTT_PORT, 60)
client.loop_start() # Background thread to call loop() automatically
# Main program loop
while True:
time.sleep(10)
| print("Error: Unknown command") | identifier_body |
relaycontrol.py | import paho.mqtt.client as mqtt
import json, time
import RPi.GPIO as GPIO
from time import sleep
# The script as below using BCM GPIO 00..nn numbers
GPIO.setmode(GPIO.BCM)
# Set relay pins as output
GPIO.setup(24, GPIO.OUT)
# ----- CHANGE THESE FOR YOUR SETUP -----
MQTT_HOST = "190.97.168.236"
MQTT_PORT = 1883
USERNAME = ''
PASSWORD = ""
# ---------------------------------------
def on_connect(client, userdata, rc):
print("\nConnected with result code " + str(rc) + "\n")
#Subscribing in on_connect() means that if we lose the connection and
# reconnect then subscriptions will be renewed.
client.subscribe("/iot/control/")
print("Subscribed to iotcontrol")
def | (client, userdata, msg):
print("\n\t* Raspberry UPDATED ("+msg.topic+"): " + str(msg.payload))
if msg.payload == "gpio24on":
GPIO.output(24, GPIO.HIGH)
client.publish("/iot/status", "Relay gpio18on", 2)
if msg.payload == "gpio24off":
GPIO.output(24, GPIO.LOW)
client.publish("/iot/status", "Relay gpio18off", 2)
def command_error():
print("Error: Unknown command")
client = mqtt.Client(client_id="rasp-g1")
# Callback declarations (functions run based on certain messages)
client.on_connect = on_connect
client.message_callback_add("/iot/control/", on_message_iotrl)
# This is where the MQTT service connects and starts listening for messages
client.username_pw_set(USERNAME, PASSWORD)
client.connect(MQTT_HOST, MQTT_PORT, 60)
client.loop_start() # Background thread to call loop() automatically
# Main program loop
while True:
time.sleep(10)
| on_message_iotrl | identifier_name |
relaycontrol.py | import paho.mqtt.client as mqtt
import json, time
import RPi.GPIO as GPIO
from time import sleep
# The script as below using BCM GPIO 00..nn numbers
GPIO.setmode(GPIO.BCM)
# Set relay pins as output
GPIO.setup(24, GPIO.OUT)
# ----- CHANGE THESE FOR YOUR SETUP -----
MQTT_HOST = "190.97.168.236"
MQTT_PORT = 1883
USERNAME = ''
PASSWORD = ""
# ---------------------------------------
def on_connect(client, userdata, rc):
print("\nConnected with result code " + str(rc) + "\n")
#Subscribing in on_connect() means that if we lose the connection and
# reconnect then subscriptions will be renewed.
client.subscribe("/iot/control/")
print("Subscribed to iotcontrol")
def on_message_iotrl(client, userdata, msg):
print("\n\t* Raspberry UPDATED ("+msg.topic+"): " + str(msg.payload))
if msg.payload == "gpio24on":
GPIO.output(24, GPIO.HIGH)
client.publish("/iot/status", "Relay gpio18on", 2)
if msg.payload == "gpio24off":
GPIO.output(24, GPIO.LOW)
client.publish("/iot/status", "Relay gpio18off", 2)
def command_error():
print("Error: Unknown command")
client = mqtt.Client(client_id="rasp-g1")
# Callback declarations (functions run based on certain messages)
client.on_connect = on_connect
client.message_callback_add("/iot/control/", on_message_iotrl)
# This is where the MQTT service connects and starts listening for messages
client.username_pw_set(USERNAME, PASSWORD)
client.connect(MQTT_HOST, MQTT_PORT, 60)
client.loop_start() # Background thread to call loop() automatically
# Main program loop
while True:
| time.sleep(10) | conditional_block | |
relaycontrol.py | import paho.mqtt.client as mqtt
import json, time
import RPi.GPIO as GPIO
from time import sleep
# The script as below using BCM GPIO 00..nn numbers
GPIO.setmode(GPIO.BCM)
# Set relay pins as output
GPIO.setup(24, GPIO.OUT)
# ----- CHANGE THESE FOR YOUR SETUP -----
MQTT_HOST = "190.97.168.236"
MQTT_PORT = 1883
USERNAME = ''
PASSWORD = ""
# ---------------------------------------
def on_connect(client, userdata, rc):
print("\nConnected with result code " + str(rc) + "\n")
#Subscribing in on_connect() means that if we lose the connection and
# reconnect then subscriptions will be renewed.
client.subscribe("/iot/control/")
print("Subscribed to iotcontrol")
def on_message_iotrl(client, userdata, msg):
print("\n\t* Raspberry UPDATED ("+msg.topic+"): " + str(msg.payload))
if msg.payload == "gpio24on":
GPIO.output(24, GPIO.HIGH)
client.publish("/iot/status", "Relay gpio18on", 2) | client.publish("/iot/status", "Relay gpio18off", 2)
def command_error():
print("Error: Unknown command")
client = mqtt.Client(client_id="rasp-g1")
# Callback declarations (functions run based on certain messages)
client.on_connect = on_connect
client.message_callback_add("/iot/control/", on_message_iotrl)
# This is where the MQTT service connects and starts listening for messages
client.username_pw_set(USERNAME, PASSWORD)
client.connect(MQTT_HOST, MQTT_PORT, 60)
client.loop_start() # Background thread to call loop() automatically
# Main program loop
while True:
time.sleep(10) | if msg.payload == "gpio24off":
GPIO.output(24, GPIO.LOW) | random_line_split |
lib.rs | // Copyright 2019 The Chromium OS Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//! Support crate for writing fuzzers in Chrome OS.
//!
//! The major features provided by this crate are:
//!
//! * The [`fuzz_target`] macro which wraps the body of the fuzzing code with
//! all with all the boilerplate needed to build and run it as a fuzzer on
//! Chrome OS infrastructure.
//! * The [`FuzzRng`] type that provides a random number generator using fuzzer
//! input as the source of its randomness. Fuzzers that need to generate
//! structured data can use this type in conjunction with the [`rand`] crate
//! to generate the data they need.
//!
//! # Getting Started
//!
//! To use this crate add it as a dependency to the fuzzer's `Cargo.toml` along
//! with the crate to be fuzzed:
//!
//! ```Cargo.toml
//! [dependencies]
//! cros_fuzz = "*"
//! your_crate = "*"
//! ```
//!
//! Then use the [`fuzz_target`] macro to write the body of the fuzzer. All
//! fuzzers should use the `#![no_main]` crate attribute as the main function
//! will be provided by the fuzzer runtime.
//!
//! ```rust,ignore
//! #![no_main]
//!
//! use cros_fuzz::fuzz_target;
//! use your_crate::some_function;
//!
//! fuzz_target!(|data: &[u8]| {
//! some_function(data);
//! });
//! ```
//!
//! [`FuzzRng`]: rand/struct.FuzzRng.html
//! [`fuzz_target`]: macro.fuzz_target.html
//! [`rand`]: https://docs.rs/rand
pub mod rand;
/// The main macro for writing a fuzzer. The fuzzer runtime will repeatedly
/// call the body of `fuzz_target!` with a slice of pseudo-random bytes, until
/// your program hits an error condition (segfault, panic, etc). | ///
/// # Examples
///
/// ```
/// use std::str;
/// # #[macro_use] extern crate cros_fuzz;
///
/// fuzz_target!(|data: &[u8]| {
/// let _ = str::from_utf8(data);
/// });
///
/// # fn main() {
/// # let buf = b"hello, world!";
/// # llvm_fuzzer_test_one_input(buf.as_ptr(), buf.len());
/// # }
/// ```
#[macro_export]
macro_rules! fuzz_target {
(|$bytes:ident| $body:block) => {
use std::panic;
use std::process;
use std::slice;
#[export_name = "LLVMFuzzerTestOneInput"]
fn llvm_fuzzer_test_one_input(data: *const u8, size: usize) -> i32 {
// We cannot unwind past ffi boundaries.
panic::catch_unwind(|| {
// Safe because the libfuzzer runtime will guarantee that `data` is
// at least `size` bytes long and that it will be valid for the lifetime
// of this function.
let $bytes = unsafe { slice::from_raw_parts(data, size) };
$body
})
.err()
.map(|_| process::abort());
0
}
};
(|$bytes:ident: &[u8]| $body:block) => {
fuzz_target!(|$bytes| $body);
};
} | random_line_split | |
base64_ops_test.py | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for EncodeBase64 and DecodeBase64."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import base64
import numpy as np
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import string_ops
from tensorflow.python.platform import test
class Base64OpsTest(test_util.TensorFlowTestCase):
def setUp(self):
self._msg = array_ops.placeholder(dtype=dtypes.string)
self._encoded_f = string_ops.encode_base64(self._msg, pad=False)
self._decoded_f = string_ops.decode_base64(self._encoded_f)
self._encoded_t = string_ops.encode_base64(self._msg, pad=True)
self._decoded_t = string_ops.decode_base64(self._encoded_t)
def _RemovePad(self, msg, base64_msg):
if len(msg) % 3 == 1:
return base64_msg[:-2]
if len(msg) % 3 == 2:
return base64_msg[:-1]
return base64_msg
def _RunTest(self, msg, pad):
with self.test_session() as sess:
if pad:
encoded, decoded = sess.run([self._encoded_t, self._decoded_t],
feed_dict={self._msg: msg})
else:
encoded, decoded = sess.run([self._encoded_f, self._decoded_f],
feed_dict={self._msg: msg})
if not isinstance(msg, (list, tuple)):
msg = [msg]
encoded = [encoded]
decoded = [decoded]
base64_msg = [base64.urlsafe_b64encode(m) for m in msg]
if not pad:
base64_msg = [self._RemovePad(m, b) for m, b in zip(msg, base64_msg)]
for i in range(len(msg)):
self.assertEqual(base64_msg[i], encoded[i])
self.assertEqual(msg[i], decoded[i])
def testWithPythonBase64(self):
for pad in (False, True):
self._RunTest(b"", pad=pad)
for _ in range(100):
length = np.random.randint(1024 * 1024)
msg = np.random.bytes(length)
self._RunTest(msg, pad=pad)
def testShape(self):
for pad in (False, True): | for _ in range(np.random.randint(10))]
self._RunTest(msg, pad=pad)
# Zero-element, non-trivial shapes.
for _ in range(10):
k = np.random.randint(10)
msg = np.empty((0, k), dtype=bytes)
encoded = string_ops.encode_base64(msg, pad=pad)
decoded = string_ops.decode_base64(encoded)
with self.test_session() as sess:
encoded_value, decoded_value = sess.run([encoded, decoded])
self.assertEqual(encoded_value.shape, msg.shape)
self.assertEqual(decoded_value.shape, msg.shape)
def testInvalidInput(self):
def try_decode(enc):
self._decoded_f.eval(feed_dict={self._encoded_f: enc})
with self.test_session():
# Invalid length.
msg = np.random.bytes(99)
enc = base64.urlsafe_b64encode(msg)
with self.assertRaisesRegexp(errors.InvalidArgumentError, "1 modulo 4"):
try_decode(enc + b"a")
# Invalid char used in encoding.
msg = np.random.bytes(34)
enc = base64.urlsafe_b64encode(msg)
for i in range(len(msg)):
with self.assertRaises(errors.InvalidArgumentError):
try_decode(enc[:i] + b"?" + enc[(i + 1):])
with self.assertRaises(errors.InvalidArgumentError):
try_decode(enc[:i] + b"\x80" + enc[(i + 1):]) # outside ascii range.
with self.assertRaises(errors.InvalidArgumentError):
try_decode(enc[:i] + b"+" + enc[(i + 1):]) # not url-safe.
with self.assertRaises(errors.InvalidArgumentError):
try_decode(enc[:i] + b"/" + enc[(i + 1):]) # not url-safe.
# Partial padding.
msg = np.random.bytes(34)
enc = base64.urlsafe_b64encode(msg)
with self.assertRaises(errors.InvalidArgumentError):
# enc contains == at the end. Partial padding is not allowed.
try_decode(enc[:-1])
# Unnecessary padding.
msg = np.random.bytes(33)
enc = base64.urlsafe_b64encode(msg)
with self.assertRaises(errors.InvalidArgumentError):
try_decode(enc + b"==")
with self.assertRaises(errors.InvalidArgumentError):
try_decode(enc + b"===")
with self.assertRaises(errors.InvalidArgumentError):
try_decode(enc + b"====")
# Padding in the middle. (Previous implementation was ok with this as long
# as padding char location was 2 or 3 (mod 4).
msg = np.random.bytes(33)
enc = base64.urlsafe_b64encode(msg)
for i in range(len(msg) - 1):
with self.assertRaises(errors.InvalidArgumentError):
try_decode(enc[:i] + b"=" + enc[(i + 1):])
for i in range(len(msg) - 2):
with self.assertRaises(errors.InvalidArgumentError):
try_decode(enc[:i] + b"==" + enc[(i + 2):])
if __name__ == "__main__":
test.main() | for _ in range(10):
msg = [np.random.bytes(np.random.randint(20)) | random_line_split |
base64_ops_test.py | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for EncodeBase64 and DecodeBase64."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import base64
import numpy as np
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import string_ops
from tensorflow.python.platform import test
class Base64OpsTest(test_util.TensorFlowTestCase):
def setUp(self):
self._msg = array_ops.placeholder(dtype=dtypes.string)
self._encoded_f = string_ops.encode_base64(self._msg, pad=False)
self._decoded_f = string_ops.decode_base64(self._encoded_f)
self._encoded_t = string_ops.encode_base64(self._msg, pad=True)
self._decoded_t = string_ops.decode_base64(self._encoded_t)
def _RemovePad(self, msg, base64_msg):
if len(msg) % 3 == 1:
return base64_msg[:-2]
if len(msg) % 3 == 2:
return base64_msg[:-1]
return base64_msg
def _RunTest(self, msg, pad):
with self.test_session() as sess:
if pad:
encoded, decoded = sess.run([self._encoded_t, self._decoded_t],
feed_dict={self._msg: msg})
else:
encoded, decoded = sess.run([self._encoded_f, self._decoded_f],
feed_dict={self._msg: msg})
if not isinstance(msg, (list, tuple)):
msg = [msg]
encoded = [encoded]
decoded = [decoded]
base64_msg = [base64.urlsafe_b64encode(m) for m in msg]
if not pad:
base64_msg = [self._RemovePad(m, b) for m, b in zip(msg, base64_msg)]
for i in range(len(msg)):
self.assertEqual(base64_msg[i], encoded[i])
self.assertEqual(msg[i], decoded[i])
def testWithPythonBase64(self):
for pad in (False, True):
self._RunTest(b"", pad=pad)
for _ in range(100):
length = np.random.randint(1024 * 1024)
msg = np.random.bytes(length)
self._RunTest(msg, pad=pad)
def testShape(self):
for pad in (False, True):
for _ in range(10):
msg = [np.random.bytes(np.random.randint(20))
for _ in range(np.random.randint(10))]
self._RunTest(msg, pad=pad)
# Zero-element, non-trivial shapes.
for _ in range(10):
k = np.random.randint(10)
msg = np.empty((0, k), dtype=bytes)
encoded = string_ops.encode_base64(msg, pad=pad)
decoded = string_ops.decode_base64(encoded)
with self.test_session() as sess:
encoded_value, decoded_value = sess.run([encoded, decoded])
self.assertEqual(encoded_value.shape, msg.shape)
self.assertEqual(decoded_value.shape, msg.shape)
def testInvalidInput(self):
def try_decode(enc):
|
with self.test_session():
# Invalid length.
msg = np.random.bytes(99)
enc = base64.urlsafe_b64encode(msg)
with self.assertRaisesRegexp(errors.InvalidArgumentError, "1 modulo 4"):
try_decode(enc + b"a")
# Invalid char used in encoding.
msg = np.random.bytes(34)
enc = base64.urlsafe_b64encode(msg)
for i in range(len(msg)):
with self.assertRaises(errors.InvalidArgumentError):
try_decode(enc[:i] + b"?" + enc[(i + 1):])
with self.assertRaises(errors.InvalidArgumentError):
try_decode(enc[:i] + b"\x80" + enc[(i + 1):]) # outside ascii range.
with self.assertRaises(errors.InvalidArgumentError):
try_decode(enc[:i] + b"+" + enc[(i + 1):]) # not url-safe.
with self.assertRaises(errors.InvalidArgumentError):
try_decode(enc[:i] + b"/" + enc[(i + 1):]) # not url-safe.
# Partial padding.
msg = np.random.bytes(34)
enc = base64.urlsafe_b64encode(msg)
with self.assertRaises(errors.InvalidArgumentError):
# enc contains == at the end. Partial padding is not allowed.
try_decode(enc[:-1])
# Unnecessary padding.
msg = np.random.bytes(33)
enc = base64.urlsafe_b64encode(msg)
with self.assertRaises(errors.InvalidArgumentError):
try_decode(enc + b"==")
with self.assertRaises(errors.InvalidArgumentError):
try_decode(enc + b"===")
with self.assertRaises(errors.InvalidArgumentError):
try_decode(enc + b"====")
# Padding in the middle. (Previous implementation was ok with this as long
# as padding char location was 2 or 3 (mod 4).
msg = np.random.bytes(33)
enc = base64.urlsafe_b64encode(msg)
for i in range(len(msg) - 1):
with self.assertRaises(errors.InvalidArgumentError):
try_decode(enc[:i] + b"=" + enc[(i + 1):])
for i in range(len(msg) - 2):
with self.assertRaises(errors.InvalidArgumentError):
try_decode(enc[:i] + b"==" + enc[(i + 2):])
if __name__ == "__main__":
test.main()
| self._decoded_f.eval(feed_dict={self._encoded_f: enc}) | identifier_body |
base64_ops_test.py | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for EncodeBase64 and DecodeBase64."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import base64
import numpy as np
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import string_ops
from tensorflow.python.platform import test
class Base64OpsTest(test_util.TensorFlowTestCase):
def setUp(self):
self._msg = array_ops.placeholder(dtype=dtypes.string)
self._encoded_f = string_ops.encode_base64(self._msg, pad=False)
self._decoded_f = string_ops.decode_base64(self._encoded_f)
self._encoded_t = string_ops.encode_base64(self._msg, pad=True)
self._decoded_t = string_ops.decode_base64(self._encoded_t)
def _RemovePad(self, msg, base64_msg):
if len(msg) % 3 == 1:
return base64_msg[:-2]
if len(msg) % 3 == 2:
return base64_msg[:-1]
return base64_msg
def _RunTest(self, msg, pad):
with self.test_session() as sess:
if pad:
encoded, decoded = sess.run([self._encoded_t, self._decoded_t],
feed_dict={self._msg: msg})
else:
encoded, decoded = sess.run([self._encoded_f, self._decoded_f],
feed_dict={self._msg: msg})
if not isinstance(msg, (list, tuple)):
msg = [msg]
encoded = [encoded]
decoded = [decoded]
base64_msg = [base64.urlsafe_b64encode(m) for m in msg]
if not pad:
base64_msg = [self._RemovePad(m, b) for m, b in zip(msg, base64_msg)]
for i in range(len(msg)):
self.assertEqual(base64_msg[i], encoded[i])
self.assertEqual(msg[i], decoded[i])
def testWithPythonBase64(self):
for pad in (False, True):
self._RunTest(b"", pad=pad)
for _ in range(100):
length = np.random.randint(1024 * 1024)
msg = np.random.bytes(length)
self._RunTest(msg, pad=pad)
def testShape(self):
for pad in (False, True):
|
  def testInvalidInput(self):
    """Decoding must reject malformed urlsafe-base64 input with
    InvalidArgumentError rather than silently guessing."""
    def try_decode(enc):
      # Force evaluation of the decode op on a hand-corrupted encoding.
      self._decoded_f.eval(feed_dict={self._encoded_f: enc})
    with self.test_session():
      # Invalid length.
      msg = np.random.bytes(99)
      enc = base64.urlsafe_b64encode(msg)
      with self.assertRaisesRegexp(errors.InvalidArgumentError, "1 modulo 4"):
        try_decode(enc + b"a")
      # Invalid char used in encoding.
      msg = np.random.bytes(34)
      enc = base64.urlsafe_b64encode(msg)
      for i in range(len(msg)):
        with self.assertRaises(errors.InvalidArgumentError):
          try_decode(enc[:i] + b"?" + enc[(i + 1):])
        with self.assertRaises(errors.InvalidArgumentError):
          try_decode(enc[:i] + b"\x80" + enc[(i + 1):])  # outside ascii range.
        with self.assertRaises(errors.InvalidArgumentError):
          try_decode(enc[:i] + b"+" + enc[(i + 1):])  # not url-safe.
        with self.assertRaises(errors.InvalidArgumentError):
          try_decode(enc[:i] + b"/" + enc[(i + 1):])  # not url-safe.
      # Partial padding.
      msg = np.random.bytes(34)
      enc = base64.urlsafe_b64encode(msg)
      with self.assertRaises(errors.InvalidArgumentError):
        # enc contains == at the end. Partial padding is not allowed.
        try_decode(enc[:-1])
      # Unnecessary padding.
      msg = np.random.bytes(33)
      enc = base64.urlsafe_b64encode(msg)
      with self.assertRaises(errors.InvalidArgumentError):
        try_decode(enc + b"==")
      with self.assertRaises(errors.InvalidArgumentError):
        try_decode(enc + b"===")
      with self.assertRaises(errors.InvalidArgumentError):
        try_decode(enc + b"====")
      # Padding in the middle. (Previous implementation was ok with this as long
      # as padding char location was 2 or 3 (mod 4).
      msg = np.random.bytes(33)
      enc = base64.urlsafe_b64encode(msg)
      for i in range(len(msg) - 1):
        with self.assertRaises(errors.InvalidArgumentError):
          try_decode(enc[:i] + b"=" + enc[(i + 1):])
      for i in range(len(msg) - 2):
        with self.assertRaises(errors.InvalidArgumentError):
          try_decode(enc[:i] + b"==" + enc[(i + 2):])
if __name__ == "__main__":
test.main()
| for _ in range(10):
msg = [np.random.bytes(np.random.randint(20))
for _ in range(np.random.randint(10))]
self._RunTest(msg, pad=pad)
# Zero-element, non-trivial shapes.
for _ in range(10):
k = np.random.randint(10)
msg = np.empty((0, k), dtype=bytes)
encoded = string_ops.encode_base64(msg, pad=pad)
decoded = string_ops.decode_base64(encoded)
with self.test_session() as sess:
encoded_value, decoded_value = sess.run([encoded, decoded])
self.assertEqual(encoded_value.shape, msg.shape)
self.assertEqual(decoded_value.shape, msg.shape) | conditional_block |
base64_ops_test.py | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for EncodeBase64 and DecodeBase64."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import base64
import numpy as np
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import string_ops
from tensorflow.python.platform import test
class Base64OpsTest(test_util.TensorFlowTestCase):
  def setUp(self):
    # Build the encode/decode graph once per test: a string placeholder fed
    # through encode_base64 with pad=False (_f) and pad=True (_t), each wired
    # back through decode_base64, so individual tests only vary the feed value.
    self._msg = array_ops.placeholder(dtype=dtypes.string)
    self._encoded_f = string_ops.encode_base64(self._msg, pad=False)
    self._decoded_f = string_ops.decode_base64(self._encoded_f)
    self._encoded_t = string_ops.encode_base64(self._msg, pad=True)
    self._decoded_t = string_ops.decode_base64(self._encoded_t)
def _RemovePad(self, msg, base64_msg):
if len(msg) % 3 == 1:
return base64_msg[:-2]
if len(msg) % 3 == 2:
return base64_msg[:-1]
return base64_msg
  def _RunTest(self, msg, pad):
    """Round-trip `msg` through the TF encode/decode ops and compare against
    Python's base64 module as the reference implementation.

    `msg` may be a single bytestring or a list/tuple of bytestrings.
    """
    with self.test_session() as sess:
      if pad:
        encoded, decoded = sess.run([self._encoded_t, self._decoded_t],
                                    feed_dict={self._msg: msg})
      else:
        encoded, decoded = sess.run([self._encoded_f, self._decoded_f],
                                    feed_dict={self._msg: msg})
      # Normalize the scalar case to a one-element list so the comparison
      # loop below covers both shapes uniformly.
      if not isinstance(msg, (list, tuple)):
        msg = [msg]
        encoded = [encoded]
        decoded = [decoded]
      base64_msg = [base64.urlsafe_b64encode(m) for m in msg]
      if not pad:
        # Python's b64encode always pads; drop it to match pad=False output.
        base64_msg = [self._RemovePad(m, b) for m, b in zip(msg, base64_msg)]
      for i in range(len(msg)):
        self.assertEqual(base64_msg[i], encoded[i])
        self.assertEqual(msg[i], decoded[i])
def testWithPythonBase64(self):
for pad in (False, True):
self._RunTest(b"", pad=pad)
for _ in range(100):
length = np.random.randint(1024 * 1024)
msg = np.random.bytes(length)
self._RunTest(msg, pad=pad)
  def testShape(self):
    """Encode/decode should preserve tensor shape, including zero-element
    tensors with non-trivial shapes."""
    for pad in (False, True):
      # Random-length batches of random-length bytestrings.
      for _ in range(10):
        msg = [np.random.bytes(np.random.randint(20))
               for _ in range(np.random.randint(10))]
        self._RunTest(msg, pad=pad)
      # Zero-element, non-trivial shapes.
      for _ in range(10):
        k = np.random.randint(10)
        msg = np.empty((0, k), dtype=bytes)
        encoded = string_ops.encode_base64(msg, pad=pad)
        decoded = string_ops.decode_base64(encoded)
        with self.test_session() as sess:
          encoded_value, decoded_value = sess.run([encoded, decoded])
        self.assertEqual(encoded_value.shape, msg.shape)
        self.assertEqual(decoded_value.shape, msg.shape)
def | (self):
def try_decode(enc):
self._decoded_f.eval(feed_dict={self._encoded_f: enc})
with self.test_session():
# Invalid length.
msg = np.random.bytes(99)
enc = base64.urlsafe_b64encode(msg)
with self.assertRaisesRegexp(errors.InvalidArgumentError, "1 modulo 4"):
try_decode(enc + b"a")
# Invalid char used in encoding.
msg = np.random.bytes(34)
enc = base64.urlsafe_b64encode(msg)
for i in range(len(msg)):
with self.assertRaises(errors.InvalidArgumentError):
try_decode(enc[:i] + b"?" + enc[(i + 1):])
with self.assertRaises(errors.InvalidArgumentError):
try_decode(enc[:i] + b"\x80" + enc[(i + 1):]) # outside ascii range.
with self.assertRaises(errors.InvalidArgumentError):
try_decode(enc[:i] + b"+" + enc[(i + 1):]) # not url-safe.
with self.assertRaises(errors.InvalidArgumentError):
try_decode(enc[:i] + b"/" + enc[(i + 1):]) # not url-safe.
# Partial padding.
msg = np.random.bytes(34)
enc = base64.urlsafe_b64encode(msg)
with self.assertRaises(errors.InvalidArgumentError):
# enc contains == at the end. Partial padding is not allowed.
try_decode(enc[:-1])
# Unnecessary padding.
msg = np.random.bytes(33)
enc = base64.urlsafe_b64encode(msg)
with self.assertRaises(errors.InvalidArgumentError):
try_decode(enc + b"==")
with self.assertRaises(errors.InvalidArgumentError):
try_decode(enc + b"===")
with self.assertRaises(errors.InvalidArgumentError):
try_decode(enc + b"====")
# Padding in the middle. (Previous implementation was ok with this as long
# as padding char location was 2 or 3 (mod 4).
msg = np.random.bytes(33)
enc = base64.urlsafe_b64encode(msg)
for i in range(len(msg) - 1):
with self.assertRaises(errors.InvalidArgumentError):
try_decode(enc[:i] + b"=" + enc[(i + 1):])
for i in range(len(msg) - 2):
with self.assertRaises(errors.InvalidArgumentError):
try_decode(enc[:i] + b"==" + enc[(i + 2):])
if __name__ == "__main__":
test.main()
| testInvalidInput | identifier_name |
0021_sso_id_verification.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.12 on 2018-04-11 15:33
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
| dependencies = [
('third_party_auth', '0020_cleanup_slug_fields'),
]
operations = [
migrations.AddField(
model_name='ltiproviderconfig',
name='enable_sso_id_verification',
field=models.BooleanField(default=False, help_text=b'Use the presence of a profile from a trusted third party as proof of identity verification.'),
),
migrations.AddField(
model_name='oauth2providerconfig',
name='enable_sso_id_verification',
field=models.BooleanField(default=False, help_text=b'Use the presence of a profile from a trusted third party as proof of identity verification.'),
),
migrations.AddField(
model_name='samlproviderconfig',
name='enable_sso_id_verification',
field=models.BooleanField(default=False, help_text=b'Use the presence of a profile from a trusted third party as proof of identity verification.'),
),
] | identifier_body | |
0021_sso_id_verification.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.12 on 2018-04-11 15:33
from __future__ import unicode_literals
from django.db import migrations, models
class | (migrations.Migration):
dependencies = [
('third_party_auth', '0020_cleanup_slug_fields'),
]
operations = [
migrations.AddField(
model_name='ltiproviderconfig',
name='enable_sso_id_verification',
field=models.BooleanField(default=False, help_text=b'Use the presence of a profile from a trusted third party as proof of identity verification.'),
),
migrations.AddField(
model_name='oauth2providerconfig',
name='enable_sso_id_verification',
field=models.BooleanField(default=False, help_text=b'Use the presence of a profile from a trusted third party as proof of identity verification.'),
),
migrations.AddField(
model_name='samlproviderconfig',
name='enable_sso_id_verification',
field=models.BooleanField(default=False, help_text=b'Use the presence of a profile from a trusted third party as proof of identity verification.'),
),
]
| Migration | identifier_name |
mod.rs | use crate::types::game::paddock::PaddockInformationsForSell;
use crate::types::game::paddock::PaddockInstancesInformations;
use protocol_derive::{Decode, Encode};
/// Protocol message (id 5824) carrying the properties of a paddock instance.
#[derive(Clone, PartialEq, Debug, Encode, Decode)]
#[protocol(id = 5824)]
pub struct PaddockPropertiesMessage<'a> {
    pub properties: PaddockInstancesInformations<'a>,
}
/// Protocol message (id 6141) requesting one page of the paddocks-for-sale
/// listing.
#[derive(Clone, PartialEq, Debug, Encode, Decode)]
#[protocol(id = 6141)]
pub struct PaddockToSellListRequestMessage<'a> {
    /// Page of the listing being requested (`#[protocol(var)]` wire encoding).
    #[protocol(var)]
    pub page_index: u16,
    pub _phantom: std::marker::PhantomData<&'a ()>,
}
/// Protocol message (id 6026) referencing farm objects by map cell,
/// presumably to play their animations — name-derived, confirm against the
/// protocol definition.
#[derive(Clone, PartialEq, Debug, Encode, Decode)]
#[protocol(id = 6026)]
pub struct GameDataPlayFarmObjectAnimationMessage<'a> {
    /// Map cell ids (`#[protocol(var_contents)]` wire encoding per element).
    #[protocol(var_contents)]
    pub cell_id: std::borrow::Cow<'a, [u16]>,
}
#[derive(Clone, PartialEq, Debug, Encode, Decode)]
#[protocol(id = 6161)]
pub struct | <'a> {
pub area_id: i32,
pub at_least_nb_mount: i8,
pub at_least_nb_machine: i8,
#[protocol(var)]
pub max_price: u64,
pub order_by: u8,
pub _phantom: std::marker::PhantomData<&'a ()>,
}
/// Protocol message (id 6018) for the paddock sell/buy dialog.
#[derive(Clone, PartialEq, Debug, Encode, Decode)]
#[protocol(id = 6018)]
pub struct PaddockSellBuyDialogMessage<'a> {
    /// Flag distinguishing the two dialog modes — presumably true = sell.
    pub bsell: bool,
    #[protocol(var)]
    pub owner_id: u32,
    #[protocol(var)]
    pub price: u64,
    pub _phantom: std::marker::PhantomData<&'a ()>,
}
/// Protocol message (id 6138) delivering one page of paddocks for sale.
#[derive(Clone, PartialEq, Debug, Encode, Decode)]
#[protocol(id = 6138)]
pub struct PaddockToSellListMessage<'a> {
    /// Page delivered in this message.
    #[protocol(var)]
    pub page_index: u16,
    /// Total number of pages available.
    #[protocol(var)]
    pub total_page: u16,
    pub paddock_list: std::borrow::Cow<'a, [PaddockInformationsForSell<'a>]>,
}
| PaddockToSellFilterMessage | identifier_name |
mod.rs | use crate::types::game::paddock::PaddockInformationsForSell;
use crate::types::game::paddock::PaddockInstancesInformations;
use protocol_derive::{Decode, Encode};
#[derive(Clone, PartialEq, Debug, Encode, Decode)]
#[protocol(id = 5824)]
pub struct PaddockPropertiesMessage<'a> {
pub properties: PaddockInstancesInformations<'a>,
} | #[protocol(var)]
pub page_index: u16,
pub _phantom: std::marker::PhantomData<&'a ()>,
}
#[derive(Clone, PartialEq, Debug, Encode, Decode)]
#[protocol(id = 6026)]
pub struct GameDataPlayFarmObjectAnimationMessage<'a> {
#[protocol(var_contents)]
pub cell_id: std::borrow::Cow<'a, [u16]>,
}
#[derive(Clone, PartialEq, Debug, Encode, Decode)]
#[protocol(id = 6161)]
pub struct PaddockToSellFilterMessage<'a> {
pub area_id: i32,
pub at_least_nb_mount: i8,
pub at_least_nb_machine: i8,
#[protocol(var)]
pub max_price: u64,
pub order_by: u8,
pub _phantom: std::marker::PhantomData<&'a ()>,
}
#[derive(Clone, PartialEq, Debug, Encode, Decode)]
#[protocol(id = 6018)]
pub struct PaddockSellBuyDialogMessage<'a> {
pub bsell: bool,
#[protocol(var)]
pub owner_id: u32,
#[protocol(var)]
pub price: u64,
pub _phantom: std::marker::PhantomData<&'a ()>,
}
#[derive(Clone, PartialEq, Debug, Encode, Decode)]
#[protocol(id = 6138)]
pub struct PaddockToSellListMessage<'a> {
#[protocol(var)]
pub page_index: u16,
#[protocol(var)]
pub total_page: u16,
pub paddock_list: std::borrow::Cow<'a, [PaddockInformationsForSell<'a>]>,
} |
#[derive(Clone, PartialEq, Debug, Encode, Decode)]
#[protocol(id = 6141)]
pub struct PaddockToSellListRequestMessage<'a> { | random_line_split |
lighthouse-ext-background.js | /**
* @license Copyright 2018 Google Inc. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
*/
'use strict';
const background = require('./lighthouse-background');
const ExtensionProtocol = require('../../../lighthouse-core/gather/connections/extension');
const log = require('lighthouse-logger');
const assetSaver = require('../../../lighthouse-core/lib/asset-saver.js');
/** @typedef {import('../../../lighthouse-core/gather/connections/connection.js')} Connection */
const STORAGE_KEY = 'lighthouse_audits';
const SETTINGS_KEY = 'lighthouse_settings';
let lighthouseIsRunning = false;
/** @type {?[string, string, string]} */
let latestStatusLog = null;
/**
* Sets the extension badge text.
* @param {string=} optUrl If present, sets the badge text to "Testing <url>".
* Otherwise, restore the default badge text.
*/
function updateBadgeUI(optUrl) {
lighthouseIsRunning = !!optUrl;
if ('chrome' in window && chrome.runtime) {
const manifest = chrome.runtime.getManifest();
if (!manifest.browser_action || !manifest.browser_action.default_icon) {
return;
}
let title = manifest.browser_action.default_title || '';
let path = manifest.browser_action.default_icon[38];
if (lighthouseIsRunning) {
title = `Testing ${optUrl}`;
path = 'images/lh_logo_icon_light.png';
}
chrome.browserAction.setTitle({title});
chrome.browserAction.setIcon({path});
}
}
/**
 * Runs a full Lighthouse audit against the current browser tab and opens the
 * resulting HTML report in a new window.
 * @param {{flags: LH.Flags}} options Lighthouse options.
 * @param {Array<string>} categoryIDs Name values of categories to include.
 * @return {Promise<LH.RunnerResult|void>}
 */
async function runLighthouseInExtension(options, categoryIDs) {
  // Default to 'info' logging level.
  log.setLevel('info');
  const connection = new ExtensionProtocol();
  // The extension flow always renders an HTML report, regardless of caller flags.
  options.flags = Object.assign({}, options.flags, {output: 'html'});
  const url = await connection.getCurrentTabURL();
  // updateBadgeUI is passed as a status callback so the toolbar icon reflects
  // the in-progress run.
  const runnerResult = await background.runLighthouseForConnection(connection, url, options,
    categoryIDs, updateBadgeUI);
  if (!runnerResult) {
    // For now, should always be a runnerResult as the extension can't do `gatherMode`
    throw new Error('no runnerResult generated by Lighthouse');
  }
  const blobURL = createReportPageAsBlob(runnerResult);
  // Resolve only once the report window has actually been created.
  await new Promise(resolve => chrome.windows.create({url: blobURL}, resolve));
}
/**
 * Run lighthouse for connection and provide similar results as in CLI.
 * @param {Connection} connection
 * @param {string} url
 * @param {{flags: LH.Flags} & {outputFormat: string, logAssets: boolean}} options Lighthouse options.
 *   Specify outputFormat to change the output format.
 * @param {Array<string>} categoryIDs Name values of categories to include.
 * @return {Promise<string|Array<string>|void>}
 */
async function runLighthouseAsInCLI(connection, url, options, categoryIDs) {
  log.setLevel('info');
  // Honor the caller-requested output format instead of forcing HTML.
  options.flags = Object.assign({}, options.flags, {output: options.outputFormat});
  const results = await background.runLighthouseForConnection(connection, url, options,
    categoryIDs);
  if (results) {
    if (options && options.logAssets) {
      // Mirror the CLI's --save-assets behavior: persist gathered artifacts.
      await assetSaver.logAssets(results.artifacts, results.lhr.audits);
    }
    return results.report;
  }
  // NOTE(review): returns undefined when no results were produced — callers
  // appear to tolerate this (return type includes void).
}
/**
* @param {LH.RunnerResult} runnerResult Lighthouse results object
* @return {string} Blob URL of the report (or error page) HTML
*/
function | (runnerResult) {
performance.mark('report-start');
const html = runnerResult.report;
const blob = new Blob([html], {type: 'text/html'});
const blobURL = URL.createObjectURL(blob);
performance.mark('report-end');
performance.measure('generate report', 'report-start', 'report-end');
return blobURL;
}
/**
* Save currently selected set of category categories to local storage.
* @param {{selectedCategories: Array<string>, useDevTools: boolean}} settings
*/
function saveSettings(settings) {
const storage = {
[STORAGE_KEY]: {},
[SETTINGS_KEY]: {},
};
// Stash selected categories.
background.getDefaultCategories().forEach(category => {
storage[STORAGE_KEY][category.id] = settings.selectedCategories.includes(category.id);
});
// Stash throttling setting.
storage[SETTINGS_KEY].useDevTools = settings.useDevTools;
// Save object to chrome local storage.
chrome.storage.local.set(storage);
}
/**
* Load selected category categories from local storage.
* @return {Promise<{selectedCategories: Array<string>, useDevTools: boolean}>}
*/
function loadSettings() {
return new Promise(resolve => {
// Protip: debug what's in storage with:
// chrome.storage.local.get(['lighthouse_audits'], console.log)
chrome.storage.local.get([STORAGE_KEY, SETTINGS_KEY], result => {
// Start with list of all default categories set to true so list is
// always up to date.
const defaultCategories = {};
background.getDefaultCategories().forEach(category => {
defaultCategories[category.id] = true;
});
// Load saved categories and settings, overwriting defaults with any
// saved selections.
const savedCategories = Object.assign(defaultCategories, result[STORAGE_KEY]);
const defaultSettings = {
useDevTools: false,
};
const savedSettings = Object.assign(defaultSettings, result[SETTINGS_KEY]);
resolve({
useDevTools: !!savedSettings.useDevTools,
selectedCategories: Object.keys(savedCategories).filter(cat => savedCategories[cat]),
});
});
});
}
/** @param {(status: [string, string, string]) => void} listenCallback */
function listenForStatus(listenCallback) {
log.events.addListener('status', function(log) {
latestStatusLog = log;
listenCallback(log);
});
// Show latest saved status log to give immediate feedback
// when reopening the popup message when lighthouse is running
if (lighthouseIsRunning && latestStatusLog) {
listenCallback(latestStatusLog);
}
}
function isRunning() {
return lighthouseIsRunning;
}
// Run when in extension context, but not in devtools.
if ('chrome' in window && chrome.runtime) {
chrome.runtime.onInstalled.addListener(details => {
if (details.previousVersion) {
// eslint-disable-next-line no-console
console.log('previousVersion', details.previousVersion);
}
});
}
if (typeof module !== 'undefined' && module.exports) {
// Export for popup.js to import types. We don't want tsc to infer an index
// type, so use exports instead of module.exports.
exports.runLighthouseInExtension = runLighthouseInExtension;
exports.getDefaultCategories = background.getDefaultCategories;
exports.isRunning = isRunning;
exports.listenForStatus = listenForStatus;
exports.saveSettings = saveSettings;
exports.loadSettings = loadSettings;
}
// Expose on window for extension, other consumers of file.
// @ts-ignore
window.runLighthouseInExtension = runLighthouseInExtension;
// @ts-ignore
window.runLighthouseAsInCLI = runLighthouseAsInCLI;
// @ts-ignore
window.getDefaultCategories = background.getDefaultCategories;
// @ts-ignore
window.isRunning = isRunning;
// @ts-ignore
window.listenForStatus = listenForStatus;
// @ts-ignore
window.loadSettings = loadSettings;
// @ts-ignore
window.saveSettings = saveSettings;
| createReportPageAsBlob | identifier_name |
lighthouse-ext-background.js | /**
* @license Copyright 2018 Google Inc. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
*/
'use strict';
const background = require('./lighthouse-background');
const ExtensionProtocol = require('../../../lighthouse-core/gather/connections/extension');
const log = require('lighthouse-logger');
const assetSaver = require('../../../lighthouse-core/lib/asset-saver.js');
/** @typedef {import('../../../lighthouse-core/gather/connections/connection.js')} Connection */
const STORAGE_KEY = 'lighthouse_audits';
const SETTINGS_KEY = 'lighthouse_settings';
let lighthouseIsRunning = false;
/** @type {?[string, string, string]} */
let latestStatusLog = null;
/**
* Sets the extension badge text.
* @param {string=} optUrl If present, sets the badge text to "Testing <url>".
* Otherwise, restore the default badge text.
*/
function updateBadgeUI(optUrl) {
lighthouseIsRunning = !!optUrl;
if ('chrome' in window && chrome.runtime) {
const manifest = chrome.runtime.getManifest();
if (!manifest.browser_action || !manifest.browser_action.default_icon) {
return;
}
let title = manifest.browser_action.default_title || '';
let path = manifest.browser_action.default_icon[38];
if (lighthouseIsRunning) {
title = `Testing ${optUrl}`;
path = 'images/lh_logo_icon_light.png';
}
chrome.browserAction.setTitle({title});
chrome.browserAction.setIcon({path});
}
}
/**
* @param {{flags: LH.Flags}} options Lighthouse options.
* @param {Array<string>} categoryIDs Name values of categories to include.
* @return {Promise<LH.RunnerResult|void>}
*/
async function runLighthouseInExtension(options, categoryIDs) {
// Default to 'info' logging level.
log.setLevel('info');
const connection = new ExtensionProtocol();
options.flags = Object.assign({}, options.flags, {output: 'html'});
const url = await connection.getCurrentTabURL();
const runnerResult = await background.runLighthouseForConnection(connection, url, options,
categoryIDs, updateBadgeUI);
if (!runnerResult) {
// For now, should always be a runnerResult as the extension can't do `gatherMode`
throw new Error('no runnerResult generated by Lighthouse');
}
const blobURL = createReportPageAsBlob(runnerResult);
await new Promise(resolve => chrome.windows.create({url: blobURL}, resolve));
}
/**
* Run lighthouse for connection and provide similar results as in CLI.
* @param {Connection} connection
* @param {string} url
* @param {{flags: LH.Flags} & {outputFormat: string, logAssets: boolean}} options Lighthouse options.
Specify outputFormat to change the output format.
* @param {Array<string>} categoryIDs Name values of categories to include.
* @return {Promise<string|Array<string>|void>}
*/
async function runLighthouseAsInCLI(connection, url, options, categoryIDs) {
log.setLevel('info');
options.flags = Object.assign({}, options.flags, {output: options.outputFormat});
const results = await background.runLighthouseForConnection(connection, url, options,
categoryIDs);
if (results) {
if (options && options.logAssets) |
return results.report;
}
}
/**
* @param {LH.RunnerResult} runnerResult Lighthouse results object
* @return {string} Blob URL of the report (or error page) HTML
*/
function createReportPageAsBlob(runnerResult) {
performance.mark('report-start');
const html = runnerResult.report;
const blob = new Blob([html], {type: 'text/html'});
const blobURL = URL.createObjectURL(blob);
performance.mark('report-end');
performance.measure('generate report', 'report-start', 'report-end');
return blobURL;
}
/**
* Save currently selected set of category categories to local storage.
* @param {{selectedCategories: Array<string>, useDevTools: boolean}} settings
*/
function saveSettings(settings) {
const storage = {
[STORAGE_KEY]: {},
[SETTINGS_KEY]: {},
};
// Stash selected categories.
background.getDefaultCategories().forEach(category => {
storage[STORAGE_KEY][category.id] = settings.selectedCategories.includes(category.id);
});
// Stash throttling setting.
storage[SETTINGS_KEY].useDevTools = settings.useDevTools;
// Save object to chrome local storage.
chrome.storage.local.set(storage);
}
/**
* Load selected category categories from local storage.
* @return {Promise<{selectedCategories: Array<string>, useDevTools: boolean}>}
*/
function loadSettings() {
return new Promise(resolve => {
// Protip: debug what's in storage with:
// chrome.storage.local.get(['lighthouse_audits'], console.log)
chrome.storage.local.get([STORAGE_KEY, SETTINGS_KEY], result => {
// Start with list of all default categories set to true so list is
// always up to date.
const defaultCategories = {};
background.getDefaultCategories().forEach(category => {
defaultCategories[category.id] = true;
});
// Load saved categories and settings, overwriting defaults with any
// saved selections.
const savedCategories = Object.assign(defaultCategories, result[STORAGE_KEY]);
const defaultSettings = {
useDevTools: false,
};
const savedSettings = Object.assign(defaultSettings, result[SETTINGS_KEY]);
resolve({
useDevTools: !!savedSettings.useDevTools,
selectedCategories: Object.keys(savedCategories).filter(cat => savedCategories[cat]),
});
});
});
}
/** @param {(status: [string, string, string]) => void} listenCallback */
function listenForStatus(listenCallback) {
log.events.addListener('status', function(log) {
latestStatusLog = log;
listenCallback(log);
});
// Show latest saved status log to give immediate feedback
// when reopening the popup message when lighthouse is running
if (lighthouseIsRunning && latestStatusLog) {
listenCallback(latestStatusLog);
}
}
function isRunning() {
return lighthouseIsRunning;
}
// Run when in extension context, but not in devtools.
if ('chrome' in window && chrome.runtime) {
chrome.runtime.onInstalled.addListener(details => {
if (details.previousVersion) {
// eslint-disable-next-line no-console
console.log('previousVersion', details.previousVersion);
}
});
}
if (typeof module !== 'undefined' && module.exports) {
// Export for popup.js to import types. We don't want tsc to infer an index
// type, so use exports instead of module.exports.
exports.runLighthouseInExtension = runLighthouseInExtension;
exports.getDefaultCategories = background.getDefaultCategories;
exports.isRunning = isRunning;
exports.listenForStatus = listenForStatus;
exports.saveSettings = saveSettings;
exports.loadSettings = loadSettings;
}
// Expose on window for extension, other consumers of file.
// @ts-ignore
window.runLighthouseInExtension = runLighthouseInExtension;
// @ts-ignore
window.runLighthouseAsInCLI = runLighthouseAsInCLI;
// @ts-ignore
window.getDefaultCategories = background.getDefaultCategories;
// @ts-ignore
window.isRunning = isRunning;
// @ts-ignore
window.listenForStatus = listenForStatus;
// @ts-ignore
window.loadSettings = loadSettings;
// @ts-ignore
window.saveSettings = saveSettings;
| {
await assetSaver.logAssets(results.artifacts, results.lhr.audits);
} | conditional_block |
lighthouse-ext-background.js | /**
* @license Copyright 2018 Google Inc. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
*/
'use strict';
const background = require('./lighthouse-background');
const ExtensionProtocol = require('../../../lighthouse-core/gather/connections/extension');
const log = require('lighthouse-logger');
const assetSaver = require('../../../lighthouse-core/lib/asset-saver.js');
/** @typedef {import('../../../lighthouse-core/gather/connections/connection.js')} Connection */
const STORAGE_KEY = 'lighthouse_audits';
const SETTINGS_KEY = 'lighthouse_settings';
let lighthouseIsRunning = false;
/** @type {?[string, string, string]} */
let latestStatusLog = null;
/**
* Sets the extension badge text.
* @param {string=} optUrl If present, sets the badge text to "Testing <url>".
* Otherwise, restore the default badge text.
*/
function updateBadgeUI(optUrl) {
lighthouseIsRunning = !!optUrl;
if ('chrome' in window && chrome.runtime) {
const manifest = chrome.runtime.getManifest();
if (!manifest.browser_action || !manifest.browser_action.default_icon) {
return;
}
let title = manifest.browser_action.default_title || '';
let path = manifest.browser_action.default_icon[38];
if (lighthouseIsRunning) {
title = `Testing ${optUrl}`;
path = 'images/lh_logo_icon_light.png';
}
chrome.browserAction.setTitle({title});
chrome.browserAction.setIcon({path});
}
}
/**
* @param {{flags: LH.Flags}} options Lighthouse options.
* @param {Array<string>} categoryIDs Name values of categories to include.
* @return {Promise<LH.RunnerResult|void>}
*/
async function runLighthouseInExtension(options, categoryIDs) {
// Default to 'info' logging level.
log.setLevel('info');
const connection = new ExtensionProtocol();
options.flags = Object.assign({}, options.flags, {output: 'html'});
const url = await connection.getCurrentTabURL();
const runnerResult = await background.runLighthouseForConnection(connection, url, options,
categoryIDs, updateBadgeUI);
if (!runnerResult) {
// For now, should always be a runnerResult as the extension can't do `gatherMode`
throw new Error('no runnerResult generated by Lighthouse');
}
const blobURL = createReportPageAsBlob(runnerResult);
await new Promise(resolve => chrome.windows.create({url: blobURL}, resolve));
}
/**
* Run lighthouse for connection and provide similar results as in CLI.
* @param {Connection} connection
* @param {string} url
* @param {{flags: LH.Flags} & {outputFormat: string, logAssets: boolean}} options Lighthouse options.
Specify outputFormat to change the output format.
* @param {Array<string>} categoryIDs Name values of categories to include.
* @return {Promise<string|Array<string>|void>}
*/
async function runLighthouseAsInCLI(connection, url, options, categoryIDs) {
log.setLevel('info');
options.flags = Object.assign({}, options.flags, {output: options.outputFormat});
const results = await background.runLighthouseForConnection(connection, url, options,
categoryIDs);
if (results) {
if (options && options.logAssets) {
await assetSaver.logAssets(results.artifacts, results.lhr.audits);
}
return results.report;
}
}
/**
* @param {LH.RunnerResult} runnerResult Lighthouse results object
* @return {string} Blob URL of the report (or error page) HTML
*/
function createReportPageAsBlob(runnerResult) {
performance.mark('report-start');
const html = runnerResult.report;
const blob = new Blob([html], {type: 'text/html'});
const blobURL = URL.createObjectURL(blob);
performance.mark('report-end');
performance.measure('generate report', 'report-start', 'report-end');
return blobURL;
}
/**
* Save currently selected set of category categories to local storage.
* @param {{selectedCategories: Array<string>, useDevTools: boolean}} settings
*/
function saveSettings(settings) {
const storage = {
[STORAGE_KEY]: {},
[SETTINGS_KEY]: {},
};
// Stash selected categories.
background.getDefaultCategories().forEach(category => {
storage[STORAGE_KEY][category.id] = settings.selectedCategories.includes(category.id);
});
// Stash throttling setting.
storage[SETTINGS_KEY].useDevTools = settings.useDevTools;
// Save object to chrome local storage.
chrome.storage.local.set(storage);
}
/**
* Load selected category categories from local storage.
* @return {Promise<{selectedCategories: Array<string>, useDevTools: boolean}>}
*/
function loadSettings() {
return new Promise(resolve => {
// Protip: debug what's in storage with:
// chrome.storage.local.get(['lighthouse_audits'], console.log)
chrome.storage.local.get([STORAGE_KEY, SETTINGS_KEY], result => {
// Start with list of all default categories set to true so list is
// always up to date.
const defaultCategories = {};
background.getDefaultCategories().forEach(category => {
defaultCategories[category.id] = true;
});
// Load saved categories and settings, overwriting defaults with any
// saved selections.
const savedCategories = Object.assign(defaultCategories, result[STORAGE_KEY]);
const defaultSettings = {
useDevTools: false,
};
const savedSettings = Object.assign(defaultSettings, result[SETTINGS_KEY]);
resolve({
useDevTools: !!savedSettings.useDevTools,
selectedCategories: Object.keys(savedCategories).filter(cat => savedCategories[cat]),
});
});
});
}
/** @param {(status: [string, string, string]) => void} listenCallback */
function listenForStatus(listenCallback) {
log.events.addListener('status', function(log) {
latestStatusLog = log;
listenCallback(log);
});
// Show latest saved status log to give immediate feedback
// when reopening the popup message when lighthouse is running
if (lighthouseIsRunning && latestStatusLog) {
listenCallback(latestStatusLog);
}
}
function isRunning() {
return lighthouseIsRunning;
}
// Run when in extension context, but not in devtools.
if ('chrome' in window && chrome.runtime) {
chrome.runtime.onInstalled.addListener(details => {
if (details.previousVersion) {
// eslint-disable-next-line no-console
console.log('previousVersion', details.previousVersion);
}
});
}
if (typeof module !== 'undefined' && module.exports) {
// Export for popup.js to import types. We don't want tsc to infer an index
// type, so use exports instead of module.exports.
exports.runLighthouseInExtension = runLighthouseInExtension;
exports.getDefaultCategories = background.getDefaultCategories;
exports.isRunning = isRunning;
exports.listenForStatus = listenForStatus;
exports.saveSettings = saveSettings;
exports.loadSettings = loadSettings;
}
// Expose on window for extension, other consumers of file.
// @ts-ignore
window.runLighthouseInExtension = runLighthouseInExtension;
// @ts-ignore
window.runLighthouseAsInCLI = runLighthouseAsInCLI;
// @ts-ignore
window.getDefaultCategories = background.getDefaultCategories;
// @ts-ignore
window.isRunning = isRunning; | // @ts-ignore
window.loadSettings = loadSettings;
// @ts-ignore
window.saveSettings = saveSettings; | // @ts-ignore
window.listenForStatus = listenForStatus; | random_line_split |
lighthouse-ext-background.js | /**
* @license Copyright 2018 Google Inc. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
*/
'use strict';
const background = require('./lighthouse-background');
const ExtensionProtocol = require('../../../lighthouse-core/gather/connections/extension');
const log = require('lighthouse-logger');
const assetSaver = require('../../../lighthouse-core/lib/asset-saver.js');
/** @typedef {import('../../../lighthouse-core/gather/connections/connection.js')} Connection */
const STORAGE_KEY = 'lighthouse_audits';
const SETTINGS_KEY = 'lighthouse_settings';
let lighthouseIsRunning = false;
/** @type {?[string, string, string]} */
let latestStatusLog = null;
/**
* Sets the extension badge text.
* @param {string=} optUrl If present, sets the badge text to "Testing <url>".
* Otherwise, restore the default badge text.
*/
function updateBadgeUI(optUrl) {
lighthouseIsRunning = !!optUrl;
if ('chrome' in window && chrome.runtime) {
const manifest = chrome.runtime.getManifest();
if (!manifest.browser_action || !manifest.browser_action.default_icon) {
return;
}
let title = manifest.browser_action.default_title || '';
let path = manifest.browser_action.default_icon[38];
if (lighthouseIsRunning) {
title = `Testing ${optUrl}`;
path = 'images/lh_logo_icon_light.png';
}
chrome.browserAction.setTitle({title});
chrome.browserAction.setIcon({path});
}
}
/**
* @param {{flags: LH.Flags}} options Lighthouse options.
* @param {Array<string>} categoryIDs Name values of categories to include.
* @return {Promise<LH.RunnerResult|void>}
*/
async function runLighthouseInExtension(options, categoryIDs) {
// Default to 'info' logging level.
log.setLevel('info');
const connection = new ExtensionProtocol();
options.flags = Object.assign({}, options.flags, {output: 'html'});
const url = await connection.getCurrentTabURL();
const runnerResult = await background.runLighthouseForConnection(connection, url, options,
categoryIDs, updateBadgeUI);
if (!runnerResult) {
// For now, should always be a runnerResult as the extension can't do `gatherMode`
throw new Error('no runnerResult generated by Lighthouse');
}
const blobURL = createReportPageAsBlob(runnerResult);
await new Promise(resolve => chrome.windows.create({url: blobURL}, resolve));
}
/**
* Run lighthouse for connection and provide similar results as in CLI.
* @param {Connection} connection
* @param {string} url
* @param {{flags: LH.Flags} & {outputFormat: string, logAssets: boolean}} options Lighthouse options.
Specify outputFormat to change the output format.
* @param {Array<string>} categoryIDs Name values of categories to include.
* @return {Promise<string|Array<string>|void>}
*/
async function runLighthouseAsInCLI(connection, url, options, categoryIDs) {
log.setLevel('info');
options.flags = Object.assign({}, options.flags, {output: options.outputFormat});
const results = await background.runLighthouseForConnection(connection, url, options,
categoryIDs);
if (results) {
if (options && options.logAssets) {
await assetSaver.logAssets(results.artifacts, results.lhr.audits);
}
return results.report;
}
}
/**
* @param {LH.RunnerResult} runnerResult Lighthouse results object
* @return {string} Blob URL of the report (or error page) HTML
*/
function createReportPageAsBlob(runnerResult) {
performance.mark('report-start');
const html = runnerResult.report;
const blob = new Blob([html], {type: 'text/html'});
const blobURL = URL.createObjectURL(blob);
performance.mark('report-end');
performance.measure('generate report', 'report-start', 'report-end');
return blobURL;
}
/**
* Save currently selected set of category categories to local storage.
* @param {{selectedCategories: Array<string>, useDevTools: boolean}} settings
*/
function saveSettings(settings) {
const storage = {
[STORAGE_KEY]: {},
[SETTINGS_KEY]: {},
};
// Stash selected categories.
background.getDefaultCategories().forEach(category => {
storage[STORAGE_KEY][category.id] = settings.selectedCategories.includes(category.id);
});
// Stash throttling setting.
storage[SETTINGS_KEY].useDevTools = settings.useDevTools;
// Save object to chrome local storage.
chrome.storage.local.set(storage);
}
/**
* Load selected category categories from local storage.
* @return {Promise<{selectedCategories: Array<string>, useDevTools: boolean}>}
*/
function loadSettings() |
/** @param {(status: [string, string, string]) => void} listenCallback */
function listenForStatus(listenCallback) {
log.events.addListener('status', function(log) {
latestStatusLog = log;
listenCallback(log);
});
// Show latest saved status log to give immediate feedback
// when reopening the popup message when lighthouse is running
if (lighthouseIsRunning && latestStatusLog) {
listenCallback(latestStatusLog);
}
}
function isRunning() {
return lighthouseIsRunning;
}
// Run when in extension context, but not in devtools.
if ('chrome' in window && chrome.runtime) {
chrome.runtime.onInstalled.addListener(details => {
if (details.previousVersion) {
// eslint-disable-next-line no-console
console.log('previousVersion', details.previousVersion);
}
});
}
if (typeof module !== 'undefined' && module.exports) {
// Export for popup.js to import types. We don't want tsc to infer an index
// type, so use exports instead of module.exports.
exports.runLighthouseInExtension = runLighthouseInExtension;
exports.getDefaultCategories = background.getDefaultCategories;
exports.isRunning = isRunning;
exports.listenForStatus = listenForStatus;
exports.saveSettings = saveSettings;
exports.loadSettings = loadSettings;
}
// Expose on window for extension, other consumers of file.
// @ts-ignore
window.runLighthouseInExtension = runLighthouseInExtension;
// @ts-ignore
window.runLighthouseAsInCLI = runLighthouseAsInCLI;
// @ts-ignore
window.getDefaultCategories = background.getDefaultCategories;
// @ts-ignore
window.isRunning = isRunning;
// @ts-ignore
window.listenForStatus = listenForStatus;
// @ts-ignore
window.loadSettings = loadSettings;
// @ts-ignore
window.saveSettings = saveSettings;
| {
return new Promise(resolve => {
// Protip: debug what's in storage with:
// chrome.storage.local.get(['lighthouse_audits'], console.log)
chrome.storage.local.get([STORAGE_KEY, SETTINGS_KEY], result => {
// Start with list of all default categories set to true so list is
// always up to date.
const defaultCategories = {};
background.getDefaultCategories().forEach(category => {
defaultCategories[category.id] = true;
});
// Load saved categories and settings, overwriting defaults with any
// saved selections.
const savedCategories = Object.assign(defaultCategories, result[STORAGE_KEY]);
const defaultSettings = {
useDevTools: false,
};
const savedSettings = Object.assign(defaultSettings, result[SETTINGS_KEY]);
resolve({
useDevTools: !!savedSettings.useDevTools,
selectedCategories: Object.keys(savedCategories).filter(cat => savedCategories[cat]),
});
});
});
} | identifier_body |
shared-libs.module.ts | import { NgModule } from '@angular/core';
import { FormsModule } from '@angular/forms';
import { CommonModule } from '@angular/common';
import { NgbModule } from '@ng-bootstrap/ng-bootstrap';
import { NgJhipsterModule } from 'ng-jhipster';
import { InfiniteScrollModule } from 'ngx-infinite-scroll';
import { CookieModule } from 'ngx-cookie';
import { FontAwesomeModule } from '@fortawesome/angular-fontawesome';
@NgModule({
imports: [
NgbModule.forRoot(),
NgJhipsterModule.forRoot({
// set below to true to make alerts look like toast
alertAsToast: false,
alertTimeout: 5000,
i18nEnabled: true,
defaultI18nLang: 'en'
}),
InfiniteScrollModule,
CookieModule.forRoot(),
FontAwesomeModule
],
exports: [FormsModule, CommonModule, NgbModule, NgJhipsterModule, InfiniteScrollModule, FontAwesomeModule]
})
export class | {}
| GatewaySharedLibsModule | identifier_name |
shared-libs.module.ts | import { CommonModule } from '@angular/common';
import { NgbModule } from '@ng-bootstrap/ng-bootstrap';
import { NgJhipsterModule } from 'ng-jhipster';
import { InfiniteScrollModule } from 'ngx-infinite-scroll';
import { CookieModule } from 'ngx-cookie';
import { FontAwesomeModule } from '@fortawesome/angular-fontawesome';
@NgModule({
imports: [
NgbModule.forRoot(),
NgJhipsterModule.forRoot({
// set below to true to make alerts look like toast
alertAsToast: false,
alertTimeout: 5000,
i18nEnabled: true,
defaultI18nLang: 'en'
}),
InfiniteScrollModule,
CookieModule.forRoot(),
FontAwesomeModule
],
exports: [FormsModule, CommonModule, NgbModule, NgJhipsterModule, InfiniteScrollModule, FontAwesomeModule]
})
export class GatewaySharedLibsModule {} | import { NgModule } from '@angular/core';
import { FormsModule } from '@angular/forms'; | random_line_split | |
SmilesDupeFilter.py | # $Id$
#
# Copyright (C) 2003 Rational Discovery LLC
# All Rights Reserved
#
from rdkit import RDConfig
from rdkit import six
import sys, os
from rdkit import Chem
from rdkit.VLib.Filter import FilterNode
class | (FilterNode):
""" canonical-smiles based duplicate filter
Assumptions:
- inputs are molecules
Sample Usage:
>>> from rdkit.VLib.NodeLib.SDSupply import SDSupplyNode
>>> fileN = os.path.join(RDConfig.RDCodeDir,'VLib','NodeLib',\
'test_data','NCI_aids.10.sdf')
>>> suppl = SDSupplyNode(fileN)
>>> filt = DupeFilter()
>>> filt.AddParent(suppl)
>>> ms = [x for x in filt]
>>> len(ms)
10
>>> ms[0].GetProp("_Name")
'48'
>>> ms[1].GetProp("_Name")
'78'
>>> filt.reset()
>>> filt.next().GetProp("_Name")
'48'
"""
def __init__(self, **kwargs):
FilterNode.__init__(self, func=self.filter, **kwargs)
self._smisSeen = []
def reset(self):
FilterNode.reset(self)
self._smisSeen = []
def filter(self, cmpd):
smi = Chem.MolToSmiles(cmpd)
if smi not in self._smisSeen:
self._smisSeen.append(smi)
return 1
else:
return 0
#------------------------------------
#
# doctest boilerplate
#
def _test():
import doctest, sys
return doctest.testmod(sys.modules["__main__"])
if __name__ == '__main__':
import sys
failed, tried = _test()
sys.exit(failed)
| DupeFilter | identifier_name |
SmilesDupeFilter.py | # $Id$
#
# Copyright (C) 2003 Rational Discovery LLC
# All Rights Reserved
#
from rdkit import RDConfig
from rdkit import six
import sys, os
from rdkit import Chem
from rdkit.VLib.Filter import FilterNode
class DupeFilter(FilterNode):
""" canonical-smiles based duplicate filter
Assumptions:
- inputs are molecules
Sample Usage:
>>> from rdkit.VLib.NodeLib.SDSupply import SDSupplyNode
>>> fileN = os.path.join(RDConfig.RDCodeDir,'VLib','NodeLib',\
'test_data','NCI_aids.10.sdf')
>>> suppl = SDSupplyNode(fileN)
>>> filt = DupeFilter()
>>> filt.AddParent(suppl)
>>> ms = [x for x in filt]
>>> len(ms)
10
>>> ms[0].GetProp("_Name")
'48'
>>> ms[1].GetProp("_Name")
'78'
>>> filt.reset()
>>> filt.next().GetProp("_Name")
'48'
"""
def __init__(self, **kwargs):
FilterNode.__init__(self, func=self.filter, **kwargs)
self._smisSeen = []
def reset(self):
|
def filter(self, cmpd):
smi = Chem.MolToSmiles(cmpd)
if smi not in self._smisSeen:
self._smisSeen.append(smi)
return 1
else:
return 0
#------------------------------------
#
# doctest boilerplate
#
def _test():
import doctest, sys
return doctest.testmod(sys.modules["__main__"])
if __name__ == '__main__':
import sys
failed, tried = _test()
sys.exit(failed)
| FilterNode.reset(self)
self._smisSeen = [] | identifier_body |
SmilesDupeFilter.py | # $Id$
#
# Copyright (C) 2003 Rational Discovery LLC
# All Rights Reserved
#
from rdkit import RDConfig
from rdkit import six
import sys, os
from rdkit import Chem
from rdkit.VLib.Filter import FilterNode
class DupeFilter(FilterNode):
""" canonical-smiles based duplicate filter
Assumptions:
- inputs are molecules
Sample Usage:
>>> from rdkit.VLib.NodeLib.SDSupply import SDSupplyNode
>>> fileN = os.path.join(RDConfig.RDCodeDir,'VLib','NodeLib',\
'test_data','NCI_aids.10.sdf')
>>> suppl = SDSupplyNode(fileN)
>>> filt = DupeFilter()
>>> filt.AddParent(suppl)
>>> ms = [x for x in filt]
>>> len(ms)
10
>>> ms[0].GetProp("_Name")
'48'
>>> ms[1].GetProp("_Name")
'78'
>>> filt.reset()
>>> filt.next().GetProp("_Name")
'48'
"""
def __init__(self, **kwargs):
FilterNode.__init__(self, func=self.filter, **kwargs)
self._smisSeen = []
def reset(self):
FilterNode.reset(self)
self._smisSeen = []
def filter(self, cmpd):
smi = Chem.MolToSmiles(cmpd)
if smi not in self._smisSeen:
self._smisSeen.append(smi)
return 1
else:
return 0
#------------------------------------
#
# doctest boilerplate
#
def _test():
import doctest, sys |
if __name__ == '__main__':
import sys
failed, tried = _test()
sys.exit(failed) | return doctest.testmod(sys.modules["__main__"])
| random_line_split |
SmilesDupeFilter.py | # $Id$
#
# Copyright (C) 2003 Rational Discovery LLC
# All Rights Reserved
#
from rdkit import RDConfig
from rdkit import six
import sys, os
from rdkit import Chem
from rdkit.VLib.Filter import FilterNode
class DupeFilter(FilterNode):
""" canonical-smiles based duplicate filter
Assumptions:
- inputs are molecules
Sample Usage:
>>> from rdkit.VLib.NodeLib.SDSupply import SDSupplyNode
>>> fileN = os.path.join(RDConfig.RDCodeDir,'VLib','NodeLib',\
'test_data','NCI_aids.10.sdf')
>>> suppl = SDSupplyNode(fileN)
>>> filt = DupeFilter()
>>> filt.AddParent(suppl)
>>> ms = [x for x in filt]
>>> len(ms)
10
>>> ms[0].GetProp("_Name")
'48'
>>> ms[1].GetProp("_Name")
'78'
>>> filt.reset()
>>> filt.next().GetProp("_Name")
'48'
"""
def __init__(self, **kwargs):
FilterNode.__init__(self, func=self.filter, **kwargs)
self._smisSeen = []
def reset(self):
FilterNode.reset(self)
self._smisSeen = []
def filter(self, cmpd):
smi = Chem.MolToSmiles(cmpd)
if smi not in self._smisSeen:
self._smisSeen.append(smi)
return 1
else:
|
#------------------------------------
#
# doctest boilerplate
#
def _test():
import doctest, sys
return doctest.testmod(sys.modules["__main__"])
if __name__ == '__main__':
import sys
failed, tried = _test()
sys.exit(failed)
| return 0 | conditional_block |
lib.rs | // Tifflin OS - Asynchronous common interface
// - By John Hodge (thePowersGang)
//
//
//! Asynchronous waiting support
#[macro_use]
extern crate syscalls;
/// Trait for types that can be used for 'idle_loop'
pub trait WaitController
{
fn get_count(&self) -> usize;
fn populate(&self, cb: &mut FnMut(::syscalls::WaitItem));
fn handle(&mut self, events: &[::syscalls::WaitItem]);
}
/// Idle, handling events on each WaitController passed
pub fn | (items: &mut [&mut WaitController])
{
let mut objects = Vec::new();
loop {
let count = items.iter().fold(0, |sum,ctrlr| sum + ctrlr.get_count());
objects.reserve( count );
for ctrlr in items.iter() {
ctrlr.populate(&mut |wi| objects.push(wi));
}
::syscalls::threads::wait(&mut objects, !0);
let mut ofs = 0;
for ctrlr in items.iter_mut()
{
let num = ctrlr.get_count();
ctrlr.handle( &objects[ofs .. ofs + num] );
ofs += num;
}
objects.clear();
}
}
| idle_loop | identifier_name |
lib.rs | // Tifflin OS - Asynchronous common interface
// - By John Hodge (thePowersGang)
//
//
//! Asynchronous waiting support
#[macro_use]
extern crate syscalls;
/// Trait for types that can be used for 'idle_loop'
pub trait WaitController
{
fn get_count(&self) -> usize;
fn populate(&self, cb: &mut FnMut(::syscalls::WaitItem));
fn handle(&mut self, events: &[::syscalls::WaitItem]);
}
/// Idle, handling events on each WaitController passed
pub fn idle_loop(items: &mut [&mut WaitController]) | let count = items.iter().fold(0, |sum,ctrlr| sum + ctrlr.get_count());
objects.reserve( count );
for ctrlr in items.iter() {
ctrlr.populate(&mut |wi| objects.push(wi));
}
::syscalls::threads::wait(&mut objects, !0);
let mut ofs = 0;
for ctrlr in items.iter_mut()
{
let num = ctrlr.get_count();
ctrlr.handle( &objects[ofs .. ofs + num] );
ofs += num;
}
objects.clear();
}
} | {
let mut objects = Vec::new();
loop { | random_line_split |
lib.rs | // Tifflin OS - Asynchronous common interface
// - By John Hodge (thePowersGang)
//
//
//! Asynchronous waiting support
#[macro_use]
extern crate syscalls;
/// Trait for types that can be used for 'idle_loop'
pub trait WaitController
{
fn get_count(&self) -> usize;
fn populate(&self, cb: &mut FnMut(::syscalls::WaitItem));
fn handle(&mut self, events: &[::syscalls::WaitItem]);
}
/// Idle, handling events on each WaitController passed
pub fn idle_loop(items: &mut [&mut WaitController])
| {
let mut objects = Vec::new();
loop {
let count = items.iter().fold(0, |sum,ctrlr| sum + ctrlr.get_count());
objects.reserve( count );
for ctrlr in items.iter() {
ctrlr.populate(&mut |wi| objects.push(wi));
}
::syscalls::threads::wait(&mut objects, !0);
let mut ofs = 0;
for ctrlr in items.iter_mut()
{
let num = ctrlr.get_count();
ctrlr.handle( &objects[ofs .. ofs + num] );
ofs += num;
}
objects.clear();
}
} | identifier_body | |
metadata.rs | // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! This module contains utilities for outputting metadata for diagnostic errors.
//!
//! Each set of errors is mapped to a metadata file by a name, which is
//! currently always a crate name.
use std::collections::BTreeMap;
use std::path::PathBuf;
use std::fs::{remove_file, create_dir_all, File};
use std::error::Error;
use syntax_pos::Span;
use ext::base::ExtCtxt;
use diagnostics::plugin::{ErrorMap, ErrorInfo};
use serde_json;
// Default metadata directory to use for extended error JSON.
const ERROR_METADATA_PREFIX: &'static str = "tmp/extended-errors";
/// JSON encodable/decodable version of `ErrorInfo`.
#[derive(PartialEq, Deserialize, Serialize)]
pub struct ErrorMetadata {
pub description: Option<String>,
pub use_site: Option<ErrorLocation>
}
/// Mapping from error codes to metadata that can be (de)serialized.
pub type ErrorMetadataMap = BTreeMap<String, ErrorMetadata>;
/// JSON encodable error location type with filename and line number.
#[derive(PartialEq, Deserialize, Serialize)]
pub struct | {
pub filename: String,
pub line: usize
}
impl ErrorLocation {
/// Create an error location from a span.
pub fn from_span(ecx: &ExtCtxt, sp: Span) -> ErrorLocation {
let loc = ecx.codemap().lookup_char_pos_adj(sp.lo);
ErrorLocation {
filename: loc.filename,
line: loc.line
}
}
}
/// Get the directory where metadata for a given `prefix` should be stored.
///
/// See `output_metadata`.
pub fn get_metadata_dir(prefix: &str) -> PathBuf {
PathBuf::from(ERROR_METADATA_PREFIX).join(prefix)
}
/// Map `name` to a path in the given directory: <directory>/<name>.json
fn get_metadata_path(directory: PathBuf, name: &str) -> PathBuf {
directory.join(format!("{}.json", name))
}
/// Write metadata for the errors in `err_map` to disk, to a file corresponding to `prefix/name`.
///
/// For our current purposes the prefix is the target architecture and the name is a crate name.
/// If an error occurs steps will be taken to ensure that no file is created.
pub fn output_metadata(ecx: &ExtCtxt, prefix: &str, name: &str, err_map: &ErrorMap)
-> Result<(), Box<Error>>
{
// Create the directory to place the file in.
let metadata_dir = get_metadata_dir(prefix);
create_dir_all(&metadata_dir)?;
// Open the metadata file.
let metadata_path = get_metadata_path(metadata_dir, name);
let mut metadata_file = File::create(&metadata_path)?;
// Construct a serializable map.
let json_map = err_map.iter().map(|(k, &ErrorInfo { description, use_site })| {
let key = k.as_str().to_string();
let value = ErrorMetadata {
description: description.map(|n| n.as_str().to_string()),
use_site: use_site.map(|sp| ErrorLocation::from_span(ecx, sp))
};
(key, value)
}).collect::<ErrorMetadataMap>();
// Write the data to the file, deleting it if the write fails.
let result = serde_json::to_writer(&mut metadata_file, &json_map);
if result.is_err() {
remove_file(&metadata_path)?;
}
Ok(result?)
}
| ErrorLocation | identifier_name |
metadata.rs | // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! This module contains utilities for outputting metadata for diagnostic errors.
//!
//! Each set of errors is mapped to a metadata file by a name, which is
//! currently always a crate name.
use std::collections::BTreeMap;
use std::path::PathBuf;
use std::fs::{remove_file, create_dir_all, File};
use std::error::Error;
use syntax_pos::Span;
use ext::base::ExtCtxt;
use diagnostics::plugin::{ErrorMap, ErrorInfo};
use serde_json;
// Default metadata directory to use for extended error JSON.
const ERROR_METADATA_PREFIX: &'static str = "tmp/extended-errors";
/// JSON encodable/decodable version of `ErrorInfo`.
#[derive(PartialEq, Deserialize, Serialize)]
pub struct ErrorMetadata {
pub description: Option<String>,
pub use_site: Option<ErrorLocation>
}
/// Mapping from error codes to metadata that can be (de)serialized.
pub type ErrorMetadataMap = BTreeMap<String, ErrorMetadata>;
/// JSON encodable error location type with filename and line number.
#[derive(PartialEq, Deserialize, Serialize)]
pub struct ErrorLocation {
pub filename: String,
pub line: usize
}
impl ErrorLocation {
/// Create an error location from a span.
pub fn from_span(ecx: &ExtCtxt, sp: Span) -> ErrorLocation {
let loc = ecx.codemap().lookup_char_pos_adj(sp.lo);
ErrorLocation {
filename: loc.filename,
line: loc.line
}
}
}
/// Get the directory where metadata for a given `prefix` should be stored.
///
/// See `output_metadata`.
pub fn get_metadata_dir(prefix: &str) -> PathBuf {
PathBuf::from(ERROR_METADATA_PREFIX).join(prefix)
}
/// Map `name` to a path in the given directory: <directory>/<name>.json
fn get_metadata_path(directory: PathBuf, name: &str) -> PathBuf |
/// Write metadata for the errors in `err_map` to disk, to a file corresponding to `prefix/name`.
///
/// For our current purposes the prefix is the target architecture and the name is a crate name.
/// If an error occurs steps will be taken to ensure that no file is created.
pub fn output_metadata(ecx: &ExtCtxt, prefix: &str, name: &str, err_map: &ErrorMap)
-> Result<(), Box<Error>>
{
// Create the directory to place the file in.
let metadata_dir = get_metadata_dir(prefix);
create_dir_all(&metadata_dir)?;
// Open the metadata file.
let metadata_path = get_metadata_path(metadata_dir, name);
let mut metadata_file = File::create(&metadata_path)?;
// Construct a serializable map.
let json_map = err_map.iter().map(|(k, &ErrorInfo { description, use_site })| {
let key = k.as_str().to_string();
let value = ErrorMetadata {
description: description.map(|n| n.as_str().to_string()),
use_site: use_site.map(|sp| ErrorLocation::from_span(ecx, sp))
};
(key, value)
}).collect::<ErrorMetadataMap>();
// Write the data to the file, deleting it if the write fails.
let result = serde_json::to_writer(&mut metadata_file, &json_map);
if result.is_err() {
remove_file(&metadata_path)?;
}
Ok(result?)
}
| {
directory.join(format!("{}.json", name))
} | identifier_body |
metadata.rs | // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! This module contains utilities for outputting metadata for diagnostic errors.
//!
//! Each set of errors is mapped to a metadata file by a name, which is
//! currently always a crate name.
use std::collections::BTreeMap;
use std::path::PathBuf;
use std::fs::{remove_file, create_dir_all, File};
use std::error::Error;
use syntax_pos::Span;
use ext::base::ExtCtxt;
use diagnostics::plugin::{ErrorMap, ErrorInfo};
use serde_json;
// Default metadata directory to use for extended error JSON.
const ERROR_METADATA_PREFIX: &'static str = "tmp/extended-errors";
/// JSON encodable/decodable version of `ErrorInfo`.
#[derive(PartialEq, Deserialize, Serialize)]
pub struct ErrorMetadata {
pub description: Option<String>,
pub use_site: Option<ErrorLocation>
}
/// Mapping from error codes to metadata that can be (de)serialized.
pub type ErrorMetadataMap = BTreeMap<String, ErrorMetadata>;
/// JSON encodable error location type with filename and line number.
#[derive(PartialEq, Deserialize, Serialize)]
pub struct ErrorLocation {
pub filename: String,
pub line: usize
}
impl ErrorLocation {
/// Create an error location from a span.
pub fn from_span(ecx: &ExtCtxt, sp: Span) -> ErrorLocation {
let loc = ecx.codemap().lookup_char_pos_adj(sp.lo);
ErrorLocation {
filename: loc.filename,
line: loc.line
}
}
}
/// Get the directory where metadata for a given `prefix` should be stored.
///
/// See `output_metadata`.
pub fn get_metadata_dir(prefix: &str) -> PathBuf {
PathBuf::from(ERROR_METADATA_PREFIX).join(prefix)
}
/// Map `name` to a path in the given directory: <directory>/<name>.json
fn get_metadata_path(directory: PathBuf, name: &str) -> PathBuf {
directory.join(format!("{}.json", name))
}
/// Write metadata for the errors in `err_map` to disk, to a file corresponding to `prefix/name`.
///
/// For our current purposes the prefix is the target architecture and the name is a crate name.
/// If an error occurs steps will be taken to ensure that no file is created.
pub fn output_metadata(ecx: &ExtCtxt, prefix: &str, name: &str, err_map: &ErrorMap)
-> Result<(), Box<Error>>
{
// Create the directory to place the file in.
let metadata_dir = get_metadata_dir(prefix);
create_dir_all(&metadata_dir)?;
// Open the metadata file.
let metadata_path = get_metadata_path(metadata_dir, name);
let mut metadata_file = File::create(&metadata_path)?;
// Construct a serializable map.
let json_map = err_map.iter().map(|(k, &ErrorInfo { description, use_site })| {
let key = k.as_str().to_string();
let value = ErrorMetadata {
description: description.map(|n| n.as_str().to_string()),
use_site: use_site.map(|sp| ErrorLocation::from_span(ecx, sp))
};
(key, value)
}).collect::<ErrorMetadataMap>();
// Write the data to the file, deleting it if the write fails.
let result = serde_json::to_writer(&mut metadata_file, &json_map);
if result.is_err() |
Ok(result?)
}
| {
remove_file(&metadata_path)?;
} | conditional_block |
metadata.rs | // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! This module contains utilities for outputting metadata for diagnostic errors.
//!
//! Each set of errors is mapped to a metadata file by a name, which is
//! currently always a crate name.
use std::collections::BTreeMap;
use std::path::PathBuf;
use std::fs::{remove_file, create_dir_all, File};
use std::error::Error;
use syntax_pos::Span;
use ext::base::ExtCtxt;
use diagnostics::plugin::{ErrorMap, ErrorInfo};
use serde_json;
// Default metadata directory to use for extended error JSON.
const ERROR_METADATA_PREFIX: &'static str = "tmp/extended-errors";
/// JSON encodable/decodable version of `ErrorInfo`.
#[derive(PartialEq, Deserialize, Serialize)]
pub struct ErrorMetadata {
pub description: Option<String>,
pub use_site: Option<ErrorLocation>
}
/// Mapping from error codes to metadata that can be (de)serialized.
pub type ErrorMetadataMap = BTreeMap<String, ErrorMetadata>;
| pub struct ErrorLocation {
pub filename: String,
pub line: usize
}
impl ErrorLocation {
/// Create an error location from a span.
pub fn from_span(ecx: &ExtCtxt, sp: Span) -> ErrorLocation {
let loc = ecx.codemap().lookup_char_pos_adj(sp.lo);
ErrorLocation {
filename: loc.filename,
line: loc.line
}
}
}
/// Get the directory where metadata for a given `prefix` should be stored.
///
/// See `output_metadata`.
pub fn get_metadata_dir(prefix: &str) -> PathBuf {
PathBuf::from(ERROR_METADATA_PREFIX).join(prefix)
}
/// Map `name` to a path in the given directory: <directory>/<name>.json
fn get_metadata_path(directory: PathBuf, name: &str) -> PathBuf {
directory.join(format!("{}.json", name))
}
/// Write metadata for the errors in `err_map` to disk, to a file corresponding to `prefix/name`.
///
/// For our current purposes the prefix is the target architecture and the name is a crate name.
/// If an error occurs steps will be taken to ensure that no file is created.
pub fn output_metadata(ecx: &ExtCtxt, prefix: &str, name: &str, err_map: &ErrorMap)
-> Result<(), Box<Error>>
{
// Create the directory to place the file in.
let metadata_dir = get_metadata_dir(prefix);
create_dir_all(&metadata_dir)?;
// Open the metadata file.
let metadata_path = get_metadata_path(metadata_dir, name);
let mut metadata_file = File::create(&metadata_path)?;
// Construct a serializable map.
let json_map = err_map.iter().map(|(k, &ErrorInfo { description, use_site })| {
let key = k.as_str().to_string();
let value = ErrorMetadata {
description: description.map(|n| n.as_str().to_string()),
use_site: use_site.map(|sp| ErrorLocation::from_span(ecx, sp))
};
(key, value)
}).collect::<ErrorMetadataMap>();
// Write the data to the file, deleting it if the write fails.
let result = serde_json::to_writer(&mut metadata_file, &json_map);
if result.is_err() {
remove_file(&metadata_path)?;
}
Ok(result?)
} | /// JSON encodable error location type with filename and line number.
#[derive(PartialEq, Deserialize, Serialize)] | random_line_split |
upgrade.py | #!/usr/bin/env python
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
from resource_management.core.resources.system import Execute
from resource_management.libraries.functions import format | from resource_management.core.exceptions import Fail
from resource_management.core.logger import Logger
def run_migration(env, upgrade_type):
"""
If the acl migration script is present, then run it for either upgrade or downgrade.
That script was introduced in HDP 2.3.4.0 and requires stopping all Kafka brokers first.
Requires configs to be present.
:param env: Environment.
:param upgrade_type: "rolling" or "nonrolling
"""
import params
if upgrade_type is None:
raise Fail('Parameter "upgrade_type" is missing.')
if params.upgrade_direction is None:
raise Fail('Parameter "upgrade_direction" is missing.')
if not params.security_enabled:
Logger.info("Skip running the Kafka ACL migration script since cluster security is not enabled.")
return
Logger.info("Upgrade type: {0}, direction: {1}".format(str(upgrade_type), params.upgrade_direction))
# If the schema upgrade script exists in the version upgrading to, then attempt to upgrade/downgrade it while still using the present bits.
kafka_acls_script = None
command_suffix = ""
if params.upgrade_direction == Direction.UPGRADE:
kafka_acls_script = format("{stack_root}/{version}/kafka/bin/kafka-acls.sh")
command_suffix = "--upgradeAcls"
elif params.upgrade_direction == Direction.DOWNGRADE:
kafka_acls_script = format("{stack_root}/{downgrade_from_version}/kafka/bin/kafka-acls.sh")
command_suffix = "--downgradeAcls"
if kafka_acls_script is not None:
if os.path.exists(kafka_acls_script):
Logger.info("Found Kafka acls script: {0}".format(kafka_acls_script))
if params.zookeeper_connect is None:
raise Fail("Could not retrieve property kafka-broker/zookeeper.connect")
acls_command = "{0} --authorizer kafka.security.auth.SimpleAclAuthorizer --authorizer-properties zookeeper.connect={1} {2}".\
format(kafka_acls_script, params.zookeeper_connect, command_suffix)
Execute(acls_command,
user=params.kafka_user,
logoutput=True)
else:
Logger.info("Did not find Kafka acls script: {0}".format(kafka_acls_script)) | from resource_management.libraries.functions import Direction | random_line_split |
upgrade.py |
#!/usr/bin/env python
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
from resource_management.core.resources.system import Execute
from resource_management.libraries.functions import format
from resource_management.libraries.functions import Direction
from resource_management.core.exceptions import Fail
from resource_management.core.logger import Logger
def run_migration(env, upgrade_type):
"""
If the acl migration script is present, then run it for either upgrade or downgrade.
That script was introduced in HDP 2.3.4.0 and requires stopping all Kafka brokers first.
Requires configs to be present.
:param env: Environment.
:param upgrade_type: "rolling" or "nonrolling
"""
import params
if upgrade_type is None:
raise Fail('Parameter "upgrade_type" is missing.')
if params.upgrade_direction is None:
raise Fail('Parameter "upgrade_direction" is missing.')
if not params.security_enabled:
Logger.info("Skip running the Kafka ACL migration script since cluster security is not enabled.")
return
Logger.info("Upgrade type: {0}, direction: {1}".format(str(upgrade_type), params.upgrade_direction))
# If the schema upgrade script exists in the version upgrading to, then attempt to upgrade/downgrade it while still using the present bits.
kafka_acls_script = None
command_suffix = ""
if params.upgrade_direction == Direction.UPGRADE:
|
elif params.upgrade_direction == Direction.DOWNGRADE:
kafka_acls_script = format("{stack_root}/{downgrade_from_version}/kafka/bin/kafka-acls.sh")
command_suffix = "--downgradeAcls"
if kafka_acls_script is not None:
if os.path.exists(kafka_acls_script):
Logger.info("Found Kafka acls script: {0}".format(kafka_acls_script))
if params.zookeeper_connect is None:
raise Fail("Could not retrieve property kafka-broker/zookeeper.connect")
acls_command = "{0} --authorizer kafka.security.auth.SimpleAclAuthorizer --authorizer-properties zookeeper.connect={1} {2}".\
format(kafka_acls_script, params.zookeeper_connect, command_suffix)
Execute(acls_command,
user=params.kafka_user,
logoutput=True)
else:
Logger.info("Did not find Kafka acls script: {0}".format(kafka_acls_script))
| kafka_acls_script = format("{stack_root}/{version}/kafka/bin/kafka-acls.sh")
command_suffix = "--upgradeAcls" | conditional_block |
upgrade.py |
#!/usr/bin/env python
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
from resource_management.core.resources.system import Execute
from resource_management.libraries.functions import format
from resource_management.libraries.functions import Direction
from resource_management.core.exceptions import Fail
from resource_management.core.logger import Logger
def run_migration(env, upgrade_type):
| """
If the acl migration script is present, then run it for either upgrade or downgrade.
That script was introduced in HDP 2.3.4.0 and requires stopping all Kafka brokers first.
Requires configs to be present.
:param env: Environment.
:param upgrade_type: "rolling" or "nonrolling
"""
import params
if upgrade_type is None:
raise Fail('Parameter "upgrade_type" is missing.')
if params.upgrade_direction is None:
raise Fail('Parameter "upgrade_direction" is missing.')
if not params.security_enabled:
Logger.info("Skip running the Kafka ACL migration script since cluster security is not enabled.")
return
Logger.info("Upgrade type: {0}, direction: {1}".format(str(upgrade_type), params.upgrade_direction))
# If the schema upgrade script exists in the version upgrading to, then attempt to upgrade/downgrade it while still using the present bits.
kafka_acls_script = None
command_suffix = ""
if params.upgrade_direction == Direction.UPGRADE:
kafka_acls_script = format("{stack_root}/{version}/kafka/bin/kafka-acls.sh")
command_suffix = "--upgradeAcls"
elif params.upgrade_direction == Direction.DOWNGRADE:
kafka_acls_script = format("{stack_root}/{downgrade_from_version}/kafka/bin/kafka-acls.sh")
command_suffix = "--downgradeAcls"
if kafka_acls_script is not None:
if os.path.exists(kafka_acls_script):
Logger.info("Found Kafka acls script: {0}".format(kafka_acls_script))
if params.zookeeper_connect is None:
raise Fail("Could not retrieve property kafka-broker/zookeeper.connect")
acls_command = "{0} --authorizer kafka.security.auth.SimpleAclAuthorizer --authorizer-properties zookeeper.connect={1} {2}".\
format(kafka_acls_script, params.zookeeper_connect, command_suffix)
Execute(acls_command,
user=params.kafka_user,
logoutput=True)
else:
Logger.info("Did not find Kafka acls script: {0}".format(kafka_acls_script)) | identifier_body | |
upgrade.py |
#!/usr/bin/env python
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
from resource_management.core.resources.system import Execute
from resource_management.libraries.functions import format
from resource_management.libraries.functions import Direction
from resource_management.core.exceptions import Fail
from resource_management.core.logger import Logger
def | (env, upgrade_type):
"""
If the acl migration script is present, then run it for either upgrade or downgrade.
That script was introduced in HDP 2.3.4.0 and requires stopping all Kafka brokers first.
Requires configs to be present.
:param env: Environment.
:param upgrade_type: "rolling" or "nonrolling
"""
import params
if upgrade_type is None:
raise Fail('Parameter "upgrade_type" is missing.')
if params.upgrade_direction is None:
raise Fail('Parameter "upgrade_direction" is missing.')
if not params.security_enabled:
Logger.info("Skip running the Kafka ACL migration script since cluster security is not enabled.")
return
Logger.info("Upgrade type: {0}, direction: {1}".format(str(upgrade_type), params.upgrade_direction))
# If the schema upgrade script exists in the version upgrading to, then attempt to upgrade/downgrade it while still using the present bits.
kafka_acls_script = None
command_suffix = ""
if params.upgrade_direction == Direction.UPGRADE:
kafka_acls_script = format("{stack_root}/{version}/kafka/bin/kafka-acls.sh")
command_suffix = "--upgradeAcls"
elif params.upgrade_direction == Direction.DOWNGRADE:
kafka_acls_script = format("{stack_root}/{downgrade_from_version}/kafka/bin/kafka-acls.sh")
command_suffix = "--downgradeAcls"
if kafka_acls_script is not None:
if os.path.exists(kafka_acls_script):
Logger.info("Found Kafka acls script: {0}".format(kafka_acls_script))
if params.zookeeper_connect is None:
raise Fail("Could not retrieve property kafka-broker/zookeeper.connect")
acls_command = "{0} --authorizer kafka.security.auth.SimpleAclAuthorizer --authorizer-properties zookeeper.connect={1} {2}".\
format(kafka_acls_script, params.zookeeper_connect, command_suffix)
Execute(acls_command,
user=params.kafka_user,
logoutput=True)
else:
Logger.info("Did not find Kafka acls script: {0}".format(kafka_acls_script))
| run_migration | identifier_name |
MathUtils.js | /**
* Collection Math and sorting methods
* @Class MathUtils
* @static
*/
var ME = module.exports = {};
/**
* Takes an integer and calculates what the 16 bit float
* representation of the binary data used to read the integer is.
* @method f16
* @param {Number} h Integer value
* @return {Number} Float value
*/
ME.f16 = function(h) {
var s = (h & 0x8000) >> 15;
var e = (h & 0x7C00) >> 10;
var f = h & 0x03FF;
if(e == 0) | else if (e == 0x1F) {
return f?NaN:((s?-1:1)*Infinity);
}
return(s?-1:1) * Math.pow(2, e-15) * (1+(f/Math.pow(2, 10)));
}
/**
* Calculates the number of binary ones present in the data used to
* generate the input integer.
* @method popcount
* @param {Number} bits Integer
* @return {Number} Number of binary ones in the data
*/
ME.popcount = function(bits) {
var SK5 = 0x55555555,
SK3 = 0x33333333,
SKF0 = 0x0f0f0f0f,
SKFF = 0xff00ff;
bits -= (bits >> 1) & SK5;
bits = (bits & SK3) + ((bits >> 2) & SK3);
bits = (bits & SKF0) + ((bits >> 4) & SKF0);
bits += bits >> 8;
return (bits + (bits >> 15)) & 63;
}
/**
* Calculates the 64 bit integer value of two 32 bit integers. Only works up to
* the limit of the javascript Number maximum value.
* @method arr32To64
* @param {Number[]} arr Input integers, length should be 2.
* @return {Number} 64 bit representation of the two integers.
*/
var base32Max = Math.pow(2,32);
ME.arr32To64 = function(arr){
/// Re-read as uint64 (still little endian)
/// Warn: this will not work for ~50+ bit longs cus all JS numbers are 64 bit floats...
return base32Max*arr[1] + arr[0];
};
/**
* Sorts an array and returns unique values only.
* @method sort_unique
* @param {Array} arr_in Input array
* @param {Function} comparator A comparator function between the objects in arr_in
* @return {Array} Sorted and unique value.
*/
ME.sort_unique = function(arr_in, comparator) {
var arr = Array.prototype.sort.call(arr_in, comparator);
var u = {}, a = [];
for(var i = 0, l = arr.length; i < l; ++i){
if(u.hasOwnProperty(arr[i])) {
continue;
}
a.push(arr[i]);
u[arr[i]] = 1;
}
return a;
} | {
return (s?-1:1) * Math.pow(2,-14) * (f/Math.pow(2, 10));
} | conditional_block |
MathUtils.js | /**
* Collection Math and sorting methods
* @Class MathUtils
* @static
*/
var ME = module.exports = {};
/**
* Takes an integer and calculates what the 16 bit float
* representation of the binary data used to read the integer is.
* @method f16
* @param {Number} h Integer value
* @return {Number} Float value
*/
ME.f16 = function(h) {
var s = (h & 0x8000) >> 15;
var e = (h & 0x7C00) >> 10;
var f = h & 0x03FF;
if(e == 0) {
return (s?-1:1) * Math.pow(2,-14) * (f/Math.pow(2, 10));
} else if (e == 0x1F) {
return f?NaN:((s?-1:1)*Infinity);
}
return(s?-1:1) * Math.pow(2, e-15) * (1+(f/Math.pow(2, 10)));
}
/**
* Calculates the number of binary ones present in the data used to
* generate the input integer.
* @method popcount
* @param {Number} bits Integer
* @return {Number} Number of binary ones in the data
*/
ME.popcount = function(bits) {
var SK5 = 0x55555555,
SK3 = 0x33333333,
SKF0 = 0x0f0f0f0f,
SKFF = 0xff00ff; | bits = (bits & SK3) + ((bits >> 2) & SK3);
bits = (bits & SKF0) + ((bits >> 4) & SKF0);
bits += bits >> 8;
return (bits + (bits >> 15)) & 63;
}
/**
* Calculates the 64 bit integer value of two 32 bit integers. Only works up to
* the limit of the javascript Number maximum value.
* @method arr32To64
* @param {Number[]} arr Input integers, length should be 2.
* @return {Number} 64 bit representation of the two integers.
*/
var base32Max = Math.pow(2,32);
ME.arr32To64 = function(arr){
/// Re-read as uint64 (still little endian)
/// Warn: this will not work for ~50+ bit longs cus all JS numbers are 64 bit floats...
return base32Max*arr[1] + arr[0];
};
/**
* Sorts an array and returns unique values only.
* @method sort_unique
* @param {Array} arr_in Input array
* @param {Function} comparator A comparator function between the objects in arr_in
* @return {Array} Sorted and unique value.
*/
ME.sort_unique = function(arr_in, comparator) {
var arr = Array.prototype.sort.call(arr_in, comparator);
var u = {}, a = [];
for(var i = 0, l = arr.length; i < l; ++i){
if(u.hasOwnProperty(arr[i])) {
continue;
}
a.push(arr[i]);
u[arr[i]] = 1;
}
return a;
} |
bits -= (bits >> 1) & SK5; | random_line_split |
Event.ts | //////////////////////////////////////////////////////////////////////////////////////
//
// Copyright (c) 2014-2015, Egret Technology Inc.
// All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
// * Neither the name of the Egret nor the
// names of its contributors may be used to endorse or promote products
// derived from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY EGRET AND CONTRIBUTORS "AS IS" AND ANY EXPRESS
// OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
// OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
// IN NO EVENT SHALL EGRET AND CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;LOSS OF USE, DATA,
// OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// | //////////////////////////////////////////////////////////////////////////////////////
module dragonBones {
/**
* @class dragonBones.Event
* @classdesc
* 事件
*/
export class Event extends egret.Event {
/**
* 创建一个Event实例
* @param type 事件的类型
*/
public constructor(type:string, bubbles:boolean = false, cancelable:boolean = false) {
super(type, bubbles, cancelable)
}
}
} | random_line_split | |
Event.ts | //////////////////////////////////////////////////////////////////////////////////////
//
// Copyright (c) 2014-2015, Egret Technology Inc.
// All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
// * Neither the name of the Egret nor the
// names of its contributors may be used to endorse or promote products
// derived from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY EGRET AND CONTRIBUTORS "AS IS" AND ANY EXPRESS
// OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
// OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
// IN NO EVENT SHALL EGRET AND CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;LOSS OF USE, DATA,
// OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
//////////////////////////////////////////////////////////////////////////////////////
module dragonBones {
/**
* @class dragonBones.Event
* @classdesc
* 事件
*/
export class Even | ends egret.Event {
/**
* 创建一个Event实例
* @param type 事件的类型
*/
public constructor(type:string, bubbles:boolean = false, cancelable:boolean = false) {
super(type, bubbles, cancelable)
}
}
} | t ext | identifier_name |
Event.ts | //////////////////////////////////////////////////////////////////////////////////////
//
// Copyright (c) 2014-2015, Egret Technology Inc.
// All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
// * Neither the name of the Egret nor the
// names of its contributors may be used to endorse or promote products
// derived from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY EGRET AND CONTRIBUTORS "AS IS" AND ANY EXPRESS
// OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
// OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
// IN NO EVENT SHALL EGRET AND CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;LOSS OF USE, DATA,
// OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
//////////////////////////////////////////////////////////////////////////////////////
module dragonBones {
/**
* @class dragonBones.Event
* @classdesc
* 事件
*/
export class Event extends egret.Event {
/**
* 创建一个Event实例
* @param type 事件的类型
*/
public constructor(type:string, bubbles:boolean = false, cancelable:boolean = false) {
super(type, | bubbles, cancelable)
}
}
} | identifier_body | |
cleanup.rs | extern crate diesel;
extern crate dotenv;
extern crate kuchiki;
extern crate rusoto_core;
extern crate rusoto_credential;
extern crate rusoto_s3;
extern crate server;
extern crate url;
use diesel::pg::PgConnection;
use diesel::prelude::*;
use dotenv::dotenv;
use kuchiki::traits::*;
use rusoto_core::Region;
use rusoto_s3::{DeleteObjectRequest, S3, S3Client};
use server::models::*;
use server::start_logging;
use std::env;
use url::Url;
fn cleanup(ad: &mut Ad) -> bool {
let document = kuchiki::parse_html().one(ad.html.clone());
let mut ret = false;
for like in document.select("h5._1qbu").unwrap() {
like.as_node().detach();
ret = true;
}
let mut images = ad.images.clone();
for img in document.select(".commentable_item img").unwrap() {
if let Some(src) = img.attributes.borrow_mut().get_mut("src") {
if let Some(pos) = images.iter().position(|x| x == src) {
images.remove(pos);
}
println!("deleting {}", src);
if let Ok(url) = src.parse::<Url>() {
let client = S3Client::simple(Region::UsEast1);
let res = client
.delete_object(&DeleteObjectRequest {
bucket: "pp-facebook-ads".to_string(),
key: url.path().trim_left_matches('/').to_string(),
..DeleteObjectRequest::default()
})
.sync();
*src = "".to_string();
if res.is_err() {
println!("Couldn't delete {} {:?}", src, res);
}
}
}
ret = true;
}
for comment in document.select(".commentable_item").unwrap() {
comment.as_node().detach();
ret = true;
}
ad.html = document
.select("div")
.unwrap()
.nth(0)
.unwrap()
.as_node()
.to_string();
ad.images = images;
ret
}
fn | () {
use server::schema::ads::dsl::*;
dotenv().ok();
start_logging();
let database_url = env::var("DATABASE_URL").expect("DATABASE_URL must be set");
let conn = PgConnection::establish(&database_url).unwrap();
let dbads: Vec<Ad> = ads.order(created_at.desc()).load::<Ad>(&conn).unwrap();
for mut ad in dbads {
if cleanup(&mut ad) {
let document = kuchiki::parse_html().one(ad.html.clone());
println!("Cleaned {}", ad.id);
diesel::update(ads.find(ad.id.clone()))
.set((
html.eq(ad.html),
title.eq(get_title(&document).unwrap()),
message.eq(get_message(&document).unwrap()),
images.eq(ad.images),
))
.execute(&conn)
.unwrap();
} else {
println!("Skipped {}", ad.id);
}
}
}
| main | identifier_name |
cleanup.rs | extern crate diesel;
extern crate dotenv;
extern crate kuchiki;
extern crate rusoto_core;
extern crate rusoto_credential;
extern crate rusoto_s3;
extern crate server;
extern crate url;
use diesel::pg::PgConnection;
use diesel::prelude::*;
use dotenv::dotenv;
use kuchiki::traits::*;
use rusoto_core::Region;
use rusoto_s3::{DeleteObjectRequest, S3, S3Client};
use server::models::*;
use server::start_logging;
use std::env;
use url::Url;
fn cleanup(ad: &mut Ad) -> bool {
let document = kuchiki::parse_html().one(ad.html.clone());
let mut ret = false;
for like in document.select("h5._1qbu").unwrap() {
like.as_node().detach();
ret = true;
}
let mut images = ad.images.clone();
for img in document.select(".commentable_item img").unwrap() {
if let Some(src) = img.attributes.borrow_mut().get_mut("src") {
if let Some(pos) = images.iter().position(|x| x == src) {
images.remove(pos);
}
println!("deleting {}", src);
if let Ok(url) = src.parse::<Url>() {
let client = S3Client::simple(Region::UsEast1);
let res = client
.delete_object(&DeleteObjectRequest {
bucket: "pp-facebook-ads".to_string(),
key: url.path().trim_left_matches('/').to_string(),
..DeleteObjectRequest::default()
})
.sync();
*src = "".to_string();
if res.is_err() |
}
}
ret = true;
}
for comment in document.select(".commentable_item").unwrap() {
comment.as_node().detach();
ret = true;
}
ad.html = document
.select("div")
.unwrap()
.nth(0)
.unwrap()
.as_node()
.to_string();
ad.images = images;
ret
}
fn main() {
use server::schema::ads::dsl::*;
dotenv().ok();
start_logging();
let database_url = env::var("DATABASE_URL").expect("DATABASE_URL must be set");
let conn = PgConnection::establish(&database_url).unwrap();
let dbads: Vec<Ad> = ads.order(created_at.desc()).load::<Ad>(&conn).unwrap();
for mut ad in dbads {
if cleanup(&mut ad) {
let document = kuchiki::parse_html().one(ad.html.clone());
println!("Cleaned {}", ad.id);
diesel::update(ads.find(ad.id.clone()))
.set((
html.eq(ad.html),
title.eq(get_title(&document).unwrap()),
message.eq(get_message(&document).unwrap()),
images.eq(ad.images),
))
.execute(&conn)
.unwrap();
} else {
println!("Skipped {}", ad.id);
}
}
}
| {
println!("Couldn't delete {} {:?}", src, res);
} | conditional_block |
cleanup.rs | extern crate diesel;
extern crate dotenv;
extern crate kuchiki;
extern crate rusoto_core;
extern crate rusoto_credential;
extern crate rusoto_s3;
extern crate server;
extern crate url;
use diesel::pg::PgConnection;
use diesel::prelude::*;
use dotenv::dotenv;
use kuchiki::traits::*;
use rusoto_core::Region;
use rusoto_s3::{DeleteObjectRequest, S3, S3Client};
use server::models::*;
use server::start_logging;
use std::env;
use url::Url;
fn cleanup(ad: &mut Ad) -> bool {
let document = kuchiki::parse_html().one(ad.html.clone());
let mut ret = false;
for like in document.select("h5._1qbu").unwrap() {
like.as_node().detach();
ret = true;
}
let mut images = ad.images.clone();
for img in document.select(".commentable_item img").unwrap() {
if let Some(src) = img.attributes.borrow_mut().get_mut("src") {
if let Some(pos) = images.iter().position(|x| x == src) {
images.remove(pos);
}
println!("deleting {}", src);
if let Ok(url) = src.parse::<Url>() {
let client = S3Client::simple(Region::UsEast1);
let res = client
.delete_object(&DeleteObjectRequest {
bucket: "pp-facebook-ads".to_string(),
key: url.path().trim_left_matches('/').to_string(),
..DeleteObjectRequest::default()
})
.sync();
*src = "".to_string();
if res.is_err() {
println!("Couldn't delete {} {:?}", src, res);
}
}
}
ret = true;
}
for comment in document.select(".commentable_item").unwrap() {
comment.as_node().detach();
ret = true;
}
ad.html = document
.select("div")
.unwrap()
.nth(0)
.unwrap()
.as_node()
.to_string();
ad.images = images;
ret
}
fn main() | {
use server::schema::ads::dsl::*;
dotenv().ok();
start_logging();
let database_url = env::var("DATABASE_URL").expect("DATABASE_URL must be set");
let conn = PgConnection::establish(&database_url).unwrap();
let dbads: Vec<Ad> = ads.order(created_at.desc()).load::<Ad>(&conn).unwrap();
for mut ad in dbads {
if cleanup(&mut ad) {
let document = kuchiki::parse_html().one(ad.html.clone());
println!("Cleaned {}", ad.id);
diesel::update(ads.find(ad.id.clone()))
.set((
html.eq(ad.html),
title.eq(get_title(&document).unwrap()),
message.eq(get_message(&document).unwrap()),
images.eq(ad.images),
))
.execute(&conn)
.unwrap();
} else {
println!("Skipped {}", ad.id);
}
}
} | identifier_body | |
cleanup.rs | extern crate diesel;
extern crate dotenv;
extern crate kuchiki;
extern crate rusoto_core;
extern crate rusoto_credential;
extern crate rusoto_s3;
extern crate server;
extern crate url;
use diesel::pg::PgConnection;
use diesel::prelude::*;
use dotenv::dotenv;
use kuchiki::traits::*;
use rusoto_core::Region;
use rusoto_s3::{DeleteObjectRequest, S3, S3Client};
use server::models::*;
use server::start_logging;
use std::env;
use url::Url;
fn cleanup(ad: &mut Ad) -> bool {
let document = kuchiki::parse_html().one(ad.html.clone());
let mut ret = false;
for like in document.select("h5._1qbu").unwrap() {
like.as_node().detach();
ret = true;
}
let mut images = ad.images.clone();
for img in document.select(".commentable_item img").unwrap() {
if let Some(src) = img.attributes.borrow_mut().get_mut("src") {
if let Some(pos) = images.iter().position(|x| x == src) {
images.remove(pos);
}
println!("deleting {}", src);
if let Ok(url) = src.parse::<Url>() {
let client = S3Client::simple(Region::UsEast1);
let res = client
.delete_object(&DeleteObjectRequest {
bucket: "pp-facebook-ads".to_string(),
key: url.path().trim_left_matches('/').to_string(),
..DeleteObjectRequest::default()
})
.sync();
*src = "".to_string();
if res.is_err() {
println!("Couldn't delete {} {:?}", src, res);
}
}
}
ret = true;
}
for comment in document.select(".commentable_item").unwrap() {
comment.as_node().detach();
ret = true;
} | .unwrap()
.as_node()
.to_string();
ad.images = images;
ret
}
fn main() {
use server::schema::ads::dsl::*;
dotenv().ok();
start_logging();
let database_url = env::var("DATABASE_URL").expect("DATABASE_URL must be set");
let conn = PgConnection::establish(&database_url).unwrap();
let dbads: Vec<Ad> = ads.order(created_at.desc()).load::<Ad>(&conn).unwrap();
for mut ad in dbads {
if cleanup(&mut ad) {
let document = kuchiki::parse_html().one(ad.html.clone());
println!("Cleaned {}", ad.id);
diesel::update(ads.find(ad.id.clone()))
.set((
html.eq(ad.html),
title.eq(get_title(&document).unwrap()),
message.eq(get_message(&document).unwrap()),
images.eq(ad.images),
))
.execute(&conn)
.unwrap();
} else {
println!("Skipped {}", ad.id);
}
}
} |
ad.html = document
.select("div")
.unwrap()
.nth(0) | random_line_split |
gen_key_io_test_vectors.py | #!/usr/bin/env python3
# Copyright (c) 2012-2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
Generate valid and invalid base58 address and private key test vectors.
Usage:
PYTHONPATH=../../test/functional/test_framework ./gen_key_io_test_vectors.py valid 50 > ../../src/test/data/key_io_valid.json
PYTHONPATH=../../test/functional/test_framework ./gen_key_io_test_vectors.py invalid 50 > ../../src/test/data/key_io_invalid.json
'''
# 2012 Wladimir J. van der Laan
# Released under MIT License
import os
from itertools import islice
from base58 import b58encode_chk, b58decode_chk, b58chars
import random
from binascii import b2a_hex
from segwit_addr import bech32_encode, decode, convertbits, CHARSET
# key types
PUBKEY_ADDRESS = 48
SCRIPT_ADDRESS = 5
SCRIPT_ADDRESS2 = 50
PUBKEY_ADDRESS_TEST = 111 | SCRIPT_ADDRESS_REGTEST = 196
PRIVKEY = 176
PRIVKEY_TEST = 239
PRIVKEY_REGTEST = 239
# script
OP_0 = 0x00
OP_1 = 0x51
OP_2 = 0x52
OP_16 = 0x60
OP_DUP = 0x76
OP_EQUAL = 0x87
OP_EQUALVERIFY = 0x88
OP_HASH160 = 0xa9
OP_CHECKSIG = 0xac
pubkey_prefix = (OP_DUP, OP_HASH160, 20)
pubkey_suffix = (OP_EQUALVERIFY, OP_CHECKSIG)
script_prefix = (OP_HASH160, 20)
script_suffix = (OP_EQUAL,)
p2wpkh_prefix = (OP_0, 20)
p2wsh_prefix = (OP_0, 32)
metadata_keys = ['isPrivkey', 'chain', 'isCompressed', 'tryCaseFlip']
# templates for valid sequences
templates = [
# prefix, payload_size, suffix, metadata, output_prefix, output_suffix
# None = N/A
((PUBKEY_ADDRESS,), 20, (), (False, 'main', None, None), pubkey_prefix, pubkey_suffix),
((SCRIPT_ADDRESS,), 20, (), (False, 'main', None, None), script_prefix, script_suffix),
((SCRIPT_ADDRESS2,), 20, (), (False, 'main', None, None), script_prefix, script_suffix),
((PUBKEY_ADDRESS_TEST,), 20, (), (False, 'test', None, None), pubkey_prefix, pubkey_suffix),
((SCRIPT_ADDRESS_TEST,), 20, (), (False, 'test', None, None), script_prefix, script_suffix),
((SCRIPT_ADDRESS_TEST2,), 20, (), (False, 'test', None, None), script_prefix, script_suffix),
((PUBKEY_ADDRESS_REGTEST,), 20, (), (False, 'regtest', None, None), pubkey_prefix, pubkey_suffix),
((SCRIPT_ADDRESS_REGTEST,), 20, (), (False, 'regtest', None, None), script_prefix, script_suffix),
((PRIVKEY,), 32, (), (True, 'main', False, None), (), ()),
((PRIVKEY,), 32, (1,), (True, 'main', True, None), (), ()),
((PRIVKEY_TEST,), 32, (), (True, 'test', False, None), (), ()),
((PRIVKEY_TEST,), 32, (1,), (True, 'test', True, None), (), ()),
((PRIVKEY_REGTEST,), 32, (), (True, 'regtest', False, None), (), ()),
((PRIVKEY_REGTEST,), 32, (1,), (True, 'regtest', True, None), (), ())
]
# templates for valid bech32 sequences
bech32_templates = [
# hrp, version, witprog_size, metadata, output_prefix
('ltc', 0, 20, (False, 'main', None, True), p2wpkh_prefix),
('ltc', 0, 32, (False, 'main', None, True), p2wsh_prefix),
('ltc', 1, 2, (False, 'main', None, True), (OP_1, 2)),
('tltc', 0, 20, (False, 'test', None, True), p2wpkh_prefix),
('tltc', 0, 32, (False, 'test', None, True), p2wsh_prefix),
('tltc', 2, 16, (False, 'test', None, True), (OP_2, 16)),
('rltc', 0, 20, (False, 'regtest', None, True), p2wpkh_prefix),
('rltc', 0, 32, (False, 'regtest', None, True), p2wsh_prefix),
('rltc', 16, 40, (False, 'regtest', None, True), (OP_16, 40))
]
# templates for invalid bech32 sequences
bech32_ng_templates = [
# hrp, version, witprog_size, invalid_bech32, invalid_checksum, invalid_char
('tc', 0, 20, False, False, False),
('tltc', 17, 32, False, False, False),
('rltc', 3, 1, False, False, False),
('ltc', 15, 41, False, False, False),
('tltc', 0, 16, False, False, False),
('rltc', 0, 32, True, False, False),
('ltc', 0, 16, True, False, False),
('tltc', 0, 32, False, True, False),
('rltc', 0, 20, False, False, True)
]
def is_valid(v):
'''Check vector v for validity'''
if len(set(v) - set(b58chars)) > 0:
return is_valid_bech32(v)
result = b58decode_chk(v)
if result is None:
return is_valid_bech32(v)
for template in templates:
prefix = bytearray(template[0])
suffix = bytearray(template[2])
if result.startswith(prefix) and result.endswith(suffix):
if (len(result) - len(prefix) - len(suffix)) == template[1]:
return True
return is_valid_bech32(v)
def is_valid_bech32(v):
'''Check vector v for bech32 validity'''
for hrp in ['ltc', 'tltc', 'rltc']:
if decode(hrp, v) != (None, None):
return True
return False
def gen_valid_base58_vector(template):
'''Generate valid base58 vector'''
prefix = bytearray(template[0])
payload = bytearray(os.urandom(template[1]))
suffix = bytearray(template[2])
dst_prefix = bytearray(template[4])
dst_suffix = bytearray(template[5])
rv = b58encode_chk(prefix + payload + suffix)
return rv, dst_prefix + payload + dst_suffix
def gen_valid_bech32_vector(template):
'''Generate valid bech32 vector'''
hrp = template[0]
witver = template[1]
witprog = bytearray(os.urandom(template[2]))
dst_prefix = bytearray(template[4])
rv = bech32_encode(hrp, [witver] + convertbits(witprog, 8, 5))
return rv, dst_prefix + witprog
def gen_valid_vectors():
'''Generate valid test vectors'''
glist = [gen_valid_base58_vector, gen_valid_bech32_vector]
tlist = [templates, bech32_templates]
while True:
for template, valid_vector_generator in [(t, g) for g, l in zip(glist, tlist) for t in l]:
rv, payload = valid_vector_generator(template)
assert is_valid(rv)
metadata = {x: y for x, y in zip(metadata_keys,template[3]) if y is not None}
hexrepr = b2a_hex(payload)
if isinstance(hexrepr, bytes):
hexrepr = hexrepr.decode('utf8')
yield (rv, hexrepr, metadata)
def gen_invalid_base58_vector(template):
'''Generate possibly invalid vector'''
# kinds of invalid vectors:
# invalid prefix
# invalid payload length
# invalid (randomized) suffix (add random data)
# corrupt checksum
corrupt_prefix = randbool(0.2)
randomize_payload_size = randbool(0.2)
corrupt_suffix = randbool(0.2)
if corrupt_prefix:
prefix = os.urandom(1)
else:
prefix = bytearray(template[0])
if randomize_payload_size:
payload = os.urandom(max(int(random.expovariate(0.5)), 50))
else:
payload = os.urandom(template[1])
if corrupt_suffix:
suffix = os.urandom(len(template[2]))
else:
suffix = bytearray(template[2])
val = b58encode_chk(prefix + payload + suffix)
if random.randint(0,10)<1: # line corruption
if randbool(): # add random character to end
val += random.choice(b58chars)
else: # replace random character in the middle
n = random.randint(0, len(val))
val = val[0:n] + random.choice(b58chars) + val[n+1:]
return val
def gen_invalid_bech32_vector(template):
'''Generate possibly invalid bech32 vector'''
no_data = randbool(0.1)
to_upper = randbool(0.1)
hrp = template[0]
witver = template[1]
witprog = bytearray(os.urandom(template[2]))
if no_data:
rv = bech32_encode(hrp, [])
else:
data = [witver] + convertbits(witprog, 8, 5)
if template[3] and not no_data:
if template[2] % 5 in {2, 4}:
data[-1] |= 1
else:
data.append(0)
rv = bech32_encode(hrp, data)
if template[4]:
i = len(rv) - random.randrange(1, 7)
rv = rv[:i] + random.choice(CHARSET.replace(rv[i], '')) + rv[i + 1:]
if template[5]:
i = len(hrp) + 1 + random.randrange(0, len(rv) - len(hrp) - 4)
rv = rv[:i] + rv[i:i + 4].upper() + rv[i + 4:]
if to_upper:
rv = rv.swapcase()
return rv
def randbool(p = 0.5):
'''Return True with P(p)'''
return random.random() < p
def gen_invalid_vectors():
'''Generate invalid test vectors'''
# start with some manual edge-cases
yield "",
yield "x",
glist = [gen_invalid_base58_vector, gen_invalid_bech32_vector]
tlist = [templates, bech32_ng_templates]
while True:
for template, invalid_vector_generator in [(t, g) for g, l in zip(glist, tlist) for t in l]:
val = invalid_vector_generator(template)
if not is_valid(val):
yield val,
if __name__ == '__main__':
import sys
import json
iters = {'valid':gen_valid_vectors, 'invalid':gen_invalid_vectors}
try:
uiter = iters[sys.argv[1]]
except IndexError:
uiter = gen_valid_vectors
try:
count = int(sys.argv[2])
except IndexError:
count = 0
data = list(islice(uiter(), count))
json.dump(data, sys.stdout, sort_keys=True, indent=4)
sys.stdout.write('\n') | SCRIPT_ADDRESS_TEST = 196
SCRIPT_ADDRESS_TEST2 = 58
PUBKEY_ADDRESS_REGTEST = 111 | random_line_split |
gen_key_io_test_vectors.py | #!/usr/bin/env python3
# Copyright (c) 2012-2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
Generate valid and invalid base58 address and private key test vectors.
Usage:
PYTHONPATH=../../test/functional/test_framework ./gen_key_io_test_vectors.py valid 50 > ../../src/test/data/key_io_valid.json
PYTHONPATH=../../test/functional/test_framework ./gen_key_io_test_vectors.py invalid 50 > ../../src/test/data/key_io_invalid.json
'''
# 2012 Wladimir J. van der Laan
# Released under MIT License
import os
from itertools import islice
from base58 import b58encode_chk, b58decode_chk, b58chars
import random
from binascii import b2a_hex
from segwit_addr import bech32_encode, decode, convertbits, CHARSET
# key types
PUBKEY_ADDRESS = 48
SCRIPT_ADDRESS = 5
SCRIPT_ADDRESS2 = 50
PUBKEY_ADDRESS_TEST = 111
SCRIPT_ADDRESS_TEST = 196
SCRIPT_ADDRESS_TEST2 = 58
PUBKEY_ADDRESS_REGTEST = 111
SCRIPT_ADDRESS_REGTEST = 196
PRIVKEY = 176
PRIVKEY_TEST = 239
PRIVKEY_REGTEST = 239
# script
OP_0 = 0x00
OP_1 = 0x51
OP_2 = 0x52
OP_16 = 0x60
OP_DUP = 0x76
OP_EQUAL = 0x87
OP_EQUALVERIFY = 0x88
OP_HASH160 = 0xa9
OP_CHECKSIG = 0xac
pubkey_prefix = (OP_DUP, OP_HASH160, 20)
pubkey_suffix = (OP_EQUALVERIFY, OP_CHECKSIG)
script_prefix = (OP_HASH160, 20)
script_suffix = (OP_EQUAL,)
p2wpkh_prefix = (OP_0, 20)
p2wsh_prefix = (OP_0, 32)
metadata_keys = ['isPrivkey', 'chain', 'isCompressed', 'tryCaseFlip']
# templates for valid sequences
templates = [
# prefix, payload_size, suffix, metadata, output_prefix, output_suffix
# None = N/A
((PUBKEY_ADDRESS,), 20, (), (False, 'main', None, None), pubkey_prefix, pubkey_suffix),
((SCRIPT_ADDRESS,), 20, (), (False, 'main', None, None), script_prefix, script_suffix),
((SCRIPT_ADDRESS2,), 20, (), (False, 'main', None, None), script_prefix, script_suffix),
((PUBKEY_ADDRESS_TEST,), 20, (), (False, 'test', None, None), pubkey_prefix, pubkey_suffix),
((SCRIPT_ADDRESS_TEST,), 20, (), (False, 'test', None, None), script_prefix, script_suffix),
((SCRIPT_ADDRESS_TEST2,), 20, (), (False, 'test', None, None), script_prefix, script_suffix),
((PUBKEY_ADDRESS_REGTEST,), 20, (), (False, 'regtest', None, None), pubkey_prefix, pubkey_suffix),
((SCRIPT_ADDRESS_REGTEST,), 20, (), (False, 'regtest', None, None), script_prefix, script_suffix),
((PRIVKEY,), 32, (), (True, 'main', False, None), (), ()),
((PRIVKEY,), 32, (1,), (True, 'main', True, None), (), ()),
((PRIVKEY_TEST,), 32, (), (True, 'test', False, None), (), ()),
((PRIVKEY_TEST,), 32, (1,), (True, 'test', True, None), (), ()),
((PRIVKEY_REGTEST,), 32, (), (True, 'regtest', False, None), (), ()),
((PRIVKEY_REGTEST,), 32, (1,), (True, 'regtest', True, None), (), ())
]
# templates for valid bech32 sequences
bech32_templates = [
# hrp, version, witprog_size, metadata, output_prefix
('ltc', 0, 20, (False, 'main', None, True), p2wpkh_prefix),
('ltc', 0, 32, (False, 'main', None, True), p2wsh_prefix),
('ltc', 1, 2, (False, 'main', None, True), (OP_1, 2)),
('tltc', 0, 20, (False, 'test', None, True), p2wpkh_prefix),
('tltc', 0, 32, (False, 'test', None, True), p2wsh_prefix),
('tltc', 2, 16, (False, 'test', None, True), (OP_2, 16)),
('rltc', 0, 20, (False, 'regtest', None, True), p2wpkh_prefix),
('rltc', 0, 32, (False, 'regtest', None, True), p2wsh_prefix),
('rltc', 16, 40, (False, 'regtest', None, True), (OP_16, 40))
]
# templates for invalid bech32 sequences
bech32_ng_templates = [
# hrp, version, witprog_size, invalid_bech32, invalid_checksum, invalid_char
('tc', 0, 20, False, False, False),
('tltc', 17, 32, False, False, False),
('rltc', 3, 1, False, False, False),
('ltc', 15, 41, False, False, False),
('tltc', 0, 16, False, False, False),
('rltc', 0, 32, True, False, False),
('ltc', 0, 16, True, False, False),
('tltc', 0, 32, False, True, False),
('rltc', 0, 20, False, False, True)
]
def is_valid(v):
'''Check vector v for validity'''
if len(set(v) - set(b58chars)) > 0:
return is_valid_bech32(v)
result = b58decode_chk(v)
if result is None:
return is_valid_bech32(v)
for template in templates:
prefix = bytearray(template[0])
suffix = bytearray(template[2])
if result.startswith(prefix) and result.endswith(suffix):
if (len(result) - len(prefix) - len(suffix)) == template[1]:
return True
return is_valid_bech32(v)
def is_valid_bech32(v):
'''Check vector v for bech32 validity'''
for hrp in ['ltc', 'tltc', 'rltc']:
if decode(hrp, v) != (None, None):
return True
return False
def gen_valid_base58_vector(template):
'''Generate valid base58 vector'''
prefix = bytearray(template[0])
payload = bytearray(os.urandom(template[1]))
suffix = bytearray(template[2])
dst_prefix = bytearray(template[4])
dst_suffix = bytearray(template[5])
rv = b58encode_chk(prefix + payload + suffix)
return rv, dst_prefix + payload + dst_suffix
def gen_valid_bech32_vector(template):
'''Generate valid bech32 vector'''
hrp = template[0]
witver = template[1]
witprog = bytearray(os.urandom(template[2]))
dst_prefix = bytearray(template[4])
rv = bech32_encode(hrp, [witver] + convertbits(witprog, 8, 5))
return rv, dst_prefix + witprog
def gen_valid_vectors():
'''Generate valid test vectors'''
glist = [gen_valid_base58_vector, gen_valid_bech32_vector]
tlist = [templates, bech32_templates]
while True:
for template, valid_vector_generator in [(t, g) for g, l in zip(glist, tlist) for t in l]:
rv, payload = valid_vector_generator(template)
assert is_valid(rv)
metadata = {x: y for x, y in zip(metadata_keys,template[3]) if y is not None}
hexrepr = b2a_hex(payload)
if isinstance(hexrepr, bytes):
hexrepr = hexrepr.decode('utf8')
yield (rv, hexrepr, metadata)
def gen_invalid_base58_vector(template):
|
def gen_invalid_bech32_vector(template):
'''Generate possibly invalid bech32 vector'''
no_data = randbool(0.1)
to_upper = randbool(0.1)
hrp = template[0]
witver = template[1]
witprog = bytearray(os.urandom(template[2]))
if no_data:
rv = bech32_encode(hrp, [])
else:
data = [witver] + convertbits(witprog, 8, 5)
if template[3] and not no_data:
if template[2] % 5 in {2, 4}:
data[-1] |= 1
else:
data.append(0)
rv = bech32_encode(hrp, data)
if template[4]:
i = len(rv) - random.randrange(1, 7)
rv = rv[:i] + random.choice(CHARSET.replace(rv[i], '')) + rv[i + 1:]
if template[5]:
i = len(hrp) + 1 + random.randrange(0, len(rv) - len(hrp) - 4)
rv = rv[:i] + rv[i:i + 4].upper() + rv[i + 4:]
if to_upper:
rv = rv.swapcase()
return rv
def randbool(p = 0.5):
'''Return True with P(p)'''
return random.random() < p
def gen_invalid_vectors():
'''Generate invalid test vectors'''
# start with some manual edge-cases
yield "",
yield "x",
glist = [gen_invalid_base58_vector, gen_invalid_bech32_vector]
tlist = [templates, bech32_ng_templates]
while True:
for template, invalid_vector_generator in [(t, g) for g, l in zip(glist, tlist) for t in l]:
val = invalid_vector_generator(template)
if not is_valid(val):
yield val,
if __name__ == '__main__':
import sys
import json
iters = {'valid':gen_valid_vectors, 'invalid':gen_invalid_vectors}
try:
uiter = iters[sys.argv[1]]
except IndexError:
uiter = gen_valid_vectors
try:
count = int(sys.argv[2])
except IndexError:
count = 0
data = list(islice(uiter(), count))
json.dump(data, sys.stdout, sort_keys=True, indent=4)
sys.stdout.write('\n')
| '''Generate possibly invalid vector'''
# kinds of invalid vectors:
# invalid prefix
# invalid payload length
# invalid (randomized) suffix (add random data)
# corrupt checksum
corrupt_prefix = randbool(0.2)
randomize_payload_size = randbool(0.2)
corrupt_suffix = randbool(0.2)
if corrupt_prefix:
prefix = os.urandom(1)
else:
prefix = bytearray(template[0])
if randomize_payload_size:
payload = os.urandom(max(int(random.expovariate(0.5)), 50))
else:
payload = os.urandom(template[1])
if corrupt_suffix:
suffix = os.urandom(len(template[2]))
else:
suffix = bytearray(template[2])
val = b58encode_chk(prefix + payload + suffix)
if random.randint(0,10)<1: # line corruption
if randbool(): # add random character to end
val += random.choice(b58chars)
else: # replace random character in the middle
n = random.randint(0, len(val))
val = val[0:n] + random.choice(b58chars) + val[n+1:]
return val | identifier_body |
gen_key_io_test_vectors.py | #!/usr/bin/env python3
# Copyright (c) 2012-2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
Generate valid and invalid base58 address and private key test vectors.
Usage:
PYTHONPATH=../../test/functional/test_framework ./gen_key_io_test_vectors.py valid 50 > ../../src/test/data/key_io_valid.json
PYTHONPATH=../../test/functional/test_framework ./gen_key_io_test_vectors.py invalid 50 > ../../src/test/data/key_io_invalid.json
'''
# 2012 Wladimir J. van der Laan
# Released under MIT License
import os
from itertools import islice
from base58 import b58encode_chk, b58decode_chk, b58chars
import random
from binascii import b2a_hex
from segwit_addr import bech32_encode, decode, convertbits, CHARSET
# key types
PUBKEY_ADDRESS = 48
SCRIPT_ADDRESS = 5
SCRIPT_ADDRESS2 = 50
PUBKEY_ADDRESS_TEST = 111
SCRIPT_ADDRESS_TEST = 196
SCRIPT_ADDRESS_TEST2 = 58
PUBKEY_ADDRESS_REGTEST = 111
SCRIPT_ADDRESS_REGTEST = 196
PRIVKEY = 176
PRIVKEY_TEST = 239
PRIVKEY_REGTEST = 239
# script
OP_0 = 0x00
OP_1 = 0x51
OP_2 = 0x52
OP_16 = 0x60
OP_DUP = 0x76
OP_EQUAL = 0x87
OP_EQUALVERIFY = 0x88
OP_HASH160 = 0xa9
OP_CHECKSIG = 0xac
pubkey_prefix = (OP_DUP, OP_HASH160, 20)
pubkey_suffix = (OP_EQUALVERIFY, OP_CHECKSIG)
script_prefix = (OP_HASH160, 20)
script_suffix = (OP_EQUAL,)
p2wpkh_prefix = (OP_0, 20)
p2wsh_prefix = (OP_0, 32)
metadata_keys = ['isPrivkey', 'chain', 'isCompressed', 'tryCaseFlip']
# templates for valid sequences
templates = [
# prefix, payload_size, suffix, metadata, output_prefix, output_suffix
# None = N/A
((PUBKEY_ADDRESS,), 20, (), (False, 'main', None, None), pubkey_prefix, pubkey_suffix),
((SCRIPT_ADDRESS,), 20, (), (False, 'main', None, None), script_prefix, script_suffix),
((SCRIPT_ADDRESS2,), 20, (), (False, 'main', None, None), script_prefix, script_suffix),
((PUBKEY_ADDRESS_TEST,), 20, (), (False, 'test', None, None), pubkey_prefix, pubkey_suffix),
((SCRIPT_ADDRESS_TEST,), 20, (), (False, 'test', None, None), script_prefix, script_suffix),
((SCRIPT_ADDRESS_TEST2,), 20, (), (False, 'test', None, None), script_prefix, script_suffix),
((PUBKEY_ADDRESS_REGTEST,), 20, (), (False, 'regtest', None, None), pubkey_prefix, pubkey_suffix),
((SCRIPT_ADDRESS_REGTEST,), 20, (), (False, 'regtest', None, None), script_prefix, script_suffix),
((PRIVKEY,), 32, (), (True, 'main', False, None), (), ()),
((PRIVKEY,), 32, (1,), (True, 'main', True, None), (), ()),
((PRIVKEY_TEST,), 32, (), (True, 'test', False, None), (), ()),
((PRIVKEY_TEST,), 32, (1,), (True, 'test', True, None), (), ()),
((PRIVKEY_REGTEST,), 32, (), (True, 'regtest', False, None), (), ()),
((PRIVKEY_REGTEST,), 32, (1,), (True, 'regtest', True, None), (), ())
]
# templates for valid bech32 sequences
bech32_templates = [
# hrp, version, witprog_size, metadata, output_prefix
('ltc', 0, 20, (False, 'main', None, True), p2wpkh_prefix),
('ltc', 0, 32, (False, 'main', None, True), p2wsh_prefix),
('ltc', 1, 2, (False, 'main', None, True), (OP_1, 2)),
('tltc', 0, 20, (False, 'test', None, True), p2wpkh_prefix),
('tltc', 0, 32, (False, 'test', None, True), p2wsh_prefix),
('tltc', 2, 16, (False, 'test', None, True), (OP_2, 16)),
('rltc', 0, 20, (False, 'regtest', None, True), p2wpkh_prefix),
('rltc', 0, 32, (False, 'regtest', None, True), p2wsh_prefix),
('rltc', 16, 40, (False, 'regtest', None, True), (OP_16, 40))
]
# templates for invalid bech32 sequences
bech32_ng_templates = [
# hrp, version, witprog_size, invalid_bech32, invalid_checksum, invalid_char
('tc', 0, 20, False, False, False),
('tltc', 17, 32, False, False, False),
('rltc', 3, 1, False, False, False),
('ltc', 15, 41, False, False, False),
('tltc', 0, 16, False, False, False),
('rltc', 0, 32, True, False, False),
('ltc', 0, 16, True, False, False),
('tltc', 0, 32, False, True, False),
('rltc', 0, 20, False, False, True)
]
def is_valid(v):
'''Check vector v for validity'''
if len(set(v) - set(b58chars)) > 0:
return is_valid_bech32(v)
result = b58decode_chk(v)
if result is None:
return is_valid_bech32(v)
for template in templates:
prefix = bytearray(template[0])
suffix = bytearray(template[2])
if result.startswith(prefix) and result.endswith(suffix):
if (len(result) - len(prefix) - len(suffix)) == template[1]:
return True
return is_valid_bech32(v)
def is_valid_bech32(v):
'''Check vector v for bech32 validity'''
for hrp in ['ltc', 'tltc', 'rltc']:
if decode(hrp, v) != (None, None):
return True
return False
def gen_valid_base58_vector(template):
'''Generate valid base58 vector'''
prefix = bytearray(template[0])
payload = bytearray(os.urandom(template[1]))
suffix = bytearray(template[2])
dst_prefix = bytearray(template[4])
dst_suffix = bytearray(template[5])
rv = b58encode_chk(prefix + payload + suffix)
return rv, dst_prefix + payload + dst_suffix
def gen_valid_bech32_vector(template):
'''Generate valid bech32 vector'''
hrp = template[0]
witver = template[1]
witprog = bytearray(os.urandom(template[2]))
dst_prefix = bytearray(template[4])
rv = bech32_encode(hrp, [witver] + convertbits(witprog, 8, 5))
return rv, dst_prefix + witprog
def gen_valid_vectors():
'''Generate valid test vectors'''
glist = [gen_valid_base58_vector, gen_valid_bech32_vector]
tlist = [templates, bech32_templates]
while True:
for template, valid_vector_generator in [(t, g) for g, l in zip(glist, tlist) for t in l]:
rv, payload = valid_vector_generator(template)
assert is_valid(rv)
metadata = {x: y for x, y in zip(metadata_keys,template[3]) if y is not None}
hexrepr = b2a_hex(payload)
if isinstance(hexrepr, bytes):
hexrepr = hexrepr.decode('utf8')
yield (rv, hexrepr, metadata)
def gen_invalid_base58_vector(template):
'''Generate possibly invalid vector'''
# kinds of invalid vectors:
# invalid prefix
# invalid payload length
# invalid (randomized) suffix (add random data)
# corrupt checksum
corrupt_prefix = randbool(0.2)
randomize_payload_size = randbool(0.2)
corrupt_suffix = randbool(0.2)
if corrupt_prefix:
prefix = os.urandom(1)
else:
prefix = bytearray(template[0])
if randomize_payload_size:
payload = os.urandom(max(int(random.expovariate(0.5)), 50))
else:
payload = os.urandom(template[1])
if corrupt_suffix:
suffix = os.urandom(len(template[2]))
else:
suffix = bytearray(template[2])
val = b58encode_chk(prefix + payload + suffix)
if random.randint(0,10)<1: # line corruption
if randbool(): # add random character to end
val += random.choice(b58chars)
else: # replace random character in the middle
n = random.randint(0, len(val))
val = val[0:n] + random.choice(b58chars) + val[n+1:]
return val
def gen_invalid_bech32_vector(template):
'''Generate possibly invalid bech32 vector'''
no_data = randbool(0.1)
to_upper = randbool(0.1)
hrp = template[0]
witver = template[1]
witprog = bytearray(os.urandom(template[2]))
if no_data:
rv = bech32_encode(hrp, [])
else:
data = [witver] + convertbits(witprog, 8, 5)
if template[3] and not no_data:
if template[2] % 5 in {2, 4}:
data[-1] |= 1
else:
data.append(0)
rv = bech32_encode(hrp, data)
if template[4]:
|
if template[5]:
i = len(hrp) + 1 + random.randrange(0, len(rv) - len(hrp) - 4)
rv = rv[:i] + rv[i:i + 4].upper() + rv[i + 4:]
if to_upper:
rv = rv.swapcase()
return rv
def randbool(p = 0.5):
'''Return True with P(p)'''
return random.random() < p
def gen_invalid_vectors():
'''Generate invalid test vectors'''
# start with some manual edge-cases
yield "",
yield "x",
glist = [gen_invalid_base58_vector, gen_invalid_bech32_vector]
tlist = [templates, bech32_ng_templates]
while True:
for template, invalid_vector_generator in [(t, g) for g, l in zip(glist, tlist) for t in l]:
val = invalid_vector_generator(template)
if not is_valid(val):
yield val,
if __name__ == '__main__':
import sys
import json
iters = {'valid':gen_valid_vectors, 'invalid':gen_invalid_vectors}
try:
uiter = iters[sys.argv[1]]
except IndexError:
uiter = gen_valid_vectors
try:
count = int(sys.argv[2])
except IndexError:
count = 0
data = list(islice(uiter(), count))
json.dump(data, sys.stdout, sort_keys=True, indent=4)
sys.stdout.write('\n')
| i = len(rv) - random.randrange(1, 7)
rv = rv[:i] + random.choice(CHARSET.replace(rv[i], '')) + rv[i + 1:] | conditional_block |
gen_key_io_test_vectors.py | #!/usr/bin/env python3
# Copyright (c) 2012-2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
Generate valid and invalid base58 address and private key test vectors.
Usage:
PYTHONPATH=../../test/functional/test_framework ./gen_key_io_test_vectors.py valid 50 > ../../src/test/data/key_io_valid.json
PYTHONPATH=../../test/functional/test_framework ./gen_key_io_test_vectors.py invalid 50 > ../../src/test/data/key_io_invalid.json
'''
# 2012 Wladimir J. van der Laan
# Released under MIT License
import os
from itertools import islice
from base58 import b58encode_chk, b58decode_chk, b58chars
import random
from binascii import b2a_hex
from segwit_addr import bech32_encode, decode, convertbits, CHARSET
# key types
PUBKEY_ADDRESS = 48
SCRIPT_ADDRESS = 5
SCRIPT_ADDRESS2 = 50
PUBKEY_ADDRESS_TEST = 111
SCRIPT_ADDRESS_TEST = 196
SCRIPT_ADDRESS_TEST2 = 58
PUBKEY_ADDRESS_REGTEST = 111
SCRIPT_ADDRESS_REGTEST = 196
PRIVKEY = 176
PRIVKEY_TEST = 239
PRIVKEY_REGTEST = 239
# script
OP_0 = 0x00
OP_1 = 0x51
OP_2 = 0x52
OP_16 = 0x60
OP_DUP = 0x76
OP_EQUAL = 0x87
OP_EQUALVERIFY = 0x88
OP_HASH160 = 0xa9
OP_CHECKSIG = 0xac
pubkey_prefix = (OP_DUP, OP_HASH160, 20)
pubkey_suffix = (OP_EQUALVERIFY, OP_CHECKSIG)
script_prefix = (OP_HASH160, 20)
script_suffix = (OP_EQUAL,)
p2wpkh_prefix = (OP_0, 20)
p2wsh_prefix = (OP_0, 32)
metadata_keys = ['isPrivkey', 'chain', 'isCompressed', 'tryCaseFlip']
# templates for valid sequences
templates = [
# prefix, payload_size, suffix, metadata, output_prefix, output_suffix
# None = N/A
((PUBKEY_ADDRESS,), 20, (), (False, 'main', None, None), pubkey_prefix, pubkey_suffix),
((SCRIPT_ADDRESS,), 20, (), (False, 'main', None, None), script_prefix, script_suffix),
((SCRIPT_ADDRESS2,), 20, (), (False, 'main', None, None), script_prefix, script_suffix),
((PUBKEY_ADDRESS_TEST,), 20, (), (False, 'test', None, None), pubkey_prefix, pubkey_suffix),
((SCRIPT_ADDRESS_TEST,), 20, (), (False, 'test', None, None), script_prefix, script_suffix),
((SCRIPT_ADDRESS_TEST2,), 20, (), (False, 'test', None, None), script_prefix, script_suffix),
((PUBKEY_ADDRESS_REGTEST,), 20, (), (False, 'regtest', None, None), pubkey_prefix, pubkey_suffix),
((SCRIPT_ADDRESS_REGTEST,), 20, (), (False, 'regtest', None, None), script_prefix, script_suffix),
((PRIVKEY,), 32, (), (True, 'main', False, None), (), ()),
((PRIVKEY,), 32, (1,), (True, 'main', True, None), (), ()),
((PRIVKEY_TEST,), 32, (), (True, 'test', False, None), (), ()),
((PRIVKEY_TEST,), 32, (1,), (True, 'test', True, None), (), ()),
((PRIVKEY_REGTEST,), 32, (), (True, 'regtest', False, None), (), ()),
((PRIVKEY_REGTEST,), 32, (1,), (True, 'regtest', True, None), (), ())
]
# templates for valid bech32 sequences
bech32_templates = [
# hrp, version, witprog_size, metadata, output_prefix
('ltc', 0, 20, (False, 'main', None, True), p2wpkh_prefix),
('ltc', 0, 32, (False, 'main', None, True), p2wsh_prefix),
('ltc', 1, 2, (False, 'main', None, True), (OP_1, 2)),
('tltc', 0, 20, (False, 'test', None, True), p2wpkh_prefix),
('tltc', 0, 32, (False, 'test', None, True), p2wsh_prefix),
('tltc', 2, 16, (False, 'test', None, True), (OP_2, 16)),
('rltc', 0, 20, (False, 'regtest', None, True), p2wpkh_prefix),
('rltc', 0, 32, (False, 'regtest', None, True), p2wsh_prefix),
('rltc', 16, 40, (False, 'regtest', None, True), (OP_16, 40))
]
# templates for invalid bech32 sequences
bech32_ng_templates = [
# hrp, version, witprog_size, invalid_bech32, invalid_checksum, invalid_char
('tc', 0, 20, False, False, False),
('tltc', 17, 32, False, False, False),
('rltc', 3, 1, False, False, False),
('ltc', 15, 41, False, False, False),
('tltc', 0, 16, False, False, False),
('rltc', 0, 32, True, False, False),
('ltc', 0, 16, True, False, False),
('tltc', 0, 32, False, True, False),
('rltc', 0, 20, False, False, True)
]
def is_valid(v):
'''Check vector v for validity'''
if len(set(v) - set(b58chars)) > 0:
return is_valid_bech32(v)
result = b58decode_chk(v)
if result is None:
return is_valid_bech32(v)
for template in templates:
prefix = bytearray(template[0])
suffix = bytearray(template[2])
if result.startswith(prefix) and result.endswith(suffix):
if (len(result) - len(prefix) - len(suffix)) == template[1]:
return True
return is_valid_bech32(v)
def | (v):
'''Check vector v for bech32 validity'''
for hrp in ['ltc', 'tltc', 'rltc']:
if decode(hrp, v) != (None, None):
return True
return False
def gen_valid_base58_vector(template):
'''Generate valid base58 vector'''
prefix = bytearray(template[0])
payload = bytearray(os.urandom(template[1]))
suffix = bytearray(template[2])
dst_prefix = bytearray(template[4])
dst_suffix = bytearray(template[5])
rv = b58encode_chk(prefix + payload + suffix)
return rv, dst_prefix + payload + dst_suffix
def gen_valid_bech32_vector(template):
'''Generate valid bech32 vector'''
hrp = template[0]
witver = template[1]
witprog = bytearray(os.urandom(template[2]))
dst_prefix = bytearray(template[4])
rv = bech32_encode(hrp, [witver] + convertbits(witprog, 8, 5))
return rv, dst_prefix + witprog
def gen_valid_vectors():
'''Generate valid test vectors'''
glist = [gen_valid_base58_vector, gen_valid_bech32_vector]
tlist = [templates, bech32_templates]
while True:
for template, valid_vector_generator in [(t, g) for g, l in zip(glist, tlist) for t in l]:
rv, payload = valid_vector_generator(template)
assert is_valid(rv)
metadata = {x: y for x, y in zip(metadata_keys,template[3]) if y is not None}
hexrepr = b2a_hex(payload)
if isinstance(hexrepr, bytes):
hexrepr = hexrepr.decode('utf8')
yield (rv, hexrepr, metadata)
def gen_invalid_base58_vector(template):
'''Generate possibly invalid vector'''
# kinds of invalid vectors:
# invalid prefix
# invalid payload length
# invalid (randomized) suffix (add random data)
# corrupt checksum
corrupt_prefix = randbool(0.2)
randomize_payload_size = randbool(0.2)
corrupt_suffix = randbool(0.2)
if corrupt_prefix:
prefix = os.urandom(1)
else:
prefix = bytearray(template[0])
if randomize_payload_size:
payload = os.urandom(max(int(random.expovariate(0.5)), 50))
else:
payload = os.urandom(template[1])
if corrupt_suffix:
suffix = os.urandom(len(template[2]))
else:
suffix = bytearray(template[2])
val = b58encode_chk(prefix + payload + suffix)
if random.randint(0,10)<1: # line corruption
if randbool(): # add random character to end
val += random.choice(b58chars)
else: # replace random character in the middle
n = random.randint(0, len(val))
val = val[0:n] + random.choice(b58chars) + val[n+1:]
return val
def gen_invalid_bech32_vector(template):
'''Generate possibly invalid bech32 vector'''
no_data = randbool(0.1)
to_upper = randbool(0.1)
hrp = template[0]
witver = template[1]
witprog = bytearray(os.urandom(template[2]))
if no_data:
rv = bech32_encode(hrp, [])
else:
data = [witver] + convertbits(witprog, 8, 5)
if template[3] and not no_data:
if template[2] % 5 in {2, 4}:
data[-1] |= 1
else:
data.append(0)
rv = bech32_encode(hrp, data)
if template[4]:
i = len(rv) - random.randrange(1, 7)
rv = rv[:i] + random.choice(CHARSET.replace(rv[i], '')) + rv[i + 1:]
if template[5]:
i = len(hrp) + 1 + random.randrange(0, len(rv) - len(hrp) - 4)
rv = rv[:i] + rv[i:i + 4].upper() + rv[i + 4:]
if to_upper:
rv = rv.swapcase()
return rv
def randbool(p = 0.5):
'''Return True with P(p)'''
return random.random() < p
def gen_invalid_vectors():
'''Generate invalid test vectors'''
# start with some manual edge-cases
yield "",
yield "x",
glist = [gen_invalid_base58_vector, gen_invalid_bech32_vector]
tlist = [templates, bech32_ng_templates]
while True:
for template, invalid_vector_generator in [(t, g) for g, l in zip(glist, tlist) for t in l]:
val = invalid_vector_generator(template)
if not is_valid(val):
yield val,
if __name__ == '__main__':
import sys
import json
iters = {'valid':gen_valid_vectors, 'invalid':gen_invalid_vectors}
try:
uiter = iters[sys.argv[1]]
except IndexError:
uiter = gen_valid_vectors
try:
count = int(sys.argv[2])
except IndexError:
count = 0
data = list(islice(uiter(), count))
json.dump(data, sys.stdout, sort_keys=True, indent=4)
sys.stdout.write('\n')
| is_valid_bech32 | identifier_name |
Hero.tsx | import React, { ReactNode } from 'react'
import styled from 'styled-components'
import { M1, M2, M3, maxWidth, DESKTOP, PHONE } from '../constants/measurements'
import { Section, Row, Col } from './Grid'
import { Fade } from './Fade'
import { H1, H3, P } from './Typography'
const TextWrapper = styled.div<{}>`
align-self: center;
`
const ImgWrapper = styled.div<{}>`
${maxWidth(DESKTOP)} {
padding-left: 5%;
width: 100%;
}
${maxWidth(PHONE)} {
padding: 0 10%;
width: 100%;
}
`
interface IHeroProps {
Image: React.ReactNode
title: string
subtitle: string | * Set this to false when there are other async / transition related events
* which are needed for the image to load properly
*
* For example, if using a lazy load gatsby-image, this should be false
*/
shouldFadeImage?: boolean
}
export const Hero = ({
Image,
title,
subtitle,
body,
shouldFadeImage = true,
}: IHeroProps) => {
return (
<Section>
<Row margin={M2}>
<Col sm={12} md={6} margin={M2} flex>
<TextWrapper>
<H1 mb1>{title}</H1>
<H3 normal>{subtitle}</H3>
{body && <P>{body}</P>}
</TextWrapper>
</Col>
<Col sm={12} md={6} margin={M2}>
<ImgWrapper>{Image}</ImgWrapper>
</Col>
</Row>
</Section>
)
} | body?: string | ReactNode
/**
* If the image should be wrapped in a `<Fade />` component or not.
* | random_line_split |
spa.shell.js | /*
* spa.shell.js
* Shell module for SPA
*/
/*jslint browser : true, continue : true,
devel : true, indent : 2, maxerr : 50,
newcap : true, nomen : true, plusplus : true,
regexp : true, sloppy : true, vars : false,
white : true
*/
/*global $, spa */
spa.shell = (function () {
'use strict';
//---------------- BEGIN MODULE SCOPE VARIABLES --------------
var
configMap = {
anchor_schema_map : {
chat : { opened : true, closed : true }
},
resize_interval : 200,
main_html : String()
+ '<div class="spa-shell-head">'
+ '<div class="spa-shell-head-logo">'
+ '<h1>SPA</h1>'
+ '<p>javascript end to end</p>'
+ '</div>'
+ '<div class="spa-shell-head-acct"></div>'
+ '</div>'
+ '<div class="spa-shell-main">'
+ '<div class="spa-shell-main-nav"></div>'
+ '<div class="spa-shell-main-content"></div>'
+ '</div>'
+ '<div class="spa-shell-foot"></div>'
+ '<div class="spa-shell-modal"></div>'
},
stateMap = {
$container : undefined,
anchor_map : {},
resize_idto : undefined
},
jqueryMap = {},
copyAnchorMap, setJqueryMap, changeAnchorPart,
onResize, onHashchange,
onTapAcct, onLogin, onLogout,
setChatAnchor, initModule;
//----------------- END MODULE SCOPE VARIABLES ---------------
//------------------- BEGIN UTILITY METHODS ------------------
// Returns copy of stored anchor map; minimizes overhead
copyAnchorMap = function () {
return $.extend( true, {}, stateMap.anchor_map );
};
//-------------------- END UTILITY METHODS -------------------
//--------------------- BEGIN DOM METHODS --------------------
// Begin DOM method /setJqueryMap/
setJqueryMap = function () {
var $container = stateMap.$container;
jqueryMap = {
$container : $container,
$acct : $container.find('.spa-shell-head-acct'),
$nav : $container.find('.spa-shell-main-nav')
};
};
// End DOM method /setJqueryMap/
// Begin DOM method /changeAnchorPart/
// Purpose : Changes part of the URI anchor component
// Arguments :
// * arg_map - The map describing what part of the URI anchor
// we want changed.
// Returns :
// * true - the Anchor portion of the URI was updated
// * false - the Anchor portion of the URI could not be updated
// Actions :
// The current anchor rep stored in stateMap.anchor_map.
// See uriAnchor for a discussion of encoding.
// This method
// * Creates a copy of this map using copyAnchorMap().
// * Modifies the key-values using arg_map.
// * Manages the distinction between independent
// and dependent values in the encoding.
// * Attempts to change the URI using uriAnchor.
// * Returns true on success, and false on failure.
//
changeAnchorPart = function ( arg_map ) {
var
anchor_map_revise = copyAnchorMap(),
bool_return = true,
key_name, key_name_dep;
// Begin merge changes into anchor map
KEYVAL:
for ( key_name in arg_map ) {
if ( arg_map.hasOwnProperty( key_name ) ) {
// skip dependent keys during iteration
if ( key_name.indexOf( '_' ) === 0 ) { continue KEYVAL; }
// update independent key value
anchor_map_revise[key_name] = arg_map[key_name];
// update matching dependent key
key_name_dep = '_' + key_name;
if ( arg_map[key_name_dep] ) {
anchor_map_revise[key_name_dep] = arg_map[key_name_dep];
}
else {
delete anchor_map_revise[key_name_dep];
delete anchor_map_revise['_s' + key_name_dep];
}
}
}
// End merge changes into anchor map
// Begin attempt to update URI; revert if not successful
try {
$.uriAnchor.setAnchor( anchor_map_revise );
}
catch ( error ) {
// replace URI with existing state
$.uriAnchor.setAnchor( stateMap.anchor_map,null,true );
bool_return = false;
}
// End attempt to update URI...
return bool_return;
};
// End DOM method /changeAnchorPart/
//--------------------- END DOM METHODS ----------------------
//------------------- BEGIN EVENT HANDLERS ------------------- | // Settings : none
// Returns : false
// Actions :
// * Parses the URI anchor component
// * Compares proposed application state with current
// * Adjust the application only where proposed state
// differs from existing and is allowed by anchor schema
//
onHashchange = function ( event ) {
var
_s_chat_previous, _s_chat_proposed, s_chat_proposed,
anchor_map_proposed,
is_ok = true,
anchor_map_previous = copyAnchorMap();
// attempt to parse anchor
try { anchor_map_proposed = $.uriAnchor.makeAnchorMap(); }
catch ( error ) {
$.uriAnchor.setAnchor( anchor_map_previous, null, true );
return false;
}
stateMap.anchor_map = anchor_map_proposed;
// convenience vars
_s_chat_previous = anchor_map_previous._s_chat;
_s_chat_proposed = anchor_map_proposed._s_chat;
// Begin adjust chat component if changed
if ( ! anchor_map_previous
|| _s_chat_previous !== _s_chat_proposed
) {
s_chat_proposed = anchor_map_proposed.chat;
switch ( s_chat_proposed ) {
case 'opened' :
is_ok = spa.chat.setSliderPosition( 'opened' );
break;
case 'closed' :
is_ok = spa.chat.setSliderPosition( 'closed' );
break;
default :
spa.chat.setSliderPosition( 'closed' );
delete anchor_map_proposed.chat;
$.uriAnchor.setAnchor( anchor_map_proposed, null, true );
}
}
// End adjust chat component if changed
// Begin revert anchor if slider change denied
if ( ! is_ok ) {
if ( anchor_map_previous ) {
$.uriAnchor.setAnchor( anchor_map_previous, null, true );
stateMap.anchor_map = anchor_map_previous;
}
else {
delete anchor_map_proposed.chat;
$.uriAnchor.setAnchor( anchor_map_proposed, null, true );
}
}
// End revert anchor if slider change denied
return false;
};
// End Event handler /onHashchange/
// Begin Event handler /onResize/
onResize = function () {
if ( stateMap.resize_idto ) { return true; }
spa.chat.handleResize();
stateMap.resize_idto = setTimeout(
function () { stateMap.resize_idto = undefined; },
configMap.resize_interval
);
return true;
};
// End Event handler /onResize/
onTapAcct = function ( event ) {
var acct_text, user_name, user = spa.model.people.get_user();
if ( user.get_is_anon() ) {
user_name = prompt( 'Please sign-in' );
spa.model.people.login( user_name );
jqueryMap.$acct.text( '... processing ...' );
}
else {
spa.model.people.logout();
}
return false;
};
onLogin = function ( event, login_user ) {
jqueryMap.$acct.text( login_user.name );
};
onLogout = function ( event, logout_user ) {
jqueryMap.$acct.text( 'Please sign-in' );
};
//-------------------- END EVENT HANDLERS --------------------
//---------------------- BEGIN CALLBACKS ---------------------
// Begin callback method /setChatAnchor/
// Example : setChatAnchor( 'closed' );
// Purpose : Change the chat component of the anchor
// Arguments:
// * position_type - may be 'closed' or 'opened'
// Action :
// Changes the URI anchor parameter 'chat' to the requested
// value if possible.
// Returns :
// * true - requested anchor part was updated
// * false - requested anchor part was not updated
// Throws : none
//
setChatAnchor = function ( position_type ) {
return changeAnchorPart({ chat : position_type });
};
// End callback method /setChatAnchor/
//----------------------- END CALLBACKS ----------------------
//------------------- BEGIN PUBLIC METHODS -------------------
// Begin Public method /initModule/
// Example : spa.shell.initModule( $('#app_div_id') );
// Purpose :
// Directs the Shell to offer its capability to the user
// Arguments :
// * $container (example: $('#app_div_id')).
// A jQuery collection that should represent
// a single DOM container
// Action :
// Populates $container with the shell of the UI
// and then configures and initializes feature modules.
// The Shell is also responsible for browser-wide issues
// such as URI anchor and cookie management
// Returns : none
// Throws : none
//
initModule = function ( $container ) {
// load HTML and map jQuery collections
stateMap.$container = $container;
$container.html( configMap.main_html );
setJqueryMap();
// configure uriAnchor to use our schema
$.uriAnchor.configModule({
schema_map : configMap.anchor_schema_map
});
// configure and initialize feature modules
spa.chat.configModule({
set_chat_anchor : setChatAnchor,
chat_model : spa.model.chat,
people_model : spa.model.people
});
spa.chat.initModule( jqueryMap.$container );
spa.avtr.configModule({
chat_model : spa.model.chat,
people_model : spa.model.people
});
spa.avtr.initModule( jqueryMap.$nav );
// Handle URI anchor change events.
// This is done /after/ all feature modules are configured
// and initialized, otherwise they will not be ready to handle
// the trigger event, which is used to ensure the anchor
// is considered on-load
//
$(window)
.bind( 'resize', onResize )
.bind( 'hashchange', onHashchange )
.trigger( 'hashchange' );
$.gevent.subscribe( $container, 'spa-login', onLogin );
$.gevent.subscribe( $container, 'spa-logout', onLogout );
jqueryMap.$acct
.text( 'Please sign-in')
.bind( 'utap', onTapAcct );
};
// End PUBLIC method /initModule/
return { initModule : initModule };
//------------------- END PUBLIC METHODS ---------------------
}()); | // Begin Event handler /onHashchange/
// Purpose : Handles the hashchange event
// Arguments :
// * event - jQuery event object. | random_line_split |
spa.shell.js | /*
* spa.shell.js
* Shell module for SPA
*/
/*jslint browser : true, continue : true,
devel : true, indent : 2, maxerr : 50,
newcap : true, nomen : true, plusplus : true,
regexp : true, sloppy : true, vars : false,
white : true
*/
/*global $, spa */
spa.shell = (function () {
'use strict';
//---------------- BEGIN MODULE SCOPE VARIABLES --------------
var
configMap = {
anchor_schema_map : {
chat : { opened : true, closed : true }
},
resize_interval : 200,
main_html : String()
+ '<div class="spa-shell-head">'
+ '<div class="spa-shell-head-logo">'
+ '<h1>SPA</h1>'
+ '<p>javascript end to end</p>'
+ '</div>'
+ '<div class="spa-shell-head-acct"></div>'
+ '</div>'
+ '<div class="spa-shell-main">'
+ '<div class="spa-shell-main-nav"></div>'
+ '<div class="spa-shell-main-content"></div>'
+ '</div>'
+ '<div class="spa-shell-foot"></div>'
+ '<div class="spa-shell-modal"></div>'
},
stateMap = {
$container : undefined,
anchor_map : {},
resize_idto : undefined
},
jqueryMap = {},
copyAnchorMap, setJqueryMap, changeAnchorPart,
onResize, onHashchange,
onTapAcct, onLogin, onLogout,
setChatAnchor, initModule;
//----------------- END MODULE SCOPE VARIABLES ---------------
//------------------- BEGIN UTILITY METHODS ------------------
// Returns copy of stored anchor map; minimizes overhead
copyAnchorMap = function () {
return $.extend( true, {}, stateMap.anchor_map );
};
//-------------------- END UTILITY METHODS -------------------
//--------------------- BEGIN DOM METHODS --------------------
// Begin DOM method /setJqueryMap/
setJqueryMap = function () {
var $container = stateMap.$container;
jqueryMap = {
$container : $container,
$acct : $container.find('.spa-shell-head-acct'),
$nav : $container.find('.spa-shell-main-nav')
};
};
// End DOM method /setJqueryMap/
// Begin DOM method /changeAnchorPart/
// Purpose : Changes part of the URI anchor component
// Arguments :
// * arg_map - The map describing what part of the URI anchor
// we want changed.
// Returns :
// * true - the Anchor portion of the URI was updated
// * false - the Anchor portion of the URI could not be updated
// Actions :
// The current anchor rep stored in stateMap.anchor_map.
// See uriAnchor for a discussion of encoding.
// This method
// * Creates a copy of this map using copyAnchorMap().
// * Modifies the key-values using arg_map.
// * Manages the distinction between independent
// and dependent values in the encoding.
// * Attempts to change the URI using uriAnchor.
// * Returns true on success, and false on failure.
//
changeAnchorPart = function ( arg_map ) {
var
anchor_map_revise = copyAnchorMap(),
bool_return = true,
key_name, key_name_dep;
// Begin merge changes into anchor map
KEYVAL:
for ( key_name in arg_map ) {
if ( arg_map.hasOwnProperty( key_name ) ) {
// skip dependent keys during iteration
if ( key_name.indexOf( '_' ) === 0 ) { continue KEYVAL; }
// update independent key value
anchor_map_revise[key_name] = arg_map[key_name];
// update matching dependent key
key_name_dep = '_' + key_name;
if ( arg_map[key_name_dep] ) {
anchor_map_revise[key_name_dep] = arg_map[key_name_dep];
}
else {
delete anchor_map_revise[key_name_dep];
delete anchor_map_revise['_s' + key_name_dep];
}
}
}
// End merge changes into anchor map
// Begin attempt to update URI; revert if not successful
try {
$.uriAnchor.setAnchor( anchor_map_revise );
}
catch ( error ) {
// replace URI with existing state
$.uriAnchor.setAnchor( stateMap.anchor_map,null,true );
bool_return = false;
}
// End attempt to update URI...
return bool_return;
};
// End DOM method /changeAnchorPart/
//--------------------- END DOM METHODS ----------------------
//------------------- BEGIN EVENT HANDLERS -------------------
// Begin Event handler /onHashchange/
// Purpose : Handles the hashchange event
// Arguments :
// * event - jQuery event object.
// Settings : none
// Returns : false
// Actions :
// * Parses the URI anchor component
// * Compares proposed application state with current
// * Adjust the application only where proposed state
// differs from existing and is allowed by anchor schema
//
onHashchange = function ( event ) {
var
_s_chat_previous, _s_chat_proposed, s_chat_proposed,
anchor_map_proposed,
is_ok = true,
anchor_map_previous = copyAnchorMap();
// attempt to parse anchor
try { anchor_map_proposed = $.uriAnchor.makeAnchorMap(); }
catch ( error ) {
$.uriAnchor.setAnchor( anchor_map_previous, null, true );
return false;
}
stateMap.anchor_map = anchor_map_proposed;
// convenience vars
_s_chat_previous = anchor_map_previous._s_chat;
_s_chat_proposed = anchor_map_proposed._s_chat;
// Begin adjust chat component if changed
if ( ! anchor_map_previous
|| _s_chat_previous !== _s_chat_proposed
) |
// End adjust chat component if changed
// Begin revert anchor if slider change denied
if ( ! is_ok ) {
if ( anchor_map_previous ) {
$.uriAnchor.setAnchor( anchor_map_previous, null, true );
stateMap.anchor_map = anchor_map_previous;
}
else {
delete anchor_map_proposed.chat;
$.uriAnchor.setAnchor( anchor_map_proposed, null, true );
}
}
// End revert anchor if slider change denied
return false;
};
// End Event handler /onHashchange/
// Begin Event handler /onResize/
onResize = function () {
if ( stateMap.resize_idto ) { return true; }
spa.chat.handleResize();
stateMap.resize_idto = setTimeout(
function () { stateMap.resize_idto = undefined; },
configMap.resize_interval
);
return true;
};
// End Event handler /onResize/
onTapAcct = function ( event ) {
var acct_text, user_name, user = spa.model.people.get_user();
if ( user.get_is_anon() ) {
user_name = prompt( 'Please sign-in' );
spa.model.people.login( user_name );
jqueryMap.$acct.text( '... processing ...' );
}
else {
spa.model.people.logout();
}
return false;
};
onLogin = function ( event, login_user ) {
jqueryMap.$acct.text( login_user.name );
};
onLogout = function ( event, logout_user ) {
jqueryMap.$acct.text( 'Please sign-in' );
};
//-------------------- END EVENT HANDLERS --------------------
//---------------------- BEGIN CALLBACKS ---------------------
// Begin callback method /setChatAnchor/
// Example : setChatAnchor( 'closed' );
// Purpose : Change the chat component of the anchor
// Arguments:
// * position_type - may be 'closed' or 'opened'
// Action :
// Changes the URI anchor parameter 'chat' to the requested
// value if possible.
// Returns :
// * true - requested anchor part was updated
// * false - requested anchor part was not updated
// Throws : none
//
setChatAnchor = function ( position_type ) {
return changeAnchorPart({ chat : position_type });
};
// End callback method /setChatAnchor/
//----------------------- END CALLBACKS ----------------------
//------------------- BEGIN PUBLIC METHODS -------------------
// Begin Public method /initModule/
// Example : spa.shell.initModule( $('#app_div_id') );
// Purpose :
// Directs the Shell to offer its capability to the user
// Arguments :
// * $container (example: $('#app_div_id')).
// A jQuery collection that should represent
// a single DOM container
// Action :
// Populates $container with the shell of the UI
// and then configures and initializes feature modules.
// The Shell is also responsible for browser-wide issues
// such as URI anchor and cookie management
// Returns : none
// Throws : none
//
initModule = function ( $container ) {
// load HTML and map jQuery collections
stateMap.$container = $container;
$container.html( configMap.main_html );
setJqueryMap();
// configure uriAnchor to use our schema
$.uriAnchor.configModule({
schema_map : configMap.anchor_schema_map
});
// configure and initialize feature modules
spa.chat.configModule({
set_chat_anchor : setChatAnchor,
chat_model : spa.model.chat,
people_model : spa.model.people
});
spa.chat.initModule( jqueryMap.$container );
spa.avtr.configModule({
chat_model : spa.model.chat,
people_model : spa.model.people
});
spa.avtr.initModule( jqueryMap.$nav );
// Handle URI anchor change events.
// This is done /after/ all feature modules are configured
// and initialized, otherwise they will not be ready to handle
// the trigger event, which is used to ensure the anchor
// is considered on-load
//
$(window)
.bind( 'resize', onResize )
.bind( 'hashchange', onHashchange )
.trigger( 'hashchange' );
$.gevent.subscribe( $container, 'spa-login', onLogin );
$.gevent.subscribe( $container, 'spa-logout', onLogout );
jqueryMap.$acct
.text( 'Please sign-in')
.bind( 'utap', onTapAcct );
};
// End PUBLIC method /initModule/
return { initModule : initModule };
//------------------- END PUBLIC METHODS ---------------------
}());
| {
s_chat_proposed = anchor_map_proposed.chat;
switch ( s_chat_proposed ) {
case 'opened' :
is_ok = spa.chat.setSliderPosition( 'opened' );
break;
case 'closed' :
is_ok = spa.chat.setSliderPosition( 'closed' );
break;
default :
spa.chat.setSliderPosition( 'closed' );
delete anchor_map_proposed.chat;
$.uriAnchor.setAnchor( anchor_map_proposed, null, true );
}
} | conditional_block |
subreddit-picker-item-view.js | 'click .add': 'subscribe',
'click .remove': 'unsubscribe'
},
initialize: function(data) {
this.model = data.model;
},
subscribe: function(e) {
e.preventDefault()
e.stopPropagation()
var target = this.$(e.currentTarget)
target.removeClass('add').addClass('remove').html('unsubscribe')
var params = {
action: 'sub',
sr: this.model.get('name'),
sr_name: this.model.get('name'),
uh: $.cookie('modhash')
};
console.log(params)
this.api("api/subscribe", 'POST', params, function(data) {
console.log("subscribe done", data)
//edit the window and cookie
App.trigger('header:refreshSubreddits')
});
},
unsubscribe: function(e) {
e.preventDefault()
e.stopPropagation()
var target = this.$(e.currentTarget)
target.removeClass('remove').addClass('add').html('subscribe')
var params = {
action: 'unsub',
sr: this.model.get('name'),
uh: $.cookie('modhash')
};
console.log(params)
this.api("api/subscribe", 'POST', params, function(data) {
console.log("unsubscribe done", data)
App.trigger('header:refreshSubreddits')
});
}
});
}); | define(['App', 'jquery', 'underscore', 'backbone', 'hbs!template/subreddit-picker-item', 'view/basem-view'],
function(App, $, _, Backbone, SRPitemTmpl, BaseView) {
return BaseView.extend({
template: SRPitemTmpl,
events: { | random_line_split | |
logParser.py |
#
# DESCRIPTION: This script parses the given input bowtie and/or LAST files and creates a csv row of their data in the given output csv.
#
# AUTHOR: Chelsea Tymms
import sys, os.path
import argparse
def getOptions():
"""Function to pull in arguments from the command line"""
description="""This script takes an input fasta file of fusions and identifies all of the identical fusions."""
parser = argparse.ArgumentParser(description=description)
parser.add_argument("-bowtie", "--bowtie_log_names", dest="bowtie", action='store', required=False, nargs = '*', help="bowtie log file names [Optional]")
parser.add_argument("-last", "--last_log_names", dest="last", action='store', required=False, help="LAST log file names [Optional]")
parser.add_argument("-treatment","--treatment_name",dest="treatment",action='store',required=True,nargs= '*', help="Treatment variables [Required]")
parser.add_argument("-o","--output_file",dest="output",action='store',required=True,help="Output file name [Required]")
args = parser.parse_args()
if not args.bowtie and not args.last: #The user should give at least one bowtie or last log argument; otherwise the program does nothing
parser.error('No input logs given; add -bowtie or -last')
return(args)
def main():
args=getOptions()
treatmentArray=args.treatment
firstBowtieTot=0
finalBowtieUnaln=0
uniqAln=0
#If the output file already exists, we will append to it. If it does not, we will open it and write its header.
if os.path.isfile(args.output): #we will append
outputFile=open(args.output,'ab')
else: #write the header
outputFile=open(args.output,'w')
for i in range(1,len(treatmentArray)+1):
outputFile.write('t_var_'+str(i)+',')
if args.bowtie:
for i in range(1,len(args.bowtie)+1):
bowtieNum='bowtie'+str(i)
outputFile.write(','.join(bowtieNum+'_'+n for n in ['tot','aln','unaln','ambig','per_uniq','per_aln'])+',')
if args.last:
outputFile.write(','.join(['last_uniq','last_ambig','last_per_uniq','last_per_aln'])+',')
outputFile.write('per_uniq_aln'+'\n')
outputFile.write(','.join(str(i) for i in treatmentArray)+',')
if args.bowtie:
#Get some important counts from the first and the final bowtie logs
proc,aln,unaln,ambig=parseBowtieLog(args.bowtie[0])
firstBowtieTot=proc
proc,aln,unaln,ambig=parseBowtieLog(args.bowtie[-1])
finalBowtieUnaln=ambig+unaln
#Get and write the counts for each Bowtie log
for bowtieLog in args.bowtie:
proc,aln,unaln,ambig=(parseBowtieLog(bowtieLog))
perUniq,perAln=0,0
if proc!=0:
perUniq=float(aln)/proc * 100
perAln=(float(aln)+ambig)/proc * 100
uniqAln=uniqAln+aln
outputFile.write(','.join(str(i) for i in [proc,aln,unaln,ambig,perUniq,perAln])+',')
#Get and write the counts for the LAST log
if args.last:
lastLog=args.last
ambig,uniq=(parseLastLog(lastLog))
lastPerUniq,lastPerAln = 0,0
if finalBowtieUnaln!=0:
lastPerUniq=float(uniq)/finalBowtieUnaln * 100
lastPerAln=float(ambig)+uniq/finalBowtieUnaln * 100
uniqAln=uniqAln+uniq
outputFile.write(','.join(str(i) for i in [uniq,ambig,lastPerUniq,lastPerAln])+',')
perUniqAln= perUniqAln=float(uniqAln)/firstBowtieTot * 100 if firstBowtieTot!=0 else 0
outputFile.write(str(perUniqAln)+'\n')
outputFile.close()
def parseBowtieLog(fileName):
|
def parseLastLog(fileName):
"""Function to parse a LAST log file"""
if not os.path.isfile(fileName):
print "WARNING: " +fileName+" does not exist."
return 0,0
lastAmbig=0
lastUniq=0
with open(fileName,'rb') as lastLogFile:
for line in lastLogFile.readlines():
if "Ambiguously Aligned Reads" in line:
lastAmbig=line.split(':')[1].strip()
elif "Uniquely Aligned Reads" in line:
lastUniq=line.split(':')[1].strip()
return int(lastAmbig),int(lastUniq)
if __name__ == '__main__':
main()
| """Function to parse a bowtie log file"""
if not os.path.isfile(fileName):
print "WARNING: " +fileName+" does not exist."
return 0,0,0,0
processed,aligned,unaligned,ambig=0,0,0,0
with open(fileName,'rb') as bowtieLogFile:
for line in bowtieLogFile.readlines():
if 'reads processed' in line:
processed=line.split(':')[1].strip()
elif 'reads with at least one reported alignment' in line:
aligned=line.split(':')[1].split(' ')[1]
elif 'reads that failed to align' in line:
unaligned=line.split(':')[1].split(' ')[1]
elif 'reads with alignments suppressed' in line:
ambig=line.split(':')[1].split(' ')[1]
return int(processed),int(aligned),int(unaligned),int(ambig) | identifier_body |
logParser.py | #
# DESCRIPTION: This script parses the given input bowtie and/or LAST files and creates a csv row of their data in the given output csv.
#
# AUTHOR: Chelsea Tymms
import sys, os.path
import argparse
def getOptions():
"""Function to pull in arguments from the command line"""
description="""This script takes an input fasta file of fusions and identifies all of the identical fusions."""
parser = argparse.ArgumentParser(description=description)
parser.add_argument("-bowtie", "--bowtie_log_names", dest="bowtie", action='store', required=False, nargs = '*', help="bowtie log file names [Optional]")
parser.add_argument("-last", "--last_log_names", dest="last", action='store', required=False, help="LAST log file names [Optional]")
parser.add_argument("-treatment","--treatment_name",dest="treatment",action='store',required=True,nargs= '*', help="Treatment variables [Required]")
parser.add_argument("-o","--output_file",dest="output",action='store',required=True,help="Output file name [Required]")
args = parser.parse_args()
if not args.bowtie and not args.last: #The user should give at least one bowtie or last log argument; otherwise the program does nothing
parser.error('No input logs given; add -bowtie or -last')
return(args)
def main():
args=getOptions()
treatmentArray=args.treatment
firstBowtieTot=0
finalBowtieUnaln=0
uniqAln=0
#If the output file already exists, we will append to it. If it does not, we will open it and write its header.
if os.path.isfile(args.output): #we will append
outputFile=open(args.output,'ab')
else: #write the header
outputFile=open(args.output,'w')
for i in range(1,len(treatmentArray)+1):
outputFile.write('t_var_'+str(i)+',')
if args.bowtie:
for i in range(1,len(args.bowtie)+1):
bowtieNum='bowtie'+str(i)
outputFile.write(','.join(bowtieNum+'_'+n for n in ['tot','aln','unaln','ambig','per_uniq','per_aln'])+',')
if args.last:
outputFile.write(','.join(['last_uniq','last_ambig','last_per_uniq','last_per_aln'])+',')
outputFile.write('per_uniq_aln'+'\n')
outputFile.write(','.join(str(i) for i in treatmentArray)+',')
if args.bowtie:
#Get some important counts from the first and the final bowtie logs
proc,aln,unaln,ambig=parseBowtieLog(args.bowtie[0])
firstBowtieTot=proc
proc,aln,unaln,ambig=parseBowtieLog(args.bowtie[-1])
finalBowtieUnaln=ambig+unaln
#Get and write the counts for each Bowtie log
for bowtieLog in args.bowtie:
proc,aln,unaln,ambig=(parseBowtieLog(bowtieLog))
perUniq,perAln=0,0
if proc!=0:
perUniq=float(aln)/proc * 100
perAln=(float(aln)+ambig)/proc * 100
uniqAln=uniqAln+aln
outputFile.write(','.join(str(i) for i in [proc,aln,unaln,ambig,perUniq,perAln])+',')
#Get and write the counts for the LAST log
if args.last:
lastLog=args.last
ambig,uniq=(parseLastLog(lastLog))
lastPerUniq,lastPerAln = 0,0
if finalBowtieUnaln!=0:
lastPerUniq=float(uniq)/finalBowtieUnaln * 100
lastPerAln=float(ambig)+uniq/finalBowtieUnaln * 100
uniqAln=uniqAln+uniq
outputFile.write(','.join(str(i) for i in [uniq,ambig,lastPerUniq,lastPerAln])+',')
perUniqAln= perUniqAln=float(uniqAln)/firstBowtieTot * 100 if firstBowtieTot!=0 else 0
outputFile.write(str(perUniqAln)+'\n')
outputFile.close()
def parseBowtieLog(fileName):
"""Function to parse a bowtie log file"""
if not os.path.isfile(fileName):
print "WARNING: " +fileName+" does not exist."
return 0,0,0,0
processed,aligned,unaligned,ambig=0,0,0,0
with open(fileName,'rb') as bowtieLogFile:
for line in bowtieLogFile.readlines():
if 'reads processed' in line:
processed=line.split(':')[1].strip()
elif 'reads with at least one reported alignment' in line:
aligned=line.split(':')[1].split(' ')[1]
elif 'reads that failed to align' in line:
unaligned=line.split(':')[1].split(' ')[1]
elif 'reads with alignments suppressed' in line:
ambig=line.split(':')[1].split(' ')[1]
return int(processed),int(aligned),int(unaligned),int(ambig)
def parseLastLog(fileName):
"""Function to parse a LAST log file"""
if not os.path.isfile(fileName):
print "WARNING: " +fileName+" does not exist."
return 0,0
lastAmbig=0
lastUniq=0
with open(fileName,'rb') as lastLogFile:
for line in lastLogFile.readlines(): | return int(lastAmbig),int(lastUniq)
if __name__ == '__main__':
main() | if "Ambiguously Aligned Reads" in line:
lastAmbig=line.split(':')[1].strip()
elif "Uniquely Aligned Reads" in line:
lastUniq=line.split(':')[1].strip() | random_line_split |
logParser.py |
#
# DESCRIPTION: This script parses the given input bowtie and/or LAST files and creates a csv row of their data in the given output csv.
#
# AUTHOR: Chelsea Tymms
import sys, os.path
import argparse
def | ():
"""Function to pull in arguments from the command line"""
description="""This script takes an input fasta file of fusions and identifies all of the identical fusions."""
parser = argparse.ArgumentParser(description=description)
parser.add_argument("-bowtie", "--bowtie_log_names", dest="bowtie", action='store', required=False, nargs = '*', help="bowtie log file names [Optional]")
parser.add_argument("-last", "--last_log_names", dest="last", action='store', required=False, help="LAST log file names [Optional]")
parser.add_argument("-treatment","--treatment_name",dest="treatment",action='store',required=True,nargs= '*', help="Treatment variables [Required]")
parser.add_argument("-o","--output_file",dest="output",action='store',required=True,help="Output file name [Required]")
args = parser.parse_args()
if not args.bowtie and not args.last: #The user should give at least one bowtie or last log argument; otherwise the program does nothing
parser.error('No input logs given; add -bowtie or -last')
return(args)
def main():
args=getOptions()
treatmentArray=args.treatment
firstBowtieTot=0
finalBowtieUnaln=0
uniqAln=0
#If the output file already exists, we will append to it. If it does not, we will open it and write its header.
if os.path.isfile(args.output): #we will append
outputFile=open(args.output,'ab')
else: #write the header
outputFile=open(args.output,'w')
for i in range(1,len(treatmentArray)+1):
outputFile.write('t_var_'+str(i)+',')
if args.bowtie:
for i in range(1,len(args.bowtie)+1):
bowtieNum='bowtie'+str(i)
outputFile.write(','.join(bowtieNum+'_'+n for n in ['tot','aln','unaln','ambig','per_uniq','per_aln'])+',')
if args.last:
outputFile.write(','.join(['last_uniq','last_ambig','last_per_uniq','last_per_aln'])+',')
outputFile.write('per_uniq_aln'+'\n')
outputFile.write(','.join(str(i) for i in treatmentArray)+',')
if args.bowtie:
#Get some important counts from the first and the final bowtie logs
proc,aln,unaln,ambig=parseBowtieLog(args.bowtie[0])
firstBowtieTot=proc
proc,aln,unaln,ambig=parseBowtieLog(args.bowtie[-1])
finalBowtieUnaln=ambig+unaln
#Get and write the counts for each Bowtie log
for bowtieLog in args.bowtie:
proc,aln,unaln,ambig=(parseBowtieLog(bowtieLog))
perUniq,perAln=0,0
if proc!=0:
perUniq=float(aln)/proc * 100
perAln=(float(aln)+ambig)/proc * 100
uniqAln=uniqAln+aln
outputFile.write(','.join(str(i) for i in [proc,aln,unaln,ambig,perUniq,perAln])+',')
#Get and write the counts for the LAST log
if args.last:
lastLog=args.last
ambig,uniq=(parseLastLog(lastLog))
lastPerUniq,lastPerAln = 0,0
if finalBowtieUnaln!=0:
lastPerUniq=float(uniq)/finalBowtieUnaln * 100
lastPerAln=float(ambig)+uniq/finalBowtieUnaln * 100
uniqAln=uniqAln+uniq
outputFile.write(','.join(str(i) for i in [uniq,ambig,lastPerUniq,lastPerAln])+',')
perUniqAln= perUniqAln=float(uniqAln)/firstBowtieTot * 100 if firstBowtieTot!=0 else 0
outputFile.write(str(perUniqAln)+'\n')
outputFile.close()
def parseBowtieLog(fileName):
"""Function to parse a bowtie log file"""
if not os.path.isfile(fileName):
print "WARNING: " +fileName+" does not exist."
return 0,0,0,0
processed,aligned,unaligned,ambig=0,0,0,0
with open(fileName,'rb') as bowtieLogFile:
for line in bowtieLogFile.readlines():
if 'reads processed' in line:
processed=line.split(':')[1].strip()
elif 'reads with at least one reported alignment' in line:
aligned=line.split(':')[1].split(' ')[1]
elif 'reads that failed to align' in line:
unaligned=line.split(':')[1].split(' ')[1]
elif 'reads with alignments suppressed' in line:
ambig=line.split(':')[1].split(' ')[1]
return int(processed),int(aligned),int(unaligned),int(ambig)
def parseLastLog(fileName):
"""Function to parse a LAST log file"""
if not os.path.isfile(fileName):
print "WARNING: " +fileName+" does not exist."
return 0,0
lastAmbig=0
lastUniq=0
with open(fileName,'rb') as lastLogFile:
for line in lastLogFile.readlines():
if "Ambiguously Aligned Reads" in line:
lastAmbig=line.split(':')[1].strip()
elif "Uniquely Aligned Reads" in line:
lastUniq=line.split(':')[1].strip()
return int(lastAmbig),int(lastUniq)
if __name__ == '__main__':
main()
| getOptions | identifier_name |
logParser.py |
#
# DESCRIPTION: This script parses the given input bowtie and/or LAST files and creates a csv row of their data in the given output csv.
#
# AUTHOR: Chelsea Tymms
import sys, os.path
import argparse
def getOptions():
"""Function to pull in arguments from the command line"""
description="""This script takes an input fasta file of fusions and identifies all of the identical fusions."""
parser = argparse.ArgumentParser(description=description)
parser.add_argument("-bowtie", "--bowtie_log_names", dest="bowtie", action='store', required=False, nargs = '*', help="bowtie log file names [Optional]")
parser.add_argument("-last", "--last_log_names", dest="last", action='store', required=False, help="LAST log file names [Optional]")
parser.add_argument("-treatment","--treatment_name",dest="treatment",action='store',required=True,nargs= '*', help="Treatment variables [Required]")
parser.add_argument("-o","--output_file",dest="output",action='store',required=True,help="Output file name [Required]")
args = parser.parse_args()
if not args.bowtie and not args.last: #The user should give at least one bowtie or last log argument; otherwise the program does nothing
parser.error('No input logs given; add -bowtie or -last')
return(args)
def main():
args=getOptions()
treatmentArray=args.treatment
firstBowtieTot=0
finalBowtieUnaln=0
uniqAln=0
#If the output file already exists, we will append to it. If it does not, we will open it and write its header.
if os.path.isfile(args.output): #we will append
outputFile=open(args.output,'ab')
else: #write the header
|
outputFile.write(','.join(str(i) for i in treatmentArray)+',')
if args.bowtie:
#Get some important counts from the first and the final bowtie logs
proc,aln,unaln,ambig=parseBowtieLog(args.bowtie[0])
firstBowtieTot=proc
proc,aln,unaln,ambig=parseBowtieLog(args.bowtie[-1])
finalBowtieUnaln=ambig+unaln
#Get and write the counts for each Bowtie log
for bowtieLog in args.bowtie:
proc,aln,unaln,ambig=(parseBowtieLog(bowtieLog))
perUniq,perAln=0,0
if proc!=0:
perUniq=float(aln)/proc * 100
perAln=(float(aln)+ambig)/proc * 100
uniqAln=uniqAln+aln
outputFile.write(','.join(str(i) for i in [proc,aln,unaln,ambig,perUniq,perAln])+',')
#Get and write the counts for the LAST log
if args.last:
lastLog=args.last
ambig,uniq=(parseLastLog(lastLog))
lastPerUniq,lastPerAln = 0,0
if finalBowtieUnaln!=0:
lastPerUniq=float(uniq)/finalBowtieUnaln * 100
lastPerAln=float(ambig)+uniq/finalBowtieUnaln * 100
uniqAln=uniqAln+uniq
outputFile.write(','.join(str(i) for i in [uniq,ambig,lastPerUniq,lastPerAln])+',')
perUniqAln= perUniqAln=float(uniqAln)/firstBowtieTot * 100 if firstBowtieTot!=0 else 0
outputFile.write(str(perUniqAln)+'\n')
outputFile.close()
def parseBowtieLog(fileName):
"""Function to parse a bowtie log file"""
if not os.path.isfile(fileName):
print "WARNING: " +fileName+" does not exist."
return 0,0,0,0
processed,aligned,unaligned,ambig=0,0,0,0
with open(fileName,'rb') as bowtieLogFile:
for line in bowtieLogFile.readlines():
if 'reads processed' in line:
processed=line.split(':')[1].strip()
elif 'reads with at least one reported alignment' in line:
aligned=line.split(':')[1].split(' ')[1]
elif 'reads that failed to align' in line:
unaligned=line.split(':')[1].split(' ')[1]
elif 'reads with alignments suppressed' in line:
ambig=line.split(':')[1].split(' ')[1]
return int(processed),int(aligned),int(unaligned),int(ambig)
def parseLastLog(fileName):
"""Function to parse a LAST log file"""
if not os.path.isfile(fileName):
print "WARNING: " +fileName+" does not exist."
return 0,0
lastAmbig=0
lastUniq=0
with open(fileName,'rb') as lastLogFile:
for line in lastLogFile.readlines():
if "Ambiguously Aligned Reads" in line:
lastAmbig=line.split(':')[1].strip()
elif "Uniquely Aligned Reads" in line:
lastUniq=line.split(':')[1].strip()
return int(lastAmbig),int(lastUniq)
if __name__ == '__main__':
main()
| outputFile=open(args.output,'w')
for i in range(1,len(treatmentArray)+1):
outputFile.write('t_var_'+str(i)+',')
if args.bowtie:
for i in range(1,len(args.bowtie)+1):
bowtieNum='bowtie'+str(i)
outputFile.write(','.join(bowtieNum+'_'+n for n in ['tot','aln','unaln','ambig','per_uniq','per_aln'])+',')
if args.last:
outputFile.write(','.join(['last_uniq','last_ambig','last_per_uniq','last_per_aln'])+',')
outputFile.write('per_uniq_aln'+'\n') | conditional_block |
sync.js | /**
* @license Apache-2.0
*
* Copyright (c) 2018 The Stdlib Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';
// MODULES //
var resolve = require( 'path' ).resolve;
var glob = require( 'glob' ).sync;
var cwd = require( '@stdlib/process/cwd' );
var copy = require( '@stdlib/utils/copy' );
var DEFAULTS = require( './defaults.json' );
var validate = require( './validate.js' );
var linter = require( './lint.js' );
var IGNORE = require( './ignore_patterns.json' );
// MAIN //
/**
* Synchronously lints filenames.
*
* @param {Options} [options] - function options
* @param {string} [options.dir] - root directory from which to search for files
* @param {string} [options.pattern='**\/*'] - filename pattern
* @throws {TypeError} options argument must be an object
* @throws {TypeError} must provide valid options
* @returns {(ObjectArray|EmptyArray)} list of lint errors
*
* @example
* var errs = lint();
* // returns [...]
*/
function lint( options ) {
var pattern;
var names;
var opts;
var err;
var dir;
opts = copy( DEFAULTS );
if ( arguments.length ) |
if ( opts.dir ) {
dir = resolve( cwd(), opts.dir );
} else {
dir = cwd();
}
pattern = opts.pattern;
opts = {
'cwd': dir,
'ignore': IGNORE,
'nodir': true // do not match directories
};
names = glob( pattern, opts );
return linter( names );
}
// EXPORTS //
module.exports = lint;
| {
err = validate( opts, options );
if ( err ) {
throw err;
}
} | conditional_block |
sync.js | /**
* @license Apache-2.0
*
* Copyright (c) 2018 The Stdlib Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';
// MODULES //
var resolve = require( 'path' ).resolve;
var glob = require( 'glob' ).sync;
var cwd = require( '@stdlib/process/cwd' );
var copy = require( '@stdlib/utils/copy' );
var DEFAULTS = require( './defaults.json' );
var validate = require( './validate.js' );
var linter = require( './lint.js' );
var IGNORE = require( './ignore_patterns.json' );
// MAIN //
/**
* Synchronously lints filenames.
*
* @param {Options} [options] - function options
* @param {string} [options.dir] - root directory from which to search for files
* @param {string} [options.pattern='**\/*'] - filename pattern
* @throws {TypeError} options argument must be an object
* @throws {TypeError} must provide valid options
* @returns {(ObjectArray|EmptyArray)} list of lint errors
*
* @example
* var errs = lint();
* // returns [...]
*/
function | ( options ) {
var pattern;
var names;
var opts;
var err;
var dir;
opts = copy( DEFAULTS );
if ( arguments.length ) {
err = validate( opts, options );
if ( err ) {
throw err;
}
}
if ( opts.dir ) {
dir = resolve( cwd(), opts.dir );
} else {
dir = cwd();
}
pattern = opts.pattern;
opts = {
'cwd': dir,
'ignore': IGNORE,
'nodir': true // do not match directories
};
names = glob( pattern, opts );
return linter( names );
}
// EXPORTS //
module.exports = lint;
| lint | identifier_name |
sync.js | /**
* @license Apache-2.0 | * You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';
// MODULES //
var resolve = require( 'path' ).resolve;
var glob = require( 'glob' ).sync;
var cwd = require( '@stdlib/process/cwd' );
var copy = require( '@stdlib/utils/copy' );
var DEFAULTS = require( './defaults.json' );
var validate = require( './validate.js' );
var linter = require( './lint.js' );
var IGNORE = require( './ignore_patterns.json' );
// MAIN //
/**
* Synchronously lints filenames.
*
* @param {Options} [options] - function options
* @param {string} [options.dir] - root directory from which to search for files
* @param {string} [options.pattern='**\/*'] - filename pattern
* @throws {TypeError} options argument must be an object
* @throws {TypeError} must provide valid options
* @returns {(ObjectArray|EmptyArray)} list of lint errors
*
* @example
* var errs = lint();
* // returns [...]
*/
function lint( options ) {
var pattern;
var names;
var opts;
var err;
var dir;
opts = copy( DEFAULTS );
if ( arguments.length ) {
err = validate( opts, options );
if ( err ) {
throw err;
}
}
if ( opts.dir ) {
dir = resolve( cwd(), opts.dir );
} else {
dir = cwd();
}
pattern = opts.pattern;
opts = {
'cwd': dir,
'ignore': IGNORE,
'nodir': true // do not match directories
};
names = glob( pattern, opts );
return linter( names );
}
// EXPORTS //
module.exports = lint; | *
* Copyright (c) 2018 The Stdlib Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License. | random_line_split |
sync.js | /**
* @license Apache-2.0
*
* Copyright (c) 2018 The Stdlib Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';
// MODULES //
var resolve = require( 'path' ).resolve;
var glob = require( 'glob' ).sync;
var cwd = require( '@stdlib/process/cwd' );
var copy = require( '@stdlib/utils/copy' );
var DEFAULTS = require( './defaults.json' );
var validate = require( './validate.js' );
var linter = require( './lint.js' );
var IGNORE = require( './ignore_patterns.json' );
// MAIN //
/**
* Synchronously lints filenames.
*
* @param {Options} [options] - function options
* @param {string} [options.dir] - root directory from which to search for files
* @param {string} [options.pattern='**\/*'] - filename pattern
* @throws {TypeError} options argument must be an object
* @throws {TypeError} must provide valid options
* @returns {(ObjectArray|EmptyArray)} list of lint errors
*
* @example
* var errs = lint();
* // returns [...]
*/
function lint( options ) |
// EXPORTS //
module.exports = lint;
| {
var pattern;
var names;
var opts;
var err;
var dir;
opts = copy( DEFAULTS );
if ( arguments.length ) {
err = validate( opts, options );
if ( err ) {
throw err;
}
}
if ( opts.dir ) {
dir = resolve( cwd(), opts.dir );
} else {
dir = cwd();
}
pattern = opts.pattern;
opts = {
'cwd': dir,
'ignore': IGNORE,
'nodir': true // do not match directories
};
names = glob( pattern, opts );
return linter( names );
} | identifier_body |
syst_rvr.rs | #[doc = "Register `SYST_RVR` reader"]
pub struct R(crate::R<SYST_RVR_SPEC>);
impl core::ops::Deref for R {
type Target = crate::R<SYST_RVR_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl From<crate::R<SYST_RVR_SPEC>> for R {
#[inline(always)]
fn from(reader: crate::R<SYST_RVR_SPEC>) -> Self {
R(reader)
}
}
#[doc = "Register `SYST_RVR` writer"]
pub struct W(crate::W<SYST_RVR_SPEC>);
impl core::ops::Deref for W {
type Target = crate::W<SYST_RVR_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl core::ops::DerefMut for W {
#[inline(always)]
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl From<crate::W<SYST_RVR_SPEC>> for W {
#[inline(always)]
fn from(writer: crate::W<SYST_RVR_SPEC>) -> Self {
W(writer)
}
}
#[doc = "Field `RELOAD` reader - Reload Value"]
pub struct RELOAD_R(crate::FieldReader<u32, u32>);
impl RELOAD_R {
pub(crate) fn new(bits: u32) -> Self {
RELOAD_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for RELOAD_R {
type Target = crate::FieldReader<u32, u32>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `RELOAD` writer - Reload Value"]
pub struct RELOAD_W<'a> {
w: &'a mut W,
}
impl<'a> RELOAD_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u32) -> &'a mut W {
self.w.bits = (self.w.bits & !0x00ff_ffff) | (value as u32 & 0x00ff_ffff);
self.w
}
}
impl R {
#[doc = "Bits 0:23 - Reload Value"]
#[inline(always)]
pub fn reload(&self) -> RELOAD_R {
RELOAD_R::new((self.bits & 0x00ff_ffff) as u32)
}
}
impl W {
#[doc = "Bits 0:23 - Reload Value"]
#[inline(always)]
pub fn reload(&mut self) -> RELOAD_W {
RELOAD_W { w: self }
}
#[doc = "Writes raw bits to the register."]
#[inline(always)]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.0.bits(bits);
self
}
}
#[doc = "SysTick Reload Value Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [syst_rvr](index.html) module"]
pub struct SYST_RVR_SPEC;
impl crate::RegisterSpec for SYST_RVR_SPEC {
type Ux = u32;
}
#[doc = "`read()` method returns [syst_rvr::R](R) reader structure"]
impl crate::Readable for SYST_RVR_SPEC {
type Reader = R;
}
#[doc = "`write(|w| ..)` method takes [syst_rvr::W](W) writer structure"]
impl crate::Writable for SYST_RVR_SPEC {
type Writer = W;
} | 0
}
} | #[doc = "`reset()` method sets SYST_RVR to value 0"]
impl crate::Resettable for SYST_RVR_SPEC {
#[inline(always)]
fn reset_value() -> Self::Ux { | random_line_split |
syst_rvr.rs | #[doc = "Register `SYST_RVR` reader"]
pub struct R(crate::R<SYST_RVR_SPEC>);
impl core::ops::Deref for R {
type Target = crate::R<SYST_RVR_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl From<crate::R<SYST_RVR_SPEC>> for R {
#[inline(always)]
fn from(reader: crate::R<SYST_RVR_SPEC>) -> Self {
R(reader)
}
}
#[doc = "Register `SYST_RVR` writer"]
pub struct W(crate::W<SYST_RVR_SPEC>);
impl core::ops::Deref for W {
type Target = crate::W<SYST_RVR_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl core::ops::DerefMut for W {
#[inline(always)]
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl From<crate::W<SYST_RVR_SPEC>> for W {
#[inline(always)]
fn from(writer: crate::W<SYST_RVR_SPEC>) -> Self {
W(writer)
}
}
#[doc = "Field `RELOAD` reader - Reload Value"]
pub struct RELOAD_R(crate::FieldReader<u32, u32>);
impl RELOAD_R {
pub(crate) fn new(bits: u32) -> Self {
RELOAD_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for RELOAD_R {
type Target = crate::FieldReader<u32, u32>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `RELOAD` writer - Reload Value"]
pub struct RELOAD_W<'a> {
w: &'a mut W,
}
impl<'a> RELOAD_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u32) -> &'a mut W {
self.w.bits = (self.w.bits & !0x00ff_ffff) | (value as u32 & 0x00ff_ffff);
self.w
}
}
impl R {
#[doc = "Bits 0:23 - Reload Value"]
#[inline(always)]
pub fn reload(&self) -> RELOAD_R {
RELOAD_R::new((self.bits & 0x00ff_ffff) as u32)
}
}
impl W {
#[doc = "Bits 0:23 - Reload Value"]
#[inline(always)]
pub fn reload(&mut self) -> RELOAD_W {
RELOAD_W { w: self }
}
#[doc = "Writes raw bits to the register."]
#[inline(always)]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self |
}
#[doc = "SysTick Reload Value Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [syst_rvr](index.html) module"]
pub struct SYST_RVR_SPEC;
impl crate::RegisterSpec for SYST_RVR_SPEC {
type Ux = u32;
}
#[doc = "`read()` method returns [syst_rvr::R](R) reader structure"]
impl crate::Readable for SYST_RVR_SPEC {
type Reader = R;
}
#[doc = "`write(|w| ..)` method takes [syst_rvr::W](W) writer structure"]
impl crate::Writable for SYST_RVR_SPEC {
type Writer = W;
}
#[doc = "`reset()` method sets SYST_RVR to value 0"]
impl crate::Resettable for SYST_RVR_SPEC {
#[inline(always)]
fn reset_value() -> Self::Ux {
0
}
}
| {
self.0.bits(bits);
self
} | identifier_body |
syst_rvr.rs | #[doc = "Register `SYST_RVR` reader"]
pub struct R(crate::R<SYST_RVR_SPEC>);
impl core::ops::Deref for R {
type Target = crate::R<SYST_RVR_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl From<crate::R<SYST_RVR_SPEC>> for R {
#[inline(always)]
fn from(reader: crate::R<SYST_RVR_SPEC>) -> Self {
R(reader)
}
}
#[doc = "Register `SYST_RVR` writer"]
pub struct W(crate::W<SYST_RVR_SPEC>);
impl core::ops::Deref for W {
type Target = crate::W<SYST_RVR_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl core::ops::DerefMut for W {
#[inline(always)]
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl From<crate::W<SYST_RVR_SPEC>> for W {
#[inline(always)]
fn from(writer: crate::W<SYST_RVR_SPEC>) -> Self {
W(writer)
}
}
#[doc = "Field `RELOAD` reader - Reload Value"]
pub struct RELOAD_R(crate::FieldReader<u32, u32>);
impl RELOAD_R {
pub(crate) fn new(bits: u32) -> Self {
RELOAD_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for RELOAD_R {
type Target = crate::FieldReader<u32, u32>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `RELOAD` writer - Reload Value"]
pub struct RELOAD_W<'a> {
w: &'a mut W,
}
impl<'a> RELOAD_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn | (self, value: u32) -> &'a mut W {
self.w.bits = (self.w.bits & !0x00ff_ffff) | (value as u32 & 0x00ff_ffff);
self.w
}
}
impl R {
#[doc = "Bits 0:23 - Reload Value"]
#[inline(always)]
pub fn reload(&self) -> RELOAD_R {
RELOAD_R::new((self.bits & 0x00ff_ffff) as u32)
}
}
impl W {
#[doc = "Bits 0:23 - Reload Value"]
#[inline(always)]
pub fn reload(&mut self) -> RELOAD_W {
RELOAD_W { w: self }
}
#[doc = "Writes raw bits to the register."]
#[inline(always)]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.0.bits(bits);
self
}
}
#[doc = "SysTick Reload Value Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [syst_rvr](index.html) module"]
pub struct SYST_RVR_SPEC;
impl crate::RegisterSpec for SYST_RVR_SPEC {
type Ux = u32;
}
#[doc = "`read()` method returns [syst_rvr::R](R) reader structure"]
impl crate::Readable for SYST_RVR_SPEC {
type Reader = R;
}
#[doc = "`write(|w| ..)` method takes [syst_rvr::W](W) writer structure"]
impl crate::Writable for SYST_RVR_SPEC {
type Writer = W;
}
#[doc = "`reset()` method sets SYST_RVR to value 0"]
impl crate::Resettable for SYST_RVR_SPEC {
#[inline(always)]
fn reset_value() -> Self::Ux {
0
}
}
| bits | identifier_name |
index.js | /**
* @license Apache-2.0
*
* Copyright (c) 2020 The Stdlib Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';
var linspace = require( '@stdlib/array/base/linspace' );
var acot = require( './../lib' );
var x = linspace( -5.0, 5.0, 100 );
var i;
for ( i = 0; i < x.length; i++ ) | {
console.log( 'acot(%d) = %d', x[ i ], acot( x[ i ] ) );
} | conditional_block | |
index.js | /**
* @license Apache-2.0
*
* Copyright (c) 2020 The Stdlib Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';
var linspace = require( '@stdlib/array/base/linspace' );
var acot = require( './../lib' );
| for ( i = 0; i < x.length; i++ ) {
console.log( 'acot(%d) = %d', x[ i ], acot( x[ i ] ) );
} | var x = linspace( -5.0, 5.0, 100 );
var i; | random_line_split |
upgrade.d.ts | /**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import { ApplicationRef, OpaqueToken } from '@angular/core';
import { ExtraOptions, RouterPreloader } from '@angular/router';
import { UpgradeModule } from '@angular/upgrade/static';
/**
* @whatItDoes Creates an initializer that in addition to setting up the Angular 2
* router sets up the ngRoute integration.
*
* @howToUse
*
* ```
* @NgModule({
* imports: [
* RouterModule.forRoot(SOME_ROUTES),
* UpgradeModule | * export class AppModule {
* ngDoBootstrap() {}
* }
* ```
*
* @experimental
*/
export declare const RouterUpgradeInitializer: {
provide: OpaqueToken;
useFactory: (ngUpgrade: UpgradeModule, ref: ApplicationRef, preloader: RouterPreloader, opts: ExtraOptions) => Function;
deps: (OpaqueToken | typeof UpgradeModule | typeof ApplicationRef | typeof RouterPreloader)[];
};
/**
* @whatItDoes Sets up a location synchronization.
*
* History.pushState does not fire onPopState, so the angular2 location
* doesn't detect it. The workaround is to attach a location change listener
*
* @experimental
*/
export declare function setUpLocationSync(ngUpgrade: UpgradeModule): void; | * ],
* providers: [
* RouterUpgradeInitializer
* ]
* }) | random_line_split |
LineView.js | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
// FIXME step not support polar
import {__DEV__} from '../../config';
import * as zrUtil from 'zrender/src/core/util';
import SymbolDraw from '../helper/SymbolDraw';
import SymbolClz from '../helper/Symbol';
import lineAnimationDiff from './lineAnimationDiff';
import * as graphic from '../../util/graphic';
import * as modelUtil from '../../util/model';
import {Polyline, Polygon} from './poly';
import ChartView from '../../view/Chart';
import {round} from '../../util/number';
import {prepareDataCoordInfo, getStackedOnPoint} from './helper';
function isPointsSame(points1, points2) {
if (points1.length !== points2.length) {
return;
}
for (var i = 0; i < points1.length; i++) {
var p1 = points1[i];
var p2 = points2[i];
if (p1[0] !== p2[0] || p1[1] !== p2[1]) {
return;
}
}
return true;
}
function getSmooth(smooth) {
return typeof (smooth) === 'number' ? smooth : (smooth ? 0.5 : 0);
}
function getAxisExtentWithGap(axis) {
var extent = axis.getGlobalExtent();
if (axis.onBand) {
// Remove extra 1px to avoid line miter in clipped edge
var halfBandWidth = axis.getBandWidth() / 2 - 1;
var dir = extent[1] > extent[0] ? 1 : -1;
extent[0] += dir * halfBandWidth;
extent[1] -= dir * halfBandWidth;
}
return extent;
}
/**
* @param {module:echarts/coord/cartesian/Cartesian2D|module:echarts/coord/polar/Polar} coordSys
* @param {module:echarts/data/List} data
* @param {Object} dataCoordInfo
* @param {Array.<Array.<number>>} points
*/
function getStackedOnPoints(coordSys, data, dataCoordInfo) {
if (!dataCoordInfo.valueDim) {
return [];
}
var points = [];
for (var idx = 0, len = data.count(); idx < len; idx++) {
points.push(getStackedOnPoint(dataCoordInfo, coordSys, data, idx));
}
return points;
}
function createGridClipShape(cartesian, hasAnimation, forSymbol, seriesModel) {
var xExtent = getAxisExtentWithGap(cartesian.getAxis('x'));
var yExtent = getAxisExtentWithGap(cartesian.getAxis('y'));
var isHorizontal = cartesian.getBaseAxis().isHorizontal();
var x = Math.min(xExtent[0], xExtent[1]);
var y = Math.min(yExtent[0], yExtent[1]);
var width = Math.max(xExtent[0], xExtent[1]) - x;
var height = Math.max(yExtent[0], yExtent[1]) - y;
// Avoid float number rounding error for symbol on the edge of axis extent.
// See #7913 and `test/dataZoom-clip.html`.
if (forSymbol) {
x -= 0.5;
width += 0.5;
y -= 0.5;
height += 0.5;
}
else {
var lineWidth = seriesModel.get('lineStyle.width') || 2;
// Expand clip shape to avoid clipping when line value exceeds axis
var expandSize = seriesModel.get('clipOverflow') ? lineWidth / 2 : Math.max(width, height);
if (isHorizontal) {
y -= expandSize;
height += expandSize * 2;
}
else {
x -= expandSize;
width += expandSize * 2;
}
}
var clipPath = new graphic.Rect({
shape: {
x: x,
y: y,
width: width,
height: height
}
});
if (hasAnimation) {
clipPath.shape[isHorizontal ? 'width' : 'height'] = 0;
graphic.initProps(clipPath, {
shape: {
width: width,
height: height
}
}, seriesModel);
}
return clipPath;
}
function createPolarClipShape(polar, hasAnimation, forSymbol, seriesModel) {
var angleAxis = polar.getAngleAxis();
var radiusAxis = polar.getRadiusAxis();
var radiusExtent = radiusAxis.getExtent().slice();
radiusExtent[0] > radiusExtent[1] && radiusExtent.reverse();
var angleExtent = angleAxis.getExtent();
var RADIAN = Math.PI / 180;
// Avoid float number rounding error for symbol on the edge of axis extent.
if (forSymbol) {
radiusExtent[0] -= 0.5;
radiusExtent[1] += 0.5;
}
var clipPath = new graphic.Sector({
shape: {
cx: round(polar.cx, 1),
cy: round(polar.cy, 1),
r0: round(radiusExtent[0], 1),
r: round(radiusExtent[1], 1),
startAngle: -angleExtent[0] * RADIAN,
endAngle: -angleExtent[1] * RADIAN,
clockwise: angleAxis.inverse
}
});
if (hasAnimation) {
clipPath.shape.endAngle = -angleExtent[0] * RADIAN;
graphic.initProps(clipPath, {
shape: {
endAngle: -angleExtent[1] * RADIAN
}
}, seriesModel);
}
return clipPath;
}
function createClipShape(coordSys, hasAnimation, forSymbol, seriesModel) {
return coordSys.type === 'polar'
? createPolarClipShape(coordSys, hasAnimation, forSymbol, seriesModel)
: createGridClipShape(coordSys, hasAnimation, forSymbol, seriesModel);
}
function turnPointsIntoStep(points, coordSys, stepTurnAt) {
var baseAxis = coordSys.getBaseAxis();
var baseIndex = baseAxis.dim === 'x' || baseAxis.dim === 'radius' ? 0 : 1;
var stepPoints = [];
for (var i = 0; i < points.length - 1; i++) {
var nextPt = points[i + 1];
var pt = points[i];
stepPoints.push(pt);
var stepPt = [];
switch (stepTurnAt) {
case 'end':
stepPt[baseIndex] = nextPt[baseIndex];
stepPt[1 - baseIndex] = pt[1 - baseIndex];
// default is start
stepPoints.push(stepPt);
break;
case 'middle':
// default is start
var middle = (pt[baseIndex] + nextPt[baseIndex]) / 2;
var stepPt2 = [];
stepPt[baseIndex] = stepPt2[baseIndex] = middle;
stepPt[1 - baseIndex] = pt[1 - baseIndex];
stepPt2[1 - baseIndex] = nextPt[1 - baseIndex];
stepPoints.push(stepPt);
stepPoints.push(stepPt2);
break;
default:
stepPt[baseIndex] = pt[baseIndex];
stepPt[1 - baseIndex] = nextPt[1 - baseIndex];
// default is start
stepPoints.push(stepPt);
}
}
// Last points
points[i] && stepPoints.push(points[i]);
return stepPoints;
}
function getVisualGradient(data, coordSys) {
var visualMetaList = data.getVisual('visualMeta');
if (!visualMetaList || !visualMetaList.length || !data.count()) {
// When data.count() is 0, gradient range can not be calculated.
return;
}
if (coordSys.type !== 'cartesian2d') {
if (__DEV__) {
console.warn('Visual map on line style is only supported on cartesian2d.');
}
return;
}
var coordDim;
var visualMeta;
for (var i = visualMetaList.length - 1; i >= 0; i--) {
var dimIndex = visualMetaList[i].dimension;
var dimName = data.dimensions[dimIndex];
var dimInfo = data.getDimensionInfo(dimName);
coordDim = dimInfo && dimInfo.coordDim;
// Can only be x or y
if (coordDim === 'x' || coordDim === 'y') |
}
if (!visualMeta) {
if (__DEV__) {
console.warn('Visual map on line style only support x or y dimension.');
}
return;
}
// If the area to be rendered is bigger than area defined by LinearGradient,
// the canvas spec prescribes that the color of the first stop and the last
// stop should be used. But if two stops are added at offset 0, in effect
// browsers use the color of the second stop to render area outside
// LinearGradient. So we can only infinitesimally extend area defined in
// LinearGradient to render `outerColors`.
var axis = coordSys.getAxis(coordDim);
// dataToCoor mapping may not be linear, but must be monotonic.
var colorStops = zrUtil.map(visualMeta.stops, function (stop) {
return {
coord: axis.toGlobalCoord(axis.dataToCoord(stop.value)),
color: stop.color
};
});
var stopLen = colorStops.length;
var outerColors = visualMeta.outerColors.slice();
if (stopLen && colorStops[0].coord > colorStops[stopLen - 1].coord) {
colorStops.reverse();
outerColors.reverse();
}
var tinyExtent = 10; // Arbitrary value: 10px
var minCoord = colorStops[0].coord - tinyExtent;
var maxCoord = colorStops[stopLen - 1].coord + tinyExtent;
var coordSpan = maxCoord - minCoord;
if (coordSpan < 1e-3) {
return 'transparent';
}
zrUtil.each(colorStops, function (stop) {
stop.offset = (stop.coord - minCoord) / coordSpan;
});
colorStops.push({
offset: stopLen ? colorStops[stopLen - 1].offset : 0.5,
color: outerColors[1] || 'transparent'
});
colorStops.unshift({ // notice colorStops.length have been changed.
offset: stopLen ? colorStops[0].offset : 0.5,
color: outerColors[0] || 'transparent'
});
// zrUtil.each(colorStops, function (colorStop) {
// // Make sure each offset has rounded px to avoid not sharp edge
// colorStop.offset = (Math.round(colorStop.offset * (end - start) + start) - start) / (end - start);
// });
var gradient = new graphic.LinearGradient(0, 0, 0, 0, colorStops, true);
gradient[coordDim] = minCoord;
gradient[coordDim + '2'] = maxCoord;
return gradient;
}
function getIsIgnoreFunc(seriesModel, data, coordSys) {
var showAllSymbol = seriesModel.get('showAllSymbol');
var isAuto = showAllSymbol === 'auto';
if (showAllSymbol && !isAuto) {
return;
}
var categoryAxis = coordSys.getAxesByScale('ordinal')[0];
if (!categoryAxis) {
return;
}
// Note that category label interval strategy might bring some weird effect
// in some scenario: users may wonder why some of the symbols are not
// displayed. So we show all symbols as possible as we can.
if (isAuto
// Simplify the logic, do not determine label overlap here.
&& canShowAllSymbolForCategory(categoryAxis, data)
) {
return;
}
// Otherwise follow the label interval strategy on category axis.
var categoryDataDim = data.mapDimension(categoryAxis.dim);
var labelMap = {};
zrUtil.each(categoryAxis.getViewLabels(), function (labelItem) {
labelMap[labelItem.tickValue] = 1;
});
return function (dataIndex) {
return !labelMap.hasOwnProperty(data.get(categoryDataDim, dataIndex));
};
}
function canShowAllSymbolForCategory(categoryAxis, data) {
// In mose cases, line is monotonous on category axis, and the label size
// is close with each other. So we check the symbol size and some of the
// label size alone with the category axis to estimate whether all symbol
// can be shown without overlap.
var axisExtent = categoryAxis.getExtent();
var availSize = Math.abs(axisExtent[1] - axisExtent[0]) / categoryAxis.scale.count();
isNaN(availSize) && (availSize = 0); // 0/0 is NaN.
// Sampling some points, max 5.
var dataLen = data.count();
var step = Math.max(1, Math.round(dataLen / 5));
for (var dataIndex = 0; dataIndex < dataLen; dataIndex += step) {
if (SymbolClz.getSymbolSize(
data, dataIndex
// Only for cartesian, where `isHorizontal` exists.
)[categoryAxis.isHorizontal() ? 1 : 0]
// Empirical number
* 1.5 > availSize
) {
return false;
}
}
return true;
}
export default ChartView.extend({
type: 'line',
init: function () {
var lineGroup = new graphic.Group();
var symbolDraw = new SymbolDraw();
this.group.add(symbolDraw.group);
this._symbolDraw = symbolDraw;
this._lineGroup = lineGroup;
},
render: function (seriesModel, ecModel, api) {
var coordSys = seriesModel.coordinateSystem;
var group = this.group;
var data = seriesModel.getData();
var lineStyleModel = seriesModel.getModel('lineStyle');
var areaStyleModel = seriesModel.getModel('areaStyle');
var points = data.mapArray(data.getItemLayout);
var isCoordSysPolar = coordSys.type === 'polar';
var prevCoordSys = this._coordSys;
var symbolDraw = this._symbolDraw;
var polyline = this._polyline;
var polygon = this._polygon;
var lineGroup = this._lineGroup;
var hasAnimation = seriesModel.get('animation');
var isAreaChart = !areaStyleModel.isEmpty();
var valueOrigin = areaStyleModel.get('origin');
var dataCoordInfo = prepareDataCoordInfo(coordSys, data, valueOrigin);
var stackedOnPoints = getStackedOnPoints(coordSys, data, dataCoordInfo);
var showSymbol = seriesModel.get('showSymbol');
var isIgnoreFunc = showSymbol && !isCoordSysPolar
&& getIsIgnoreFunc(seriesModel, data, coordSys);
// Remove temporary symbols
var oldData = this._data;
oldData && oldData.eachItemGraphicEl(function (el, idx) {
if (el.__temp) {
group.remove(el);
oldData.setItemGraphicEl(idx, null);
}
});
// Remove previous created symbols if showSymbol changed to false
if (!showSymbol) {
symbolDraw.remove();
}
group.add(lineGroup);
// FIXME step not support polar
var step = !isCoordSysPolar && seriesModel.get('step');
// Initialization animation or coordinate system changed
if (
!(polyline && prevCoordSys.type === coordSys.type && step === this._step)
) {
showSymbol && symbolDraw.updateData(data, {
isIgnore: isIgnoreFunc,
clipShape: createClipShape(coordSys, false, true, seriesModel)
});
if (step) {
// TODO If stacked series is not step
points = turnPointsIntoStep(points, coordSys, step);
stackedOnPoints = turnPointsIntoStep(stackedOnPoints, coordSys, step);
}
polyline = this._newPolyline(points, coordSys, hasAnimation);
if (isAreaChart) {
polygon = this._newPolygon(
points, stackedOnPoints,
coordSys, hasAnimation
);
}
lineGroup.setClipPath(createClipShape(coordSys, true, false, seriesModel));
}
else {
if (isAreaChart && !polygon) {
// If areaStyle is added
polygon = this._newPolygon(
points, stackedOnPoints,
coordSys, hasAnimation
);
}
else if (polygon && !isAreaChart) {
// If areaStyle is removed
lineGroup.remove(polygon);
polygon = this._polygon = null;
}
// Update clipPath
lineGroup.setClipPath(createClipShape(coordSys, false, false, seriesModel));
// Always update, or it is wrong in the case turning on legend
// because points are not changed
showSymbol && symbolDraw.updateData(data, {
isIgnore: isIgnoreFunc,
clipShape: createClipShape(coordSys, false, true, seriesModel)
});
// Stop symbol animation and sync with line points
// FIXME performance?
data.eachItemGraphicEl(function (el) {
el.stopAnimation(true);
});
// In the case data zoom triggerred refreshing frequently
// Data may not change if line has a category axis. So it should animate nothing
if (!isPointsSame(this._stackedOnPoints, stackedOnPoints)
|| !isPointsSame(this._points, points)
) {
if (hasAnimation) {
this._updateAnimation(
data, stackedOnPoints, coordSys, api, step, valueOrigin
);
}
else {
// Not do it in update with animation
if (step) {
// TODO If stacked series is not step
points = turnPointsIntoStep(points, coordSys, step);
stackedOnPoints = turnPointsIntoStep(stackedOnPoints, coordSys, step);
}
polyline.setShape({
points: points
});
polygon && polygon.setShape({
points: points,
stackedOnPoints: stackedOnPoints
});
}
}
}
var visualColor = getVisualGradient(data, coordSys) || data.getVisual('color');
polyline.useStyle(zrUtil.defaults(
// Use color in lineStyle first
lineStyleModel.getLineStyle(),
{
fill: 'none',
stroke: visualColor,
lineJoin: 'bevel'
}
));
var smooth = seriesModel.get('smooth');
smooth = getSmooth(seriesModel.get('smooth'));
polyline.setShape({
smooth: smooth,
smoothMonotone: seriesModel.get('smoothMonotone'),
connectNulls: seriesModel.get('connectNulls')
});
if (polygon) {
var stackedOnSeries = data.getCalculationInfo('stackedOnSeries');
var stackedOnSmooth = 0;
polygon.useStyle(zrUtil.defaults(
areaStyleModel.getAreaStyle(),
{
fill: visualColor,
opacity: 0.7,
lineJoin: 'bevel'
}
));
if (stackedOnSeries) {
stackedOnSmooth = getSmooth(stackedOnSeries.get('smooth'));
}
polygon.setShape({
smooth: smooth,
stackedOnSmooth: stackedOnSmooth,
smoothMonotone: seriesModel.get('smoothMonotone'),
connectNulls: seriesModel.get('connectNulls')
});
}
this._data = data;
// Save the coordinate system for transition animation when data changed
this._coordSys = coordSys;
this._stackedOnPoints = stackedOnPoints;
this._points = points;
this._step = step;
this._valueOrigin = valueOrigin;
},
dispose: function () {},
highlight: function (seriesModel, ecModel, api, payload) {
var data = seriesModel.getData();
var dataIndex = modelUtil.queryDataIndex(data, payload);
if (!(dataIndex instanceof Array) && dataIndex != null && dataIndex >= 0) {
var symbol = data.getItemGraphicEl(dataIndex);
if (!symbol) {
// Create a temporary symbol if it is not exists
var pt = data.getItemLayout(dataIndex);
if (!pt) {
// Null data
return;
}
symbol = new SymbolClz(data, dataIndex);
symbol.position = pt;
symbol.setZ(
seriesModel.get('zlevel'),
seriesModel.get('z')
);
symbol.ignore = isNaN(pt[0]) || isNaN(pt[1]);
symbol.__temp = true;
data.setItemGraphicEl(dataIndex, symbol);
// Stop scale animation
symbol.stopSymbolAnimation(true);
this.group.add(symbol);
}
symbol.highlight();
}
else {
// Highlight whole series
ChartView.prototype.highlight.call(
this, seriesModel, ecModel, api, payload
);
}
},
downplay: function (seriesModel, ecModel, api, payload) {
var data = seriesModel.getData();
var dataIndex = modelUtil.queryDataIndex(data, payload);
if (dataIndex != null && dataIndex >= 0) {
var symbol = data.getItemGraphicEl(dataIndex);
if (symbol) {
if (symbol.__temp) {
data.setItemGraphicEl(dataIndex, null);
this.group.remove(symbol);
}
else {
symbol.downplay();
}
}
}
else {
// FIXME
// can not downplay completely.
// Downplay whole series
ChartView.prototype.downplay.call(
this, seriesModel, ecModel, api, payload
);
}
},
/**
* @param {module:zrender/container/Group} group
* @param {Array.<Array.<number>>} points
* @private
*/
_newPolyline: function (points) {
var polyline = this._polyline;
// Remove previous created polyline
if (polyline) {
this._lineGroup.remove(polyline);
}
polyline = new Polyline({
shape: {
points: points
},
silent: true,
z2: 10
});
this._lineGroup.add(polyline);
this._polyline = polyline;
return polyline;
},
/**
* @param {module:zrender/container/Group} group
* @param {Array.<Array.<number>>} stackedOnPoints
* @param {Array.<Array.<number>>} points
* @private
*/
_newPolygon: function (points, stackedOnPoints) {
var polygon = this._polygon;
// Remove previous created polygon
if (polygon) {
this._lineGroup.remove(polygon);
}
polygon = new Polygon({
shape: {
points: points,
stackedOnPoints: stackedOnPoints
},
silent: true
});
this._lineGroup.add(polygon);
this._polygon = polygon;
return polygon;
},
/**
* @private
*/
// FIXME Two value axis
_updateAnimation: function (data, stackedOnPoints, coordSys, api, step, valueOrigin) {
var polyline = this._polyline;
var polygon = this._polygon;
var seriesModel = data.hostModel;
var diff = lineAnimationDiff(
this._data, data,
this._stackedOnPoints, stackedOnPoints,
this._coordSys, coordSys,
this._valueOrigin, valueOrigin
);
var current = diff.current;
var stackedOnCurrent = diff.stackedOnCurrent;
var next = diff.next;
var stackedOnNext = diff.stackedOnNext;
if (step) {
// TODO If stacked series is not step
current = turnPointsIntoStep(diff.current, coordSys, step);
stackedOnCurrent = turnPointsIntoStep(diff.stackedOnCurrent, coordSys, step);
next = turnPointsIntoStep(diff.next, coordSys, step);
stackedOnNext = turnPointsIntoStep(diff.stackedOnNext, coordSys, step);
}
// `diff.current` is subset of `current` (which should be ensured by
// turnPointsIntoStep), so points in `__points` can be updated when
// points in `current` are update during animation.
polyline.shape.__points = diff.current;
polyline.shape.points = current;
graphic.updateProps(polyline, {
shape: {
points: next
}
}, seriesModel);
if (polygon) {
polygon.setShape({
points: current,
stackedOnPoints: stackedOnCurrent
});
graphic.updateProps(polygon, {
shape: {
points: next,
stackedOnPoints: stackedOnNext
}
}, seriesModel);
}
var updatedDataInfo = [];
var diffStatus = diff.status;
for (var i = 0; i < diffStatus.length; i++) {
var cmd = diffStatus[i].cmd;
if (cmd === '=') {
var el = data.getItemGraphicEl(diffStatus[i].idx1);
if (el) {
updatedDataInfo.push({
el: el,
ptIdx: i // Index of points
});
}
}
}
if (polyline.animators && polyline.animators.length) {
polyline.animators[0].during(function () {
for (var i = 0; i < updatedDataInfo.length; i++) {
var el = updatedDataInfo[i].el;
el.attr('position', polyline.shape.__points[updatedDataInfo[i].ptIdx]);
}
});
}
},
remove: function (ecModel) {
var group = this.group;
var oldData = this._data;
this._lineGroup.removeAll();
this._symbolDraw.remove(true);
// Remove temporary created elements when highlighting
oldData && oldData.eachItemGraphicEl(function (el, idx) {
if (el.__temp) {
group.remove(el);
oldData.setItemGraphicEl(idx, null);
}
});
this._polyline
= this._polygon
= this._coordSys
= this._points
= this._stackedOnPoints
= this._data = null;
}
}); | {
visualMeta = visualMetaList[i];
break;
} | conditional_block |
LineView.js | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
// FIXME step not support polar
import {__DEV__} from '../../config';
import * as zrUtil from 'zrender/src/core/util';
import SymbolDraw from '../helper/SymbolDraw';
import SymbolClz from '../helper/Symbol';
import lineAnimationDiff from './lineAnimationDiff';
import * as graphic from '../../util/graphic';
import * as modelUtil from '../../util/model';
import {Polyline, Polygon} from './poly';
import ChartView from '../../view/Chart';
import {round} from '../../util/number';
import {prepareDataCoordInfo, getStackedOnPoint} from './helper';
function isPointsSame(points1, points2) {
if (points1.length !== points2.length) {
return;
}
for (var i = 0; i < points1.length; i++) {
var p1 = points1[i];
var p2 = points2[i];
if (p1[0] !== p2[0] || p1[1] !== p2[1]) {
return;
}
}
return true;
}
function getSmooth(smooth) {
return typeof (smooth) === 'number' ? smooth : (smooth ? 0.5 : 0);
}
function getAxisExtentWithGap(axis) {
var extent = axis.getGlobalExtent();
if (axis.onBand) {
// Remove extra 1px to avoid line miter in clipped edge
var halfBandWidth = axis.getBandWidth() / 2 - 1;
var dir = extent[1] > extent[0] ? 1 : -1;
extent[0] += dir * halfBandWidth;
extent[1] -= dir * halfBandWidth;
}
return extent;
}
/**
* @param {module:echarts/coord/cartesian/Cartesian2D|module:echarts/coord/polar/Polar} coordSys
* @param {module:echarts/data/List} data
* @param {Object} dataCoordInfo
* @param {Array.<Array.<number>>} points
*/
function getStackedOnPoints(coordSys, data, dataCoordInfo) {
if (!dataCoordInfo.valueDim) {
return [];
}
var points = [];
for (var idx = 0, len = data.count(); idx < len; idx++) {
points.push(getStackedOnPoint(dataCoordInfo, coordSys, data, idx));
}
return points;
}
function createGridClipShape(cartesian, hasAnimation, forSymbol, seriesModel) {
var xExtent = getAxisExtentWithGap(cartesian.getAxis('x'));
var yExtent = getAxisExtentWithGap(cartesian.getAxis('y'));
var isHorizontal = cartesian.getBaseAxis().isHorizontal();
var x = Math.min(xExtent[0], xExtent[1]);
var y = Math.min(yExtent[0], yExtent[1]);
var width = Math.max(xExtent[0], xExtent[1]) - x;
var height = Math.max(yExtent[0], yExtent[1]) - y;
// Avoid float number rounding error for symbol on the edge of axis extent.
// See #7913 and `test/dataZoom-clip.html`.
if (forSymbol) {
x -= 0.5;
width += 0.5;
y -= 0.5;
height += 0.5;
}
else {
var lineWidth = seriesModel.get('lineStyle.width') || 2;
// Expand clip shape to avoid clipping when line value exceeds axis
var expandSize = seriesModel.get('clipOverflow') ? lineWidth / 2 : Math.max(width, height);
if (isHorizontal) {
y -= expandSize;
height += expandSize * 2;
}
else {
x -= expandSize;
width += expandSize * 2;
}
}
var clipPath = new graphic.Rect({
shape: {
x: x,
y: y,
width: width,
height: height
}
});
if (hasAnimation) {
clipPath.shape[isHorizontal ? 'width' : 'height'] = 0;
graphic.initProps(clipPath, {
shape: {
width: width,
height: height
}
}, seriesModel);
}
return clipPath;
}
function createPolarClipShape(polar, hasAnimation, forSymbol, seriesModel) {
var angleAxis = polar.getAngleAxis();
var radiusAxis = polar.getRadiusAxis();
var radiusExtent = radiusAxis.getExtent().slice();
radiusExtent[0] > radiusExtent[1] && radiusExtent.reverse();
var angleExtent = angleAxis.getExtent();
var RADIAN = Math.PI / 180;
// Avoid float number rounding error for symbol on the edge of axis extent.
if (forSymbol) {
radiusExtent[0] -= 0.5;
radiusExtent[1] += 0.5;
}
var clipPath = new graphic.Sector({
shape: {
cx: round(polar.cx, 1),
cy: round(polar.cy, 1),
r0: round(radiusExtent[0], 1),
r: round(radiusExtent[1], 1),
startAngle: -angleExtent[0] * RADIAN,
endAngle: -angleExtent[1] * RADIAN,
clockwise: angleAxis.inverse
}
});
if (hasAnimation) {
clipPath.shape.endAngle = -angleExtent[0] * RADIAN;
graphic.initProps(clipPath, {
shape: {
endAngle: -angleExtent[1] * RADIAN
}
}, seriesModel);
}
return clipPath;
}
function createClipShape(coordSys, hasAnimation, forSymbol, seriesModel) {
return coordSys.type === 'polar'
? createPolarClipShape(coordSys, hasAnimation, forSymbol, seriesModel)
: createGridClipShape(coordSys, hasAnimation, forSymbol, seriesModel);
}
function turnPointsIntoStep(points, coordSys, stepTurnAt) {
var baseAxis = coordSys.getBaseAxis();
var baseIndex = baseAxis.dim === 'x' || baseAxis.dim === 'radius' ? 0 : 1;
var stepPoints = [];
for (var i = 0; i < points.length - 1; i++) {
var nextPt = points[i + 1];
var pt = points[i];
stepPoints.push(pt);
var stepPt = [];
switch (stepTurnAt) {
case 'end':
stepPt[baseIndex] = nextPt[baseIndex];
stepPt[1 - baseIndex] = pt[1 - baseIndex];
// default is start
stepPoints.push(stepPt);
break;
case 'middle':
// default is start
var middle = (pt[baseIndex] + nextPt[baseIndex]) / 2;
var stepPt2 = [];
stepPt[baseIndex] = stepPt2[baseIndex] = middle;
stepPt[1 - baseIndex] = pt[1 - baseIndex];
stepPt2[1 - baseIndex] = nextPt[1 - baseIndex];
stepPoints.push(stepPt);
stepPoints.push(stepPt2);
break;
default:
stepPt[baseIndex] = pt[baseIndex];
stepPt[1 - baseIndex] = nextPt[1 - baseIndex];
// default is start
stepPoints.push(stepPt);
}
}
// Last points
points[i] && stepPoints.push(points[i]);
return stepPoints;
}
function getVisualGradient(data, coordSys) {
var visualMetaList = data.getVisual('visualMeta');
if (!visualMetaList || !visualMetaList.length || !data.count()) {
// When data.count() is 0, gradient range can not be calculated.
return;
}
if (coordSys.type !== 'cartesian2d') {
if (__DEV__) {
console.warn('Visual map on line style is only supported on cartesian2d.');
}
return;
}
var coordDim;
var visualMeta;
for (var i = visualMetaList.length - 1; i >= 0; i--) {
var dimIndex = visualMetaList[i].dimension;
var dimName = data.dimensions[dimIndex];
var dimInfo = data.getDimensionInfo(dimName);
coordDim = dimInfo && dimInfo.coordDim;
// Can only be x or y
if (coordDim === 'x' || coordDim === 'y') {
visualMeta = visualMetaList[i];
break;
}
}
if (!visualMeta) {
if (__DEV__) {
console.warn('Visual map on line style only support x or y dimension.');
}
return;
}
// If the area to be rendered is bigger than area defined by LinearGradient,
// the canvas spec prescribes that the color of the first stop and the last
// stop should be used. But if two stops are added at offset 0, in effect
// browsers use the color of the second stop to render area outside
// LinearGradient. So we can only infinitesimally extend area defined in
// LinearGradient to render `outerColors`.
var axis = coordSys.getAxis(coordDim);
// dataToCoor mapping may not be linear, but must be monotonic.
var colorStops = zrUtil.map(visualMeta.stops, function (stop) {
return {
coord: axis.toGlobalCoord(axis.dataToCoord(stop.value)),
color: stop.color
};
});
var stopLen = colorStops.length;
var outerColors = visualMeta.outerColors.slice();
if (stopLen && colorStops[0].coord > colorStops[stopLen - 1].coord) {
colorStops.reverse();
outerColors.reverse();
}
var tinyExtent = 10; // Arbitrary value: 10px
var minCoord = colorStops[0].coord - tinyExtent;
var maxCoord = colorStops[stopLen - 1].coord + tinyExtent;
var coordSpan = maxCoord - minCoord;
if (coordSpan < 1e-3) {
return 'transparent';
}
zrUtil.each(colorStops, function (stop) {
stop.offset = (stop.coord - minCoord) / coordSpan;
});
colorStops.push({
offset: stopLen ? colorStops[stopLen - 1].offset : 0.5,
color: outerColors[1] || 'transparent'
});
colorStops.unshift({ // notice colorStops.length have been changed.
offset: stopLen ? colorStops[0].offset : 0.5,
color: outerColors[0] || 'transparent'
});
// zrUtil.each(colorStops, function (colorStop) {
// // Make sure each offset has rounded px to avoid not sharp edge
// colorStop.offset = (Math.round(colorStop.offset * (end - start) + start) - start) / (end - start);
// });
var gradient = new graphic.LinearGradient(0, 0, 0, 0, colorStops, true);
gradient[coordDim] = minCoord;
gradient[coordDim + '2'] = maxCoord;
return gradient;
}
function getIsIgnoreFunc(seriesModel, data, coordSys) |
function canShowAllSymbolForCategory(categoryAxis, data) {
// In mose cases, line is monotonous on category axis, and the label size
// is close with each other. So we check the symbol size and some of the
// label size alone with the category axis to estimate whether all symbol
// can be shown without overlap.
var axisExtent = categoryAxis.getExtent();
var availSize = Math.abs(axisExtent[1] - axisExtent[0]) / categoryAxis.scale.count();
isNaN(availSize) && (availSize = 0); // 0/0 is NaN.
// Sampling some points, max 5.
var dataLen = data.count();
var step = Math.max(1, Math.round(dataLen / 5));
for (var dataIndex = 0; dataIndex < dataLen; dataIndex += step) {
if (SymbolClz.getSymbolSize(
data, dataIndex
// Only for cartesian, where `isHorizontal` exists.
)[categoryAxis.isHorizontal() ? 1 : 0]
// Empirical number
* 1.5 > availSize
) {
return false;
}
}
return true;
}
export default ChartView.extend({
type: 'line',
init: function () {
var lineGroup = new graphic.Group();
var symbolDraw = new SymbolDraw();
this.group.add(symbolDraw.group);
this._symbolDraw = symbolDraw;
this._lineGroup = lineGroup;
},
render: function (seriesModel, ecModel, api) {
var coordSys = seriesModel.coordinateSystem;
var group = this.group;
var data = seriesModel.getData();
var lineStyleModel = seriesModel.getModel('lineStyle');
var areaStyleModel = seriesModel.getModel('areaStyle');
var points = data.mapArray(data.getItemLayout);
var isCoordSysPolar = coordSys.type === 'polar';
var prevCoordSys = this._coordSys;
var symbolDraw = this._symbolDraw;
var polyline = this._polyline;
var polygon = this._polygon;
var lineGroup = this._lineGroup;
var hasAnimation = seriesModel.get('animation');
var isAreaChart = !areaStyleModel.isEmpty();
var valueOrigin = areaStyleModel.get('origin');
var dataCoordInfo = prepareDataCoordInfo(coordSys, data, valueOrigin);
var stackedOnPoints = getStackedOnPoints(coordSys, data, dataCoordInfo);
var showSymbol = seriesModel.get('showSymbol');
var isIgnoreFunc = showSymbol && !isCoordSysPolar
&& getIsIgnoreFunc(seriesModel, data, coordSys);
// Remove temporary symbols
var oldData = this._data;
oldData && oldData.eachItemGraphicEl(function (el, idx) {
if (el.__temp) {
group.remove(el);
oldData.setItemGraphicEl(idx, null);
}
});
// Remove previous created symbols if showSymbol changed to false
if (!showSymbol) {
symbolDraw.remove();
}
group.add(lineGroup);
// FIXME step not support polar
var step = !isCoordSysPolar && seriesModel.get('step');
// Initialization animation or coordinate system changed
if (
!(polyline && prevCoordSys.type === coordSys.type && step === this._step)
) {
showSymbol && symbolDraw.updateData(data, {
isIgnore: isIgnoreFunc,
clipShape: createClipShape(coordSys, false, true, seriesModel)
});
if (step) {
// TODO If stacked series is not step
points = turnPointsIntoStep(points, coordSys, step);
stackedOnPoints = turnPointsIntoStep(stackedOnPoints, coordSys, step);
}
polyline = this._newPolyline(points, coordSys, hasAnimation);
if (isAreaChart) {
polygon = this._newPolygon(
points, stackedOnPoints,
coordSys, hasAnimation
);
}
lineGroup.setClipPath(createClipShape(coordSys, true, false, seriesModel));
}
else {
if (isAreaChart && !polygon) {
// If areaStyle is added
polygon = this._newPolygon(
points, stackedOnPoints,
coordSys, hasAnimation
);
}
else if (polygon && !isAreaChart) {
// If areaStyle is removed
lineGroup.remove(polygon);
polygon = this._polygon = null;
}
// Update clipPath
lineGroup.setClipPath(createClipShape(coordSys, false, false, seriesModel));
// Always update, or it is wrong in the case turning on legend
// because points are not changed
showSymbol && symbolDraw.updateData(data, {
isIgnore: isIgnoreFunc,
clipShape: createClipShape(coordSys, false, true, seriesModel)
});
// Stop symbol animation and sync with line points
// FIXME performance?
data.eachItemGraphicEl(function (el) {
el.stopAnimation(true);
});
// In the case data zoom triggerred refreshing frequently
// Data may not change if line has a category axis. So it should animate nothing
if (!isPointsSame(this._stackedOnPoints, stackedOnPoints)
|| !isPointsSame(this._points, points)
) {
if (hasAnimation) {
this._updateAnimation(
data, stackedOnPoints, coordSys, api, step, valueOrigin
);
}
else {
// Not do it in update with animation
if (step) {
// TODO If stacked series is not step
points = turnPointsIntoStep(points, coordSys, step);
stackedOnPoints = turnPointsIntoStep(stackedOnPoints, coordSys, step);
}
polyline.setShape({
points: points
});
polygon && polygon.setShape({
points: points,
stackedOnPoints: stackedOnPoints
});
}
}
}
var visualColor = getVisualGradient(data, coordSys) || data.getVisual('color');
polyline.useStyle(zrUtil.defaults(
// Use color in lineStyle first
lineStyleModel.getLineStyle(),
{
fill: 'none',
stroke: visualColor,
lineJoin: 'bevel'
}
));
var smooth = seriesModel.get('smooth');
smooth = getSmooth(seriesModel.get('smooth'));
polyline.setShape({
smooth: smooth,
smoothMonotone: seriesModel.get('smoothMonotone'),
connectNulls: seriesModel.get('connectNulls')
});
if (polygon) {
var stackedOnSeries = data.getCalculationInfo('stackedOnSeries');
var stackedOnSmooth = 0;
polygon.useStyle(zrUtil.defaults(
areaStyleModel.getAreaStyle(),
{
fill: visualColor,
opacity: 0.7,
lineJoin: 'bevel'
}
));
if (stackedOnSeries) {
stackedOnSmooth = getSmooth(stackedOnSeries.get('smooth'));
}
polygon.setShape({
smooth: smooth,
stackedOnSmooth: stackedOnSmooth,
smoothMonotone: seriesModel.get('smoothMonotone'),
connectNulls: seriesModel.get('connectNulls')
});
}
this._data = data;
// Save the coordinate system for transition animation when data changed
this._coordSys = coordSys;
this._stackedOnPoints = stackedOnPoints;
this._points = points;
this._step = step;
this._valueOrigin = valueOrigin;
},
dispose: function () {},
highlight: function (seriesModel, ecModel, api, payload) {
var data = seriesModel.getData();
var dataIndex = modelUtil.queryDataIndex(data, payload);
if (!(dataIndex instanceof Array) && dataIndex != null && dataIndex >= 0) {
var symbol = data.getItemGraphicEl(dataIndex);
if (!symbol) {
// Create a temporary symbol if it is not exists
var pt = data.getItemLayout(dataIndex);
if (!pt) {
// Null data
return;
}
symbol = new SymbolClz(data, dataIndex);
symbol.position = pt;
symbol.setZ(
seriesModel.get('zlevel'),
seriesModel.get('z')
);
symbol.ignore = isNaN(pt[0]) || isNaN(pt[1]);
symbol.__temp = true;
data.setItemGraphicEl(dataIndex, symbol);
// Stop scale animation
symbol.stopSymbolAnimation(true);
this.group.add(symbol);
}
symbol.highlight();
}
else {
// Highlight whole series
ChartView.prototype.highlight.call(
this, seriesModel, ecModel, api, payload
);
}
},
downplay: function (seriesModel, ecModel, api, payload) {
var data = seriesModel.getData();
var dataIndex = modelUtil.queryDataIndex(data, payload);
if (dataIndex != null && dataIndex >= 0) {
var symbol = data.getItemGraphicEl(dataIndex);
if (symbol) {
if (symbol.__temp) {
data.setItemGraphicEl(dataIndex, null);
this.group.remove(symbol);
}
else {
symbol.downplay();
}
}
}
else {
// FIXME
// can not downplay completely.
// Downplay whole series
ChartView.prototype.downplay.call(
this, seriesModel, ecModel, api, payload
);
}
},
/**
* @param {module:zrender/container/Group} group
* @param {Array.<Array.<number>>} points
* @private
*/
_newPolyline: function (points) {
var polyline = this._polyline;
// Remove previous created polyline
if (polyline) {
this._lineGroup.remove(polyline);
}
polyline = new Polyline({
shape: {
points: points
},
silent: true,
z2: 10
});
this._lineGroup.add(polyline);
this._polyline = polyline;
return polyline;
},
/**
* @param {module:zrender/container/Group} group
* @param {Array.<Array.<number>>} stackedOnPoints
* @param {Array.<Array.<number>>} points
* @private
*/
_newPolygon: function (points, stackedOnPoints) {
var polygon = this._polygon;
// Remove previous created polygon
if (polygon) {
this._lineGroup.remove(polygon);
}
polygon = new Polygon({
shape: {
points: points,
stackedOnPoints: stackedOnPoints
},
silent: true
});
this._lineGroup.add(polygon);
this._polygon = polygon;
return polygon;
},
/**
* @private
*/
// FIXME Two value axis
_updateAnimation: function (data, stackedOnPoints, coordSys, api, step, valueOrigin) {
var polyline = this._polyline;
var polygon = this._polygon;
var seriesModel = data.hostModel;
var diff = lineAnimationDiff(
this._data, data,
this._stackedOnPoints, stackedOnPoints,
this._coordSys, coordSys,
this._valueOrigin, valueOrigin
);
var current = diff.current;
var stackedOnCurrent = diff.stackedOnCurrent;
var next = diff.next;
var stackedOnNext = diff.stackedOnNext;
if (step) {
// TODO If stacked series is not step
current = turnPointsIntoStep(diff.current, coordSys, step);
stackedOnCurrent = turnPointsIntoStep(diff.stackedOnCurrent, coordSys, step);
next = turnPointsIntoStep(diff.next, coordSys, step);
stackedOnNext = turnPointsIntoStep(diff.stackedOnNext, coordSys, step);
}
// `diff.current` is subset of `current` (which should be ensured by
// turnPointsIntoStep), so points in `__points` can be updated when
// points in `current` are update during animation.
polyline.shape.__points = diff.current;
polyline.shape.points = current;
graphic.updateProps(polyline, {
shape: {
points: next
}
}, seriesModel);
if (polygon) {
polygon.setShape({
points: current,
stackedOnPoints: stackedOnCurrent
});
graphic.updateProps(polygon, {
shape: {
points: next,
stackedOnPoints: stackedOnNext
}
}, seriesModel);
}
var updatedDataInfo = [];
var diffStatus = diff.status;
for (var i = 0; i < diffStatus.length; i++) {
var cmd = diffStatus[i].cmd;
if (cmd === '=') {
var el = data.getItemGraphicEl(diffStatus[i].idx1);
if (el) {
updatedDataInfo.push({
el: el,
ptIdx: i // Index of points
});
}
}
}
if (polyline.animators && polyline.animators.length) {
polyline.animators[0].during(function () {
for (var i = 0; i < updatedDataInfo.length; i++) {
var el = updatedDataInfo[i].el;
el.attr('position', polyline.shape.__points[updatedDataInfo[i].ptIdx]);
}
});
}
},
remove: function (ecModel) {
var group = this.group;
var oldData = this._data;
this._lineGroup.removeAll();
this._symbolDraw.remove(true);
// Remove temporary created elements when highlighting
oldData && oldData.eachItemGraphicEl(function (el, idx) {
if (el.__temp) {
group.remove(el);
oldData.setItemGraphicEl(idx, null);
}
});
this._polyline
= this._polygon
= this._coordSys
= this._points
= this._stackedOnPoints
= this._data = null;
}
}); | {
var showAllSymbol = seriesModel.get('showAllSymbol');
var isAuto = showAllSymbol === 'auto';
if (showAllSymbol && !isAuto) {
return;
}
var categoryAxis = coordSys.getAxesByScale('ordinal')[0];
if (!categoryAxis) {
return;
}
// Note that category label interval strategy might bring some weird effect
// in some scenario: users may wonder why some of the symbols are not
// displayed. So we show all symbols as possible as we can.
if (isAuto
// Simplify the logic, do not determine label overlap here.
&& canShowAllSymbolForCategory(categoryAxis, data)
) {
return;
}
// Otherwise follow the label interval strategy on category axis.
var categoryDataDim = data.mapDimension(categoryAxis.dim);
var labelMap = {};
zrUtil.each(categoryAxis.getViewLabels(), function (labelItem) {
labelMap[labelItem.tickValue] = 1;
});
return function (dataIndex) {
return !labelMap.hasOwnProperty(data.get(categoryDataDim, dataIndex));
};
} | identifier_body |
LineView.js | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License. | */
// FIXME step not support polar
import {__DEV__} from '../../config';
import * as zrUtil from 'zrender/src/core/util';
import SymbolDraw from '../helper/SymbolDraw';
import SymbolClz from '../helper/Symbol';
import lineAnimationDiff from './lineAnimationDiff';
import * as graphic from '../../util/graphic';
import * as modelUtil from '../../util/model';
import {Polyline, Polygon} from './poly';
import ChartView from '../../view/Chart';
import {round} from '../../util/number';
import {prepareDataCoordInfo, getStackedOnPoint} from './helper';
function isPointsSame(points1, points2) {
if (points1.length !== points2.length) {
return;
}
for (var i = 0; i < points1.length; i++) {
var p1 = points1[i];
var p2 = points2[i];
if (p1[0] !== p2[0] || p1[1] !== p2[1]) {
return;
}
}
return true;
}
function getSmooth(smooth) {
return typeof (smooth) === 'number' ? smooth : (smooth ? 0.5 : 0);
}
function getAxisExtentWithGap(axis) {
var extent = axis.getGlobalExtent();
if (axis.onBand) {
// Remove extra 1px to avoid line miter in clipped edge
var halfBandWidth = axis.getBandWidth() / 2 - 1;
var dir = extent[1] > extent[0] ? 1 : -1;
extent[0] += dir * halfBandWidth;
extent[1] -= dir * halfBandWidth;
}
return extent;
}
/**
* @param {module:echarts/coord/cartesian/Cartesian2D|module:echarts/coord/polar/Polar} coordSys
* @param {module:echarts/data/List} data
* @param {Object} dataCoordInfo
* @param {Array.<Array.<number>>} points
*/
function getStackedOnPoints(coordSys, data, dataCoordInfo) {
if (!dataCoordInfo.valueDim) {
return [];
}
var points = [];
for (var idx = 0, len = data.count(); idx < len; idx++) {
points.push(getStackedOnPoint(dataCoordInfo, coordSys, data, idx));
}
return points;
}
function createGridClipShape(cartesian, hasAnimation, forSymbol, seriesModel) {
var xExtent = getAxisExtentWithGap(cartesian.getAxis('x'));
var yExtent = getAxisExtentWithGap(cartesian.getAxis('y'));
var isHorizontal = cartesian.getBaseAxis().isHorizontal();
var x = Math.min(xExtent[0], xExtent[1]);
var y = Math.min(yExtent[0], yExtent[1]);
var width = Math.max(xExtent[0], xExtent[1]) - x;
var height = Math.max(yExtent[0], yExtent[1]) - y;
// Avoid float number rounding error for symbol on the edge of axis extent.
// See #7913 and `test/dataZoom-clip.html`.
if (forSymbol) {
x -= 0.5;
width += 0.5;
y -= 0.5;
height += 0.5;
}
else {
var lineWidth = seriesModel.get('lineStyle.width') || 2;
// Expand clip shape to avoid clipping when line value exceeds axis
var expandSize = seriesModel.get('clipOverflow') ? lineWidth / 2 : Math.max(width, height);
if (isHorizontal) {
y -= expandSize;
height += expandSize * 2;
}
else {
x -= expandSize;
width += expandSize * 2;
}
}
var clipPath = new graphic.Rect({
shape: {
x: x,
y: y,
width: width,
height: height
}
});
if (hasAnimation) {
clipPath.shape[isHorizontal ? 'width' : 'height'] = 0;
graphic.initProps(clipPath, {
shape: {
width: width,
height: height
}
}, seriesModel);
}
return clipPath;
}
function createPolarClipShape(polar, hasAnimation, forSymbol, seriesModel) {
var angleAxis = polar.getAngleAxis();
var radiusAxis = polar.getRadiusAxis();
var radiusExtent = radiusAxis.getExtent().slice();
radiusExtent[0] > radiusExtent[1] && radiusExtent.reverse();
var angleExtent = angleAxis.getExtent();
var RADIAN = Math.PI / 180;
// Avoid float number rounding error for symbol on the edge of axis extent.
if (forSymbol) {
radiusExtent[0] -= 0.5;
radiusExtent[1] += 0.5;
}
var clipPath = new graphic.Sector({
shape: {
cx: round(polar.cx, 1),
cy: round(polar.cy, 1),
r0: round(radiusExtent[0], 1),
r: round(radiusExtent[1], 1),
startAngle: -angleExtent[0] * RADIAN,
endAngle: -angleExtent[1] * RADIAN,
clockwise: angleAxis.inverse
}
});
if (hasAnimation) {
clipPath.shape.endAngle = -angleExtent[0] * RADIAN;
graphic.initProps(clipPath, {
shape: {
endAngle: -angleExtent[1] * RADIAN
}
}, seriesModel);
}
return clipPath;
}
function createClipShape(coordSys, hasAnimation, forSymbol, seriesModel) {
return coordSys.type === 'polar'
? createPolarClipShape(coordSys, hasAnimation, forSymbol, seriesModel)
: createGridClipShape(coordSys, hasAnimation, forSymbol, seriesModel);
}
function turnPointsIntoStep(points, coordSys, stepTurnAt) {
var baseAxis = coordSys.getBaseAxis();
var baseIndex = baseAxis.dim === 'x' || baseAxis.dim === 'radius' ? 0 : 1;
var stepPoints = [];
for (var i = 0; i < points.length - 1; i++) {
var nextPt = points[i + 1];
var pt = points[i];
stepPoints.push(pt);
var stepPt = [];
switch (stepTurnAt) {
case 'end':
stepPt[baseIndex] = nextPt[baseIndex];
stepPt[1 - baseIndex] = pt[1 - baseIndex];
// default is start
stepPoints.push(stepPt);
break;
case 'middle':
// default is start
var middle = (pt[baseIndex] + nextPt[baseIndex]) / 2;
var stepPt2 = [];
stepPt[baseIndex] = stepPt2[baseIndex] = middle;
stepPt[1 - baseIndex] = pt[1 - baseIndex];
stepPt2[1 - baseIndex] = nextPt[1 - baseIndex];
stepPoints.push(stepPt);
stepPoints.push(stepPt2);
break;
default:
stepPt[baseIndex] = pt[baseIndex];
stepPt[1 - baseIndex] = nextPt[1 - baseIndex];
// default is start
stepPoints.push(stepPt);
}
}
// Last points
points[i] && stepPoints.push(points[i]);
return stepPoints;
}
function getVisualGradient(data, coordSys) {
var visualMetaList = data.getVisual('visualMeta');
if (!visualMetaList || !visualMetaList.length || !data.count()) {
// When data.count() is 0, gradient range can not be calculated.
return;
}
if (coordSys.type !== 'cartesian2d') {
if (__DEV__) {
console.warn('Visual map on line style is only supported on cartesian2d.');
}
return;
}
var coordDim;
var visualMeta;
for (var i = visualMetaList.length - 1; i >= 0; i--) {
var dimIndex = visualMetaList[i].dimension;
var dimName = data.dimensions[dimIndex];
var dimInfo = data.getDimensionInfo(dimName);
coordDim = dimInfo && dimInfo.coordDim;
// Can only be x or y
if (coordDim === 'x' || coordDim === 'y') {
visualMeta = visualMetaList[i];
break;
}
}
if (!visualMeta) {
if (__DEV__) {
console.warn('Visual map on line style only support x or y dimension.');
}
return;
}
// If the area to be rendered is bigger than area defined by LinearGradient,
// the canvas spec prescribes that the color of the first stop and the last
// stop should be used. But if two stops are added at offset 0, in effect
// browsers use the color of the second stop to render area outside
// LinearGradient. So we can only infinitesimally extend area defined in
// LinearGradient to render `outerColors`.
var axis = coordSys.getAxis(coordDim);
// dataToCoor mapping may not be linear, but must be monotonic.
var colorStops = zrUtil.map(visualMeta.stops, function (stop) {
return {
coord: axis.toGlobalCoord(axis.dataToCoord(stop.value)),
color: stop.color
};
});
var stopLen = colorStops.length;
var outerColors = visualMeta.outerColors.slice();
if (stopLen && colorStops[0].coord > colorStops[stopLen - 1].coord) {
colorStops.reverse();
outerColors.reverse();
}
var tinyExtent = 10; // Arbitrary value: 10px
var minCoord = colorStops[0].coord - tinyExtent;
var maxCoord = colorStops[stopLen - 1].coord + tinyExtent;
var coordSpan = maxCoord - minCoord;
if (coordSpan < 1e-3) {
return 'transparent';
}
zrUtil.each(colorStops, function (stop) {
stop.offset = (stop.coord - minCoord) / coordSpan;
});
colorStops.push({
offset: stopLen ? colorStops[stopLen - 1].offset : 0.5,
color: outerColors[1] || 'transparent'
});
colorStops.unshift({ // notice colorStops.length have been changed.
offset: stopLen ? colorStops[0].offset : 0.5,
color: outerColors[0] || 'transparent'
});
// zrUtil.each(colorStops, function (colorStop) {
// // Make sure each offset has rounded px to avoid not sharp edge
// colorStop.offset = (Math.round(colorStop.offset * (end - start) + start) - start) / (end - start);
// });
var gradient = new graphic.LinearGradient(0, 0, 0, 0, colorStops, true);
gradient[coordDim] = minCoord;
gradient[coordDim + '2'] = maxCoord;
return gradient;
}
function getIsIgnoreFunc(seriesModel, data, coordSys) {
var showAllSymbol = seriesModel.get('showAllSymbol');
var isAuto = showAllSymbol === 'auto';
if (showAllSymbol && !isAuto) {
return;
}
var categoryAxis = coordSys.getAxesByScale('ordinal')[0];
if (!categoryAxis) {
return;
}
// Note that category label interval strategy might bring some weird effect
// in some scenario: users may wonder why some of the symbols are not
// displayed. So we show all symbols as possible as we can.
if (isAuto
// Simplify the logic, do not determine label overlap here.
&& canShowAllSymbolForCategory(categoryAxis, data)
) {
return;
}
// Otherwise follow the label interval strategy on category axis.
var categoryDataDim = data.mapDimension(categoryAxis.dim);
var labelMap = {};
zrUtil.each(categoryAxis.getViewLabels(), function (labelItem) {
labelMap[labelItem.tickValue] = 1;
});
return function (dataIndex) {
return !labelMap.hasOwnProperty(data.get(categoryDataDim, dataIndex));
};
}
function canShowAllSymbolForCategory(categoryAxis, data) {
// In mose cases, line is monotonous on category axis, and the label size
// is close with each other. So we check the symbol size and some of the
// label size alone with the category axis to estimate whether all symbol
// can be shown without overlap.
var axisExtent = categoryAxis.getExtent();
var availSize = Math.abs(axisExtent[1] - axisExtent[0]) / categoryAxis.scale.count();
isNaN(availSize) && (availSize = 0); // 0/0 is NaN.
// Sampling some points, max 5.
var dataLen = data.count();
var step = Math.max(1, Math.round(dataLen / 5));
for (var dataIndex = 0; dataIndex < dataLen; dataIndex += step) {
if (SymbolClz.getSymbolSize(
data, dataIndex
// Only for cartesian, where `isHorizontal` exists.
)[categoryAxis.isHorizontal() ? 1 : 0]
// Empirical number
* 1.5 > availSize
) {
return false;
}
}
return true;
}
export default ChartView.extend({
type: 'line',
init: function () {
var lineGroup = new graphic.Group();
var symbolDraw = new SymbolDraw();
this.group.add(symbolDraw.group);
this._symbolDraw = symbolDraw;
this._lineGroup = lineGroup;
},
render: function (seriesModel, ecModel, api) {
var coordSys = seriesModel.coordinateSystem;
var group = this.group;
var data = seriesModel.getData();
var lineStyleModel = seriesModel.getModel('lineStyle');
var areaStyleModel = seriesModel.getModel('areaStyle');
var points = data.mapArray(data.getItemLayout);
var isCoordSysPolar = coordSys.type === 'polar';
var prevCoordSys = this._coordSys;
var symbolDraw = this._symbolDraw;
var polyline = this._polyline;
var polygon = this._polygon;
var lineGroup = this._lineGroup;
var hasAnimation = seriesModel.get('animation');
var isAreaChart = !areaStyleModel.isEmpty();
var valueOrigin = areaStyleModel.get('origin');
var dataCoordInfo = prepareDataCoordInfo(coordSys, data, valueOrigin);
var stackedOnPoints = getStackedOnPoints(coordSys, data, dataCoordInfo);
var showSymbol = seriesModel.get('showSymbol');
var isIgnoreFunc = showSymbol && !isCoordSysPolar
&& getIsIgnoreFunc(seriesModel, data, coordSys);
// Remove temporary symbols
var oldData = this._data;
oldData && oldData.eachItemGraphicEl(function (el, idx) {
if (el.__temp) {
group.remove(el);
oldData.setItemGraphicEl(idx, null);
}
});
// Remove previous created symbols if showSymbol changed to false
if (!showSymbol) {
symbolDraw.remove();
}
group.add(lineGroup);
// FIXME step not support polar
var step = !isCoordSysPolar && seriesModel.get('step');
// Initialization animation or coordinate system changed
if (
!(polyline && prevCoordSys.type === coordSys.type && step === this._step)
) {
showSymbol && symbolDraw.updateData(data, {
isIgnore: isIgnoreFunc,
clipShape: createClipShape(coordSys, false, true, seriesModel)
});
if (step) {
// TODO If stacked series is not step
points = turnPointsIntoStep(points, coordSys, step);
stackedOnPoints = turnPointsIntoStep(stackedOnPoints, coordSys, step);
}
polyline = this._newPolyline(points, coordSys, hasAnimation);
if (isAreaChart) {
polygon = this._newPolygon(
points, stackedOnPoints,
coordSys, hasAnimation
);
}
lineGroup.setClipPath(createClipShape(coordSys, true, false, seriesModel));
}
else {
if (isAreaChart && !polygon) {
// If areaStyle is added
polygon = this._newPolygon(
points, stackedOnPoints,
coordSys, hasAnimation
);
}
else if (polygon && !isAreaChart) {
// If areaStyle is removed
lineGroup.remove(polygon);
polygon = this._polygon = null;
}
// Update clipPath
lineGroup.setClipPath(createClipShape(coordSys, false, false, seriesModel));
// Always update, or it is wrong in the case turning on legend
// because points are not changed
showSymbol && symbolDraw.updateData(data, {
isIgnore: isIgnoreFunc,
clipShape: createClipShape(coordSys, false, true, seriesModel)
});
// Stop symbol animation and sync with line points
// FIXME performance?
data.eachItemGraphicEl(function (el) {
el.stopAnimation(true);
});
// In the case data zoom triggerred refreshing frequently
// Data may not change if line has a category axis. So it should animate nothing
if (!isPointsSame(this._stackedOnPoints, stackedOnPoints)
|| !isPointsSame(this._points, points)
) {
if (hasAnimation) {
this._updateAnimation(
data, stackedOnPoints, coordSys, api, step, valueOrigin
);
}
else {
// Not do it in update with animation
if (step) {
// TODO If stacked series is not step
points = turnPointsIntoStep(points, coordSys, step);
stackedOnPoints = turnPointsIntoStep(stackedOnPoints, coordSys, step);
}
polyline.setShape({
points: points
});
polygon && polygon.setShape({
points: points,
stackedOnPoints: stackedOnPoints
});
}
}
}
var visualColor = getVisualGradient(data, coordSys) || data.getVisual('color');
polyline.useStyle(zrUtil.defaults(
// Use color in lineStyle first
lineStyleModel.getLineStyle(),
{
fill: 'none',
stroke: visualColor,
lineJoin: 'bevel'
}
));
var smooth = seriesModel.get('smooth');
smooth = getSmooth(seriesModel.get('smooth'));
polyline.setShape({
smooth: smooth,
smoothMonotone: seriesModel.get('smoothMonotone'),
connectNulls: seriesModel.get('connectNulls')
});
if (polygon) {
var stackedOnSeries = data.getCalculationInfo('stackedOnSeries');
var stackedOnSmooth = 0;
polygon.useStyle(zrUtil.defaults(
areaStyleModel.getAreaStyle(),
{
fill: visualColor,
opacity: 0.7,
lineJoin: 'bevel'
}
));
if (stackedOnSeries) {
stackedOnSmooth = getSmooth(stackedOnSeries.get('smooth'));
}
polygon.setShape({
smooth: smooth,
stackedOnSmooth: stackedOnSmooth,
smoothMonotone: seriesModel.get('smoothMonotone'),
connectNulls: seriesModel.get('connectNulls')
});
}
this._data = data;
// Save the coordinate system for transition animation when data changed
this._coordSys = coordSys;
this._stackedOnPoints = stackedOnPoints;
this._points = points;
this._step = step;
this._valueOrigin = valueOrigin;
},
dispose: function () {},
highlight: function (seriesModel, ecModel, api, payload) {
var data = seriesModel.getData();
var dataIndex = modelUtil.queryDataIndex(data, payload);
if (!(dataIndex instanceof Array) && dataIndex != null && dataIndex >= 0) {
var symbol = data.getItemGraphicEl(dataIndex);
if (!symbol) {
// Create a temporary symbol if it is not exists
var pt = data.getItemLayout(dataIndex);
if (!pt) {
// Null data
return;
}
symbol = new SymbolClz(data, dataIndex);
symbol.position = pt;
symbol.setZ(
seriesModel.get('zlevel'),
seriesModel.get('z')
);
symbol.ignore = isNaN(pt[0]) || isNaN(pt[1]);
symbol.__temp = true;
data.setItemGraphicEl(dataIndex, symbol);
// Stop scale animation
symbol.stopSymbolAnimation(true);
this.group.add(symbol);
}
symbol.highlight();
}
else {
// Highlight whole series
ChartView.prototype.highlight.call(
this, seriesModel, ecModel, api, payload
);
}
},
downplay: function (seriesModel, ecModel, api, payload) {
var data = seriesModel.getData();
var dataIndex = modelUtil.queryDataIndex(data, payload);
if (dataIndex != null && dataIndex >= 0) {
var symbol = data.getItemGraphicEl(dataIndex);
if (symbol) {
if (symbol.__temp) {
data.setItemGraphicEl(dataIndex, null);
this.group.remove(symbol);
}
else {
symbol.downplay();
}
}
}
else {
// FIXME
// can not downplay completely.
// Downplay whole series
ChartView.prototype.downplay.call(
this, seriesModel, ecModel, api, payload
);
}
},
/**
* @param {module:zrender/container/Group} group
* @param {Array.<Array.<number>>} points
* @private
*/
_newPolyline: function (points) {
var polyline = this._polyline;
// Remove previous created polyline
if (polyline) {
this._lineGroup.remove(polyline);
}
polyline = new Polyline({
shape: {
points: points
},
silent: true,
z2: 10
});
this._lineGroup.add(polyline);
this._polyline = polyline;
return polyline;
},
/**
* @param {module:zrender/container/Group} group
* @param {Array.<Array.<number>>} stackedOnPoints
* @param {Array.<Array.<number>>} points
* @private
*/
_newPolygon: function (points, stackedOnPoints) {
var polygon = this._polygon;
// Remove previous created polygon
if (polygon) {
this._lineGroup.remove(polygon);
}
polygon = new Polygon({
shape: {
points: points,
stackedOnPoints: stackedOnPoints
},
silent: true
});
this._lineGroup.add(polygon);
this._polygon = polygon;
return polygon;
},
/**
* @private
*/
// FIXME Two value axis
_updateAnimation: function (data, stackedOnPoints, coordSys, api, step, valueOrigin) {
var polyline = this._polyline;
var polygon = this._polygon;
var seriesModel = data.hostModel;
var diff = lineAnimationDiff(
this._data, data,
this._stackedOnPoints, stackedOnPoints,
this._coordSys, coordSys,
this._valueOrigin, valueOrigin
);
var current = diff.current;
var stackedOnCurrent = diff.stackedOnCurrent;
var next = diff.next;
var stackedOnNext = diff.stackedOnNext;
if (step) {
// TODO If stacked series is not step
current = turnPointsIntoStep(diff.current, coordSys, step);
stackedOnCurrent = turnPointsIntoStep(diff.stackedOnCurrent, coordSys, step);
next = turnPointsIntoStep(diff.next, coordSys, step);
stackedOnNext = turnPointsIntoStep(diff.stackedOnNext, coordSys, step);
}
// `diff.current` is subset of `current` (which should be ensured by
// turnPointsIntoStep), so points in `__points` can be updated when
// points in `current` are update during animation.
polyline.shape.__points = diff.current;
polyline.shape.points = current;
graphic.updateProps(polyline, {
shape: {
points: next
}
}, seriesModel);
if (polygon) {
polygon.setShape({
points: current,
stackedOnPoints: stackedOnCurrent
});
graphic.updateProps(polygon, {
shape: {
points: next,
stackedOnPoints: stackedOnNext
}
}, seriesModel);
}
var updatedDataInfo = [];
var diffStatus = diff.status;
for (var i = 0; i < diffStatus.length; i++) {
var cmd = diffStatus[i].cmd;
if (cmd === '=') {
var el = data.getItemGraphicEl(diffStatus[i].idx1);
if (el) {
updatedDataInfo.push({
el: el,
ptIdx: i // Index of points
});
}
}
}
if (polyline.animators && polyline.animators.length) {
polyline.animators[0].during(function () {
for (var i = 0; i < updatedDataInfo.length; i++) {
var el = updatedDataInfo[i].el;
el.attr('position', polyline.shape.__points[updatedDataInfo[i].ptIdx]);
}
});
}
},
remove: function (ecModel) {
var group = this.group;
var oldData = this._data;
this._lineGroup.removeAll();
this._symbolDraw.remove(true);
// Remove temporary created elements when highlighting
oldData && oldData.eachItemGraphicEl(function (el, idx) {
if (el.__temp) {
group.remove(el);
oldData.setItemGraphicEl(idx, null);
}
});
this._polyline
= this._polygon
= this._coordSys
= this._points
= this._stackedOnPoints
= this._data = null;
}
}); | random_line_split | |
LineView.js | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
// FIXME step not support polar
import {__DEV__} from '../../config';
import * as zrUtil from 'zrender/src/core/util';
import SymbolDraw from '../helper/SymbolDraw';
import SymbolClz from '../helper/Symbol';
import lineAnimationDiff from './lineAnimationDiff';
import * as graphic from '../../util/graphic';
import * as modelUtil from '../../util/model';
import {Polyline, Polygon} from './poly';
import ChartView from '../../view/Chart';
import {round} from '../../util/number';
import {prepareDataCoordInfo, getStackedOnPoint} from './helper';
function isPointsSame(points1, points2) {
if (points1.length !== points2.length) {
return;
}
for (var i = 0; i < points1.length; i++) {
var p1 = points1[i];
var p2 = points2[i];
if (p1[0] !== p2[0] || p1[1] !== p2[1]) {
return;
}
}
return true;
}
function | (smooth) {
return typeof (smooth) === 'number' ? smooth : (smooth ? 0.5 : 0);
}
function getAxisExtentWithGap(axis) {
var extent = axis.getGlobalExtent();
if (axis.onBand) {
// Remove extra 1px to avoid line miter in clipped edge
var halfBandWidth = axis.getBandWidth() / 2 - 1;
var dir = extent[1] > extent[0] ? 1 : -1;
extent[0] += dir * halfBandWidth;
extent[1] -= dir * halfBandWidth;
}
return extent;
}
/**
* @param {module:echarts/coord/cartesian/Cartesian2D|module:echarts/coord/polar/Polar} coordSys
* @param {module:echarts/data/List} data
* @param {Object} dataCoordInfo
* @param {Array.<Array.<number>>} points
*/
function getStackedOnPoints(coordSys, data, dataCoordInfo) {
if (!dataCoordInfo.valueDim) {
return [];
}
var points = [];
for (var idx = 0, len = data.count(); idx < len; idx++) {
points.push(getStackedOnPoint(dataCoordInfo, coordSys, data, idx));
}
return points;
}
function createGridClipShape(cartesian, hasAnimation, forSymbol, seriesModel) {
var xExtent = getAxisExtentWithGap(cartesian.getAxis('x'));
var yExtent = getAxisExtentWithGap(cartesian.getAxis('y'));
var isHorizontal = cartesian.getBaseAxis().isHorizontal();
var x = Math.min(xExtent[0], xExtent[1]);
var y = Math.min(yExtent[0], yExtent[1]);
var width = Math.max(xExtent[0], xExtent[1]) - x;
var height = Math.max(yExtent[0], yExtent[1]) - y;
// Avoid float number rounding error for symbol on the edge of axis extent.
// See #7913 and `test/dataZoom-clip.html`.
if (forSymbol) {
x -= 0.5;
width += 0.5;
y -= 0.5;
height += 0.5;
}
else {
var lineWidth = seriesModel.get('lineStyle.width') || 2;
// Expand clip shape to avoid clipping when line value exceeds axis
var expandSize = seriesModel.get('clipOverflow') ? lineWidth / 2 : Math.max(width, height);
if (isHorizontal) {
y -= expandSize;
height += expandSize * 2;
}
else {
x -= expandSize;
width += expandSize * 2;
}
}
var clipPath = new graphic.Rect({
shape: {
x: x,
y: y,
width: width,
height: height
}
});
if (hasAnimation) {
clipPath.shape[isHorizontal ? 'width' : 'height'] = 0;
graphic.initProps(clipPath, {
shape: {
width: width,
height: height
}
}, seriesModel);
}
return clipPath;
}
function createPolarClipShape(polar, hasAnimation, forSymbol, seriesModel) {
var angleAxis = polar.getAngleAxis();
var radiusAxis = polar.getRadiusAxis();
var radiusExtent = radiusAxis.getExtent().slice();
radiusExtent[0] > radiusExtent[1] && radiusExtent.reverse();
var angleExtent = angleAxis.getExtent();
var RADIAN = Math.PI / 180;
// Avoid float number rounding error for symbol on the edge of axis extent.
if (forSymbol) {
radiusExtent[0] -= 0.5;
radiusExtent[1] += 0.5;
}
var clipPath = new graphic.Sector({
shape: {
cx: round(polar.cx, 1),
cy: round(polar.cy, 1),
r0: round(radiusExtent[0], 1),
r: round(radiusExtent[1], 1),
startAngle: -angleExtent[0] * RADIAN,
endAngle: -angleExtent[1] * RADIAN,
clockwise: angleAxis.inverse
}
});
if (hasAnimation) {
clipPath.shape.endAngle = -angleExtent[0] * RADIAN;
graphic.initProps(clipPath, {
shape: {
endAngle: -angleExtent[1] * RADIAN
}
}, seriesModel);
}
return clipPath;
}
function createClipShape(coordSys, hasAnimation, forSymbol, seriesModel) {
return coordSys.type === 'polar'
? createPolarClipShape(coordSys, hasAnimation, forSymbol, seriesModel)
: createGridClipShape(coordSys, hasAnimation, forSymbol, seriesModel);
}
function turnPointsIntoStep(points, coordSys, stepTurnAt) {
var baseAxis = coordSys.getBaseAxis();
var baseIndex = baseAxis.dim === 'x' || baseAxis.dim === 'radius' ? 0 : 1;
var stepPoints = [];
for (var i = 0; i < points.length - 1; i++) {
var nextPt = points[i + 1];
var pt = points[i];
stepPoints.push(pt);
var stepPt = [];
switch (stepTurnAt) {
case 'end':
stepPt[baseIndex] = nextPt[baseIndex];
stepPt[1 - baseIndex] = pt[1 - baseIndex];
// default is start
stepPoints.push(stepPt);
break;
case 'middle':
// default is start
var middle = (pt[baseIndex] + nextPt[baseIndex]) / 2;
var stepPt2 = [];
stepPt[baseIndex] = stepPt2[baseIndex] = middle;
stepPt[1 - baseIndex] = pt[1 - baseIndex];
stepPt2[1 - baseIndex] = nextPt[1 - baseIndex];
stepPoints.push(stepPt);
stepPoints.push(stepPt2);
break;
default:
stepPt[baseIndex] = pt[baseIndex];
stepPt[1 - baseIndex] = nextPt[1 - baseIndex];
// default is start
stepPoints.push(stepPt);
}
}
// Last points
points[i] && stepPoints.push(points[i]);
return stepPoints;
}
function getVisualGradient(data, coordSys) {
var visualMetaList = data.getVisual('visualMeta');
if (!visualMetaList || !visualMetaList.length || !data.count()) {
// When data.count() is 0, gradient range can not be calculated.
return;
}
if (coordSys.type !== 'cartesian2d') {
if (__DEV__) {
console.warn('Visual map on line style is only supported on cartesian2d.');
}
return;
}
var coordDim;
var visualMeta;
for (var i = visualMetaList.length - 1; i >= 0; i--) {
var dimIndex = visualMetaList[i].dimension;
var dimName = data.dimensions[dimIndex];
var dimInfo = data.getDimensionInfo(dimName);
coordDim = dimInfo && dimInfo.coordDim;
// Can only be x or y
if (coordDim === 'x' || coordDim === 'y') {
visualMeta = visualMetaList[i];
break;
}
}
if (!visualMeta) {
if (__DEV__) {
console.warn('Visual map on line style only support x or y dimension.');
}
return;
}
// If the area to be rendered is bigger than area defined by LinearGradient,
// the canvas spec prescribes that the color of the first stop and the last
// stop should be used. But if two stops are added at offset 0, in effect
// browsers use the color of the second stop to render area outside
// LinearGradient. So we can only infinitesimally extend area defined in
// LinearGradient to render `outerColors`.
var axis = coordSys.getAxis(coordDim);
// dataToCoor mapping may not be linear, but must be monotonic.
var colorStops = zrUtil.map(visualMeta.stops, function (stop) {
return {
coord: axis.toGlobalCoord(axis.dataToCoord(stop.value)),
color: stop.color
};
});
var stopLen = colorStops.length;
var outerColors = visualMeta.outerColors.slice();
if (stopLen && colorStops[0].coord > colorStops[stopLen - 1].coord) {
colorStops.reverse();
outerColors.reverse();
}
var tinyExtent = 10; // Arbitrary value: 10px
var minCoord = colorStops[0].coord - tinyExtent;
var maxCoord = colorStops[stopLen - 1].coord + tinyExtent;
var coordSpan = maxCoord - minCoord;
if (coordSpan < 1e-3) {
return 'transparent';
}
zrUtil.each(colorStops, function (stop) {
stop.offset = (stop.coord - minCoord) / coordSpan;
});
colorStops.push({
offset: stopLen ? colorStops[stopLen - 1].offset : 0.5,
color: outerColors[1] || 'transparent'
});
colorStops.unshift({ // notice colorStops.length have been changed.
offset: stopLen ? colorStops[0].offset : 0.5,
color: outerColors[0] || 'transparent'
});
// zrUtil.each(colorStops, function (colorStop) {
// // Make sure each offset has rounded px to avoid not sharp edge
// colorStop.offset = (Math.round(colorStop.offset * (end - start) + start) - start) / (end - start);
// });
var gradient = new graphic.LinearGradient(0, 0, 0, 0, colorStops, true);
gradient[coordDim] = minCoord;
gradient[coordDim + '2'] = maxCoord;
return gradient;
}
function getIsIgnoreFunc(seriesModel, data, coordSys) {
var showAllSymbol = seriesModel.get('showAllSymbol');
var isAuto = showAllSymbol === 'auto';
if (showAllSymbol && !isAuto) {
return;
}
var categoryAxis = coordSys.getAxesByScale('ordinal')[0];
if (!categoryAxis) {
return;
}
// Note that category label interval strategy might bring some weird effect
// in some scenario: users may wonder why some of the symbols are not
// displayed. So we show all symbols as possible as we can.
if (isAuto
// Simplify the logic, do not determine label overlap here.
&& canShowAllSymbolForCategory(categoryAxis, data)
) {
return;
}
// Otherwise follow the label interval strategy on category axis.
var categoryDataDim = data.mapDimension(categoryAxis.dim);
var labelMap = {};
zrUtil.each(categoryAxis.getViewLabels(), function (labelItem) {
labelMap[labelItem.tickValue] = 1;
});
return function (dataIndex) {
return !labelMap.hasOwnProperty(data.get(categoryDataDim, dataIndex));
};
}
function canShowAllSymbolForCategory(categoryAxis, data) {
// In mose cases, line is monotonous on category axis, and the label size
// is close with each other. So we check the symbol size and some of the
// label size alone with the category axis to estimate whether all symbol
// can be shown without overlap.
var axisExtent = categoryAxis.getExtent();
var availSize = Math.abs(axisExtent[1] - axisExtent[0]) / categoryAxis.scale.count();
isNaN(availSize) && (availSize = 0); // 0/0 is NaN.
// Sampling some points, max 5.
var dataLen = data.count();
var step = Math.max(1, Math.round(dataLen / 5));
for (var dataIndex = 0; dataIndex < dataLen; dataIndex += step) {
if (SymbolClz.getSymbolSize(
data, dataIndex
// Only for cartesian, where `isHorizontal` exists.
)[categoryAxis.isHorizontal() ? 1 : 0]
// Empirical number
* 1.5 > availSize
) {
return false;
}
}
return true;
}
export default ChartView.extend({
type: 'line',
init: function () {
var lineGroup = new graphic.Group();
var symbolDraw = new SymbolDraw();
this.group.add(symbolDraw.group);
this._symbolDraw = symbolDraw;
this._lineGroup = lineGroup;
},
render: function (seriesModel, ecModel, api) {
var coordSys = seriesModel.coordinateSystem;
var group = this.group;
var data = seriesModel.getData();
var lineStyleModel = seriesModel.getModel('lineStyle');
var areaStyleModel = seriesModel.getModel('areaStyle');
var points = data.mapArray(data.getItemLayout);
var isCoordSysPolar = coordSys.type === 'polar';
var prevCoordSys = this._coordSys;
var symbolDraw = this._symbolDraw;
var polyline = this._polyline;
var polygon = this._polygon;
var lineGroup = this._lineGroup;
var hasAnimation = seriesModel.get('animation');
var isAreaChart = !areaStyleModel.isEmpty();
var valueOrigin = areaStyleModel.get('origin');
var dataCoordInfo = prepareDataCoordInfo(coordSys, data, valueOrigin);
var stackedOnPoints = getStackedOnPoints(coordSys, data, dataCoordInfo);
var showSymbol = seriesModel.get('showSymbol');
var isIgnoreFunc = showSymbol && !isCoordSysPolar
&& getIsIgnoreFunc(seriesModel, data, coordSys);
// Remove temporary symbols
var oldData = this._data;
oldData && oldData.eachItemGraphicEl(function (el, idx) {
if (el.__temp) {
group.remove(el);
oldData.setItemGraphicEl(idx, null);
}
});
// Remove previous created symbols if showSymbol changed to false
if (!showSymbol) {
symbolDraw.remove();
}
group.add(lineGroup);
// FIXME step not support polar
var step = !isCoordSysPolar && seriesModel.get('step');
// Initialization animation or coordinate system changed
if (
!(polyline && prevCoordSys.type === coordSys.type && step === this._step)
) {
showSymbol && symbolDraw.updateData(data, {
isIgnore: isIgnoreFunc,
clipShape: createClipShape(coordSys, false, true, seriesModel)
});
if (step) {
// TODO If stacked series is not step
points = turnPointsIntoStep(points, coordSys, step);
stackedOnPoints = turnPointsIntoStep(stackedOnPoints, coordSys, step);
}
polyline = this._newPolyline(points, coordSys, hasAnimation);
if (isAreaChart) {
polygon = this._newPolygon(
points, stackedOnPoints,
coordSys, hasAnimation
);
}
lineGroup.setClipPath(createClipShape(coordSys, true, false, seriesModel));
}
else {
if (isAreaChart && !polygon) {
// If areaStyle is added
polygon = this._newPolygon(
points, stackedOnPoints,
coordSys, hasAnimation
);
}
else if (polygon && !isAreaChart) {
// If areaStyle is removed
lineGroup.remove(polygon);
polygon = this._polygon = null;
}
// Update clipPath
lineGroup.setClipPath(createClipShape(coordSys, false, false, seriesModel));
// Always update, or it is wrong in the case turning on legend
// because points are not changed
showSymbol && symbolDraw.updateData(data, {
isIgnore: isIgnoreFunc,
clipShape: createClipShape(coordSys, false, true, seriesModel)
});
// Stop symbol animation and sync with line points
// FIXME performance?
data.eachItemGraphicEl(function (el) {
el.stopAnimation(true);
});
// In the case data zoom triggerred refreshing frequently
// Data may not change if line has a category axis. So it should animate nothing
if (!isPointsSame(this._stackedOnPoints, stackedOnPoints)
|| !isPointsSame(this._points, points)
) {
if (hasAnimation) {
this._updateAnimation(
data, stackedOnPoints, coordSys, api, step, valueOrigin
);
}
else {
// Not do it in update with animation
if (step) {
// TODO If stacked series is not step
points = turnPointsIntoStep(points, coordSys, step);
stackedOnPoints = turnPointsIntoStep(stackedOnPoints, coordSys, step);
}
polyline.setShape({
points: points
});
polygon && polygon.setShape({
points: points,
stackedOnPoints: stackedOnPoints
});
}
}
}
var visualColor = getVisualGradient(data, coordSys) || data.getVisual('color');
polyline.useStyle(zrUtil.defaults(
// Use color in lineStyle first
lineStyleModel.getLineStyle(),
{
fill: 'none',
stroke: visualColor,
lineJoin: 'bevel'
}
));
var smooth = seriesModel.get('smooth');
smooth = getSmooth(seriesModel.get('smooth'));
polyline.setShape({
smooth: smooth,
smoothMonotone: seriesModel.get('smoothMonotone'),
connectNulls: seriesModel.get('connectNulls')
});
if (polygon) {
var stackedOnSeries = data.getCalculationInfo('stackedOnSeries');
var stackedOnSmooth = 0;
polygon.useStyle(zrUtil.defaults(
areaStyleModel.getAreaStyle(),
{
fill: visualColor,
opacity: 0.7,
lineJoin: 'bevel'
}
));
if (stackedOnSeries) {
stackedOnSmooth = getSmooth(stackedOnSeries.get('smooth'));
}
polygon.setShape({
smooth: smooth,
stackedOnSmooth: stackedOnSmooth,
smoothMonotone: seriesModel.get('smoothMonotone'),
connectNulls: seriesModel.get('connectNulls')
});
}
this._data = data;
// Save the coordinate system for transition animation when data changed
this._coordSys = coordSys;
this._stackedOnPoints = stackedOnPoints;
this._points = points;
this._step = step;
this._valueOrigin = valueOrigin;
},
dispose: function () {},
highlight: function (seriesModel, ecModel, api, payload) {
var data = seriesModel.getData();
var dataIndex = modelUtil.queryDataIndex(data, payload);
if (!(dataIndex instanceof Array) && dataIndex != null && dataIndex >= 0) {
var symbol = data.getItemGraphicEl(dataIndex);
if (!symbol) {
// Create a temporary symbol if it is not exists
var pt = data.getItemLayout(dataIndex);
if (!pt) {
// Null data
return;
}
symbol = new SymbolClz(data, dataIndex);
symbol.position = pt;
symbol.setZ(
seriesModel.get('zlevel'),
seriesModel.get('z')
);
symbol.ignore = isNaN(pt[0]) || isNaN(pt[1]);
symbol.__temp = true;
data.setItemGraphicEl(dataIndex, symbol);
// Stop scale animation
symbol.stopSymbolAnimation(true);
this.group.add(symbol);
}
symbol.highlight();
}
else {
// Highlight whole series
ChartView.prototype.highlight.call(
this, seriesModel, ecModel, api, payload
);
}
},
downplay: function (seriesModel, ecModel, api, payload) {
var data = seriesModel.getData();
var dataIndex = modelUtil.queryDataIndex(data, payload);
if (dataIndex != null && dataIndex >= 0) {
var symbol = data.getItemGraphicEl(dataIndex);
if (symbol) {
if (symbol.__temp) {
data.setItemGraphicEl(dataIndex, null);
this.group.remove(symbol);
}
else {
symbol.downplay();
}
}
}
else {
// FIXME
// can not downplay completely.
// Downplay whole series
ChartView.prototype.downplay.call(
this, seriesModel, ecModel, api, payload
);
}
},
/**
* @param {module:zrender/container/Group} group
* @param {Array.<Array.<number>>} points
* @private
*/
_newPolyline: function (points) {
var polyline = this._polyline;
// Remove previous created polyline
if (polyline) {
this._lineGroup.remove(polyline);
}
polyline = new Polyline({
shape: {
points: points
},
silent: true,
z2: 10
});
this._lineGroup.add(polyline);
this._polyline = polyline;
return polyline;
},
/**
* @param {module:zrender/container/Group} group
* @param {Array.<Array.<number>>} stackedOnPoints
* @param {Array.<Array.<number>>} points
* @private
*/
_newPolygon: function (points, stackedOnPoints) {
var polygon = this._polygon;
// Remove previous created polygon
if (polygon) {
this._lineGroup.remove(polygon);
}
polygon = new Polygon({
shape: {
points: points,
stackedOnPoints: stackedOnPoints
},
silent: true
});
this._lineGroup.add(polygon);
this._polygon = polygon;
return polygon;
},
/**
* @private
*/
// FIXME Two value axis
_updateAnimation: function (data, stackedOnPoints, coordSys, api, step, valueOrigin) {
var polyline = this._polyline;
var polygon = this._polygon;
var seriesModel = data.hostModel;
var diff = lineAnimationDiff(
this._data, data,
this._stackedOnPoints, stackedOnPoints,
this._coordSys, coordSys,
this._valueOrigin, valueOrigin
);
var current = diff.current;
var stackedOnCurrent = diff.stackedOnCurrent;
var next = diff.next;
var stackedOnNext = diff.stackedOnNext;
if (step) {
// TODO If stacked series is not step
current = turnPointsIntoStep(diff.current, coordSys, step);
stackedOnCurrent = turnPointsIntoStep(diff.stackedOnCurrent, coordSys, step);
next = turnPointsIntoStep(diff.next, coordSys, step);
stackedOnNext = turnPointsIntoStep(diff.stackedOnNext, coordSys, step);
}
// `diff.current` is subset of `current` (which should be ensured by
// turnPointsIntoStep), so points in `__points` can be updated when
// points in `current` are update during animation.
polyline.shape.__points = diff.current;
polyline.shape.points = current;
graphic.updateProps(polyline, {
shape: {
points: next
}
}, seriesModel);
if (polygon) {
polygon.setShape({
points: current,
stackedOnPoints: stackedOnCurrent
});
graphic.updateProps(polygon, {
shape: {
points: next,
stackedOnPoints: stackedOnNext
}
}, seriesModel);
}
var updatedDataInfo = [];
var diffStatus = diff.status;
for (var i = 0; i < diffStatus.length; i++) {
var cmd = diffStatus[i].cmd;
if (cmd === '=') {
var el = data.getItemGraphicEl(diffStatus[i].idx1);
if (el) {
updatedDataInfo.push({
el: el,
ptIdx: i // Index of points
});
}
}
}
if (polyline.animators && polyline.animators.length) {
polyline.animators[0].during(function () {
for (var i = 0; i < updatedDataInfo.length; i++) {
var el = updatedDataInfo[i].el;
el.attr('position', polyline.shape.__points[updatedDataInfo[i].ptIdx]);
}
});
}
},
remove: function (ecModel) {
var group = this.group;
var oldData = this._data;
this._lineGroup.removeAll();
this._symbolDraw.remove(true);
// Remove temporary created elements when highlighting
oldData && oldData.eachItemGraphicEl(function (el, idx) {
if (el.__temp) {
group.remove(el);
oldData.setItemGraphicEl(idx, null);
}
});
this._polyline
= this._polygon
= this._coordSys
= this._points
= this._stackedOnPoints
= this._data = null;
}
}); | getSmooth | identifier_name |
win32_aclui.py | # pylint:disable=line-too-long
import logging
from ...sim_type import SimTypeFunction, SimTypeShort, SimTypeInt, SimTypeLong, SimTypeLongLong, SimTypeDouble, SimTypeFloat, SimTypePointer, SimTypeChar, SimStruct, SimTypeFixedSizeArray, SimTypeBottom, SimUnion, SimTypeBool
from ...calling_conventions import SimCCStdcall, SimCCMicrosoftAMD64
from .. import SIM_PROCEDURES as P
from . import SimLibrary | _l = logging.getLogger(name=__name__)
lib = SimLibrary()
lib.set_default_cc('X86', SimCCStdcall)
lib.set_default_cc('AMD64', SimCCMicrosoftAMD64)
lib.set_library_names("aclui.dll")
prototypes = \
{
#
'CreateSecurityPage': SimTypeFunction([SimTypeBottom(label="ISecurityInformation")], SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), arg_names=["psi"]),
#
'EditSecurity': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeBottom(label="ISecurityInformation")], SimTypeInt(signed=True, label="Int32"), arg_names=["hwndOwner", "psi"]),
#
'EditSecurityAdvanced': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeBottom(label="ISecurityInformation"), SimTypeInt(signed=False, label="SI_PAGE_TYPE")], SimTypeInt(signed=True, label="Int32"), arg_names=["hwndOwner", "psi", "uSIPage"]),
}
lib.set_prototypes(prototypes) | random_line_split | |
generic-function.rs | // Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// min-lldb-version: 310
// compile-flags:-g
// === GDB TESTS ===================================================================================
// gdb-command:run
// gdb-command:print *t0
// gdb-check:$1 = 1
// gdb-command:print *t1
// gdb-check:$2 = 2.5
// gdb-command:print ret
// gdb-check:$3 = {{1, 2.5}, {2.5, 1}}
// gdb-command:continue
// gdb-command:print *t0
// gdb-check:$4 = 3.5
// gdb-command:print *t1
// gdb-check:$5 = 4
// gdb-command:print ret
// gdb-check:$6 = {{3.5, 4}, {4, 3.5}}
// gdb-command:continue
// gdb-command:print *t0
// gdb-check:$7 = 5
// gdb-command:print *t1
// gdb-check:$8 = {a = 6, b = 7.5}
// gdb-command:print ret
// gdb-check:$9 = {{5, {a = 6, b = 7.5}}, {{a = 6, b = 7.5}, 5}}
// gdb-command:continue
// === LLDB TESTS ==================================================================================
// lldb-command:run
// lldb-command:print *t0
// lldb-check:[...]$0 = 1
// lldb-command:print *t1
// lldb-check:[...]$1 = 2.5
// lldb-command:print ret
// lldb-check:[...]$2 = ((1, 2.5), (2.5, 1))
// lldb-command:continue
// lldb-command:print *t0
// lldb-check:[...]$3 = 3.5
// lldb-command:print *t1
// lldb-check:[...]$4 = 4
// lldb-command:print ret
// lldb-check:[...]$5 = ((3.5, 4), (4, 3.5))
// lldb-command:continue
// lldb-command:print *t0
// lldb-check:[...]$6 = 5
// lldb-command:print *t1
// lldb-check:[...]$7 = Struct { a: 6, b: 7.5 }
// lldb-command:print ret
// lldb-check:[...]$8 = ((5, Struct { a: 6, b: 7.5 }), (Struct { a: 6, b: 7.5 }, 5))
// lldb-command:continue
#![omit_gdb_pretty_printer_section]
#[derive(Clone)]
struct Struct {
a: int,
b: f64
}
fn dup_tup<T0: Clone, T1: Clone>(t0: &T0, t1: &T1) -> ((T0, T1), (T1, T0)) {
let ret = ((t0.clone(), t1.clone()), (t1.clone(), t0.clone()));
zzz(); // #break
ret
}
fn main() {
let _ = dup_tup(&1, &2.5f64);
let _ = dup_tup(&3.5f64, &4_u16);
let _ = dup_tup(&5, &Struct { a: 6, b: 7.5 });
}
fn | () {()}
| zzz | identifier_name |
generic-function.rs | // Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// min-lldb-version: 310
// compile-flags:-g
// === GDB TESTS ===================================================================================
// gdb-command:run
// gdb-command:print *t0
// gdb-check:$1 = 1
// gdb-command:print *t1
// gdb-check:$2 = 2.5
// gdb-command:print ret
// gdb-check:$3 = {{1, 2.5}, {2.5, 1}}
// gdb-command:continue
// gdb-command:print *t0
// gdb-check:$4 = 3.5
// gdb-command:print *t1
// gdb-check:$5 = 4
// gdb-command:print ret
// gdb-check:$6 = {{3.5, 4}, {4, 3.5}}
// gdb-command:continue
// gdb-command:print *t0
// gdb-check:$7 = 5
// gdb-command:print *t1
// gdb-check:$8 = {a = 6, b = 7.5}
// gdb-command:print ret
// gdb-check:$9 = {{5, {a = 6, b = 7.5}}, {{a = 6, b = 7.5}, 5}}
// gdb-command:continue
// === LLDB TESTS ==================================================================================
// lldb-command:run
// lldb-command:print *t0
// lldb-check:[...]$0 = 1
// lldb-command:print *t1
// lldb-check:[...]$1 = 2.5
// lldb-command:print ret
// lldb-check:[...]$2 = ((1, 2.5), (2.5, 1))
// lldb-command:continue
// lldb-command:print *t0
// lldb-check:[...]$3 = 3.5
// lldb-command:print *t1
// lldb-check:[...]$4 = 4 |
// lldb-command:print *t0
// lldb-check:[...]$6 = 5
// lldb-command:print *t1
// lldb-check:[...]$7 = Struct { a: 6, b: 7.5 }
// lldb-command:print ret
// lldb-check:[...]$8 = ((5, Struct { a: 6, b: 7.5 }), (Struct { a: 6, b: 7.5 }, 5))
// lldb-command:continue
#![omit_gdb_pretty_printer_section]
#[derive(Clone)]
struct Struct {
a: int,
b: f64
}
fn dup_tup<T0: Clone, T1: Clone>(t0: &T0, t1: &T1) -> ((T0, T1), (T1, T0)) {
let ret = ((t0.clone(), t1.clone()), (t1.clone(), t0.clone()));
zzz(); // #break
ret
}
fn main() {
let _ = dup_tup(&1, &2.5f64);
let _ = dup_tup(&3.5f64, &4_u16);
let _ = dup_tup(&5, &Struct { a: 6, b: 7.5 });
}
fn zzz() {()} | // lldb-command:print ret
// lldb-check:[...]$5 = ((3.5, 4), (4, 3.5))
// lldb-command:continue | random_line_split |
views.py | import uuid
from random import randint
from django.shortcuts import render
from django.http import HttpResponseRedirect
from .models import Url
def index(request):
|
def make_url(request):
if request.method == "POST":
url = None # initial url
url_site = request.POST['url']
url_id = generate_key()
try:
url = Url.objects.get(url_id = url_id)
while url:
url_id = generate_key()
url = Url.objects.get(url_id = url_id)
create_url(request, url_id, url_site)
request.session["has_url"] = url_id
except Url.DoesNotExist:
create_url(request, url_id, url_site)
request.session["has_url"] = url_id
return HttpResponseRedirect("/")
def create_url(custom_request, url_id, url_site):
if custom_request.user.is_authenticated():
url = Url.objects.create(url_id = url_id, url_site = url_site,
url_author = custom_request.user)
else:
url = Url.objects.create(url_id = url_id, url_site = url_site)
url.save()
def generate_key():
to_choose = "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ";
url_id = ""
while len(url_id) != 6:
i = randint(0, len(to_choose) - 1)
url_id += to_choose[i]
return url_id
def redirect_url(request, url_id=None):
try:
url = Url.objects.get(url_id = url_id)
url.url_clicked = url.url_clicked + 1
url.save()
except Url.DoesNotExist:
return render(request, "base/page_not_found.html", {})
return HttpResponseRedirect(url.url_site)
| if request.session.has_key("has_url"):
url = request.session.get("has_url")
del request.session['has_url']
return render(request, "miudo/index.html", locals())
return render(request, "miudo/index.html", {}) | identifier_body |
views.py | import uuid
from random import randint
from django.shortcuts import render
from django.http import HttpResponseRedirect
from .models import Url
def index(request):
if request.session.has_key("has_url"):
url = request.session.get("has_url")
del request.session['has_url']
return render(request, "miudo/index.html", locals())
return render(request, "miudo/index.html", {})
def make_url(request):
if request.method == "POST":
url = None # initial url
url_site = request.POST['url']
url_id = generate_key()
try:
url = Url.objects.get(url_id = url_id)
while url:
url_id = generate_key()
url = Url.objects.get(url_id = url_id)
create_url(request, url_id, url_site)
request.session["has_url"] = url_id
except Url.DoesNotExist:
create_url(request, url_id, url_site)
request.session["has_url"] = url_id
return HttpResponseRedirect("/")
def create_url(custom_request, url_id, url_site):
if custom_request.user.is_authenticated():
url = Url.objects.create(url_id = url_id, url_site = url_site,
url_author = custom_request.user)
else:
|
url.save()
def generate_key():
to_choose = "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ";
url_id = ""
while len(url_id) != 6:
i = randint(0, len(to_choose) - 1)
url_id += to_choose[i]
return url_id
def redirect_url(request, url_id=None):
try:
url = Url.objects.get(url_id = url_id)
url.url_clicked = url.url_clicked + 1
url.save()
except Url.DoesNotExist:
return render(request, "base/page_not_found.html", {})
return HttpResponseRedirect(url.url_site)
| url = Url.objects.create(url_id = url_id, url_site = url_site) | conditional_block |
views.py | import uuid
from random import randint
from django.shortcuts import render
from django.http import HttpResponseRedirect
from .models import Url
def index(request):
if request.session.has_key("has_url"):
url = request.session.get("has_url")
del request.session['has_url']
return render(request, "miudo/index.html", locals())
return render(request, "miudo/index.html", {})
def | (request):
if request.method == "POST":
url = None # initial url
url_site = request.POST['url']
url_id = generate_key()
try:
url = Url.objects.get(url_id = url_id)
while url:
url_id = generate_key()
url = Url.objects.get(url_id = url_id)
create_url(request, url_id, url_site)
request.session["has_url"] = url_id
except Url.DoesNotExist:
create_url(request, url_id, url_site)
request.session["has_url"] = url_id
return HttpResponseRedirect("/")
def create_url(custom_request, url_id, url_site):
if custom_request.user.is_authenticated():
url = Url.objects.create(url_id = url_id, url_site = url_site,
url_author = custom_request.user)
else:
url = Url.objects.create(url_id = url_id, url_site = url_site)
url.save()
def generate_key():
to_choose = "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ";
url_id = ""
while len(url_id) != 6:
i = randint(0, len(to_choose) - 1)
url_id += to_choose[i]
return url_id
def redirect_url(request, url_id=None):
try:
url = Url.objects.get(url_id = url_id)
url.url_clicked = url.url_clicked + 1
url.save()
except Url.DoesNotExist:
return render(request, "base/page_not_found.html", {})
return HttpResponseRedirect(url.url_site)
| make_url | identifier_name |
views.py | import uuid
from random import randint
from django.shortcuts import render
from django.http import HttpResponseRedirect
from .models import Url
def index(request):
if request.session.has_key("has_url"):
url = request.session.get("has_url")
del request.session['has_url']
return render(request, "miudo/index.html", locals())
return render(request, "miudo/index.html", {}) | def make_url(request):
if request.method == "POST":
url = None # initial url
url_site = request.POST['url']
url_id = generate_key()
try:
url = Url.objects.get(url_id = url_id)
while url:
url_id = generate_key()
url = Url.objects.get(url_id = url_id)
create_url(request, url_id, url_site)
request.session["has_url"] = url_id
except Url.DoesNotExist:
create_url(request, url_id, url_site)
request.session["has_url"] = url_id
return HttpResponseRedirect("/")
def create_url(custom_request, url_id, url_site):
if custom_request.user.is_authenticated():
url = Url.objects.create(url_id = url_id, url_site = url_site,
url_author = custom_request.user)
else:
url = Url.objects.create(url_id = url_id, url_site = url_site)
url.save()
def generate_key():
to_choose = "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ";
url_id = ""
while len(url_id) != 6:
i = randint(0, len(to_choose) - 1)
url_id += to_choose[i]
return url_id
def redirect_url(request, url_id=None):
try:
url = Url.objects.get(url_id = url_id)
url.url_clicked = url.url_clicked + 1
url.save()
except Url.DoesNotExist:
return render(request, "base/page_not_found.html", {})
return HttpResponseRedirect(url.url_site) | random_line_split | |
tweet_card.py | #! /usr/bin/env python
import rospy
import sys
from time import sleep
import actionlib
import yaml
from random import randint
#from threading import Timer
import strands_tweets.msg
import image_branding.msg
from std_msgs.msg import String
from sensor_msgs.msg import Image
#from cv_bridge import CvBridge, CvBridgeError
#import dynamic_reconfigure.client
class read_and_tweet(object):
def __init__(self, filename):
# rospy.on_shutdown(self._on_node_shutdown)
self.msg_sub = rospy.Subscriber('/socialCardReader/commands', String, self.command_callback, queue_size=1)
self.client = actionlib.SimpleActionClient('strands_tweets', strands_tweets.msg.SendTweetAction)
self.brandclient = actionlib.SimpleActionClient('/image_branding', image_branding.msg.ImageBrandingAction)
self.client.wait_for_server()
self.brandclient.wait_for_server()
text_file = open(filename, "r")
texts = yaml.load(text_file)
self.tweets_texts = texts['tweets']['card']
#self.tw_pub = rospy.Publisher('/card_image_tweet/tweet', card_image_tweet.msg.Tweet)
rospy.loginfo(" ... Init done")
def command_callback(self, msg):
command_msg = msg.data
if command_msg == 'PHOTO' :
self.msg_sub.unregister()
#/head_xtion/depth/image_rect_meters #store this
try:
msg = rospy.wait_for_message('/head_xtion/rgb/image_color', Image, timeout=1.0)
except rospy.ROSException :
rospy.logwarn("Failed to get camera rgb Image")
tweetgoal = strands_tweets.msg.SendTweetGoal()
brandgoal = image_branding.msg.ImageBrandingGoal()
text = self.tweets_texts[randint(0, len(self.tweets_texts)-1)]
#text = "Look who is here"
print "tweeting %s" %text
brandgoal.photo = msg
self.brandclient.send_goal(brandgoal)
self.brandclient.wait_for_result()
br_ph = self.brandclient.get_result()
tweetgoal.text = text
tweetgoal.with_photo = True
tweetgoal.photo = br_ph.branded_image
self.client.send_goal(tweetgoal)
#tweettext=card_image_tweet.msg.Tweet()
#tweettext.text = text
#tweettext.photo = br_ph.branded_image
#self.tw_pub.publish(tweettext)
self.client.wait_for_result()
ps = self.client.get_result()
print ps
sleep(10)
self.msg_sub = rospy.Subscriber('/socialCardReader/commands', String, self.command_callback, queue_size=1)
if __name__ == '__main__':
if len(sys.argv) < 2 :
|
filename=str(sys.argv[1])
rospy.init_node('card_image_tweet')
ps = read_and_tweet(filename)
rospy.spin() | print "usage: tweet_card file_texts.yaml"
sys.exit(2) | conditional_block |
tweet_card.py | #! /usr/bin/env python
import rospy
import sys
from time import sleep
import actionlib
import yaml
from random import randint
#from threading import Timer
import strands_tweets.msg
import image_branding.msg
from std_msgs.msg import String
from sensor_msgs.msg import Image
#from cv_bridge import CvBridge, CvBridgeError
#import dynamic_reconfigure.client
class read_and_tweet(object):
|
self.msg_sub = rospy.Subscriber('/socialCardReader/commands', String, self.command_callback, queue_size=1)
self.client = actionlib.SimpleActionClient('strands_tweets', strands_tweets.msg.SendTweetAction)
self.brandclient = actionlib.SimpleActionClient('/image_branding', image_branding.msg.ImageBrandingAction)
self.client.wait_for_server()
self.brandclient.wait_for_server()
text_file = open(filename, "r")
texts = yaml.load(text_file)
self.tweets_texts = texts['tweets']['card']
#self.tw_pub = rospy.Publisher('/card_image_tweet/tweet', card_image_tweet.msg.Tweet)
rospy.loginfo(" ... Init done")
def command_callback(self, msg):
command_msg = msg.data
if command_msg == 'PHOTO' :
self.msg_sub.unregister()
#/head_xtion/depth/image_rect_meters #store this
try:
msg = rospy.wait_for_message('/head_xtion/rgb/image_color', Image, timeout=1.0)
except rospy.ROSException :
rospy.logwarn("Failed to get camera rgb Image")
tweetgoal = strands_tweets.msg.SendTweetGoal()
brandgoal = image_branding.msg.ImageBrandingGoal()
text = self.tweets_texts[randint(0, len(self.tweets_texts)-1)]
#text = "Look who is here"
print "tweeting %s" %text
brandgoal.photo = msg
self.brandclient.send_goal(brandgoal)
self.brandclient.wait_for_result()
br_ph = self.brandclient.get_result()
tweetgoal.text = text
tweetgoal.with_photo = True
tweetgoal.photo = br_ph.branded_image
self.client.send_goal(tweetgoal)
#tweettext=card_image_tweet.msg.Tweet()
#tweettext.text = text
#tweettext.photo = br_ph.branded_image
#self.tw_pub.publish(tweettext)
self.client.wait_for_result()
ps = self.client.get_result()
print ps
sleep(10)
self.msg_sub = rospy.Subscriber('/socialCardReader/commands', String, self.command_callback, queue_size=1)
if __name__ == '__main__':
if len(sys.argv) < 2 :
print "usage: tweet_card file_texts.yaml"
sys.exit(2)
filename=str(sys.argv[1])
rospy.init_node('card_image_tweet')
ps = read_and_tweet(filename)
rospy.spin() | def __init__(self, filename):
# rospy.on_shutdown(self._on_node_shutdown) | random_line_split |
tweet_card.py | #! /usr/bin/env python
import rospy
import sys
from time import sleep
import actionlib
import yaml
from random import randint
#from threading import Timer
import strands_tweets.msg
import image_branding.msg
from std_msgs.msg import String
from sensor_msgs.msg import Image
#from cv_bridge import CvBridge, CvBridgeError
#import dynamic_reconfigure.client
class read_and_tweet(object):
def __init__(self, filename):
# rospy.on_shutdown(self._on_node_shutdown)
self.msg_sub = rospy.Subscriber('/socialCardReader/commands', String, self.command_callback, queue_size=1)
self.client = actionlib.SimpleActionClient('strands_tweets', strands_tweets.msg.SendTweetAction)
self.brandclient = actionlib.SimpleActionClient('/image_branding', image_branding.msg.ImageBrandingAction)
self.client.wait_for_server()
self.brandclient.wait_for_server()
text_file = open(filename, "r")
texts = yaml.load(text_file)
self.tweets_texts = texts['tweets']['card']
#self.tw_pub = rospy.Publisher('/card_image_tweet/tweet', card_image_tweet.msg.Tweet)
rospy.loginfo(" ... Init done")
def | (self, msg):
command_msg = msg.data
if command_msg == 'PHOTO' :
self.msg_sub.unregister()
#/head_xtion/depth/image_rect_meters #store this
try:
msg = rospy.wait_for_message('/head_xtion/rgb/image_color', Image, timeout=1.0)
except rospy.ROSException :
rospy.logwarn("Failed to get camera rgb Image")
tweetgoal = strands_tweets.msg.SendTweetGoal()
brandgoal = image_branding.msg.ImageBrandingGoal()
text = self.tweets_texts[randint(0, len(self.tweets_texts)-1)]
#text = "Look who is here"
print "tweeting %s" %text
brandgoal.photo = msg
self.brandclient.send_goal(brandgoal)
self.brandclient.wait_for_result()
br_ph = self.brandclient.get_result()
tweetgoal.text = text
tweetgoal.with_photo = True
tweetgoal.photo = br_ph.branded_image
self.client.send_goal(tweetgoal)
#tweettext=card_image_tweet.msg.Tweet()
#tweettext.text = text
#tweettext.photo = br_ph.branded_image
#self.tw_pub.publish(tweettext)
self.client.wait_for_result()
ps = self.client.get_result()
print ps
sleep(10)
self.msg_sub = rospy.Subscriber('/socialCardReader/commands', String, self.command_callback, queue_size=1)
if __name__ == '__main__':
if len(sys.argv) < 2 :
print "usage: tweet_card file_texts.yaml"
sys.exit(2)
filename=str(sys.argv[1])
rospy.init_node('card_image_tweet')
ps = read_and_tweet(filename)
rospy.spin() | command_callback | identifier_name |
tweet_card.py | #! /usr/bin/env python
import rospy
import sys
from time import sleep
import actionlib
import yaml
from random import randint
#from threading import Timer
import strands_tweets.msg
import image_branding.msg
from std_msgs.msg import String
from sensor_msgs.msg import Image
#from cv_bridge import CvBridge, CvBridgeError
#import dynamic_reconfigure.client
class read_and_tweet(object):
def __init__(self, filename):
# rospy.on_shutdown(self._on_node_shutdown)
|
def command_callback(self, msg):
command_msg = msg.data
if command_msg == 'PHOTO' :
self.msg_sub.unregister()
#/head_xtion/depth/image_rect_meters #store this
try:
msg = rospy.wait_for_message('/head_xtion/rgb/image_color', Image, timeout=1.0)
except rospy.ROSException :
rospy.logwarn("Failed to get camera rgb Image")
tweetgoal = strands_tweets.msg.SendTweetGoal()
brandgoal = image_branding.msg.ImageBrandingGoal()
text = self.tweets_texts[randint(0, len(self.tweets_texts)-1)]
#text = "Look who is here"
print "tweeting %s" %text
brandgoal.photo = msg
self.brandclient.send_goal(brandgoal)
self.brandclient.wait_for_result()
br_ph = self.brandclient.get_result()
tweetgoal.text = text
tweetgoal.with_photo = True
tweetgoal.photo = br_ph.branded_image
self.client.send_goal(tweetgoal)
#tweettext=card_image_tweet.msg.Tweet()
#tweettext.text = text
#tweettext.photo = br_ph.branded_image
#self.tw_pub.publish(tweettext)
self.client.wait_for_result()
ps = self.client.get_result()
print ps
sleep(10)
self.msg_sub = rospy.Subscriber('/socialCardReader/commands', String, self.command_callback, queue_size=1)
if __name__ == '__main__':
if len(sys.argv) < 2 :
print "usage: tweet_card file_texts.yaml"
sys.exit(2)
filename=str(sys.argv[1])
rospy.init_node('card_image_tweet')
ps = read_and_tweet(filename)
rospy.spin() | self.msg_sub = rospy.Subscriber('/socialCardReader/commands', String, self.command_callback, queue_size=1)
self.client = actionlib.SimpleActionClient('strands_tweets', strands_tweets.msg.SendTweetAction)
self.brandclient = actionlib.SimpleActionClient('/image_branding', image_branding.msg.ImageBrandingAction)
self.client.wait_for_server()
self.brandclient.wait_for_server()
text_file = open(filename, "r")
texts = yaml.load(text_file)
self.tweets_texts = texts['tweets']['card']
#self.tw_pub = rospy.Publisher('/card_image_tweet/tweet', card_image_tweet.msg.Tweet)
rospy.loginfo(" ... Init done") | identifier_body |
fields.py | import re
from django.db.models import fields
from django.template.defaultfilters import slugify
def _unique_slugify(instance, value, slug_field_name='slug', queryset=None, slug_separator='-'):
slug_field = instance._meta.get_field(slug_field_name)
slug_len = slug_field.max_length
# Sort out the initial slug. Chop its length down if we need to.
slug = slugify(value)
if slug_len:
slug = slug[:slug_len]
slug = _slug_strip(slug, slug_separator)
original_slug = slug
# Create a queryset, excluding the current instance.
if queryset is None:
queryset = instance.__class__._default_manager.all()
if instance.pk:
queryset = queryset.exclude(pk=instance.pk)
# Find a unique slug. If one matches, at '-2' to the end and try again
# (then '-3', etc).
next = 2
while not slug or queryset.filter(**{slug_field_name: slug}):
slug = original_slug
end = '-%s' % next
if slug_len and len(slug) + len(end) > slug_len:
slug = slug[:slug_len-len(end)]
slug = _slug_strip(slug, slug_separator)
slug = '%s%s' % (slug, end)
next += 1
setattr(instance, slug_field.attname, slug)
return slug
def _slug_strip(value, separator=None):
"""
Cleans up a slug by removing slug separator characters that occur at the
beginning or end of a slug.
If an alternate separator is used, it will also replace any instances of the
default '-' separator with the new separator.
"""
if separator == '-' or not separator:
re_sep = '-'
else:
re_sep = '(?:-|%s)' % re.escape(separator)
value = re.sub('%s+' % re_sep, separator, value)
return re.sub(r'^%s+|%s+$' % (re_sep, re_sep), '', value)
class AutoSlugField(fields.SlugField):
"""Auto slug field, creates unique slug for model."""
def __init__(self, prepopulate_from, *args, **kwargs):
"""Create auto slug field.
If field is unique, the uniqueness of the slug is ensured from existing
slugs by adding extra number at the end of slug.
If field has slug given, it is used instead. If you want to re-generate
the slug, just set it :const:`None` or :const:`""` so it will be re-
generated automatically.
:param prepopulate_from: Must be assigned to list of field names which
are used to prepopulate automatically.
:type prepopulate_from: sequence
"""
self.prepopulate_separator = kwargs.get("prepopulate_separator", u"-")
self.prepopulate_from = prepopulate_from
kwargs["blank"] = True
super(fields.SlugField, self).__init__(*args, **kwargs)
def pre_save(self, model_instance, add): #@UnusedVariable
| """Pre-save event"""
current_slug = getattr(model_instance, self.attname)
# Use current slug instead, if it is given.
# Assumption: There are no empty slugs.
if not (current_slug is None or current_slug == ""):
slug = current_slug
else:
slug = self.prepopulate_separator.\
join(unicode(getattr(model_instance, prepop))
for prepop in self.prepopulate_from)
if self.unique:
return _unique_slugify(model_instance, value=slug,
slug_field_name=self.attname)
else:
return slugify(slug)[:self.max_length] | identifier_body | |
fields.py | import re
from django.db.models import fields
from django.template.defaultfilters import slugify
def _unique_slugify(instance, value, slug_field_name='slug', queryset=None, slug_separator='-'):
slug_field = instance._meta.get_field(slug_field_name)
slug_len = slug_field.max_length
# Sort out the initial slug. Chop its length down if we need to.
slug = slugify(value)
if slug_len:
slug = slug[:slug_len]
slug = _slug_strip(slug, slug_separator)
original_slug = slug
# Create a queryset, excluding the current instance.
if queryset is None:
queryset = instance.__class__._default_manager.all()
if instance.pk:
queryset = queryset.exclude(pk=instance.pk)
# Find a unique slug. If one matches, at '-2' to the end and try again
# (then '-3', etc).
next = 2
while not slug or queryset.filter(**{slug_field_name: slug}):
slug = original_slug
end = '-%s' % next
if slug_len and len(slug) + len(end) > slug_len:
slug = slug[:slug_len-len(end)]
slug = _slug_strip(slug, slug_separator)
slug = '%s%s' % (slug, end)
next += 1
setattr(instance, slug_field.attname, slug)
return slug
def _slug_strip(value, separator=None):
"""
Cleans up a slug by removing slug separator characters that occur at the
beginning or end of a slug.
If an alternate separator is used, it will also replace any instances of the
default '-' separator with the new separator.
"""
if separator == '-' or not separator:
re_sep = '-'
else:
re_sep = '(?:-|%s)' % re.escape(separator)
value = re.sub('%s+' % re_sep, separator, value)
return re.sub(r'^%s+|%s+$' % (re_sep, re_sep), '', value)
class AutoSlugField(fields.SlugField):
"""Auto slug field, creates unique slug for model."""
def __init__(self, prepopulate_from, *args, **kwargs):
"""Create auto slug field.
If field is unique, the uniqueness of the slug is ensured from existing
slugs by adding extra number at the end of slug.
If field has slug given, it is used instead. If you want to re-generate
the slug, just set it :const:`None` or :const:`""` so it will be re-
generated automatically.
:param prepopulate_from: Must be assigned to list of field names which
are used to prepopulate automatically.
:type prepopulate_from: sequence
"""
self.prepopulate_separator = kwargs.get("prepopulate_separator", u"-")
self.prepopulate_from = prepopulate_from
kwargs["blank"] = True
super(fields.SlugField, self).__init__(*args, **kwargs)
def pre_save(self, model_instance, add): #@UnusedVariable
"""Pre-save event"""
current_slug = getattr(model_instance, self.attname)
| slug = current_slug
else:
slug = self.prepopulate_separator.\
join(unicode(getattr(model_instance, prepop))
for prepop in self.prepopulate_from)
if self.unique:
return _unique_slugify(model_instance, value=slug,
slug_field_name=self.attname)
else:
return slugify(slug)[:self.max_length] | # Use current slug instead, if it is given.
# Assumption: There are no empty slugs.
if not (current_slug is None or current_slug == ""): | random_line_split |
fields.py | import re
from django.db.models import fields
from django.template.defaultfilters import slugify
def _unique_slugify(instance, value, slug_field_name='slug', queryset=None, slug_separator='-'):
slug_field = instance._meta.get_field(slug_field_name)
slug_len = slug_field.max_length
# Sort out the initial slug. Chop its length down if we need to.
slug = slugify(value)
if slug_len:
slug = slug[:slug_len]
slug = _slug_strip(slug, slug_separator)
original_slug = slug
# Create a queryset, excluding the current instance.
if queryset is None:
queryset = instance.__class__._default_manager.all()
if instance.pk:
queryset = queryset.exclude(pk=instance.pk)
# Find a unique slug. If one matches, at '-2' to the end and try again
# (then '-3', etc).
next = 2
while not slug or queryset.filter(**{slug_field_name: slug}):
slug = original_slug
end = '-%s' % next
if slug_len and len(slug) + len(end) > slug_len:
slug = slug[:slug_len-len(end)]
slug = _slug_strip(slug, slug_separator)
slug = '%s%s' % (slug, end)
next += 1
setattr(instance, slug_field.attname, slug)
return slug
def | (value, separator=None):
"""
Cleans up a slug by removing slug separator characters that occur at the
beginning or end of a slug.
If an alternate separator is used, it will also replace any instances of the
default '-' separator with the new separator.
"""
if separator == '-' or not separator:
re_sep = '-'
else:
re_sep = '(?:-|%s)' % re.escape(separator)
value = re.sub('%s+' % re_sep, separator, value)
return re.sub(r'^%s+|%s+$' % (re_sep, re_sep), '', value)
class AutoSlugField(fields.SlugField):
"""Auto slug field, creates unique slug for model."""
def __init__(self, prepopulate_from, *args, **kwargs):
"""Create auto slug field.
If field is unique, the uniqueness of the slug is ensured from existing
slugs by adding extra number at the end of slug.
If field has slug given, it is used instead. If you want to re-generate
the slug, just set it :const:`None` or :const:`""` so it will be re-
generated automatically.
:param prepopulate_from: Must be assigned to list of field names which
are used to prepopulate automatically.
:type prepopulate_from: sequence
"""
self.prepopulate_separator = kwargs.get("prepopulate_separator", u"-")
self.prepopulate_from = prepopulate_from
kwargs["blank"] = True
super(fields.SlugField, self).__init__(*args, **kwargs)
def pre_save(self, model_instance, add): #@UnusedVariable
"""Pre-save event"""
current_slug = getattr(model_instance, self.attname)
# Use current slug instead, if it is given.
# Assumption: There are no empty slugs.
if not (current_slug is None or current_slug == ""):
slug = current_slug
else:
slug = self.prepopulate_separator.\
join(unicode(getattr(model_instance, prepop))
for prepop in self.prepopulate_from)
if self.unique:
return _unique_slugify(model_instance, value=slug,
slug_field_name=self.attname)
else:
return slugify(slug)[:self.max_length] | _slug_strip | identifier_name |
fields.py | import re
from django.db.models import fields
from django.template.defaultfilters import slugify
def _unique_slugify(instance, value, slug_field_name='slug', queryset=None, slug_separator='-'):
slug_field = instance._meta.get_field(slug_field_name)
slug_len = slug_field.max_length
# Sort out the initial slug. Chop its length down if we need to.
slug = slugify(value)
if slug_len:
slug = slug[:slug_len]
slug = _slug_strip(slug, slug_separator)
original_slug = slug
# Create a queryset, excluding the current instance.
if queryset is None:
|
# Find a unique slug. If one matches, at '-2' to the end and try again
# (then '-3', etc).
next = 2
while not slug or queryset.filter(**{slug_field_name: slug}):
slug = original_slug
end = '-%s' % next
if slug_len and len(slug) + len(end) > slug_len:
slug = slug[:slug_len-len(end)]
slug = _slug_strip(slug, slug_separator)
slug = '%s%s' % (slug, end)
next += 1
setattr(instance, slug_field.attname, slug)
return slug
def _slug_strip(value, separator=None):
"""
Cleans up a slug by removing slug separator characters that occur at the
beginning or end of a slug.
If an alternate separator is used, it will also replace any instances of the
default '-' separator with the new separator.
"""
if separator == '-' or not separator:
re_sep = '-'
else:
re_sep = '(?:-|%s)' % re.escape(separator)
value = re.sub('%s+' % re_sep, separator, value)
return re.sub(r'^%s+|%s+$' % (re_sep, re_sep), '', value)
class AutoSlugField(fields.SlugField):
"""Auto slug field, creates unique slug for model."""
def __init__(self, prepopulate_from, *args, **kwargs):
"""Create auto slug field.
If field is unique, the uniqueness of the slug is ensured from existing
slugs by adding extra number at the end of slug.
If field has slug given, it is used instead. If you want to re-generate
the slug, just set it :const:`None` or :const:`""` so it will be re-
generated automatically.
:param prepopulate_from: Must be assigned to list of field names which
are used to prepopulate automatically.
:type prepopulate_from: sequence
"""
self.prepopulate_separator = kwargs.get("prepopulate_separator", u"-")
self.prepopulate_from = prepopulate_from
kwargs["blank"] = True
super(fields.SlugField, self).__init__(*args, **kwargs)
def pre_save(self, model_instance, add): #@UnusedVariable
"""Pre-save event"""
current_slug = getattr(model_instance, self.attname)
# Use current slug instead, if it is given.
# Assumption: There are no empty slugs.
if not (current_slug is None or current_slug == ""):
slug = current_slug
else:
slug = self.prepopulate_separator.\
join(unicode(getattr(model_instance, prepop))
for prepop in self.prepopulate_from)
if self.unique:
return _unique_slugify(model_instance, value=slug,
slug_field_name=self.attname)
else:
return slugify(slug)[:self.max_length] | queryset = instance.__class__._default_manager.all()
if instance.pk:
queryset = queryset.exclude(pk=instance.pk) | conditional_block |
serial.js | 'use strict';
goog.provide('Blockly.JavaScript.serial');
goog.require('Blockly.JavaScript');
Blockly.JavaScript.serial_print = function() {
var content = Blockly.JavaScript.valueToCode(this, 'CONTENT', Blockly.JavaScript.ORDER_ATOMIC) || '\"\"'
var code = 'serial.writeString(\'\' + '+content+');\n';
return code;
};
Blockly.JavaScript.serial_println = function() {
var content = Blockly.JavaScript.valueToCode(this, 'CONTENT', Blockly.JavaScript.ORDER_ATOMIC) || '\"\"'
var code = 'serial.writeLine(\'\' + '+content+');\n';
return code;
};
Blockly.JavaScript.serial_print_hex = function() {
var content = Blockly.JavaScript.valueToCode(this, 'CONTENT', Blockly.JavaScript.ORDER_ATOMIC) || '0';
var code = 'serial.writeLine('+content+'.toString(16));\n';
return code;
};
Blockly.JavaScript.serial_receive_data_event = function() { | };
Blockly.JavaScript.serial_readstr = function() {
var code ="serial.readString()";
return [code,Blockly.JavaScript.ORDER_ATOMIC];
};
Blockly.JavaScript.serial_readline = function() {
var code ="serial.readLine()";
return [code,Blockly.JavaScript.ORDER_ATOMIC];
};
Blockly.JavaScript.serial_readstr_until = function() {
var char_marker = this.getFieldValue('char_marker');
var code ="serial.readUntil("+char_marker + ")";
return [code,Blockly.JavaScript.ORDER_ATOMIC];
};
Blockly.JavaScript.serial_softserial = function () {
var dropdown_pin1 = Blockly.JavaScript.valueToCode(this, 'RX',Blockly.JavaScript.ORDER_ATOMIC);
var dropdown_pin2 = Blockly.JavaScript.valueToCode(this, 'TX',Blockly.JavaScript.ORDER_ATOMIC);
var baudrate = this.getFieldValue('baudrate');
return "serial.redirect(" + dropdown_pin1 + ", " + dropdown_pin2 + ", BaudRate.BaudRate" + baudrate + ");\n";
}; | var char_marker = Blockly.JavaScript.valueToCode(this, 'char_marker', Blockly.JavaScript.ORDER_ATOMIC) || ';';
var branch = Blockly.JavaScript.statementToCode(this, 'DO');
Blockly.JavaScript.definitions_['func_serial_receive_data_event_' + char_marker.charCodeAt(1)] = "serial.onDataReceived(" + char_marker + ", () => {\n" + branch + "};\n"; | random_line_split |
material-checkbox.component.ts | import { Component, Input, OnInit } from '@angular/core';
import { AbstractControl } from '@angular/forms';
import { hasOwn } from './../../shared/utility.functions';
import { JsonSchemaFormService } from '../../json-schema-form.service';
@Component({
selector: 'material-checkbox-widget',
template: `
<md-checkbox *ngIf="isConditionallyShown()"
align="left"
[color]="options?.color || 'primary'"
[disabled]="controlDisabled || options?.readonly"
[id]="'control' + layoutNode?._id"
[name]="controlName"
[checked]="isChecked"
(change)="updateValue($event)">
<span *ngIf="options?.title"
class="checkbox-name"
[style.display]="options?.notitle ? 'none' : ''"
[innerHTML]="options?.title"></span>
</md-checkbox>`,
styles: [` .checkbox-name { white-space: nowrap; } `],
})
export class MaterialCheckboxComponent implements OnInit {
formControl: AbstractControl;
controlName: string;
controlValue: any;
controlDisabled: boolean = false;
boundControl: boolean = false;
options: any;
trueValue: any = true;
falseValue: any = false;
@Input() formID: number;
@Input() layoutNode: any;
@Input() layoutIndex: number[];
@Input() dataIndex: number[];
@Input() data:any;
constructor(
private jsf: JsonSchemaFormService
) { }
ngOnInit() {
this.options = this.layoutNode.options || {};
this.jsf.initializeControl(this);
if (this.controlValue === null || this.controlValue === undefined) {
this.controlValue = false;
}
}
updateValue(event) {
this.jsf.updateValue(this, event.checked ? this.trueValue : this.falseValue);
}
get isChecked() {
return this.jsf.getFormControlValue(this) === this.trueValue;
}
isConditionallyShown(): boolean |
}
| {
this.data = this.jsf.data;
let result: boolean = true;
if (this.data && hasOwn(this.options, 'condition')) {
const model = this.data;
/* tslint:disable */
eval('result = ' + this.options.condition);
/* tslint:enable */
}
return result;
} | identifier_body |
material-checkbox.component.ts | import { Component, Input, OnInit } from '@angular/core';
import { AbstractControl } from '@angular/forms';
import { hasOwn } from './../../shared/utility.functions';
import { JsonSchemaFormService } from '../../json-schema-form.service';
@Component({
selector: 'material-checkbox-widget',
template: `
<md-checkbox *ngIf="isConditionallyShown()"
align="left"
[color]="options?.color || 'primary'"
[disabled]="controlDisabled || options?.readonly"
[id]="'control' + layoutNode?._id"
[name]="controlName"
[checked]="isChecked"
(change)="updateValue($event)">
<span *ngIf="options?.title"
class="checkbox-name"
[style.display]="options?.notitle ? 'none' : ''"
[innerHTML]="options?.title"></span>
</md-checkbox>`,
styles: [` .checkbox-name { white-space: nowrap; } `],
})
export class MaterialCheckboxComponent implements OnInit {
formControl: AbstractControl;
controlName: string;
controlValue: any;
controlDisabled: boolean = false;
boundControl: boolean = false;
options: any;
trueValue: any = true;
falseValue: any = false;
@Input() formID: number;
@Input() layoutNode: any;
@Input() layoutIndex: number[];
@Input() dataIndex: number[];
@Input() data:any;
constructor(
private jsf: JsonSchemaFormService
) { }
ngOnInit() {
this.options = this.layoutNode.options || {};
this.jsf.initializeControl(this);
if (this.controlValue === null || this.controlValue === undefined) {
this.controlValue = false;
}
}
updateValue(event) {
this.jsf.updateValue(this, event.checked ? this.trueValue : this.falseValue);
}
get isChecked() {
return this.jsf.getFormControlValue(this) === this.trueValue;
}
isConditionallyShown(): boolean {
this.data = this.jsf.data;
let result: boolean = true;
if (this.data && hasOwn(this.options, 'condition')) {
const model = this.data;
/* tslint:disable */
eval('result = ' + this.options.condition);
/* tslint:enable */
} |
return result;
}
} | random_line_split |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.