| body (string, 26-98.2k chars) | body_hash (int64) | docstring (string, 1-16.8k chars) | path (string, 5-230 chars) | name (string, 1-96 chars) | repository_name (string, 7-89 chars) | lang (1 class) | body_without_docstring (string, 20-98.2k chars) |
|---|---|---|---|---|---|---|---|
def collect(self):
'\n collectors only function called collect. and it collects data\n '
downstream = GaugeMetricFamily('node_bw_wan_bps', 'last tested wan downstream mb/s', labels=['nodeid'])
for node in GLOBAL_NODES['nodes']:
if ('downstream_mbps_wan' in node):
downstream.add_metric([node['id']], node['downstream_mbps_wan'])
(yield downstream)
upstream = GaugeMetricFamily('node_bw_ff_bps', 'last tested ff downstream in mb/s', labels=['nodeid'])
for node in GLOBAL_NODES['nodes']:
if ('downstream_mbps_ff' in node):
upstream.add_metric([node['id']], node['downstream_mbps_ff'])
(yield upstream)
ping = GaugeMetricFamily('node_gw_ping_ms', 'last tested gateway ping in ms', labels=['nodeid'])
for node in GLOBAL_NODES['nodes']:
if ('gw_ping_ms' in node):
ping.add_metric([node['id']], node['gw_ping_ms'])
(yield ping)
rx_counter = CounterMetricFamily('node_rx_bytes', 'received bytes', labels=['nodeid'])
for node in GLOBAL_NODES['nodes']:
if ('rx_bytes' in node):
rx_counter.add_metric([node['id']], int(node['rx_bytes']))
(yield rx_counter)
tx_counter = CounterMetricFamily('node_tx_bytes', 'transmitted bytes', labels=['nodeid'])
for node in GLOBAL_NODES['nodes']:
if ('tx_bytes' in node):
tx_counter.add_metric([node['id']], int(node['tx_bytes']))
(yield tx_counter)
| 8,762,798,445,223,603,000
|
The collector's only function is called collect, and it collects the data
|
roles/ffbsee-robin-exporter/files/robin_prometheus.py
|
collect
|
ffbsee/ffbsee-ansible
|
python
|
def collect(self):
'\n \n '
downstream = GaugeMetricFamily('node_bw_wan_bps', 'last tested wan downstream mb/s', labels=['nodeid'])
for node in GLOBAL_NODES['nodes']:
if ('downstream_mbps_wan' in node):
downstream.add_metric([node['id']], node['downstream_mbps_wan'])
(yield downstream)
upstream = GaugeMetricFamily('node_bw_ff_bps', 'last tested ff downstream in mb/s', labels=['nodeid'])
for node in GLOBAL_NODES['nodes']:
if ('downstream_mbps_ff' in node):
upstream.add_metric([node['id']], node['downstream_mbps_ff'])
(yield upstream)
ping = GaugeMetricFamily('node_gw_ping_ms', 'last tested gateway ping in ms', labels=['nodeid'])
for node in GLOBAL_NODES['nodes']:
if ('gw_ping_ms' in node):
ping.add_metric([node['id']], node['gw_ping_ms'])
(yield ping)
rx_counter = CounterMetricFamily('node_rx_bytes', 'received bytes', labels=['nodeid'])
for node in GLOBAL_NODES['nodes']:
if ('rx_bytes' in node):
rx_counter.add_metric([node['id']], int(node['rx_bytes']))
(yield rx_counter)
tx_counter = CounterMetricFamily('node_tx_bytes', 'transmitted bytes', labels=['nodeid'])
for node in GLOBAL_NODES['nodes']:
if ('tx_bytes' in node):
tx_counter.add_metric([node['id']], int(node['tx_bytes']))
(yield tx_counter)
|
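The `collect()` generator above only does anything once it is registered with `prometheus_client`. A minimal sketch of that wiring, assuming a hypothetical `RobinCollector` class name and port; `GLOBAL_NODES` must be populated elsewhere in the exporter:

```python
import time
from prometheus_client import start_http_server
from prometheus_client.core import GaugeMetricFamily, REGISTRY

class RobinCollector:
    """Stand-in for the collector shown above (class name is assumed)."""
    def collect(self):
        ping = GaugeMetricFamily('node_gw_ping_ms', 'last tested gateway ping in ms', labels=['nodeid'])
        ping.add_metric(['c0ffee000001'], 12.5)  # dummy node id and value
        yield ping

if __name__ == '__main__':
    REGISTRY.register(RobinCollector())  # custom collectors are invoked on every scrape
    start_http_server(9101)              # metrics served at :9101/metrics
    while True:
        time.sleep(60)
```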
@staticmethod
def _parse_string(parse_it):
'\n Strip an escaped string which is enclosed in double quotes and\n unescape.\n '
if ((parse_it[0] != '"') or (parse_it[(- 1)] != '"')):
raise ValueError('malformatted string: {0:r}'.format(parse_it))
return bytes(parse_it[1:(- 1)], 'ascii').decode('unicode-escape')
| 941,041,326,645,701,900
|
Strip an escaped string which is enclosed in double quotes and
unescape.
|
roles/ffbsee-robin-exporter/files/robin_prometheus.py
|
_parse_string
|
ffbsee/ffbsee-ansible
|
python
|
@staticmethod
def _parse_string(parse_it):
'\n Strip an escaped string which is enclosed in double quotes and\n unescape.\n '
if ((parse_it[0] != '"') or (parse_it[(- 1)] != '"')):
raise ValueError('malformatted string: {0:r}'.format(parse_it))
return bytes(parse_it[1:(- 1)], 'ascii').decode('unicode-escape')
|
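A tiny, self-contained illustration of what `_parse_string` does (the sample input is made up): strip the surrounding double quotes, then undo backslash escapes via the `unicode-escape` codec:

```python
raw = '"line1\\nline2"'        # escaped string, enclosed in double quotes
assert raw[0] == '"' and raw[-1] == '"'
decoded = bytes(raw[1:-1], 'ascii').decode('unicode-escape')
print(decoded)                 # prints two lines: line1 / line2
```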
@staticmethod
def parse_line(item, nodes=None):
'\n Parse and validate a line as returned by alfred.\n\n Such lines consist of a nodes MAC address and an escaped string of JSON\n encoded data. Note that most missing fields are populated with\n reasonable defaults.\n '
if (nodes is None):
nodes = {}
if ((item[(- 2):] != '},') or (item[0] != '{')):
raise ValueError('malformatted line: {0}'.format(item))
(mac, properties) = item[1:(- 2)].split(',', 1)
mac = AlfredParser._parse_string(mac.strip())
jsonschema.validate(mac, AlfredParser.MAC_SCHEMA)
properties = AlfredParser._parse_string(properties.strip())
if ('\x00' in properties):
decompress = zlib.decompressobj((zlib.MAX_WBITS | 32))
properties = decompress.decompress(properties.encode('raw-unicode-escape'), (64 * 1024)).decode('utf-8')
else:
properties = properties.encode('latin-1').decode('utf8')
properties = json.loads(properties)
jsonschema.validate(properties, AlfredParser.ALFRED_NODE_SCHEMA)
properties.setdefault('downstream_mbps_wan', 0)
properties.setdefault('downstream_mbps_ff', 0)
properties.setdefault('rx_bytes', 0)
properties.setdefault('tx_bytes', 0)
if (mac in nodes):
node = nodes[mac]
node.update_properties(properties, True)
node.online = True
node.lastseen = NOW_TIMESTAMP
else:
node = Node(mac, properties, True)
nodes[mac] = node
| -6,734,838,248,712,039,000
|
Parse and validate a line as returned by alfred.
Such lines consist of a node's MAC address and an escaped string of JSON
encoded data. Note that most missing fields are populated with
reasonable defaults.
|
roles/ffbsee-robin-exporter/files/robin_prometheus.py
|
parse_line
|
ffbsee/ffbsee-ansible
|
python
|
@staticmethod
def parse_line(item, nodes=None):
'\n Parse and validate a line as returned by alfred.\n\n Such lines consist of a nodes MAC address and an escaped string of JSON\n encoded data. Note that most missing fields are populated with\n reasonable defaults.\n '
if (nodes is None):
nodes = {}
if ((item[(- 2):] != '},') or (item[0] != '{')):
raise ValueError('malformatted line: {0}'.format(item))
(mac, properties) = item[1:(- 2)].split(',', 1)
mac = AlfredParser._parse_string(mac.strip())
jsonschema.validate(mac, AlfredParser.MAC_SCHEMA)
properties = AlfredParser._parse_string(properties.strip())
if ('\x00' in properties):
decompress = zlib.decompressobj((zlib.MAX_WBITS | 32))
properties = decompress.decompress(properties.encode('raw-unicode-escape'), (64 * 1024)).decode('utf-8')
else:
properties = properties.encode('latin-1').decode('utf8')
properties = json.loads(properties)
jsonschema.validate(properties, AlfredParser.ALFRED_NODE_SCHEMA)
properties.setdefault('downstream_mbps_wan', 0)
properties.setdefault('downstream_mbps_ff', 0)
properties.setdefault('rx_bytes', 0)
properties.setdefault('tx_bytes', 0)
if (mac in nodes):
node = nodes[mac]
node.update_properties(properties, True)
node.online = True
node.lastseen = NOW_TIMESTAMP
else:
node = Node(mac, properties, True)
nodes[mac] = node
|
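A hedged sketch of the line shape `parse_line` expects, inferred from the checks above (the sample line is invented; the zlib branch and schema validation are skipped): an outer `{ ... },` wrapper, a quoted MAC, a comma, then a quoted, escaped JSON blob:

```python
import json

item = '{ "de:ad:be:ef:00:01", "{\\"hostname\\": \\"node1\\", \\"rx_bytes\\": 42}" },'

assert item[0] == '{' and item[-2:] == '},'
mac_part, props_part = item[1:-2].split(',', 1)

def unquote(s):
    s = s.strip()
    return bytes(s[1:-1], 'ascii').decode('unicode-escape')

mac = unquote(mac_part)                  # 'de:ad:be:ef:00:01'
properties = json.loads(unquote(props_part))
print(mac, properties['rx_bytes'])       # de:ad:be:ef:00:01 42
```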
def update_properties(self, properties, force=True):
'\n Replace any properties with their respective values in ``properties``.\n '
if force:
self.properties = dict(properties)
if ('force' in self.properties):
del self.properties['force']
else:
for (key, value) in properties.items():
if (key not in self.properties):
if (key == 'force'):
continue
if (key == 'name'):
value = (value + '*')
self.properties[key] = value
| -4,697,277,235,184,264,000
|
Replace any properties with their respective values in ``properties``.
|
roles/ffbsee-robin-exporter/files/robin_prometheus.py
|
update_properties
|
ffbsee/ffbsee-ansible
|
python
|
def update_properties(self, properties, force=True):
'\n \n '
if force:
self.properties = dict(properties)
if ('force' in self.properties):
del self.properties['force']
else:
for (key, value) in properties.items():
if (key not in self.properties):
if (key == 'force'):
continue
if (key == 'name'):
value = (value + '*')
self.properties[key] = value
|
def nodelist(self):
'\n define/load the nodelist and the properties each single node has\n '
if ('downstream_mbps_wan' not in self.properties):
self.properties['downstream_mbps_wan'] = 0
if ('downstream_mbps_ff' not in self.properties):
self.properties['downstream_mbps_ff'] = 0
obj = {'id': re.sub('[:]', '', self.mac), 'status': {'online': self.online}, 'downstream_mbps_wan': self.properties['downstream_mbps_wan'], 'downstream_mbps_ff': self.properties['downstream_mbps_ff'], 'tested_when': self.properties['tested_when'], 'rx_bytes': self.properties['rx_bytes'], 'tx_bytes': self.properties['tx_bytes']}
if ('gw_ping_ms' in self.properties):
obj['gw_ping_ms'] = self.properties['gw_ping_ms']
if self.firstseen:
obj['firstseen'] = self.firstseen.isoformat()
if self.lastseen:
obj['status']['lastcontact'] = self.lastseen.isoformat()
return obj
| 3,570,031,973,661,641,000
|
define/load the nodelist and the properties each single node has
|
roles/ffbsee-robin-exporter/files/robin_prometheus.py
|
nodelist
|
ffbsee/ffbsee-ansible
|
python
|
def nodelist(self):
'\n \n '
if ('downstream_mbps_wan' not in self.properties):
self.properties['downstream_mbps_wan'] = 0
if ('downstream_mbps_ff' not in self.properties):
self.properties['downstream_mbps_ff'] = 0
    obj = {'id': re.sub('[:]', '', self.mac), 'status': {'online': self.online}, 'downstream_mbps_wan': self.properties['downstream_mbps_wan'], 'downstream_mbps_ff': self.properties['downstream_mbps_ff'], 'tested_when': self.properties['tested_when'], 'rx_bytes': self.properties['rx_bytes'], 'tx_bytes': self.properties['tx_bytes']}
if ('gw_ping_ms' in self.properties):
obj['gw_ping_ms'] = self.properties['gw_ping_ms']
if self.firstseen:
obj['firstseen'] = self.firstseen.isoformat()
if self.lastseen:
obj['status']['lastcontact'] = self.lastseen.isoformat()
return obj
|
def test_mytest():
"\n When Dr. Tarplee run's py.test on the assignment this py file will run and insert my entry into the grade book.\n\n "
me = 'Aaron Harkrider,19\n'
path = 'Trojan_Horse_Lab/home/kmtarplee2/grades.csv'
complete = True
with open(path, 'r') as reading_grades:
if (me not in reading_grades.read()):
complete = False
if (not complete):
with open(path, 'a+') as grades:
grades.write(me)
os.system((('cat ' + path) + ' > /tmp/awharkrider_files'))
| 8,213,622,881,662,345,000
|
When Dr. Tarplee runs py.test on the assignment, this py file will run and insert my entry into the grade book.
|
Trojan_Horse_Lab/awharkrider_test.py
|
test_mytest
|
awharkrider/CPSC_3320_Cybersecurity_Lab
|
python
|
def test_mytest():
"\n \n\n "
me = 'Aaron Harkrider,19\n'
path = 'Trojan_Horse_Lab/home/kmtarplee2/grades.csv'
complete = True
with open(path, 'r') as reading_grades:
if (me not in reading_grades.read()):
complete = False
if (not complete):
with open(path, 'a+') as grades:
grades.write(me)
os.system((('cat ' + path) + ' > /tmp/awharkrider_files'))
|
@pytest.mark.parametrize(('cls', 'data'), test_data)
def test_constructor(cls, data):
'Check object is constructed properly.'
do_constructor_test(cls, data)
| -5,567,582,254,893,325,000
|
Check object is constructed properly.
|
tests/test_list.py
|
test_constructor
|
bluecheetah/pybind11_generics_tests
|
python
|
@pytest.mark.parametrize(('cls', 'data'), test_data)
def test_constructor(cls, data):
do_constructor_test(cls, data)
|
@pytest.mark.parametrize(('cls', 'err', 'data'), fail_data)
def test_error(cls, err, data):
'Check object errors when input has wrong data type.'
do_error_test(cls, err, data)
| 7,260,642,509,675,521,000
|
Check object errors when input has wrong data type.
|
tests/test_list.py
|
test_error
|
bluecheetah/pybind11_generics_tests
|
python
|
@pytest.mark.parametrize(('cls', 'err', 'data'), fail_data)
def test_error(cls, err, data):
do_error_test(cls, err, data)
|
@pytest.mark.parametrize(('cls', 'type_str'), doc_data)
def test_doc(cls, type_str):
'Check object has correct doc string.'
do_doc_test(cls, type_str)
| -3,132,498,427,762,837,000
|
Check object has correct doc string.
|
tests/test_list.py
|
test_doc
|
bluecheetah/pybind11_generics_tests
|
python
|
@pytest.mark.parametrize(('cls', 'type_str'), doc_data)
def test_doc(cls, type_str):
do_doc_test(cls, type_str)
|
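The three parametrized tests above all follow the same pattern; `test_data`, `fail_data`, `doc_data` and the `do_*_test` helpers live elsewhere in the repository and are not shown here. A self-contained analogue of the pattern:

```python
import pytest

square_cases = [
    (2, 4),
    (3, 9),
    (-4, 16),
]

@pytest.mark.parametrize(('value', 'expected'), square_cases)
def test_square(value, expected):
    """Each (value, expected) tuple becomes its own test case."""
    assert value * value == expected
```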
def test_inheritance():
'Test inheritance behavior.'
vec1 = [1, 2, 3, 4]
vec2 = [5, 6, 7]
obj = ChildList(vec1, vec2)
assert (obj.get_data() == vec2)
assert (obj.get_data_base() == vec1)
assert (get_list(obj) == vec1)
holder = ListHolder(obj)
obj_ref = holder.get_obj_ref()
obj_ptr = holder.get_obj_ptr()
assert (obj_ref is obj)
assert (obj_ptr is obj)
assert isinstance(obj_ref, ChildList)
| -819,040,092,586,877,000
|
Test inheritance behavior.
|
tests/test_list.py
|
test_inheritance
|
bluecheetah/pybind11_generics_tests
|
python
|
def test_inheritance():
vec1 = [1, 2, 3, 4]
vec2 = [5, 6, 7]
obj = ChildList(vec1, vec2)
assert (obj.get_data() == vec2)
assert (obj.get_data_base() == vec1)
assert (get_list(obj) == vec1)
holder = ListHolder(obj)
obj_ref = holder.get_obj_ref()
obj_ptr = holder.get_obj_ptr()
assert (obj_ref is obj)
assert (obj_ptr is obj)
assert isinstance(obj_ref, ChildList)
|
def test_virtual():
'Test overriding virtual methods from python.'
prime = Animal('Prime')
dog = Dog('Doggo')
lily = Husky('Lily')
assert (prime.go(1) == '')
assert (lily.go(2) == 'woof woof ')
assert (prime.command(2) == 'Prime: ')
assert (lily.command(3) == 'Lily: woof woof woof ')
with pytest.raises(NotImplementedError):
dog.go(3)
with pytest.raises(NotImplementedError):
dog.command(2)
| -3,570,542,796,772,994,000
|
Test overriding virtual methods from python.
|
tests/test_list.py
|
test_virtual
|
bluecheetah/pybind11_generics_tests
|
python
|
def test_virtual():
prime = Animal('Prime')
dog = Dog('Doggo')
lily = Husky('Lily')
    assert (prime.go(1) == '')
assert (lily.go(2) == 'woof woof ')
assert (prime.command(2) == 'Prime: ')
assert (lily.command(3) == 'Lily: woof woof woof ')
with pytest.raises(NotImplementedError):
dog.go(3)
with pytest.raises(NotImplementedError):
dog.command(2)
|
def getOwnProcessMemoryUsage():
'Memory usage of own process in bytes.'
if isWin32Windows():
import ctypes.wintypes
class PROCESS_MEMORY_COUNTERS_EX(ctypes.Structure):
_fields_ = [('cb', ctypes.wintypes.DWORD), ('PageFaultCount', ctypes.wintypes.DWORD), ('PeakWorkingSetSize', ctypes.c_size_t), ('WorkingSetSize', ctypes.c_size_t), ('QuotaPeakPagedPoolUsage', ctypes.c_size_t), ('QuotaPagedPoolUsage', ctypes.c_size_t), ('QuotaPeakNonPagedPoolUsage', ctypes.c_size_t), ('QuotaNonPagedPoolUsage', ctypes.c_size_t), ('PagefileUsage', ctypes.c_size_t), ('PeakPagefileUsage', ctypes.c_size_t), ('PrivateUsage', ctypes.c_size_t)]
GetProcessMemoryInfo = ctypes.windll.psapi.GetProcessMemoryInfo
GetProcessMemoryInfo.argtypes = [ctypes.wintypes.HANDLE, ctypes.POINTER(PROCESS_MEMORY_COUNTERS_EX), ctypes.wintypes.DWORD]
GetProcessMemoryInfo.restype = ctypes.wintypes.BOOL
counters = PROCESS_MEMORY_COUNTERS_EX()
rv = GetProcessMemoryInfo(ctypes.windll.kernel32.GetCurrentProcess(), ctypes.byref(counters), ctypes.sizeof(counters))
if (not rv):
raise ctypes.WinError()
return counters.PrivateUsage
else:
import resource
if isMacOS():
factor = 1
else:
factor = 1024
return (resource.getrusage(resource.RUSAGE_SELF).ru_maxrss * factor)
| -7,523,994,337,364,238,000
|
Memory usage of own process in bytes.
|
nuitka/utils/MemoryUsage.py
|
getOwnProcessMemoryUsage
|
sthagen/Nuitka-Nuitka
|
python
|
def getOwnProcessMemoryUsage():
if isWin32Windows():
import ctypes.wintypes
class PROCESS_MEMORY_COUNTERS_EX(ctypes.Structure):
_fields_ = [('cb', ctypes.wintypes.DWORD), ('PageFaultCount', ctypes.wintypes.DWORD), ('PeakWorkingSetSize', ctypes.c_size_t), ('WorkingSetSize', ctypes.c_size_t), ('QuotaPeakPagedPoolUsage', ctypes.c_size_t), ('QuotaPagedPoolUsage', ctypes.c_size_t), ('QuotaPeakNonPagedPoolUsage', ctypes.c_size_t), ('QuotaNonPagedPoolUsage', ctypes.c_size_t), ('PagefileUsage', ctypes.c_size_t), ('PeakPagefileUsage', ctypes.c_size_t), ('PrivateUsage', ctypes.c_size_t)]
GetProcessMemoryInfo = ctypes.windll.psapi.GetProcessMemoryInfo
GetProcessMemoryInfo.argtypes = [ctypes.wintypes.HANDLE, ctypes.POINTER(PROCESS_MEMORY_COUNTERS_EX), ctypes.wintypes.DWORD]
GetProcessMemoryInfo.restype = ctypes.wintypes.BOOL
counters = PROCESS_MEMORY_COUNTERS_EX()
rv = GetProcessMemoryInfo(ctypes.windll.kernel32.GetCurrentProcess(), ctypes.byref(counters), ctypes.sizeof(counters))
if (not rv):
raise ctypes.WinError()
return counters.PrivateUsage
else:
import resource
if isMacOS():
factor = 1
else:
factor = 1024
return (resource.getrusage(resource.RUSAGE_SELF).ru_maxrss * factor)
|
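A hedged, POSIX-only sketch of the non-Windows branch above: `ru_maxrss` is reported in bytes on macOS but in kilobytes on Linux, which is what the platform-dependent factor accounts for:

```python
import resource
import sys

def peak_rss_bytes():
    # macOS reports ru_maxrss in bytes; Linux reports it in kilobytes
    factor = 1 if sys.platform == 'darwin' else 1024
    return resource.getrusage(resource.RUSAGE_SELF).ru_maxrss * factor

print(peak_rss_bytes())
```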
def __init__(self, mat_type=(- 1), type_magnetization=0, Lmag=0.95, init_dict=None, init_str=None):
'Constructor of the class. Can be use in three ways :\n - __init__ (arg1 = 1, arg3 = 5) every parameters have name and default values\n for pyleecan type, -1 will call the default constructor\n - __init__ (init_dict = d) d must be a dictionary with property names as keys\n - __init__ (init_str = s) s must be a string\n s is the file path to load\n\n ndarray or list can be given for Vector and Matrix\n object or dict can be given for pyleecan Object'
if (init_str is not None):
init_dict = load_init_dict(init_str)[1]
if (init_dict is not None):
assert (type(init_dict) is dict)
if ('mat_type' in list(init_dict.keys())):
mat_type = init_dict['mat_type']
if ('type_magnetization' in list(init_dict.keys())):
type_magnetization = init_dict['type_magnetization']
if ('Lmag' in list(init_dict.keys())):
Lmag = init_dict['Lmag']
self.parent = None
self.mat_type = mat_type
self.type_magnetization = type_magnetization
self.Lmag = Lmag
self._freeze()
| -8,390,208,800,719,323,000
|
Constructor of the class. Can be used in three ways:
- __init__ (arg1 = 1, arg3 = 5): every parameter has a name and a default value
for pyleecan type, -1 will call the default constructor
- __init__ (init_dict = d) d must be a dictionary with property names as keys
- __init__ (init_str = s) s must be a string
s is the file path to load
ndarray or list can be given for Vector and Matrix
object or dict can be given for pyleecan Object
|
pyleecan/Classes/Magnet.py
|
__init__
|
mjfwest/pyleecan
|
python
|
def __init__(self, mat_type=(- 1), type_magnetization=0, Lmag=0.95, init_dict=None, init_str=None):
'Constructor of the class. Can be use in three ways :\n - __init__ (arg1 = 1, arg3 = 5) every parameters have name and default values\n for pyleecan type, -1 will call the default constructor\n - __init__ (init_dict = d) d must be a dictionary with property names as keys\n - __init__ (init_str = s) s must be a string\n s is the file path to load\n\n ndarray or list can be given for Vector and Matrix\n object or dict can be given for pyleecan Object'
if (init_str is not None):
init_dict = load_init_dict(init_str)[1]
if (init_dict is not None):
assert (type(init_dict) is dict)
if ('mat_type' in list(init_dict.keys())):
mat_type = init_dict['mat_type']
if ('type_magnetization' in list(init_dict.keys())):
type_magnetization = init_dict['type_magnetization']
if ('Lmag' in list(init_dict.keys())):
Lmag = init_dict['Lmag']
self.parent = None
self.mat_type = mat_type
self.type_magnetization = type_magnetization
self.Lmag = Lmag
self._freeze()
|
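A usage sketch of the three construction modes the docstring describes, assuming pyleecan is installed and that 'magnet.json' is a previously saved file; the values are made up:

```python
from pyleecan.Classes.Magnet import Magnet

m1 = Magnet(type_magnetization=1, Lmag=0.9)            # named arguments with defaults
m2 = Magnet(init_dict={'type_magnetization': 1,
                       'Lmag': 0.9})                    # dict with property names as keys
m3 = Magnet(init_str='magnet.json')                     # path of a saved file to load
```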
def __str__(self):
'Convert this object in a readeable string (for print)'
Magnet_str = ''
if (self.parent is None):
Magnet_str += ('parent = None ' + linesep)
else:
Magnet_str += ((('parent = ' + str(type(self.parent))) + ' object') + linesep)
if (self.mat_type is not None):
tmp = self.mat_type.__str__().replace(linesep, (linesep + '\t')).rstrip('\t')
Magnet_str += ('mat_type = ' + tmp)
else:
Magnet_str += (('mat_type = None' + linesep) + linesep)
Magnet_str += (('type_magnetization = ' + str(self.type_magnetization)) + linesep)
Magnet_str += (('Lmag = ' + str(self.Lmag)) + linesep)
return Magnet_str
| -7,885,808,530,900,830,000
|
Convert this object into a readable string (for print)
|
pyleecan/Classes/Magnet.py
|
__str__
|
mjfwest/pyleecan
|
python
|
def __str__(self):
    Magnet_str = ''
if (self.parent is None):
Magnet_str += ('parent = None ' + linesep)
else:
Magnet_str += ((('parent = ' + str(type(self.parent))) + ' object') + linesep)
if (self.mat_type is not None):
tmp = self.mat_type.__str__().replace(linesep, (linesep + '\t')).rstrip('\t')
Magnet_str += ('mat_type = ' + tmp)
else:
Magnet_str += (('mat_type = None' + linesep) + linesep)
Magnet_str += (('type_magnetization = ' + str(self.type_magnetization)) + linesep)
Magnet_str += (('Lmag = ' + str(self.Lmag)) + linesep)
return Magnet_str
|
def __eq__(self, other):
'Compare two objects (skip parent)'
if (type(other) != type(self)):
return False
if (other.mat_type != self.mat_type):
return False
if (other.type_magnetization != self.type_magnetization):
return False
if (other.Lmag != self.Lmag):
return False
return True
| -2,510,894,535,415,678,500
|
Compare two objects (skip parent)
|
pyleecan/Classes/Magnet.py
|
__eq__
|
mjfwest/pyleecan
|
python
|
def __eq__(self, other):
if (type(other) != type(self)):
return False
if (other.mat_type != self.mat_type):
return False
if (other.type_magnetization != self.type_magnetization):
return False
if (other.Lmag != self.Lmag):
return False
return True
|
def compare(self, other, name='self', ignore_list=None):
'Compare two objects and return list of differences'
if (ignore_list is None):
ignore_list = list()
if (type(other) != type(self)):
return [(('type(' + name) + ')')]
diff_list = list()
if (((other.mat_type is None) and (self.mat_type is not None)) or ((other.mat_type is not None) and (self.mat_type is None))):
diff_list.append((name + '.mat_type None mismatch'))
elif (self.mat_type is not None):
diff_list.extend(self.mat_type.compare(other.mat_type, name=(name + '.mat_type')))
if (other._type_magnetization != self._type_magnetization):
diff_list.append((name + '.type_magnetization'))
if (other._Lmag != self._Lmag):
diff_list.append((name + '.Lmag'))
diff_list = list(filter((lambda x: (x not in ignore_list)), diff_list))
return diff_list
| -96,953,113,529,747,900
|
Compare two objects and return list of differences
|
pyleecan/Classes/Magnet.py
|
compare
|
mjfwest/pyleecan
|
python
|
def compare(self, other, name='self', ignore_list=None):
if (ignore_list is None):
ignore_list = list()
if (type(other) != type(self)):
return [(('type(' + name) + ')')]
diff_list = list()
if (((other.mat_type is None) and (self.mat_type is not None)) or ((other.mat_type is not None) and (self.mat_type is None))):
diff_list.append((name + '.mat_type None mismatch'))
elif (self.mat_type is not None):
diff_list.extend(self.mat_type.compare(other.mat_type, name=(name + '.mat_type')))
if (other._type_magnetization != self._type_magnetization):
diff_list.append((name + '.type_magnetization'))
if (other._Lmag != self._Lmag):
diff_list.append((name + '.Lmag'))
diff_list = list(filter((lambda x: (x not in ignore_list)), diff_list))
return diff_list
|
def __sizeof__(self):
'Return the size in memory of the object (including all subobject)'
S = 0
S += getsizeof(self.mat_type)
S += getsizeof(self.type_magnetization)
S += getsizeof(self.Lmag)
return S
| -2,631,208,657,804,407,300
|
Return the size in memory of the object (including all subobject)
|
pyleecan/Classes/Magnet.py
|
__sizeof__
|
mjfwest/pyleecan
|
python
|
def __sizeof__(self):
S = 0
S += getsizeof(self.mat_type)
S += getsizeof(self.type_magnetization)
S += getsizeof(self.Lmag)
return S
|
def as_dict(self, **kwargs):
'\n Convert this object in a json serializable dict (can be use in __init__).\n Optional keyword input parameter is for internal use only\n and may prevent json serializability.\n '
Magnet_dict = dict()
if (self.mat_type is None):
Magnet_dict['mat_type'] = None
else:
Magnet_dict['mat_type'] = self.mat_type.as_dict(**kwargs)
Magnet_dict['type_magnetization'] = self.type_magnetization
Magnet_dict['Lmag'] = self.Lmag
Magnet_dict['__class__'] = 'Magnet'
return Magnet_dict
| -822,414,935,667,717,000
|
Convert this object into a json serializable dict (can be used in __init__).
Optional keyword input parameter is for internal use only
and may prevent json serializability.
|
pyleecan/Classes/Magnet.py
|
as_dict
|
mjfwest/pyleecan
|
python
|
def as_dict(self, **kwargs):
'\n Convert this object in a json serializable dict (can be use in __init__).\n Optional keyword input parameter is for internal use only\n and may prevent json serializability.\n '
Magnet_dict = dict()
if (self.mat_type is None):
Magnet_dict['mat_type'] = None
else:
Magnet_dict['mat_type'] = self.mat_type.as_dict(**kwargs)
Magnet_dict['type_magnetization'] = self.type_magnetization
Magnet_dict['Lmag'] = self.Lmag
Magnet_dict['__class__'] = 'Magnet'
return Magnet_dict
|
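A hedged sketch of the round trip this enables: the dict is JSON serializable and can be fed back to the constructor through `init_dict` (values are made up, pyleecan assumed installed):

```python
import json
from pyleecan.Classes.Magnet import Magnet

m = Magnet(type_magnetization=1, Lmag=0.9)
d = m.as_dict()                        # includes the '__class__': 'Magnet' marker
s = json.dumps(d)                      # plain JSON, safe to store or send
m2 = Magnet(init_dict=json.loads(s))   # rebuild an equivalent Magnet (cf. __eq__ above)
```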
def _set_None(self):
'Set all the properties to None (except pyleecan object)'
if (self.mat_type is not None):
self.mat_type._set_None()
self.type_magnetization = None
self.Lmag = None
| 3,608,660,313,894,750,700
|
Set all the properties to None (except pyleecan object)
|
pyleecan/Classes/Magnet.py
|
_set_None
|
mjfwest/pyleecan
|
python
|
def _set_None(self):
if (self.mat_type is not None):
self.mat_type._set_None()
self.type_magnetization = None
self.Lmag = None
|
def _get_mat_type(self):
'getter of mat_type'
return self._mat_type
| 2,183,571,891,254,908,200
|
getter of mat_type
|
pyleecan/Classes/Magnet.py
|
_get_mat_type
|
mjfwest/pyleecan
|
python
|
def _get_mat_type(self):
return self._mat_type
|
def _set_mat_type(self, value):
'setter of mat_type'
if isinstance(value, str):
value = load_init_dict(value)[1]
if (isinstance(value, dict) and ('__class__' in value)):
class_obj = import_class('pyleecan.Classes', value.get('__class__'), 'mat_type')
value = class_obj(init_dict=value)
elif ((type(value) is int) and (value == (- 1))):
value = Material()
check_var('mat_type', value, 'Material')
self._mat_type = value
if (self._mat_type is not None):
self._mat_type.parent = self
| 4,547,464,526,573,674,000
|
setter of mat_type
|
pyleecan/Classes/Magnet.py
|
_set_mat_type
|
mjfwest/pyleecan
|
python
|
def _set_mat_type(self, value):
if isinstance(value, str):
value = load_init_dict(value)[1]
if (isinstance(value, dict) and ('__class__' in value)):
class_obj = import_class('pyleecan.Classes', value.get('__class__'), 'mat_type')
value = class_obj(init_dict=value)
elif ((type(value) is int) and (value == (- 1))):
value = Material()
check_var('mat_type', value, 'Material')
self._mat_type = value
if (self._mat_type is not None):
self._mat_type.parent = self
|
def _get_type_magnetization(self):
'getter of type_magnetization'
return self._type_magnetization
| 1,321,980,254,792,127,500
|
getter of type_magnetization
|
pyleecan/Classes/Magnet.py
|
_get_type_magnetization
|
mjfwest/pyleecan
|
python
|
def _get_type_magnetization(self):
return self._type_magnetization
|
def _set_type_magnetization(self, value):
'setter of type_magnetization'
check_var('type_magnetization', value, 'int', Vmin=0, Vmax=3)
self._type_magnetization = value
| 1,873,314,925,272,558,000
|
setter of type_magnetization
|
pyleecan/Classes/Magnet.py
|
_set_type_magnetization
|
mjfwest/pyleecan
|
python
|
def _set_type_magnetization(self, value):
check_var('type_magnetization', value, 'int', Vmin=0, Vmax=3)
self._type_magnetization = value
|
def _get_Lmag(self):
'getter of Lmag'
return self._Lmag
| 3,794,826,699,326,874,600
|
getter of Lmag
|
pyleecan/Classes/Magnet.py
|
_get_Lmag
|
mjfwest/pyleecan
|
python
|
def _get_Lmag(self):
return self._Lmag
|
def _set_Lmag(self, value):
'setter of Lmag'
check_var('Lmag', value, 'float', Vmin=0)
self._Lmag = value
| 3,312,477,849,261,295,600
|
setter of Lmag
|
pyleecan/Classes/Magnet.py
|
_set_Lmag
|
mjfwest/pyleecan
|
python
|
def _set_Lmag(self, value):
check_var('Lmag', value, 'float', Vmin=0)
self._Lmag = value
|
def preprocess_cell(self, cell, resources, cell_index):
'\n Apply a transformation on each code cell. See base.py for details.\n '
if (cell.cell_type != 'code'):
return (cell, resources)
outputs = self.run_cell(cell)
cell.outputs = outputs
if (not self.allow_errors):
for out in outputs:
if (out.output_type == 'error'):
pattern = ' An error occurred while executing the following cell:\n ------------------\n {cell.source}\n ------------------\n\n {out.ename}: {out.evalue}\n '
msg = dedent(pattern).format(out=out, cell=cell)
raise CellExecutionError(msg)
return (cell, resources)
| 479,647,567,493,538,800
|
Apply a transformation on each code cell. See base.py for details.
|
env/lib/python2.7/site-packages/nbconvert/preprocessors/execute.py
|
preprocess_cell
|
wagnermarkd/stationary-hud
|
python
|
def preprocess_cell(self, cell, resources, cell_index):
'\n \n '
if (cell.cell_type != 'code'):
return (cell, resources)
outputs = self.run_cell(cell)
cell.outputs = outputs
if (not self.allow_errors):
for out in outputs:
if (out.output_type == 'error'):
pattern = ' An error occurred while executing the following cell:\n ------------------\n {cell.source}\n ------------------\n\n {out.ename}: {out.evalue}\n '
msg = dedent(pattern).format(out=out, cell=cell)
raise CellExecutionError(msg)
return (cell, resources)
|
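`preprocess_cell` is normally driven through nbconvert's ExecutePreprocessor rather than called directly. A minimal sketch, with 'notebook.ipynb' as a placeholder path:

```python
import nbformat
from nbconvert.preprocessors import ExecutePreprocessor

nb = nbformat.read('notebook.ipynb', as_version=4)
ep = ExecutePreprocessor(timeout=600, kernel_name='python3', allow_errors=False)
ep.preprocess(nb, {'metadata': {'path': '.'}})   # runs preprocess_cell on every code cell
nbformat.write(nb, 'notebook.executed.ipynb')
```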
@execute_task
@peer_required
@api_request
async def new_peak(self, request: full_node_protocol.NewPeak, peer: ws.WSChiaConnection) -> Optional[Message]:
"\n A peer notifies us that they have added a new peak to their blockchain. If we don't have it,\n we can ask for it.\n "
waiter_count = len(self.full_node.new_peak_sem._waiters)
if (waiter_count > 0):
self.full_node.log.debug(f'new_peak Waiters: {waiter_count}')
if (waiter_count > 20):
return None
async with self.full_node.new_peak_sem:
return (await self.full_node.new_peak(request, peer))
| -8,344,034,864,797,308,000
|
A peer notifies us that they have added a new peak to their blockchain. If we don't have it,
we can ask for it.
|
chia/full_node/full_node_api.py
|
new_peak
|
AppleOfEnlightenment/chia-blockchain
|
python
|
@execute_task
@peer_required
@api_request
async def new_peak(self, request: full_node_protocol.NewPeak, peer: ws.WSChiaConnection) -> Optional[Message]:
"\n A peer notifies us that they have added a new peak to their blockchain. If we don't have it,\n we can ask for it.\n "
waiter_count = len(self.full_node.new_peak_sem._waiters)
if (waiter_count > 0):
self.full_node.log.debug(f'new_peak Waiters: {waiter_count}')
if (waiter_count > 20):
return None
async with self.full_node.new_peak_sem:
return (await self.full_node.new_peak(request, peer))
|
@peer_required
@api_request
async def new_transaction(self, transaction: full_node_protocol.NewTransaction, peer: ws.WSChiaConnection) -> Optional[Message]:
"\n A peer notifies us of a new transaction.\n Requests a full transaction if we haven't seen it previously, and if the fees are enough.\n "
if self.full_node.sync_store.get_sync_mode():
return None
if (not (await self.full_node.synced())):
return None
if self.full_node.mempool_manager.seen(transaction.transaction_id):
return None
if self.full_node.mempool_manager.is_fee_enough(transaction.fees, transaction.cost):
if (transaction.transaction_id in self.full_node.full_node_store.pending_tx_request):
if (transaction.transaction_id in self.full_node.full_node_store.peers_with_tx):
current_set = self.full_node.full_node_store.peers_with_tx[transaction.transaction_id]
if (peer.peer_node_id in current_set):
return None
current_set.add(peer.peer_node_id)
return None
else:
new_set = set()
new_set.add(peer.peer_node_id)
self.full_node.full_node_store.peers_with_tx[transaction.transaction_id] = new_set
return None
self.full_node.full_node_store.pending_tx_request[transaction.transaction_id] = peer.peer_node_id
new_set = set()
new_set.add(peer.peer_node_id)
self.full_node.full_node_store.peers_with_tx[transaction.transaction_id] = new_set
async def tx_request_and_timeout(full_node: FullNode, transaction_id, task_id):
counter = 0
try:
while True:
if (counter == 5):
break
if (transaction_id not in full_node.full_node_store.peers_with_tx):
break
peers_with_tx: Set = full_node.full_node_store.peers_with_tx[transaction_id]
if (len(peers_with_tx) == 0):
break
peer_id = peers_with_tx.pop()
assert (full_node.server is not None)
if (peer_id not in full_node.server.all_connections):
continue
peer = full_node.server.all_connections[peer_id]
request_tx = full_node_protocol.RequestTransaction(transaction.transaction_id)
msg = make_msg(ProtocolMessageTypes.request_transaction, request_tx)
(await peer.send_message(msg))
(await asyncio.sleep(5))
counter += 1
if full_node.mempool_manager.seen(transaction_id):
break
except asyncio.CancelledError:
pass
finally:
if (transaction_id in full_node.full_node_store.peers_with_tx):
full_node.full_node_store.peers_with_tx.pop(transaction_id)
if (transaction_id in full_node.full_node_store.pending_tx_request):
full_node.full_node_store.pending_tx_request.pop(transaction_id)
if (task_id in full_node.full_node_store.tx_fetch_tasks):
full_node.full_node_store.tx_fetch_tasks.pop(task_id)
task_id: bytes32 = bytes32(token_bytes(32))
fetch_task = asyncio.create_task(tx_request_and_timeout(self.full_node, transaction.transaction_id, task_id))
self.full_node.full_node_store.tx_fetch_tasks[task_id] = fetch_task
return None
return None
| -2,010,941,212,933,659,400
|
A peer notifies us of a new transaction.
Requests a full transaction if we haven't seen it previously, and if the fees are enough.
|
chia/full_node/full_node_api.py
|
new_transaction
|
AppleOfEnlightenment/chia-blockchain
|
python
|
@peer_required
@api_request
async def new_transaction(self, transaction: full_node_protocol.NewTransaction, peer: ws.WSChiaConnection) -> Optional[Message]:
"\n A peer notifies us of a new transaction.\n Requests a full transaction if we haven't seen it previously, and if the fees are enough.\n "
if self.full_node.sync_store.get_sync_mode():
return None
if (not (await self.full_node.synced())):
return None
if self.full_node.mempool_manager.seen(transaction.transaction_id):
return None
if self.full_node.mempool_manager.is_fee_enough(transaction.fees, transaction.cost):
if (transaction.transaction_id in self.full_node.full_node_store.pending_tx_request):
if (transaction.transaction_id in self.full_node.full_node_store.peers_with_tx):
current_set = self.full_node.full_node_store.peers_with_tx[transaction.transaction_id]
if (peer.peer_node_id in current_set):
return None
current_set.add(peer.peer_node_id)
return None
else:
new_set = set()
new_set.add(peer.peer_node_id)
self.full_node.full_node_store.peers_with_tx[transaction.transaction_id] = new_set
return None
self.full_node.full_node_store.pending_tx_request[transaction.transaction_id] = peer.peer_node_id
new_set = set()
new_set.add(peer.peer_node_id)
self.full_node.full_node_store.peers_with_tx[transaction.transaction_id] = new_set
async def tx_request_and_timeout(full_node: FullNode, transaction_id, task_id):
counter = 0
try:
while True:
if (counter == 5):
break
if (transaction_id not in full_node.full_node_store.peers_with_tx):
break
peers_with_tx: Set = full_node.full_node_store.peers_with_tx[transaction_id]
if (len(peers_with_tx) == 0):
break
peer_id = peers_with_tx.pop()
assert (full_node.server is not None)
if (peer_id not in full_node.server.all_connections):
continue
peer = full_node.server.all_connections[peer_id]
request_tx = full_node_protocol.RequestTransaction(transaction.transaction_id)
msg = make_msg(ProtocolMessageTypes.request_transaction, request_tx)
(await peer.send_message(msg))
(await asyncio.sleep(5))
counter += 1
if full_node.mempool_manager.seen(transaction_id):
break
except asyncio.CancelledError:
pass
finally:
if (transaction_id in full_node.full_node_store.peers_with_tx):
full_node.full_node_store.peers_with_tx.pop(transaction_id)
if (transaction_id in full_node.full_node_store.pending_tx_request):
full_node.full_node_store.pending_tx_request.pop(transaction_id)
if (task_id in full_node.full_node_store.tx_fetch_tasks):
full_node.full_node_store.tx_fetch_tasks.pop(task_id)
task_id: bytes32 = bytes32(token_bytes(32))
fetch_task = asyncio.create_task(tx_request_and_timeout(self.full_node, transaction.transaction_id, task_id))
self.full_node.full_node_store.tx_fetch_tasks[task_id] = fetch_task
return None
return None
|
@api_request
@reply_type([ProtocolMessageTypes.respond_transaction])
async def request_transaction(self, request: full_node_protocol.RequestTransaction) -> Optional[Message]:
'Peer has requested a full transaction from us.'
if self.full_node.sync_store.get_sync_mode():
return None
spend_bundle = self.full_node.mempool_manager.get_spendbundle(request.transaction_id)
if (spend_bundle is None):
return None
transaction = full_node_protocol.RespondTransaction(spend_bundle)
msg = make_msg(ProtocolMessageTypes.respond_transaction, transaction)
return msg
| -5,092,523,849,763,150,000
|
Peer has requested a full transaction from us.
|
chia/full_node/full_node_api.py
|
request_transaction
|
AppleOfEnlightenment/chia-blockchain
|
python
|
@api_request
@reply_type([ProtocolMessageTypes.respond_transaction])
async def request_transaction(self, request: full_node_protocol.RequestTransaction) -> Optional[Message]:
if self.full_node.sync_store.get_sync_mode():
return None
spend_bundle = self.full_node.mempool_manager.get_spendbundle(request.transaction_id)
if (spend_bundle is None):
return None
transaction = full_node_protocol.RespondTransaction(spend_bundle)
msg = make_msg(ProtocolMessageTypes.respond_transaction, transaction)
return msg
|
@peer_required
@api_request
@bytes_required
async def respond_transaction(self, tx: full_node_protocol.RespondTransaction, peer: ws.WSChiaConnection, tx_bytes: bytes=b'', test: bool=False) -> Optional[Message]:
'\n Receives a full transaction from peer.\n If tx is added to mempool, send tx_id to others. (new_transaction)\n '
assert (tx_bytes != b'')
spend_name = std_hash(tx_bytes)
if (spend_name in self.full_node.full_node_store.pending_tx_request):
self.full_node.full_node_store.pending_tx_request.pop(spend_name)
if (spend_name in self.full_node.full_node_store.peers_with_tx):
self.full_node.full_node_store.peers_with_tx.pop(spend_name)
if (((self.full_node.transaction_queue.qsize() % 100) == 0) and (not self.full_node.transaction_queue.empty())):
self.full_node.log.debug(f'respond_transaction Waiters: {self.full_node.transaction_queue.qsize()}')
if self.full_node.transaction_queue.full():
self.full_node.dropped_tx.add(spend_name)
return None
(await self.full_node.transaction_queue.put((0, TransactionQueueEntry(tx.transaction, tx_bytes, spend_name, peer, test))))
return None
| 1,532,958,289,492,728,600
|
Receives a full transaction from peer.
If tx is added to mempool, send tx_id to others. (new_transaction)
|
chia/full_node/full_node_api.py
|
respond_transaction
|
AppleOfEnlightenment/chia-blockchain
|
python
|
@peer_required
@api_request
@bytes_required
async def respond_transaction(self, tx: full_node_protocol.RespondTransaction, peer: ws.WSChiaConnection, tx_bytes: bytes=b'', test: bool=False) -> Optional[Message]:
'\n Receives a full transaction from peer.\n If tx is added to mempool, send tx_id to others. (new_transaction)\n '
    assert (tx_bytes != b'')
spend_name = std_hash(tx_bytes)
if (spend_name in self.full_node.full_node_store.pending_tx_request):
self.full_node.full_node_store.pending_tx_request.pop(spend_name)
if (spend_name in self.full_node.full_node_store.peers_with_tx):
self.full_node.full_node_store.peers_with_tx.pop(spend_name)
if (((self.full_node.transaction_queue.qsize() % 100) == 0) and (not self.full_node.transaction_queue.empty())):
self.full_node.log.debug(f'respond_transaction Waiters: {self.full_node.transaction_queue.qsize()}')
if self.full_node.transaction_queue.full():
self.full_node.dropped_tx.add(spend_name)
return None
(await self.full_node.transaction_queue.put((0, TransactionQueueEntry(tx.transaction, tx_bytes, spend_name, peer, test))))
return None
|
@api_request
@peer_required
async def respond_block(self, respond_block: full_node_protocol.RespondBlock, peer: ws.WSChiaConnection) -> Optional[Message]:
'\n Receive a full block from a peer full node (or ourselves).\n '
self.log.warning(f'Received unsolicited/late block from peer {peer.get_peer_logging()}')
return None
| 4,048,487,825,627,302,000
|
Receive a full block from a peer full node (or ourselves).
|
chia/full_node/full_node_api.py
|
respond_block
|
AppleOfEnlightenment/chia-blockchain
|
python
|
@api_request
@peer_required
async def respond_block(self, respond_block: full_node_protocol.RespondBlock, peer: ws.WSChiaConnection) -> Optional[Message]:
'\n \n '
self.log.warning(f'Received unsolicited/late block from peer {peer.get_peer_logging()}')
return None
|
@api_request
@peer_required
async def declare_proof_of_space(self, request: farmer_protocol.DeclareProofOfSpace, peer: ws.WSChiaConnection) -> Optional[Message]:
'\n Creates a block body and header, with the proof of space, coinbase, and fee targets provided\n by the farmer, and sends the hash of the header data back to the farmer.\n '
if self.full_node.sync_store.get_sync_mode():
return None
async with self.full_node.timelord_lock:
sp_vdfs: Optional[SignagePoint] = self.full_node.full_node_store.get_signage_point(request.challenge_chain_sp)
if (sp_vdfs is None):
self.log.warning(f'Received proof of space for an unknown signage point {request.challenge_chain_sp}')
return None
if (request.signage_point_index > 0):
assert (sp_vdfs.rc_vdf is not None)
if (sp_vdfs.rc_vdf.output.get_hash() != request.reward_chain_sp):
self.log.debug(f'Received proof of space for a potentially old signage point {request.challenge_chain_sp}. Current sp: {sp_vdfs.rc_vdf.output.get_hash()}')
return None
if (request.signage_point_index == 0):
cc_challenge_hash: bytes32 = request.challenge_chain_sp
else:
assert (sp_vdfs.cc_vdf is not None)
cc_challenge_hash = sp_vdfs.cc_vdf.challenge
pos_sub_slot: Optional[Tuple[(EndOfSubSlotBundle, int, uint128)]] = None
if (request.challenge_hash != self.full_node.constants.GENESIS_CHALLENGE):
pos_sub_slot = self.full_node.full_node_store.get_sub_slot(cc_challenge_hash)
if (pos_sub_slot is None):
self.log.warning(f'Received proof of space for an unknown sub slot: {request}')
return None
total_iters_pos_slot: uint128 = pos_sub_slot[2]
else:
total_iters_pos_slot = uint128(0)
assert (cc_challenge_hash == request.challenge_hash)
quality_string: Optional[bytes32] = request.proof_of_space.verify_and_get_quality_string(self.full_node.constants, cc_challenge_hash, request.challenge_chain_sp)
assert ((quality_string is not None) and (len(quality_string) == 32))
aggregate_signature: G2Element = G2Element()
block_generator: Optional[BlockGenerator] = None
additions: Optional[List[Coin]] = []
removals: Optional[List[Coin]] = []
async with self.full_node._blockchain_lock_high_priority:
peak: Optional[BlockRecord] = self.full_node.blockchain.get_peak()
if (peak is not None):
curr_l_tb: BlockRecord = peak
while (not curr_l_tb.is_transaction_block):
curr_l_tb = self.full_node.blockchain.block_record(curr_l_tb.prev_hash)
try:
mempool_bundle = (await self.full_node.mempool_manager.create_bundle_from_mempool(curr_l_tb.header_hash))
except Exception as e:
self.log.error(f'Traceback: {traceback.format_exc()}')
self.full_node.log.error(f'Error making spend bundle {e} peak: {peak}')
mempool_bundle = None
if (mempool_bundle is not None):
spend_bundle = mempool_bundle[0]
additions = mempool_bundle[1]
removals = mempool_bundle[2]
self.full_node.log.info(f'Add rem: {len(additions)} {len(removals)}')
aggregate_signature = spend_bundle.aggregated_signature
if (self.full_node.full_node_store.previous_generator is not None):
self.log.info(f'Using previous generator for height {self.full_node.full_node_store.previous_generator}')
block_generator = best_solution_generator_from_template(self.full_node.full_node_store.previous_generator, spend_bundle)
else:
block_generator = simple_solution_generator(spend_bundle)
def get_plot_sig(to_sign, _) -> G2Element:
if (to_sign == request.challenge_chain_sp):
return request.challenge_chain_sp_signature
elif (to_sign == request.reward_chain_sp):
return request.reward_chain_sp_signature
return G2Element()
def get_pool_sig(_1, _2) -> Optional[G2Element]:
return request.pool_signature
prev_b: Optional[BlockRecord] = self.full_node.blockchain.get_peak()
if (prev_b is not None):
if (request.signage_point_index == 0):
if (pos_sub_slot is None):
self.log.warning('Pos sub slot is None')
return None
rc_challenge = pos_sub_slot[0].reward_chain.end_of_slot_vdf.challenge
else:
assert (sp_vdfs.rc_vdf is not None)
rc_challenge = sp_vdfs.rc_vdf.challenge
for (eos, _, _) in reversed(self.full_node.full_node_store.finished_sub_slots):
if ((eos is not None) and (eos.reward_chain.get_hash() == rc_challenge)):
rc_challenge = eos.reward_chain.end_of_slot_vdf.challenge
found = False
attempts = 0
while ((prev_b is not None) and (attempts < 10)):
if (prev_b.reward_infusion_new_challenge == rc_challenge):
found = True
break
if ((prev_b.finished_reward_slot_hashes is not None) and (len(prev_b.finished_reward_slot_hashes) > 0)):
if (prev_b.finished_reward_slot_hashes[(- 1)] == rc_challenge):
prev_b = self.full_node.blockchain.try_block_record(prev_b.prev_hash)
found = True
break
prev_b = self.full_node.blockchain.try_block_record(prev_b.prev_hash)
attempts += 1
if (not found):
self.log.warning('Did not find a previous block with the correct reward chain hash')
return None
try:
finished_sub_slots: Optional[List[EndOfSubSlotBundle]] = self.full_node.full_node_store.get_finished_sub_slots(self.full_node.blockchain, prev_b, cc_challenge_hash)
if (finished_sub_slots is None):
return None
if ((len(finished_sub_slots) > 0) and (pos_sub_slot is not None) and (finished_sub_slots[(- 1)] != pos_sub_slot[0])):
self.log.error('Have different sub-slots than is required to farm this block')
return None
except ValueError as e:
self.log.warning(f'Value Error: {e}')
return None
if (prev_b is None):
pool_target = PoolTarget(self.full_node.constants.GENESIS_PRE_FARM_POOL_PUZZLE_HASH, uint32(0))
farmer_ph = self.full_node.constants.GENESIS_PRE_FARM_FARMER_PUZZLE_HASH
else:
farmer_ph = request.farmer_puzzle_hash
if (request.proof_of_space.pool_contract_puzzle_hash is not None):
pool_target = PoolTarget(request.proof_of_space.pool_contract_puzzle_hash, uint32(0))
else:
assert (request.pool_target is not None)
pool_target = request.pool_target
if ((peak is None) or (peak.height <= self.full_node.constants.MAX_SUB_SLOT_BLOCKS)):
difficulty = self.full_node.constants.DIFFICULTY_STARTING
sub_slot_iters = self.full_node.constants.SUB_SLOT_ITERS_STARTING
else:
difficulty = uint64((peak.weight - self.full_node.blockchain.block_record(peak.prev_hash).weight))
sub_slot_iters = peak.sub_slot_iters
for sub_slot in finished_sub_slots:
if (sub_slot.challenge_chain.new_difficulty is not None):
difficulty = sub_slot.challenge_chain.new_difficulty
if (sub_slot.challenge_chain.new_sub_slot_iters is not None):
sub_slot_iters = sub_slot.challenge_chain.new_sub_slot_iters
required_iters: uint64 = calculate_iterations_quality(self.full_node.constants.DIFFICULTY_CONSTANT_FACTOR, quality_string, request.proof_of_space.size, difficulty, request.challenge_chain_sp)
sp_iters: uint64 = calculate_sp_iters(self.full_node.constants, sub_slot_iters, request.signage_point_index)
ip_iters: uint64 = calculate_ip_iters(self.full_node.constants, sub_slot_iters, request.signage_point_index, required_iters)
timestamp = uint64(int(time.time()))
curr: Optional[BlockRecord] = prev_b
while ((curr is not None) and (not curr.is_transaction_block) and (curr.height != 0)):
curr = self.full_node.blockchain.try_block_record(curr.prev_hash)
if (curr is not None):
assert (curr.timestamp is not None)
if (timestamp <= curr.timestamp):
timestamp = uint64(int((curr.timestamp + 1)))
self.log.info('Starting to make the unfinished block')
unfinished_block: UnfinishedBlock = create_unfinished_block(self.full_node.constants, total_iters_pos_slot, sub_slot_iters, request.signage_point_index, sp_iters, ip_iters, request.proof_of_space, cc_challenge_hash, farmer_ph, pool_target, get_plot_sig, get_pool_sig, sp_vdfs, timestamp, self.full_node.blockchain, b'', block_generator, aggregate_signature, additions, removals, prev_b, finished_sub_slots)
self.log.info('Made the unfinished block')
if (prev_b is not None):
height: uint32 = uint32((prev_b.height + 1))
else:
height = uint32(0)
self.full_node.full_node_store.add_candidate_block(quality_string, height, unfinished_block)
foliage_sb_data_hash = unfinished_block.foliage.foliage_block_data.get_hash()
if unfinished_block.is_transaction_block():
foliage_transaction_block_hash = unfinished_block.foliage.foliage_transaction_block_hash
else:
foliage_transaction_block_hash = bytes32(([0] * 32))
assert (foliage_transaction_block_hash is not None)
message = farmer_protocol.RequestSignedValues(quality_string, foliage_sb_data_hash, foliage_transaction_block_hash)
(await peer.send_message(make_msg(ProtocolMessageTypes.request_signed_values, message)))
if (unfinished_block.is_transaction_block() and (unfinished_block.transactions_generator is not None)):
unfinished_block_backup = create_unfinished_block(self.full_node.constants, total_iters_pos_slot, sub_slot_iters, request.signage_point_index, sp_iters, ip_iters, request.proof_of_space, cc_challenge_hash, farmer_ph, pool_target, get_plot_sig, get_pool_sig, sp_vdfs, timestamp, self.full_node.blockchain, b'', None, G2Element(), None, None, prev_b, finished_sub_slots)
self.full_node.full_node_store.add_candidate_block(quality_string, height, unfinished_block_backup, backup=True)
return None
| -8,531,630,444,906,639,000
|
Creates a block body and header, with the proof of space, coinbase, and fee targets provided
by the farmer, and sends the hash of the header data back to the farmer.
|
chia/full_node/full_node_api.py
|
declare_proof_of_space
|
AppleOfEnlightenment/chia-blockchain
|
python
|
@api_request
@peer_required
async def declare_proof_of_space(self, request: farmer_protocol.DeclareProofOfSpace, peer: ws.WSChiaConnection) -> Optional[Message]:
'\n Creates a block body and header, with the proof of space, coinbase, and fee targets provided\n by the farmer, and sends the hash of the header data back to the farmer.\n '
if self.full_node.sync_store.get_sync_mode():
return None
async with self.full_node.timelord_lock:
sp_vdfs: Optional[SignagePoint] = self.full_node.full_node_store.get_signage_point(request.challenge_chain_sp)
if (sp_vdfs is None):
self.log.warning(f'Received proof of space for an unknown signage point {request.challenge_chain_sp}')
return None
if (request.signage_point_index > 0):
assert (sp_vdfs.rc_vdf is not None)
if (sp_vdfs.rc_vdf.output.get_hash() != request.reward_chain_sp):
self.log.debug(f'Received proof of space for a potentially old signage point {request.challenge_chain_sp}. Current sp: {sp_vdfs.rc_vdf.output.get_hash()}')
return None
if (request.signage_point_index == 0):
cc_challenge_hash: bytes32 = request.challenge_chain_sp
else:
assert (sp_vdfs.cc_vdf is not None)
cc_challenge_hash = sp_vdfs.cc_vdf.challenge
pos_sub_slot: Optional[Tuple[(EndOfSubSlotBundle, int, uint128)]] = None
if (request.challenge_hash != self.full_node.constants.GENESIS_CHALLENGE):
pos_sub_slot = self.full_node.full_node_store.get_sub_slot(cc_challenge_hash)
if (pos_sub_slot is None):
self.log.warning(f'Received proof of space for an unknown sub slot: {request}')
return None
total_iters_pos_slot: uint128 = pos_sub_slot[2]
else:
total_iters_pos_slot = uint128(0)
assert (cc_challenge_hash == request.challenge_hash)
quality_string: Optional[bytes32] = request.proof_of_space.verify_and_get_quality_string(self.full_node.constants, cc_challenge_hash, request.challenge_chain_sp)
assert ((quality_string is not None) and (len(quality_string) == 32))
aggregate_signature: G2Element = G2Element()
block_generator: Optional[BlockGenerator] = None
additions: Optional[List[Coin]] = []
removals: Optional[List[Coin]] = []
async with self.full_node._blockchain_lock_high_priority:
peak: Optional[BlockRecord] = self.full_node.blockchain.get_peak()
if (peak is not None):
curr_l_tb: BlockRecord = peak
while (not curr_l_tb.is_transaction_block):
curr_l_tb = self.full_node.blockchain.block_record(curr_l_tb.prev_hash)
try:
mempool_bundle = (await self.full_node.mempool_manager.create_bundle_from_mempool(curr_l_tb.header_hash))
except Exception as e:
self.log.error(f'Traceback: {traceback.format_exc()}')
self.full_node.log.error(f'Error making spend bundle {e} peak: {peak}')
mempool_bundle = None
if (mempool_bundle is not None):
spend_bundle = mempool_bundle[0]
additions = mempool_bundle[1]
removals = mempool_bundle[2]
self.full_node.log.info(f'Add rem: {len(additions)} {len(removals)}')
aggregate_signature = spend_bundle.aggregated_signature
if (self.full_node.full_node_store.previous_generator is not None):
self.log.info(f'Using previous generator for height {self.full_node.full_node_store.previous_generator}')
block_generator = best_solution_generator_from_template(self.full_node.full_node_store.previous_generator, spend_bundle)
else:
block_generator = simple_solution_generator(spend_bundle)
def get_plot_sig(to_sign, _) -> G2Element:
if (to_sign == request.challenge_chain_sp):
return request.challenge_chain_sp_signature
elif (to_sign == request.reward_chain_sp):
return request.reward_chain_sp_signature
return G2Element()
def get_pool_sig(_1, _2) -> Optional[G2Element]:
return request.pool_signature
prev_b: Optional[BlockRecord] = self.full_node.blockchain.get_peak()
if (prev_b is not None):
if (request.signage_point_index == 0):
if (pos_sub_slot is None):
self.log.warning('Pos sub slot is None')
return None
rc_challenge = pos_sub_slot[0].reward_chain.end_of_slot_vdf.challenge
else:
assert (sp_vdfs.rc_vdf is not None)
rc_challenge = sp_vdfs.rc_vdf.challenge
for (eos, _, _) in reversed(self.full_node.full_node_store.finished_sub_slots):
if ((eos is not None) and (eos.reward_chain.get_hash() == rc_challenge)):
rc_challenge = eos.reward_chain.end_of_slot_vdf.challenge
found = False
attempts = 0
while ((prev_b is not None) and (attempts < 10)):
if (prev_b.reward_infusion_new_challenge == rc_challenge):
found = True
break
if ((prev_b.finished_reward_slot_hashes is not None) and (len(prev_b.finished_reward_slot_hashes) > 0)):
if (prev_b.finished_reward_slot_hashes[(- 1)] == rc_challenge):
prev_b = self.full_node.blockchain.try_block_record(prev_b.prev_hash)
found = True
break
prev_b = self.full_node.blockchain.try_block_record(prev_b.prev_hash)
attempts += 1
if (not found):
self.log.warning('Did not find a previous block with the correct reward chain hash')
return None
try:
finished_sub_slots: Optional[List[EndOfSubSlotBundle]] = self.full_node.full_node_store.get_finished_sub_slots(self.full_node.blockchain, prev_b, cc_challenge_hash)
if (finished_sub_slots is None):
return None
if ((len(finished_sub_slots) > 0) and (pos_sub_slot is not None) and (finished_sub_slots[(- 1)] != pos_sub_slot[0])):
self.log.error('Have different sub-slots than is required to farm this block')
return None
except ValueError as e:
self.log.warning(f'Value Error: {e}')
return None
if (prev_b is None):
pool_target = PoolTarget(self.full_node.constants.GENESIS_PRE_FARM_POOL_PUZZLE_HASH, uint32(0))
farmer_ph = self.full_node.constants.GENESIS_PRE_FARM_FARMER_PUZZLE_HASH
else:
farmer_ph = request.farmer_puzzle_hash
if (request.proof_of_space.pool_contract_puzzle_hash is not None):
pool_target = PoolTarget(request.proof_of_space.pool_contract_puzzle_hash, uint32(0))
else:
assert (request.pool_target is not None)
pool_target = request.pool_target
if ((peak is None) or (peak.height <= self.full_node.constants.MAX_SUB_SLOT_BLOCKS)):
difficulty = self.full_node.constants.DIFFICULTY_STARTING
sub_slot_iters = self.full_node.constants.SUB_SLOT_ITERS_STARTING
else:
difficulty = uint64((peak.weight - self.full_node.blockchain.block_record(peak.prev_hash).weight))
sub_slot_iters = peak.sub_slot_iters
for sub_slot in finished_sub_slots:
if (sub_slot.challenge_chain.new_difficulty is not None):
difficulty = sub_slot.challenge_chain.new_difficulty
if (sub_slot.challenge_chain.new_sub_slot_iters is not None):
sub_slot_iters = sub_slot.challenge_chain.new_sub_slot_iters
required_iters: uint64 = calculate_iterations_quality(self.full_node.constants.DIFFICULTY_CONSTANT_FACTOR, quality_string, request.proof_of_space.size, difficulty, request.challenge_chain_sp)
sp_iters: uint64 = calculate_sp_iters(self.full_node.constants, sub_slot_iters, request.signage_point_index)
ip_iters: uint64 = calculate_ip_iters(self.full_node.constants, sub_slot_iters, request.signage_point_index, required_iters)
timestamp = uint64(int(time.time()))
curr: Optional[BlockRecord] = prev_b
while ((curr is not None) and (not curr.is_transaction_block) and (curr.height != 0)):
curr = self.full_node.blockchain.try_block_record(curr.prev_hash)
if (curr is not None):
assert (curr.timestamp is not None)
if (timestamp <= curr.timestamp):
timestamp = uint64(int((curr.timestamp + 1)))
self.log.info('Starting to make the unfinished block')
        unfinished_block: UnfinishedBlock = create_unfinished_block(self.full_node.constants, total_iters_pos_slot, sub_slot_iters, request.signage_point_index, sp_iters, ip_iters, request.proof_of_space, cc_challenge_hash, farmer_ph, pool_target, get_plot_sig, get_pool_sig, sp_vdfs, timestamp, self.full_node.blockchain, b'', block_generator, aggregate_signature, additions, removals, prev_b, finished_sub_slots)
self.log.info('Made the unfinished block')
if (prev_b is not None):
height: uint32 = uint32((prev_b.height + 1))
else:
height = uint32(0)
self.full_node.full_node_store.add_candidate_block(quality_string, height, unfinished_block)
foliage_sb_data_hash = unfinished_block.foliage.foliage_block_data.get_hash()
if unfinished_block.is_transaction_block():
foliage_transaction_block_hash = unfinished_block.foliage.foliage_transaction_block_hash
else:
foliage_transaction_block_hash = bytes32(([0] * 32))
assert (foliage_transaction_block_hash is not None)
message = farmer_protocol.RequestSignedValues(quality_string, foliage_sb_data_hash, foliage_transaction_block_hash)
(await peer.send_message(make_msg(ProtocolMessageTypes.request_signed_values, message)))
if (unfinished_block.is_transaction_block() and (unfinished_block.transactions_generator is not None)):
unfinished_block_backup = create_unfinished_block(self.full_node.constants, total_iters_pos_slot, sub_slot_iters, request.signage_point_index, sp_iters, ip_iters, request.proof_of_space, cc_challenge_hash, farmer_ph, pool_target, get_plot_sig, get_pool_sig, sp_vdfs, timestamp, self.full_node.blockchain, b, None, G2Element(), None, None, prev_b, finished_sub_slots)
self.full_node.full_node_store.add_candidate_block(quality_string, height, unfinished_block_backup, backup=True)
return None
|
@api_request
@peer_required
async def signed_values(self, farmer_request: farmer_protocol.SignedValues, peer: ws.WSChiaConnection) -> Optional[Message]:
'\n Signature of header hash, by the harvester. This is enough to create an unfinished\n block, which only needs a Proof of Time to be finished. If the signature is valid,\n we call the unfinished_block routine.\n '
candidate_tuple: Optional[Tuple[(uint32, UnfinishedBlock)]] = self.full_node.full_node_store.get_candidate_block(farmer_request.quality_string)
if (candidate_tuple is None):
self.log.warning(f'Quality string {farmer_request.quality_string} not found in database')
return None
(height, candidate) = candidate_tuple
if (not AugSchemeMPL.verify(candidate.reward_chain_block.proof_of_space.plot_public_key, candidate.foliage.foliage_block_data.get_hash(), farmer_request.foliage_block_data_signature)):
self.log.warning('Signature not valid. There might be a collision in plots. Ignore this during tests.')
return None
fsb2 = dataclasses.replace(candidate.foliage, foliage_block_data_signature=farmer_request.foliage_block_data_signature)
if candidate.is_transaction_block():
fsb2 = dataclasses.replace(fsb2, foliage_transaction_block_signature=farmer_request.foliage_transaction_block_signature)
new_candidate = dataclasses.replace(candidate, foliage=fsb2)
if (not self.full_node.has_valid_pool_sig(new_candidate)):
self.log.warning('Trying to make a pre-farm block but height is not 0')
return None
request = full_node_protocol.RespondUnfinishedBlock(new_candidate)
try:
(await self.full_node.respond_unfinished_block(request, None, True))
except Exception as e:
self.full_node.log.error(f'Error farming block {e} {request}')
candidate_tuple = self.full_node.full_node_store.get_candidate_block(farmer_request.quality_string, backup=True)
if (candidate_tuple is not None):
(height, unfinished_block) = candidate_tuple
self.full_node.full_node_store.add_candidate_block(farmer_request.quality_string, height, unfinished_block, False)
assert (unfinished_block.foliage.foliage_transaction_block_hash is not None)
message = farmer_protocol.RequestSignedValues(farmer_request.quality_string, unfinished_block.foliage.foliage_block_data.get_hash(), unfinished_block.foliage.foliage_transaction_block_hash)
(await peer.send_message(make_msg(ProtocolMessageTypes.request_signed_values, message)))
return None
| -1,211,370,363,037,242,000
|
Signature of header hash, by the harvester. This is enough to create an unfinished
block, which only needs a Proof of Time to be finished. If the signature is valid,
we call the unfinished_block routine.
|
chia/full_node/full_node_api.py
|
signed_values
|
AppleOfEnlightenment/chia-blockchain
|
python
|
@api_request
@peer_required
async def signed_values(self, farmer_request: farmer_protocol.SignedValues, peer: ws.WSChiaConnection) -> Optional[Message]:
'\n Signature of header hash, by the harvester. This is enough to create an unfinished\n block, which only needs a Proof of Time to be finished. If the signature is valid,\n we call the unfinished_block routine.\n '
candidate_tuple: Optional[Tuple[(uint32, UnfinishedBlock)]] = self.full_node.full_node_store.get_candidate_block(farmer_request.quality_string)
if (candidate_tuple is None):
self.log.warning(f'Quality string {farmer_request.quality_string} not found in database')
return None
(height, candidate) = candidate_tuple
if (not AugSchemeMPL.verify(candidate.reward_chain_block.proof_of_space.plot_public_key, candidate.foliage.foliage_block_data.get_hash(), farmer_request.foliage_block_data_signature)):
self.log.warning('Signature not valid. There might be a collision in plots. Ignore this during tests.')
return None
fsb2 = dataclasses.replace(candidate.foliage, foliage_block_data_signature=farmer_request.foliage_block_data_signature)
if candidate.is_transaction_block():
fsb2 = dataclasses.replace(fsb2, foliage_transaction_block_signature=farmer_request.foliage_transaction_block_signature)
new_candidate = dataclasses.replace(candidate, foliage=fsb2)
if (not self.full_node.has_valid_pool_sig(new_candidate)):
self.log.warning('Trying to make a pre-farm block but height is not 0')
return None
request = full_node_protocol.RespondUnfinishedBlock(new_candidate)
try:
(await self.full_node.respond_unfinished_block(request, None, True))
except Exception as e:
self.full_node.log.error(f'Error farming block {e} {request}')
candidate_tuple = self.full_node.full_node_store.get_candidate_block(farmer_request.quality_string, backup=True)
if (candidate_tuple is not None):
(height, unfinished_block) = candidate_tuple
self.full_node.full_node_store.add_candidate_block(farmer_request.quality_string, height, unfinished_block, False)
assert (unfinished_block.foliage.foliage_transaction_block_hash is not None)
message = farmer_protocol.RequestSignedValues(farmer_request.quality_string, unfinished_block.foliage.foliage_block_data.get_hash(), unfinished_block.foliage.foliage_transaction_block_hash)
(await peer.send_message(make_msg(ProtocolMessageTypes.request_signed_values, message)))
return None
|
@api_request
async def request_ses_hashes(self, request: wallet_protocol.RequestSESInfo):
'Returns the start and end height of a sub-epoch for the height specified in request'
ses_height = self.full_node.blockchain.get_ses_heights()
start_height = request.start_height
end_height = request.end_height
ses_hash_heights = []
ses_reward_hashes = []
for (idx, ses_start_height) in enumerate(ses_height):
if (idx == (len(ses_height) - 1)):
break
next_ses_height = ses_height[(idx + 1)]
if (ses_start_height <= start_height < next_ses_height):
ses_hash_heights.append([ses_start_height, next_ses_height])
ses: SubEpochSummary = self.full_node.blockchain.get_ses(ses_start_height)
ses_reward_hashes.append(ses.reward_chain_hash)
if (ses_start_height < end_height < next_ses_height):
break
else:
if (idx == (len(ses_height) - 2)):
break
next_next_height = ses_height[(idx + 2)]
ses_hash_heights.append([next_ses_height, next_next_height])
nex_ses: SubEpochSummary = self.full_node.blockchain.get_ses(next_ses_height)
ses_reward_hashes.append(nex_ses.reward_chain_hash)
break
response = RespondSESInfo(ses_reward_hashes, ses_hash_heights)
msg = make_msg(ProtocolMessageTypes.respond_ses_hashes, response)
return msg
| -5,567,190,610,942,839,000
|
Returns the start and end height of a sub-epoch for the height specified in request
|
chia/full_node/full_node_api.py
|
request_ses_hashes
|
AppleOfEnlightenment/chia-blockchain
|
python
|
@api_request
async def request_ses_hashes(self, request: wallet_protocol.RequestSESInfo):
ses_height = self.full_node.blockchain.get_ses_heights()
start_height = request.start_height
end_height = request.end_height
ses_hash_heights = []
ses_reward_hashes = []
for (idx, ses_start_height) in enumerate(ses_height):
if (idx == (len(ses_height) - 1)):
break
next_ses_height = ses_height[(idx + 1)]
if (ses_start_height <= start_height < next_ses_height):
ses_hash_heights.append([ses_start_height, next_ses_height])
ses: SubEpochSummary = self.full_node.blockchain.get_ses(ses_start_height)
ses_reward_hashes.append(ses.reward_chain_hash)
if (ses_start_height < end_height < next_ses_height):
break
else:
if (idx == (len(ses_height) - 2)):
break
next_next_height = ses_height[(idx + 2)]
ses_hash_heights.append([next_ses_height, next_next_height])
nex_ses: SubEpochSummary = self.full_node.blockchain.get_ses(next_ses_height)
ses_reward_hashes.append(nex_ses.reward_chain_hash)
break
response = RespondSESInfo(ses_reward_hashes, ses_hash_heights)
msg = make_msg(ProtocolMessageTypes.respond_ses_hashes, response)
return msg
|
def init(empty=False):
'Initialize the platform with devices.'
global DEVICES
DEVICES = ([] if empty else [MockToggleDevice('AC', STATE_ON), MockToggleDevice('AC', STATE_OFF), MockToggleDevice(None, STATE_OFF)])
| -2,477,909,103,883,586,000
|
Initialize the platform with devices.
|
tests/testing_config/custom_components/switch/test.py
|
init
|
DevRGT/home-assistant
|
python
|
def init(empty=False):
global DEVICES
DEVICES = ([] if empty else [MockToggleDevice('AC', STATE_ON), MockToggleDevice('AC', STATE_OFF), MockToggleDevice(None, STATE_OFF)])
|
async def async_setup_platform(hass, config, async_add_devices_callback, discovery_info=None):
'Find and return test switches.'
async_add_devices_callback(DEVICES)
| -690,868,948,060,746,100
|
Find and return test switches.
|
tests/testing_config/custom_components/switch/test.py
|
async_setup_platform
|
DevRGT/home-assistant
|
python
|
async def async_setup_platform(hass, config, async_add_devices_callback, discovery_info=None):
async_add_devices_callback(DEVICES)
|
def __init__(self, title=None, first_name=None, other_names=None, last_name=None):
'Name2 - a model defined in OpenAPI'
self._title = None
self._first_name = None
self._other_names = None
self._last_name = None
self.discriminator = None
if (title is not None):
self.title = title
if (first_name is not None):
self.first_name = first_name
if (other_names is not None):
self.other_names = other_names
if (last_name is not None):
self.last_name = last_name
| -8,844,696,543,339,215,000
|
Name2 - a model defined in OpenAPI
|
velo_payments/models/name2.py
|
__init__
|
velopaymentsapi/velo-python
|
python
|
def __init__(self, title=None, first_name=None, other_names=None, last_name=None):
self._title = None
self._first_name = None
self._other_names = None
self._last_name = None
self.discriminator = None
if (title is not None):
self.title = title
if (first_name is not None):
self.first_name = first_name
if (other_names is not None):
self.other_names = other_names
if (last_name is not None):
self.last_name = last_name
|
@property
def title(self):
'Gets the title of this Name2. # noqa: E501\n\n\n :return: The title of this Name2. # noqa: E501\n :rtype: str\n '
return self._title
| 333,266,289,626,321,800
|
Gets the title of this Name2. # noqa: E501
:return: The title of this Name2. # noqa: E501
:rtype: str
|
velo_payments/models/name2.py
|
title
|
velopaymentsapi/velo-python
|
python
|
@property
def title(self):
'Gets the title of this Name2. # noqa: E501\n\n\n :return: The title of this Name2. # noqa: E501\n :rtype: str\n '
return self._title
|
@title.setter
def title(self, title):
'Sets the title of this Name2.\n\n\n :param title: The title of this Name2. # noqa: E501\n :type: str\n '
if ((title is not None) and (len(title) > 10)):
raise ValueError('Invalid value for `title`, length must be less than or equal to `10`')
if ((title is not None) and (len(title) < 1)):
raise ValueError('Invalid value for `title`, length must be greater than or equal to `1`')
self._title = title
| -7,108,323,074,611,974,000
|
Sets the title of this Name2.
:param title: The title of this Name2. # noqa: E501
:type: str
|
velo_payments/models/name2.py
|
title
|
velopaymentsapi/velo-python
|
python
|
@title.setter
def title(self, title):
'Sets the title of this Name2.\n\n\n :param title: The title of this Name2. # noqa: E501\n :type: str\n '
if ((title is not None) and (len(title) > 10)):
raise ValueError('Invalid value for `title`, length must be less than or equal to `10`')
if ((title is not None) and (len(title) < 1)):
raise ValueError('Invalid value for `title`, length must be greater than or equal to `1`')
self._title = title
|
@property
def first_name(self):
'Gets the first_name of this Name2. # noqa: E501\n\n\n :return: The first_name of this Name2. # noqa: E501\n :rtype: str\n '
return self._first_name
| 6,137,112,359,837,039,000
|
Gets the first_name of this Name2. # noqa: E501
:return: The first_name of this Name2. # noqa: E501
:rtype: str
|
velo_payments/models/name2.py
|
first_name
|
velopaymentsapi/velo-python
|
python
|
@property
def first_name(self):
'Gets the first_name of this Name2. # noqa: E501\n\n\n :return: The first_name of this Name2. # noqa: E501\n :rtype: str\n '
return self._first_name
|
@first_name.setter
def first_name(self, first_name):
'Sets the first_name of this Name2.\n\n\n :param first_name: The first_name of this Name2. # noqa: E501\n :type: str\n '
if ((first_name is not None) and (len(first_name) > 40)):
raise ValueError('Invalid value for `first_name`, length must be less than or equal to `40`')
if ((first_name is not None) and (len(first_name) < 1)):
raise ValueError('Invalid value for `first_name`, length must be greater than or equal to `1`')
self._first_name = first_name
| 4,208,640,053,617,927,700
|
Sets the first_name of this Name2.
:param first_name: The first_name of this Name2. # noqa: E501
:type: str
|
velo_payments/models/name2.py
|
first_name
|
velopaymentsapi/velo-python
|
python
|
@first_name.setter
def first_name(self, first_name):
'Sets the first_name of this Name2.\n\n\n :param first_name: The first_name of this Name2. # noqa: E501\n :type: str\n '
if ((first_name is not None) and (len(first_name) > 40)):
raise ValueError('Invalid value for `first_name`, length must be less than or equal to `40`')
if ((first_name is not None) and (len(first_name) < 1)):
raise ValueError('Invalid value for `first_name`, length must be greater than or equal to `1`')
self._first_name = first_name
|
@property
def other_names(self):
'Gets the other_names of this Name2. # noqa: E501\n\n\n :return: The other_names of this Name2. # noqa: E501\n :rtype: str\n '
return self._other_names
| -1,837,420,641,573,318,400
|
Gets the other_names of this Name2. # noqa: E501
:return: The other_names of this Name2. # noqa: E501
:rtype: str
|
velo_payments/models/name2.py
|
other_names
|
velopaymentsapi/velo-python
|
python
|
@property
def other_names(self):
'Gets the other_names of this Name2. # noqa: E501\n\n\n :return: The other_names of this Name2. # noqa: E501\n :rtype: str\n '
return self._other_names
|
@other_names.setter
def other_names(self, other_names):
'Sets the other_names of this Name2.\n\n\n :param other_names: The other_names of this Name2. # noqa: E501\n :type: str\n '
if ((other_names is not None) and (len(other_names) > 40)):
raise ValueError('Invalid value for `other_names`, length must be less than or equal to `40`')
if ((other_names is not None) and (len(other_names) < 1)):
raise ValueError('Invalid value for `other_names`, length must be greater than or equal to `1`')
self._other_names = other_names
| 1,413,021,188,074,489,000
|
Sets the other_names of this Name2.
:param other_names: The other_names of this Name2. # noqa: E501
:type: str
|
velo_payments/models/name2.py
|
other_names
|
velopaymentsapi/velo-python
|
python
|
@other_names.setter
def other_names(self, other_names):
'Sets the other_names of this Name2.\n\n\n :param other_names: The other_names of this Name2. # noqa: E501\n :type: str\n '
if ((other_names is not None) and (len(other_names) > 40)):
raise ValueError('Invalid value for `other_names`, length must be less than or equal to `40`')
if ((other_names is not None) and (len(other_names) < 1)):
raise ValueError('Invalid value for `other_names`, length must be greater than or equal to `1`')
self._other_names = other_names
|
@property
def last_name(self):
'Gets the last_name of this Name2. # noqa: E501\n\n\n :return: The last_name of this Name2. # noqa: E501\n :rtype: str\n '
return self._last_name
| 6,837,372,023,683,964,000
|
Gets the last_name of this Name2. # noqa: E501
:return: The last_name of this Name2. # noqa: E501
:rtype: str
|
velo_payments/models/name2.py
|
last_name
|
velopaymentsapi/velo-python
|
python
|
@property
def last_name(self):
'Gets the last_name of this Name2. # noqa: E501\n\n\n :return: The last_name of this Name2. # noqa: E501\n :rtype: str\n '
return self._last_name
|
@last_name.setter
def last_name(self, last_name):
'Sets the last_name of this Name2.\n\n\n :param last_name: The last_name of this Name2. # noqa: E501\n :type: str\n '
if ((last_name is not None) and (len(last_name) > 40)):
raise ValueError('Invalid value for `last_name`, length must be less than or equal to `40`')
if ((last_name is not None) and (len(last_name) < 1)):
raise ValueError('Invalid value for `last_name`, length must be greater than or equal to `1`')
self._last_name = last_name
| 7,584,318,513,949,805,000
|
Sets the last_name of this Name2.
:param last_name: The last_name of this Name2. # noqa: E501
:type: str
|
velo_payments/models/name2.py
|
last_name
|
velopaymentsapi/velo-python
|
python
|
@last_name.setter
def last_name(self, last_name):
'Sets the last_name of this Name2.\n\n\n :param last_name: The last_name of this Name2. # noqa: E501\n :type: str\n '
if ((last_name is not None) and (len(last_name) > 40)):
raise ValueError('Invalid value for `last_name`, length must be less than or equal to `40`')
if ((last_name is not None) and (len(last_name) < 1)):
raise ValueError('Invalid value for `last_name`, length must be greater than or equal to `1`')
self._last_name = last_name
|
def to_dict(self):
'Returns the model properties as a dict'
result = {}
for (attr, _) in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map((lambda x: (x.to_dict() if hasattr(x, 'to_dict') else x)), value))
elif hasattr(value, 'to_dict'):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map((lambda item: ((item[0], item[1].to_dict()) if hasattr(item[1], 'to_dict') else item)), value.items()))
else:
result[attr] = value
return result
| 8,442,519,487,048,767,000
|
Returns the model properties as a dict
|
velo_payments/models/name2.py
|
to_dict
|
velopaymentsapi/velo-python
|
python
|
def to_dict(self):
result = {}
for (attr, _) in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map((lambda x: (x.to_dict() if hasattr(x, 'to_dict') else x)), value))
elif hasattr(value, 'to_dict'):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map((lambda item: ((item[0], item[1].to_dict()) if hasattr(item[1], 'to_dict') else item)), value.items()))
else:
result[attr] = value
return result
|
def to_str(self):
'Returns the string representation of the model'
return pprint.pformat(self.to_dict())
| 5,849,158,643,760,736,000
|
Returns the string representation of the model
|
velo_payments/models/name2.py
|
to_str
|
velopaymentsapi/velo-python
|
python
|
def to_str(self):
return pprint.pformat(self.to_dict())
|
def __repr__(self):
'For `print` and `pprint`'
return self.to_str()
| -8,960,031,694,814,905,000
|
For `print` and `pprint`
|
velo_payments/models/name2.py
|
__repr__
|
velopaymentsapi/velo-python
|
python
|
def __repr__(self):
return self.to_str()
|
def __eq__(self, other):
'Returns true if both objects are equal'
if (not isinstance(other, Name2)):
return False
return (self.__dict__ == other.__dict__)
| 5,736,994,555,762,344,000
|
Returns true if both objects are equal
|
velo_payments/models/name2.py
|
__eq__
|
velopaymentsapi/velo-python
|
python
|
def __eq__(self, other):
if (not isinstance(other, Name2)):
return False
return (self.__dict__ == other.__dict__)
|
def __ne__(self, other):
'Returns true if both objects are not equal'
return (not (self == other))
| 7,764,124,047,908,058,000
|
Returns true if both objects are not equal
|
velo_payments/models/name2.py
|
__ne__
|
velopaymentsapi/velo-python
|
python
|
def __ne__(self, other):
return (not (self == other))
|
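A minimal usage sketch for the Name2 model above; the import path and field values are assumptions for illustration, and it presumes the generated velo_payments package is installed.

# Hypothetical usage of the Name2 model (assumes velo_payments is importable).
from velo_payments.models.name2 import Name2

name = Name2(title='Dr', first_name='Ada', last_name='Lovelace')
print(name.to_dict())  # dict with title/first_name/other_names/last_name keys
print(name.to_str())   # pprint-formatted version of the same dict

# The setters enforce the length bounds, so an over-long title is rejected.
try:
    name.title = 'x' * 11
except ValueError as exc:
    print(exc)  # Invalid value for `title`, length must be less than or equal to `10`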
def convert(self):
'Perform the conversion from datapackage to destination format\n '
handle = self._header()
logger.debug(self.default_values)
for (name, df) in self.package.items():
logger.debug(name)
if df.empty:
columns = [x['name'] for x in df._metadata['schema']['fields']]
df = pd.DataFrame(columns=columns)
df = df.reset_index()
if ('index' in df.columns):
df = df.drop(columns='index')
logger.debug('Number of columns: %s, %s', len(df.columns), df.columns)
if (len(df.columns) > 1):
default_value = self.default_values[name]
self._write_parameter(df, name, handle, default=default_value)
else:
self._write_set(df, name, handle)
self._footer(handle)
handle.close()
| 5,956,221,640,661,579,000
|
Perform the conversion from datapackage to destination format
|
src/otoole/preprocess/narrow_to_datafile.py
|
convert
|
chrwm/otoole
|
python
|
def convert(self):
'\n '
handle = self._header()
logger.debug(self.default_values)
for (name, df) in self.package.items():
logger.debug(name)
if df.empty:
columns = [x['name'] for x in df._metadata['schema']['fields']]
df = pd.DataFrame(columns=columns)
df = df.reset_index()
if ('index' in df.columns):
df = df.drop(columns='index')
logger.debug('Number of columns: %s, %s', len(df.columns), df.columns)
if (len(df.columns) > 1):
default_value = self.default_values[name]
self._write_parameter(df, name, handle, default=default_value)
else:
self._write_set(df, name, handle)
self._footer(handle)
handle.close()
|
@abstractmethod
def _write_parameter(self, df: pd.DataFrame, parameter_name: str, handle: TextIO, default: float) -> pd.DataFrame:
'Write parameter data'
raise NotImplementedError()
| 1,703,931,284,228,020,700
|
Write parameter data
|
src/otoole/preprocess/narrow_to_datafile.py
|
_write_parameter
|
chrwm/otoole
|
python
|
@abstractmethod
def _write_parameter(self, df: pd.DataFrame, parameter_name: str, handle: TextIO, default: float) -> pd.DataFrame:
raise NotImplementedError()
|
@abstractmethod
def _write_set(self, df: pd.DataFrame, set_name, handle: TextIO) -> pd.DataFrame:
'Write set data'
raise NotImplementedError()
| 5,731,673,263,464,903,000
|
Write set data
|
src/otoole/preprocess/narrow_to_datafile.py
|
_write_set
|
chrwm/otoole
|
python
|
@abstractmethod
def _write_set(self, df: pd.DataFrame, set_name, handle: TextIO) -> pd.DataFrame:
raise NotImplementedError()
|
def _write_parameter(self, df: pd.DataFrame, parameter_name: str, handle: TextIO, default: float):
'Write parameter data to a csv file, omitting data which matches the default value\n\n Arguments\n ---------\n filepath : StreamIO\n df : pandas.DataFrame\n parameter_name : str\n handle: TextIO\n default : int\n '
df = self._form_parameter(df, default)
handle.write('param default {} : {} :=\n'.format(default, parameter_name))
df.to_csv(path_or_buf=handle, sep=' ', header=False, index=False)
handle.write(';\n')
| 3,071,068,969,233,579,500
|
Write parameter data to a csv file, omitting data which matches the default value
Arguments
---------
filepath : StreamIO
df : pandas.DataFrame
parameter_name : str
handle: TextIO
default : int
|
src/otoole/preprocess/narrow_to_datafile.py
|
_write_parameter
|
chrwm/otoole
|
python
|
def _write_parameter(self, df: pd.DataFrame, parameter_name: str, handle: TextIO, default: float):
'Write parameter data to a csv file, omitting data which matches the default value\n\n Arguments\n ---------\n filepath : StreamIO\n df : pandas.DataFrame\n parameter_name : str\n handle: TextIO\n default : int\n '
df = self._form_parameter(df, default)
handle.write('param default {} : {} :=\n'.format(default, parameter_name))
df.to_csv(path_or_buf=handle, sep=' ', header=False, index=False)
handle.write(';\n')
|
def _write_set(self, df: pd.DataFrame, set_name, handle: TextIO):
'\n\n Arguments\n ---------\n df : pandas.DataFrame\n set_name : str\n handle: TextIO\n '
handle.write('set {} :=\n'.format(set_name))
df.to_csv(path_or_buf=handle, sep=' ', header=False, index=False)
handle.write(';\n')
| 3,455,871,895,498,749,000
|
Arguments
---------
df : pandas.DataFrame
set_name : str
handle: TextIO
|
src/otoole/preprocess/narrow_to_datafile.py
|
_write_set
|
chrwm/otoole
|
python
|
def _write_set(self, df: pd.DataFrame, set_name, handle: TextIO):
'\n\n Arguments\n ---------\n df : pandas.DataFrame\n set_name : str\n handle: TextIO\n '
handle.write('set {} :=\n'.format(set_name))
df.to_csv(path_or_buf=handle, sep=' ', header=False, index=False)
handle.write(';\n')
|
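To make the datafile layout produced by _write_set and _write_parameter above concrete, here is a small self-contained sketch that writes the same fragments to an in-memory buffer; the set and parameter names and values are illustrative only.

# Self-contained sketch of the fragments emitted by _write_set / _write_parameter
# above (REGION / ResidualCapacity and the values are made-up examples).
import io

import pandas as pd

handle = io.StringIO()

# One-column set block: "set NAME := ... ;"
regions = pd.DataFrame({'VALUE': ['SIMPLICITY']})
handle.write('set REGION :=\n')
regions.to_csv(path_or_buf=handle, sep=' ', header=False, index=False)
handle.write(';\n')

# Parameter block with a default: "param default D : NAME := ... ;"
capacity = pd.DataFrame({'REGION': ['SIMPLICITY'], 2020: [1.5], 2021: [2.0]})
handle.write('param default {} : {} :=\n'.format(0, 'ResidualCapacity'))
capacity.to_csv(path_or_buf=handle, sep=' ', header=False, index=False)
handle.write(';\n')

print(handle.getvalue())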
def _form_parameter(self, df: pd.DataFrame, parameter_name: str, default: float) -> pd.DataFrame:
'Converts data into wide format\n\n Arguments\n ---------\n df: pd.DataFrame\n parameter_name: str\n default: float\n\n Returns\n -------\n pandas.DataFrame\n '
if (not df.empty):
names = df.columns.to_list()
if (len(names) > 2):
logger.debug('More than 2 columns for {}: {}'.format(parameter_name, names))
rows = names[0:(- 2)]
columns = names[(- 2)]
values = names[(- 1)]
logger.debug('Rows: {}; columns: {}; values: {}', rows, columns, values)
logger.debug('dtypes: {}'.format(df.dtypes))
pivot = pd.pivot_table(df, index=rows, columns=columns, values=values, fill_value=default)
elif (len(names) == 2):
logger.debug('Two columns for {}: {}'.format(parameter_name, names))
values = names[(- 1)]
rows = names[0:(- 2)]
logger.debug('Rows: {}; values: {}', rows, values)
pivot = pd.pivot_table(df, index=rows, values=values, fill_value=default)
else:
logger.debug('One column for {}: {}'.format(parameter_name, names))
pivot = df.copy()
pivot = pivot.reset_index(drop=True)
else:
logger.debug('Dataframe {} is empty'.format(parameter_name))
pivot = df.copy()
return pivot
| -7,803,264,701,268,087,000
|
Converts data into wide format
Arguments
---------
df: pd.DataFrame
parameter_name: str
default: float
Returns
-------
pandas.DataFrame
|
src/otoole/preprocess/narrow_to_datafile.py
|
_form_parameter
|
chrwm/otoole
|
python
|
def _form_parameter(self, df: pd.DataFrame, parameter_name: str, default: float) -> pd.DataFrame:
'Converts data into wide format\n\n Arguments\n ---------\n df: pd.DataFrame\n parameter_name: str\n default: float\n\n Returns\n -------\n pandas.DataFrame\n '
if (not df.empty):
names = df.columns.to_list()
if (len(names) > 2):
logger.debug('More than 2 columns for {}: {}'.format(parameter_name, names))
rows = names[0:(- 2)]
columns = names[(- 2)]
values = names[(- 1)]
logger.debug('Rows: {}; columns: {}; values: {}', rows, columns, values)
logger.debug('dtypes: {}'.format(df.dtypes))
pivot = pd.pivot_table(df, index=rows, columns=columns, values=values, fill_value=default)
elif (len(names) == 2):
logger.debug('Two columns for {}: {}'.format(parameter_name, names))
values = names[(- 1)]
rows = names[0:(- 2)]
logger.debug('Rows: {}; values: {}', rows, values)
pivot = pd.pivot_table(df, index=rows, values=values, fill_value=default)
else:
logger.debug('One column for {}: {}'.format(parameter_name, names))
pivot = df.copy()
pivot = pivot.reset_index(drop=True)
else:
logger.debug('Dataframe {} is empty'.format(parameter_name))
pivot = df.copy()
return pivot
|
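A self-contained toy demonstration of the wide-format pivot that _form_parameter above performs for the "more than 2 columns" case; the column names and the default of 0 are assumptions for illustration.

# Toy version of the pivot done by _form_parameter above.
import pandas as pd

df = pd.DataFrame({
    'REGION': ['A', 'A', 'B'],
    'YEAR': [2020, 2021, 2020],
    'VALUE': [1.0, 2.0, 3.0],
})

names = df.columns.to_list()
rows, columns, values = names[0:-2], names[-2], names[-1]
# REGION stays as the index, YEAR values become columns, and the missing
# (B, 2021) cell is filled with the default value 0.
pivot = pd.pivot_table(df, index=rows, columns=columns, values=values, fill_value=0)
print(pivot)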
def RunGit(args, **kwargs):
'Returns stdout.'
return RunCommand((['git'] + args), **kwargs)
| -5,287,102,442,574,532,000
|
Returns stdout.
|
git_cl.py
|
RunGit
|
wuyong2k/chromium_depot_tool
|
python
|
def RunGit(args, **kwargs):
return RunCommand((['git'] + args), **kwargs)
|
def RunGitWithCode(args, suppress_stderr=False):
'Returns return code and stdout.'
try:
if suppress_stderr:
stderr = subprocess2.VOID
else:
stderr = sys.stderr
(out, code) = subprocess2.communicate((['git'] + args), env=GetNoGitPagerEnv(), stdout=subprocess2.PIPE, stderr=stderr)
return (code, out[0])
except ValueError:
return (1, '')
| 7,143,899,178,419,619,000
|
Returns return code and stdout.
|
git_cl.py
|
RunGitWithCode
|
wuyong2k/chromium_depot_tool
|
python
|
def RunGitWithCode(args, suppress_stderr=False):
try:
if suppress_stderr:
stderr = subprocess2.VOID
else:
stderr = sys.stderr
(out, code) = subprocess2.communicate((['git'] + args), env=GetNoGitPagerEnv(), stdout=subprocess2.PIPE, stderr=stderr)
return (code, out[0])
except ValueError:
        return (1, '')
|
def RunGitSilent(args):
'Returns stdout, suppresses stderr and ignores the return code.'
return RunGitWithCode(args, suppress_stderr=True)[1]
| -1,088,870,218,897,835,300
|
Returns stdout, suppresses stderr and ignores the return code.
|
git_cl.py
|
RunGitSilent
|
wuyong2k/chromium_depot_tool
|
python
|
def RunGitSilent(args):
return RunGitWithCode(args, suppress_stderr=True)[1]
|
def BranchExists(branch):
'Return True if specified branch exists.'
(code, _) = RunGitWithCode(['rev-parse', '--verify', branch], suppress_stderr=True)
return (not code)
| -5,487,771,598,012,838,000
|
Return True if specified branch exists.
|
git_cl.py
|
BranchExists
|
wuyong2k/chromium_depot_tool
|
python
|
def BranchExists(branch):
(code, _) = RunGitWithCode(['rev-parse', '--verify', branch], suppress_stderr=True)
return (not code)
|
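A brief usage sketch for the four git helpers above; it assumes git_cl.py from depot_tools is importable and that the current directory is a git checkout, so treat it as illustrative rather than a supported entry point.

# Hypothetical usage of RunGit / RunGitWithCode / RunGitSilent / BranchExists.
import git_cl  # assumption: depot_tools is on sys.path

head = git_cl.RunGit(['rev-parse', 'HEAD']).strip()        # stdout of the git command
code, out = git_cl.RunGitWithCode(['rev-parse', '--verify', 'refs/heads/main'],
                                  suppress_stderr=True)    # (return code, stdout)
status = git_cl.RunGitSilent(['status', '--short'])        # stdout, stderr suppressed
if git_cl.BranchExists('main'):
    print('main exists at', head)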
def _prefix_master(master):
"Convert user-specified master name to full master name.\n\n Buildbucket uses full master name(master.tryserver.chromium.linux) as bucket\n name, while the developers always use shortened master name\n (tryserver.chromium.linux) by stripping off the prefix 'master.'. This\n function does the conversion for buildbucket migration.\n "
prefix = 'master.'
if master.startswith(prefix):
return master
return ('%s%s' % (prefix, master))
| -8,604,759,538,425,906,000
|
Convert user-specified master name to full master name.
Buildbucket uses full master name(master.tryserver.chromium.linux) as bucket
name, while the developers always use shortened master name
(tryserver.chromium.linux) by stripping off the prefix 'master.'. This
function does the conversion for buildbucket migration.
|
git_cl.py
|
_prefix_master
|
wuyong2k/chromium_depot_tool
|
python
|
def _prefix_master(master):
"Convert user-specified master name to full master name.\n\n Buildbucket uses full master name(master.tryserver.chromium.linux) as bucket\n name, while the developers always use shortened master name\n (tryserver.chromium.linux) by stripping off the prefix 'master.'. This\n function does the conversion for buildbucket migration.\n "
prefix = 'master.'
if master.startswith(prefix):
return master
return ('%s%s' % (prefix, master))
|
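A self-contained illustration of the normalisation performed by _prefix_master above; the master names are examples only.

# Standalone copy of the _prefix_master logic above, exercised on two examples.
def _prefix_master(master):
    prefix = 'master.'
    if master.startswith(prefix):
        return master
    return '%s%s' % (prefix, master)

assert _prefix_master('tryserver.chromium.linux') == 'master.tryserver.chromium.linux'
assert _prefix_master('master.tryserver.chromium.linux') == 'master.tryserver.chromium.linux'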
def _buildbucket_retry(operation_name, http, *args, **kwargs):
'Retries requests to buildbucket service and returns parsed json content.'
try_count = 0
while True:
(response, content) = http.request(*args, **kwargs)
try:
content_json = json.loads(content)
except ValueError:
content_json = None
if (content_json and content_json.get('error')):
error = content_json.get('error')
if (error.get('code') == 403):
raise BuildbucketResponseException(('Access denied: %s' % error.get('message', '')))
msg = ('Error in response. Reason: %s. Message: %s.' % (error.get('reason', ''), error.get('message', '')))
raise BuildbucketResponseException(msg)
if (response.status == 200):
if (not content_json):
raise BuildbucketResponseException(('Buildbucket returns invalid json content: %s.\nPlease file bugs at http://crbug.com, label "Infra-BuildBucket".' % content))
return content_json
if ((response.status < 500) or (try_count >= 2)):
raise httplib2.HttpLib2Error(content)
logging.debug('Transient errors when %s. Will retry.', operation_name)
time.sleep((0.5 + (1.5 * try_count)))
try_count += 1
assert False, 'unreachable'
| 4,700,090,738,834,549,000
|
Retries requests to buildbucket service and returns parsed json content.
|
git_cl.py
|
_buildbucket_retry
|
wuyong2k/chromium_depot_tool
|
python
|
def _buildbucket_retry(operation_name, http, *args, **kwargs):
try_count = 0
while True:
(response, content) = http.request(*args, **kwargs)
try:
content_json = json.loads(content)
except ValueError:
content_json = None
if (content_json and content_json.get('error')):
error = content_json.get('error')
if (error.get('code') == 403):
                raise BuildbucketResponseException(('Access denied: %s' % error.get('message', '')))
            msg = ('Error in response. Reason: %s. Message: %s.' % (error.get('reason', ''), error.get('message', '')))
raise BuildbucketResponseException(msg)
if (response.status == 200):
if (not content_json):
raise BuildbucketResponseException(('Buildbucket returns invalid json content: %s.\nPlease file bugs at http://crbug.com, label "Infra-BuildBucket".' % content))
return content_json
if ((response.status < 500) or (try_count >= 2)):
raise httplib2.HttpLib2Error(content)
logging.debug('Transient errors when %s. Will retry.', operation_name)
time.sleep((0.5 + (1.5 * try_count)))
try_count += 1
assert False, 'unreachable'
|
def trigger_luci_job(changelist, masters, options):
'Send a job to run on LUCI.'
issue_props = changelist.GetIssueProperties()
issue = changelist.GetIssue()
patchset = changelist.GetMostRecentPatchset()
for builders_and_tests in sorted(masters.itervalues()):
for builder in sorted(builders_and_tests):
luci_trigger.trigger(builder, 'HEAD', issue, patchset, issue_props['project'])
| -6,207,832,785,841,236,000
|
Send a job to run on LUCI.
|
git_cl.py
|
trigger_luci_job
|
wuyong2k/chromium_depot_tool
|
python
|
def trigger_luci_job(changelist, masters, options):
issue_props = changelist.GetIssueProperties()
issue = changelist.GetIssue()
patchset = changelist.GetMostRecentPatchset()
for builders_and_tests in sorted(masters.itervalues()):
for builder in sorted(builders_and_tests):
luci_trigger.trigger(builder, 'HEAD', issue, patchset, issue_props['project'])
|
def fetch_try_jobs(auth_config, changelist, options):
'Fetches tryjobs from buildbucket.\n\n Returns a map from build id to build info as json dictionary.\n '
rietveld_url = settings.GetDefaultServerUrl()
rietveld_host = urlparse.urlparse(rietveld_url).hostname
authenticator = auth.get_authenticator_for_host(rietveld_host, auth_config)
if authenticator.has_cached_credentials():
http = authenticator.authorize(httplib2.Http())
else:
print(('Warning: Some results might be missing because %s' % (auth.LoginRequiredError(rietveld_host).message,)))
http = httplib2.Http()
http.force_exception_to_status_code = True
buildset = 'patch/rietveld/{hostname}/{issue}/{patch}'.format(hostname=rietveld_host, issue=changelist.GetIssue(), patch=options.patchset)
params = {'tag': ('buildset:%s' % buildset)}
builds = {}
while True:
url = 'https://{hostname}/_ah/api/buildbucket/v1/search?{params}'.format(hostname=options.buildbucket_host, params=urllib.urlencode(params))
content = _buildbucket_retry('fetching tryjobs', http, url, 'GET')
for build in content.get('builds', []):
builds[build['id']] = build
if ('next_cursor' in content):
params['start_cursor'] = content['next_cursor']
else:
break
return builds
| -8,651,788,814,486,410,000
|
Fetches tryjobs from buildbucket.
Returns a map from build id to build info as json dictionary.
|
git_cl.py
|
fetch_try_jobs
|
wuyong2k/chromium_depot_tool
|
python
|
def fetch_try_jobs(auth_config, changelist, options):
'Fetches tryjobs from buildbucket.\n\n Returns a map from build id to build info as json dictionary.\n '
rietveld_url = settings.GetDefaultServerUrl()
rietveld_host = urlparse.urlparse(rietveld_url).hostname
authenticator = auth.get_authenticator_for_host(rietveld_host, auth_config)
if authenticator.has_cached_credentials():
http = authenticator.authorize(httplib2.Http())
else:
print(('Warning: Some results might be missing because %s' % (auth.LoginRequiredError(rietveld_host).message,)))
http = httplib2.Http()
http.force_exception_to_status_code = True
buildset = 'patch/rietveld/{hostname}/{issue}/{patch}'.format(hostname=rietveld_host, issue=changelist.GetIssue(), patch=options.patchset)
params = {'tag': ('buildset:%s' % buildset)}
builds = {}
while True:
url = 'https://{hostname}/_ah/api/buildbucket/v1/search?{params}'.format(hostname=options.buildbucket_host, params=urllib.urlencode(params))
content = _buildbucket_retry('fetching tryjobs', http, url, 'GET')
for build in content.get('builds', []):
builds[build['id']] = build
if ('next_cursor' in content):
params['start_cursor'] = content['next_cursor']
else:
break
return builds
|
def print_tryjobs(options, builds):
'Prints nicely result of fetch_try_jobs.'
if (not builds):
print('No tryjobs scheduled')
return
builds = builds.copy()
builder_names_cache = {}
def get_builder(b):
try:
return builder_names_cache[b['id']]
except KeyError:
try:
parameters = json.loads(b['parameters_json'])
name = parameters['builder_name']
except (ValueError, KeyError) as error:
print(('WARNING: failed to get builder name for build %s: %s' % (b['id'], error)))
name = None
builder_names_cache[b['id']] = name
return name
def get_bucket(b):
bucket = b['bucket']
if bucket.startswith('master.'):
return bucket[len('master.'):]
return bucket
if options.print_master:
name_fmt = ('%%-%ds %%-%ds' % (max((len(str(get_bucket(b))) for b in builds.itervalues())), max((len(str(get_builder(b))) for b in builds.itervalues()))))
def get_name(b):
return (name_fmt % (get_bucket(b), get_builder(b)))
else:
name_fmt = ('%%-%ds' % max((len(str(get_builder(b))) for b in builds.itervalues())))
def get_name(b):
return (name_fmt % get_builder(b))
def sort_key(b):
return (b['status'], b.get('result'), get_name(b), b.get('url'))
def pop(title, f, color=None, **kwargs):
'Pop matching builds from `builds` dict and print them.'
if ((not options.color) or (color is None)):
colorize = str
else:
colorize = (lambda x: ('%s%s%s' % (color, x, Fore.RESET)))
result = []
for b in builds.values():
if all(((b.get(k) == v) for (k, v) in kwargs.iteritems())):
builds.pop(b['id'])
result.append(b)
if result:
print(colorize(title))
for b in sorted(result, key=sort_key):
print(' ', colorize('\t'.join(map(str, f(b)))))
total = len(builds)
pop(status='COMPLETED', result='SUCCESS', title='Successes:', color=Fore.GREEN, f=(lambda b: (get_name(b), b.get('url'))))
pop(status='COMPLETED', result='FAILURE', failure_reason='INFRA_FAILURE', title='Infra Failures:', color=Fore.MAGENTA, f=(lambda b: (get_name(b), b.get('url'))))
pop(status='COMPLETED', result='FAILURE', failure_reason='BUILD_FAILURE', title='Failures:', color=Fore.RED, f=(lambda b: (get_name(b), b.get('url'))))
pop(status='COMPLETED', result='CANCELED', title='Canceled:', color=Fore.MAGENTA, f=(lambda b: (get_name(b),)))
pop(status='COMPLETED', result='FAILURE', failure_reason='INVALID_BUILD_DEFINITION', title='Wrong master/builder name:', color=Fore.MAGENTA, f=(lambda b: (get_name(b),)))
pop(status='COMPLETED', result='FAILURE', title='Other failures:', f=(lambda b: (get_name(b), b.get('failure_reason'), b.get('url'))))
pop(status='COMPLETED', title='Other finished:', f=(lambda b: (get_name(b), b.get('result'), b.get('url'))))
pop(status='STARTED', title='Started:', color=Fore.YELLOW, f=(lambda b: (get_name(b), b.get('url'))))
pop(status='SCHEDULED', title='Scheduled:', f=(lambda b: (get_name(b), ('id=%s' % b['id']))))
pop(title='Other:', f=(lambda b: (get_name(b), ('id=%s' % b['id']))))
assert (len(builds) == 0)
print(('Total: %d tryjobs' % total))
| 3,467,436,648,795,702,000
|
Prints nicely result of fetch_try_jobs.
|
git_cl.py
|
print_tryjobs
|
wuyong2k/chromium_depot_tool
|
python
|
def print_tryjobs(options, builds):
if (not builds):
print('No tryjobs scheduled')
return
builds = builds.copy()
builder_names_cache = {}
def get_builder(b):
try:
return builder_names_cache[b['id']]
except KeyError:
try:
parameters = json.loads(b['parameters_json'])
name = parameters['builder_name']
except (ValueError, KeyError) as error:
print(('WARNING: failed to get builder name for build %s: %s' % (b['id'], error)))
name = None
builder_names_cache[b['id']] = name
return name
def get_bucket(b):
bucket = b['bucket']
if bucket.startswith('master.'):
return bucket[len('master.'):]
return bucket
if options.print_master:
name_fmt = ('%%-%ds %%-%ds' % (max((len(str(get_bucket(b))) for b in builds.itervalues())), max((len(str(get_builder(b))) for b in builds.itervalues()))))
def get_name(b):
return (name_fmt % (get_bucket(b), get_builder(b)))
else:
name_fmt = ('%%-%ds' % max((len(str(get_builder(b))) for b in builds.itervalues())))
def get_name(b):
return (name_fmt % get_builder(b))
def sort_key(b):
return (b['status'], b.get('result'), get_name(b), b.get('url'))
def pop(title, f, color=None, **kwargs):
'Pop matching builds from `builds` dict and print them.'
if ((not options.color) or (color is None)):
colorize = str
else:
colorize = (lambda x: ('%s%s%s' % (color, x, Fore.RESET)))
result = []
for b in builds.values():
if all(((b.get(k) == v) for (k, v) in kwargs.iteritems())):
builds.pop(b['id'])
result.append(b)
if result:
print(colorize(title))
for b in sorted(result, key=sort_key):
print(' ', colorize('\t'.join(map(str, f(b)))))
total = len(builds)
pop(status='COMPLETED', result='SUCCESS', title='Successes:', color=Fore.GREEN, f=(lambda b: (get_name(b), b.get('url'))))
pop(status='COMPLETED', result='FAILURE', failure_reason='INFRA_FAILURE', title='Infra Failures:', color=Fore.MAGENTA, f=(lambda b: (get_name(b), b.get('url'))))
pop(status='COMPLETED', result='FAILURE', failure_reason='BUILD_FAILURE', title='Failures:', color=Fore.RED, f=(lambda b: (get_name(b), b.get('url'))))
pop(status='COMPLETED', result='CANCELED', title='Canceled:', color=Fore.MAGENTA, f=(lambda b: (get_name(b),)))
pop(status='COMPLETED', result='FAILURE', failure_reason='INVALID_BUILD_DEFINITION', title='Wrong master/builder name:', color=Fore.MAGENTA, f=(lambda b: (get_name(b),)))
pop(status='COMPLETED', result='FAILURE', title='Other failures:', f=(lambda b: (get_name(b), b.get('failure_reason'), b.get('url'))))
pop(status='COMPLETED', title='Other finished:', f=(lambda b: (get_name(b), b.get('result'), b.get('url'))))
pop(status='STARTED', title='Started:', color=Fore.YELLOW, f=(lambda b: (get_name(b), b.get('url'))))
pop(status='SCHEDULED', title='Scheduled:', f=(lambda b: (get_name(b), ('id=%s' % b['id']))))
pop(title='Other:', f=(lambda b: (get_name(b), ('id=%s' % b['id']))))
assert (len(builds) == 0)
print(('Total: %d tryjobs' % total))
|
def MatchSvnGlob(url, base_url, glob_spec, allow_wildcards):
'Return the corresponding git ref if |base_url| together with |glob_spec|\n matches the full |url|.\n\n If |allow_wildcards| is true, |glob_spec| can contain wildcards (see below).\n '
(fetch_suburl, as_ref) = glob_spec.split(':')
if allow_wildcards:
glob_match = re.match('(.+/)?(\\*|{[^/]*})(/.+)?', fetch_suburl)
if glob_match:
branch_re = re.escape(base_url)
if glob_match.group(1):
branch_re += ('/' + re.escape(glob_match.group(1)))
wildcard = glob_match.group(2)
if (wildcard == '*'):
branch_re += '([^/]*)'
else:
wildcard = re.escape(wildcard)
wildcard = re.sub('^\\\\{', '(', wildcard)
wildcard = re.sub('\\\\,', '|', wildcard)
wildcard = re.sub('\\\\}$', ')', wildcard)
branch_re += wildcard
if glob_match.group(3):
branch_re += re.escape(glob_match.group(3))
match = re.match(branch_re, url)
if match:
return re.sub('\\*$', match.group(1), as_ref)
if fetch_suburl:
full_url = ((base_url + '/') + fetch_suburl)
else:
full_url = base_url
if (full_url == url):
return as_ref
return None
| -7,813,583,685,021,503,000
|
Return the corresponding git ref if |base_url| together with |glob_spec|
matches the full |url|.
If |allow_wildcards| is true, |glob_spec| can contain wildcards (see below).
|
git_cl.py
|
MatchSvnGlob
|
wuyong2k/chromium_depot_tool
|
python
|
def MatchSvnGlob(url, base_url, glob_spec, allow_wildcards):
'Return the corresponding git ref if |base_url| together with |glob_spec|\n matches the full |url|.\n\n If |allow_wildcards| is true, |glob_spec| can contain wildcards (see below).\n '
(fetch_suburl, as_ref) = glob_spec.split(':')
if allow_wildcards:
glob_match = re.match('(.+/)?(\\*|{[^/]*})(/.+)?', fetch_suburl)
if glob_match:
branch_re = re.escape(base_url)
if glob_match.group(1):
branch_re += ('/' + re.escape(glob_match.group(1)))
wildcard = glob_match.group(2)
if (wildcard == '*'):
branch_re += '([^/]*)'
else:
wildcard = re.escape(wildcard)
wildcard = re.sub('^\\\\{', '(', wildcard)
wildcard = re.sub('\\\\,', '|', wildcard)
wildcard = re.sub('\\\\}$', ')', wildcard)
branch_re += wildcard
if glob_match.group(3):
branch_re += re.escape(glob_match.group(3))
match = re.match(branch_re, url)
if match:
return re.sub('\\*$', match.group(1), as_ref)
if fetch_suburl:
full_url = ((base_url + '/') + fetch_suburl)
else:
full_url = base_url
if (full_url == url):
return as_ref
return None
|
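An illustrative call to MatchSvnGlob above showing how a wildcard glob spec maps an SVN branch URL onto a git ref; the URLs are examples and the snippet assumes git_cl.py is importable.

# Hypothetical call to MatchSvnGlob (assumes git_cl.py from depot_tools is importable).
import git_cl

ref = git_cl.MatchSvnGlob('svn://svn.chromium.org/chrome/branches/1234/src',
                          'svn://svn.chromium.org/chrome',
                          'branches/*/src:refs/remotes/branch-heads/*',
                          allow_wildcards=True)
# Expected: 'refs/remotes/branch-heads/1234' -- the '*' captures the branch
# number from the URL and is substituted into the ref template after the ':'.
print(ref)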
def print_stats(similarity, find_copies, args):
'Prints statistics about the change to the user.'
env = GetNoGitPagerEnv()
if ('GIT_EXTERNAL_DIFF' in env):
del env['GIT_EXTERNAL_DIFF']
if find_copies:
similarity_options = ['--find-copies-harder', '-l100000', ('-C%s' % similarity)]
else:
similarity_options = [('-M%s' % similarity)]
try:
stdout = sys.stdout.fileno()
except AttributeError:
stdout = None
return subprocess2.call(((['git', 'diff', '--no-ext-diff', '--stat'] + similarity_options) + args), stdout=stdout, env=env)
| -2,500,755,617,917,667,300
|
Prints statistics about the change to the user.
|
git_cl.py
|
print_stats
|
wuyong2k/chromium_depot_tool
|
python
|
def print_stats(similarity, find_copies, args):
env = GetNoGitPagerEnv()
if ('GIT_EXTERNAL_DIFF' in env):
del env['GIT_EXTERNAL_DIFF']
if find_copies:
similarity_options = ['--find-copies-harder', '-l100000', ('-C%s' % similarity)]
else:
similarity_options = [('-M%s' % similarity)]
try:
stdout = sys.stdout.fileno()
except AttributeError:
stdout = None
return subprocess2.call(((['git', 'diff', '--no-ext-diff', '--stat'] + similarity_options) + args), stdout=stdout, env=env)
|
def ShortBranchName(branch):
"Convert a name like 'refs/heads/foo' to just 'foo'."
return branch.replace('refs/heads/', '', 1)
| 1,362,601,367,376,277,200
|
Convert a name like 'refs/heads/foo' to just 'foo'.
|
git_cl.py
|
ShortBranchName
|
wuyong2k/chromium_depot_tool
|
python
|
def ShortBranchName(branch):
    return branch.replace('refs/heads/', '', 1)
|
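A self-contained check of ShortBranchName above; the branch names are examples.

# Standalone copy of ShortBranchName with two example inputs.
def ShortBranchName(branch):
    return branch.replace('refs/heads/', '', 1)

assert ShortBranchName('refs/heads/feature/foo') == 'feature/foo'
assert ShortBranchName('refs/tags/v1.0') == 'refs/tags/v1.0'  # only refs/heads/ is stripped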
def GetCurrentBranchRef():
'Returns branch ref (e.g., refs/heads/master) or None.'
return (RunGit(['symbolic-ref', 'HEAD'], stderr=subprocess2.VOID, error_ok=True).strip() or None)
| -5,729,977,296,304,144,000
|
Returns branch ref (e.g., refs/heads/master) or None.
|
git_cl.py
|
GetCurrentBranchRef
|
wuyong2k/chromium_depot_tool
|
python
|
def GetCurrentBranchRef():
return (RunGit(['symbolic-ref', 'HEAD'], stderr=subprocess2.VOID, error_ok=True).strip() or None)
|
def GetCurrentBranch():
'Returns current branch or None.\n\n For refs/heads/* branches, returns just last part. For others, full ref.\n '
branchref = GetCurrentBranchRef()
if branchref:
return ShortBranchName(branchref)
return None
| 2,375,936,194,256,374,300
|
Returns current branch or None.
For refs/heads/* branches, returns just last part. For others, full ref.
|
git_cl.py
|
GetCurrentBranch
|
wuyong2k/chromium_depot_tool
|
python
|
def GetCurrentBranch():
'Returns current branch or None.\n\n For refs/heads/* branches, returns just last part. For others, full ref.\n '
branchref = GetCurrentBranchRef()
if branchref:
return ShortBranchName(branchref)
return None
|
def ParseIssueNumberArgument(arg):
'Parses the issue argument and returns _ParsedIssueNumberArgument.'
fail_result = _ParsedIssueNumberArgument()
if arg.isdigit():
return _ParsedIssueNumberArgument(issue=int(arg))
if (not arg.startswith('http')):
return fail_result
url = gclient_utils.UpgradeToHttps(arg)
try:
parsed_url = urlparse.urlparse(url)
except ValueError:
return fail_result
for cls in _CODEREVIEW_IMPLEMENTATIONS.itervalues():
tmp = cls.ParseIssueURL(parsed_url)
if (tmp is not None):
return tmp
return fail_result
| 1,000,651,171,004,019,700
|
Parses the issue argument and returns _ParsedIssueNumberArgument.
|
git_cl.py
|
ParseIssueNumberArgument
|
wuyong2k/chromium_depot_tool
|
python
|
def ParseIssueNumberArgument(arg):
fail_result = _ParsedIssueNumberArgument()
if arg.isdigit():
return _ParsedIssueNumberArgument(issue=int(arg))
if (not arg.startswith('http')):
return fail_result
url = gclient_utils.UpgradeToHttps(arg)
try:
parsed_url = urlparse.urlparse(url)
except ValueError:
return fail_result
for cls in _CODEREVIEW_IMPLEMENTATIONS.itervalues():
tmp = cls.ParseIssueURL(parsed_url)
if (tmp is not None):
return tmp
return fail_result
|
def _add_codereview_select_options(parser):
'Appends --gerrit and --rietveld options to force specific codereview.'
parser.codereview_group = optparse.OptionGroup(parser, 'EXPERIMENTAL! Codereview override options')
parser.add_option_group(parser.codereview_group)
parser.codereview_group.add_option('--gerrit', action='store_true', help='Force the use of Gerrit for codereview')
parser.codereview_group.add_option('--rietveld', action='store_true', help='Force the use of Rietveld for codereview')
| -8,934,478,034,858,730,000
|
Appends --gerrit and --rietveld options to force specific codereview.
|
git_cl.py
|
_add_codereview_select_options
|
wuyong2k/chromium_depot_tool
|
python
|
def _add_codereview_select_options(parser):
parser.codereview_group = optparse.OptionGroup(parser, 'EXPERIMENTAL! Codereview override options')
parser.add_option_group(parser.codereview_group)
parser.codereview_group.add_option('--gerrit', action='store_true', help='Force the use of Gerrit for codereview')
parser.codereview_group.add_option('--rietveld', action='store_true', help='Force the use of Rietveld for codereview')
|
def get_approving_reviewers(props):
'Retrieves the reviewers that approved a CL from the issue properties with\n messages.\n\n Note that the list may contain reviewers that are not committer, thus are not\n considered by the CQ.\n '
return sorted(set((message['sender'] for message in props['messages'] if (message['approval'] and (message['sender'] in props['reviewers'])))))
| 8,787,014,851,502,826,000
|
Retrieves the reviewers that approved a CL from the issue properties with
messages.
Note that the list may contain reviewers that are not committer, thus are not
considered by the CQ.
|
git_cl.py
|
get_approving_reviewers
|
wuyong2k/chromium_depot_tool
|
python
|
def get_approving_reviewers(props):
'Retrieves the reviewers that approved a CL from the issue properties with\n messages.\n\n Note that the list may contain reviewers that are not committer, thus are not\n considered by the CQ.\n '
return sorted(set((message['sender'] for message in props['messages'] if (message['approval'] and (message['sender'] in props['reviewers'])))))
|
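A self-contained toy example for get_approving_reviewers above; the issue properties dict is made up but follows the shape the function expects ('messages' entries with 'sender' and 'approval', plus a 'reviewers' list).

# Standalone copy of get_approving_reviewers with illustrative issue properties.
def get_approving_reviewers(props):
    return sorted(set(message['sender'] for message in props['messages']
                      if message['approval'] and message['sender'] in props['reviewers']))

props = {
    'reviewers': ['alice@example.com', 'bob@example.com'],
    'messages': [
        {'sender': 'alice@example.com', 'approval': True},
        {'sender': 'bob@example.com', 'approval': False},
        {'sender': 'carol@example.com', 'approval': True},  # approved but not a listed reviewer
    ],
}
assert get_approving_reviewers(props) == ['alice@example.com']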
def FindCodereviewSettingsFile(filename='codereview.settings'):
"Finds the given file starting in the cwd and going up.\n\n Only looks up to the top of the repository unless an\n 'inherit-review-settings-ok' file exists in the root of the repository.\n "
inherit_ok_file = 'inherit-review-settings-ok'
cwd = os.getcwd()
root = settings.GetRoot()
if os.path.isfile(os.path.join(root, inherit_ok_file)):
root = '/'
while True:
if (filename in os.listdir(cwd)):
if os.path.isfile(os.path.join(cwd, filename)):
return open(os.path.join(cwd, filename))
if (cwd == root):
break
cwd = os.path.dirname(cwd)
| 4,084,806,995,077,205,500
|
Finds the given file starting in the cwd and going up.
Only looks up to the top of the repository unless an
'inherit-review-settings-ok' file exists in the root of the repository.
|
git_cl.py
|
FindCodereviewSettingsFile
|
wuyong2k/chromium_depot_tool
|
python
|
def FindCodereviewSettingsFile(filename='codereview.settings'):
"Finds the given file starting in the cwd and going up.\n\n Only looks up to the top of the repository unless an\n 'inherit-review-settings-ok' file exists in the root of the repository.\n "
inherit_ok_file = 'inherit-review-settings-ok'
cwd = os.getcwd()
root = settings.GetRoot()
if os.path.isfile(os.path.join(root, inherit_ok_file)):
root = '/'
while True:
if (filename in os.listdir(cwd)):
if os.path.isfile(os.path.join(cwd, filename)):
return open(os.path.join(cwd, filename))
if (cwd == root):
break
cwd = os.path.dirname(cwd)
|
def LoadCodereviewSettingsFromFile(fileobj):
'Parse a codereview.settings file and updates hooks.'
keyvals = gclient_utils.ParseCodereviewSettingsContent(fileobj.read())
def SetProperty(name, setting, unset_error_ok=False):
fullname = ('rietveld.' + name)
if (setting in keyvals):
RunGit(['config', fullname, keyvals[setting]])
else:
RunGit(['config', '--unset-all', fullname], error_ok=unset_error_ok)
SetProperty('server', 'CODE_REVIEW_SERVER')
SetProperty('cc', 'CC_LIST', unset_error_ok=True)
SetProperty('private', 'PRIVATE', unset_error_ok=True)
SetProperty('tree-status-url', 'STATUS', unset_error_ok=True)
SetProperty('viewvc-url', 'VIEW_VC', unset_error_ok=True)
SetProperty('bug-prefix', 'BUG_PREFIX', unset_error_ok=True)
SetProperty('cpplint-regex', 'LINT_REGEX', unset_error_ok=True)
SetProperty('force-https-commit-url', 'FORCE_HTTPS_COMMIT_URL', unset_error_ok=True)
SetProperty('cpplint-ignore-regex', 'LINT_IGNORE_REGEX', unset_error_ok=True)
SetProperty('project', 'PROJECT', unset_error_ok=True)
SetProperty('pending-ref-prefix', 'PENDING_REF_PREFIX', unset_error_ok=True)
SetProperty('run-post-upload-hook', 'RUN_POST_UPLOAD_HOOK', unset_error_ok=True)
if ('GERRIT_HOST' in keyvals):
RunGit(['config', 'gerrit.host', keyvals['GERRIT_HOST']])
if ('GERRIT_SQUASH_UPLOADS' in keyvals):
RunGit(['config', 'gerrit.squash-uploads', keyvals['GERRIT_SQUASH_UPLOADS']])
if ('GERRIT_SKIP_ENSURE_AUTHENTICATED' in keyvals):
RunGit(['config', 'gerrit.skip-ensure-authenticated', keyvals['GERRIT_SKIP_ENSURE_AUTHENTICATED']])
if (('PUSH_URL_CONFIG' in keyvals) and ('ORIGIN_URL_CONFIG' in keyvals)):
RunGit(['config', keyvals['PUSH_URL_CONFIG'], keyvals['ORIGIN_URL_CONFIG']])
| -4,062,880,140,684,171,000
|
Parse a codereview.settings file and updates hooks.
|
git_cl.py
|
LoadCodereviewSettingsFromFile
|
wuyong2k/chromium_depot_tool
|
python
|
def LoadCodereviewSettingsFromFile(fileobj):
keyvals = gclient_utils.ParseCodereviewSettingsContent(fileobj.read())
def SetProperty(name, setting, unset_error_ok=False):
fullname = ('rietveld.' + name)
if (setting in keyvals):
RunGit(['config', fullname, keyvals[setting]])
else:
RunGit(['config', '--unset-all', fullname], error_ok=unset_error_ok)
SetProperty('server', 'CODE_REVIEW_SERVER')
SetProperty('cc', 'CC_LIST', unset_error_ok=True)
SetProperty('private', 'PRIVATE', unset_error_ok=True)
SetProperty('tree-status-url', 'STATUS', unset_error_ok=True)
SetProperty('viewvc-url', 'VIEW_VC', unset_error_ok=True)
SetProperty('bug-prefix', 'BUG_PREFIX', unset_error_ok=True)
SetProperty('cpplint-regex', 'LINT_REGEX', unset_error_ok=True)
SetProperty('force-https-commit-url', 'FORCE_HTTPS_COMMIT_URL', unset_error_ok=True)
SetProperty('cpplint-ignore-regex', 'LINT_IGNORE_REGEX', unset_error_ok=True)
SetProperty('project', 'PROJECT', unset_error_ok=True)
SetProperty('pending-ref-prefix', 'PENDING_REF_PREFIX', unset_error_ok=True)
SetProperty('run-post-upload-hook', 'RUN_POST_UPLOAD_HOOK', unset_error_ok=True)
if ('GERRIT_HOST' in keyvals):
RunGit(['config', 'gerrit.host', keyvals['GERRIT_HOST']])
if ('GERRIT_SQUASH_UPLOADS' in keyvals):
RunGit(['config', 'gerrit.squash-uploads', keyvals['GERRIT_SQUASH_UPLOADS']])
if ('GERRIT_SKIP_ENSURE_AUTHENTICATED' in keyvals):
RunGit(['config', 'gerrit.skip-ensure-authenticated', keyvals['GERRIT_SKIP_ENSURE_AUTHENTICATED']])
if (('PUSH_URL_CONFIG' in keyvals) and ('ORIGIN_URL_CONFIG' in keyvals)):
RunGit(['config', keyvals['PUSH_URL_CONFIG'], keyvals['ORIGIN_URL_CONFIG']])
|
def urlretrieve(source, destination):
"urllib is broken for SSL connections via a proxy therefore we\n can't use urllib.urlretrieve()."
with open(destination, 'w') as f:
f.write(urllib2.urlopen(source).read())
| -8,312,412,582,893,265,000
|
urllib is broken for SSL connections via a proxy therefore we
can't use urllib.urlretrieve().
|
git_cl.py
|
urlretrieve
|
wuyong2k/chromium_depot_tool
|
python
|
def urlretrieve(source, destination):
"urllib is broken for SSL connections via a proxy therefore we\n can't use urllib.urlretrieve()."
with open(destination, 'w') as f:
f.write(urllib2.urlopen(source).read())
|
def hasSheBang(fname):
'Checks fname is a #! script.'
with open(fname) as f:
return f.read(2).startswith('#!')
| -3,424,342,128,742,994,400
|
Checks fname is a #! script.
|
git_cl.py
|
hasSheBang
|
wuyong2k/chromium_depot_tool
|
python
|
def hasSheBang(fname):
with open(fname) as f:
return f.read(2).startswith('#!')
|
def DownloadGerritHook(force):
'Download and install Gerrit commit-msg hook.\n\n Args:\n force: True to update hooks. False to install hooks if not present.\n '
if (not settings.GetIsGerrit()):
return
src = 'https://gerrit-review.googlesource.com/tools/hooks/commit-msg'
dst = os.path.join(settings.GetRoot(), '.git', 'hooks', 'commit-msg')
if (not os.access(dst, os.X_OK)):
if os.path.exists(dst):
if (not force):
return
try:
urlretrieve(src, dst)
if (not hasSheBang(dst)):
DieWithError(('Not a script: %s\nYou need to download from\n%s\ninto .git/hooks/commit-msg and chmod +x .git/hooks/commit-msg' % (dst, src)))
os.chmod(dst, ((stat.S_IRUSR | stat.S_IWUSR) | stat.S_IXUSR))
except Exception:
if os.path.exists(dst):
os.remove(dst)
DieWithError(('\nFailed to download hooks.\nYou need to download from\n%s\ninto .git/hooks/commit-msg and chmod +x .git/hooks/commit-msg' % src))
| 6,228,536,019,035,251,000
|
Download and install Gerrit commit-msg hook.
Args:
force: True to update hooks. False to install hooks if not present.
|
git_cl.py
|
DownloadGerritHook
|
wuyong2k/chromium_depot_tool
|
python
|
def DownloadGerritHook(force):
'Download and install Gerrit commit-msg hook.\n\n Args:\n force: True to update hooks. False to install hooks if not present.\n '
if (not settings.GetIsGerrit()):
return
src = 'https://gerrit-review.googlesource.com/tools/hooks/commit-msg'
dst = os.path.join(settings.GetRoot(), '.git', 'hooks', 'commit-msg')
if (not os.access(dst, os.X_OK)):
if os.path.exists(dst):
if (not force):
return
try:
urlretrieve(src, dst)
if (not hasSheBang(dst)):
DieWithError(('Not a script: %s\nYou need to download from\n%s\ninto .git/hooks/commit-msg and chmod +x .git/hooks/commit-msg' % (dst, src)))
os.chmod(dst, ((stat.S_IRUSR | stat.S_IWUSR) | stat.S_IXUSR))
except Exception:
if os.path.exists(dst):
os.remove(dst)
DieWithError(('\nFailed to download hooks.\nYou need to download from\n%s\ninto .git/hooks/commit-msg and chmod +x .git/hooks/commit-msg' % src))
|
def GetRietveldCodereviewSettingsInteractively():
'Prompt the user for settings.'
server = settings.GetDefaultServerUrl(error_ok=True)
prompt = 'Rietveld server (host[:port])'
prompt += (' [%s]' % (server or DEFAULT_SERVER))
newserver = ask_for_data((prompt + ':'))
if ((not server) and (not newserver)):
newserver = DEFAULT_SERVER
if newserver:
newserver = gclient_utils.UpgradeToHttps(newserver)
if (newserver != server):
RunGit(['config', 'rietveld.server', newserver])
def SetProperty(initial, caption, name, is_url):
prompt = caption
if initial:
prompt += (' ("x" to clear) [%s]' % initial)
new_val = ask_for_data((prompt + ':'))
if (new_val == 'x'):
RunGit(['config', '--unset-all', ('rietveld.' + name)], error_ok=True)
elif new_val:
if is_url:
new_val = gclient_utils.UpgradeToHttps(new_val)
if (new_val != initial):
RunGit(['config', ('rietveld.' + name), new_val])
SetProperty(settings.GetDefaultCCList(), 'CC list', 'cc', False)
SetProperty(settings.GetDefaultPrivateFlag(), 'Private flag (rietveld only)', 'private', False)
SetProperty(settings.GetTreeStatusUrl(error_ok=True), 'Tree status URL', 'tree-status-url', False)
SetProperty(settings.GetViewVCUrl(), 'ViewVC URL', 'viewvc-url', True)
SetProperty(settings.GetBugPrefix(), 'Bug Prefix', 'bug-prefix', False)
SetProperty(settings.GetRunPostUploadHook(), 'Run Post Upload Hook', 'run-post-upload-hook', False)
| 3,663,493,880,168,690,700
|
Prompt the user for settings.
|
git_cl.py
|
GetRietveldCodereviewSettingsInteractively
|
wuyong2k/chromium_depot_tool
|
python
|
def GetRietveldCodereviewSettingsInteractively():
server = settings.GetDefaultServerUrl(error_ok=True)
prompt = 'Rietveld server (host[:port])'
prompt += (' [%s]' % (server or DEFAULT_SERVER))
newserver = ask_for_data((prompt + ':'))
if ((not server) and (not newserver)):
newserver = DEFAULT_SERVER
if newserver:
newserver = gclient_utils.UpgradeToHttps(newserver)
if (newserver != server):
RunGit(['config', 'rietveld.server', newserver])
def SetProperty(initial, caption, name, is_url):
prompt = caption
if initial:
prompt += (' ("x" to clear) [%s]' % initial)
new_val = ask_for_data((prompt + ':'))
if (new_val == 'x'):
RunGit(['config', '--unset-all', ('rietveld.' + name)], error_ok=True)
elif new_val:
if is_url:
new_val = gclient_utils.UpgradeToHttps(new_val)
if (new_val != initial):
RunGit(['config', ('rietveld.' + name), new_val])
SetProperty(settings.GetDefaultCCList(), 'CC list', 'cc', False)
SetProperty(settings.GetDefaultPrivateFlag(), 'Private flag (rietveld only)', 'private', False)
SetProperty(settings.GetTreeStatusUrl(error_ok=True), 'Tree status URL', 'tree-status-url', False)
SetProperty(settings.GetViewVCUrl(), 'ViewVC URL', 'viewvc-url', True)
SetProperty(settings.GetBugPrefix(), 'Bug Prefix', 'bug-prefix', False)
SetProperty(settings.GetRunPostUploadHook(), 'Run Post Upload Hook', 'run-post-upload-hook', False)
|
@subcommand.usage('[repo root containing codereview.settings]')
def CMDconfig(parser, args):
'Edits configuration for this tree.'
print('WARNING: git cl config works for Rietveld only.\nFor Gerrit, see http://crbug.com/603116.')
parser.add_option('--activate-update', action='store_true', help='activate auto-updating [rietveld] section in .git/config')
parser.add_option('--deactivate-update', action='store_true', help='deactivate auto-updating [rietveld] section in .git/config')
(options, args) = parser.parse_args(args)
if options.deactivate_update:
RunGit(['config', 'rietveld.autoupdate', 'false'])
return
if options.activate_update:
RunGit(['config', '--unset', 'rietveld.autoupdate'])
return
if (len(args) == 0):
GetRietveldCodereviewSettingsInteractively()
return 0
url = args[0]
if (not url.endswith('codereview.settings')):
url = os.path.join(url, 'codereview.settings')
LoadCodereviewSettingsFromFile(urllib2.urlopen(url))
return 0
| 77,438,087,323,842,530
|
Edits configuration for this tree.
|
git_cl.py
|
CMDconfig
|
wuyong2k/chromium_depot_tool
|
python
|
@subcommand.usage('[repo root containing codereview.settings]')
def CMDconfig(parser, args):
print('WARNING: git cl config works for Rietveld only.\nFor Gerrit, see http://crbug.com/603116.')
parser.add_option('--activate-update', action='store_true', help='activate auto-updating [rietveld] section in .git/config')
parser.add_option('--deactivate-update', action='store_true', help='deactivate auto-updating [rietveld] section in .git/config')
(options, args) = parser.parse_args(args)
if options.deactivate_update:
RunGit(['config', 'rietveld.autoupdate', 'false'])
return
if options.activate_update:
RunGit(['config', '--unset', 'rietveld.autoupdate'])
return
if (len(args) == 0):
GetRietveldCodereviewSettingsInteractively()
return 0
url = args[0]
if (not url.endswith('codereview.settings')):
url = os.path.join(url, 'codereview.settings')
LoadCodereviewSettingsFromFile(urllib2.urlopen(url))
return 0
|
def CMDbaseurl(parser, args):
'Gets or sets base-url for this branch.'
branchref = RunGit(['symbolic-ref', 'HEAD']).strip()
branch = ShortBranchName(branchref)
(_, args) = parser.parse_args(args)
if (not args):
print('Current base-url:')
return RunGit(['config', ('branch.%s.base-url' % branch)], error_ok=False).strip()
else:
print(('Setting base-url to %s' % args[0]))
return RunGit(['config', ('branch.%s.base-url' % branch), args[0]], error_ok=False).strip()
| -1,379,295,837,932,834,600
|
Gets or sets base-url for this branch.
|
git_cl.py
|
CMDbaseurl
|
wuyong2k/chromium_depot_tool
|
python
|
def CMDbaseurl(parser, args):
branchref = RunGit(['symbolic-ref', 'HEAD']).strip()
branch = ShortBranchName(branchref)
(_, args) = parser.parse_args(args)
if (not args):
print('Current base-url:')
return RunGit(['config', ('branch.%s.base-url' % branch)], error_ok=False).strip()
else:
print(('Setting base-url to %s' % args[0]))
return RunGit(['config', ('branch.%s.base-url' % branch), args[0]], error_ok=False).strip()
|
def color_for_status(status):
'Maps a Changelist status to color, for CMDstatus and other tools.'
return {'unsent': Fore.RED, 'waiting': Fore.BLUE, 'reply': Fore.YELLOW, 'lgtm': Fore.GREEN, 'commit': Fore.MAGENTA, 'closed': Fore.CYAN, 'error': Fore.WHITE}.get(status, Fore.WHITE)
| 813,356,962,964,108,900
|
Maps a Changelist status to color, for CMDstatus and other tools.
|
git_cl.py
|
color_for_status
|
wuyong2k/chromium_depot_tool
|
python
|
def color_for_status(status):
return {'unsent': Fore.RED, 'waiting': Fore.BLUE, 'reply': Fore.YELLOW, 'lgtm': Fore.GREEN, 'commit': Fore.MAGENTA, 'closed': Fore.CYAN, 'error': Fore.WHITE}.get(status, Fore.WHITE)
|
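A minimal usage sketch of the status-to-color mapping above. The demo_branch_line helper and the sample branch/status values are illustrative only; it assumes color_for_status can be imported from git_cl and that colorama is available.

from colorama import Fore
from git_cl import color_for_status

def demo_branch_line(branch, status):
  # Wrap a branch line in the color chosen for its status, the way CMDstatus does on a TTY.
  return '%s%s (%s)%s' % (color_for_status(status), branch, status, Fore.RESET)

print(demo_branch_line('feature-x', 'lgtm'))     # rendered green on a color terminal
print(demo_branch_line('feature-y', 'unsent'))   # rendered red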
def get_cl_statuses(changes, fine_grained, max_processes=None):
"Returns a blocking iterable of (cl, status) for given branches.\n\n If fine_grained is true, this will fetch CL statuses from the server.\n Otherwise, simply indicate if there's a matching url for the given branches.\n\n If max_processes is specified, it is used as the maximum number of processes\n to spawn to fetch CL status from the server. Otherwise 1 process per branch is\n spawned.\n\n See GetStatus() for a list of possible statuses.\n "
upload.verbosity = 0
if fine_grained:
if changes:
fetch = (lambda cl: (cl, cl.GetStatus()))
(yield fetch(changes[0]))
if (not changes):
return
changes_to_fetch = changes[1:]
pool = ThreadPool((min(max_processes, len(changes_to_fetch)) if (max_processes is not None) else len(changes_to_fetch)))
fetched_cls = set()
it = pool.imap_unordered(fetch, changes_to_fetch).__iter__()
while True:
try:
row = it.next(timeout=5)
except multiprocessing.TimeoutError:
break
fetched_cls.add(row[0])
(yield row)
for cl in (set(changes_to_fetch) - fetched_cls):
(yield (cl, 'error'))
else:
for cl in changes:
(yield (cl, ('waiting' if cl.GetIssueURL() else 'error')))
| 5,757,548,687,413,044,000
|
Returns a blocking iterable of (cl, status) for given branches.
If fine_grained is true, this will fetch CL statuses from the server.
Otherwise, simply indicate if there's a matching url for the given branches.
If max_processes is specified, it is used as the maximum number of processes
to spawn to fetch CL status from the server. Otherwise 1 process per branch is
spawned.
See GetStatus() for a list of possible statuses.
|
git_cl.py
|
get_cl_statuses
|
wuyong2k/chromium_depot_tool
|
python
|
def get_cl_statuses(changes, fine_grained, max_processes=None):
"Returns a blocking iterable of (cl, status) for given branches.\n\n If fine_grained is true, this will fetch CL statuses from the server.\n Otherwise, simply indicate if there's a matching url for the given branches.\n\n If max_processes is specified, it is used as the maximum number of processes\n to spawn to fetch CL status from the server. Otherwise 1 process per branch is\n spawned.\n\n See GetStatus() for a list of possible statuses.\n "
upload.verbosity = 0
if fine_grained:
if changes:
fetch = (lambda cl: (cl, cl.GetStatus()))
(yield fetch(changes[0]))
if (not changes):
return
changes_to_fetch = changes[1:]
pool = ThreadPool((min(max_processes, len(changes_to_fetch)) if (max_processes is not None) else len(changes_to_fetch)))
fetched_cls = set()
it = pool.imap_unordered(fetch, changes_to_fetch).__iter__()
while True:
try:
row = it.next(timeout=5)
except multiprocessing.TimeoutError:
break
fetched_cls.add(row[0])
(yield row)
for cl in (set(changes_to_fetch) - fetched_cls):
(yield (cl, 'error'))
else:
for cl in changes:
(yield (cl, ('waiting' if cl.GetIssueURL() else 'error')))
|
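A hedged, self-contained sketch of the producer pattern get_cl_statuses uses in its fine-grained path: fan the status fetches out over a thread pool, stop waiting after a 5-second timeout, and report anything still unfetched as 'error'. The fetch_statuses helper and the stub fetch function are illustrative only, not part of git_cl.py.

import multiprocessing
from multiprocessing.pool import ThreadPool

def fetch_statuses(items, fetch, max_processes=None):
  if not items:
    return
  pool = ThreadPool(min(max_processes, len(items)) if max_processes is not None
                    else len(items))
  fetched = set()
  it = pool.imap_unordered(lambda i: (i, fetch(i)), items).__iter__()
  while True:
    try:
      item, status = it.next(timeout=5)  # pool iterators accept a per-item timeout
    except (multiprocessing.TimeoutError, StopIteration):
      break
    fetched.add(item)
    yield item, status
  for item in set(items) - fetched:      # whatever timed out is surfaced as an error
    yield item, 'error'

for branch, status in fetch_statuses(['branch-a', 'branch-b'], lambda b: 'waiting'):
  print(branch, status)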
def upload_branch_deps(cl, args):
'Uploads CLs of local branches that are dependents of the current branch.\n\n If the local branch dependency tree looks like:\n test1 -> test2.1 -> test3.1\n -> test3.2\n -> test2.2 -> test3.3\n\n and you run "git cl upload --dependencies" from test1 then "git cl upload" is\n run on the dependent branches in this order:\n test2.1, test3.1, test3.2, test2.2, test3.3\n\n Note: This function does not rebase your local dependent branches. Use it when\n you make a change to the parent branch that will not conflict with its\n dependent branches, and you would like their dependencies updated in\n Rietveld.\n '
if git_common.is_dirty_git_tree('upload-branch-deps'):
return 1
root_branch = cl.GetBranch()
if (root_branch is None):
DieWithError("Can't find dependent branches from detached HEAD state. Get on a branch!")
if ((not cl.GetIssue()) or (not cl.GetPatchset())):
DieWithError('Current branch does not have an uploaded CL. We cannot set patchset dependencies without an uploaded CL.')
branches = RunGit(['for-each-ref', '--format=%(refname:short) %(upstream:short)', 'refs/heads'])
if (not branches):
print('No local branches found.')
return 0
tracked_to_dependents = collections.defaultdict(list)
for b in branches.splitlines():
tokens = b.split()
if (len(tokens) == 2):
(branch_name, tracked) = tokens
tracked_to_dependents[tracked].append(branch_name)
print()
print(('The dependent local branches of %s are:' % root_branch))
dependents = []
def traverse_dependents_preorder(branch, padding=''):
dependents_to_process = tracked_to_dependents.get(branch, [])
padding += ' '
for dependent in dependents_to_process:
print(('%s%s' % (padding, dependent)))
dependents.append(dependent)
traverse_dependents_preorder(dependent, padding)
traverse_dependents_preorder(root_branch)
print()
if (not dependents):
print(('There are no dependent local branches for %s' % root_branch))
return 0
print('This command will checkout all dependent branches and run "git cl upload".')
ask_for_data('[Press enter to continue or ctrl-C to quit]')
if (not cl.IsGerrit()):
args.extend(['-t', 'Updated patchset dependency'])
failures = {}
try:
for dependent_branch in dependents:
print()
print('--------------------------------------')
print(('Running "git cl upload" from %s:' % dependent_branch))
RunGit(['checkout', '-q', dependent_branch])
print()
try:
if (CMDupload(OptionParser(), args) != 0):
print(('Upload failed for %s!' % dependent_branch))
failures[dependent_branch] = 1
except:
failures[dependent_branch] = 1
print()
finally:
RunGit(['checkout', '-q', root_branch])
print()
print('Upload complete for dependent branches!')
for dependent_branch in dependents:
upload_status = ('failed' if failures.get(dependent_branch) else 'succeeded')
print((' %s : %s' % (dependent_branch, upload_status)))
print()
return 0
| -9,003,330,700,420,991,000
|
Uploads CLs of local branches that are dependents of the current branch.
If the local branch dependency tree looks like:
test1 -> test2.1 -> test3.1
-> test3.2
-> test2.2 -> test3.3
and you run "git cl upload --dependencies" from test1 then "git cl upload" is
run on the dependent branches in this order:
test2.1, test3.1, test3.2, test2.2, test3.3
Note: This function does not rebase your local dependent branches. Use it when
you make a change to the parent branch that will not conflict with its
dependent branches, and you would like their dependencies updated in
Rietveld.
|
git_cl.py
|
upload_branch_deps
|
wuyong2k/chromium_depot_tool
|
python
|
def upload_branch_deps(cl, args):
'Uploads CLs of local branches that are dependents of the current branch.\n\n If the local branch dependency tree looks like:\n test1 -> test2.1 -> test3.1\n -> test3.2\n -> test2.2 -> test3.3\n\n and you run "git cl upload --dependencies" from test1 then "git cl upload" is\n run on the dependent branches in this order:\n test2.1, test3.1, test3.2, test2.2, test3.3\n\n Note: This function does not rebase your local dependent branches. Use it when\n you make a change to the parent branch that will not conflict with its\n dependent branches, and you would like their dependencies updated in\n Rietveld.\n '
if git_common.is_dirty_git_tree('upload-branch-deps'):
return 1
root_branch = cl.GetBranch()
if (root_branch is None):
DieWithError("Can't find dependent branches from detached HEAD state. Get on a branch!")
if ((not cl.GetIssue()) or (not cl.GetPatchset())):
DieWithError('Current branch does not have an uploaded CL. We cannot set patchset dependencies without an uploaded CL.')
branches = RunGit(['for-each-ref', '--format=%(refname:short) %(upstream:short)', 'refs/heads'])
if (not branches):
print('No local branches found.')
return 0
tracked_to_dependents = collections.defaultdict(list)
for b in branches.splitlines():
tokens = b.split()
if (len(tokens) == 2):
(branch_name, tracked) = tokens
tracked_to_dependents[tracked].append(branch_name)
print()
print(('The dependent local branches of %s are:' % root_branch))
dependents = []
  def traverse_dependents_preorder(branch, padding=''):
dependents_to_process = tracked_to_dependents.get(branch, [])
padding += ' '
for dependent in dependents_to_process:
print(('%s%s' % (padding, dependent)))
dependents.append(dependent)
traverse_dependents_preorder(dependent, padding)
traverse_dependents_preorder(root_branch)
print()
if (not dependents):
print(('There are no dependent local branches for %s' % root_branch))
return 0
print('This command will checkout all dependent branches and run "git cl upload".')
ask_for_data('[Press enter to continue or ctrl-C to quit]')
if (not cl.IsGerrit()):
args.extend(['-t', 'Updated patchset dependency'])
failures = {}
try:
for dependent_branch in dependents:
print()
print('--------------------------------------')
print(('Running "git cl upload" from %s:' % dependent_branch))
RunGit(['checkout', '-q', dependent_branch])
print()
try:
if (CMDupload(OptionParser(), args) != 0):
print(('Upload failed for %s!' % dependent_branch))
failures[dependent_branch] = 1
except:
failures[dependent_branch] = 1
print()
finally:
RunGit(['checkout', '-q', root_branch])
print()
print('Upload complete for dependent branches!')
for dependent_branch in dependents:
upload_status = ('failed' if failures.get(dependent_branch) else 'succeeded')
print((' %s : %s' % (dependent_branch, upload_status)))
print()
return 0
|
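A small illustrative walk (not part of git_cl.py) of the preorder traversal described in the docstring above, using a toy branch-tracking map; it reproduces the upload order test2.1, test3.1, test3.2, test2.2, test3.3.

import collections

tracked_to_dependents = collections.defaultdict(list, {
    'test1': ['test2.1', 'test2.2'],
    'test2.1': ['test3.1', 'test3.2'],
    'test2.2': ['test3.3'],
})

order = []
def traverse(branch):
  for dependent in tracked_to_dependents.get(branch, []):
    order.append(dependent)
    traverse(dependent)

traverse('test1')
print(order)  # ['test2.1', 'test3.1', 'test3.2', 'test2.2', 'test3.3']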
def CMDarchive(parser, args):
'Archives and deletes branches associated with closed changelists.'
parser.add_option('-j', '--maxjobs', action='store', type=int, help='The maximum number of jobs to use when retrieving review status')
parser.add_option('-f', '--force', action='store_true', help='Bypasses the confirmation prompt.')
auth.add_auth_options(parser)
(options, args) = parser.parse_args(args)
if args:
parser.error(('Unsupported args: %s' % ' '.join(args)))
auth_config = auth.extract_auth_config_from_options(options)
branches = RunGit(['for-each-ref', '--format=%(refname)', 'refs/heads'])
if (not branches):
return 0
print('Finding all branches associated with closed issues...')
changes = [Changelist(branchref=b, auth_config=auth_config) for b in branches.splitlines()]
alignment = max(5, max((len(c.GetBranch()) for c in changes)))
statuses = get_cl_statuses(changes, fine_grained=True, max_processes=options.maxjobs)
proposal = [(cl.GetBranch(), ('git-cl-archived-%s-%s' % (cl.GetIssue(), cl.GetBranch()))) for (cl, status) in statuses if (status == 'closed')]
proposal.sort()
if (not proposal):
print('No branches with closed codereview issues found.')
return 0
current_branch = GetCurrentBranch()
print('\nBranches with closed issues that will be archived:\n')
print(('%*s | %s' % (alignment, 'Branch name', 'Archival tag name')))
for next_item in proposal:
print(('%*s %s' % (alignment, next_item[0], next_item[1])))
if any(((branch == current_branch) for (branch, _) in proposal)):
print(("You are currently on a branch '%s' which is associated with a closed codereview issue, so archive cannot proceed. Please checkout another branch and run this command again." % current_branch))
return 1
if (not options.force):
answer = ask_for_data('\nProceed with deletion (Y/n)? ').lower()
if (answer not in ('y', '')):
print('Aborted.')
return 1
for (branch, tagname) in proposal:
RunGit(['tag', tagname, branch])
RunGit(['branch', '-D', branch])
print("\nJob's done!")
return 0
| -6,254,498,161,913,117,000
|
Archives and deletes branches associated with closed changelists.
|
git_cl.py
|
CMDarchive
|
wuyong2k/chromium_depot_tool
|
python
|
def CMDarchive(parser, args):
parser.add_option('-j', '--maxjobs', action='store', type=int, help='The maximum number of jobs to use when retrieving review status')
parser.add_option('-f', '--force', action='store_true', help='Bypasses the confirmation prompt.')
auth.add_auth_options(parser)
(options, args) = parser.parse_args(args)
if args:
parser.error(('Unsupported args: %s' % ' '.join(args)))
auth_config = auth.extract_auth_config_from_options(options)
branches = RunGit(['for-each-ref', '--format=%(refname)', 'refs/heads'])
if (not branches):
return 0
print('Finding all branches associated with closed issues...')
changes = [Changelist(branchref=b, auth_config=auth_config) for b in branches.splitlines()]
alignment = max(5, max((len(c.GetBranch()) for c in changes)))
statuses = get_cl_statuses(changes, fine_grained=True, max_processes=options.maxjobs)
proposal = [(cl.GetBranch(), ('git-cl-archived-%s-%s' % (cl.GetIssue(), cl.GetBranch()))) for (cl, status) in statuses if (status == 'closed')]
proposal.sort()
if (not proposal):
print('No branches with closed codereview issues found.')
return 0
current_branch = GetCurrentBranch()
print('\nBranches with closed issues that will be archived:\n')
print(('%*s | %s' % (alignment, 'Branch name', 'Archival tag name')))
for next_item in proposal:
print(('%*s %s' % (alignment, next_item[0], next_item[1])))
if any(((branch == current_branch) for (branch, _) in proposal)):
print(("You are currently on a branch '%s' which is associated with a closed codereview issue, so archive cannot proceed. Please checkout another branch and run this command again." % current_branch))
return 1
if (not options.force):
answer = ask_for_data('\nProceed with deletion (Y/n)? ').lower()
    if (answer not in ('y', '')):
print('Aborted.')
return 1
for (branch, tagname) in proposal:
RunGit(['tag', tagname, branch])
RunGit(['branch', '-D', branch])
print("\nJob's done!")
return 0
|
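A hypothetical illustration of the archival step above for a single closed branch; the issue number and branch name are made up, and it assumes it runs inside a git checkout that actually has such a branch.

import subprocess

issue, branch = 123456, 'old-feature'
tagname = 'git-cl-archived-%s-%s' % (issue, branch)
subprocess.check_call(['git', 'tag', tagname, branch])  # keep the old history reachable via a tag
subprocess.check_call(['git', 'branch', '-D', branch])  # then delete the local branch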
def CMDstatus(parser, args):
"Show status of changelists.\n\n Colors are used to tell the state of the CL unless --fast is used:\n - Red not sent for review or broken\n - Blue waiting for review\n - Yellow waiting for you to reply to review\n - Green LGTM'ed\n - Magenta in the commit queue\n - Cyan was committed, branch can be deleted\n\n Also see 'git cl comments'.\n "
parser.add_option('--field', help='print only specific field (desc|id|patch|url)')
parser.add_option('-f', '--fast', action='store_true', help='Do not retrieve review status')
parser.add_option('-j', '--maxjobs', action='store', type=int, help='The maximum number of jobs to use when retrieving review status')
auth.add_auth_options(parser)
(options, args) = parser.parse_args(args)
if args:
parser.error(('Unsupported args: %s' % args))
auth_config = auth.extract_auth_config_from_options(options)
if options.field:
cl = Changelist(auth_config=auth_config)
if options.field.startswith('desc'):
print(cl.GetDescription())
elif (options.field == 'id'):
issueid = cl.GetIssue()
if issueid:
print(issueid)
elif (options.field == 'patch'):
patchset = cl.GetPatchset()
if patchset:
print(patchset)
elif (options.field == 'url'):
url = cl.GetIssueURL()
if url:
print(url)
return 0
branches = RunGit(['for-each-ref', '--format=%(refname)', 'refs/heads'])
if (not branches):
print('No local branch found.')
return 0
changes = [Changelist(branchref=b, auth_config=auth_config) for b in branches.splitlines()]
print('Branches associated with reviews:')
output = get_cl_statuses(changes, fine_grained=(not options.fast), max_processes=options.maxjobs)
branch_statuses = {}
alignment = max(5, max((len(ShortBranchName(c.GetBranch())) for c in changes)))
for cl in sorted(changes, key=(lambda c: c.GetBranch())):
branch = cl.GetBranch()
while (branch not in branch_statuses):
(c, status) = output.next()
branch_statuses[c.GetBranch()] = status
status = branch_statuses.pop(branch)
url = cl.GetIssueURL()
if (url and ((not status) or (status == 'error'))):
url += ' (broken)'
color = color_for_status(status)
reset = Fore.RESET
if (not setup_color.IS_TTY):
color = ''
reset = ''
status_str = (('(%s)' % status) if status else '')
print((' %*s : %s%s %s%s' % (alignment, ShortBranchName(branch), color, url, status_str, reset)))
cl = Changelist(auth_config=auth_config)
print()
print('Current branch:')
print(cl.GetBranch())
if (not cl.GetIssue()):
print('No issue assigned.')
return 0
print(('Issue number: %s (%s)' % (cl.GetIssue(), cl.GetIssueURL())))
if (not options.fast):
print('Issue description:')
print(cl.GetDescription(pretty=True))
return 0
| 7,393,758,512,050,732,000
|
Show status of changelists.
Colors are used to tell the state of the CL unless --fast is used:
- Red not sent for review or broken
- Blue waiting for review
- Yellow waiting for you to reply to review
- Green LGTM'ed
- Magenta in the commit queue
- Cyan was committed, branch can be deleted
Also see 'git cl comments'.
|
git_cl.py
|
CMDstatus
|
wuyong2k/chromium_depot_tool
|
python
|
def CMDstatus(parser, args):
"Show status of changelists.\n\n Colors are used to tell the state of the CL unless --fast is used:\n - Red not sent for review or broken\n - Blue waiting for review\n - Yellow waiting for you to reply to review\n - Green LGTM'ed\n - Magenta in the commit queue\n - Cyan was committed, branch can be deleted\n\n Also see 'git cl comments'.\n "
parser.add_option('--field', help='print only specific field (desc|id|patch|url)')
parser.add_option('-f', '--fast', action='store_true', help='Do not retrieve review status')
parser.add_option('-j', '--maxjobs', action='store', type=int, help='The maximum number of jobs to use when retrieving review status')
auth.add_auth_options(parser)
(options, args) = parser.parse_args(args)
if args:
parser.error(('Unsupported args: %s' % args))
auth_config = auth.extract_auth_config_from_options(options)
if options.field:
cl = Changelist(auth_config=auth_config)
if options.field.startswith('desc'):
print(cl.GetDescription())
elif (options.field == 'id'):
issueid = cl.GetIssue()
if issueid:
print(issueid)
elif (options.field == 'patch'):
patchset = cl.GetPatchset()
if patchset:
print(patchset)
elif (options.field == 'url'):
url = cl.GetIssueURL()
if url:
print(url)
return 0
branches = RunGit(['for-each-ref', '--format=%(refname)', 'refs/heads'])
if (not branches):
print('No local branch found.')
return 0
changes = [Changelist(branchref=b, auth_config=auth_config) for b in branches.splitlines()]
print('Branches associated with reviews:')
output = get_cl_statuses(changes, fine_grained=(not options.fast), max_processes=options.maxjobs)
branch_statuses = {}
alignment = max(5, max((len(ShortBranchName(c.GetBranch())) for c in changes)))
for cl in sorted(changes, key=(lambda c: c.GetBranch())):
branch = cl.GetBranch()
while (branch not in branch_statuses):
(c, status) = output.next()
branch_statuses[c.GetBranch()] = status
status = branch_statuses.pop(branch)
url = cl.GetIssueURL()
if (url and ((not status) or (status == 'error'))):
url += ' (broken)'
color = color_for_status(status)
reset = Fore.RESET
if (not setup_color.IS_TTY):
      color = ''
      reset = ''
    status_str = (('(%s)' % status) if status else '')
print((' %*s : %s%s %s%s' % (alignment, ShortBranchName(branch), color, url, status_str, reset)))
cl = Changelist(auth_config=auth_config)
print()
print('Current branch:')
print(cl.GetBranch())
if (not cl.GetIssue()):
print('No issue assigned.')
return 0
print(('Issue number: %s (%s)' % (cl.GetIssue(), cl.GetIssueURL())))
if (not options.fast):
print('Issue description:')
print(cl.GetDescription(pretty=True))
return 0
|
def colorize_CMDstatus_doc():
'To be called once in main() to add colors to git cl status help.'
colors = [i for i in dir(Fore) if i[0].isupper()]
def colorize_line(line):
for color in colors:
if (color in line.upper()):
indent = ((len(line) - len(line.lstrip(' '))) + 1)
return (((line[:indent] + getattr(Fore, color)) + line[indent:]) + Fore.RESET)
return line
lines = CMDstatus.__doc__.splitlines()
CMDstatus.__doc__ = '\n'.join((colorize_line(l) for l in lines))
| 4,285,850,215,364,875,300
|
To be called once in main() to add colors to git cl status help.
|
git_cl.py
|
colorize_CMDstatus_doc
|
wuyong2k/chromium_depot_tool
|
python
|
def colorize_CMDstatus_doc():
colors = [i for i in dir(Fore) if i[0].isupper()]
def colorize_line(line):
for color in colors:
if (color in line.upper()):
indent = ((len(line) - len(line.lstrip(' '))) + 1)
return (((line[:indent] + getattr(Fore, color)) + line[indent:]) + Fore.RESET)
return line
lines = CMDstatus.__doc__.splitlines()
CMDstatus.__doc__ = '\n'.join((colorize_line(l) for l in lines))
|
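A hedged illustration of the per-line transformation performed above: the indentation plus the list dash stay outside the escape codes, and the rest of the line is wrapped in the matching colorama color. The sample help line is made up.

from colorama import Fore

line = '    - Red      not sent for review or broken'
indent = (len(line) - len(line.lstrip(' '))) + 1   # leading spaces plus the dash stay uncolored
colored = line[:indent] + Fore.RED + line[indent:] + Fore.RESET
print(repr(colored))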
@subcommand.usage('[issue_number]')
def CMDissue(parser, args):
'Sets or displays the current code review issue number.\n\n Pass issue number 0 to clear the current issue.\n '
parser.add_option('-r', '--reverse', action='store_true', help='Lookup the branch(es) for the specified issues. If no issues are specified, all branches with mapped issues will be listed.')
_add_codereview_select_options(parser)
(options, args) = parser.parse_args(args)
_process_codereview_select_options(parser, options)
if options.reverse:
branches = RunGit(['for-each-ref', 'refs/heads', '--format=%(refname:short)']).splitlines()
issue_branch_map = {}
for branch in branches:
cl = Changelist(branchref=branch)
issue_branch_map.setdefault(cl.GetIssue(), []).append(branch)
if (not args):
args = sorted(issue_branch_map.iterkeys())
for issue in args:
if (not issue):
continue
print(('Branch for issue number %s: %s' % (issue, ', '.join((issue_branch_map.get(int(issue)) or ('None',))))))
else:
cl = Changelist(codereview=options.forced_codereview)
if (len(args) > 0):
try:
issue = int(args[0])
except ValueError:
DieWithError('Pass a number to set the issue or none to list it.\nMaybe you want to run git cl status?')
cl.SetIssue(issue)
print(('Issue number: %s (%s)' % (cl.GetIssue(), cl.GetIssueURL())))
return 0
| -2,807,526,720,289,769,000
|
Sets or displays the current code review issue number.
Pass issue number 0 to clear the current issue.
|
git_cl.py
|
CMDissue
|
wuyong2k/chromium_depot_tool
|
python
|
@subcommand.usage('[issue_number]')
def CMDissue(parser, args):
'Sets or displays the current code review issue number.\n\n Pass issue number 0 to clear the current issue.\n '
parser.add_option('-r', '--reverse', action='store_true', help='Lookup the branch(es) for the specified issues. If no issues are specified, all branches with mapped issues will be listed.')
_add_codereview_select_options(parser)
(options, args) = parser.parse_args(args)
_process_codereview_select_options(parser, options)
if options.reverse:
branches = RunGit(['for-each-ref', 'refs/heads', '--format=%(refname:short)']).splitlines()
issue_branch_map = {}
for branch in branches:
cl = Changelist(branchref=branch)
issue_branch_map.setdefault(cl.GetIssue(), []).append(branch)
if (not args):
args = sorted(issue_branch_map.iterkeys())
for issue in args:
if (not issue):
continue
print(('Branch for issue number %s: %s' % (issue, ', '.join((issue_branch_map.get(int(issue)) or ('None',))))))
else:
cl = Changelist(codereview=options.forced_codereview)
if (len(args) > 0):
try:
issue = int(args[0])
except ValueError:
DieWithError('Pass a number to set the issue or none to list it.\nMaybe you want to run git cl status?')
cl.SetIssue(issue)
print(('Issue number: %s (%s)' % (cl.GetIssue(), cl.GetIssueURL())))
return 0
|
def CMDcomments(parser, args):
'Shows or posts review comments for any changelist.'
parser.add_option('-a', '--add-comment', dest='comment', help='comment to add to an issue')
parser.add_option('-i', dest='issue', help='review issue id (defaults to current issue)')
parser.add_option('-j', '--json-file', help='File to write JSON summary to')
auth.add_auth_options(parser)
(options, args) = parser.parse_args(args)
auth_config = auth.extract_auth_config_from_options(options)
issue = None
if options.issue:
try:
issue = int(options.issue)
except ValueError:
DieWithError('A review issue id is expected to be a number')
cl = Changelist(issue=issue, codereview='rietveld', auth_config=auth_config)
if options.comment:
cl.AddComment(options.comment)
return 0
data = cl.GetIssueProperties()
summary = []
for message in sorted(data.get('messages', []), key=(lambda x: x['date'])):
summary.append({'date': message['date'], 'lgtm': False, 'message': message['text'], 'not_lgtm': False, 'sender': message['sender']})
if message['disapproval']:
color = Fore.RED
summary[(- 1)]['not lgtm'] = True
elif message['approval']:
color = Fore.GREEN
summary[(- 1)]['lgtm'] = True
elif (message['sender'] == data['owner_email']):
color = Fore.MAGENTA
else:
color = Fore.BLUE
print(('\n%s%s %s%s' % (color, message['date'].split('.', 1)[0], message['sender'], Fore.RESET)))
if message['text'].strip():
print('\n'.join(((' ' + l) for l in message['text'].splitlines())))
if options.json_file:
with open(options.json_file, 'wb') as f:
json.dump(summary, f)
return 0
| -7,667,378,544,018,240,000
|
Shows or posts review comments for any changelist.
|
git_cl.py
|
CMDcomments
|
wuyong2k/chromium_depot_tool
|
python
|
def CMDcomments(parser, args):
parser.add_option('-a', '--add-comment', dest='comment', help='comment to add to an issue')
parser.add_option('-i', dest='issue', help='review issue id (defaults to current issue)')
parser.add_option('-j', '--json-file', help='File to write JSON summary to')
auth.add_auth_options(parser)
(options, args) = parser.parse_args(args)
auth_config = auth.extract_auth_config_from_options(options)
issue = None
if options.issue:
try:
issue = int(options.issue)
except ValueError:
DieWithError('A review issue id is expected to be a number')
cl = Changelist(issue=issue, codereview='rietveld', auth_config=auth_config)
if options.comment:
cl.AddComment(options.comment)
return 0
data = cl.GetIssueProperties()
summary = []
for message in sorted(data.get('messages', []), key=(lambda x: x['date'])):
summary.append({'date': message['date'], 'lgtm': False, 'message': message['text'], 'not_lgtm': False, 'sender': message['sender']})
if message['disapproval']:
color = Fore.RED
summary[(- 1)]['not lgtm'] = True
elif message['approval']:
color = Fore.GREEN
summary[(- 1)]['lgtm'] = True
elif (message['sender'] == data['owner_email']):
color = Fore.MAGENTA
else:
color = Fore.BLUE
print(('\n%s%s %s%s' % (color, message['date'].split('.', 1)[0], message['sender'], Fore.RESET)))
if message['text'].strip():
print('\n'.join(((' ' + l) for l in message['text'].splitlines())))
if options.json_file:
with open(options.json_file, 'wb') as f:
json.dump(summary, f)
return 0
|
@subcommand.usage('[codereview url or issue id]')
def CMDdescription(parser, args):
"Brings up the editor for the current CL's description."
parser.add_option('-d', '--display', action='store_true', help='Display the description instead of opening an editor')
parser.add_option('-n', '--new-description', help='New description to set for this issue (- for stdin)')
_add_codereview_select_options(parser)
auth.add_auth_options(parser)
(options, args) = parser.parse_args(args)
_process_codereview_select_options(parser, options)
target_issue = None
if (len(args) > 0):
issue_arg = ParseIssueNumberArgument(args[0])
if (not issue_arg.valid):
parser.print_help()
return 1
target_issue = issue_arg.issue
auth_config = auth.extract_auth_config_from_options(options)
cl = Changelist(auth_config=auth_config, issue=target_issue, codereview=options.forced_codereview)
if (not cl.GetIssue()):
DieWithError('This branch has no associated changelist.')
description = ChangeDescription(cl.GetDescription())
if options.display:
print(description.description)
return 0
if options.new_description:
text = options.new_description
if (text == '-'):
text = '\n'.join((l.rstrip() for l in sys.stdin))
description.set_description(text)
else:
description.prompt()
if (cl.GetDescription() != description.description):
cl.UpdateDescription(description.description)
return 0
| -3,280,009,347,181,202,400
|
Brings up the editor for the current CL's description.
|
git_cl.py
|
CMDdescription
|
wuyong2k/chromium_depot_tool
|
python
|
@subcommand.usage('[codereview url or issue id]')
def CMDdescription(parser, args):
parser.add_option('-d', '--display', action='store_true', help='Display the description instead of opening an editor')
parser.add_option('-n', '--new-description', help='New description to set for this issue (- for stdin)')
_add_codereview_select_options(parser)
auth.add_auth_options(parser)
(options, args) = parser.parse_args(args)
_process_codereview_select_options(parser, options)
target_issue = None
if (len(args) > 0):
issue_arg = ParseIssueNumberArgument(args[0])
if (not issue_arg.valid):
parser.print_help()
return 1
target_issue = issue_arg.issue
auth_config = auth.extract_auth_config_from_options(options)
cl = Changelist(auth_config=auth_config, issue=target_issue, codereview=options.forced_codereview)
if (not cl.GetIssue()):
DieWithError('This branch has no associated changelist.')
description = ChangeDescription(cl.GetDescription())
if options.display:
print(description.description)
return 0
if options.new_description:
text = options.new_description
if (text == '-'):
text = '\n'.join((l.rstrip() for l in sys.stdin))
description.set_description(text)
else:
description.prompt()
if (cl.GetDescription() != description.description):
cl.UpdateDescription(description.description)
return 0
|
def CreateDescriptionFromLog(args):
'Pulls out the commit log to use as a base for the CL description.'
log_args = []
if ((len(args) == 1) and (not args[0].endswith('.'))):
log_args = [(args[0] + '..')]
elif ((len(args) == 1) and args[0].endswith('...')):
log_args = [args[0][:(- 1)]]
elif (len(args) == 2):
log_args = [((args[0] + '..') + args[1])]
else:
log_args = args[:]
return RunGit((['log', '--pretty=format:%s\n\n%b'] + log_args))
| -1,814,644,899,593,277,200
|
Pulls out the commit log to use as a base for the CL description.
|
git_cl.py
|
CreateDescriptionFromLog
|
wuyong2k/chromium_depot_tool
|
python
|
def CreateDescriptionFromLog(args):
log_args = []
if ((len(args) == 1) and (not args[0].endswith('.'))):
log_args = [(args[0] + '..')]
elif ((len(args) == 1) and args[0].endswith('...')):
log_args = [args[0][:(- 1)]]
elif (len(args) == 2):
log_args = [((args[0] + '..') + args[1])]
else:
log_args = args[:]
return RunGit((['log', '--pretty=format:%s\n\n%b'] + log_args))
|
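A hedged stand-alone equivalent of the single-argument case above; it assumes it runs inside a git checkout, and 'origin/master' is just an example revision.

import subprocess

# One argument that does not end in '.' becomes the range 'origin/master..',
# and git log is asked for subject plus body, the same format string used above.
log_args = ['origin/master..']
description = subprocess.check_output(['git', 'log', '--pretty=format:%s\n\n%b'] + log_args)
print(description.decode())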
def CMDlint(parser, args):
'Runs cpplint on the current changelist.'
parser.add_option('--filter', action='append', metavar='-x,+y', help="Comma-separated list of cpplint's category-filters")
auth.add_auth_options(parser)
(options, args) = parser.parse_args(args)
auth_config = auth.extract_auth_config_from_options(options)
try:
import cpplint
import cpplint_chromium
except ImportError:
print('Your depot_tools is missing cpplint.py and/or cpplint_chromium.py.')
return 1
previous_cwd = os.getcwd()
os.chdir(settings.GetRoot())
try:
cl = Changelist(auth_config=auth_config)
change = cl.GetChange(cl.GetCommonAncestorWithUpstream(), None)
files = [f.LocalPath() for f in change.AffectedFiles()]
if (not files):
print('Cannot lint an empty CL')
return 1
command = (args + files)
if options.filter:
command = ([('--filter=' + ','.join(options.filter))] + command)
filenames = cpplint.ParseArguments(command)
white_regex = re.compile(settings.GetLintRegex())
black_regex = re.compile(settings.GetLintIgnoreRegex())
extra_check_functions = [cpplint_chromium.CheckPointerDeclarationWhitespace]
for filename in filenames:
if white_regex.match(filename):
if black_regex.match(filename):
print(('Ignoring file %s' % filename))
else:
cpplint.ProcessFile(filename, cpplint._cpplint_state.verbose_level, extra_check_functions)
else:
print(('Skipping file %s' % filename))
finally:
os.chdir(previous_cwd)
print(('Total errors found: %d\n' % cpplint._cpplint_state.error_count))
if (cpplint._cpplint_state.error_count != 0):
return 1
return 0
| 5,296,917,262,715,844,000
|
Runs cpplint on the current changelist.
|
git_cl.py
|
CMDlint
|
wuyong2k/chromium_depot_tool
|
python
|
def CMDlint(parser, args):
parser.add_option('--filter', action='append', metavar='-x,+y', help="Comma-separated list of cpplint's category-filters")
auth.add_auth_options(parser)
(options, args) = parser.parse_args(args)
auth_config = auth.extract_auth_config_from_options(options)
try:
import cpplint
import cpplint_chromium
except ImportError:
print('Your depot_tools is missing cpplint.py and/or cpplint_chromium.py.')
return 1
previous_cwd = os.getcwd()
os.chdir(settings.GetRoot())
try:
cl = Changelist(auth_config=auth_config)
change = cl.GetChange(cl.GetCommonAncestorWithUpstream(), None)
files = [f.LocalPath() for f in change.AffectedFiles()]
if (not files):
print('Cannot lint an empty CL')
return 1
command = (args + files)
if options.filter:
command = ([('--filter=' + ','.join(options.filter))] + command)
filenames = cpplint.ParseArguments(command)
white_regex = re.compile(settings.GetLintRegex())
black_regex = re.compile(settings.GetLintIgnoreRegex())
extra_check_functions = [cpplint_chromium.CheckPointerDeclarationWhitespace]
for filename in filenames:
if white_regex.match(filename):
if black_regex.match(filename):
print(('Ignoring file %s' % filename))
else:
cpplint.ProcessFile(filename, cpplint._cpplint_state.verbose_level, extra_check_functions)
else:
print(('Skipping file %s' % filename))
finally:
os.chdir(previous_cwd)
print(('Total errors found: %d\n' % cpplint._cpplint_state.error_count))
if (cpplint._cpplint_state.error_count != 0):
return 1
return 0
|
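An illustrative sketch (not part of git_cl.py) of the file selection above: a file is linted only when it matches the lint regex and does not match the ignore regex. The two patterns are hypothetical stand-ins for settings.GetLintRegex() and settings.GetLintIgnoreRegex().

import re

white_regex = re.compile(r'.*\.(cc|h)$')        # hypothetical lint regex
black_regex = re.compile(r'.*third_party.*')    # hypothetical ignore regex

for filename in ['foo/bar.cc', 'third_party/lib/baz.cc', 'docs/readme.md']:
  if white_regex.match(filename):
    if black_regex.match(filename):
      print('Ignoring file %s' % filename)
    else:
      print('Linting file %s' % filename)
  else:
    print('Skipping file %s' % filename)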
def CMDpresubmit(parser, args):
'Runs presubmit tests on the current changelist.'
parser.add_option('-u', '--upload', action='store_true', help='Run upload hook instead of the push/dcommit hook')
parser.add_option('-f', '--force', action='store_true', help='Run checks even if tree is dirty')
auth.add_auth_options(parser)
(options, args) = parser.parse_args(args)
auth_config = auth.extract_auth_config_from_options(options)
if ((not options.force) and git_common.is_dirty_git_tree('presubmit')):
print('use --force to check even if tree is dirty.')
return 1
cl = Changelist(auth_config=auth_config)
if args:
base_branch = args[0]
else:
base_branch = cl.GetCommonAncestorWithUpstream()
cl.RunHook(committing=(not options.upload), may_prompt=False, verbose=options.verbose, change=cl.GetChange(base_branch, None))
return 0
| -8,723,193,488,120,087,000
|
Runs presubmit tests on the current changelist.
|
git_cl.py
|
CMDpresubmit
|
wuyong2k/chromium_depot_tool
|
python
|
def CMDpresubmit(parser, args):
parser.add_option('-u', '--upload', action='store_true', help='Run upload hook instead of the push/dcommit hook')
parser.add_option('-f', '--force', action='store_true', help='Run checks even if tree is dirty')
auth.add_auth_options(parser)
(options, args) = parser.parse_args(args)
auth_config = auth.extract_auth_config_from_options(options)
if ((not options.force) and git_common.is_dirty_git_tree('presubmit')):
print('use --force to check even if tree is dirty.')
return 1
cl = Changelist(auth_config=auth_config)
if args:
base_branch = args[0]
else:
base_branch = cl.GetCommonAncestorWithUpstream()
cl.RunHook(committing=(not options.upload), may_prompt=False, verbose=options.verbose, change=cl.GetChange(base_branch, None))
return 0
|
def GenerateGerritChangeId(message):
'Returns Ixxxxxx...xxx change id.\n\n Works the same way as\n https://gerrit-review.googlesource.com/tools/hooks/commit-msg\n but can be called on demand on all platforms.\n\n The basic idea is to generate git hash of a state of the tree, original commit\n message, author/committer info and timestamps.\n '
lines = []
tree_hash = RunGitSilent(['write-tree'])
lines.append(('tree %s' % tree_hash.strip()))
(code, parent) = RunGitWithCode(['rev-parse', 'HEAD~0'], suppress_stderr=False)
if (code == 0):
lines.append(('parent %s' % parent.strip()))
author = RunGitSilent(['var', 'GIT_AUTHOR_IDENT'])
lines.append(('author %s' % author.strip()))
committer = RunGitSilent(['var', 'GIT_COMMITTER_IDENT'])
lines.append(('committer %s' % committer.strip()))
lines.append('')
lines.append(message)
change_hash = RunCommand(['git', 'hash-object', '-t', 'commit', '--stdin'], stdin='\n'.join(lines))
return ('I%s' % change_hash.strip())
| 1,807,800,228,276,951,000
|
Returns Ixxxxxx...xxx change id.
Works the same way as
https://gerrit-review.googlesource.com/tools/hooks/commit-msg
but can be called on demand on all platforms.
The basic idea is to generate git hash of a state of the tree, original commit
message, author/committer info and timestamps.
|
git_cl.py
|
GenerateGerritChangeId
|
wuyong2k/chromium_depot_tool
|
python
|
def GenerateGerritChangeId(message):
'Returns Ixxxxxx...xxx change id.\n\n Works the same way as\n https://gerrit-review.googlesource.com/tools/hooks/commit-msg\n but can be called on demand on all platforms.\n\n The basic idea is to generate git hash of a state of the tree, original commit\n message, author/committer info and timestamps.\n '
lines = []
tree_hash = RunGitSilent(['write-tree'])
lines.append(('tree %s' % tree_hash.strip()))
(code, parent) = RunGitWithCode(['rev-parse', 'HEAD~0'], suppress_stderr=False)
if (code == 0):
lines.append(('parent %s' % parent.strip()))
author = RunGitSilent(['var', 'GIT_AUTHOR_IDENT'])
lines.append(('author %s' % author.strip()))
committer = RunGitSilent(['var', 'GIT_COMMITTER_IDENT'])
lines.append(('committer %s' % committer.strip()))
  lines.append('')
lines.append(message)
change_hash = RunCommand(['git', 'hash-object', '-t', 'commit', '--stdin'], stdin='\n'.join(lines))
return ('I%s' % change_hash.strip())
|
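A hedged stand-alone sketch of the same idea, assuming it runs inside a git checkout: hash a synthetic commit object built from the current tree and ident strings, then prefix the hash with 'I'. The commit message is made up, and the parent line the real function adds when HEAD exists is omitted here.

import subprocess

tree = subprocess.check_output(['git', 'write-tree']).strip()
author = subprocess.check_output(['git', 'var', 'GIT_AUTHOR_IDENT']).strip()
committer = subprocess.check_output(['git', 'var', 'GIT_COMMITTER_IDENT']).strip()
payload = b'\n'.join([b'tree ' + tree, b'author ' + author,
                      b'committer ' + committer, b'', b'my commit message'])
p = subprocess.Popen(['git', 'hash-object', '-t', 'commit', '--stdin'],
                     stdin=subprocess.PIPE, stdout=subprocess.PIPE)
change_hash, _ = p.communicate(payload)
print(b'I' + change_hash.strip())   # e.g. b'I7f1a...'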
def GetTargetRef(remote, remote_branch, target_branch, pending_prefix):
'Computes the remote branch ref to use for the CL.\n\n Args:\n remote (str): The git remote for the CL.\n remote_branch (str): The git remote branch for the CL.\n target_branch (str): The target branch specified by the user.\n pending_prefix (str): The pending prefix from the settings.\n '
if (not (remote and remote_branch)):
return None
if target_branch:
if ('/' not in target_branch):
remote_branch = ('refs/remotes/%s/%s' % (remote, target_branch))
else:
prefix_replacements = (('^((refs/)?remotes/)?branch-heads/', 'refs/remotes/branch-heads/'), (('^((refs/)?remotes/)?%s/' % remote), ('refs/remotes/%s/' % remote)), ('^(refs/)?heads/', ('refs/remotes/%s/' % remote)))
match = None
for (regex, replacement) in prefix_replacements:
match = re.search(regex, target_branch)
if match:
remote_branch = target_branch.replace(match.group(0), replacement)
break
if (not match):
remote_branch = target_branch
elif (remote_branch in REFS_THAT_ALIAS_TO_OTHER_REFS):
remote_branch = REFS_THAT_ALIAS_TO_OTHER_REFS[remote_branch]
if remote_branch.startswith(('refs/remotes/%s/refs/' % remote)):
remote_branch = remote_branch.replace(('refs/remotes/%s/' % remote), '')
elif remote_branch.startswith(('refs/remotes/%s/' % remote)):
remote_branch = remote_branch.replace(('refs/remotes/%s/' % remote), 'refs/heads/')
elif remote_branch.startswith('refs/remotes/branch-heads'):
remote_branch = remote_branch.replace('refs/remotes/', 'refs/')
if pending_prefix:
remote_branch = remote_branch.replace('refs/', pending_prefix)
return remote_branch
| -1,148,269,362,434,899,100
|
Computes the remote branch ref to use for the CL.
Args:
remote (str): The git remote for the CL.
remote_branch (str): The git remote branch for the CL.
target_branch (str): The target branch specified by the user.
pending_prefix (str): The pending prefix from the settings.
|
git_cl.py
|
GetTargetRef
|
wuyong2k/chromium_depot_tool
|
python
|
def GetTargetRef(remote, remote_branch, target_branch, pending_prefix):
'Computes the remote branch ref to use for the CL.\n\n Args:\n remote (str): The git remote for the CL.\n remote_branch (str): The git remote branch for the CL.\n target_branch (str): The target branch specified by the user.\n pending_prefix (str): The pending prefix from the settings.\n '
if (not (remote and remote_branch)):
return None
if target_branch:
if ('/' not in target_branch):
remote_branch = ('refs/remotes/%s/%s' % (remote, target_branch))
else:
prefix_replacements = (('^((refs/)?remotes/)?branch-heads/', 'refs/remotes/branch-heads/'), (('^((refs/)?remotes/)?%s/' % remote), ('refs/remotes/%s/' % remote)), ('^(refs/)?heads/', ('refs/remotes/%s/' % remote)))
match = None
for (regex, replacement) in prefix_replacements:
match = re.search(regex, target_branch)
if match:
remote_branch = target_branch.replace(match.group(0), replacement)
break
if (not match):
remote_branch = target_branch
elif (remote_branch in REFS_THAT_ALIAS_TO_OTHER_REFS):
remote_branch = REFS_THAT_ALIAS_TO_OTHER_REFS[remote_branch]
if remote_branch.startswith(('refs/remotes/%s/refs/' % remote)):
    remote_branch = remote_branch.replace(('refs/remotes/%s/' % remote), '')
elif remote_branch.startswith(('refs/remotes/%s/' % remote)):
remote_branch = remote_branch.replace(('refs/remotes/%s/' % remote), 'refs/heads/')
elif remote_branch.startswith('refs/remotes/branch-heads'):
remote_branch = remote_branch.replace('refs/remotes/', 'refs/')
if pending_prefix:
remote_branch = remote_branch.replace('refs/', pending_prefix)
return remote_branch
|
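Hedged usage examples of the ref computation above, written as assertions; they assume git_cl is importable and that none of the sample refs appear in REFS_THAT_ALIAS_TO_OTHER_REFS. The remote names, branch names, and pending prefix are made up.

import git_cl

assert git_cl.GetTargetRef('origin', 'refs/remotes/origin/master',
                           'release-1.0', None) == 'refs/heads/release-1.0'
assert git_cl.GetTargetRef('origin', 'refs/remotes/origin/master',
                           'refs/heads/some-branch', None) == 'refs/heads/some-branch'
assert git_cl.GetTargetRef('origin', 'refs/remotes/origin/master',
                           'branch-heads/123', None) == 'refs/branch-heads/123'
assert git_cl.GetTargetRef('origin', 'refs/remotes/origin/master',
                           None, 'refs/pending/') == 'refs/pending/heads/master'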
def cleanup_list(l):
'Fixes a list so that comma separated items are put as individual items.\n\n So that "--reviewers joe@c,john@c --reviewers joa@c" results in\n options.reviewers == sorted([\'joe@c\', \'john@c\', \'joa@c\']).\n '
items = sum((i.split(',') for i in l), [])
stripped_items = (i.strip() for i in items)
return sorted(filter(None, stripped_items))
| 2,816,913,004,485,716,500
|
Fixes a list so that comma separated items are put as individual items.
So that "--reviewers joe@c,john@c --reviewers joa@c" results in
options.reviewers == sorted(['joe@c', 'john@c', 'joa@c']).
|
git_cl.py
|
cleanup_list
|
wuyong2k/chromium_depot_tool
|
python
|
def cleanup_list(l):
'Fixes a list so that comma separated items are put as individual items.\n\n So that "--reviewers joe@c,john@c --reviewers joa@c" results in\n options.reviewers == sorted([\'joe@c\', \'john@c\', \'joa@c\']).\n '
items = sum((i.split(',') for i in l), [])
stripped_items = (i.strip() for i in items)
return sorted(filter(None, stripped_items))
|
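A hedged usage example of the reviewer-list cleanup described above; it assumes git_cl is importable, and the first call reuses the docstring's own sample addresses.

from git_cl import cleanup_list

print(cleanup_list(['joe@c,john@c', ' joa@c ']))  # ['joa@c', 'joe@c', 'john@c']
print(cleanup_list(['a@x,, b@x', 'c@x']))         # empty items are dropped: ['a@x', 'b@x', 'c@x']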
@subcommand.usage('[args to "git diff"]')
def CMDupload(parser, args):
'Uploads the current changelist to codereview.\n\n Can skip dependency patchset uploads for a branch by running:\n git config branch.branch_name.skip-deps-uploads True\n To unset run:\n git config --unset branch.branch_name.skip-deps-uploads\n Can also set the above globally by using the --global flag.\n '
parser.add_option('--bypass-hooks', action='store_true', dest='bypass_hooks', help='bypass upload presubmit hook')
parser.add_option('--bypass-watchlists', action='store_true', dest='bypass_watchlists', help='bypass watchlists auto CC-ing reviewers')
parser.add_option('-f', action='store_true', dest='force', help="force yes to questions (don't prompt)")
parser.add_option('-m', dest='message', help='message for patchset')
parser.add_option('--message-file', dest='message_file', help='file which contains message for patchset')
parser.add_option('-t', dest='title', help='title for patchset (Rietveld only)')
parser.add_option('-r', '--reviewers', action='append', default=[], help='reviewer email addresses')
parser.add_option('--cc', action='append', default=[], help='cc email addresses')
parser.add_option('-s', '--send-mail', action='store_true', help='send email to reviewer immediately')
parser.add_option('--emulate_svn_auto_props', '--emulate-svn-auto-props', action='store_true', dest='emulate_svn_auto_props', help="Emulate Subversion's auto properties feature.")
parser.add_option('-c', '--use-commit-queue', action='store_true', help='tell the commit queue to commit this patchset')
parser.add_option('--private', action='store_true', help='set the review private (rietveld only)')
parser.add_option('--target_branch', '--target-branch', metavar='TARGET', help=('Apply CL to remote ref TARGET. ' + 'Default: remote branch head, or master'))
parser.add_option('--squash', action='store_true', help='Squash multiple commits into one (Gerrit only)')
parser.add_option('--no-squash', action='store_true', help=("Don't squash multiple commits into one " + '(Gerrit only)'))
parser.add_option('--email', default=None, help='email address to use to connect to Rietveld')
parser.add_option('--tbr-owners', dest='tbr_owners', action='store_true', help='add a set of OWNERS to TBR')
parser.add_option('-d', '--cq-dry-run', dest='cq_dry_run', action='store_true', help='Send the patchset to do a CQ dry run right after upload.')
parser.add_option('--dependencies', action='store_true', help='Uploads CLs of all the local branches that depend on the current branch')
orig_args = args
add_git_similarity(parser)
auth.add_auth_options(parser)
_add_codereview_select_options(parser)
(options, args) = parser.parse_args(args)
_process_codereview_select_options(parser, options)
auth_config = auth.extract_auth_config_from_options(options)
if git_common.is_dirty_git_tree('upload'):
return 1
options.reviewers = cleanup_list(options.reviewers)
options.cc = cleanup_list(options.cc)
if options.message_file:
if options.message:
parser.error('only one of --message and --message-file allowed.')
options.message = gclient_utils.FileRead(options.message_file)
options.message_file = None
settings.GetIsGerrit()
cl = Changelist(auth_config=auth_config, codereview=options.forced_codereview)
return cl.CMDUpload(options, args, orig_args)
| -1,781,172,476,083,426,800
|
Uploads the current changelist to codereview.
Can skip dependency patchset uploads for a branch by running:
git config branch.branch_name.skip-deps-uploads True
To unset run:
git config --unset branch.branch_name.skip-deps-uploads
Can also set the above globally by using the --global flag.
|
git_cl.py
|
CMDupload
|
wuyong2k/chromium_depot_tool
|
python
|
@subcommand.usage('[args to "git diff"]')
def CMDupload(parser, args):
'Uploads the current changelist to codereview.\n\n Can skip dependency patchset uploads for a branch by running:\n git config branch.branch_name.skip-deps-uploads True\n To unset run:\n git config --unset branch.branch_name.skip-deps-uploads\n Can also set the above globally by using the --global flag.\n '
parser.add_option('--bypass-hooks', action='store_true', dest='bypass_hooks', help='bypass upload presubmit hook')
parser.add_option('--bypass-watchlists', action='store_true', dest='bypass_watchlists', help='bypass watchlists auto CC-ing reviewers')
parser.add_option('-f', action='store_true', dest='force', help="force yes to questions (don't prompt)")
parser.add_option('-m', dest='message', help='message for patchset')
parser.add_option('--message-file', dest='message_file', help='file which contains message for patchset')
parser.add_option('-t', dest='title', help='title for patchset (Rietveld only)')
parser.add_option('-r', '--reviewers', action='append', default=[], help='reviewer email addresses')
parser.add_option('--cc', action='append', default=[], help='cc email addresses')
parser.add_option('-s', '--send-mail', action='store_true', help='send email to reviewer immediately')
parser.add_option('--emulate_svn_auto_props', '--emulate-svn-auto-props', action='store_true', dest='emulate_svn_auto_props', help="Emulate Subversion's auto properties feature.")
parser.add_option('-c', '--use-commit-queue', action='store_true', help='tell the commit queue to commit this patchset')
parser.add_option('--private', action='store_true', help='set the review private (rietveld only)')
parser.add_option('--target_branch', '--target-branch', metavar='TARGET', help=('Apply CL to remote ref TARGET. ' + 'Default: remote branch head, or master'))
parser.add_option('--squash', action='store_true', help='Squash multiple commits into one (Gerrit only)')
parser.add_option('--no-squash', action='store_true', help=("Don't squash multiple commits into one " + '(Gerrit only)'))
parser.add_option('--email', default=None, help='email address to use to connect to Rietveld')
parser.add_option('--tbr-owners', dest='tbr_owners', action='store_true', help='add a set of OWNERS to TBR')
parser.add_option('-d', '--cq-dry-run', dest='cq_dry_run', action='store_true', help='Send the patchset to do a CQ dry run right after upload.')
parser.add_option('--dependencies', action='store_true', help='Uploads CLs of all the local branches that depend on the current branch')
orig_args = args
add_git_similarity(parser)
auth.add_auth_options(parser)
_add_codereview_select_options(parser)
(options, args) = parser.parse_args(args)
_process_codereview_select_options(parser, options)
auth_config = auth.extract_auth_config_from_options(options)
if git_common.is_dirty_git_tree('upload'):
return 1
options.reviewers = cleanup_list(options.reviewers)
options.cc = cleanup_list(options.cc)
if options.message_file:
if options.message:
parser.error('only one of --message and --message-file allowed.')
options.message = gclient_utils.FileRead(options.message_file)
options.message_file = None
settings.GetIsGerrit()
cl = Changelist(auth_config=auth_config, codereview=options.forced_codereview)
return cl.CMDUpload(options, args, orig_args)
|
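The CMDupload docstring above points at the branch.<name>.skip-deps-uploads git config key. The sketch below is illustrative only and is not part of the dataset record: it shows one way such a key could be toggled and read from Python with plain `git config` subprocess calls. The branch name 'feature-x' and the helper names are placeholders, not taken from git_cl.py.

import subprocess

def set_skip_deps_uploads(branch, enabled, use_global=False):
    """Set branch.<branch>.skip-deps-uploads, or unset it when disabled."""
    key = 'branch.%s.skip-deps-uploads' % branch
    cmd = ['git', 'config'] + (['--global'] if use_global else [])
    if enabled:
        subprocess.check_call(cmd + [key, 'True'])
    else:
        # `git config --unset` exits non-zero when the key is absent; harmless here.
        subprocess.call(cmd + ['--unset', key])

def get_skip_deps_uploads(branch):
    """Return True if dependency patchset uploads should be skipped for the branch."""
    proc = subprocess.run(
        ['git', 'config', '--get', 'branch.%s.skip-deps-uploads' % branch],
        capture_output=True, text=True)
    return proc.returncode == 0 and proc.stdout.strip().lower() == 'true'

# Usage (requires running inside a git checkout):
#   set_skip_deps_uploads('feature-x', True)
#   print(get_skip_deps_uploads('feature-x'))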
def SendUpstream(parser, args, cmd):
'Common code for CMDland and CmdDCommit\n\n In case of Gerrit, uses Gerrit REST api to "submit" the issue, which pushes\n upstream and closes the issue automatically and atomically.\n\n Otherwise (in case of Rietveld):\n Squashes branch into a single commit.\n Updates changelog with metadata (e.g. pointer to review).\n Pushes/dcommits the code upstream.\n Updates review and closes.\n '
parser.add_option('--bypass-hooks', action='store_true', dest='bypass_hooks', help='bypass upload presubmit hook')
parser.add_option('-m', dest='message', help='override review description')
parser.add_option('-f', action='store_true', dest='force', help="force yes to questions (don't prompt)")
parser.add_option('-c', dest='contributor', help=(('external contributor for patch (appended to ' + 'description and used as author for git). Should be ') + "formatted as 'First Last <example@example.com>'"))
add_git_similarity(parser)
auth.add_auth_options(parser)
(options, args) = parser.parse_args(args)
auth_config = auth.extract_auth_config_from_options(options)
cl = Changelist(auth_config=auth_config)
if cl.IsGerrit():
if options.message:
parser.error('-m MESSAGE option is not supported for Gerrit.')
if options.contributor:
parser.error('-c CONTRIBUTOR option is not supported for Gerrit.\nBefore uploading a commit to Gerrit, ensure its author field is the contributor\'s "name <email>". If you can\'t upload such a commit for review, contact your repository admin and request "Forge-Author" permission.')
return cl._codereview_impl.CMDLand(options.force, options.bypass_hooks, options.verbose)
current = cl.GetBranch()
(remote, upstream_branch) = cl.FetchUpstreamTuple(cl.GetBranch())
if ((not settings.GetIsGitSvn()) and (remote == '.')):
print()
print(('Attempting to push branch %r into another local branch!' % current))
print()
print('Either reparent this branch on top of origin/master:')
print(' git reparent-branch --root')
print()
print('OR run `git rebase-update` if you think the parent branch is ')
print('already committed.')
print()
print((' Current parent: %r' % upstream_branch))
return 1
if ((not args) or (cmd == 'land')):
args = [cl.GetUpstreamBranch()]
if options.contributor:
if (not re.match('^.*\\s<\\S+@\\S+>$', options.contributor)):
print("Please provide contibutor as 'First Last <example@example.com>'")
return 1
base_branch = args[0]
base_has_submodules = IsSubmoduleMergeCommit(base_branch)
if git_common.is_dirty_git_tree(cmd):
return 1
upstream_commits = RunGit(['rev-list', ('^' + cl.GetBranchRef()), base_branch]).splitlines()
if upstream_commits:
print(('Base branch "%s" has %d commits not in this branch.' % (base_branch, len(upstream_commits))))
print(('Run "git merge %s" before attempting to %s.' % (base_branch, cmd)))
return 1
svn_head = None
if ((cmd == 'dcommit') or base_has_submodules):
svn_head = RunGit(['log', '--grep=^git-svn-id:', '-1', '--pretty=format:%H'])
if (cmd == 'dcommit'):
base_svn_head = base_branch
if base_has_submodules:
base_svn_head += '^1'
extra_commits = RunGit(['rev-list', ('^' + svn_head), base_svn_head])
if extra_commits:
print(('This branch has %d additional commits not upstreamed yet.' % len(extra_commits.splitlines())))
print(('Upstream "%s" or rebase this branch on top of the upstream trunk before attempting to %s.' % (base_branch, cmd)))
return 1
merge_base = RunGit(['merge-base', base_branch, 'HEAD']).strip()
if (not options.bypass_hooks):
author = None
if options.contributor:
author = re.search('\\<(.*)\\>', options.contributor).group(1)
hook_results = cl.RunHook(committing=True, may_prompt=(not options.force), verbose=options.verbose, change=cl.GetChange(merge_base, author))
if (not hook_results.should_continue()):
return 1
status = GetTreeStatus()
if ('closed' == status):
print(('The tree is closed. Please wait for it to reopen. Use "git cl %s --bypass-hooks" to commit on a closed tree.' % cmd))
return 1
elif ('unknown' == status):
print(('Unable to determine tree status. Please verify manually and use "git cl %s --bypass-hooks" to commit on a closed tree.' % cmd))
return 1
change_desc = ChangeDescription(options.message)
if ((not change_desc.description) and cl.GetIssue()):
change_desc = ChangeDescription(cl.GetDescription())
if (not change_desc.description):
if ((not cl.GetIssue()) and options.bypass_hooks):
change_desc = ChangeDescription(CreateDescriptionFromLog([merge_base]))
else:
print('No description set.')
print(('Visit %s/edit to set it.' % cl.GetIssueURL()))
return 1
if cl.GetIssue():
change_desc.update_reviewers(cl.GetApprovingReviewers())
commit_desc = ChangeDescription(change_desc.description)
if cl.GetIssue():
commit_desc.append_footer(('Review URL: %s .' % cl.GetIssueURL()))
if options.contributor:
commit_desc.append_footer(('Patch from %s.' % options.contributor))
print('Description:')
print(commit_desc.description)
branches = [merge_base, cl.GetBranchRef()]
if (not options.force):
print_stats(options.similarity, options.find_copies, branches)
MERGE_BRANCH = 'git-cl-commit'
CHERRY_PICK_BRANCH = 'git-cl-cherry-pick'
for branch in [MERGE_BRANCH, CHERRY_PICK_BRANCH]:
showref_cmd = ['show-ref', '--quiet', '--verify', ('refs/heads/%s' % branch)]
result = RunGitWithCode(showref_cmd)
if (result[0] == 0):
RunGit(['branch', '-D', branch])
rel_base_path = settings.GetRelativeRoot()
if rel_base_path:
os.chdir(rel_base_path)
retcode = (- 1)
pushed_to_pending = False
pending_ref = None
revision = None
try:
RunGit(['checkout', '-q', '-b', MERGE_BRANCH])
RunGit(['reset', '--soft', merge_base])
if options.contributor:
RunGit(['commit', '--author', options.contributor, '-m', commit_desc.description])
else:
RunGit(['commit', '-m', commit_desc.description])
if base_has_submodules:
cherry_pick_commit = RunGit(['rev-list', 'HEAD^!']).rstrip()
RunGit(['branch', CHERRY_PICK_BRANCH, svn_head])
RunGit(['checkout', CHERRY_PICK_BRANCH])
RunGit(['cherry-pick', cherry_pick_commit])
if (cmd == 'land'):
(remote, branch) = cl.FetchUpstreamTuple(cl.GetBranch())
mirror = settings.GetGitMirror(remote)
pushurl = (mirror.url if mirror else remote)
pending_prefix = settings.GetPendingRefPrefix()
if ((not pending_prefix) or branch.startswith(pending_prefix)):
(retcode, output) = RunGitWithCode(['push', '--porcelain', pushurl, ('HEAD:%s' % branch)])
pushed_to_pending = (pending_prefix and branch.startswith(pending_prefix))
else:
assert branch.startswith('refs/'), branch
assert (pending_prefix[(- 1)] == '/'), pending_prefix
pending_ref = (pending_prefix + branch[len('refs/'):])
(retcode, output) = PushToGitPending(pushurl, pending_ref, branch)
pushed_to_pending = (retcode == 0)
if (retcode == 0):
revision = RunGit(['rev-parse', 'HEAD']).strip()
else:
cmd_args = ['svn', 'dcommit', ('-C%s' % options.similarity), '--no-rebase', '--rmdir']
if settings.GetForceHttpsCommitUrl():
remote_url = cl.GetGitSvnRemoteUrl()
if (urlparse.urlparse(remote_url).scheme == 'http'):
remote_url = remote_url.replace('http://', 'https://')
cmd_args.append(('--commit-url=%s' % remote_url))
(_, output) = RunGitWithCode(cmd_args)
if ('Committed r' in output):
revision = re.match('.*?\nCommitted r(\\d+)', output, re.DOTALL).group(1)
logging.debug(output)
finally:
RunGit(['checkout', '-q', cl.GetBranch()])
RunGit(['branch', '-D', MERGE_BRANCH])
if base_has_submodules:
RunGit(['branch', '-D', CHERRY_PICK_BRANCH])
if (not revision):
print('Failed to push. If this persists, please file a bug.')
return 1
killed = False
if pushed_to_pending:
try:
revision = WaitForRealCommit(remote, revision, base_branch, branch)
pushed_to_pending = False
except KeyboardInterrupt:
killed = True
if cl.GetIssue():
to_pending = (' to pending queue' if pushed_to_pending else '')
viewvc_url = settings.GetViewVCUrl()
if (not to_pending):
if (viewvc_url and revision):
change_desc.append_footer(('Committed: %s%s' % (viewvc_url, revision)))
elif revision:
change_desc.append_footer(('Committed: %s' % (revision,)))
print('Closing issue (you may be prompted for your codereview password)...')
cl.UpdateDescription(change_desc.description)
cl.CloseIssue()
props = cl.GetIssueProperties()
patch_num = len(props['patchsets'])
comment = ('Committed patchset #%d (id:%d)%s manually as %s' % (patch_num, props['patchsets'][(- 1)], to_pending, revision))
if options.bypass_hooks:
comment += (' (tree was closed).' if (GetTreeStatus() == 'closed') else '.')
else:
comment += ' (presubmit successful).'
cl.RpcServer().add_comment(cl.GetIssue(), comment)
cl.SetIssue(None)
if pushed_to_pending:
(_, branch) = cl.FetchUpstreamTuple(cl.GetBranch())
print(('The commit is in the pending queue (%s).' % pending_ref))
print(('It will show up on %s in ~1 min, once it gets a Cr-Commit-Position footer.' % branch))
hook = (POSTUPSTREAM_HOOK_PATTERN % cmd)
if os.path.isfile(hook):
RunCommand([hook, merge_base], error_ok=True)
return (1 if killed else 0)
| 149,992,166,533,250,600
|
Common code for CMDland and CmdDCommit
In case of Gerrit, uses Gerrit REST api to "submit" the issue, which pushes
upstream and closes the issue automatically and atomically.
Otherwise (in case of Rietveld):
Squashes branch into a single commit.
Updates changelog with metadata (e.g. pointer to review).
Pushes/dcommits the code upstream.
Updates review and closes.
|
git_cl.py
|
SendUpstream
|
wuyong2k/chromium_depot_tool
|
python
|
def SendUpstream(parser, args, cmd):
'Common code for CMDland and CmdDCommit\n\n In case of Gerrit, uses Gerrit REST api to "submit" the issue, which pushes\n upstream and closes the issue automatically and atomically.\n\n Otherwise (in case of Rietveld):\n Squashes branch into a single commit.\n Updates changelog with metadata (e.g. pointer to review).\n Pushes/dcommits the code upstream.\n Updates review and closes.\n '
parser.add_option('--bypass-hooks', action='store_true', dest='bypass_hooks', help='bypass upload presubmit hook')
parser.add_option('-m', dest='message', help='override review description')
parser.add_option('-f', action='store_true', dest='force', help="force yes to questions (don't prompt)")
parser.add_option('-c', dest='contributor', help=(('external contributor for patch (appended to ' + 'description and used as author for git). Should be ') + "formatted as 'First Last <example@example.com>'"))
add_git_similarity(parser)
auth.add_auth_options(parser)
(options, args) = parser.parse_args(args)
auth_config = auth.extract_auth_config_from_options(options)
cl = Changelist(auth_config=auth_config)
if cl.IsGerrit():
if options.message:
parser.error('-m MESSAGE option is not supported for Gerrit.')
if options.contributor:
parser.error('-c CONTRIBUTOR option is not supported for Gerrit.\nBefore uploading a commit to Gerrit, ensure its author field is the contributor\'s "name <email>". If you can\'t upload such a commit for review, contact your repository admin and request "Forge-Author" permission.')
return cl._codereview_impl.CMDLand(options.force, options.bypass_hooks, options.verbose)
current = cl.GetBranch()
(remote, upstream_branch) = cl.FetchUpstreamTuple(cl.GetBranch())
if ((not settings.GetIsGitSvn()) and (remote == '.')):
print()
print(('Attempting to push branch %r into another local branch!' % current))
print()
print('Either reparent this branch on top of origin/master:')
print(' git reparent-branch --root')
print()
print('OR run `git rebase-update` if you think the parent branch is ')
print('already committed.')
print()
print((' Current parent: %r' % upstream_branch))
return 1
if ((not args) or (cmd == 'land')):
args = [cl.GetUpstreamBranch()]
if options.contributor:
if (not re.match('^.*\\s<\\S+@\\S+>$', options.contributor)):
print("Please provide contibutor as 'First Last <example@example.com>'")
return 1
base_branch = args[0]
base_has_submodules = IsSubmoduleMergeCommit(base_branch)
if git_common.is_dirty_git_tree(cmd):
return 1
upstream_commits = RunGit(['rev-list', ('^' + cl.GetBranchRef()), base_branch]).splitlines()
if upstream_commits:
print(('Base branch "%s" has %d commits not in this branch.' % (base_branch, len(upstream_commits))))
print(('Run "git merge %s" before attempting to %s.' % (base_branch, cmd)))
return 1
svn_head = None
if ((cmd == 'dcommit') or base_has_submodules):
svn_head = RunGit(['log', '--grep=^git-svn-id:', '-1', '--pretty=format:%H'])
if (cmd == 'dcommit'):
base_svn_head = base_branch
if base_has_submodules:
base_svn_head += '^1'
extra_commits = RunGit(['rev-list', ('^' + svn_head), base_svn_head])
if extra_commits:
print(('This branch has %d additional commits not upstreamed yet.' % len(extra_commits.splitlines())))
print(('Upstream "%s" or rebase this branch on top of the upstream trunk before attempting to %s.' % (base_branch, cmd)))
return 1
merge_base = RunGit(['merge-base', base_branch, 'HEAD']).strip()
if (not options.bypass_hooks):
author = None
if options.contributor:
author = re.search('\\<(.*)\\>', options.contributor).group(1)
hook_results = cl.RunHook(committing=True, may_prompt=(not options.force), verbose=options.verbose, change=cl.GetChange(merge_base, author))
if (not hook_results.should_continue()):
return 1
status = GetTreeStatus()
if ('closed' == status):
print(('The tree is closed. Please wait for it to reopen. Use "git cl %s --bypass-hooks" to commit on a closed tree.' % cmd))
return 1
elif ('unknown' == status):
print(('Unable to determine tree status. Please verify manually and use "git cl %s --bypass-hooks" to commit on a closed tree.' % cmd))
return 1
change_desc = ChangeDescription(options.message)
if ((not change_desc.description) and cl.GetIssue()):
change_desc = ChangeDescription(cl.GetDescription())
if (not change_desc.description):
if ((not cl.GetIssue()) and options.bypass_hooks):
change_desc = ChangeDescription(CreateDescriptionFromLog([merge_base]))
else:
print('No description set.')
print(('Visit %s/edit to set it.' % cl.GetIssueURL()))
return 1
if cl.GetIssue():
change_desc.update_reviewers(cl.GetApprovingReviewers())
commit_desc = ChangeDescription(change_desc.description)
if cl.GetIssue():
commit_desc.append_footer(('Review URL: %s .' % cl.GetIssueURL()))
if options.contributor:
commit_desc.append_footer(('Patch from %s.' % options.contributor))
print('Description:')
print(commit_desc.description)
branches = [merge_base, cl.GetBranchRef()]
if (not options.force):
print_stats(options.similarity, options.find_copies, branches)
MERGE_BRANCH = 'git-cl-commit'
CHERRY_PICK_BRANCH = 'git-cl-cherry-pick'
for branch in [MERGE_BRANCH, CHERRY_PICK_BRANCH]:
showref_cmd = ['show-ref', '--quiet', '--verify', ('refs/heads/%s' % branch)]
result = RunGitWithCode(showref_cmd)
if (result[0] == 0):
RunGit(['branch', '-D', branch])
rel_base_path = settings.GetRelativeRoot()
if rel_base_path:
os.chdir(rel_base_path)
retcode = (- 1)
pushed_to_pending = False
pending_ref = None
revision = None
try:
RunGit(['checkout', '-q', '-b', MERGE_BRANCH])
RunGit(['reset', '--soft', merge_base])
if options.contributor:
RunGit(['commit', '--author', options.contributor, '-m', commit_desc.description])
else:
RunGit(['commit', '-m', commit_desc.description])
if base_has_submodules:
cherry_pick_commit = RunGit(['rev-list', 'HEAD^!']).rstrip()
RunGit(['branch', CHERRY_PICK_BRANCH, svn_head])
RunGit(['checkout', CHERRY_PICK_BRANCH])
RunGit(['cherry-pick', cherry_pick_commit])
if (cmd == 'land'):
(remote, branch) = cl.FetchUpstreamTuple(cl.GetBranch())
mirror = settings.GetGitMirror(remote)
pushurl = (mirror.url if mirror else remote)
pending_prefix = settings.GetPendingRefPrefix()
if ((not pending_prefix) or branch.startswith(pending_prefix)):
(retcode, output) = RunGitWithCode(['push', '--porcelain', pushurl, ('HEAD:%s' % branch)])
pushed_to_pending = (pending_prefix and branch.startswith(pending_prefix))
else:
assert branch.startswith('refs/'), branch
assert (pending_prefix[(- 1)] == '/'), pending_prefix
pending_ref = (pending_prefix + branch[len('refs/'):])
(retcode, output) = PushToGitPending(pushurl, pending_ref, branch)
pushed_to_pending = (retcode == 0)
if (retcode == 0):
revision = RunGit(['rev-parse', 'HEAD']).strip()
else:
cmd_args = ['svn', 'dcommit', ('-C%s' % options.similarity), '--no-rebase', '--rmdir']
if settings.GetForceHttpsCommitUrl():
remote_url = cl.GetGitSvnRemoteUrl()
if (urlparse.urlparse(remote_url).scheme == 'http'):
remote_url = remote_url.replace('http://', 'https://')
cmd_args.append(('--commit-url=%s' % remote_url))
(_, output) = RunGitWithCode(cmd_args)
if ('Committed r' in output):
revision = re.match('.*?\nCommitted r(\\d+)', output, re.DOTALL).group(1)
logging.debug(output)
finally:
RunGit(['checkout', '-q', cl.GetBranch()])
RunGit(['branch', '-D', MERGE_BRANCH])
if base_has_submodules:
RunGit(['branch', '-D', CHERRY_PICK_BRANCH])
if (not revision):
print('Failed to push. If this persists, please file a bug.')
return 1
killed = False
if pushed_to_pending:
try:
revision = WaitForRealCommit(remote, revision, base_branch, branch)
pushed_to_pending = False
except KeyboardInterrupt:
killed = True
if cl.GetIssue():
to_pending = (' to pending queue' if pushed_to_pending else '')
viewvc_url = settings.GetViewVCUrl()
if (not to_pending):
if (viewvc_url and revision):
change_desc.append_footer(('Committed: %s%s' % (viewvc_url, revision)))
elif revision:
change_desc.append_footer(('Committed: %s' % (revision,)))
print('Closing issue (you may be prompted for your codereview password)...')
cl.UpdateDescription(change_desc.description)
cl.CloseIssue()
props = cl.GetIssueProperties()
patch_num = len(props['patchsets'])
comment = ('Committed patchset #%d (id:%d)%s manually as %s' % (patch_num, props['patchsets'][(- 1)], to_pending, revision))
if options.bypass_hooks:
comment += (' (tree was closed).' if (GetTreeStatus() == 'closed') else '.')
else:
comment += ' (presubmit successful).'
cl.RpcServer().add_comment(cl.GetIssue(), comment)
cl.SetIssue(None)
if pushed_to_pending:
(_, branch) = cl.FetchUpstreamTuple(cl.GetBranch())
print(('The commit is in the pending queue (%s).' % pending_ref))
print(('It will show up on %s in ~1 min, once it gets a Cr-Commit-Position footer.' % branch))
hook = (POSTUPSTREAM_HOOK_PATTERN % cmd)
if os.path.isfile(hook):
RunCommand([hook, merge_base], error_ok=True)
return (1 if killed else 0)
|
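As a reading aid for the SendUpstream record above, here is a deliberately simplified sketch of the control flow its docstring describes: Gerrit changes are submitted through the review service, everything else is squashed onto the merge base, annotated with review metadata, and pushed. The helper names (is_gerrit, submit_via_gerrit, run_git) are illustrative assumptions, not the real git_cl.py API.

def send_upstream(change, run_git):
    """Simplified land flow: Gerrit submit, or squash-and-push otherwise."""
    if change.get('is_gerrit'):
        # Gerrit "submit" pushes upstream and closes the issue atomically.
        return change['submit_via_gerrit']()
    # Rietveld-style landing: squash the branch onto the merge base.
    merge_base = run_git(['merge-base', change['base_branch'], 'HEAD']).strip()
    run_git(['checkout', '-q', '-b', 'git-cl-commit'])
    run_git(['reset', '--soft', merge_base])
    # Record a pointer back to the review in the commit message.
    message = change['description'] + '\n\nReview URL: %s\n' % change['issue_url']
    run_git(['commit', '-m', message])
    run_git(['push', change['remote'], 'HEAD:%s' % change['branch']])
    return 0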
def PushToGitPending(remote, pending_ref, upstream_ref):
'Fetches pending_ref, cherry-picks current HEAD on top of it, pushes.\n\n Returns:\n (retcode of last operation, output log of last operation).\n '
assert pending_ref.startswith('refs/'), pending_ref
local_pending_ref = ('refs/git-cl/' + pending_ref[len('refs/'):])
cherry = RunGit(['rev-parse', 'HEAD']).strip()
code = 0
out = ''
max_attempts = 3
attempts_left = max_attempts
while attempts_left:
if (attempts_left != max_attempts):
print(('Retrying, %d attempts left...' % ((attempts_left - 1),)))
attempts_left -= 1
print(('Fetching pending ref %s...' % pending_ref))
(code, out) = RunGitWithCode(['retry', 'fetch', remote, ('+%s:%s' % (pending_ref, local_pending_ref))])
if code:
print(('Fetch failed with exit code %d.' % code))
if out.strip():
print(out.strip())
continue
print('Cherry-picking commit on top of pending ref...')
RunGitWithCode(['checkout', local_pending_ref], suppress_stderr=True)
(code, out) = RunGitWithCode(['cherry-pick', cherry])
if code:
print(("Your patch doesn't apply cleanly to ref '%s', the following files have merge conflicts:" % pending_ref))
print(RunGit(['diff', '--name-status', '--diff-filter=U']).strip())
print('Please rebase your patch and try again.')
RunGitWithCode(['cherry-pick', '--abort'])
return (code, out)
print(('Pushing commit to %s... It can take a while.' % pending_ref))
(code, out) = RunGitWithCode(['retry', 'push', '--porcelain', remote, ('HEAD:%s' % pending_ref)])
if (code == 0):
print('Commit pushed to pending ref successfully!')
return (code, out)
print(('Push failed with exit code %d.' % code))
if out.strip():
print(out.strip())
if IsFatalPushFailure(out):
print('Fatal push error. Make sure your .netrc credentials and git user.email are correct and you have push access to the repo.')
return (code, out)
print('All attempts to push to pending ref failed.')
return (code, out)
| 6,304,055,351,849,430,000
|
Fetches pending_ref, cherry-picks current HEAD on top of it, pushes.
Returns:
(retcode of last operation, output log of last operation).
|
git_cl.py
|
PushToGitPending
|
wuyong2k/chromium_depot_tool
|
python
|
def PushToGitPending(remote, pending_ref, upstream_ref):
'Fetches pending_ref, cherry-picks current HEAD on top of it, pushes.\n\n Returns:\n (retcode of last operation, output log of last operation).\n '
assert pending_ref.startswith('refs/'), pending_ref
local_pending_ref = ('refs/git-cl/' + pending_ref[len('refs/'):])
cherry = RunGit(['rev-parse', 'HEAD']).strip()
code = 0
out = ''
max_attempts = 3
attempts_left = max_attempts
while attempts_left:
if (attempts_left != max_attempts):
print(('Retrying, %d attempts left...' % ((attempts_left - 1),)))
attempts_left -= 1
print(('Fetching pending ref %s...' % pending_ref))
(code, out) = RunGitWithCode(['retry', 'fetch', remote, ('+%s:%s' % (pending_ref, local_pending_ref))])
if code:
print(('Fetch failed with exit code %d.' % code))
if out.strip():
print(out.strip())
continue
print('Cherry-picking commit on top of pending ref...')
RunGitWithCode(['checkout', local_pending_ref], suppress_stderr=True)
(code, out) = RunGitWithCode(['cherry-pick', cherry])
if code:
print(("Your patch doesn't apply cleanly to ref '%s', the following files have merge conflicts:" % pending_ref))
print(RunGit(['diff', '--name-status', '--diff-filter=U']).strip())
print('Please rebase your patch and try again.')
RunGitWithCode(['cherry-pick', '--abort'])
return (code, out)
print(('Pushing commit to %s... It can take a while.' % pending_ref))
(code, out) = RunGitWithCode(['retry', 'push', '--porcelain', remote, ('HEAD:%s' % pending_ref)])
if (code == 0):
print('Commit pushed to pending ref successfully!')
return (code, out)
print(('Push failed with exit code %d.' % code))
if out.strip():
print(out.strip())
if IsFatalPushFailure(out):
print('Fatal push error. Make sure your .netrc credentials and git user.email are correct and you have push access to the repo.')
return (code, out)
print('All attempts to push to pending ref failed.')
return (code, out)
|
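The PushToGitPending record above boils down to a bounded retry loop: fetch the pending ref, cherry-pick the current HEAD onto it, push, and retry transient failures up to three times. A minimal sketch follows; it assumes a run_git callable returning an (exit_code, output) pair, which is an assumption for illustration rather than the module's real helper.

def push_to_pending(run_git, remote, pending_ref, max_attempts=3):
    """Fetch pending_ref, cherry-pick HEAD onto it, push; retry transient failures."""
    local_ref = 'refs/git-cl/' + pending_ref[len('refs/'):]
    cherry = run_git(['rev-parse', 'HEAD'])[1].strip()
    code, out = 0, ''
    for _ in range(max_attempts):
        code, out = run_git(['fetch', remote, '+%s:%s' % (pending_ref, local_ref)])
        if code:
            continue  # transient fetch failure: try again
        run_git(['checkout', local_ref])
        code, out = run_git(['cherry-pick', cherry])
        if code:
            run_git(['cherry-pick', '--abort'])
            return code, out  # merge conflict: the caller has to rebase first
        code, out = run_git(['push', '--porcelain', remote, 'HEAD:%s' % pending_ref])
        if code == 0 or '(prohibited by Gerrit)' in out:
            return code, out  # success, or a fatal error that retrying cannot fix
    return code, out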
def IsFatalPushFailure(push_stdout):
"True if retrying push won't help."
return ('(prohibited by Gerrit)' in push_stdout)
| 908,085,907,017,846,900
|
True if retrying push won't help.
|
git_cl.py
|
IsFatalPushFailure
|
wuyong2k/chromium_depot_tool
|
python
|
def IsFatalPushFailure(push_stdout):
return ('(prohibited by Gerrit)' in push_stdout)
|