bugged
stringlengths
4
228k
fixed
stringlengths
0
96.3M
__index_level_0__
int64
0
481k
def inline_as_py(values, bins=10, range=None): # define bins, size N if (range is not None): mn, mx = range if (mn > mx): raise AttributeError( 'max must be larger than min in range parameter.') if not np.iterable(bins): if range is None: range = (values.min(), values.max()) mn, mx = [mi+0.0 for mi in range] if mn == mx: mn -= 0.5 mx += 0.5 bins = np.linspace(mn, mx, bins+1, endpoint=True) else: bins = np.asarray(bins) if (np.diff(bins) < 0).any(): raise AttributeError( 'bins must increase monotonically.') # define n, empty array of size N+1 count = np.zeros(bins.size - 1, int) nvalues = values.size nbins = bins.size if values.size == 0: raise AttributeError( 'a must contain some data') if values[-1] < bins[0]: raise AttributeError( 'last element of a must be smaller than first element of bins') if (values[0] > bins[0]): rb = 0; else: lb = 0; rb = nvalues + 1; while(lb < rb - 1): if (values[(lb + rb) / 2.] < bins[0]): lb = (lb + rb) / 2. else: rb = (lb + rb) / 2. # Sweep through the values, counting, until they get too big lb = 0; valid = (rb < nvalues) if valid: valid = valid & (values[rb] < bins[nbins - 1]) while valid: # Advance the edge caret until the current value is in the current bin while (bins[lb+1] < values[rb]): lb += 1 # Increment the current bin count[lb] += 1 # Increment the value caret rb += 1 valid = (rb < nvalues) if valid: valid = valid & (values[rb] < bins[nbins - 1]) return count, bins
def inline_as_py(values, bins=10, range=None): # define bins, size N if (range is not None): mn, mx = range if (mn > mx): raise AttributeError( 'max must be larger than min in range parameter.') if not np.iterable(bins): if range is None: range = (values.min(), values.max()) mn, mx = [mi+0.0 for mi in range] if mn == mx: mn -= 0.5 mx += 0.5 bins = np.linspace(mn, mx, bins+1, endpoint=True) else: bins = np.asarray(bins) if (np.diff(bins) < 0).any(): raise AttributeError( 'bins must increase monotonically.') # define n, empty array of size N+1 count = np.zeros(bins.size - 1, int) nvalues = values.size nbins = bins.size if values.size == 0: raise AttributeError( 'a must contain some data') if values[-1] < bins[0]: raise AttributeError( 'last element of a must be smaller than first element of bins') if (values[0] >= bins[0]): rb = 0; else: lb = 0; rb = nvalues + 1; while(lb < rb - 1): if (values[(lb + rb) / 2.] < bins[0]): lb = (lb + rb) / 2. else: rb = (lb + rb) / 2. # Sweep through the values, counting, until they get too big lb = 0; valid = (rb < nvalues) if valid: valid = valid & (values[rb] < bins[nbins - 1]) while valid: # Advance the edge caret until the current value is in the current bin while (bins[lb+1] < values[rb]): lb += 1 # Increment the current bin count[lb] += 1 # Increment the value caret rb += 1 valid = (rb < nvalues) if valid: valid = valid & (values[rb] < bins[nbins - 1]) return count, bins
475,200
def inline_as_py(values, bins=10, range=None): # define bins, size N if (range is not None): mn, mx = range if (mn > mx): raise AttributeError( 'max must be larger than min in range parameter.') if not np.iterable(bins): if range is None: range = (values.min(), values.max()) mn, mx = [mi+0.0 for mi in range] if mn == mx: mn -= 0.5 mx += 0.5 bins = np.linspace(mn, mx, bins+1, endpoint=True) else: bins = np.asarray(bins) if (np.diff(bins) < 0).any(): raise AttributeError( 'bins must increase monotonically.') # define n, empty array of size N+1 count = np.zeros(bins.size - 1, int) nvalues = values.size nbins = bins.size if values.size == 0: raise AttributeError( 'a must contain some data') if values[-1] < bins[0]: raise AttributeError( 'last element of a must be smaller than first element of bins') if (values[0] > bins[0]): rb = 0; else: lb = 0; rb = nvalues + 1; while(lb < rb - 1): if (values[(lb + rb) / 2.] < bins[0]): lb = (lb + rb) / 2. else: rb = (lb + rb) / 2. # Sweep through the values, counting, until they get too big lb = 0; valid = (rb < nvalues) if valid: valid = valid & (values[rb] < bins[nbins - 1]) while valid: # Advance the edge caret until the current value is in the current bin while (bins[lb+1] < values[rb]): lb += 1 # Increment the current bin count[lb] += 1 # Increment the value caret rb += 1 valid = (rb < nvalues) if valid: valid = valid & (values[rb] < bins[nbins - 1]) return count, bins
def inline_as_py(values, bins=10, range=None): # define bins, size N if (range is not None): mn, mx = range if (mn > mx): raise AttributeError( 'max must be larger than min in range parameter.') if not np.iterable(bins): if range is None: range = (values.min(), values.max()) mn, mx = [mi+0.0 for mi in range] if mn == mx: mn -= 0.5 mx += 0.5 bins = np.linspace(mn, mx, bins+1, endpoint=True) else: bins = np.asarray(bins) if (np.diff(bins) < 0).any(): raise AttributeError( 'bins must increase monotonically.') # define n, empty array of size N+1 count = np.zeros(bins.size - 1, int) nvalues = values.size nbins = bins.size if values.size == 0: raise AttributeError( 'a must contain some data') if values[-1] < bins[0]: raise AttributeError( 'last element of a must be smaller than first element of bins') if (values[0] > bins[0]): rb = 0; else: lb = 0; rb = nvalues + 1; while(lb < rb - 1): if (values[(lb + rb) / 2.] < bins[0]): lb = (lb + rb) / 2. else: rb = (lb + rb) / 2. # Sweep through the values, counting, until they get too big lb = 0; valid = (rb < nvalues) if valid: valid = valid & (values[rb] < bins[nbins-1]) while valid: # Advance the edge caret until the current value is in the current bin while (bins[lb+1] < values[rb]): lb += 1 # Increment the current bin count[lb] += 1 # Increment the value caret rb += 1 valid = (rb < nvalues) if valid: valid = valid & (values[rb] < bins[nbins-1]) return count, bins
475,201
def inline_as_py(values, bins=10, range=None): # define bins, size N if (range is not None): mn, mx = range if (mn > mx): raise AttributeError( 'max must be larger than min in range parameter.') if not np.iterable(bins): if range is None: range = (values.min(), values.max()) mn, mx = [mi+0.0 for mi in range] if mn == mx: mn -= 0.5 mx += 0.5 bins = np.linspace(mn, mx, bins+1, endpoint=True) else: bins = np.asarray(bins) if (np.diff(bins) < 0).any(): raise AttributeError( 'bins must increase monotonically.') # define n, empty array of size N+1 count = np.zeros(bins.size - 1, int) nvalues = values.size nbins = bins.size if values.size == 0: raise AttributeError( 'a must contain some data') if values[-1] < bins[0]: raise AttributeError( 'last element of a must be smaller than first element of bins') if (values[0] > bins[0]): rb = 0; else: lb = 0; rb = nvalues + 1; while(lb < rb - 1): if (values[(lb + rb) / 2.] < bins[0]): lb = (lb + rb) / 2. else: rb = (lb + rb) / 2. # Sweep through the values, counting, until they get too big lb = 0; valid = (rb < nvalues) if valid: valid = valid & (values[rb] < bins[nbins - 1]) while valid: # Advance the edge caret until the current value is in the current bin while (bins[lb+1] < values[rb]): lb += 1 # Increment the current bin count[lb] += 1 # Increment the value caret rb += 1 valid = (rb < nvalues) if valid: valid = valid & (values[rb] < bins[nbins - 1]) return count, bins
def inline_as_py(values, bins=10, range=None): # define bins, size N if (range is not None): mn, mx = range if (mn > mx): raise AttributeError( 'max must be larger than min in range parameter.') if not np.iterable(bins): if range is None: range = (values.min(), values.max()) mn, mx = [mi+0.0 for mi in range] if mn == mx: mn -= 0.5 mx += 0.5 bins = np.linspace(mn, mx, bins+1, endpoint=True) else: bins = np.asarray(bins) if (np.diff(bins) < 0).any(): raise AttributeError( 'bins must increase monotonically.') # define n, empty array of size N+1 count = np.zeros(bins.size - 1, int) nvalues = values.size nbins = bins.size if values.size == 0: raise AttributeError( 'a must contain some data') if values[-1] < bins[0]: raise AttributeError( 'last element of a must be smaller than first element of bins') if (values[0] > bins[0]): rb = 0; else: lb = 0; rb = nvalues + 1; while(lb < rb - 1): if (values[(lb + rb) / 2.] < bins[0]): lb = (lb + rb) / 2. else: rb = (lb + rb) / 2. # Sweep through the values, counting, until they get too big lb = 0; valid = (rb < nvalues) if valid: valid = valid & (values[rb] < bins[nbins - 1]) while valid: # Advance the edge caret until the current value is in the current bin while (bins[lb+1] <= values[rb]): lb += 1 # Increment the current bin count[lb] += 1 # Increment the value caret rb += 1 valid = (rb < nvalues) if valid: valid = valid & (values[rb] < bins[nbins - 1]) return count, bins
475,202
def inline_as_py(values, bins=10, range=None): # define bins, size N if (range is not None): mn, mx = range if (mn > mx): raise AttributeError( 'max must be larger than min in range parameter.') if not np.iterable(bins): if range is None: range = (values.min(), values.max()) mn, mx = [mi+0.0 for mi in range] if mn == mx: mn -= 0.5 mx += 0.5 bins = np.linspace(mn, mx, bins+1, endpoint=True) else: bins = np.asarray(bins) if (np.diff(bins) < 0).any(): raise AttributeError( 'bins must increase monotonically.') # define n, empty array of size N+1 count = np.zeros(bins.size - 1, int) nvalues = values.size nbins = bins.size if values.size == 0: raise AttributeError( 'a must contain some data') if values[-1] < bins[0]: raise AttributeError( 'last element of a must be smaller than first element of bins') if (values[0] > bins[0]): rb = 0; else: lb = 0; rb = nvalues + 1; while(lb < rb - 1): if (values[(lb + rb) / 2.] < bins[0]): lb = (lb + rb) / 2. else: rb = (lb + rb) / 2. # Sweep through the values, counting, until they get too big lb = 0; valid = (rb < nvalues) if valid: valid = valid & (values[rb] < bins[nbins - 1]) while valid: # Advance the edge caret until the current value is in the current bin while (bins[lb+1] < values[rb]): lb += 1 # Increment the current bin count[lb] += 1 # Increment the value caret rb += 1 valid = (rb < nvalues) if valid: valid = valid & (values[rb] < bins[nbins - 1]) return count, bins
defif bins[-1] == values[rb]: count[-1] += 1 inline_as_py(values,if bins[-1] == values[rb]: count[-1] += 1 bins=10,if bins[-1] == values[rb]: count[-1] += 1 range=None):if bins[-1] == values[rb]: count[-1] += 1 #if bins[-1] == values[rb]: count[-1] += 1 defineif bins[-1] == values[rb]: count[-1] += 1 bins,if bins[-1] == values[rb]: count[-1] += 1 sizeif bins[-1] == values[rb]: count[-1] += 1 Nif bins[-1] == values[rb]: count[-1] += 1 ifif bins[-1] == values[rb]: count[-1] += 1 (rangeif bins[-1] == values[rb]: count[-1] += 1 isif bins[-1] == values[rb]: count[-1] += 1 notif bins[-1] == values[rb]: count[-1] += 1 None):if bins[-1] == values[rb]: count[-1] += 1 mn,if bins[-1] == values[rb]: count[-1] += 1 mxif bins[-1] == values[rb]: count[-1] += 1 =if bins[-1] == values[rb]: count[-1] += 1 rangeif bins[-1] == values[rb]: count[-1] += 1 ifif bins[-1] == values[rb]: count[-1] += 1 (mnif bins[-1] == values[rb]: count[-1] += 1 >if bins[-1] == values[rb]: count[-1] += 1 mx):if bins[-1] == values[rb]: count[-1] += 1 raiseif bins[-1] == values[rb]: count[-1] += 1 AttributeError(if bins[-1] == values[rb]: count[-1] += 1 'maxif bins[-1] == values[rb]: count[-1] += 1 mustif bins[-1] == values[rb]: count[-1] += 1 beif bins[-1] == values[rb]: count[-1] += 1 largerif bins[-1] == values[rb]: count[-1] += 1 thanif bins[-1] == values[rb]: count[-1] += 1 minif bins[-1] == values[rb]: count[-1] += 1 inif bins[-1] == values[rb]: count[-1] += 1 rangeif bins[-1] == values[rb]: count[-1] += 1 parameter.')if bins[-1] == values[rb]: count[-1] += 1 if bins[-1] == values[rb]: count[-1] += 1 ifif bins[-1] == values[rb]: count[-1] += 1 notif bins[-1] == values[rb]: count[-1] += 1 np.iterable(bins):if bins[-1] == values[rb]: count[-1] += 1 ifif bins[-1] == values[rb]: count[-1] += 1 rangeif bins[-1] == values[rb]: count[-1] += 1 isif bins[-1] == values[rb]: count[-1] += 1 None:if bins[-1] == values[rb]: count[-1] += 1 rangeif bins[-1] == values[rb]: count[-1] += 1 =if bins[-1] == values[rb]: 
count[-1] += 1 (values.min(),if bins[-1] == values[rb]: count[-1] += 1 values.max())if bins[-1] == values[rb]: count[-1] += 1 mn,if bins[-1] == values[rb]: count[-1] += 1 mxif bins[-1] == values[rb]: count[-1] += 1 =if bins[-1] == values[rb]: count[-1] += 1 [mi+0.0if bins[-1] == values[rb]: count[-1] += 1 forif bins[-1] == values[rb]: count[-1] += 1 miif bins[-1] == values[rb]: count[-1] += 1 inif bins[-1] == values[rb]: count[-1] += 1 range]if bins[-1] == values[rb]: count[-1] += 1 ifif bins[-1] == values[rb]: count[-1] += 1 mnif bins[-1] == values[rb]: count[-1] += 1 ==if bins[-1] == values[rb]: count[-1] += 1 mx:if bins[-1] == values[rb]: count[-1] += 1 mnif bins[-1] == values[rb]: count[-1] += 1 -=if bins[-1] == values[rb]: count[-1] += 1 0.5if bins[-1] == values[rb]: count[-1] += 1 mxif bins[-1] == values[rb]: count[-1] += 1 +=if bins[-1] == values[rb]: count[-1] += 1 0.5if bins[-1] == values[rb]: count[-1] += 1 binsif bins[-1] == values[rb]: count[-1] += 1 =if bins[-1] == values[rb]: count[-1] += 1 np.linspace(mn,if bins[-1] == values[rb]: count[-1] += 1 mx,if bins[-1] == values[rb]: count[-1] += 1 bins+1,if bins[-1] == values[rb]: count[-1] += 1 endpoint=True)if bins[-1] == values[rb]: count[-1] += 1 else:if bins[-1] == values[rb]: count[-1] += 1 binsif bins[-1] == values[rb]: count[-1] += 1 =if bins[-1] == values[rb]: count[-1] += 1 np.asarray(bins)if bins[-1] == values[rb]: count[-1] += 1 ifif bins[-1] == values[rb]: count[-1] += 1 (np.diff(bins)if bins[-1] == values[rb]: count[-1] += 1 <if bins[-1] == values[rb]: count[-1] += 1 0).any():if bins[-1] == values[rb]: count[-1] += 1 raiseif bins[-1] == values[rb]: count[-1] += 1 AttributeError(if bins[-1] == values[rb]: count[-1] += 1 'binsif bins[-1] == values[rb]: count[-1] += 1 mustif bins[-1] == values[rb]: count[-1] += 1 increaseif bins[-1] == values[rb]: count[-1] += 1 monotonically.')if bins[-1] == values[rb]: count[-1] += 1 if bins[-1] == values[rb]: count[-1] += 1 #if bins[-1] == values[rb]: count[-1] 
+= 1 defineif bins[-1] == values[rb]: count[-1] += 1 n,if bins[-1] == values[rb]: count[-1] += 1 emptyif bins[-1] == values[rb]: count[-1] += 1 arrayif bins[-1] == values[rb]: count[-1] += 1 ofif bins[-1] == values[rb]: count[-1] += 1 sizeif bins[-1] == values[rb]: count[-1] += 1 N+1if bins[-1] == values[rb]: count[-1] += 1 countif bins[-1] == values[rb]: count[-1] += 1 =if bins[-1] == values[rb]: count[-1] += 1 np.zeros(bins.sizeif bins[-1] == values[rb]: count[-1] += 1 -if bins[-1] == values[rb]: count[-1] += 1 1,if bins[-1] == values[rb]: count[-1] += 1 int)if bins[-1] == values[rb]: count[-1] += 1 nvaluesif bins[-1] == values[rb]: count[-1] += 1 =if bins[-1] == values[rb]: count[-1] += 1 values.sizeif bins[-1] == values[rb]: count[-1] += 1 nbinsif bins[-1] == values[rb]: count[-1] += 1 =if bins[-1] == values[rb]: count[-1] += 1 bins.sizeif bins[-1] == values[rb]: count[-1] += 1 if bins[-1] == values[rb]: count[-1] += 1 ifif bins[-1] == values[rb]: count[-1] += 1 values.sizeif bins[-1] == values[rb]: count[-1] += 1 ==if bins[-1] == values[rb]: count[-1] += 1 0:if bins[-1] == values[rb]: count[-1] += 1 raiseif bins[-1] == values[rb]: count[-1] += 1 AttributeError(if bins[-1] == values[rb]: count[-1] += 1 'aif bins[-1] == values[rb]: count[-1] += 1 mustif bins[-1] == values[rb]: count[-1] += 1 containif bins[-1] == values[rb]: count[-1] += 1 someif bins[-1] == values[rb]: count[-1] += 1 data')if bins[-1] == values[rb]: count[-1] += 1 if bins[-1] == values[rb]: count[-1] += 1 ifif bins[-1] == values[rb]: count[-1] += 1 values[-1]if bins[-1] == values[rb]: count[-1] += 1 <if bins[-1] == values[rb]: count[-1] += 1 bins[0]:if bins[-1] == values[rb]: count[-1] += 1 raiseif bins[-1] == values[rb]: count[-1] += 1 AttributeError(if bins[-1] == values[rb]: count[-1] += 1 'lastif bins[-1] == values[rb]: count[-1] += 1 elementif bins[-1] == values[rb]: count[-1] += 1 ofif bins[-1] == values[rb]: count[-1] += 1 aif bins[-1] == values[rb]: count[-1] += 1 mustif bins[-1] == 
values[rb]: count[-1] += 1 beif bins[-1] == values[rb]: count[-1] += 1 smallerif bins[-1] == values[rb]: count[-1] += 1 thanif bins[-1] == values[rb]: count[-1] += 1 firstif bins[-1] == values[rb]: count[-1] += 1 elementif bins[-1] == values[rb]: count[-1] += 1 ofif bins[-1] == values[rb]: count[-1] += 1 bins')if bins[-1] == values[rb]: count[-1] += 1 if bins[-1] == values[rb]: count[-1] += 1 ifif bins[-1] == values[rb]: count[-1] += 1 (values[0]if bins[-1] == values[rb]: count[-1] += 1 >if bins[-1] == values[rb]: count[-1] += 1 bins[0]):if bins[-1] == values[rb]: count[-1] += 1 rbif bins[-1] == values[rb]: count[-1] += 1 =if bins[-1] == values[rb]: count[-1] += 1 0;if bins[-1] == values[rb]: count[-1] += 1 else:if bins[-1] == values[rb]: count[-1] += 1 lbif bins[-1] == values[rb]: count[-1] += 1 =if bins[-1] == values[rb]: count[-1] += 1 0;if bins[-1] == values[rb]: count[-1] += 1 rbif bins[-1] == values[rb]: count[-1] += 1 =if bins[-1] == values[rb]: count[-1] += 1 nvaluesif bins[-1] == values[rb]: count[-1] += 1 +if bins[-1] == values[rb]: count[-1] += 1 1;if bins[-1] == values[rb]: count[-1] += 1 while(lbif bins[-1] == values[rb]: count[-1] += 1 <if bins[-1] == values[rb]: count[-1] += 1 rbif bins[-1] == values[rb]: count[-1] += 1 -if bins[-1] == values[rb]: count[-1] += 1 1):if bins[-1] == values[rb]: count[-1] += 1 ifif bins[-1] == values[rb]: count[-1] += 1 (values[(lbif bins[-1] == values[rb]: count[-1] += 1 +if bins[-1] == values[rb]: count[-1] += 1 rb)if bins[-1] == values[rb]: count[-1] += 1 /if bins[-1] == values[rb]: count[-1] += 1 2.]if bins[-1] == values[rb]: count[-1] += 1 <if bins[-1] == values[rb]: count[-1] += 1 bins[0]):if bins[-1] == values[rb]: count[-1] += 1 lbif bins[-1] == values[rb]: count[-1] += 1 =if bins[-1] == values[rb]: count[-1] += 1 (lbif bins[-1] == values[rb]: count[-1] += 1 +if bins[-1] == values[rb]: count[-1] += 1 rb)if bins[-1] == values[rb]: count[-1] += 1 /if bins[-1] == values[rb]: count[-1] += 1 2.if bins[-1] == 
values[rb]: count[-1] += 1 else:if bins[-1] == values[rb]: count[-1] += 1 rbif bins[-1] == values[rb]: count[-1] += 1 =if bins[-1] == values[rb]: count[-1] += 1 (lbif bins[-1] == values[rb]: count[-1] += 1 +if bins[-1] == values[rb]: count[-1] += 1 rb)if bins[-1] == values[rb]: count[-1] += 1 /if bins[-1] == values[rb]: count[-1] += 1 2.if bins[-1] == values[rb]: count[-1] += 1 if bins[-1] == values[rb]: count[-1] += 1 #if bins[-1] == values[rb]: count[-1] += 1 Sweepif bins[-1] == values[rb]: count[-1] += 1 throughif bins[-1] == values[rb]: count[-1] += 1 theif bins[-1] == values[rb]: count[-1] += 1 values,if bins[-1] == values[rb]: count[-1] += 1 counting,if bins[-1] == values[rb]: count[-1] += 1 untilif bins[-1] == values[rb]: count[-1] += 1 theyif bins[-1] == values[rb]: count[-1] += 1 getif bins[-1] == values[rb]: count[-1] += 1 tooif bins[-1] == values[rb]: count[-1] += 1 bigif bins[-1] == values[rb]: count[-1] += 1 lbif bins[-1] == values[rb]: count[-1] += 1 =if bins[-1] == values[rb]: count[-1] += 1 0;if bins[-1] == values[rb]: count[-1] += 1 validif bins[-1] == values[rb]: count[-1] += 1 =if bins[-1] == values[rb]: count[-1] += 1 (rbif bins[-1] == values[rb]: count[-1] += 1 <if bins[-1] == values[rb]: count[-1] += 1 nvalues)if bins[-1] == values[rb]: count[-1] += 1 ifif bins[-1] == values[rb]: count[-1] += 1 valid:if bins[-1] == values[rb]: count[-1] += 1 validif bins[-1] == values[rb]: count[-1] += 1 =if bins[-1] == values[rb]: count[-1] += 1 validif bins[-1] == values[rb]: count[-1] += 1 &if bins[-1] == values[rb]: count[-1] += 1 (values[rb]if bins[-1] == values[rb]: count[-1] += 1 <if bins[-1] == values[rb]: count[-1] += 1 bins[nbinsif bins[-1] == values[rb]: count[-1] += 1 -if bins[-1] == values[rb]: count[-1] += 1 1])if bins[-1] == values[rb]: count[-1] += 1 whileif bins[-1] == values[rb]: count[-1] += 1 valid:if bins[-1] == values[rb]: count[-1] += 1 #if bins[-1] == values[rb]: count[-1] += 1 Advanceif bins[-1] == values[rb]: count[-1] += 1 theif 
bins[-1] == values[rb]: count[-1] += 1 edgeif bins[-1] == values[rb]: count[-1] += 1 caretif bins[-1] == values[rb]: count[-1] += 1 untilif bins[-1] == values[rb]: count[-1] += 1 theif bins[-1] == values[rb]: count[-1] += 1 currentif bins[-1] == values[rb]: count[-1] += 1 valueif bins[-1] == values[rb]: count[-1] += 1 isif bins[-1] == values[rb]: count[-1] += 1 inif bins[-1] == values[rb]: count[-1] += 1 theif bins[-1] == values[rb]: count[-1] += 1 currentif bins[-1] == values[rb]: count[-1] += 1 binif bins[-1] == values[rb]: count[-1] += 1 whileif bins[-1] == values[rb]: count[-1] += 1 (bins[lb+1]if bins[-1] == values[rb]: count[-1] += 1 <if bins[-1] == values[rb]: count[-1] += 1 values[rb]):if bins[-1] == values[rb]: count[-1] += 1 lbif bins[-1] == values[rb]: count[-1] += 1 +=if bins[-1] == values[rb]: count[-1] += 1 1if bins[-1] == values[rb]: count[-1] += 1 #if bins[-1] == values[rb]: count[-1] += 1 Incrementif bins[-1] == values[rb]: count[-1] += 1 theif bins[-1] == values[rb]: count[-1] += 1 currentif bins[-1] == values[rb]: count[-1] += 1 binif bins[-1] == values[rb]: count[-1] += 1 count[lb]if bins[-1] == values[rb]: count[-1] += 1 +=if bins[-1] == values[rb]: count[-1] += 1 1if bins[-1] == values[rb]: count[-1] += 1 #if bins[-1] == values[rb]: count[-1] += 1 Incrementif bins[-1] == values[rb]: count[-1] += 1 theif bins[-1] == values[rb]: count[-1] += 1 valueif bins[-1] == values[rb]: count[-1] += 1 caretif bins[-1] == values[rb]: count[-1] += 1 rbif bins[-1] == values[rb]: count[-1] += 1 +=if bins[-1] == values[rb]: count[-1] += 1 1if bins[-1] == values[rb]: count[-1] += 1 validif bins[-1] == values[rb]: count[-1] += 1 =if bins[-1] == values[rb]: count[-1] += 1 (rbif bins[-1] == values[rb]: count[-1] += 1 <if bins[-1] == values[rb]: count[-1] += 1 nvalues)if bins[-1] == values[rb]: count[-1] += 1 ifif bins[-1] == values[rb]: count[-1] += 1 valid:if bins[-1] == values[rb]: count[-1] += 1 validif bins[-1] == values[rb]: count[-1] += 1 =if bins[-1] == 
values[rb]: count[-1] += 1 validif bins[-1] == values[rb]: count[-1] += 1 &if bins[-1] == values[rb]: count[-1] += 1 (values[rb]if bins[-1] == values[rb]: count[-1] += 1 <if bins[-1] == values[rb]: count[-1] += 1 bins[nbinsif bins[-1] == values[rb]: count[-1] += 1 -if bins[-1] == values[rb]: count[-1] += 1 1])if bins[-1] == values[rb]: count[-1] += 1 if bins[-1] == values[rb]: count[-1] += 1 returnif bins[-1] == values[rb]: count[-1] += 1 count,if bins[-1] == values[rb]: count[-1] += 1 binsif bins[-1] == values[rb]: count[-1] += 1
475,203
def make_today_dir(project='tuning_change'): outputs = project_dirs[project] today = outputs + '/' + time.strftime('%y%m%d') if not os.path.exists(today): os.mkdir(today) return today
def make_today_dir(project='tuning_change'): outputs = project_dirs[project] + '/outputs' today = outputs + '/' + time.strftime('%y%m%d') if not os.path.exists(today): os.mkdir(today) return today
475,204
def GrepForActions(path, actions): """Grep a source file for calls to UserMetrics functions. Arguments: path: path to the file actions: set of actions to add to """ global number_of_files_total number_of_files_total = number_of_files_total + 1 # we look for the UserMetricsAction structur constructor # this should be on one line action_re = re.compile(r'UserMetricsAction\("([^"]*)') computed_action_re = re.compile(r'UserMetrics::RecordComputedAction') line_number = 0 for line in open(path): line_number = line_number + 1 match = action_re.search(line) if match: # Plain call to RecordAction actions.add(match.group(1)) elif computed_action_re.search(line): # Warn if this file shouldn't be calling RecordComputedAction. if os.path.basename(path) not in KNOWN_COMPUTED_USERS: print >>sys.stderr, 'WARNING: {0} has RecordComputedAction at {1}'.\ format(path, line_number)
def GrepForActions(path, actions): """Grep a source file for calls to UserMetrics functions. Arguments: path: path to the file actions: set of actions to add to """ global number_of_files_total number_of_files_total = number_of_files_total + 1 # we look for the UserMetricsAction structur constructor # this should be on one line action_re = re.compile(r'UserMetricsAction\("([^"]*)') computed_action_re = re.compile(r'UserMetrics::RecordComputedAction') line_number = 0 for line in open(path): line_number = line_number + 1 match = action_re.search(line) if match: # Plain call to RecordAction actions.add(match.group(1)) elif computed_action_re.search(line): # Warn if this file shouldn't be calling RecordComputedAction. if os.path.basename(path) not in KNOWN_COMPUTED_USERS: print >>sys.stderr, 'WARNING: {0} has RecordComputedAction at {1}'.\ format(path, line_number)
475,205
def ProcessPolicy(self, msg): """Handles a policy request.
def ProcessPolicy(self, msg): """Handles a policy request.
475,206
def CheckToken(self): """Helper for checking whether the client supplied a valid DM token.
defdmtoken = None CheckToken(self):dmtoken = None """Helperdmtoken = None fordmtoken = None checkingdmtoken = None whetherdmtoken = None thedmtoken = None clientdmtoken = None supplieddmtoken = None admtoken = None validdmtoken = None DMdmtoken = None token.
475,207
def CheckToken(self): """Helper for checking whether the client supplied a valid DM token.
def CheckToken(self): """Helper for checking whether the client supplied a valid DM token.
475,208
def WriteGLES2ImplementationDeclaration(self, func, file): """Writes the GLES2 Implemention declaration.""" file.Write("%s %s(%s);\n" % (func.return_type, func.original_name, func.MakeTypedOriginalArgString(""))) file.Write("\n")
def WriteGLES2ImplementationDeclaration(self, func, file): """Writes the GLES2 Implemention declaration.""" file.Write("%s %s(%s);\n" % (func.return_type, func.original_name, func.MakeTypedOriginalArgString(""))) file.Write("\n")
475,209
def WriteGLES2ImplementationHeader(self, func, file): """Writes the GLES2 Implemention.""" impl_func = func.GetInfo('impl_func') if func.can_auto_generate and (impl_func == None or impl_func == True): file.Write("%s %s(%s) {\n" % (func.return_type, func.original_name, func.MakeTypedOriginalArgString(""))) for arg in func.GetOriginalArgs(): arg.WriteClientSideValidationCode(file) file.Write(" helper_->%s(%s);\n" % (func.name, func.MakeOriginalArgString(""))) file.Write("}\n") file.Write("\n") else: self.WriteGLES2ImplementationDeclaration(func, file)
def WriteGLES2ImplementationHeader(self, func, file): """Writes the GLES2 Implemention.""" impl_func = func.GetInfo('impl_func') impl_decl = func.GetInfo('impl_decl') if (func.can_auto_generate and (impl_func == None or impl_func == True) and (impl_decl == None or impl_decl == True)): file.Write("%s %s(%s) {\n" % (func.return_type, func.original_name, func.MakeTypedOriginalArgString(""))) for arg in func.GetOriginalArgs(): arg.WriteClientSideValidationCode(file) file.Write(" helper_->%s(%s);\n" % (func.name, func.MakeOriginalArgString(""))) file.Write("}\n") file.Write("\n") else: self.WriteGLES2ImplementationDeclaration(func, file)
475,210
def WriteGLES2ImplementationHeader(self, func, file): """Overrriden from TypeHandler.""" file.Write("%s %s(%s) {\n" % (func.return_type, func.original_name, func.MakeTypedOriginalArgString(""))) file.Write(" FreeIds(%s);\n" % func.MakeOriginalArgString("")) file.Write(" helper_->%sImmediate(%s);\n" % (func.name, func.MakeOriginalArgString(""))) file.Write("}\n") file.Write("\n")
def WriteGLES2ImplementationHeader(self, func, file): """Overrriden from TypeHandler.""" file.Write("%s %s(%s) {\n" % (func.return_type, func.original_name, func.MakeTypedOriginalArgString(""))) file.Write(" FreeIds(%s);\n" % func.MakeOriginalArgString("")) file.Write(" helper_->%sImmediate(%s);\n" % (func.name, func.MakeOriginalArgString(""))) file.Write("}\n") file.Write("\n")
475,211
def WriteGLES2ImplementationHeader(self, func, file): """Overrriden from TypeHandler.""" file.Write("%s %s(%s) {\n" % (func.return_type, func.original_name, func.MakeTypedOriginalArgString(""))) all_but_last_args = func.GetOriginalArgs()[:-1] arg_string = ( ", ".join(["%s" % arg.name for arg in all_but_last_args])) code = """ typedef %(func_name)s::Result Result;
def WriteGLES2ImplementationHeader(self, func, file): """Overrriden from TypeHandler.""" file.Write("%s %s(%s) {\n" % (func.return_type, func.original_name, func.MakeTypedOriginalArgString(""))) all_but_last_args = func.GetOriginalArgs()[:-1] arg_string = ( ", ".join(["%s" % arg.name for arg in all_but_last_args])) code = """ typedef %(func_name)s::Result Result;
475,212
code = """ typedef %(func_name)s::Result Result;
code = """ typedef %(func_name)s::Result Result;
475,213
code = """ typedef %(func_name)s::Result Result;
code = """ typedef %(func_name)s::Result Result;
475,214
def WaitUntil(self, function, timeout=-1, retry_sleep=0.25, args=[]): """Poll on a condition until timeout.
def WaitUntil(self, function, timeout=-1, retry_sleep=0.25, args=[]): """Poll on a condition until timeout.
475,215
def WaitUntil(self, function, timeout=-1, retry_sleep=0.25, args=[]): """Poll on a condition until timeout.
def WaitUntil(self, function, timeout=-1, retry_sleep=0.25, args=[]): """Poll on a condition until timeout.
475,216
def testHistoryResult(self): """Verify that omnibox can fetch items from history.""" url = self.GetFileURLForPath(os.path.join(self.DataDir(), 'title2.html')) title = 'Title Of Awesomeness' self.AppendTab(pyauto.GURL(url)) def _VerifyHistoryResult(query_list, description, windex=0): """Verify result matching given description for given list of queries.""" for query_text in query_list: matches = self._GetOmniboxMatchesFor( query_text, windex=windex, attr_dict={'description': description}) self.assertTrue(matches) self.assertEqual(1, len(matches)) item = matches[0] self.assertEqual(url, item['destination_url']) # Query using URL & title _VerifyHistoryResult([url, title], title) # Verify results in another tab self.AppendTab(pyauto.GURL()) _VerifyHistoryResult([url, title], title) # Verify results in another window self.OpenNewBrowserWindow(True) self.WaitUntilOmniboxReadyHack(windex=1) _VerifyHistoryResult([url, title], title, windex=1) # Verify results in an incognito window self.RunCommand(pyauto.IDC_NEW_INCOGNITO_WINDOW) self.WaitUntilOmniboxReadyHack(windex=2) _VerifyHistoryResult([url, title], title, windex=2)
def testHistoryResult(self): """Verify that omnibox can fetch items from history.""" url = self.GetFileURLForDataPath('title2.html') title = 'Title Of Awesomeness' self.AppendTab(pyauto.GURL(url)) def _VerifyHistoryResult(query_list, description, windex=0): """Verify result matching given description for given list of queries.""" for query_text in query_list: matches = self._GetOmniboxMatchesFor( query_text, windex=windex, attr_dict={'description': description}) self.assertTrue(matches) self.assertEqual(1, len(matches)) item = matches[0] self.assertEqual(url, item['destination_url']) # Query using URL & title _VerifyHistoryResult([url, title], title) # Verify results in another tab self.AppendTab(pyauto.GURL()) _VerifyHistoryResult([url, title], title) # Verify results in another window self.OpenNewBrowserWindow(True) self.WaitUntilOmniboxReadyHack(windex=1) _VerifyHistoryResult([url, title], title, windex=1) # Verify results in an incognito window self.RunCommand(pyauto.IDC_NEW_INCOGNITO_WINDOW) self.WaitUntilOmniboxReadyHack(windex=2) _VerifyHistoryResult([url, title], title, windex=2)
475,217
def testSelect(self): """Verify omnibox popup selection.""" url1 = self.GetFileURLForPath(os.path.join(self.DataDir(), 'title2.html')) url2 = self.GetFileURLForPath(os.path.join(self.DataDir(), 'title1.html')) title1 = 'Title Of Awesomeness' self.NavigateToURL(url1) self.NavigateToURL(url2) matches = self._GetOmniboxMatchesFor('file://') self.assertTrue(matches) # Find the index of match for url1 index = None for i, match in enumerate(matches): if match['description'] == title1: index = i self.assertTrue(index is not None) self.OmniboxMovePopupSelection(index) # Select url1 line in popup self.assertEqual(url1, self.GetOmniboxInfo().Text()) self.OmniboxAcceptInput() self.assertEqual(title1, self.GetActiveTabTitle())
def testSelect(self): """Verify omnibox popup selection.""" url1 = self.GetFileURLForDataPath('title2.html') url2 = self.GetFileURLForDataPath('title1.html') title1 = 'Title Of Awesomeness' self.NavigateToURL(url1) self.NavigateToURL(url2) matches = self._GetOmniboxMatchesFor('file://') self.assertTrue(matches) # Find the index of match for url1 index = None for i, match in enumerate(matches): if match['description'] == title1: index = i self.assertTrue(index is not None) self.OmniboxMovePopupSelection(index) # Select url1 line in popup self.assertEqual(url1, self.GetOmniboxInfo().Text()) self.OmniboxAcceptInput() self.assertEqual(title1, self.GetActiveTabTitle())
475,218
def testSuggest(self): """Verify suggest results in omnibox.""" matches = self._GetOmniboxMatchesFor('apple') self.assertTrue(matches) self.assertTrue([x for x in matches if x['type'] == 'search-suggest'])
def testSuggest(self): """Verify suggested results in omnibox.""" matches = self._GetOmniboxMatchesFor('apple') self.assertTrue(matches) self.assertTrue([x for x in matches if x['type'] == 'search-suggest'])
475,219
def testDifferentTypesOfResults(self): """Verify different types of results from omnibox.
deftestDifferentTypesOfResults(self):"""Verifydifferenttypesofresultsfromomnibox.
475,220
def testDifferentTypesOfResults(self): """Verify different types of results from omnibox.
def testDifferentTypesOfResults(self): """Verify different types of results from omnibox.
475,221
def testSuggestPref(self): """Verify omnibox suggest-service enable/disable pref.""" self.assertTrue(self.GetPrefsInfo().Prefs(pyauto.kSearchSuggestEnabled)) matches = self._GetOmniboxMatchesFor('apple') self.assertTrue(matches) self.assertTrue([x for x in matches if x['type'] == 'search-suggest']) # Disable suggest-service self.SetPrefs(pyauto.kSearchSuggestEnabled, False) self.assertFalse(self.GetPrefsInfo().Prefs(pyauto.kSearchSuggestEnabled)) matches = self._GetOmniboxMatchesFor('apple') self.assertTrue(matches) # Verify there are no suggest results self.assertFalse([x for x in matches if x['type'] == 'search-suggest'])
def testSuggestPref(self): """Verify no suggests for omnibox when suggested-services disabled.""" search_string = 'apple' self.assertTrue(self.GetPrefsInfo().Prefs(pyauto.kSearchSuggestEnabled)) matches = self._GetOmniboxMatchesFor('apple') self.assertTrue(matches) self.assertTrue([x for x in matches if x['type'] == 'search-suggest']) # Disable suggest-service self.SetPrefs(pyauto.kSearchSuggestEnabled, False) self.assertFalse(self.GetPrefsInfo().Prefs(pyauto.kSearchSuggestEnabled)) matches = self._GetOmniboxMatchesFor('apple') self.assertTrue(matches) # Verify there are no suggest results self.assertFalse([x for x in matches if x['type'] == 'search-suggest'])
475,222
def testSuggestPref(self): """Verify omnibox suggest-service enable/disable pref.""" self.assertTrue(self.GetPrefsInfo().Prefs(pyauto.kSearchSuggestEnabled)) matches = self._GetOmniboxMatchesFor('apple') self.assertTrue(matches) self.assertTrue([x for x in matches if x['type'] == 'search-suggest']) # Disable suggest-service self.SetPrefs(pyauto.kSearchSuggestEnabled, False) self.assertFalse(self.GetPrefsInfo().Prefs(pyauto.kSearchSuggestEnabled)) matches = self._GetOmniboxMatchesFor('apple') self.assertTrue(matches) # Verify there are no suggest results self.assertFalse([x for x in matches if x['type'] == 'search-suggest'])
def testSuggestPref(self): """Verify omnibox suggest-service enable/disable pref.""" self.assertTrue(self.GetPrefsInfo().Prefs(pyauto.kSearchSuggestEnabled)) matches = self._GetOmniboxMatchesFor(search_string) self.assertTrue(matches) self.assertTrue([x for x in matches if x['type'] == 'search-suggest']) # Disable suggest-service self.SetPrefs(pyauto.kSearchSuggestEnabled, False) self.assertFalse(self.GetPrefsInfo().Prefs(pyauto.kSearchSuggestEnabled)) matches = self._GetOmniboxMatchesFor(search_string) self.assertTrue(matches) # Verify there are no suggest results self.assertFalse([x for x in matches if x['type'] == 'search-suggest'])
475,223
def testSuggestPref(self): """Verify omnibox suggest-service enable/disable pref.""" self.assertTrue(self.GetPrefsInfo().Prefs(pyauto.kSearchSuggestEnabled)) matches = self._GetOmniboxMatchesFor('apple') self.assertTrue(matches) self.assertTrue([x for x in matches if x['type'] == 'search-suggest']) # Disable suggest-service self.SetPrefs(pyauto.kSearchSuggestEnabled, False) self.assertFalse(self.GetPrefsInfo().Prefs(pyauto.kSearchSuggestEnabled)) matches = self._GetOmniboxMatchesFor('apple') self.assertTrue(matches) # Verify there are no suggest results self.assertFalse([x for x in matches if x['type'] == 'search-suggest'])
def testSuggestPref(self): """Verify omnibox suggest-service enable/disable pref.""" self.assertTrue(self.GetPrefsInfo().Prefs(pyauto.kSearchSuggestEnabled)) matches = self._GetOmniboxMatchesFor(search_string) self.assertTrue(matches) self.assertTrue([x for x in matches if x['type'] == 'search-suggest']) # Disable suggest-service self.SetPrefs(pyauto.kSearchSuggestEnabled, False) self.assertFalse(self.GetPrefsInfo().Prefs(pyauto.kSearchSuggestEnabled)) matches = self._GetOmniboxMatchesFor(search_string) self.assertTrue(matches) # Verify there are no suggest results self.assertFalse([x for x in matches if x['type'] == 'search-suggest'])
475,224
def _CheckBookmarkResultForVariousInputs(self, url, title, windex=0): """Check if we get the Bookmark for complete and partial inputs.""" # Check if the complete URL would get the bookmark url_matches = self._GetOmniboxMatchesFor(url, windex=windex) self._VerifyHasBookmarkResult(url_matches) # Check if the complete title would get the bookmark title_matches = self._GetOmniboxMatchesFor(title, windex=windex) self._VerifyHasBookmarkResult(title_matches) # Check if the partial URL would get the bookmark split_url = urlparse.urlsplit(url) partial_url = self._GetOmniboxMatchesFor(split_url.scheme, windex=windex) self._VerifyHasBookmarkResult(partial_url) # Check if the partial title would get the bookmark split_title = title.split() search_term = split_title[len(split_title) - 1] partial_title = self._GetOmniboxMatchesFor(search_term, windex=windex) self._VerifyHasBookmarkResult(partial_title)
def _CheckBookmarkResultForVariousInputs(self, url, title, windex=0): """Check if we get the Bookmark for complete and partial inputs.""" # Check if the complete URL would get the bookmark url_matches = self._GetOmniboxMatchesFor(url, windex=windex) self._VerifyHasBookmarkResult(url_matches) # Check if the complete title would get the bookmark title_matches = self._GetOmniboxMatchesFor(title, windex=windex) self._VerifyHasBookmarkResult(title_matches) # Check if the partial URL would get the bookmark split_url = urlparse.urlsplit(url) partial_url = self._GetOmniboxMatchesFor(split_url.scheme, windex=windex) self._VerifyHasBookmarkResult(partial_url) # Check if the partial title would get the bookmark split_title = title.split() search_term = split_title[len(split_title) - 1] partial_title = self._GetOmniboxMatchesFor(search_term, windex=windex) self._VerifyHasBookmarkResult(partial_title)
475,225
def _CheckBookmarkResultForVariousInputs(self, url, title, windex=0): """Check if we get the Bookmark for complete and partial inputs.""" # Check if the complete URL would get the bookmark url_matches = self._GetOmniboxMatchesFor(url, windex=windex) self._VerifyHasBookmarkResult(url_matches) # Check if the complete title would get the bookmark title_matches = self._GetOmniboxMatchesFor(title, windex=windex) self._VerifyHasBookmarkResult(title_matches) # Check if the partial URL would get the bookmark split_url = urlparse.urlsplit(url) partial_url = self._GetOmniboxMatchesFor(split_url.scheme, windex=windex) self._VerifyHasBookmarkResult(partial_url) # Check if the partial title would get the bookmark split_title = title.split() search_term = split_title[len(split_title) - 1] partial_title = self._GetOmniboxMatchesFor(search_term, windex=windex) self._VerifyHasBookmarkResult(partial_title)
def _CheckBookmarkResultForVariousInputs(self, url, title, windex=0): """Check if we get the Bookmark for complete and partial inputs.""" # Check if the complete URL would get the bookmark url_matches = self._GetOmniboxMatchesFor(url, windex=windex) self._VerifyHasBookmarkResult(url_matches) # Check if the complete title would get the bookmark title_matches = self._GetOmniboxMatchesFor(title, windex=windex) self._VerifyHasBookmarkResult(title_matches) # Check if the partial URL would get the bookmark split_url = urlparse.urlsplit(url) partial_url = self._GetOmniboxMatchesFor(split_url.scheme, windex=windex) self._VerifyHasBookmarkResult(partial_url) # Check if the partial title would get the bookmark split_title = title.split() search_term = split_title[len(split_title) - 1] partial_title = self._GetOmniboxMatchesFor(search_term, windex=windex) self._VerifyHasBookmarkResult(partial_title)
475,226
def _CheckBookmarkResultForVariousInputs(self, url, title, windex=0): """Check if we get the Bookmark for complete and partial inputs.""" # Check if the complete URL would get the bookmark url_matches = self._GetOmniboxMatchesFor(url, windex=windex) self._VerifyHasBookmarkResult(url_matches) # Check if the complete title would get the bookmark title_matches = self._GetOmniboxMatchesFor(title, windex=windex) self._VerifyHasBookmarkResult(title_matches) # Check if the partial URL would get the bookmark split_url = urlparse.urlsplit(url) partial_url = self._GetOmniboxMatchesFor(split_url.scheme, windex=windex) self._VerifyHasBookmarkResult(partial_url) # Check if the partial title would get the bookmark split_title = title.split() search_term = split_title[len(split_title) - 1] partial_title = self._GetOmniboxMatchesFor(search_term, windex=windex) self._VerifyHasBookmarkResult(partial_title)
def _CheckBookmarkResultForVariousInputs(self, url, title, windex=0): """Check if we get the Bookmark for complete and partial inputs.""" # Check if the complete URL would get the bookmark url_matches = self._GetOmniboxMatchesFor(url, windex=windex) self._VerifyHasBookmarkResult(url_matches) # Check if the complete title would get the bookmark title_matches = self._GetOmniboxMatchesFor(title, windex=windex) self._VerifyHasBookmarkResult(title_matches) # Check if the partial URL would get the bookmark split_url = urlparse.urlsplit(url) partial_url = self._GetOmniboxMatchesFor(split_url.scheme, windex=windex) self._VerifyHasBookmarkResult(partial_url) # Check if the partial title would get the bookmark split_title = title.split() search_term = split_title[len(split_title) - 1] partial_title = self._GetOmniboxMatchesFor(search_term, windex=windex) self._VerifyHasBookmarkResult(partial_title)
475,227
def _CheckBookmarkResultForVariousInputs(self, url, title, windex=0): """Check if we get the Bookmark for complete and partial inputs.""" # Check if the complete URL would get the bookmark url_matches = self._GetOmniboxMatchesFor(url, windex=windex) self._VerifyHasBookmarkResult(url_matches) # Check if the complete title would get the bookmark title_matches = self._GetOmniboxMatchesFor(title, windex=windex) self._VerifyHasBookmarkResult(title_matches) # Check if the partial URL would get the bookmark split_url = urlparse.urlsplit(url) partial_url = self._GetOmniboxMatchesFor(split_url.scheme, windex=windex) self._VerifyHasBookmarkResult(partial_url) # Check if the partial title would get the bookmark split_title = title.split() search_term = split_title[len(split_title) - 1] partial_title = self._GetOmniboxMatchesFor(search_term, windex=windex) self._VerifyHasBookmarkResult(partial_title)
def _CheckBookmarkResultForVariousInputs(self, url, title, windex=0): """Check if we get the Bookmark for complete and partial inputs.""" # Check if the complete URL would get the bookmark url_matches = self._GetOmniboxMatchesFor(url, windex=windex) self._VerifyHasBookmarkResult(url_matches) # Check if the complete title would get the bookmark title_matches = self._GetOmniboxMatchesFor(title, windex=windex) self._VerifyHasBookmarkResult(title_matches) # Check if the partial URL would get the bookmark split_url = urlparse.urlsplit(url) partial_url = self._GetOmniboxMatchesFor(split_url.scheme, windex=windex) self._VerifyHasBookmarkResult(partial_url) # Check if the partial title would get the bookmark split_title = title.split() search_term = split_title[len(split_title) - 1] partial_title = self._GetOmniboxMatchesFor(search_term, windex=windex) self._VerifyHasBookmarkResult(partial_title)
475,228
def testBookmarkResultInNewTabAndWindow(self): """Verify that omnibox can recognize bookmark in the search options in new tabs and Windows. """ url = self.GetFileURLForDataPath('title2.html') self.NavigateToURL(url) title = 'This is Awesomeness' bookmarks = self.GetBookmarkModel() bar_id = bookmarks.BookmarkBar()['id'] self.AddBookmarkURL(bar_id, 0, title, url) bookmarks = self.GetBookmarkModel() nodes = bookmarks.FindByTitle(title) self.AppendTab(pyauto.GURL(url)) self._CheckBookmarkResultForVariousInputs(url, title) self.OpenNewBrowserWindow(True) self.assertEqual(2, self.GetBrowserWindowCount()) self.NavigateToURL(url, 1, 0) self._CheckBookmarkResultForVariousInputs(url, title, windex=1) self.RunCommand(pyauto.IDC_NEW_INCOGNITO_WINDOW) self.assertEqual(3, self.GetBrowserWindowCount()) self.NavigateToURL(url, 2, 0) self._CheckBookmarkResultForVariousInputs(url, title, windex=2)
def testBookmarkResultInNewTabAndWindow(self): """Verify that omnibox can recognize a bookmark within search options in new tabs and windows.""" url = self.GetFileURLForDataPath('title2.html') self.NavigateToURL(url) title = 'This is Awesomeness' bookmarks = self.GetBookmarkModel() bar_id = bookmarks.BookmarkBar()['id'] self.AddBookmarkURL(bar_id, 0, title, url) bookmarks = self.GetBookmarkModel() nodes = bookmarks.FindByTitle(title) self.AppendTab(pyauto.GURL(url)) self._CheckBookmarkResultForVariousInputs(url, title) self.OpenNewBrowserWindow(True) self.assertEqual(2, self.GetBrowserWindowCount()) self.NavigateToURL(url, 1, 0) self._CheckBookmarkResultForVariousInputs(url, title, windex=1) self.RunCommand(pyauto.IDC_NEW_INCOGNITO_WINDOW) self.assertEqual(3, self.GetBrowserWindowCount()) self.NavigateToURL(url, 2, 0) self._CheckBookmarkResultForVariousInputs(url, title, windex=2)
475,229
def ShouldExcludePath(path): head, tail = os.path.split(path) if tail in ('.svn', '.git'): return True
def ShouldExcludePath(path): head, tail = os.path.split(path) if tail in ('.svn', '.git'): return True
475,230
def testPerfExpectations(self): perf_data = LoadData()
def testPerfExpectations(self): perf_data = LoadData()
475,231
def find_and_truncate(f): f.seek(0) while True: line = f.readline() if line == "": return False if '</valgrindoutput>' in line: # valgrind often has garbage after </valgrindoutput> upon crash f.truncate() return True
def find_and_truncate(f): f.seek(0) while True: line = f.readline() if line == "": return False if '</valgrindoutput>' in line: # valgrind often has garbage after </valgrindoutput> upon crash f.truncate() return True
475,232
def find_and_truncate(f): f.seek(0) while True: line = f.readline() if line == "": return False if '</valgrindoutput>' in line: # valgrind often has garbage after </valgrindoutput> upon crash f.truncate() return True
def find_and_truncate(f): f.seek(0) while True: line = f.readline() if line == "": return False if '</valgrindoutput>' in line: # valgrind often has garbage after </valgrindoutput> upon crash f.truncate() return True
475,233
def Report(self, files, check_sanity=False): '''Reads in a set of files and prints Memcheck report.
def Report(self, files, check_sanity=False): '''Reads in a set of files and prints Memcheck report.
475,234
def FileHandler(self): """This handler sends the contents of the requested file. Wow, it's like a real webserver!"""
def FileHandler(self): """This handler sends the contents of the requested file. Wow, it's like a real webserver!"""
475,235
def FileHandler(self): """This handler sends the contents of the requested file. Wow, it's like a real webserver!"""
def FileHandler(self): """This handler sends the contents of the requested file. Wow, it's like a real webserver!"""
475,236
def FileHandler(self): """This handler sends the contents of the requested file. Wow, it's like a real webserver!"""
def FileHandler(self): """This handler sends the contents of the requested file. Wow, it's like a real webserver!"""
475,237
def FileHandler(self): """This handler sends the contents of the requested file. Wow, it's like a real webserver!"""
def FileHandler(self): """This handler sends the contents of the requested file. Wow, it's like a real webserver!"""
475,238
def CheckChangeOnCommit(input_api, output_api): results = [] if not input_api.json: results.append(output_api.PresubmitNotifyResult( 'You don\'t have json nor simplejson installed.\n' ' This is a warning that you will need to upgrade your python ' 'installation.\n' ' This is no big deal but you\'ll eventually need to ' 'upgrade.\n' ' How? Easy! You can do it right now and shut me off! Just:\n' ' del depot_tools\\python.bat\n' ' gclient\n' ' Thanks for your patience.')) results.extend(_CommonChecks(input_api, output_api)) # TODO(thestig) temporarily disabled, doesn't work in third_party/ #results.extend(input_api.canned_checks.CheckSvnModifiedDirectories( # input_api, output_api, sources)) # Make sure the tree is 'open'. results.extend(input_api.canned_checks.CheckTreeIsOpen( input_api, output_api, 'http://chromium-status.appspot.com/current?format=raw', '(?i).*closed.*')) results.extend(input_api.canned_checks.CheckRietveldTryJobExecution(input_api, output_api, 'http://codereview.chromium.org', ('win', 'linux', 'mac'), 'tryserver@chromium.org')) # These builders are just too slow. IGNORED_BUILDERS = [ 'Chromium XP', 'Chromium Mac', 'Chromium Arm (dbg)', 'Chromium Linux', 'Chromium Linux x64', ] results.extend(input_api.canned_checks.CheckBuildbotPendingBuilds( input_api, output_api, 'http://build.chromium.org/buildbot/waterfall/json/builders?filter=1', 6, IGNORED_BUILDERS)) return results
def CheckChangeOnCommit(input_api, output_api): results = [] if not input_api.json: results.append(output_api.PresubmitNotifyResult( 'You don\'t have json nor simplejson installed.\n' ' This is a warning that you will need to upgrade your python ' 'installation.\n' ' This is no big deal but you\'ll eventually need to ' 'upgrade.\n' ' How? Easy! You can do it right now and shut me off! Just:\n' ' del depot_tools\\python.bat\n' ' gclient\n' ' Thanks for your patience.')) results.extend(_CommonChecks(input_api, output_api)) # TODO(thestig) temporarily disabled, doesn't work in third_party/ #results.extend(input_api.canned_checks.CheckSvnModifiedDirectories( # input_api, output_api, sources)) # Make sure the tree is 'open'. results.extend(input_api.canned_checks.CheckTreeIsOpen( input_api, output_api, json_url='http://chromium-status.appspot.com/current?format=json')) results.extend(input_api.canned_checks.CheckRietveldTryJobExecution(input_api, output_api, 'http://codereview.chromium.org', ('win', 'linux', 'mac'), 'tryserver@chromium.org')) # These builders are just too slow. IGNORED_BUILDERS = [ 'Chromium XP', 'Chromium Mac', 'Chromium Arm (dbg)', 'Chromium Linux', 'Chromium Linux x64', ] results.extend(input_api.canned_checks.CheckBuildbotPendingBuilds( input_api, output_api, 'http://build.chromium.org/buildbot/waterfall/json/builders?filter=1', 6, IGNORED_BUILDERS)) return results
475,239
def write_action(asset, webgl_mode): filename = posixpath.splitext(posixpath.basename(asset['path']))[0] filename = filename.replace('.','_') filename = filename.replace('-','_') filename = filename.lower() name = "convert_" + filename if webgl_mode: name = name + "_webgl" output = asset['path'].replace('convert_', '') output = posixpath.splitext(output)[0] + ".o3dtgz" output_dir = posixpath.dirname(output) output_file.write(" {\n") output_file.write(" 'action_name': '%s',\n" % name) output_file.write(" 'inputs': [\n") output_file.write(" '<(PRODUCT_DIR)/o3dConverter',\n") output_file.write(" '../o3d_assets/samples/%s',\n" % asset['path']) output_file.write(" ],\n") output_file.write(" 'outputs': [\n") if sys.platform[:5] == 'linux': # TODO(gspencer): This is a HACK! We shouldn't need to put the # absolute path here, but currently on Linux (scons), it is unable # to copy generated items out of the source tree (because the # repository mojo fails to find it and puts in the wrong path). output_file.write(" '%s',\n" % posixpath.abspath(output)) else: output_file.write(" '../samples/%s',\n" % output) output_file.write(" ],\n") output_file.write(" 'action': [\n") output_file.write(" '<(PRODUCT_DIR)/o3dConverter',\n") output_file.write(" '--no-condition',\n") output_file.write(" '--up-axis=%s',\n" % asset['up']) if webgl_mode: output_file.write(" '--no-binary',\n") output_file.write(" '--no-archive',\n") output_file.write(" '--convert-dds-to-png',\n") output_file.write(" '--convert-cg-to-glsl',\n") output_file.write(" '../o3d_assets/samples/%s',\n" % asset['path']) output_file.write(" '<(_outputs)',\n") output_file.write(" ],\n") output_file.write(" },\n")
def write_action(asset, webgl_mode): filename = posixpath.splitext(posixpath.basename(asset['path']))[0] filename = filename.replace('.','_') filename = filename.replace('-','_') filename = filename.lower() name = "convert_" + filename if webgl_mode: name = name + "_webgl" output = asset['path'].replace('convert_', '') output_base = posixpath.splitext(output)[0] output_tgz = output_base + ".o3dtgz" output_json = output_base + "/scene.json" output = output_tgz if webgl_mode: output = output_json output_dir = posixpath.dirname(output) output_file.write(" {\n") output_file.write(" 'action_name': '%s',\n" % name) output_file.write(" 'inputs': [\n") output_file.write(" '<(PRODUCT_DIR)/o3dConverter',\n") output_file.write(" '../o3d_assets/samples/%s',\n" % asset['path']) output_file.write(" ],\n") output_file.write(" 'outputs': [\n") if sys.platform[:5] == 'linux': # TODO(gspencer): This is a HACK! We shouldn't need to put the # absolute path here, but currently on Linux (scons), it is unable # to copy generated items out of the source tree (because the # repository mojo fails to find it and puts in the wrong path). output_file.write(" '%s',\n" % posixpath.abspath(output)) else: output_file.write(" '../samples/%s',\n" % output) output_file.write(" ],\n") output_file.write(" 'action': [\n") output_file.write(" '<(PRODUCT_DIR)/o3dConverter',\n") output_file.write(" '--no-condition',\n") output_file.write(" '--up-axis=%s',\n" % asset['up']) if webgl_mode: output_file.write(" '--no-binary',\n") output_file.write(" '--no-archive',\n") output_file.write(" '--convert-dds-to-png',\n") output_file.write(" '--convert-cg-to-glsl',\n") output_file.write(" '../o3d_assets/samples/%s',\n" % asset['path']) output_file.write(" '<(_outputs)',\n") output_file.write(" ],\n") output_file.write(" },\n")
475,240
def write_action(asset, webgl_mode): filename = posixpath.splitext(posixpath.basename(asset['path']))[0] filename = filename.replace('.','_') filename = filename.replace('-','_') filename = filename.lower() name = "convert_" + filename if webgl_mode: name = name + "_webgl" output = asset['path'].replace('convert_', '') output = posixpath.splitext(output)[0] + ".o3dtgz" output_dir = posixpath.dirname(output) output_file.write(" {\n") output_file.write(" 'action_name': '%s',\n" % name) output_file.write(" 'inputs': [\n") output_file.write(" '<(PRODUCT_DIR)/o3dConverter',\n") output_file.write(" '../o3d_assets/samples/%s',\n" % asset['path']) output_file.write(" ],\n") output_file.write(" 'outputs': [\n") if sys.platform[:5] == 'linux': # TODO(gspencer): This is a HACK! We shouldn't need to put the # absolute path here, but currently on Linux (scons), it is unable # to copy generated items out of the source tree (because the # repository mojo fails to find it and puts in the wrong path). output_file.write(" '%s',\n" % posixpath.abspath(output)) else: output_file.write(" '../samples/%s',\n" % output) output_file.write(" ],\n") output_file.write(" 'action': [\n") output_file.write(" '<(PRODUCT_DIR)/o3dConverter',\n") output_file.write(" '--no-condition',\n") output_file.write(" '--up-axis=%s',\n" % asset['up']) if webgl_mode: output_file.write(" '--no-binary',\n") output_file.write(" '--no-archive',\n") output_file.write(" '--convert-dds-to-png',\n") output_file.write(" '--convert-cg-to-glsl',\n") output_file.write(" '../o3d_assets/samples/%s',\n" % asset['path']) output_file.write(" '<(_outputs)',\n") output_file.write(" ],\n") output_file.write(" },\n")
def write_action(asset, webgl_mode): filename = posixpath.splitext(posixpath.basename(asset['path']))[0] filename = filename.replace('.','_') filename = filename.replace('-','_') filename = filename.lower() name = "convert_" + filename if webgl_mode: name = name + "_webgl" output = asset['path'].replace('convert_', '') output = posixpath.splitext(output)[0] + ".o3dtgz" output_dir = posixpath.dirname(output) output_file.write(" {\n") output_file.write(" 'action_name': '%s',\n" % name) output_file.write(" 'inputs': [\n") output_file.write(" '<(PRODUCT_DIR)/o3dConverter',\n") output_file.write(" '../o3d_assets/samples/%s',\n" % asset['path']) output_file.write(" ],\n") output_file.write(" 'outputs': [\n") if sys.platform[:5] == 'linux': # TODO(gspencer): This is a HACK! We shouldn't need to put the # absolute path here, but currently on Linux (scons), it is unable # to copy generated items out of the source tree (because the # repository mojo fails to find it and puts in the wrong path). output_file.write(" '%s',\n" % posixpath.abspath(output)) else: output_file.write(" '../samples/%s',\n" % output) output_file.write(" ],\n") output_file.write(" 'action': [\n") output_file.write(" '<(PRODUCT_DIR)/o3dConverter',\n") output_file.write(" '--no-condition',\n") output_file.write(" '--up-axis=%s',\n" % asset['up']) if webgl_mode: output_file.write(" '--no-binary',\n") output_file.write(" '--no-archive',\n") output_file.write(" '--convert-dds-to-png',\n") output_file.write(" '--convert-cg-to-glsl',\n") output_file.write(" '../o3d_assets/samples/%s',\n" % asset['path']) if webgl_mode: output_file.write(" '%s',\n" % output_tgz) else: output_file.write(" '<(_outputs)',\n") output_file.write(" ],\n") output_file.write(" },\n")
475,241
def RunGit(command): """Run a git subcommand, returning its output.""" proc = subprocess.Popen(['git'] + command, stdout=subprocess.PIPE) return proc.communicate()[0].strip()
def RunGit(command): """Run a git subcommand, returning its output.""" shell = (os.name == 'nt') proc = subprocess.Popen(['git'] + command, shell=shell, stdout=subprocess.PIPE) return proc.communicate()[0].strip()
475,242
def FindSVNRev(target_rev): """Map an SVN revision to a git hash. Like 'git svn find-rev' but without the git-svn bits.""" # We iterate through the commit log looking for "git-svn-id" lines, # which contain the SVN revision of that commit. We can stop once # we've found our target (or hit a revision number lower than what # we're looking for, indicating not found). target_rev = int(target_rev) # regexp matching the "commit" line from the log. commit_re = re.compile(r'^commit ([a-f\d]{40})$') # regexp matching the git-svn line from the log. git_svn_re = re.compile(r'^\s+git-svn-id: [^@]+@(\d+) ') log = subprocess.Popen(['git', 'log', '--no-color', '--first-parent', '--pretty=medium', 'origin'], stdout=subprocess.PIPE) # Track whether we saw a revision *later* than the one we're seeking. saw_later = False for line in log.stdout: match = commit_re.match(line) if match: commit = match.group(1) continue match = git_svn_re.match(line) if match: rev = int(match.group(1)) if rev <= target_rev: log.stdout.close() # Break pipe. if rev < target_rev: if not saw_later: return None # Can't be sure whether this rev is ok. print ("WARNING: r%d not found, so using next nearest earlier r%d" % (target_rev, rev)) return commit else: saw_later = True print "Error: reached end of log without finding commit info." print "Something has likely gone horribly wrong." return None
def FindSVNRev(target_rev): """Map an SVN revision to a git hash. Like 'git svn find-rev' but without the git-svn bits.""" # We iterate through the commit log looking for "git-svn-id" lines, # which contain the SVN revision of that commit. We can stop once # we've found our target (or hit a revision number lower than what # we're looking for, indicating not found). target_rev = int(target_rev) # regexp matching the "commit" line from the log. commit_re = re.compile(r'^commit ([a-f\d]{40})$') # regexp matching the git-svn line from the log. git_svn_re = re.compile(r'^\s+git-svn-id: [^@]+@(\d+) ') log = subprocess.Popen(['git', 'log', '--no-color', '--first-parent', '--pretty=medium', 'origin'], shell=shell, stdout=subprocess.PIPE) # Track whether we saw a revision *later* than the one we're seeking. saw_later = False for line in log.stdout: match = commit_re.match(line) if match: commit = match.group(1) continue match = git_svn_re.match(line) if match: rev = int(match.group(1)) if rev <= target_rev: log.stdout.close() # Break pipe. if rev < target_rev: if not saw_later: return None # Can't be sure whether this rev is ok. print ("WARNING: r%d not found, so using next nearest earlier r%d" % (target_rev, rev)) return commit else: saw_later = True print "Error: reached end of log without finding commit info." print "Something has likely gone horribly wrong." return None
475,243
def testPopupBlockerEnabled(self): """Verify popup blocking is enabled.""" self.assertFalse(self.GetBlockedPopupsInfo(), msg='Should have no blocked popups on startup') file_url = self.GetFileURLForPath(os.path.join( self.DataDir(), 'popup_blocker', 'popup-blocked-to-post-blank.html')) self.NavigateToURL(file_url) blocked_popups = self.GetBlockedPopupsInfo() self.assertEqual(1, len(blocked_popups), msg='Popup not blocked') self.assertEqual('Popup Success!', blocked_popups[0]['title'])
def testPopupBlockerEnabled(self): """Verify popup blocking is enabled.""" self.assertFalse(self.GetBlockedPopupsInfo(), msg='Should have no blocked popups on startup') file_url = self.GetFileURLForPath(os.path.join( self.DataDir(), 'popup_blocker', 'popup-blocked-to-post-blank.html')) self.NavigateToURL(file_url) blocked_popups = self.GetBlockedPopupsInfo() self.assertEqual(1, len(blocked_popups), msg='Popup not blocked') self.assertEqual('Popup Success!', blocked_popups[0]['title'])
475,244
def EchoAllHandler(self): """This handler yields a (more) human-readable page listing information about the request header & contents."""
def EchoAllHandler(self): """This handler yields a (more) human-readable page listing information about the request header & contents."""
475,245
def main(options, args): logfile = open('testserver.log', 'w') sys.stdout = FileMultiplexer(sys.stdout, logfile) sys.stderr = FileMultiplexer(sys.stderr, logfile) port = options.port if options.server_type == SERVER_HTTP: if options.cert: # let's make sure the cert file exists. if not os.path.isfile(options.cert): print 'specified server cert file not found: ' + options.cert + \ ' exiting...' return for ca_cert in options.ssl_client_ca: if not os.path.isfile(ca_cert): print 'specified trusted client CA file not found: ' + ca_cert + \ ' exiting...' return server = HTTPSServer(('127.0.0.1', port), TestPageHandler, options.cert, options.ssl_client_auth, options.ssl_client_ca) print 'HTTPS server started on port %d...' % port else: server = StoppableHTTPServer(('127.0.0.1', port), TestPageHandler) print 'HTTP server started on port %d...' % port server.data_dir = MakeDataDir() server.file_root_url = options.file_root_url server._sync_handler = None # means FTP Server else: my_data_dir = MakeDataDir() # Instantiate a dummy authorizer for managing 'virtual' users authorizer = pyftpdlib.ftpserver.DummyAuthorizer() # Define a new user having full r/w permissions and a read-only # anonymous user authorizer.add_user('chrome', 'chrome', my_data_dir, perm='elradfmw') authorizer.add_anonymous(my_data_dir) # Instantiate FTP handler class ftp_handler = pyftpdlib.ftpserver.FTPHandler ftp_handler.authorizer = authorizer # Define a customized banner (string returned when client connects) ftp_handler.banner = ("pyftpdlib %s based ftpd ready." % pyftpdlib.ftpserver.__ver__) # Instantiate FTP server class and listen to 127.0.0.1:port address = ('127.0.0.1', port) server = pyftpdlib.ftpserver.FTPServer(address, ftp_handler) print 'FTP server started on port %d...' % port # Notify the parent that we've started. (BaseServer subclasses # bind their sockets on construction.) 
if options.startup_pipe is not None: if sys.platform == 'win32': fd = msvcrt.open_osfhandle(options.startup_pipe, 0) else: fd = options.startup_pipe startup_pipe = os.fdopen(fd, "w") startup_pipe.write("READY") startup_pipe.close() try: server.serve_forever() except KeyboardInterrupt: print 'shutting down server' server.stop = True
def main(options, args): logfile = open('testserver.log', 'w') sys.stdout = FileMultiplexer(sys.stdout, logfile) sys.stderr = FileMultiplexer(sys.stderr, logfile) port = options.port if options.server_type == SERVER_HTTP: if options.cert: # let's make sure the cert file exists. if not os.path.isfile(options.cert): print 'specified server cert file not found: ' + options.cert + \ ' exiting...' return for ca_cert in options.ssl_client_ca: if not os.path.isfile(ca_cert): print 'specified trusted client CA file not found: ' + ca_cert + \ ' exiting...' return server = HTTPSServer(('127.0.0.1', port), TestPageHandler, options.cert, options.ssl_client_auth, options.ssl_client_ca) print 'HTTPS server started on port %d...' % server.server_port else: server = StoppableHTTPServer(('127.0.0.1', port), TestPageHandler) print 'HTTP server started on port %d...' % port server.data_dir = MakeDataDir() server.file_root_url = options.file_root_url server._sync_handler = None # means FTP Server else: my_data_dir = MakeDataDir() # Instantiate a dummy authorizer for managing 'virtual' users authorizer = pyftpdlib.ftpserver.DummyAuthorizer() # Define a new user having full r/w permissions and a read-only # anonymous user authorizer.add_user('chrome', 'chrome', my_data_dir, perm='elradfmw') authorizer.add_anonymous(my_data_dir) # Instantiate FTP handler class ftp_handler = pyftpdlib.ftpserver.FTPHandler ftp_handler.authorizer = authorizer # Define a customized banner (string returned when client connects) ftp_handler.banner = ("pyftpdlib %s based ftpd ready." % pyftpdlib.ftpserver.__ver__) # Instantiate FTP server class and listen to 127.0.0.1:port address = ('127.0.0.1', port) server = pyftpdlib.ftpserver.FTPServer(address, ftp_handler) print 'FTP server started on port %d...' % port # Notify the parent that we've started. (BaseServer subclasses # bind their sockets on construction.) 
if options.startup_pipe is not None: if sys.platform == 'win32': fd = msvcrt.open_osfhandle(options.startup_pipe, 0) else: fd = options.startup_pipe startup_pipe = os.fdopen(fd, "w") startup_pipe.write("READY") startup_pipe.close() try: server.serve_forever() except KeyboardInterrupt: print 'shutting down server' server.stop = True
475,246
def main(options, args): logfile = open('testserver.log', 'w') sys.stdout = FileMultiplexer(sys.stdout, logfile) sys.stderr = FileMultiplexer(sys.stderr, logfile) port = options.port if options.server_type == SERVER_HTTP: if options.cert: # let's make sure the cert file exists. if not os.path.isfile(options.cert): print 'specified server cert file not found: ' + options.cert + \ ' exiting...' return for ca_cert in options.ssl_client_ca: if not os.path.isfile(ca_cert): print 'specified trusted client CA file not found: ' + ca_cert + \ ' exiting...' return server = HTTPSServer(('127.0.0.1', port), TestPageHandler, options.cert, options.ssl_client_auth, options.ssl_client_ca) print 'HTTPS server started on port %d...' % port else: server = StoppableHTTPServer(('127.0.0.1', port), TestPageHandler) print 'HTTP server started on port %d...' % port server.data_dir = MakeDataDir() server.file_root_url = options.file_root_url server._sync_handler = None # means FTP Server else: my_data_dir = MakeDataDir() # Instantiate a dummy authorizer for managing 'virtual' users authorizer = pyftpdlib.ftpserver.DummyAuthorizer() # Define a new user having full r/w permissions and a read-only # anonymous user authorizer.add_user('chrome', 'chrome', my_data_dir, perm='elradfmw') authorizer.add_anonymous(my_data_dir) # Instantiate FTP handler class ftp_handler = pyftpdlib.ftpserver.FTPHandler ftp_handler.authorizer = authorizer # Define a customized banner (string returned when client connects) ftp_handler.banner = ("pyftpdlib %s based ftpd ready." % pyftpdlib.ftpserver.__ver__) # Instantiate FTP server class and listen to 127.0.0.1:port address = ('127.0.0.1', port) server = pyftpdlib.ftpserver.FTPServer(address, ftp_handler) print 'FTP server started on port %d...' % port # Notify the parent that we've started. (BaseServer subclasses # bind their sockets on construction.) 
if options.startup_pipe is not None: if sys.platform == 'win32': fd = msvcrt.open_osfhandle(options.startup_pipe, 0) else: fd = options.startup_pipe startup_pipe = os.fdopen(fd, "w") startup_pipe.write("READY") startup_pipe.close() try: server.serve_forever() except KeyboardInterrupt: print 'shutting down server' server.stop = True
def main(options, args): logfile = open('testserver.log', 'w') sys.stdout = FileMultiplexer(sys.stdout, logfile) sys.stderr = FileMultiplexer(sys.stderr, logfile) port = options.port if options.server_type == SERVER_HTTP: if options.cert: # let's make sure the cert file exists. if not os.path.isfile(options.cert): print 'specified server cert file not found: ' + options.cert + \ ' exiting...' return for ca_cert in options.ssl_client_ca: if not os.path.isfile(ca_cert): print 'specified trusted client CA file not found: ' + ca_cert + \ ' exiting...' return server = HTTPSServer(('127.0.0.1', port), TestPageHandler, options.cert, options.ssl_client_auth, options.ssl_client_ca) print 'HTTPS server started on port %d...' % port else: server = StoppableHTTPServer(('127.0.0.1', port), TestPageHandler) print 'HTTP server started on port %d...' % server.server_port server.data_dir = MakeDataDir() server.file_root_url = options.file_root_url server._sync_handler = None # means FTP Server else: my_data_dir = MakeDataDir() # Instantiate a dummy authorizer for managing 'virtual' users authorizer = pyftpdlib.ftpserver.DummyAuthorizer() # Define a new user having full r/w permissions and a read-only # anonymous user authorizer.add_user('chrome', 'chrome', my_data_dir, perm='elradfmw') authorizer.add_anonymous(my_data_dir) # Instantiate FTP handler class ftp_handler = pyftpdlib.ftpserver.FTPHandler ftp_handler.authorizer = authorizer # Define a customized banner (string returned when client connects) ftp_handler.banner = ("pyftpdlib %s based ftpd ready." % pyftpdlib.ftpserver.__ver__) # Instantiate FTP server class and listen to 127.0.0.1:port address = ('127.0.0.1', port) server = pyftpdlib.ftpserver.FTPServer(address, ftp_handler) print 'FTP server started on port %d...' % port # Notify the parent that we've started. (BaseServer subclasses # bind their sockets on construction.) 
if options.startup_pipe is not None: if sys.platform == 'win32': fd = msvcrt.open_osfhandle(options.startup_pipe, 0) else: fd = options.startup_pipe startup_pipe = os.fdopen(fd, "w") startup_pipe.write("READY") startup_pipe.close() try: server.serve_forever() except KeyboardInterrupt: print 'shutting down server' server.stop = True
475,247
def main(options, args): logfile = open('testserver.log', 'w') sys.stdout = FileMultiplexer(sys.stdout, logfile) sys.stderr = FileMultiplexer(sys.stderr, logfile) port = options.port if options.server_type == SERVER_HTTP: if options.cert: # let's make sure the cert file exists. if not os.path.isfile(options.cert): print 'specified server cert file not found: ' + options.cert + \ ' exiting...' return for ca_cert in options.ssl_client_ca: if not os.path.isfile(ca_cert): print 'specified trusted client CA file not found: ' + ca_cert + \ ' exiting...' return server = HTTPSServer(('127.0.0.1', port), TestPageHandler, options.cert, options.ssl_client_auth, options.ssl_client_ca) print 'HTTPS server started on port %d...' % port else: server = StoppableHTTPServer(('127.0.0.1', port), TestPageHandler) print 'HTTP server started on port %d...' % port server.data_dir = MakeDataDir() server.file_root_url = options.file_root_url server._sync_handler = None # means FTP Server else: my_data_dir = MakeDataDir() # Instantiate a dummy authorizer for managing 'virtual' users authorizer = pyftpdlib.ftpserver.DummyAuthorizer() # Define a new user having full r/w permissions and a read-only # anonymous user authorizer.add_user('chrome', 'chrome', my_data_dir, perm='elradfmw') authorizer.add_anonymous(my_data_dir) # Instantiate FTP handler class ftp_handler = pyftpdlib.ftpserver.FTPHandler ftp_handler.authorizer = authorizer # Define a customized banner (string returned when client connects) ftp_handler.banner = ("pyftpdlib %s based ftpd ready." % pyftpdlib.ftpserver.__ver__) # Instantiate FTP server class and listen to 127.0.0.1:port address = ('127.0.0.1', port) server = pyftpdlib.ftpserver.FTPServer(address, ftp_handler) print 'FTP server started on port %d...' % port # Notify the parent that we've started. (BaseServer subclasses # bind their sockets on construction.) 
if options.startup_pipe is not None: if sys.platform == 'win32': fd = msvcrt.open_osfhandle(options.startup_pipe, 0) else: fd = options.startup_pipe startup_pipe = os.fdopen(fd, "w") startup_pipe.write("READY") startup_pipe.close() try: server.serve_forever() except KeyboardInterrupt: print 'shutting down server' server.stop = True
deflisten_port = server.server_port main(options,listen_port = server.server_port args):listen_port = server.server_port logfilelisten_port = server.server_port =listen_port = server.server_port open('testserver.log',listen_port = server.server_port 'w')listen_port = server.server_port sys.stdoutlisten_port = server.server_port =listen_port = server.server_port FileMultiplexer(sys.stdout,listen_port = server.server_port logfile)listen_port = server.server_port sys.stderrlisten_port = server.server_port =listen_port = server.server_port FileMultiplexer(sys.stderr,listen_port = server.server_port logfile)listen_port = server.server_port listen_port = server.server_port portlisten_port = server.server_port =listen_port = server.server_port options.portlisten_port = server.server_port listen_port = server.server_port iflisten_port = server.server_port options.server_typelisten_port = server.server_port ==listen_port = server.server_port SERVER_HTTP:listen_port = server.server_port iflisten_port = server.server_port options.cert:listen_port = server.server_port #listen_port = server.server_port let'slisten_port = server.server_port makelisten_port = server.server_port surelisten_port = server.server_port thelisten_port = server.server_port certlisten_port = server.server_port filelisten_port = server.server_port exists.listen_port = server.server_port iflisten_port = server.server_port notlisten_port = server.server_port os.path.isfile(options.cert):listen_port = server.server_port printlisten_port = server.server_port 'specifiedlisten_port = server.server_port serverlisten_port = server.server_port certlisten_port = server.server_port filelisten_port = server.server_port notlisten_port = server.server_port found:listen_port = server.server_port 'listen_port = server.server_port +listen_port = server.server_port options.certlisten_port = server.server_port +listen_port = server.server_port \listen_port = server.server_port 'listen_port = server.server_port 
exiting...'listen_port = server.server_port returnlisten_port = server.server_port forlisten_port = server.server_port ca_certlisten_port = server.server_port inlisten_port = server.server_port options.ssl_client_ca:listen_port = server.server_port iflisten_port = server.server_port notlisten_port = server.server_port os.path.isfile(ca_cert):listen_port = server.server_port printlisten_port = server.server_port 'specifiedlisten_port = server.server_port trustedlisten_port = server.server_port clientlisten_port = server.server_port CAlisten_port = server.server_port filelisten_port = server.server_port notlisten_port = server.server_port found:listen_port = server.server_port 'listen_port = server.server_port +listen_port = server.server_port ca_certlisten_port = server.server_port +listen_port = server.server_port \listen_port = server.server_port 'listen_port = server.server_port exiting...'listen_port = server.server_port returnlisten_port = server.server_port serverlisten_port = server.server_port =listen_port = server.server_port HTTPSServer(('127.0.0.1',listen_port = server.server_port port),listen_port = server.server_port TestPageHandler,listen_port = server.server_port options.cert,listen_port = server.server_port options.ssl_client_auth,listen_port = server.server_port options.ssl_client_ca)listen_port = server.server_port printlisten_port = server.server_port 'HTTPSlisten_port = server.server_port serverlisten_port = server.server_port startedlisten_port = server.server_port onlisten_port = server.server_port portlisten_port = server.server_port %d...'listen_port = server.server_port %listen_port = server.server_port portlisten_port = server.server_port else:listen_port = server.server_port serverlisten_port = server.server_port =listen_port = server.server_port StoppableHTTPServer(('127.0.0.1',listen_port = server.server_port port),listen_port = server.server_port TestPageHandler)listen_port = server.server_port printlisten_port = server.server_port 
'HTTPlisten_port = server.server_port serverlisten_port = server.server_port startedlisten_port = server.server_port onlisten_port = server.server_port portlisten_port = server.server_port %d...'listen_port = server.server_port %listen_port = server.server_port portlisten_port = server.server_port listen_port = server.server_port server.data_dirlisten_port = server.server_port =listen_port = server.server_port MakeDataDir()listen_port = server.server_port server.file_root_urllisten_port = server.server_port =listen_port = server.server_port options.file_root_urllisten_port = server.server_port server._sync_handlerlisten_port = server.server_port =listen_port = server.server_port Nonelisten_port = server.server_port listen_port = server.server_port #listen_port = server.server_port meanslisten_port = server.server_port FTPlisten_port = server.server_port Serverlisten_port = server.server_port else:listen_port = server.server_port my_data_dirlisten_port = server.server_port =listen_port = server.server_port MakeDataDir()listen_port = server.server_port listen_port = server.server_port #listen_port = server.server_port Instantiatelisten_port = server.server_port alisten_port = server.server_port dummylisten_port = server.server_port authorizerlisten_port = server.server_port forlisten_port = server.server_port managinglisten_port = server.server_port 'virtual'listen_port = server.server_port userslisten_port = server.server_port authorizerlisten_port = server.server_port =listen_port = server.server_port pyftpdlib.ftpserver.DummyAuthorizer()listen_port = server.server_port listen_port = server.server_port #listen_port = server.server_port Definelisten_port = server.server_port alisten_port = server.server_port newlisten_port = server.server_port userlisten_port = server.server_port havinglisten_port = server.server_port fulllisten_port = server.server_port r/wlisten_port = server.server_port permissionslisten_port = server.server_port andlisten_port = 
server.server_port alisten_port = server.server_port read-onlylisten_port = server.server_port #listen_port = server.server_port anonymouslisten_port = server.server_port userlisten_port = server.server_port authorizer.add_user('chrome',listen_port = server.server_port 'chrome',listen_port = server.server_port my_data_dir,listen_port = server.server_port perm='elradfmw')listen_port = server.server_port listen_port = server.server_port authorizer.add_anonymous(my_data_dir)listen_port = server.server_port listen_port = server.server_port #listen_port = server.server_port Instantiatelisten_port = server.server_port FTPlisten_port = server.server_port handlerlisten_port = server.server_port classlisten_port = server.server_port ftp_handlerlisten_port = server.server_port =listen_port = server.server_port pyftpdlib.ftpserver.FTPHandlerlisten_port = server.server_port ftp_handler.authorizerlisten_port = server.server_port =listen_port = server.server_port authorizerlisten_port = server.server_port listen_port = server.server_port #listen_port = server.server_port Definelisten_port = server.server_port alisten_port = server.server_port customizedlisten_port = server.server_port bannerlisten_port = server.server_port (stringlisten_port = server.server_port returnedlisten_port = server.server_port whenlisten_port = server.server_port clientlisten_port = server.server_port connects)listen_port = server.server_port ftp_handler.bannerlisten_port = server.server_port =listen_port = server.server_port ("pyftpdliblisten_port = server.server_port %slisten_port = server.server_port basedlisten_port = server.server_port ftpdlisten_port = server.server_port ready."listen_port = server.server_port %listen_port = server.server_port pyftpdlib.ftpserver.__ver__)listen_port = server.server_port listen_port = server.server_port #listen_port = server.server_port Instantiatelisten_port = server.server_port FTPlisten_port = server.server_port serverlisten_port = server.server_port 
classlisten_port = server.server_port andlisten_port = server.server_port listenlisten_port = server.server_port tolisten_port = server.server_port 127.0.0.1:portlisten_port = server.server_port addresslisten_port = server.server_port =listen_port = server.server_port ('127.0.0.1',listen_port = server.server_port port)listen_port = server.server_port serverlisten_port = server.server_port =listen_port = server.server_port pyftpdlib.ftpserver.FTPServer(address,listen_port = server.server_port ftp_handler)listen_port = server.server_port printlisten_port = server.server_port 'FTPlisten_port = server.server_port serverlisten_port = server.server_port startedlisten_port = server.server_port onlisten_port = server.server_port portlisten_port = server.server_port %d...'listen_port = server.server_port %listen_port = server.server_port portlisten_port = server.server_port listen_port = server.server_port #listen_port = server.server_port Notifylisten_port = server.server_port thelisten_port = server.server_port parentlisten_port = server.server_port thatlisten_port = server.server_port we'velisten_port = server.server_port started.listen_port = server.server_port (BaseServerlisten_port = server.server_port subclasseslisten_port = server.server_port #listen_port = server.server_port bindlisten_port = server.server_port theirlisten_port = server.server_port socketslisten_port = server.server_port onlisten_port = server.server_port construction.)listen_port = server.server_port iflisten_port = server.server_port options.startup_pipelisten_port = server.server_port islisten_port = server.server_port notlisten_port = server.server_port None:listen_port = server.server_port iflisten_port = server.server_port sys.platformlisten_port = server.server_port ==listen_port = server.server_port 'win32':listen_port = server.server_port fdlisten_port = server.server_port =listen_port = server.server_port msvcrt.open_osfhandle(options.startup_pipe,listen_port = server.server_port 
0)listen_port = server.server_port else:listen_port = server.server_port fdlisten_port = server.server_port =listen_port = server.server_port options.startup_pipelisten_port = server.server_port startup_pipelisten_port = server.server_port =listen_port = server.server_port os.fdopen(fd,listen_port = server.server_port "w")listen_port = server.server_port startup_pipe.write("READY")listen_port = server.server_port startup_pipe.close()listen_port = server.server_port listen_port = server.server_port try:listen_port = server.server_port server.serve_forever()listen_port = server.server_port exceptlisten_port = server.server_port KeyboardInterrupt:listen_port = server.server_port printlisten_port = server.server_port 'shuttinglisten_port = server.server_port downlisten_port = server.server_port server'listen_port = server.server_port server.stoplisten_port = server.server_port =listen_port = server.server_port Truelisten_port = server.server_port
475,248
def main(options, args): logfile = open('testserver.log', 'w') sys.stdout = FileMultiplexer(sys.stdout, logfile) sys.stderr = FileMultiplexer(sys.stderr, logfile) port = options.port if options.server_type == SERVER_HTTP: if options.cert: # let's make sure the cert file exists. if not os.path.isfile(options.cert): print 'specified server cert file not found: ' + options.cert + \ ' exiting...' return for ca_cert in options.ssl_client_ca: if not os.path.isfile(ca_cert): print 'specified trusted client CA file not found: ' + ca_cert + \ ' exiting...' return server = HTTPSServer(('127.0.0.1', port), TestPageHandler, options.cert, options.ssl_client_auth, options.ssl_client_ca) print 'HTTPS server started on port %d...' % port else: server = StoppableHTTPServer(('127.0.0.1', port), TestPageHandler) print 'HTTP server started on port %d...' % port server.data_dir = MakeDataDir() server.file_root_url = options.file_root_url server._sync_handler = None # means FTP Server else: my_data_dir = MakeDataDir() # Instantiate a dummy authorizer for managing 'virtual' users authorizer = pyftpdlib.ftpserver.DummyAuthorizer() # Define a new user having full r/w permissions and a read-only # anonymous user authorizer.add_user('chrome', 'chrome', my_data_dir, perm='elradfmw') authorizer.add_anonymous(my_data_dir) # Instantiate FTP handler class ftp_handler = pyftpdlib.ftpserver.FTPHandler ftp_handler.authorizer = authorizer # Define a customized banner (string returned when client connects) ftp_handler.banner = ("pyftpdlib %s based ftpd ready." % pyftpdlib.ftpserver.__ver__) # Instantiate FTP server class and listen to 127.0.0.1:port address = ('127.0.0.1', port) server = pyftpdlib.ftpserver.FTPServer(address, ftp_handler) print 'FTP server started on port %d...' % port # Notify the parent that we've started. (BaseServer subclasses # bind their sockets on construction.) 
if options.startup_pipe is not None: if sys.platform == 'win32': fd = msvcrt.open_osfhandle(options.startup_pipe, 0) else: fd = options.startup_pipe startup_pipe = os.fdopen(fd, "w") startup_pipe.write("READY") startup_pipe.close() try: server.serve_forever() except KeyboardInterrupt: print 'shutting down server' server.stop = True
def main(options, args): logfile = open('testserver.log', 'w') sys.stdout = FileMultiplexer(sys.stdout, logfile) sys.stderr = FileMultiplexer(sys.stderr, logfile) port = options.port if options.server_type == SERVER_HTTP: if options.cert: # let's make sure the cert file exists. if not os.path.isfile(options.cert): print 'specified server cert file not found: ' + options.cert + \ ' exiting...' return for ca_cert in options.ssl_client_ca: if not os.path.isfile(ca_cert): print 'specified trusted client CA file not found: ' + ca_cert + \ ' exiting...' return server = HTTPSServer(('127.0.0.1', port), TestPageHandler, options.cert, options.ssl_client_auth, options.ssl_client_ca) print 'HTTPS server started on port %d...' % port else: server = StoppableHTTPServer(('127.0.0.1', port), TestPageHandler) print 'HTTP server started on port %d...' % port server.data_dir = MakeDataDir() server.file_root_url = options.file_root_url server._sync_handler = None # means FTP Server else: my_data_dir = MakeDataDir() # Instantiate a dummy authorizer for managing 'virtual' users authorizer = pyftpdlib.ftpserver.DummyAuthorizer() # Define a new user having full r/w permissions and a read-only # anonymous user authorizer.add_user('chrome', 'chrome', my_data_dir, perm='elradfmw') authorizer.add_anonymous(my_data_dir) # Instantiate FTP handler class ftp_handler = pyftpdlib.ftpserver.FTPHandler ftp_handler.authorizer = authorizer # Define a customized banner (string returned when client connects) ftp_handler.banner = ("pyftpdlib %s based ftpd ready." % pyftpdlib.ftpserver.__ver__) # Instantiate FTP server class and listen to 127.0.0.1:port address = ('127.0.0.1', port) server = pyftpdlib.ftpserver.FTPServer(address, ftp_handler) listen_port = server.socket.getsockname()[1] print 'FTP server started on port %d...' % listen_port # Notify the parent that we've started. (BaseServer subclasses # bind their sockets on construction.) 
if options.startup_pipe is not None: if sys.platform == 'win32': fd = msvcrt.open_osfhandle(options.startup_pipe, 0) else: fd = options.startup_pipe startup_pipe = os.fdopen(fd, "w") startup_pipe.write("READY") startup_pipe.close() try: server.serve_forever() except KeyboardInterrupt: print 'shutting down server' server.stop = True
475,249
def main(options, args): logfile = open('testserver.log', 'w') sys.stdout = FileMultiplexer(sys.stdout, logfile) sys.stderr = FileMultiplexer(sys.stderr, logfile) port = options.port if options.server_type == SERVER_HTTP: if options.cert: # let's make sure the cert file exists. if not os.path.isfile(options.cert): print 'specified server cert file not found: ' + options.cert + \ ' exiting...' return for ca_cert in options.ssl_client_ca: if not os.path.isfile(ca_cert): print 'specified trusted client CA file not found: ' + ca_cert + \ ' exiting...' return server = HTTPSServer(('127.0.0.1', port), TestPageHandler, options.cert, options.ssl_client_auth, options.ssl_client_ca) print 'HTTPS server started on port %d...' % port else: server = StoppableHTTPServer(('127.0.0.1', port), TestPageHandler) print 'HTTP server started on port %d...' % port server.data_dir = MakeDataDir() server.file_root_url = options.file_root_url server._sync_handler = None # means FTP Server else: my_data_dir = MakeDataDir() # Instantiate a dummy authorizer for managing 'virtual' users authorizer = pyftpdlib.ftpserver.DummyAuthorizer() # Define a new user having full r/w permissions and a read-only # anonymous user authorizer.add_user('chrome', 'chrome', my_data_dir, perm='elradfmw') authorizer.add_anonymous(my_data_dir) # Instantiate FTP handler class ftp_handler = pyftpdlib.ftpserver.FTPHandler ftp_handler.authorizer = authorizer # Define a customized banner (string returned when client connects) ftp_handler.banner = ("pyftpdlib %s based ftpd ready." % pyftpdlib.ftpserver.__ver__) # Instantiate FTP server class and listen to 127.0.0.1:port address = ('127.0.0.1', port) server = pyftpdlib.ftpserver.FTPServer(address, ftp_handler) print 'FTP server started on port %d...' % port # Notify the parent that we've started. (BaseServer subclasses # bind their sockets on construction.) 
if options.startup_pipe is not None: if sys.platform == 'win32': fd = msvcrt.open_osfhandle(options.startup_pipe, 0) else: fd = options.startup_pipe startup_pipe = os.fdopen(fd, "w") startup_pipe.write("READY") startup_pipe.close() try: server.serve_forever() except KeyboardInterrupt: print 'shutting down server' server.stop = True
def main(options, args): logfile = open('testserver.log', 'w') sys.stdout = FileMultiplexer(sys.stdout, logfile) sys.stderr = FileMultiplexer(sys.stderr, logfile) port = options.port if options.server_type == SERVER_HTTP: if options.cert: # let's make sure the cert file exists. if not os.path.isfile(options.cert): print 'specified server cert file not found: ' + options.cert + \ ' exiting...' return for ca_cert in options.ssl_client_ca: if not os.path.isfile(ca_cert): print 'specified trusted client CA file not found: ' + ca_cert + \ ' exiting...' return server = HTTPSServer(('127.0.0.1', port), TestPageHandler, options.cert, options.ssl_client_auth, options.ssl_client_ca) print 'HTTPS server started on port %d...' % port else: server = StoppableHTTPServer(('127.0.0.1', port), TestPageHandler) print 'HTTP server started on port %d...' % port server.data_dir = MakeDataDir() server.file_root_url = options.file_root_url server._sync_handler = None # means FTP Server else: my_data_dir = MakeDataDir() # Instantiate a dummy authorizer for managing 'virtual' users authorizer = pyftpdlib.ftpserver.DummyAuthorizer() # Define a new user having full r/w permissions and a read-only # anonymous user authorizer.add_user('chrome', 'chrome', my_data_dir, perm='elradfmw') authorizer.add_anonymous(my_data_dir) # Instantiate FTP handler class ftp_handler = pyftpdlib.ftpserver.FTPHandler ftp_handler.authorizer = authorizer # Define a customized banner (string returned when client connects) ftp_handler.banner = ("pyftpdlib %s based ftpd ready." % pyftpdlib.ftpserver.__ver__) # Instantiate FTP server class and listen to 127.0.0.1:port address = ('127.0.0.1', port) server = pyftpdlib.ftpserver.FTPServer(address, ftp_handler) print 'FTP server started on port %d...' % port # Notify the parent that we've started. (BaseServer subclasses # bind their sockets on construction.) 
if options.startup_pipe is not None: if sys.platform == 'win32': fd = msvcrt.open_osfhandle(options.startup_pipe, 0) else: fd = options.startup_pipe startup_pipe = os.fdopen(fd, "w") startup_pipe.write(struct.pack('@H', listen_port)) startup_pipe.close() try: server.serve_forever() except KeyboardInterrupt: print 'shutting down server' server.stop = True
475,250
def main(options, args):
    """Start a local test HTTP(S) or FTP server on 127.0.0.1:options.port.

    All stdout/stderr output is also mirrored into testserver.log.  If
    options.startup_pipe is set, the literal string "READY" is written to
    that pipe so the parent process knows the server is ready.
    """
    logfile = open('testserver.log', 'w')
    sys.stdout = FileMultiplexer(sys.stdout, logfile)
    sys.stderr = FileMultiplexer(sys.stderr, logfile)
    port = options.port
    if options.server_type == SERVER_HTTP:
        if options.cert:
            # let's make sure the cert file exists.
            if not os.path.isfile(options.cert):
                print 'specified server cert file not found: ' + options.cert + \
                    ' exiting...'
                return
            for ca_cert in options.ssl_client_ca:
                if not os.path.isfile(ca_cert):
                    print 'specified trusted client CA file not found: ' + ca_cert + \
                        ' exiting...'
                    return
            server = HTTPSServer(('127.0.0.1', port), TestPageHandler, options.cert,
                                 options.ssl_client_auth, options.ssl_client_ca)
            print 'HTTPS server started on port %d...' % port
        else:
            server = StoppableHTTPServer(('127.0.0.1', port), TestPageHandler)
            print 'HTTP server started on port %d...' % port
        server.data_dir = MakeDataDir()
        server.file_root_url = options.file_root_url
        server._sync_handler = None
    # means FTP Server
    else:
        my_data_dir = MakeDataDir()
        # Instantiate a dummy authorizer for managing 'virtual' users
        authorizer = pyftpdlib.ftpserver.DummyAuthorizer()
        # Define a new user having full r/w permissions and a read-only
        # anonymous user
        authorizer.add_user('chrome', 'chrome', my_data_dir, perm='elradfmw')
        authorizer.add_anonymous(my_data_dir)
        # Instantiate FTP handler class
        ftp_handler = pyftpdlib.ftpserver.FTPHandler
        ftp_handler.authorizer = authorizer
        # Define a customized banner (string returned when client connects)
        ftp_handler.banner = ("pyftpdlib %s based ftpd ready." %
                              pyftpdlib.ftpserver.__ver__)
        # Instantiate FTP server class and listen to 127.0.0.1:port
        address = ('127.0.0.1', port)
        server = pyftpdlib.ftpserver.FTPServer(address, ftp_handler)
        print 'FTP server started on port %d...' % port
    # Notify the parent that we've started. (BaseServer subclasses
    # bind their sockets on construction.)
    if options.startup_pipe is not None:
        if sys.platform == 'win32':
            fd = msvcrt.open_osfhandle(options.startup_pipe, 0)
        else:
            fd = options.startup_pipe
        startup_pipe = os.fdopen(fd, "w")
        startup_pipe.write("READY")
        startup_pipe.close()
    try:
        server.serve_forever()
    except KeyboardInterrupt:
        print 'shutting down server'
        server.stop = True
def main(options, args):
    """Start a local test HTTP(S) or FTP server on 127.0.0.1:options.port.

    All stdout/stderr output is also mirrored into testserver.log.  If
    options.startup_pipe is set, the literal string "READY" is written to
    that pipe so the parent process knows the server is ready.
    """
    logfile = open('testserver.log', 'w')
    sys.stdout = FileMultiplexer(sys.stdout, logfile)
    sys.stderr = FileMultiplexer(sys.stderr, logfile)
    port = options.port
    if options.server_type == SERVER_HTTP:
        if options.cert:
            # let's make sure the cert file exists.
            if not os.path.isfile(options.cert):
                print 'specified server cert file not found: ' + options.cert + \
                    ' exiting...'
                return
            for ca_cert in options.ssl_client_ca:
                if not os.path.isfile(ca_cert):
                    print 'specified trusted client CA file not found: ' + ca_cert + \
                        ' exiting...'
                    return
            server = HTTPSServer(('127.0.0.1', port), TestPageHandler, options.cert,
                                 options.ssl_client_auth, options.ssl_client_ca)
            print 'HTTPS server started on port %d...' % port
        else:
            server = StoppableHTTPServer(('127.0.0.1', port), TestPageHandler)
            print 'HTTP server started on port %d...' % port
        server.data_dir = MakeDataDir()
        server.file_root_url = options.file_root_url
        server._sync_handler = None
    # means FTP Server
    else:
        my_data_dir = MakeDataDir()
        # Instantiate a dummy authorizer for managing 'virtual' users
        authorizer = pyftpdlib.ftpserver.DummyAuthorizer()
        # Define a new user having full r/w permissions and a read-only
        # anonymous user
        authorizer.add_user('chrome', 'chrome', my_data_dir, perm='elradfmw')
        authorizer.add_anonymous(my_data_dir)
        # Instantiate FTP handler class
        ftp_handler = pyftpdlib.ftpserver.FTPHandler
        ftp_handler.authorizer = authorizer
        # Define a customized banner (string returned when client connects)
        ftp_handler.banner = ("pyftpdlib %s based ftpd ready." %
                              pyftpdlib.ftpserver.__ver__)
        # Instantiate FTP server class and listen to 127.0.0.1:port
        address = ('127.0.0.1', port)
        server = pyftpdlib.ftpserver.FTPServer(address, ftp_handler)
        print 'FTP server started on port %d...' % port
    # Notify the parent that we've started. (BaseServer subclasses
    # bind their sockets on construction.)
    if options.startup_pipe is not None:
        if sys.platform == 'win32':
            fd = msvcrt.open_osfhandle(options.startup_pipe, 0)
        else:
            fd = options.startup_pipe
        startup_pipe = os.fdopen(fd, "w")
        startup_pipe.write("READY")
        startup_pipe.close()
    try:
        server.serve_forever()
    except KeyboardInterrupt:
        print 'shutting down server'
        server.stop = True
475,251
def main(argv):
    """Shard a gtest binary across --shards concurrent launchers.

    Returns 0 if every shard exits cleanly, 1 if any shard fails.
    """
    parser = optparse.OptionParser()
    parser.add_option("--shards", type="int", dest="shards", default=10)
    options, args = parser.parse_args(argv)
    if len(args) != 1:
        print 'You must provide only one argument: path to the test binary'
        return 1
    launchers = []
    # Launch every shard first so they all run concurrently...
    for shard in range(options.shards):
        launcher = TestLauncher(args[0], args[0], options.shards, shard)
        launcher.launch()
        launchers.append(launcher)
    return_code = 0
    # ...then wait for each one; any nonzero exit fails the whole run.
    for launcher in launchers:
        if launcher.wait() != 0:
            return_code = 1
    return return_code
def main(argv):
    """Shard a gtest binary across --shards concurrent launchers.

    Returns 0 if every shard exits cleanly, 1 if any shard fails.
    NOTE(review): extra positional arguments beyond args[0] are silently
    ignored here -- confirm that is intended.
    """
    parser = optparse.OptionParser()
    parser.add_option("--shards", type="int", dest="shards", default=10)
    options, args = parser.parse_args(argv)
    if not args:
        print 'You must provide path to the test binary'
        return 1
    launchers = []
    # Launch every shard first so they all run concurrently...
    for shard in range(options.shards):
        launcher = TestLauncher(args[0], args[0], options.shards, shard)
        launcher.launch()
        launchers.append(launcher)
    return_code = 0
    # ...then wait for each one; any nonzero exit fails the whole run.
    for launcher in launchers:
        if launcher.wait() != 0:
            return_code = 1
    return return_code
475,252
def main(argv):
    """Shard a gtest binary across --shards concurrent launchers.

    Returns 0 if every shard exits cleanly, 1 if any shard fails.
    """
    parser = optparse.OptionParser()
    parser.add_option("--shards", type="int", dest="shards", default=10)
    options, args = parser.parse_args(argv)
    if len(args) != 1:
        print 'You must provide only one argument: path to the test binary'
        return 1
    launchers = []
    # Launch every shard first so they all run concurrently...
    for shard in range(options.shards):
        launcher = TestLauncher(args[0], args[0], options.shards, shard)
        launcher.launch()
        launchers.append(launcher)
    return_code = 0
    # ...then wait for each one; any nonzero exit fails the whole run.
    for launcher in launchers:
        if launcher.wait() != 0:
            return_code = 1
    return return_code
def main(argv):
    """Shard a gtest binary across --shards concurrent launchers.

    Returns 0 if every shard exits cleanly, 1 if any shard fails.
    """
    parser = optparse.OptionParser()
    parser.add_option("--shards", type="int", dest="shards", default=10)
    options, args = parser.parse_args(argv)
    if len(args) != 1:
        print 'You must provide only one argument: path to the test binary'
        return 1
    launchers = []
    for shard in range(options.shards):
        # NOTE(review): the first constructor argument is the whole `args`
        # list while the second is args[0]; other copies of this function
        # pass args[0] for both -- verify against TestLauncher's signature.
        launcher = TestLauncher(args, args[0], options.shards, shard)
        launcher.launch()
        launchers.append(launcher)
    return_code = 0
    # Wait for each shard; any nonzero exit fails the whole run.
    for launcher in launchers:
        if launcher.wait() != 0:
            return_code = 1
    return return_code
475,253
def __init__(self, request, client_address, socket_server):
    """Registers the per-HTTP-method handler chains (tried in list order).

    NOTE(review): the base-class __init__ call is not visible in this chunk
    of the file -- presumably it follows these assignments.
    """
    # CONNECT requests.
    self._connect_handlers = [
        self.RedirectConnectHandler,
        self.ServerAuthConnectHandler,
        self.DefaultConnectResponseHandler]
    # GET requests; DefaultResponseHandler is the catch-all and stays last.
    self._get_handlers = [
        self.KillHandler,
        self.NoCacheMaxAgeTimeHandler,
        self.NoCacheTimeHandler,
        self.CacheTimeHandler,
        self.CacheExpiresHandler,
        self.CacheProxyRevalidateHandler,
        self.CachePrivateHandler,
        self.CachePublicHandler,
        self.CacheSMaxAgeHandler,
        self.CacheMustRevalidateHandler,
        self.CacheMustRevalidateMaxAgeHandler,
        self.CacheNoStoreHandler,
        self.CacheNoStoreMaxAgeHandler,
        self.CacheNoTransformHandler,
        self.DownloadHandler,
        self.DownloadFinishHandler,
        self.EchoHeader,
        self.EchoHeaderOverride,
        self.EchoAllHandler,
        self.FileHandler,
        self.RealFileWithCommonHeaderHandler,
        self.RealBZ2FileWithCommonHeaderHandler,
        self.SetCookieHandler,
        self.AuthBasicHandler,
        self.AuthDigestHandler,
        self.SlowServerHandler,
        self.ContentTypeHandler,
        self.ServerRedirectHandler,
        self.ClientRedirectHandler,
        self.ChromiumSyncTimeHandler,
        self.MultipartHandler,
        self.DefaultResponseHandler]
    # POST tries echo/sync-command handlers first, then the GET chain.
    self._post_handlers = [
        self.EchoTitleHandler,
        self.EchoAllHandler,
        self.ChromiumSyncCommandHandler,
        self.EchoHandler] + self._get_handlers
    # PUT mirrors POST minus the sync-command handler.
    self._put_handlers = [
        self.EchoTitleHandler,
        self.EchoAllHandler,
        self.EchoHandler] + self._get_handlers
def __init__(self, request, client_address, socket_server):
    """Registers the per-HTTP-method handler chains (tried in list order).

    NOTE(review): the base-class __init__ call is not visible in this chunk
    of the file -- presumably it follows these assignments.
    """
    # CONNECT requests.
    self._connect_handlers = [
        self.RedirectConnectHandler,
        self.ServerAuthConnectHandler,
        self.DefaultConnectResponseHandler]
    # GET requests; DefaultResponseHandler is the catch-all and stays last.
    self._get_handlers = [
        self.NoCacheMaxAgeTimeHandler,
        self.NoCacheTimeHandler,
        self.CacheTimeHandler,
        self.CacheExpiresHandler,
        self.CacheProxyRevalidateHandler,
        self.CachePrivateHandler,
        self.CachePublicHandler,
        self.CacheSMaxAgeHandler,
        self.CacheMustRevalidateHandler,
        self.CacheMustRevalidateMaxAgeHandler,
        self.CacheNoStoreHandler,
        self.CacheNoStoreMaxAgeHandler,
        self.CacheNoTransformHandler,
        self.DownloadHandler,
        self.DownloadFinishHandler,
        self.EchoHeader,
        self.EchoHeaderOverride,
        self.EchoAllHandler,
        self.FileHandler,
        self.RealFileWithCommonHeaderHandler,
        self.RealBZ2FileWithCommonHeaderHandler,
        self.SetCookieHandler,
        self.AuthBasicHandler,
        self.AuthDigestHandler,
        self.SlowServerHandler,
        self.ContentTypeHandler,
        self.ServerRedirectHandler,
        self.ClientRedirectHandler,
        self.ChromiumSyncTimeHandler,
        self.MultipartHandler,
        self.DefaultResponseHandler]
    # POST tries echo/sync-command handlers first, then the GET chain.
    self._post_handlers = [
        self.EchoTitleHandler,
        self.EchoAllHandler,
        self.ChromiumSyncCommandHandler,
        self.EchoHandler] + self._get_handlers
    # PUT mirrors POST minus the sync-command handler.
    self._put_handlers = [
        self.EchoTitleHandler,
        self.EchoAllHandler,
        self.EchoHandler] + self._get_handlers
475,254
def KillHandler(self):
    """This request handler kills the server, for use when we're done with a
    particular test."""
def KillHandler(self):
    """This request handler kills the server, for use when we're done with a
    particular test."""
475,255
def line_logger(msg): if (msg.find("kill") >= 0): server.stop = True print 'shutting down server' sys.exit(0)
def line_logger(msg):
    # FTP log hook: when a logged line contains "kill", flag the enclosing
    # server object to stop and terminate the process.
    if (msg.find("kill") >= 0):
        server.stop = True
        print 'shutting down server'
        sys.exit(0)
475,256
def line_logger(msg):
    # FTP log hook: when a logged line contains "kill", flag the enclosing
    # server object to stop and terminate the process.
    if (msg.find("kill") >= 0):
        server.stop = True
        print 'shutting down server'
        sys.exit(0)
def line_logger(msg):
    # FTP log hook: when a logged line contains "kill", flag the enclosing
    # server object to stop and terminate the process.
    if (msg.find("kill") >= 0):
        server.stop = True
        print 'shutting down server'
        sys.exit(0)
475,257
def line_logger(msg):
    # FTP log hook: when a logged line contains "kill", flag the enclosing
    # server object to stop and terminate the process.
    if (msg.find("kill") >= 0):
        server.stop = True
        print 'shutting down server'
        sys.exit(0)
def line_logger(msg):
    # FTP log hook: when a logged line contains "kill", flag the enclosing
    # server object to stop and terminate the process.
    if (msg.find("kill") >= 0):
        server.stop = True
        print 'shutting down server'
        sys.exit(0)
475,258
def WriteGLES2ImplementationHeader(self, func, file): """Writes the GLES2 Implemention.""" impl_func = func.GetInfo('impl_func') impl_decl = func.GetInfo('impl_decl') if (func.can_auto_generate and (impl_func == None or impl_func == True) and (impl_decl == None or impl_decl == True)): file.Write("%s %s(%s) {\n" % (func.return_type, func.original_name, func.MakeTypedOriginalArgString(""))) for arg in func.GetOriginalArgs(): arg.WriteClientSideValidationCode(file, func) code = """ if (Is%(type)sReservedId(%(id)s)) { SetGLError(GL_INVALID_OPERATION, "%(name)s: %(id)s reserved id"); return;
def WriteGLES2ImplementationHeader(self, func, file): """Writes the GLES2 Implemention.""" impl_func = func.GetInfo('impl_func') impl_decl = func.GetInfo('impl_decl') if (func.can_auto_generate and (impl_func == None or impl_func == True) and (impl_decl == None or impl_decl == True)): file.Write("%s %s(%s) {\n" % (func.return_type, func.original_name, func.MakeTypedOriginalArgString(""))) for arg in func.GetOriginalArgs(): arg.WriteClientSideValidationCode(file, func) code = """ if (Is%(type)sReservedId(%(id)s)) { SetGLError(GL_INVALID_OPERATION, "%(name)s: %(id)s reserved id"); return;
475,259
def WriteGLES2ImplementationHeader(self, func, file): """Overrriden from TypeHandler.""" code = """%(return_type)s %(name)s(%(typed_args)s) {
def WriteGLES2ImplementationHeader(self, func, file): """Overrriden from TypeHandler.""" code = """%(return_type)s %(name)s(%(typed_args)s) {
475,260
def WriteGLES2ImplementationHeader(self, func, file):
    """Overrriden from TypeHandler."""
    # Emit a client-side wrapper that allocates a fresh client id, issues
    # the command through the helper, and returns the new id.
    file.Write("%s %s(%s) {\n" %
               (func.return_type, func.original_name,
                func.MakeTypedOriginalArgString("")))
    file.Write(" GLuint client_id;\n")
    file.Write(" MakeIds(&program_and_shader_id_allocator_, 1, &client_id);\n")
    file.Write(" helper_->%s(%s);\n" %
               (func.name, func.MakeCmdArgString("")))
    file.Write(" return client_id;\n")
    file.Write("}\n")
    file.Write("\n")
def WriteGLES2ImplementationHeader(self, func, file):
    """Overrriden from TypeHandler."""
    # Emit a client-side wrapper that obtains a fresh client id from the
    # shared id handler, issues the command, and returns the new id.
    file.Write("%s %s(%s) {\n" %
               (func.return_type, func.original_name,
                func.MakeTypedOriginalArgString("")))
    file.Write(" GLuint client_id;\n")
    file.Write(" program_and_shader_id_handler_->MakeIds(0, 1, &client_id);\n")
    file.Write(" helper_->%s(%s);\n" %
               (func.name, func.MakeCmdArgString("")))
    file.Write(" return client_id;\n")
    file.Write("}\n")
    file.Write("\n")
475,261
def WriteGLES2ImplementationHeader(self, func, file):
    """Overrriden from TypeHandler."""
    impl_decl = func.GetInfo('impl_decl')
    if impl_decl == None or impl_decl == True:
        # Emit a Delete* wrapper: release the client-side ids, then issue
        # the immediate-mode command.  func.name[6:-1] strips the leading
        # "Delete" and trailing "s" to get the resource name.
        file.Write("%s %s(%s) {\n" %
                   (func.return_type, func.original_name,
                    func.MakeTypedOriginalArgString("")))
        file.Write(" FreeIds(&%s_id_allocator_, %s);\n" %
                   (func.name[6:-1].lower(), func.MakeOriginalArgString("")))
        file.Write(" helper_->%sImmediate(%s);\n" %
                   (func.name, func.MakeOriginalArgString("")))
        file.Write("}\n")
        file.Write("\n")
def WriteGLES2ImplementationHeader(self, func, file):
    """Overrriden from TypeHandler."""
    impl_decl = func.GetInfo('impl_decl')
    if impl_decl == None or impl_decl == True:
        # Emit a Delete* wrapper: release the ids via the per-resource id
        # handler, then issue the immediate-mode command.  func.name[6:-1]
        # strips the leading "Delete" and trailing "s".
        file.Write("%s %s(%s) {\n" %
                   (func.return_type, func.original_name,
                    func.MakeTypedOriginalArgString("")))
        file.Write(" %s_id_handler_->FreeIds(%s);\n" %
                   (func.name[6:-1].lower(), func.MakeOriginalArgString("")))
        file.Write(" helper_->%sImmediate(%s);\n" %
                   (func.name, func.MakeOriginalArgString("")))
        file.Write("}\n")
        file.Write("\n")
475,262
def _DefaultCommand(self, tool, module, exe=None, valgrind_test_args=None):
    '''Generates the default command array that most tests will use.'''
    # NOTE(review): only this module-dir computation is visible in this
    # dump; 'tool', 'exe' and 'valgrind_test_args' are unused here, so the
    # rest of the body appears truncated.
    module_dir = os.path.join(self._source_dir, module)
def _DefaultCommand(self, tool, module, exe=None, valgrind_test_args=None):
    '''Generates the default command array that most tests will use.'''
    # NOTE(review): only this module-dir computation is visible in this
    # dump; 'tool', 'exe' and 'valgrind_test_args' are unused here, so the
    # rest of the body appears truncated.
    module_dir = os.path.join(self._source_dir, module)
475,263
def _DefaultCommand(self, tool, module, exe=None, valgrind_test_args=None):
    '''Generates the default command array that most tests will use.'''
    # NOTE(review): only this module-dir computation is visible in this
    # dump; 'tool', 'exe' and 'valgrind_test_args' are unused here, so the
    # rest of the body appears truncated.
    module_dir = os.path.join(self._source_dir, module)
def _DefaultCommand(self, tool, module, exe=None, valgrind_test_args=None):
    '''Generates the default command array that most tests will use.'''
    # NOTE(review): only this module-dir computation is visible in this
    # dump; 'tool', 'exe' and 'valgrind_test_args' are unused here, so the
    # rest of the body appears truncated.
    module_dir = os.path.join(self._source_dir, module)
475,264
def _ReadGtestFilterFile(self, tool, name, cmd):
    '''Read a file which is a list of tests to filter out with --gtest_filter
    and append the command-line option to cmd.

    Args:
      tool: tool wrapper whose ToolName() selects tool-specific filter files.
      name: base name of the test binary.
      cmd: command list; --gtest_filter is appended in place.
    '''
    filters = []
    for directory in self._data_dirs:
        # Generic, tool-specific, platform-specific, and tool+platform
        # filter files are all consulted.
        gtest_filter_files = [
            os.path.join(directory, name + ".gtest.txt"),
            os.path.join(directory, name + ".gtest-%s.txt" % \
                tool.ToolName())]
        for platform_suffix in common.PlatformNames():
            gtest_filter_files += [
                os.path.join(directory, name + ".gtest_%s.txt" % platform_suffix),
                os.path.join(directory, name + ".gtest-%s_%s.txt" % \
                    (tool.ToolName(), platform_suffix))]
        for filename in gtest_filter_files:
            if os.path.exists(filename):
                logging.info("reading gtest filters from %s" % filename)
                # BUG FIX: the original opened the file and never closed it;
                # a context manager closes the handle deterministically.
                with open(filename, 'r') as f:
                    for line in f.readlines():
                        if line.startswith("#") or line.startswith("//") or \
                            line.isspace():
                            continue
                        line = line.rstrip()
                        test_prefixes = ["FLAKY", "FAILS"]
                        for p in test_prefixes:
                            # Strip prefixes from the test names.
                            line = line.replace(".%s_" % p, ".")
                        # Exclude the original test name.
                        filters.append(line)
                        if line[-2:] != ".*":
                            # List all possible prefixes if line doesn't end with ".*".
                            for p in test_prefixes:
                                filters.append(line.replace(".", ".%s_" % p))
    # Get rid of duplicates.
    filters = set(filters)
    gtest_filter = self._gtest_filter
    if len(filters):
        if gtest_filter:
            gtest_filter += ":"
            if gtest_filter.find("-") < 0:
                gtest_filter += "-"
        else:
            gtest_filter = "-"
        gtest_filter += ":".join(filters)
    if gtest_filter:
        cmd.append("--gtest_filter=%s" % gtest_filter)
def _ReadGtestFilterFile(self, tool, name, cmd):
    '''Read a file which is a list of tests to filter out with --gtest_filter
    and append the command-line option to cmd.

    Args:
      tool: tool wrapper whose ToolName() selects tool-specific filter files.
      name: base name of the test binary.
      cmd: command list; --gtest_filter is appended in place.
    '''
    filters = []
    for directory in self._data_dirs:
        # Generic, tool-specific, platform-specific, and tool+platform
        # filter files are all consulted.
        gtest_filter_files = [
            os.path.join(directory, name + ".gtest.txt"),
            os.path.join(directory, name + ".gtest-%s.txt" % \
                tool.ToolName())]
        for platform_suffix in common.PlatformNames():
            gtest_filter_files += [
                os.path.join(directory, name + ".gtest_%s.txt" % platform_suffix),
                os.path.join(directory, name + ".gtest-%s_%s.txt" % \
                    (tool.ToolName(), platform_suffix))]
        for filename in gtest_filter_files:
            if os.path.exists(filename):
                logging.info("reading gtest filters from %s" % filename)
                # NOTE(review): this file handle is never closed -- consider
                # a `with open(...)` block.
                f = open(filename, 'r')
                for line in f.readlines():
                    if line.startswith("#") or line.startswith("//") or \
                        line.isspace():
                        continue
                    line = line.rstrip()
                    test_prefixes = ["FLAKY", "FAILS"]
                    for p in test_prefixes:
                        # Strip prefixes from the test names.
                        line = line.replace(".%s_" % p, ".")
                    # Exclude the original test name.
                    filters.append(line)
                    if line[-2:] != ".*":
                        # List all possible prefixes if line doesn't end with ".*".
                        for p in test_prefixes:
                            filters.append(line.replace(".", ".%s_" % p))
    # Get rid of duplicates.
    filters = set(filters)
    gtest_filter = self._gtest_filter
    if len(filters):
        if gtest_filter:
            gtest_filter += ":"
            if gtest_filter.find("-") < 0:
                gtest_filter += "-"
        else:
            gtest_filter = "-"
        gtest_filter += ":".join(filters)
    if gtest_filter:
        cmd.append("--gtest_filter=%s" % gtest_filter)
475,265
def __FindSplit(self, string):
    """Finds a place to split a string."""
    # Prefer splitting at an assignment '=' (but not '==') before column 80.
    splitter = string.find('=')
    # BUG FIX: guard splitter + 1 against the end of the string; the
    # original indexed string[splitter + 1] unconditionally and raised
    # IndexError when '=' was the final character.
    if (splitter >= 0 and splitter + 1 < len(string) and
        not string[splitter + 1] == '=' and splitter < 80):
        return splitter
    # Otherwise try to split after an opening parenthesis (skipping
    # "if (" conditions and empty "()" pairs).
    parts = string.split('(')
    if len(parts) > 1:
        splitter = len(parts[0])
        for ii in range(1, len(parts)):
            if (not parts[ii - 1][-3:] == "if " and
                (len(parts[ii]) > 0 and not parts[ii][0] == ")") and
                splitter < 80):
                return splitter
            splitter += len(parts[ii]) + 1
    # Finally fall back to the last comma before column 80; -1 if none.
    done = False
    end = len(string)
    last_splitter = -1
    while not done:
        splitter = string[0:end].rfind(',')
        if splitter < 0:
            return last_splitter
        elif splitter >= 80:
            end = splitter
        else:
            return splitter
def __FindSplit(self, string):
    """Finds a place to split a string."""
    # Prefer splitting at an assignment '=' (but not '==') before column 80.
    # NOTE(review): string[splitter + 1] raises IndexError if '=' is the
    # final character -- confirm inputs always have trailing text.
    splitter = string.find('=')
    if splitter >= 0 and not string[splitter + 1] == '=' and splitter < 80:
        return splitter
    # Otherwise split after an opening parenthesis, skipping "if ("
    # conditions, empty "()" pairs, and (presumably) function-pointer
    # parameter lists via the module-level `fptr` regex -- TODO confirm.
    parts = string.split('(')
    if len(parts) > 1:
        splitter = len(parts[0])
        for ii in range(1, len(parts)):
            if (not parts[ii - 1][-3:] == "if " and
                (len(parts[ii]) > 0 and
                 not parts[ii][0] == ")" and
                 not fptr.match(parts[ii])) and
                splitter < 80):
                return splitter
            splitter += len(parts[ii]) + 1
    # Finally fall back to the last comma before column 80; -1 if none.
    done = False
    end = len(string)
    last_splitter = -1
    while not done:
        splitter = string[0:end].rfind(',')
        if splitter < 0:
            return last_splitter
        elif splitter >= 80:
            end = splitter
        else:
            return splitter
475,266
def __init__(self, filename, file_comment = None):
    # Forward to CWriter.  `file_comment` is accepted here but not used in
    # the visible code -- presumably consumed elsewhere; TODO confirm.
    CWriter.__init__(self, filename)
def __init__(self, filename, file_comment = None, guard_depth = None):
    # Forward to CWriter.  `file_comment` and `guard_depth` are accepted
    # here but not used in the visible code -- presumably consumed
    # elsewhere; TODO confirm.
    CWriter.__init__(self, filename)
475,267
def __init__(self, filename, file_comment = None):
    # Forward to CWriter.  `file_comment` is accepted here but not used in
    # the visible code -- presumably consumed elsewhere; TODO confirm.
    CWriter.__init__(self, filename)
def __init__(self, filename, file_comment = None):
    # Forward to CWriter.  `file_comment` is accepted here but not used in
    # the visible code -- presumably consumed elsewhere; TODO confirm.
    CWriter.__init__(self, filename)
475,268
def WriteServiceUtilsImplementation(self, filename): """Writes the gles2 auto generated utility implementation.""" file = CHeaderWriter(filename) enums = sorted(_ENUM_LISTS.keys()) for enum in enums: if len(_ENUM_LISTS[enum]['valid']) > 0: file.Write("static %s valid_%s_table[] = {\n" % (_ENUM_LISTS[enum]['type'], ToUnderscore(enum))) for value in _ENUM_LISTS[enum]['valid']: file.Write(" %s,\n" % value) file.Write("};\n") file.Write("\n") file.Write("Validators::Validators()\n") pre = ': ' post = ',' count = 0 for enum in enums: count += 1 if count == len(enums): post = ' {' if len(_ENUM_LISTS[enum]['valid']) > 0: code = """ %(pre)s%(name)s( valid_%(name)s_table, arraysize(valid_%(name)s_table))%(post)s
def WriteServiceUtilsImplementation(self, filename): """Writes the gles2 auto generated utility implementation.""" file = CHeaderWriter(filename) enums = sorted(_ENUM_LISTS.keys()) for enum in enums: if len(_ENUM_LISTS[enum]['valid']) > 0: file.Write("static %s valid_%s_table[] = {\n" % (_ENUM_LISTS[enum]['type'], ToUnderscore(enum))) for value in _ENUM_LISTS[enum]['valid']: file.Write(" %s,\n" % value) file.Write("};\n") file.Write("\n") file.Write("Validators::Validators()\n") pre = ': ' post = ',' count = 0 for enum in enums: count += 1 if count == len(enums): post = ' {' if len(_ENUM_LISTS[enum]['valid']) > 0: code = """ %(pre)s%(name)s( valid_%(name)s_table, arraysize(valid_%(name)s_table))%(post)s
475,269
def main(argv):
    """This is the main function."""
    parser = OptionParser()
    parser.add_option(
        "-g", "--generate-implementation-templates",
        action="store_true",
        help="generates files that are generally hand edited..")
    parser.add_option(
        "--generate-command-id-tests",
        action="store_true",
        help="generate tests for commands ids. Commands MUST not change ID!")
    parser.add_option(
        "--generate-docs",
        action="store_true",
        help="generate a docs friendly version of the command formats.")
    parser.add_option(
        "-v", "--verbose", action="store_true", help="prints more output.")
    (options, args) = parser.parse_args(args=argv)
    gen = GLGenerator(options.verbose)
    gen.ParseGLH("common/GLES2/gl2.h")
    # Generate every auto-generated header from the parsed GL definitions.
    gen.WriteCommandIds("common/gles2_cmd_ids_autogen.h")
    gen.WriteFormat("common/gles2_cmd_format_autogen.h")
    gen.WriteFormatTest("common/gles2_cmd_format_test_autogen.h")
    gen.WriteGLES2ImplementationHeader("client/gles2_implementation_autogen.h")
    gen.WriteGLES2CLibImplementation("client/gles2_c_lib_autogen.h")
    gen.WriteCmdHelperHeader("client/gles2_cmd_helper_autogen.h")
    gen.WriteServiceImplementation("service/gles2_cmd_decoder_autogen.h")
    gen.WriteServiceUnitTests("service/gles2_cmd_decoder_unittest_%d_autogen.h")
    gen.WriteServiceUtilsHeader("service/gles2_cmd_validation_autogen.h")
    gen.WriteServiceUtilsImplementation(
        "service/gles2_cmd_validation_implementation_autogen.h")
    if options.generate_command_id_tests:
        gen.WriteCommandIdTest("common/gles2_cmd_id_test_autogen.h")
    if options.generate_docs:
        gen.WriteDocs("docs/gles2_cmd_format_docs_autogen.h")
    # Any accumulated generator errors fail the run.
    if gen.errors > 0:
        print "%d errors" % gen.errors
        sys.exit(1)
def main(argv): """This is the main function.""" parser = OptionParser() parser.add_option( "-g", "--generate-implementation-templates", action="store_true", help="generates files that are generally hand edited..") parser.add_option( "--generate-command-id-tests", action="store_true", help="generate tests for commands ids. Commands MUST not change ID!") parser.add_option( "--generate-docs", action="store_true", help="generate a docs friendly version of the command formats.") parser.add_option( "-v", "--verbose", action="store_true", help="prints more output.") (options, args) = parser.parse_args(args=argv) gen = GLGenerator(options.verbose) gen.ParseGLH("common/GLES2/gl2.h") if options.alternate_mode == "ppapi": gen.WritePepperGLES2Interface("ppapi/c/ppb_opengles.h") elif options.alternate_mode == "chrome_ppapi": gen.WritePepperGLES2Implementation("webkit/glue/plugins/pepper_graphics_3d_gl.cc") else: gen.WriteCommandIds("common/gles2_cmd_ids_autogen.h") gen.WriteFormat("common/gles2_cmd_format_autogen.h") gen.WriteFormatTest("common/gles2_cmd_format_test_autogen.h") gen.WriteGLES2ImplementationHeader("client/gles2_implementation_autogen.h") gen.WriteGLES2CLibImplementation("client/gles2_c_lib_autogen.h") gen.WriteCmdHelperHeader("client/gles2_cmd_helper_autogen.h") gen.WriteServiceImplementation("service/gles2_cmd_decoder_autogen.h") gen.WriteServiceUnitTests("service/gles2_cmd_decoder_unittest_%d_autogen.h") gen.WriteServiceUtilsHeader("service/gles2_cmd_validation_autogen.h") gen.WriteServiceUtilsImplementation( "service/gles2_cmd_validation_implementation_autogen.h") if options.generate_command_id_tests: gen.WriteCommandIdTest("common/gles2_cmd_id_test_autogen.h") if options.generate_docs: gen.WriteDocs("docs/gles2_cmd_format_docs_autogen.h") if gen.errors > 0: print "%d errors" % gen.errors sys.exit(1)
475,270
def ParseGLH(self, filename):
    """Parses the GL2.h file and extracts the functions

    NOTE(review): despite the name and parameter, this iterates the
    module-level _GL_FUNCTIONS string; `filename` is unused here -- confirm
    callers.
    """
    for line in _GL_FUNCTIONS.splitlines():
        match = self._function_re.match(line)
        if match:
            # Strip the "gl" prefix from the matched function name.
            func_name = match.group(2)[2:]
            func_info = self.GetFunctionInfo(func_name)
            if func_info.type != 'Noop':
                return_type = match.group(1).strip()
                arg_string = match.group(3)
                (args, num_pointer_args, is_gl_enum) = self.ParseArgs(arg_string)
                # comment in to find out which functions use bare enums.
                # if is_gl_enum:
                #   self.Log("%s uses bare GLenum" % func_name)
                args_for_cmds = args
                # Some functions override their command arguments via
                # per-function info.
                if hasattr(func_info, 'cmd_args'):
                    (args_for_cmds, num_pointer_args, is_gl_enum) = (
                        self.ParseArgs(getattr(func_info, 'cmd_args')))
                cmd_args = []
                for arg in args_for_cmds:
                    arg.AddCmdArgs(cmd_args)
                init_args = []
                for arg in args_for_cmds:
                    arg.AddInitArgs(init_args)
                # Non-void functions get a trailing "result" argument.
                return_arg = CreateArg(return_type + " result")
                if return_arg:
                    init_args.append(return_arg)
                f = Function(func_name, func_name, func_info, return_type, args,
                             args_for_cmds, cmd_args, init_args, num_pointer_args)
                self.original_functions.append(f)
                self.AddFunction(f)
                # Let the type handler register derived variants.
                f.type_handler.AddImmediateFunction(self, f)
                f.type_handler.AddBucketFunction(self, f)
def ParseGLH(self, filename):
    """Parses the GL2.h file and extracts the functions

    NOTE(review): despite the name and parameter, this iterates the
    module-level _GL_FUNCTIONS string; `filename` is unused here -- confirm
    callers.
    """
    for line in _GL_FUNCTIONS.splitlines():
        match = self._function_re.match(line)
        if match:
            # Strip the "gl" prefix from the matched function name.
            func_name = match.group(2)[2:]
            func_info = self.GetFunctionInfo(func_name)
            if func_info.type != 'Noop':
                return_type = match.group(1).strip()
                arg_string = match.group(3)
                (args, num_pointer_args, is_gl_enum) = self.ParseArgs(arg_string)
                # comment in to find out which functions use bare enums.
                # if is_gl_enum:
                #   self.Log("%s uses bare GLenum" % func_name)
                args_for_cmds = args
                # Some functions override their command arguments via
                # per-function info.
                if hasattr(func_info, 'cmd_args'):
                    (args_for_cmds, num_pointer_args, is_gl_enum) = (
                        self.ParseArgs(getattr(func_info, 'cmd_args')))
                cmd_args = []
                for arg in args_for_cmds:
                    arg.AddCmdArgs(cmd_args)
                init_args = []
                for arg in args_for_cmds:
                    arg.AddInitArgs(init_args)
                # Non-void functions get a trailing "result" argument.
                return_arg = CreateArg(return_type + " result")
                if return_arg:
                    init_args.append(return_arg)
                f = Function(func_name, func_name, func_info, return_type, args,
                             args_for_cmds, cmd_args, init_args, num_pointer_args)
                self.original_functions.append(f)
                self.AddFunction(f)
                # Let the type handler register derived variants.
                f.type_handler.AddImmediateFunction(self, f)
                f.type_handler.AddBucketFunction(self, f)
475,271
def WriteCommandIds(self, filename):
    """Writes the command buffer format"""
    file = CHeaderWriter(filename)
    file.Write("#define GLES2_COMMAND_LIST(OP) \\\n")
    by_id = {}
    # Every function must have a fixed id in _CMD_ID_TABLE; ids must never
    # change between releases.
    for func in self.functions:
        if not func.name in _CMD_ID_TABLE:
            self.Error("Command %s not in _CMD_ID_TABLE" % func.name)
        by_id[_CMD_ID_TABLE[func.name]] = func
    # Emit the OP() list in ascending id order.
    for id in sorted(by_id.keys()):
        file.Write(" %-60s /* %d */ \\\n" % ("OP(%s)" % by_id[id].name, id))
    file.Write("\n")
def WriteCommandIds(self, filename):
    """Writes the command buffer format"""
    file = CHeaderWriter(filename)
    file.Write("#define GLES2_COMMAND_LIST(OP) \\\n")
    by_id = {}
    # Every function must have a fixed id in _CMD_ID_TABLE; ids must never
    # change between releases.  (Removed a redundant `if True:` guard that
    # only added dead nesting.)
    for func in self.functions:
        if not func.name in _CMD_ID_TABLE:
            self.Error("Command %s not in _CMD_ID_TABLE" % func.name)
        by_id[_CMD_ID_TABLE[func.name]] = func
    # Emit the OP() list in ascending id order.
    for id in sorted(by_id.keys()):
        file.Write(" %-60s /* %d */ \\\n" % ("OP(%s)" % by_id[id].name, id))
    file.Write("\n")
475,272
def WriteFormat(self, filename):
    """Writes the command buffer format"""
    file = CHeaderWriter(filename)
    # Emit one command struct per function.
    for func in self.functions:
        func.WriteStruct(file)
    file.Write("\n")
    file.Close()
def WriteFormat(self, filename):
    """Writes the command buffer format"""
    file = CHeaderWriter(filename)
    # Emit one command struct per function.  (Removed a redundant
    # `if True:` guard that only added dead nesting.)
    for func in self.functions:
        func.WriteStruct(file)
    file.Write("\n")
    file.Close()
475,273
def WriteDocs(self, filename):
    """Writes the command buffer doc version of the commands"""
    file = CWriter(filename)
    # Emit one documentation entry per function.
    for func in self.functions:
        func.WriteDocs(file)
    file.Write("\n")
    file.Close()
def WriteDocs(self, filename):
    """Writes the command buffer doc version of the commands"""
    file = CWriter(filename)
    # Emit one documentation entry per function.  (Removed a redundant
    # `if True:` guard that only added dead nesting.)
    for func in self.functions:
        func.WriteDocs(file)
    file.Write("\n")
    file.Close()
475,274
def WriteFormatTest(self, filename):
    """Writes the command buffer format test."""
    # NOTE(review): only the writer construction is visible in this dump --
    # the body appears truncated.
    file = CHeaderWriter(
        filename,
        "// This file contains unit tests for gles2 commmands\n"
        "// It is included by gles2_cmd_format_test.cc\n"
        "\n")
def WriteFormatTest(self, filename):
    """Writes the command buffer format test."""
    # NOTE(review): only the writer construction is visible in this dump --
    # the body appears truncated.
    file = CHeaderWriter(
        filename,
        "// This file contains unit tests for gles2 commmands\n"
        "// It is included by gles2_cmd_format_test.cc\n"
        "\n")
475,275
def WriteCommandIdTest(self, filename):
    """Writes the command id test."""
    # NOTE(review): only the writer construction is visible in this dump --
    # the body appears truncated.
    file = CHeaderWriter(
        filename,
        "// This file contains unit tests for gles2 commmand ids\n")
def WriteCommandIdTest(self, filename):
    """Writes the command id test."""
    # NOTE(review): only the writer construction is visible in this dump --
    # the body appears truncated.
    file = CHeaderWriter(
        filename,
        "// This file contains unit tests for gles2 commmand ids\n")
475,276
def WriteCmdHelperHeader(self, filename):
    """Writes the gles2 command helper."""
    # NOTE(review): only the writer construction is visible in this dump --
    # the body appears truncated.
    file = CHeaderWriter(filename)
def WriteCmdHelperHeader(self, filename):
    """Writes the gles2 command helper."""
    # NOTE(review): only the writer construction is visible in this dump --
    # the body appears truncated.
    file = CHeaderWriter(filename)
475,277
def WriteServiceImplementation(self, filename):
    """Writes the service decorder implementation."""
    # NOTE(review): only the writer construction is visible in this dump --
    # the body appears truncated.
    file = CHeaderWriter(
        filename,
        "// It is included by gles2_cmd_decoder.cc\n")
def WriteServiceImplementation(self, filename):
    """Writes the service decorder implementation."""
    # NOTE(review): only the writer construction is visible in this dump --
    # the body appears truncated.
    file = CHeaderWriter(
        filename,
        "// It is included by gles2_cmd_decoder.cc\n")
475,278
def WriteServiceUnitTests(self, filename):
    """Writes the service decorder unit tests."""
    num_tests = len(self.functions)
    # Split the tests across two generated files.  NOTE(review): `/` here
    # relies on Python 2 integer division -- would need `//` under py3.
    step = (num_tests + 1) / 2
    count = 0
    for test_num in range(0, num_tests, step):
        count += 1
        # `filename` is a template containing %d for the file number.
        name = filename % count
        file = CHeaderWriter(
            name,
            "// It is included by gles2_cmd_decoder_unittest_%d.cc\n" % count)
        file.SetFileNum(count)
        end = test_num + step
        if end > num_tests:
            end = num_tests
        for idx in range(test_num, end):
            func = self.functions[idx]
            # Functions explicitly marked unit_test=False get a TODO stub.
            if func.GetInfo('unit_test') == False:
                file.Write("// TODO(gman): %s\n" % func.name)
            else:
                func.WriteServiceUnitTest(file)
def WriteServiceUnitTests(self, filename):
    """Writes the service decorder unit tests."""
    num_tests = len(self.functions)
    # Split the tests across two generated files.  NOTE(review): `/` here
    # relies on Python 2 integer division -- would need `//` under py3.
    step = (num_tests + 1) / 2
    count = 0
    for test_num in range(0, num_tests, step):
        count += 1
        # `filename` is a template containing %d for the file number.
        name = filename % count
        file = CHeaderWriter(
            name,
            "// It is included by gles2_cmd_decoder_unittest_%d.cc\n" % count)
        file.SetFileNum(count)
        end = test_num + step
        if end > num_tests:
            end = num_tests
        for idx in range(test_num, end):
            func = self.functions[idx]
            # Functions explicitly marked unit_test=False get a TODO stub.
            if func.GetInfo('unit_test') == False:
                file.Write("// TODO(gman): %s\n" % func.name)
            else:
                func.WriteServiceUnitTest(file)
475,279
def __str__(self):
    """Human-readable failure description: error code first, then message."""
    # BUG FIX: the original interpolated (self.message, repr(self.error_code)),
    # producing "Failed with code <message>: <error_code>" -- the arguments
    # were swapped relative to the format string's wording.
    return 'Failed with code %s: %s' % (repr(self.error_code), self.message)
def __str__(self):
    """Human-readable failure description: error code first, then message."""
    # BUG FIX: the original interpolated (self.message, repr(self.error_code)),
    # producing "Failed with code <message>: <error_code>" -- the arguments
    # were swapped relative to the format string's wording.
    return 'Failed with code %s: %s' % (repr(self.error_code), self.message)
475,280
def StubFunction(cls, signature): """Generates a stub function definition for the given signature.
def StubFunction(cls, signature): """Generates a stub function definition for the given signature.
475,281
def _LocateBinDirs():
    """Appends the per-platform Chrome build output dirs to sys.path."""
    here = os.path.dirname(__file__)
    # chrome/src is three levels up from this script.
    chrome_src = os.path.join(here, os.pardir, os.pardir, os.pardir)
    win_dirs = [os.path.join(chrome_src, 'chrome', 'Debug'),
                os.path.join(chrome_src, 'chrome', 'Release')]
    bin_dirs = {
        'linux2': [os.path.join(chrome_src, 'out', 'Debug'),
                   os.path.join(chrome_src, 'sconsbuild', 'Debug'),
                   os.path.join(chrome_src, 'out', 'Release'),
                   os.path.join(chrome_src, 'sconsbuild', 'Release')],
        'darwin': [os.path.join(chrome_src, 'xcodebuild', 'Debug'),
                   os.path.join(chrome_src, 'xcodebuild', 'Release')],
        'win32': win_dirs,
        'cygwin': win_dirs,
    }
    # Unknown platforms simply add nothing.
    sys.path.extend(bin_dirs.get(sys.platform, []))
def _LocateBinDirs(): script_dir = os.path.dirname(__file__) chrome_src = os.path.join(script_dir, os.pardir, os.pardir, os.pardir) bin_dirs = { 'linux2': [ os.path.join(chrome_src, 'out', 'Debug'), os.path.join(chrome_src, 'sconsbuild', 'Debug'), os.path.join(chrome_src, 'out', 'Release'), os.path.join(chrome_src, 'sconsbuild', 'Release')], 'darwin': [ os.path.join(chrome_src, 'xcodebuild', 'Debug'), os.path.join(chrome_src, 'xcodebuild', 'Release')], 'win32': [ os.path.join(chrome_src, 'chrome', 'Debug'), os.path.join(chrome_src, 'chrome', 'Release')], 'cygwin': [ os.path.join(chrome_src, 'chrome', 'Debug'), os.path.join(chrome_src, 'chrome', 'Release')], } sys.path += bin_dirs.get(sys.platform, [])
475,282
def testNotificationReplacement(self): """Test that we can replace a notification using the replaceId.""" self._AllowAllOrigins() self.NavigateToURL(self.TEST_PAGE_URL) self._CreateHTMLNotification(self.NO_SUCH_URL, 'chat') self.WaitForNotificationCount(1) self._CreateHTMLNotification(self.NO_SUCH_URL2, 'chat') notifications = self.GetActiveNotifications() self.assertEquals(1, len(notifications)) self.assertEquals(self.NO_SUCH_URL2, notifications[0]['content_url'])
def testNotificationReplacement(self): """Test that we can replace a notification using the replaceId.""" self._AllowAllOrigins() self.NavigateToURL(self.TEST_PAGE_URL) self._CreateHTMLNotification(self.NO_SUCH_URL, 'chat') notifications = self.GetActiveNotifications() self.assertEquals(1, len(notifications)) self.assertEquals(self.NO_SUCH_URL2, notifications[0]['content_url'])
475,283
def testNotificationReplacement(self): """Test that we can replace a notification using the replaceId.""" self._AllowAllOrigins() self.NavigateToURL(self.TEST_PAGE_URL) self._CreateHTMLNotification(self.NO_SUCH_URL, 'chat') self.WaitForNotificationCount(1) self._CreateHTMLNotification(self.NO_SUCH_URL2, 'chat') notifications = self.GetActiveNotifications() self.assertEquals(1, len(notifications)) self.assertEquals(self.NO_SUCH_URL2, notifications[0]['content_url'])
def testNotificationReplacement(self): """Test that we can replace a notification using the replaceId.""" self._AllowAllOrigins() self.NavigateToURL(self.TEST_PAGE_URL) self._CreateHTMLNotification(self.NO_SUCH_URL, 'chat') self.WaitForNotificationCount(1) self._CreateHTMLNotification(self.NO_SUCH_URL2, 'chat') notifications = self.GetActiveNotifications() self.assertEquals(1, len(notifications)) self.assertEquals(self.NO_SUCH_URL2, notifications[0]['content_url'])
475,284
def __init__(self, tag, name, parent_tag, sync_type): self.tag = tag self.name = name self.parent_tag = parent_tag self.sync_type = sync_type
def __init__(self, tag, name, parent_tag, sync_type): self.tag = tag self.name = name self.parent_tag = parent_tag self.sync_type = sync_type
475,285
def CheckChange(input_api, output_api): """Checks the memcheck suppressions files for bad data.""" errors = [] skip_next_line = False func_re = input_api.re.compile('[a-z_.]+\(.+\)$') for f, line_num, line in input_api.RightHandSideLines(lambda x: x.LocalPath().endswith('.txt')): line = line.lstrip() if line.startswith('#') or not line: continue if skip_next_line: skip_next_line = False continue if (line.startswith('fun:') or line.startswith('obj:') or line.startswith('Memcheck:') or line == '}' or line == '...'): continue if func_re.match(line): continue if line == '{': skip_next_line = True continue errors.append('"%s" is probably wrong: %s line %s' % (line, f.LocalPath(), line_num)) if errors: return [output_api.PresubmitError('\n'.join(errors))] return []
def CheckChange(input_api, output_api): """Checks the memcheck suppressions files for bad data.""" errors = [] skip_next_line = False func_re = input_api.re.compile('[a-z_.]+\(.+\)$') for f, line_num, line in input_api.RightHandSideLines(lambda x: x.LocalPath().endswith('.txt')): line = line.lstrip() if line.startswith('#') or not line: continue if skip_next_line: skip_next_line = False continue if (line.startswith('fun:') or line.startswith('obj:') or line.startswith('Memcheck:') or line == '}' or line == '...'): continue if func_re.match(line): continue errors.append('"%s" is probably wrong: %s line %s' % (line, f.LocalPath(), line_num)) if errors: return [output_api.PresubmitError('\n'.join(errors))] return []
475,286
def RunGit(command): """Run a git subcommand, returning its output.""" # On Windows, use shell=True to get PATH interpretation. shell = (os.name == 'nt') proc = subprocess.Popen(['git'] + command, shell=shell, stdout=subprocess.PIPE) return proc.communicate()[0].strip()
def RunGit(command): """Run a git subcommand, returning its output.""" # On Windows, use shell=True to get PATH interpretation. shell = (os.name == 'nt') proc = subprocess.Popen(command, shell=shell, stdout=subprocess.PIPE) out = proc.communicate()[0].strip() logging.info('Returned "%s"' % out) return out
475,287
def FindSVNRev(target_rev): """Map an SVN revision to a git hash. Like 'git svn find-rev' but without the git-svn bits.""" # We iterate through the commit log looking for "git-svn-id" lines, # which contain the SVN revision of that commit. We can stop once # we've found our target (or hit a revision number lower than what # we're looking for, indicating not found). target_rev = int(target_rev) # regexp matching the "commit" line from the log. commit_re = re.compile(r'^commit ([a-f\d]{40})$') # regexp matching the git-svn line from the log. git_svn_re = re.compile(r'^\s+git-svn-id: [^@]+@(\d+) ') log = subprocess.Popen(['git', 'log', '--no-color', '--first-parent', '--pretty=medium', 'origin'], shell=(os.name == 'nt'), stdout=subprocess.PIPE) # Track whether we saw a revision *later* than the one we're seeking. saw_later = False for line in log.stdout: match = commit_re.match(line) if match: commit = match.group(1) continue match = git_svn_re.match(line) if match: rev = int(match.group(1)) if rev <= target_rev: log.stdout.close() # Break pipe. if rev < target_rev: if not saw_later: return None # Can't be sure whether this rev is ok. print ("WARNING: r%d not found, so using next nearest earlier r%d" % (target_rev, rev)) return commit else: saw_later = True print "Error: reached end of log without finding commit info." print "Something has likely gone horribly wrong." return None
def FindSVNRev(target_rev): """Map an SVN revision to a git hash. Like 'git svn find-rev' but without the git-svn bits.""" # We iterate through the commit log looking for "git-svn-id" lines, # which contain the SVN revision of that commit. We can stop once # we've found our target (or hit a revision number lower than what # we're looking for, indicating not found). target_rev = int(target_rev) # regexp matching the "commit" line from the log. commit_re = re.compile(r'^commit ([a-f\d]{40})$') # regexp matching the git-svn line from the log. git_svn_re = re.compile(r'^\s+git-svn-id: [^@]+@(\d+) ') cmd = ['git', 'log', '--no-color', '--first-parent', '--pretty=medium', 'origin/master'] logging.info(' '.join(cmd)) log = subprocess.Popen(cmd, shell=(os.name == 'nt'), stdout=subprocess.PIPE) # Track whether we saw a revision *later* than the one we're seeking. saw_later = False for line in log.stdout: match = commit_re.match(line) if match: commit = match.group(1) continue match = git_svn_re.match(line) if match: rev = int(match.group(1)) if rev <= target_rev: log.stdout.close() # Break pipe. if rev < target_rev: if not saw_later: return None # Can't be sure whether this rev is ok. print ("WARNING: r%d not found, so using next nearest earlier r%d" % (target_rev, rev)) return commit else: saw_later = True print "Error: reached end of log without finding commit info." print "Something has likely gone horribly wrong." return None
475,288
def PrepareListsAndPrintOutput(self, write): """Create appropriate subsets of test lists and returns a ResultSummary object. Also prints expected test counts.
def PrepareListsAndPrintOutput(self, write): """Create appropriate subsets of test lists and returns a ResultSummary object. Also prints expected test counts.
475,289
def main(options, args): logfile = open('testserver.log', 'w') sys.stdout = FileMultiplexer(sys.stdout, logfile) sys.stderr = FileMultiplexer(sys.stderr, logfile) port = options.port if options.server_type == SERVER_HTTP: if options.cert: # let's make sure the cert file exists. if not os.path.isfile(options.cert): print 'specified server cert file not found: ' + options.cert + \ ' exiting...' return for ca_cert in options.ssl_client_ca: if not os.path.isfile(ca_cert): print 'specified trusted client CA file not found: ' + ca_cert + \ ' exiting...' return server = HTTPSServer(('127.0.0.1', port), TestPageHandler, options.cert, options.ssl_client_auth, options.ssl_client_ca, options.ssl_bulk_cipher) print 'HTTPS server started on port %d...' % server.server_port else: server = StoppableHTTPServer(('127.0.0.1', port), TestPageHandler) print 'HTTP server started on port %d...' % server.server_port server.data_dir = MakeDataDir() server.file_root_url = options.file_root_url listen_port = server.server_port server._device_management_handler = None elif options.server_type == SERVER_SYNC: server = SyncHTTPServer(('127.0.0.1', port), SyncPageHandler) print 'Sync HTTP server started on port %d...' % server.server_port listen_port = server.server_port # means FTP Server else: my_data_dir = MakeDataDir() # Instantiate a dummy authorizer for managing 'virtual' users authorizer = pyftpdlib.ftpserver.DummyAuthorizer() # Define a new user having full r/w permissions and a read-only # anonymous user authorizer.add_user('chrome', 'chrome', my_data_dir, perm='elradfmw') authorizer.add_anonymous(my_data_dir) # Instantiate FTP handler class ftp_handler = pyftpdlib.ftpserver.FTPHandler ftp_handler.authorizer = authorizer # Define a customized banner (string returned when client connects) ftp_handler.banner = ("pyftpdlib %s based ftpd ready." 
% pyftpdlib.ftpserver.__ver__) # Instantiate FTP server class and listen to 127.0.0.1:port address = ('127.0.0.1', port) server = pyftpdlib.ftpserver.FTPServer(address, ftp_handler) listen_port = server.socket.getsockname()[1] print 'FTP server started on port %d...' % listen_port # Notify the parent that we've started. (BaseServer subclasses # bind their sockets on construction.) if options.startup_pipe is not None: if sys.platform == 'win32': fd = msvcrt.open_osfhandle(options.startup_pipe, 0) else: fd = options.startup_pipe startup_pipe = os.fdopen(fd, "w") # Write the listening port as a 2 byte value. This is _not_ using # network byte ordering since the other end of the pipe is on the same # machine. startup_pipe.write(struct.pack('@H', listen_port)) startup_pipe.close() try: server.serve_forever() except KeyboardInterrupt: print 'shutting down server' server.stop = True
def main(options, args): logfile = open('testserver.log', 'w') sys.stdout = FileMultiplexer(sys.stdout, logfile) sys.stderr = FileMultiplexer(sys.stderr, logfile) port = options.port if options.server_type == SERVER_HTTP: if options.cert: # let's make sure the cert file exists. if not os.path.isfile(options.cert): print 'specified server cert file not found: ' + options.cert + \ ' exiting...' return for ca_cert in options.ssl_client_ca: if not os.path.isfile(ca_cert): print 'specified trusted client CA file not found: ' + ca_cert + \ ' exiting...' return server = HTTPSServer(('127.0.0.1', port), TestPageHandler, options.cert, options.ssl_client_auth, options.ssl_client_ca, options.ssl_bulk_cipher) print 'HTTPS server started on port %d...' % server.server_port else: server = StoppableHTTPServer(('127.0.0.1', port), TestPageHandler) print 'HTTP server started on port %d...' % server.server_port server.data_dir = MakeDataDir() server.file_root_url = options.file_root_url listen_port = server.server_port server._device_management_handler = None elif options.server_type == SERVER_SYNC: server = SyncHTTPServer(('127.0.0.1', port), SyncPageHandler) print 'Sync HTTP server started on port %d...' % server.server_port listen_port = server.server_port # means FTP Server else: my_data_dir = MakeDataDir() # Instantiate a dummy authorizer for managing 'virtual' users authorizer = pyftpdlib.ftpserver.DummyAuthorizer() # Define a new user having full r/w permissions and a read-only # anonymous user authorizer.add_user('chrome', 'chrome', my_data_dir, perm='elradfmw') authorizer.add_anonymous(my_data_dir) # Instantiate FTP handler class ftp_handler = pyftpdlib.ftpserver.FTPHandler ftp_handler.authorizer = authorizer # Define a customized banner (string returned when client connects) ftp_handler.banner = ("pyftpdlib %s based ftpd ready." 
% pyftpdlib.ftpserver.__ver__) # Instantiate FTP server class and listen to 127.0.0.1:port address = ('127.0.0.1', port) server = pyftpdlib.ftpserver.FTPServer(address, ftp_handler) listen_port = server.socket.getsockname()[1] print 'FTP server started on port %d...' % listen_port # Notify the parent that we've started. (BaseServer subclasses # bind their sockets on construction.) if options.startup_pipe is not None: if sys.platform == 'win32': fd = msvcrt.open_osfhandle(options.startup_pipe, 0) else: fd = options.startup_pipe startup_pipe = os.fdopen(fd, "w") # Write the listening port as a 2 byte value. This is _not_ using # network byte ordering since the other end of the pipe is on the same # machine. startup_pipe.write(struct.pack('@H', listen_port)) startup_pipe.close() try: server.serve_forever() except KeyboardInterrupt: print 'shutting down server' server.stop = True
475,290
def testDeleteNonexistentRow(self): """Attempts to delete a nonexistent row in the table.""" self.NavigateToURL(self.TEST_PAGE_URL) self._CreateTable() self._InsertRecord('text') did_throw_exception = False try: self._DeleteRecord(1) except: did_throw_exception = True self.assertTrue(did_throw_exception) self.assertEquals(['text'], self._GetRecords())
def testDeleteNonexistentRow(self): """Attempts to delete a nonexistent row in the table.""" self.NavigateToURL(self.TEST_PAGE_URL) self._CreateTable() self._InsertRecord('text') did_throw_exception = False try: self._DeleteRecord(1) except SQLExecutionError: did_throw_exception = True self.assertTrue(did_throw_exception) self.assertEquals(['text'], self._GetRecords())
475,291
def testIncognitoCannotReadRegularDatabase(self): """Attempt to read a database created in a regular browser from an incognito browser. """ self.NavigateToURL(self.TEST_PAGE_URL) self._CreateTable() self._InsertRecord('text') self.RunCommand(pyauto.IDC_NEW_INCOGNITO_WINDOW) self.NavigateToURL(self.TEST_PAGE_URL, 1, 0) can_read_regular_database = False try: # |_GetRecords| should throw an error because the table does not exist. if len(self._GetRecords(windex=1)) == 1: can_read_regular_database = True except SQLExecutionError: pass self.assertFalse(can_read_regular_database) self._CreateTable(windex=1) self.assertEqual(0, len(self._GetRecords(windex=1)))
def testIncognitoCannotReadRegularDatabase(self): """Attempt to read a database created in a regular browser from an incognito browser. """ self.NavigateToURL(self.TEST_PAGE_URL) self._CreateTable() self._InsertRecord('text') self.RunCommand(pyauto.IDC_NEW_INCOGNITO_WINDOW) self.NavigateToURL(self.TEST_PAGE_URL, 1, 0) can_read_regular_database = False try: # |_GetRecords| should throw an error because the table does not exist. if len(self._GetRecords(windex=1)) == 1: can_read_regular_database = True except SQLExecutionError: pass self.assertFalse(can_read_regular_database) self._CreateTable(windex=1) self.assertEqual(0, len(self._GetRecords(windex=1)))
475,292
def testRegularCannotReadIncognitoDatabase(self): """Attempt to read a database created in an incognito browser from a regular browser. """ self.RunCommand(pyauto.IDC_NEW_INCOGNITO_WINDOW) self.NavigateToURL(self.TEST_PAGE_URL, 1, 0) self._CreateTable(windex=1) self._InsertRecord('text', windex=1)
def testRegularCannotReadIncognitoDatabase(self): """Attempt to read a database created in an incognito browser from a regular browser. """ self.RunCommand(pyauto.IDC_NEW_INCOGNITO_WINDOW) self.NavigateToURL(self.TEST_PAGE_URL, 1, 0) self._CreateTable(windex=1) self._InsertRecord('text', windex=1)
475,293
def CheckNoDllRegisterServer(input_api, output_api): for f, line_num, line in input_api.RightHandSideLines(): if DLL_REGISTER_SERVER_RE.search(line): file_name = os.path.basename(f.LocalPath()) if file_name not in ['install_utils.h', 'install_utils_unittest.cc']: return [output_api.PresubmitError( '%s seems to contain a definition of DllRegisterServer.\n' 'Please search for CEEE_DEFINE_DLL_REGISTER_SERVER.' % f.LocalPath())] return []
def CheckNoDllRegisterServer(input_api, output_api): for f, line_num, line in input_api.RightHandSideLines(): if DLL_REGISTER_SERVER_RE.search(line): file_name = os.path.basename(f.LocalPath()) if file_name not in ['install_utils.h', 'install_utils_unittest.cc']: return [output_api.PresubmitError( '%s contains a definition of DllRegisterServer at line %s.\n' 'Please search for CEEE_DEFINE_DLL_REGISTER_SERVER.' % f.LocalPath())] return []
475,294
def CheckNoDllRegisterServer(input_api, output_api): for f, line_num, line in input_api.RightHandSideLines(): if DLL_REGISTER_SERVER_RE.search(line): file_name = os.path.basename(f.LocalPath()) if file_name not in ['install_utils.h', 'install_utils_unittest.cc']: return [output_api.PresubmitError( '%s seems to contain a definition of DllRegisterServer.\n' 'Please search for CEEE_DEFINE_DLL_REGISTER_SERVER.' % f.LocalPath())] return []
def CheckNoDllRegisterServer(input_api, output_api): for f, line_num, line in input_api.RightHandSideLines(): if DLL_REGISTER_SERVER_RE.search(line): file_name = os.path.basename(f.LocalPath()) if file_name not in ['install_utils.h', 'install_utils_unittest.cc']: return [output_api.PresubmitError( '%s seems to contain a definition of DllRegisterServer.\n' 'Please search for CEEE_DEFINE_DLL_REGISTER_SERVER.' % (f.LocalPath(), line_num))] return []
475,295
def WriteGLES2ImplementationHeader(self, func, file): """Writes the GLES2 Implemention.""" impl_func = func.GetInfo('impl_func') impl_decl = func.GetInfo('impl_decl') if (func.can_auto_generate and (impl_func == None or impl_func == True) and (impl_decl == None or impl_decl == True)): file.Write("%s %s(%s) {\n" % (func.return_type, func.original_name, func.MakeTypedOriginalArgString(""))) for arg in func.GetOriginalArgs(): arg.WriteClientSideValidationCode(file) file.Write(" helper_->%s(%s);\n" % (func.name, func.MakeOriginalArgString(""))) file.Write("}\n") file.Write("\n") else: self.WriteGLES2ImplementationDeclaration(func, file)
def WriteGLES2ImplementationHeader(self, func, file): """Writes the GLES2 Implemention.""" impl_func = func.GetInfo('impl_func') impl_decl = func.GetInfo('impl_decl') if (func.can_auto_generate and (impl_func == None or impl_func == True) and (impl_decl == None or impl_decl == True)): file.Write("%s %s(%s) {\n" % (func.return_type, func.original_name, func.MakeTypedOriginalArgString(""))) for arg in func.GetOriginalArgs(): arg.WriteClientSideValidationCode(file, func) file.Write(" helper_->%s(%s);\n" % (func.name, func.MakeOriginalArgString(""))) file.Write("}\n") file.Write("\n") else: self.WriteGLES2ImplementationDeclaration(func, file)
475,296
def WriteGLES2ImplementationHeader(self, func, file): """Writes the GLES2 Implemention.""" impl_func = func.GetInfo('impl_func') impl_decl = func.GetInfo('impl_decl') if (func.can_auto_generate and (impl_func == None or impl_func == True) and (impl_decl == None or impl_decl == True)): file.Write("%s %s(%s) {\n" % (func.return_type, func.original_name, func.MakeTypedOriginalArgString(""))) for arg in func.GetOriginalArgs(): arg.WriteClientSideValidationCode(file) code = """ if (Is%(type)sReservedId(%(id)s)) { SetGLError(GL_INVALID_OPERATION); return;
def WriteGLES2ImplementationHeader(self, func, file): """Writes the GLES2 Implemention.""" impl_func = func.GetInfo('impl_func') impl_decl = func.GetInfo('impl_decl') if (func.can_auto_generate and (impl_func == None or impl_func == True) and (impl_decl == None or impl_decl == True)): file.Write("%s %s(%s) {\n" % (func.return_type, func.original_name, func.MakeTypedOriginalArgString(""))) for arg in func.GetOriginalArgs(): arg.WriteClientSideValidationCode(file, func) code = """ if (Is%(type)sReservedId(%(id)s)) { SetGLError(GL_INVALID_OPERATION); return;
475,297
def WriteGLES2ImplementationHeader(self, func, file): """Writes the GLES2 Implemention.""" impl_func = func.GetInfo('impl_func') impl_decl = func.GetInfo('impl_decl') if (func.can_auto_generate and (impl_func == None or impl_func == True) and (impl_decl == None or impl_decl == True)): file.Write("%s %s(%s) {\n" % (func.return_type, func.original_name, func.MakeTypedOriginalArgString(""))) for arg in func.GetOriginalArgs(): arg.WriteClientSideValidationCode(file) code = """ if (Is%(type)sReservedId(%(id)s)) { SetGLError(GL_INVALID_OPERATION); return;
def WriteGLES2ImplementationHeader(self, func, file): """Writes the GLES2 Implemention.""" impl_func = func.GetInfo('impl_func') impl_decl = func.GetInfo('impl_decl') if (func.can_auto_generate and (impl_func == None or impl_func == True) and (impl_decl == None or impl_decl == True)): file.Write("%s %s(%s) {\n" % (func.return_type, func.original_name, func.MakeTypedOriginalArgString(""))) for arg in func.GetOriginalArgs(): arg.WriteClientSideValidationCode(file) code = """ if (Is%(type)sReservedId(%(id)s)) { SetGLError(GL_INVALID_OPERATION, "%(name)s: %(id)s reserved id"); return;
475,298
code = """ typedef %(func_name)s::Result Result;
code = """ typedef %(func_name)s::Result Result;
475,299